changelog: never inline changelog...
marmoute
r52074:dcaa2df1 default

The requested changes are too big and content was truncated; the diff shown below is incomplete.

@@ -1,500 +1,506 @@ mercurial/changelog.py
1 # changelog.py - changelog class for mercurial
1 # changelog.py - changelog class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
9 from .i18n import _
9 from .i18n import _
10 from .node import (
10 from .node import (
11 bin,
11 bin,
12 hex,
12 hex,
13 )
13 )
14 from .thirdparty import attr
14 from .thirdparty import attr
15
15
16 from . import (
16 from . import (
17 encoding,
17 encoding,
18 error,
18 error,
19 metadata,
19 metadata,
20 pycompat,
20 pycompat,
21 revlog,
21 revlog,
22 )
22 )
23 from .utils import (
23 from .utils import (
24 dateutil,
24 dateutil,
25 stringutil,
25 stringutil,
26 )
26 )
27 from .revlogutils import (
27 from .revlogutils import (
28 constants as revlog_constants,
28 constants as revlog_constants,
29 flagutil,
29 flagutil,
30 )
30 )
31
31
32 _defaultextra = {b'branch': b'default'}
32 _defaultextra = {b'branch': b'default'}
33
33
34
34
35 def _string_escape(text):
35 def _string_escape(text):
36 """
36 """
37 >>> from .pycompat import bytechr as chr
37 >>> from .pycompat import bytechr as chr
38 >>> d = {b'nl': chr(10), b'bs': chr(92), b'cr': chr(13), b'nul': chr(0)}
38 >>> d = {b'nl': chr(10), b'bs': chr(92), b'cr': chr(13), b'nul': chr(0)}
39 >>> s = b"ab%(nl)scd%(bs)s%(bs)sn%(nul)s12ab%(cr)scd%(bs)s%(nl)s" % d
39 >>> s = b"ab%(nl)scd%(bs)s%(bs)sn%(nul)s12ab%(cr)scd%(bs)s%(nl)s" % d
40 >>> s
40 >>> s
41 'ab\\ncd\\\\\\\\n\\x0012ab\\rcd\\\\\\n'
41 'ab\\ncd\\\\\\\\n\\x0012ab\\rcd\\\\\\n'
42 >>> res = _string_escape(s)
42 >>> res = _string_escape(s)
43 >>> s == _string_unescape(res)
43 >>> s == _string_unescape(res)
44 True
44 True
45 """
45 """
46 # subset of the string_escape codec
46 # subset of the string_escape codec
47 text = (
47 text = (
48 text.replace(b'\\', b'\\\\')
48 text.replace(b'\\', b'\\\\')
49 .replace(b'\n', b'\\n')
49 .replace(b'\n', b'\\n')
50 .replace(b'\r', b'\\r')
50 .replace(b'\r', b'\\r')
51 )
51 )
52 return text.replace(b'\0', b'\\0')
52 return text.replace(b'\0', b'\\0')
53
53
54
54
55 def _string_unescape(text):
55 def _string_unescape(text):
56 if b'\\0' in text:
56 if b'\\0' in text:
57 # fix up \0 without getting into trouble with \\0
57 # fix up \0 without getting into trouble with \\0
58 text = text.replace(b'\\\\', b'\\\\\n')
58 text = text.replace(b'\\\\', b'\\\\\n')
59 text = text.replace(b'\\0', b'\0')
59 text = text.replace(b'\\0', b'\0')
60 text = text.replace(b'\n', b'')
60 text = text.replace(b'\n', b'')
61 return stringutil.unescapestr(text)
61 return stringutil.unescapestr(text)
62
62
63
63
64 def decodeextra(text):
64 def decodeextra(text):
65 """
65 """
66 >>> from .pycompat import bytechr as chr
66 >>> from .pycompat import bytechr as chr
67 >>> sorted(decodeextra(encodeextra({b'foo': b'bar', b'baz': chr(0) + b'2'})
67 >>> sorted(decodeextra(encodeextra({b'foo': b'bar', b'baz': chr(0) + b'2'})
68 ... ).items())
68 ... ).items())
69 [('baz', '\\x002'), ('branch', 'default'), ('foo', 'bar')]
69 [('baz', '\\x002'), ('branch', 'default'), ('foo', 'bar')]
70 >>> sorted(decodeextra(encodeextra({b'foo': b'bar',
70 >>> sorted(decodeextra(encodeextra({b'foo': b'bar',
71 ... b'baz': chr(92) + chr(0) + b'2'})
71 ... b'baz': chr(92) + chr(0) + b'2'})
72 ... ).items())
72 ... ).items())
73 [('baz', '\\\\\\x002'), ('branch', 'default'), ('foo', 'bar')]
73 [('baz', '\\\\\\x002'), ('branch', 'default'), ('foo', 'bar')]
74 """
74 """
75 extra = _defaultextra.copy()
75 extra = _defaultextra.copy()
76 for l in text.split(b'\0'):
76 for l in text.split(b'\0'):
77 if l:
77 if l:
78 k, v = _string_unescape(l).split(b':', 1)
78 k, v = _string_unescape(l).split(b':', 1)
79 extra[k] = v
79 extra[k] = v
80 return extra
80 return extra
81
81
82
82
83 def encodeextra(d):
83 def encodeextra(d):
84 # keys must be sorted to produce a deterministic changelog entry
84 # keys must be sorted to produce a deterministic changelog entry
85 items = [_string_escape(b'%s:%s' % (k, d[k])) for k in sorted(d)]
85 items = [_string_escape(b'%s:%s' % (k, d[k])) for k in sorted(d)]
86 return b"\0".join(items)
86 return b"\0".join(items)
87
87
88
88
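The escaping and 'extra' encoding above are easy to exercise in isolation. A minimal standalone sketch (no Mercurial imports; it simply mirrors _string_escape and the NUL-joined "key:value" layout encodeextra produces, with made-up values):

# Escape as _string_escape does: backslash, newline, CR, then NUL.
def escape(text: bytes) -> bytes:
    text = (
        text.replace(b'\\', b'\\\\')
        .replace(b'\n', b'\\n')
        .replace(b'\r', b'\\r')
    )
    return text.replace(b'\0', b'\\0')

# Encode an 'extra' dict the way encodeextra does: sorted "key:value"
# pairs, escaped, joined with NUL bytes.
extra = {b'branch': b'stable', b'close': b'1'}
encoded = b'\0'.join(escape(b'%s:%s' % (k, extra[k])) for k in sorted(extra))
assert encoded == b'branch:stable\x00close:1'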
89 def stripdesc(desc):
89 def stripdesc(desc):
90 """strip trailing whitespace and leading and trailing empty lines"""
90 """strip trailing whitespace and leading and trailing empty lines"""
91 return b'\n'.join([l.rstrip() for l in desc.splitlines()]).strip(b'\n')
91 return b'\n'.join([l.rstrip() for l in desc.splitlines()]).strip(b'\n')
92
92
93
93
94 @attr.s
94 @attr.s
95 class _changelogrevision:
95 class _changelogrevision:
96 # Extensions might modify _defaultextra, so let the constructor below pass
96 # Extensions might modify _defaultextra, so let the constructor below pass
97 # it in
97 # it in
98 extra = attr.ib()
98 extra = attr.ib()
99 manifest = attr.ib()
99 manifest = attr.ib()
100 user = attr.ib(default=b'')
100 user = attr.ib(default=b'')
101 date = attr.ib(default=(0, 0))
101 date = attr.ib(default=(0, 0))
102 files = attr.ib(default=attr.Factory(list))
102 files = attr.ib(default=attr.Factory(list))
103 filesadded = attr.ib(default=None)
103 filesadded = attr.ib(default=None)
104 filesremoved = attr.ib(default=None)
104 filesremoved = attr.ib(default=None)
105 p1copies = attr.ib(default=None)
105 p1copies = attr.ib(default=None)
106 p2copies = attr.ib(default=None)
106 p2copies = attr.ib(default=None)
107 description = attr.ib(default=b'')
107 description = attr.ib(default=b'')
108 branchinfo = attr.ib(default=(_defaultextra[b'branch'], False))
108 branchinfo = attr.ib(default=(_defaultextra[b'branch'], False))
109
109
110
110
111 class changelogrevision:
111 class changelogrevision:
112 """Holds results of a parsed changelog revision.
112 """Holds results of a parsed changelog revision.
113
113
114 Changelog revisions consist of multiple pieces of data, including
114 Changelog revisions consist of multiple pieces of data, including
115 the manifest node, user, and date. This object exposes a view into
115 the manifest node, user, and date. This object exposes a view into
116 the parsed object.
116 the parsed object.
117 """
117 """
118
118
119 __slots__ = (
119 __slots__ = (
120 '_offsets',
120 '_offsets',
121 '_text',
121 '_text',
122 '_sidedata',
122 '_sidedata',
123 '_cpsd',
123 '_cpsd',
124 '_changes',
124 '_changes',
125 )
125 )
126
126
127 def __new__(cls, cl, text, sidedata, cpsd):
127 def __new__(cls, cl, text, sidedata, cpsd):
128 if not text:
128 if not text:
129 return _changelogrevision(extra=_defaultextra, manifest=cl.nullid)
129 return _changelogrevision(extra=_defaultextra, manifest=cl.nullid)
130
130
131 self = super(changelogrevision, cls).__new__(cls)
131 self = super(changelogrevision, cls).__new__(cls)
132 # We could return here and implement the following as an __init__.
132 # We could return here and implement the following as an __init__.
133 # But doing it here is equivalent and saves an extra function call.
133 # But doing it here is equivalent and saves an extra function call.
134
134
135 # format used:
135 # format used:
136 # nodeid\n : manifest node in ascii
136 # nodeid\n : manifest node in ascii
137 # user\n : user, no \n or \r allowed
137 # user\n : user, no \n or \r allowed
138 # time tz extra\n : date (time is int or float, timezone is int)
138 # time tz extra\n : date (time is int or float, timezone is int)
139 # : extra is metadata, encoded and separated by '\0'
139 # : extra is metadata, encoded and separated by '\0'
140 # : older versions ignore it
140 # : older versions ignore it
141 # files\n\n : files modified by the cset, no \n or \r allowed
141 # files\n\n : files modified by the cset, no \n or \r allowed
142 # (.*) : comment (free text, ideally utf-8)
142 # (.*) : comment (free text, ideally utf-8)
143 #
143 #
144 # changelog v0 doesn't use extra
144 # changelog v0 doesn't use extra
145
145
146 nl1 = text.index(b'\n')
146 nl1 = text.index(b'\n')
147 nl2 = text.index(b'\n', nl1 + 1)
147 nl2 = text.index(b'\n', nl1 + 1)
148 nl3 = text.index(b'\n', nl2 + 1)
148 nl3 = text.index(b'\n', nl2 + 1)
149
149
150 # The list of files may be empty, in which case nl3 is the first of the
150 # The list of files may be empty, in which case nl3 is the first of the
151 # double newline that precedes the description.
151 # double newline that precedes the description.
152 if text[nl3 + 1 : nl3 + 2] == b'\n':
152 if text[nl3 + 1 : nl3 + 2] == b'\n':
153 doublenl = nl3
153 doublenl = nl3
154 else:
154 else:
155 doublenl = text.index(b'\n\n', nl3 + 1)
155 doublenl = text.index(b'\n\n', nl3 + 1)
156
156
157 self._offsets = (nl1, nl2, nl3, doublenl)
157 self._offsets = (nl1, nl2, nl3, doublenl)
158 self._text = text
158 self._text = text
159 self._sidedata = sidedata
159 self._sidedata = sidedata
160 self._cpsd = cpsd
160 self._cpsd = cpsd
161 self._changes = None
161 self._changes = None
162
162
163 return self
163 return self
164
164
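The format comment in __new__ above maps directly onto the four byte offsets it computes. A standalone sketch with a hypothetical entry (all values invented for illustration) shows how those offsets carve up the text:

# Hypothetical changelog entry text following the layout documented above.
entry = (
    b'0123456789abcdef0123456789abcdef01234567\n'  # manifest node (hex)
    b'Alice <alice@example.com>\n'                  # user
    b'1700000000 0 branch:stable\n'                 # time tz extra
    b'dir/a.txt\n'                                  # touched files ...
    b'dir/b.txt\n'
    b'\n'                                           # blank separator line
    b'illustrative commit message\n'                # description
)
nl1 = entry.index(b'\n')
nl2 = entry.index(b'\n', nl1 + 1)
nl3 = entry.index(b'\n', nl2 + 1)
doublenl = entry.index(b'\n\n', nl3 + 1)

manifest_hex = entry[:nl1]
user = entry[nl1 + 1:nl2]
date_and_extra = entry[nl2 + 1:nl3]
files = entry[nl3 + 1:doublenl].split(b'\n')   # [b'dir/a.txt', b'dir/b.txt']
description = entry[doublenl + 2:]             # b'illustrative commit message\n'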
165 @property
165 @property
166 def manifest(self):
166 def manifest(self):
167 return bin(self._text[0 : self._offsets[0]])
167 return bin(self._text[0 : self._offsets[0]])
168
168
169 @property
169 @property
170 def user(self):
170 def user(self):
171 off = self._offsets
171 off = self._offsets
172 return encoding.tolocal(self._text[off[0] + 1 : off[1]])
172 return encoding.tolocal(self._text[off[0] + 1 : off[1]])
173
173
174 @property
174 @property
175 def _rawdate(self):
175 def _rawdate(self):
176 off = self._offsets
176 off = self._offsets
177 dateextra = self._text[off[1] + 1 : off[2]]
177 dateextra = self._text[off[1] + 1 : off[2]]
178 return dateextra.split(b' ', 2)[0:2]
178 return dateextra.split(b' ', 2)[0:2]
179
179
180 @property
180 @property
181 def _rawextra(self):
181 def _rawextra(self):
182 off = self._offsets
182 off = self._offsets
183 dateextra = self._text[off[1] + 1 : off[2]]
183 dateextra = self._text[off[1] + 1 : off[2]]
184 fields = dateextra.split(b' ', 2)
184 fields = dateextra.split(b' ', 2)
185 if len(fields) != 3:
185 if len(fields) != 3:
186 return None
186 return None
187
187
188 return fields[2]
188 return fields[2]
189
189
190 @property
190 @property
191 def date(self):
191 def date(self):
192 raw = self._rawdate
192 raw = self._rawdate
193 time = float(raw[0])
193 time = float(raw[0])
194 # Various tools did silly things with the timezone.
194 # Various tools did silly things with the timezone.
195 try:
195 try:
196 timezone = int(raw[1])
196 timezone = int(raw[1])
197 except ValueError:
197 except ValueError:
198 timezone = 0
198 timezone = 0
199
199
200 return time, timezone
200 return time, timezone
201
201
202 @property
202 @property
203 def extra(self):
203 def extra(self):
204 raw = self._rawextra
204 raw = self._rawextra
205 if raw is None:
205 if raw is None:
206 return _defaultextra
206 return _defaultextra
207
207
208 return decodeextra(raw)
208 return decodeextra(raw)
209
209
210 @property
210 @property
211 def changes(self):
211 def changes(self):
212 if self._changes is not None:
212 if self._changes is not None:
213 return self._changes
213 return self._changes
214 if self._cpsd:
214 if self._cpsd:
215 changes = metadata.decode_files_sidedata(self._sidedata)
215 changes = metadata.decode_files_sidedata(self._sidedata)
216 else:
216 else:
217 changes = metadata.ChangingFiles(
217 changes = metadata.ChangingFiles(
218 touched=self.files or (),
218 touched=self.files or (),
219 added=self.filesadded or (),
219 added=self.filesadded or (),
220 removed=self.filesremoved or (),
220 removed=self.filesremoved or (),
221 p1_copies=self.p1copies or {},
221 p1_copies=self.p1copies or {},
222 p2_copies=self.p2copies or {},
222 p2_copies=self.p2copies or {},
223 )
223 )
224 self._changes = changes
224 self._changes = changes
225 return changes
225 return changes
226
226
227 @property
227 @property
228 def files(self):
228 def files(self):
229 if self._cpsd:
229 if self._cpsd:
230 return sorted(self.changes.touched)
230 return sorted(self.changes.touched)
231 off = self._offsets
231 off = self._offsets
232 if off[2] == off[3]:
232 if off[2] == off[3]:
233 return []
233 return []
234
234
235 return self._text[off[2] + 1 : off[3]].split(b'\n')
235 return self._text[off[2] + 1 : off[3]].split(b'\n')
236
236
237 @property
237 @property
238 def filesadded(self):
238 def filesadded(self):
239 if self._cpsd:
239 if self._cpsd:
240 return self.changes.added
240 return self.changes.added
241 else:
241 else:
242 rawindices = self.extra.get(b'filesadded')
242 rawindices = self.extra.get(b'filesadded')
243 if rawindices is None:
243 if rawindices is None:
244 return None
244 return None
245 return metadata.decodefileindices(self.files, rawindices)
245 return metadata.decodefileindices(self.files, rawindices)
246
246
247 @property
247 @property
248 def filesremoved(self):
248 def filesremoved(self):
249 if self._cpsd:
249 if self._cpsd:
250 return self.changes.removed
250 return self.changes.removed
251 else:
251 else:
252 rawindices = self.extra.get(b'filesremoved')
252 rawindices = self.extra.get(b'filesremoved')
253 if rawindices is None:
253 if rawindices is None:
254 return None
254 return None
255 return metadata.decodefileindices(self.files, rawindices)
255 return metadata.decodefileindices(self.files, rawindices)
256
256
257 @property
257 @property
258 def p1copies(self):
258 def p1copies(self):
259 if self._cpsd:
259 if self._cpsd:
260 return self.changes.copied_from_p1
260 return self.changes.copied_from_p1
261 else:
261 else:
262 rawcopies = self.extra.get(b'p1copies')
262 rawcopies = self.extra.get(b'p1copies')
263 if rawcopies is None:
263 if rawcopies is None:
264 return None
264 return None
265 return metadata.decodecopies(self.files, rawcopies)
265 return metadata.decodecopies(self.files, rawcopies)
266
266
267 @property
267 @property
268 def p2copies(self):
268 def p2copies(self):
269 if self._cpsd:
269 if self._cpsd:
270 return self.changes.copied_from_p2
270 return self.changes.copied_from_p2
271 else:
271 else:
272 rawcopies = self.extra.get(b'p2copies')
272 rawcopies = self.extra.get(b'p2copies')
273 if rawcopies is None:
273 if rawcopies is None:
274 return None
274 return None
275 return metadata.decodecopies(self.files, rawcopies)
275 return metadata.decodecopies(self.files, rawcopies)
276
276
277 @property
277 @property
278 def description(self):
278 def description(self):
279 return encoding.tolocal(self._text[self._offsets[3] + 2 :])
279 return encoding.tolocal(self._text[self._offsets[3] + 2 :])
280
280
281 @property
281 @property
282 def branchinfo(self):
282 def branchinfo(self):
283 extra = self.extra
283 extra = self.extra
284 return encoding.tolocal(extra.get(b"branch")), b'close' in extra
284 return encoding.tolocal(extra.get(b"branch")), b'close' in extra
285
285
286
286
287 class changelog(revlog.revlog):
287 class changelog(revlog.revlog):
288 def __init__(self, opener, trypending=False, concurrencychecker=None):
288 def __init__(self, opener, trypending=False, concurrencychecker=None):
289 """Load a changelog revlog using an opener.
289 """Load a changelog revlog using an opener.
290
290
291 If ``trypending`` is true, we attempt to load the index from a
291 If ``trypending`` is true, we attempt to load the index from a
292 ``00changelog.i.a`` file instead of the default ``00changelog.i``.
292 ``00changelog.i.a`` file instead of the default ``00changelog.i``.
293 The ``00changelog.i.a`` file contains index (and possibly inline
293 The ``00changelog.i.a`` file contains index (and possibly inline
294 revision) data for a transaction that hasn't been finalized yet.
294 revision) data for a transaction that hasn't been finalized yet.
295 It exists in a separate file to facilitate readers (such as
295 It exists in a separate file to facilitate readers (such as
296 hook processes) accessing data before a transaction is finalized.
296 hook processes) accessing data before a transaction is finalized.
297
297
298 ``concurrencychecker`` will be passed to the revlog init function, see
298 ``concurrencychecker`` will be passed to the revlog init function, see
299 the documentation there.
299 the documentation there.
300 """
300 """
301 revlog.revlog.__init__(
301 revlog.revlog.__init__(
302 self,
302 self,
303 opener,
303 opener,
304 target=(revlog_constants.KIND_CHANGELOG, None),
304 target=(revlog_constants.KIND_CHANGELOG, None),
305 radix=b'00changelog',
305 radix=b'00changelog',
306 checkambig=True,
306 checkambig=True,
307 mmaplargeindex=True,
307 mmaplargeindex=True,
308 persistentnodemap=opener.options.get(b'persistent-nodemap', False),
308 persistentnodemap=opener.options.get(b'persistent-nodemap', False),
309 concurrencychecker=concurrencychecker,
309 concurrencychecker=concurrencychecker,
310 trypending=trypending,
310 trypending=trypending,
311 may_inline=False,
311 )
312 )
312
313
313 if self._initempty and (self._format_version == revlog.REVLOGV1):
314 if self._initempty and (self._format_version == revlog.REVLOGV1):
314 # changelogs don't benefit from generaldelta.
315 # changelogs don't benefit from generaldelta.
315
316
316 self._format_flags &= ~revlog.FLAG_GENERALDELTA
317 self._format_flags &= ~revlog.FLAG_GENERALDELTA
317 self.delta_config.general_delta = False
318 self.delta_config.general_delta = False
318
319
319 # Delta chains for changelogs tend to be very small because entries
320 # Delta chains for changelogs tend to be very small because entries
320 # tend to be small and don't delta well with each other. So disable delta
321 # tend to be small and don't delta well with each other. So disable delta
321 # chains.
322 # chains.
322 self._storedeltachains = False
323 self._storedeltachains = False
323
324
324 self._v2_delayed = False
325 self._v2_delayed = False
325 self._filteredrevs = frozenset()
326 self._filteredrevs = frozenset()
326 self._filteredrevs_hashcache = {}
327 self._filteredrevs_hashcache = {}
327 self._copiesstorage = opener.options.get(b'copies-storage')
328 self._copiesstorage = opener.options.get(b'copies-storage')
328
329
329 @property
330 @property
330 def filteredrevs(self):
331 def filteredrevs(self):
331 return self._filteredrevs
332 return self._filteredrevs
332
333
333 @filteredrevs.setter
334 @filteredrevs.setter
334 def filteredrevs(self, val):
335 def filteredrevs(self, val):
335 # Ensure all updates go through this function
336 # Ensure all updates go through this function
336 assert isinstance(val, frozenset)
337 assert isinstance(val, frozenset)
337 self._filteredrevs = val
338 self._filteredrevs = val
338 self._filteredrevs_hashcache = {}
339 self._filteredrevs_hashcache = {}
339
340
340 def _write_docket(self, tr):
341 def _write_docket(self, tr):
341 if not self._v2_delayed:
342 if not self._v2_delayed:
342 super(changelog, self)._write_docket(tr)
343 super(changelog, self)._write_docket(tr)
343
344
344 def delayupdate(self, tr):
345 def delayupdate(self, tr):
345 """delay visibility of index updates to other readers"""
346 """delay visibility of index updates to other readers"""
346 assert not self._inner.is_open
347 assert not self._inner.is_open
348 assert not self._may_inline
349 # enforce that older changelogs that are still inline are split at the
350 # first opportunity.
351 if self._inline:
352 self._enforceinlinesize(tr)
347 if self._docket is not None:
353 if self._docket is not None:
348 self._v2_delayed = True
354 self._v2_delayed = True
349 else:
355 else:
350 new_index = self._inner.delay()
356 new_index = self._inner.delay()
351 if new_index is not None:
357 if new_index is not None:
352 self._indexfile = new_index
358 self._indexfile = new_index
353 tr.registertmp(new_index)
359 tr.registertmp(new_index)
354 tr.addpending(b'cl-%i' % id(self), self._writepending)
360 tr.addpending(b'cl-%i' % id(self), self._writepending)
355 tr.addfinalize(b'cl-%i' % id(self), self._finalize)
361 tr.addfinalize(b'cl-%i' % id(self), self._finalize)
356
362
357 def _finalize(self, tr):
363 def _finalize(self, tr):
358 """finalize index updates"""
364 """finalize index updates"""
359 assert not self._inner.is_open
365 assert not self._inner.is_open
360 if self._docket is not None:
366 if self._docket is not None:
361 self._docket.write(tr)
367 self._docket.write(tr)
362 self._v2_delayed = False
368 self._v2_delayed = False
363 else:
369 else:
364 new_index_file = self._inner.finalize_pending()
370 new_index_file = self._inner.finalize_pending()
365 self._indexfile = new_index_file
371 self._indexfile = new_index_file
366 # split when we're done
372 # split when we're done
367 self._enforceinlinesize(tr, side_write=False)
373 self._enforceinlinesize(tr, side_write=False)
368
374
369 def _writepending(self, tr):
375 def _writepending(self, tr):
370 """create a file containing the unfinalized state for
376 """create a file containing the unfinalized state for
371 pretxnchangegroup"""
377 pretxnchangegroup"""
372 assert not self._inner.is_open
378 assert not self._inner.is_open
373 if self._docket:
379 if self._docket:
374 any_pending = self._docket.write(tr, pending=True)
380 any_pending = self._docket.write(tr, pending=True)
375 self._v2_delayed = False
381 self._v2_delayed = False
376 else:
382 else:
377 new_index, any_pending = self._inner.write_pending()
383 new_index, any_pending = self._inner.write_pending()
378 if new_index is not None:
384 if new_index is not None:
379 self._indexfile = new_index
385 self._indexfile = new_index
380 tr.registertmp(new_index)
386 tr.registertmp(new_index)
381 return any_pending
387 return any_pending
382
388
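delayupdate(), _writepending() and _finalize() together implement a "write now, publish later" scheme so that concurrent readers (for example pretxnchangegroup hooks) never observe a half-committed changelog. A deliberately simplified toy model of that idea, not Mercurial's API:

class DelayedIndex:
    """Buffer appends until the transaction finalizes."""

    def __init__(self):
        self.published = b''    # what concurrent readers can see
        self._pending = None    # delay buffer while a transaction is open

    def delay(self):            # roughly: changelog.delayupdate()
        self._pending = bytearray()

    def append(self, entry):
        if self._pending is not None:
            self._pending += entry      # invisible to readers for now
        else:
            self.published += entry

    def finalize(self):         # roughly: changelog._finalize()
        if self._pending is not None:
            self.published += bytes(self._pending)
            self._pending = None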
383 def _enforceinlinesize(self, tr, side_write=True):
389 def _enforceinlinesize(self, tr, side_write=True):
384 if not self.is_delaying:
390 if not self.is_delaying:
385 revlog.revlog._enforceinlinesize(self, tr, side_write=side_write)
391 revlog.revlog._enforceinlinesize(self, tr, side_write=side_write)
386
392
387 def read(self, nodeorrev):
393 def read(self, nodeorrev):
388 """Obtain data from a parsed changelog revision.
394 """Obtain data from a parsed changelog revision.
389
395
390 Returns a 6-tuple of:
396 Returns a 6-tuple of:
391
397
392 - manifest node in binary
398 - manifest node in binary
393 - author/user as a localstr
399 - author/user as a localstr
394 - date as a 2-tuple of (time, timezone)
400 - date as a 2-tuple of (time, timezone)
395 - list of files
401 - list of files
396 - commit message as a localstr
402 - commit message as a localstr
397 - dict of extra metadata
403 - dict of extra metadata
398
404
399 Unless you need to access all fields, consider calling
405 Unless you need to access all fields, consider calling
400 ``changelogrevision`` instead, as it is faster for partial object
406 ``changelogrevision`` instead, as it is faster for partial object
401 access.
407 access.
402 """
408 """
403 d = self._revisiondata(nodeorrev)
409 d = self._revisiondata(nodeorrev)
404 sidedata = self.sidedata(nodeorrev)
410 sidedata = self.sidedata(nodeorrev)
405 copy_sd = self._copiesstorage == b'changeset-sidedata'
411 copy_sd = self._copiesstorage == b'changeset-sidedata'
406 c = changelogrevision(self, d, sidedata, copy_sd)
412 c = changelogrevision(self, d, sidedata, copy_sd)
407 return (c.manifest, c.user, c.date, c.files, c.description, c.extra)
413 return (c.manifest, c.user, c.date, c.files, c.description, c.extra)
408
414
409 def changelogrevision(self, nodeorrev):
415 def changelogrevision(self, nodeorrev):
410 """Obtain a ``changelogrevision`` for a node or revision."""
416 """Obtain a ``changelogrevision`` for a node or revision."""
411 text = self._revisiondata(nodeorrev)
417 text = self._revisiondata(nodeorrev)
412 sidedata = self.sidedata(nodeorrev)
418 sidedata = self.sidedata(nodeorrev)
413 return changelogrevision(
419 return changelogrevision(
414 self, text, sidedata, self._copiesstorage == b'changeset-sidedata'
420 self, text, sidedata, self._copiesstorage == b'changeset-sidedata'
415 )
421 )
416
422
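As the read() docstring notes, changelogrevision() is the cheaper entry point when only some fields are needed, because every field of the parsed object is a lazy property. A hedged usage sketch, with cl standing for a repository's changelog instance:

# 'cl' is assumed to be repo.changelog, an instance of the class above.
crev = cl.changelogrevision(rev)
branch, closed = crev.branchinfo   # only decodes the date/extra line
author = crev.user                 # only slices the user line
# cl.read(rev), by contrast, materializes all six fields up front.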
417 def readfiles(self, nodeorrev):
423 def readfiles(self, nodeorrev):
418 """
424 """
419 short version of read that only returns the files modified by the cset
425 short version of read that only returns the files modified by the cset
420 """
426 """
421 text = self.revision(nodeorrev)
427 text = self.revision(nodeorrev)
422 if not text:
428 if not text:
423 return []
429 return []
424 last = text.index(b"\n\n")
430 last = text.index(b"\n\n")
425 l = text[:last].split(b'\n')
431 l = text[:last].split(b'\n')
426 return l[3:]
432 return l[3:]
427
433
428 def add(
434 def add(
429 self,
435 self,
430 manifest,
436 manifest,
431 files,
437 files,
432 desc,
438 desc,
433 transaction,
439 transaction,
434 p1,
440 p1,
435 p2,
441 p2,
436 user,
442 user,
437 date=None,
443 date=None,
438 extra=None,
444 extra=None,
439 ):
445 ):
440 # Convert to UTF-8 encoded bytestrings as the very first
446 # Convert to UTF-8 encoded bytestrings as the very first
441 # thing: calling any method on a localstr object will turn it
447 # thing: calling any method on a localstr object will turn it
442 # into a str object and the cached UTF-8 string is thus lost.
448 # into a str object and the cached UTF-8 string is thus lost.
443 user, desc = encoding.fromlocal(user), encoding.fromlocal(desc)
449 user, desc = encoding.fromlocal(user), encoding.fromlocal(desc)
444
450
445 user = user.strip()
451 user = user.strip()
446 # An empty username or a username with a "\n" will make the
452 # An empty username or a username with a "\n" will make the
447 # revision text contain two "\n\n" sequences -> corrupt
453 # revision text contain two "\n\n" sequences -> corrupt
448 # repository since read cannot unpack the revision.
454 # repository since read cannot unpack the revision.
449 if not user:
455 if not user:
450 raise error.StorageError(_(b"empty username"))
456 raise error.StorageError(_(b"empty username"))
451 if b"\n" in user:
457 if b"\n" in user:
452 raise error.StorageError(
458 raise error.StorageError(
453 _(b"username %r contains a newline") % pycompat.bytestr(user)
459 _(b"username %r contains a newline") % pycompat.bytestr(user)
454 )
460 )
455
461
456 desc = stripdesc(desc)
462 desc = stripdesc(desc)
457
463
458 if date:
464 if date:
459 parseddate = b"%d %d" % dateutil.parsedate(date)
465 parseddate = b"%d %d" % dateutil.parsedate(date)
460 else:
466 else:
461 parseddate = b"%d %d" % dateutil.makedate()
467 parseddate = b"%d %d" % dateutil.makedate()
462 if extra:
468 if extra:
463 branch = extra.get(b"branch")
469 branch = extra.get(b"branch")
464 if branch in (b"default", b""):
470 if branch in (b"default", b""):
465 del extra[b"branch"]
471 del extra[b"branch"]
466 elif branch in (b".", b"null", b"tip"):
472 elif branch in (b".", b"null", b"tip"):
467 raise error.StorageError(
473 raise error.StorageError(
468 _(b'the name \'%s\' is reserved') % branch
474 _(b'the name \'%s\' is reserved') % branch
469 )
475 )
470 sortedfiles = sorted(files.touched)
476 sortedfiles = sorted(files.touched)
471 flags = 0
477 flags = 0
472 sidedata = None
478 sidedata = None
473 if self._copiesstorage == b'changeset-sidedata':
479 if self._copiesstorage == b'changeset-sidedata':
474 if files.has_copies_info:
480 if files.has_copies_info:
475 flags |= flagutil.REVIDX_HASCOPIESINFO
481 flags |= flagutil.REVIDX_HASCOPIESINFO
476 sidedata = metadata.encode_files_sidedata(files)
482 sidedata = metadata.encode_files_sidedata(files)
477
483
478 if extra:
484 if extra:
479 extra = encodeextra(extra)
485 extra = encodeextra(extra)
480 parseddate = b"%s %s" % (parseddate, extra)
486 parseddate = b"%s %s" % (parseddate, extra)
481 l = [hex(manifest), user, parseddate] + sortedfiles + [b"", desc]
487 l = [hex(manifest), user, parseddate] + sortedfiles + [b"", desc]
482 text = b"\n".join(l)
488 text = b"\n".join(l)
483 rev = self.addrevision(
489 rev = self.addrevision(
484 text, transaction, len(self), p1, p2, sidedata=sidedata, flags=flags
490 text, transaction, len(self), p1, p2, sidedata=sidedata, flags=flags
485 )
491 )
486 return self.node(rev)
492 return self.node(rev)
487
493
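When 'extra' survives the branch-name filtering above, add() appends it to the date field rather than storing it separately. A small illustration with made-up values, assuming the module is importable as mercurial.changelog so the encodeextra helper defined near the top of this file can be reused:

from mercurial.changelog import encodeextra  # helper shown earlier in this diff

parseddate = b"%d %d" % (1700000000, 0)              # "time tz", as in add()
extra = encodeextra({b'branch': b'stable', b'close': b'1'})
third_line = b"%s %s" % (parseddate, extra)
# third_line == b'1700000000 0 branch:stable\x00close:1'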
488 def branchinfo(self, rev):
494 def branchinfo(self, rev):
489 """return the branch name and open/close state of a revision
495 """return the branch name and open/close state of a revision
490
496
491 This function exists because creating a changectx object
497 This function exists because creating a changectx object
492 just to access this is costly."""
498 just to access this is costly."""
493 return self.changelogrevision(rev).branchinfo
499 return self.changelogrevision(rev).branchinfo
494
500
495 def _nodeduplicatecallback(self, transaction, rev):
501 def _nodeduplicatecallback(self, transaction, rev):
496 # keep track of revisions that got "re-added", e.g. an unbundle of a known rev.
502 # keep track of revisions that got "re-added", e.g. an unbundle of a known rev.
497 #
503 #
498 # We track them in a list to preserve their order from the source bundle
504 # We track them in a list to preserve their order from the source bundle
499 duplicates = transaction.changes.setdefault(b'revduplicates', [])
505 duplicates = transaction.changes.setdefault(b'revduplicates', [])
500 duplicates.append(rev)
506 duplicates.append(rev)
@@ -1,4246 +1,4062 @@ mercurial/revlog.py
1 # revlog.py - storage back-end for mercurial
1 # revlog.py - storage back-end for mercurial
2 # coding: utf8
2 # coding: utf8
3 #
3 #
4 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 """Storage back-end for Mercurial.
9 """Storage back-end for Mercurial.
10
10
11 This provides efficient delta storage with O(1) retrieve and append
11 This provides efficient delta storage with O(1) retrieve and append
12 and O(changes) merge between branches.
12 and O(changes) merge between branches.
13 """
13 """
14
14
15
15
16 import binascii
16 import binascii
17 import collections
17 import collections
18 import contextlib
18 import contextlib
19 import io
19 import io
20 import os
20 import os
21 import struct
21 import struct
22 import weakref
22 import weakref
23 import zlib
23 import zlib
24
24
25 # import stuff from node for others to import from revlog
25 # import stuff from node for others to import from revlog
26 from .node import (
26 from .node import (
27 bin,
27 bin,
28 hex,
28 hex,
29 nullrev,
29 nullrev,
30 sha1nodeconstants,
30 sha1nodeconstants,
31 short,
31 short,
32 wdirrev,
32 wdirrev,
33 )
33 )
34 from .i18n import _
34 from .i18n import _
35 from .revlogutils.constants import (
35 from .revlogutils.constants import (
36 ALL_KINDS,
36 ALL_KINDS,
37 CHANGELOGV2,
37 CHANGELOGV2,
38 COMP_MODE_DEFAULT,
38 COMP_MODE_DEFAULT,
39 COMP_MODE_INLINE,
39 COMP_MODE_INLINE,
40 COMP_MODE_PLAIN,
40 COMP_MODE_PLAIN,
41 DELTA_BASE_REUSE_NO,
41 DELTA_BASE_REUSE_NO,
42 DELTA_BASE_REUSE_TRY,
42 DELTA_BASE_REUSE_TRY,
43 ENTRY_RANK,
43 ENTRY_RANK,
44 FEATURES_BY_VERSION,
44 FEATURES_BY_VERSION,
45 FLAG_GENERALDELTA,
45 FLAG_GENERALDELTA,
46 FLAG_INLINE_DATA,
46 FLAG_INLINE_DATA,
47 INDEX_HEADER,
47 INDEX_HEADER,
48 KIND_CHANGELOG,
48 KIND_CHANGELOG,
49 KIND_FILELOG,
49 KIND_FILELOG,
50 RANK_UNKNOWN,
50 RANK_UNKNOWN,
51 REVLOGV0,
51 REVLOGV0,
52 REVLOGV1,
52 REVLOGV1,
53 REVLOGV1_FLAGS,
53 REVLOGV1_FLAGS,
54 REVLOGV2,
54 REVLOGV2,
55 REVLOGV2_FLAGS,
55 REVLOGV2_FLAGS,
56 REVLOG_DEFAULT_FLAGS,
56 REVLOG_DEFAULT_FLAGS,
57 REVLOG_DEFAULT_FORMAT,
57 REVLOG_DEFAULT_FORMAT,
58 REVLOG_DEFAULT_VERSION,
58 REVLOG_DEFAULT_VERSION,
59 SUPPORTED_FLAGS,
59 SUPPORTED_FLAGS,
60 )
60 )
61 from .revlogutils.flagutil import (
61 from .revlogutils.flagutil import (
62 REVIDX_DEFAULT_FLAGS,
62 REVIDX_DEFAULT_FLAGS,
63 REVIDX_ELLIPSIS,
63 REVIDX_ELLIPSIS,
64 REVIDX_EXTSTORED,
64 REVIDX_EXTSTORED,
65 REVIDX_FLAGS_ORDER,
65 REVIDX_FLAGS_ORDER,
66 REVIDX_HASCOPIESINFO,
66 REVIDX_HASCOPIESINFO,
67 REVIDX_ISCENSORED,
67 REVIDX_ISCENSORED,
68 REVIDX_RAWTEXT_CHANGING_FLAGS,
68 REVIDX_RAWTEXT_CHANGING_FLAGS,
69 )
69 )
70 from .thirdparty import attr
70 from .thirdparty import attr
71 from . import (
71 from . import (
72 ancestor,
72 ancestor,
73 dagop,
73 dagop,
74 error,
74 error,
75 mdiff,
75 mdiff,
76 policy,
76 policy,
77 pycompat,
77 pycompat,
78 revlogutils,
78 revlogutils,
79 templatefilters,
79 templatefilters,
80 util,
80 util,
81 )
81 )
82 from .interfaces import (
82 from .interfaces import (
83 repository,
83 repository,
84 util as interfaceutil,
84 util as interfaceutil,
85 )
85 )
86 from .revlogutils import (
86 from .revlogutils import (
87 deltas as deltautil,
87 deltas as deltautil,
88 docket as docketutil,
88 docket as docketutil,
89 flagutil,
89 flagutil,
90 nodemap as nodemaputil,
90 nodemap as nodemaputil,
91 randomaccessfile,
91 randomaccessfile,
92 revlogv0,
92 revlogv0,
93 rewrite,
93 rewrite,
94 sidedata as sidedatautil,
94 sidedata as sidedatautil,
95 )
95 )
96 from .utils import (
96 from .utils import (
97 storageutil,
97 storageutil,
98 stringutil,
98 stringutil,
99 )
99 )
100
100
101 # blanket usage of all the names to prevent pyflakes complaints
101 # blanket usage of all the names to prevent pyflakes complaints
102 # We need these names available in the module for extensions.
102 # We need these names available in the module for extensions.
103
103
104 REVLOGV0
104 REVLOGV0
105 REVLOGV1
105 REVLOGV1
106 REVLOGV2
106 REVLOGV2
107 CHANGELOGV2
107 CHANGELOGV2
108 FLAG_INLINE_DATA
108 FLAG_INLINE_DATA
109 FLAG_GENERALDELTA
109 FLAG_GENERALDELTA
110 REVLOG_DEFAULT_FLAGS
110 REVLOG_DEFAULT_FLAGS
111 REVLOG_DEFAULT_FORMAT
111 REVLOG_DEFAULT_FORMAT
112 REVLOG_DEFAULT_VERSION
112 REVLOG_DEFAULT_VERSION
113 REVLOGV1_FLAGS
113 REVLOGV1_FLAGS
114 REVLOGV2_FLAGS
114 REVLOGV2_FLAGS
115 REVIDX_ISCENSORED
115 REVIDX_ISCENSORED
116 REVIDX_ELLIPSIS
116 REVIDX_ELLIPSIS
117 REVIDX_HASCOPIESINFO
117 REVIDX_HASCOPIESINFO
118 REVIDX_EXTSTORED
118 REVIDX_EXTSTORED
119 REVIDX_DEFAULT_FLAGS
119 REVIDX_DEFAULT_FLAGS
120 REVIDX_FLAGS_ORDER
120 REVIDX_FLAGS_ORDER
121 REVIDX_RAWTEXT_CHANGING_FLAGS
121 REVIDX_RAWTEXT_CHANGING_FLAGS
122
122
123 parsers = policy.importmod('parsers')
123 parsers = policy.importmod('parsers')
124 rustancestor = policy.importrust('ancestor')
124 rustancestor = policy.importrust('ancestor')
125 rustdagop = policy.importrust('dagop')
125 rustdagop = policy.importrust('dagop')
126 rustrevlog = policy.importrust('revlog')
126 rustrevlog = policy.importrust('revlog')
127
127
128 # Aliased for performance.
128 # Aliased for performance.
129 _zlibdecompress = zlib.decompress
129 _zlibdecompress = zlib.decompress
130
130
131 # max size of inline data embedded into a revlog
131 # max size of inline data embedded into a revlog
132 _maxinline = 131072
132 _maxinline = 131072
133
133
134 # Flag processors for REVIDX_ELLIPSIS.
134 # Flag processors for REVIDX_ELLIPSIS.
135 def ellipsisreadprocessor(rl, text):
135 def ellipsisreadprocessor(rl, text):
136 return text, False
136 return text, False
137
137
138
138
139 def ellipsiswriteprocessor(rl, text):
139 def ellipsiswriteprocessor(rl, text):
140 return text, False
140 return text, False
141
141
142
142
143 def ellipsisrawprocessor(rl, text):
143 def ellipsisrawprocessor(rl, text):
144 return False
144 return False
145
145
146
146
147 ellipsisprocessor = (
147 ellipsisprocessor = (
148 ellipsisreadprocessor,
148 ellipsisreadprocessor,
149 ellipsiswriteprocessor,
149 ellipsiswriteprocessor,
150 ellipsisrawprocessor,
150 ellipsisrawprocessor,
151 )
151 )
152
152
153
153
154 def _verify_revision(rl, skipflags, state, node):
154 def _verify_revision(rl, skipflags, state, node):
155 """Verify the integrity of the given revlog ``node`` while providing a hook
155 """Verify the integrity of the given revlog ``node`` while providing a hook
156 point for extensions to influence the operation."""
156 point for extensions to influence the operation."""
157 if skipflags:
157 if skipflags:
158 state[b'skipread'].add(node)
158 state[b'skipread'].add(node)
159 else:
159 else:
160 # Side-effect: read content and verify hash.
160 # Side-effect: read content and verify hash.
161 rl.revision(node)
161 rl.revision(node)
162
162
163
163
164 # True if a fast implementation for persistent-nodemap is available
164 # True if a fast implementation for persistent-nodemap is available
165 #
165 #
166 # We also consider that we have a "fast" implementation in "pure" python because
166 # We also consider that we have a "fast" implementation in "pure" python because
167 # people using pure don't really have performance considerations (and a
167 # people using pure don't really have performance considerations (and a
168 # wheelbarrow of other slowness sources)
168 # wheelbarrow of other slowness sources)
169 HAS_FAST_PERSISTENT_NODEMAP = rustrevlog is not None or hasattr(
169 HAS_FAST_PERSISTENT_NODEMAP = rustrevlog is not None or hasattr(
170 parsers, 'BaseIndexObject'
170 parsers, 'BaseIndexObject'
171 )
171 )
172
172
173
173
174 @interfaceutil.implementer(repository.irevisiondelta)
174 @interfaceutil.implementer(repository.irevisiondelta)
175 @attr.s(slots=True)
175 @attr.s(slots=True)
176 class revlogrevisiondelta:
176 class revlogrevisiondelta:
177 node = attr.ib()
177 node = attr.ib()
178 p1node = attr.ib()
178 p1node = attr.ib()
179 p2node = attr.ib()
179 p2node = attr.ib()
180 basenode = attr.ib()
180 basenode = attr.ib()
181 flags = attr.ib()
181 flags = attr.ib()
182 baserevisionsize = attr.ib()
182 baserevisionsize = attr.ib()
183 revision = attr.ib()
183 revision = attr.ib()
184 delta = attr.ib()
184 delta = attr.ib()
185 sidedata = attr.ib()
185 sidedata = attr.ib()
186 protocol_flags = attr.ib()
186 protocol_flags = attr.ib()
187 linknode = attr.ib(default=None)
187 linknode = attr.ib(default=None)
188
188
189
189
190 @interfaceutil.implementer(repository.iverifyproblem)
190 @interfaceutil.implementer(repository.iverifyproblem)
191 @attr.s(frozen=True)
191 @attr.s(frozen=True)
192 class revlogproblem:
192 class revlogproblem:
193 warning = attr.ib(default=None)
193 warning = attr.ib(default=None)
194 error = attr.ib(default=None)
194 error = attr.ib(default=None)
195 node = attr.ib(default=None)
195 node = attr.ib(default=None)
196
196
197
197
198 def parse_index_v1(data, inline):
198 def parse_index_v1(data, inline):
199 # call the C implementation to parse the index data
199 # call the C implementation to parse the index data
200 index, cache = parsers.parse_index2(data, inline)
200 index, cache = parsers.parse_index2(data, inline)
201 return index, cache
201 return index, cache
202
202
203
203
204 def parse_index_v2(data, inline):
204 def parse_index_v2(data, inline):
205 # call the C implementation to parse the index data
205 # call the C implementation to parse the index data
206 index, cache = parsers.parse_index2(data, inline, format=REVLOGV2)
206 index, cache = parsers.parse_index2(data, inline, format=REVLOGV2)
207 return index, cache
207 return index, cache
208
208
209
209
210 def parse_index_cl_v2(data, inline):
210 def parse_index_cl_v2(data, inline):
211 # call the C implementation to parse the index data
211 # call the C implementation to parse the index data
212 index, cache = parsers.parse_index2(data, inline, format=CHANGELOGV2)
212 index, cache = parsers.parse_index2(data, inline, format=CHANGELOGV2)
213 return index, cache
213 return index, cache
214
214
215
215
216 if hasattr(parsers, 'parse_index_devel_nodemap'):
216 if hasattr(parsers, 'parse_index_devel_nodemap'):
217
217
218 def parse_index_v1_nodemap(data, inline):
218 def parse_index_v1_nodemap(data, inline):
219 index, cache = parsers.parse_index_devel_nodemap(data, inline)
219 index, cache = parsers.parse_index_devel_nodemap(data, inline)
220 return index, cache
220 return index, cache
221
221
222
222
223 else:
223 else:
224 parse_index_v1_nodemap = None
224 parse_index_v1_nodemap = None
225
225
226
226
227 def parse_index_v1_mixed(data, inline):
227 def parse_index_v1_mixed(data, inline):
228 index, cache = parse_index_v1(data, inline)
228 index, cache = parse_index_v1(data, inline)
229 return rustrevlog.MixedIndex(index), cache
229 return rustrevlog.MixedIndex(index), cache
230
230
231
231
232 # corresponds to uncompressed length of indexformatng (2 gigs, 4-byte
232 # corresponds to uncompressed length of indexformatng (2 gigs, 4-byte
233 # signed integer)
233 # signed integer)
234 _maxentrysize = 0x7FFFFFFF
234 _maxentrysize = 0x7FFFFFFF
235
235
236 FILE_TOO_SHORT_MSG = _(
236 FILE_TOO_SHORT_MSG = _(
237 b'cannot read from revlog %s;'
237 b'cannot read from revlog %s;'
238 b' expected %d bytes from offset %d, data size is %d'
238 b' expected %d bytes from offset %d, data size is %d'
239 )
239 )
240
240
241 hexdigits = b'0123456789abcdefABCDEF'
241 hexdigits = b'0123456789abcdefABCDEF'
242
242
243
243
244 class _Config:
244 class _Config:
245 def copy(self):
245 def copy(self):
246 return self.__class__(**self.__dict__)
246 return self.__class__(**self.__dict__)
247
247
248
248
249 @attr.s()
249 @attr.s()
250 class FeatureConfig(_Config):
250 class FeatureConfig(_Config):
251 """Hold configuration values about the available revlog features"""
251 """Hold configuration values about the available revlog features"""
252
252
253 # the default compression engine
253 # the default compression engine
254 compression_engine = attr.ib(default=b'zlib')
254 compression_engine = attr.ib(default=b'zlib')
255 # compression engines options
255 # compression engines options
256 compression_engine_options = attr.ib(default=attr.Factory(dict))
256 compression_engine_options = attr.ib(default=attr.Factory(dict))
257
257
258 # can we use censor on this revlog
258 # can we use censor on this revlog
259 censorable = attr.ib(default=False)
259 censorable = attr.ib(default=False)
260 # does this revlog use the "side data" feature
260 # does this revlog use the "side data" feature
261 has_side_data = attr.ib(default=False)
261 has_side_data = attr.ib(default=False)
262 # might remove rank configuration once the computation has no impact
262 # might remove rank configuration once the computation has no impact
263 compute_rank = attr.ib(default=False)
263 compute_rank = attr.ib(default=False)
264 # parent order is supposed to be semantically irrelevant, so we
264 # parent order is supposed to be semantically irrelevant, so we
265 # normally resort parents to ensure that the first parent is non-null,
265 # normally resort parents to ensure that the first parent is non-null,
266 # if there is a non-null parent at all.
266 # if there is a non-null parent at all.
267 # filelog abuses the parent order as flag to mark some instances of
267 # filelog abuses the parent order as flag to mark some instances of
268 # meta-encoded files, so allow it to disable this behavior.
268 # meta-encoded files, so allow it to disable this behavior.
269 canonical_parent_order = attr.ib(default=False)
269 canonical_parent_order = attr.ib(default=False)
270 # can ellipsis commit be used
270 # can ellipsis commit be used
271 enable_ellipsis = attr.ib(default=False)
271 enable_ellipsis = attr.ib(default=False)
272
272
273 def copy(self):
273 def copy(self):
274 new = super().copy()
274 new = super().copy()
275 new.compression_engine_options = self.compression_engine_options.copy()
275 new.compression_engine_options = self.compression_engine_options.copy()
276 return new
276 return new
277
277
278
278
279 @attr.s()
279 @attr.s()
280 class DataConfig(_Config):
280 class DataConfig(_Config):
281 """Hold configuration value about how the revlog data are read"""
281 """Hold configuration value about how the revlog data are read"""
282
282
283 # should we try to open the "pending" version of the revlog
283 # should we try to open the "pending" version of the revlog
284 try_pending = attr.ib(default=False)
284 try_pending = attr.ib(default=False)
285 # should we try to open the "split" version of the revlog
285 # should we try to open the "split" version of the revlog
286 try_split = attr.ib(default=False)
286 try_split = attr.ib(default=False)
287 # When True, indexfile should be opened with checkambig=True at writing,
287 # When True, indexfile should be opened with checkambig=True at writing,
288 # to avoid file stat ambiguity.
288 # to avoid file stat ambiguity.
289 check_ambig = attr.ib(default=False)
289 check_ambig = attr.ib(default=False)
290
290
291 # If true, use mmap instead of reading to deal with large index
291 # If true, use mmap instead of reading to deal with large index
292 mmap_large_index = attr.ib(default=False)
292 mmap_large_index = attr.ib(default=False)
293 # how much data is large
293 # how much data is large
294 mmap_index_threshold = attr.ib(default=None)
294 mmap_index_threshold = attr.ib(default=None)
295 # How much data to read and cache into the raw revlog data cache.
295 # How much data to read and cache into the raw revlog data cache.
296 chunk_cache_size = attr.ib(default=65536)
296 chunk_cache_size = attr.ib(default=65536)
297
297
298 # The size of the uncompressed cache compared to the largest revision seen.
298 # The size of the uncompressed cache compared to the largest revision seen.
299 uncompressed_cache_factor = attr.ib(default=None)
299 uncompressed_cache_factor = attr.ib(default=None)
300
300
301 # The number of chunks cached
301 # The number of chunks cached
302 uncompressed_cache_count = attr.ib(default=None)
302 uncompressed_cache_count = attr.ib(default=None)
303
303
304 # Allow sparse reading of the revlog data
304 # Allow sparse reading of the revlog data
305 with_sparse_read = attr.ib(default=False)
305 with_sparse_read = attr.ib(default=False)
306 # minimal density of a sparse read chunk
306 # minimal density of a sparse read chunk
307 sr_density_threshold = attr.ib(default=0.50)
307 sr_density_threshold = attr.ib(default=0.50)
308 # minimal size of data we skip when performing sparse read
308 # minimal size of data we skip when performing sparse read
309 sr_min_gap_size = attr.ib(default=262144)
309 sr_min_gap_size = attr.ib(default=262144)
310
310
311 # are deltas encoded against arbitrary bases.
311 # are deltas encoded against arbitrary bases.
312 generaldelta = attr.ib(default=False)
312 generaldelta = attr.ib(default=False)
313
313
314
314
315 @attr.s()
315 @attr.s()
316 class DeltaConfig(_Config):
316 class DeltaConfig(_Config):
317 """Hold configuration value about how new delta are computed
317 """Hold configuration value about how new delta are computed
318
318
319 Some attributes are duplicated from DataConfig to help havign each object
319 Some attributes are duplicated from DataConfig to help havign each object
320 self contained.
320 self contained.
321 """
321 """
322
322
323 # can delta be encoded against arbitrary bases.
323 # can delta be encoded against arbitrary bases.
324 general_delta = attr.ib(default=False)
324 general_delta = attr.ib(default=False)
325 # Allow sparse writing of the revlog data
325 # Allow sparse writing of the revlog data
326 sparse_revlog = attr.ib(default=False)
326 sparse_revlog = attr.ib(default=False)
327 # maximum length of a delta chain
327 # maximum length of a delta chain
328 max_chain_len = attr.ib(default=None)
328 max_chain_len = attr.ib(default=None)
329 # Maximum distance between delta chain base start and end
329 # Maximum distance between delta chain base start and end
330 max_deltachain_span = attr.ib(default=-1)
330 max_deltachain_span = attr.ib(default=-1)
331 # If `upper_bound_comp` is not None, this is the expected maximal gain from
331 # If `upper_bound_comp` is not None, this is the expected maximal gain from
332 # compression for the data content.
332 # compression for the data content.
333 upper_bound_comp = attr.ib(default=None)
333 upper_bound_comp = attr.ib(default=None)
334 # Should we try a delta against both parents
334 # Should we try a delta against both parents
335 delta_both_parents = attr.ib(default=True)
335 delta_both_parents = attr.ib(default=True)
336 # Test delta base candidate group by chunk of this maximal size.
336 # Test delta base candidate group by chunk of this maximal size.
337 candidate_group_chunk_size = attr.ib(default=0)
337 candidate_group_chunk_size = attr.ib(default=0)
338 # Should we display debug information about delta computation
338 # Should we display debug information about delta computation
339 debug_delta = attr.ib(default=False)
339 debug_delta = attr.ib(default=False)
340 # trust incoming delta by default
340 # trust incoming delta by default
341 lazy_delta = attr.ib(default=True)
341 lazy_delta = attr.ib(default=True)
342 # trust the base of incoming delta by default
342 # trust the base of incoming delta by default
343 lazy_delta_base = attr.ib(default=False)
343 lazy_delta_base = attr.ib(default=False)
344
344
345
345
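The *Config classes above are plain attrs value objects: _Config.copy() clones an instance through its constructor, and FeatureConfig.copy() additionally copies the mutable options dict so the clone can be tweaked safely. A hedged usage sketch, assuming these names import from mercurial.revlog as defined in this file:

from mercurial.revlog import DeltaConfig, FeatureConfig

base = DeltaConfig(general_delta=True, sparse_revlog=True)
tuned = base.copy()
tuned.max_chain_len = 1000          # leaves 'base' untouched

fc = FeatureConfig(compression_engine=b'zstd')
fc2 = fc.copy()
fc2.compression_engine_options[b'level'] = 3   # 'fc' keeps its own options dict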
346 class _InnerRevlog:
346 class _InnerRevlog:
347 """An inner layer of the revlog object
347 """An inner layer of the revlog object
348
348
349 That layer exists to be able to delegate some operations to Rust; its
349 That layer exists to be able to delegate some operations to Rust; its
350 boundaries are arbitrary and based on what we can delegate to Rust.
350 boundaries are arbitrary and based on what we can delegate to Rust.
351 """
351 """
352
352
353 def __init__(
353 def __init__(
354 self,
354 self,
355 opener,
355 opener,
356 index,
356 index,
357 index_file,
357 index_file,
358 data_file,
358 data_file,
359 sidedata_file,
359 sidedata_file,
360 inline,
360 inline,
361 data_config,
361 data_config,
362 delta_config,
362 delta_config,
363 feature_config,
363 feature_config,
364 chunk_cache,
364 chunk_cache,
365 default_compression_header,
365 default_compression_header,
366 ):
366 ):
367 self.opener = opener
367 self.opener = opener
368 self.index = index
368 self.index = index
369
369
370 self.__index_file = index_file
370 self.__index_file = index_file
371 self.data_file = data_file
371 self.data_file = data_file
372 self.sidedata_file = sidedata_file
372 self.sidedata_file = sidedata_file
373 self.inline = inline
373 self.inline = inline
374 self.data_config = data_config
374 self.data_config = data_config
375 self.delta_config = delta_config
375 self.delta_config = delta_config
376 self.feature_config = feature_config
376 self.feature_config = feature_config
377
377
378 # used during diverted write.
378 # used during diverted write.
379 self._orig_index_file = None
379 self._orig_index_file = None
380
380
381 self._default_compression_header = default_compression_header
381 self._default_compression_header = default_compression_header
382
382
383 # index
383 # index
384
384
385 # 3-tuple of file handles being used for active writing.
385 # 3-tuple of file handles being used for active writing.
386 self._writinghandles = None
386 self._writinghandles = None
387
387
388 self._segmentfile = randomaccessfile.randomaccessfile(
388 self._segmentfile = randomaccessfile.randomaccessfile(
389 self.opener,
389 self.opener,
390 (self.index_file if self.inline else self.data_file),
390 (self.index_file if self.inline else self.data_file),
391 self.data_config.chunk_cache_size,
391 self.data_config.chunk_cache_size,
392 chunk_cache,
392 chunk_cache,
393 )
393 )
394 self._segmentfile_sidedata = randomaccessfile.randomaccessfile(
394 self._segmentfile_sidedata = randomaccessfile.randomaccessfile(
395 self.opener,
395 self.opener,
396 self.sidedata_file,
396 self.sidedata_file,
397 self.data_config.chunk_cache_size,
397 self.data_config.chunk_cache_size,
398 )
398 )
399
399
400 # revlog header -> revlog compressor
400 # revlog header -> revlog compressor
401 self._decompressors = {}
401 self._decompressors = {}
402 # 3-tuple of (node, rev, text) for a raw revision.
402 # 3-tuple of (node, rev, text) for a raw revision.
403 self._revisioncache = None
403 self._revisioncache = None
404
404
405 # cache some uncompressed chunks
405 # cache some uncompressed chunks
406 # rev → uncompressed_chunk
406 # rev → uncompressed_chunk
407 #
407 #
408 # the max cost is dynamically updated to be proportional to the
408 # the max cost is dynamically updated to be proportional to the
409 # size of the revisions we actually encounter.
409 # size of the revisions we actually encounter.
410 self._uncompressed_chunk_cache = None
410 self._uncompressed_chunk_cache = None
411 if self.data_config.uncompressed_cache_factor is not None:
411 if self.data_config.uncompressed_cache_factor is not None:
412 self._uncompressed_chunk_cache = util.lrucachedict(
412 self._uncompressed_chunk_cache = util.lrucachedict(
413 self.data_config.uncompressed_cache_count,
413 self.data_config.uncompressed_cache_count,
414 maxcost=65536, # some arbitrary initial value
414 maxcost=65536, # some arbitrary initial value
415 )
415 )
416
416
417 self._delay_buffer = None
417 self._delay_buffer = None
418
418
419 @property
419 @property
420 def index_file(self):
420 def index_file(self):
421 return self.__index_file
421 return self.__index_file
422
422
423 @index_file.setter
423 @index_file.setter
424 def index_file(self, new_index_file):
424 def index_file(self, new_index_file):
425 self.__index_file = new_index_file
425 self.__index_file = new_index_file
426 if self.inline:
426 if self.inline:
427 self._segmentfile.filename = new_index_file
427 self._segmentfile.filename = new_index_file
428
428
429 def __len__(self):
429 def __len__(self):
430 return len(self.index)
430 return len(self.index)
431
431
432 def clear_cache(self):
432 def clear_cache(self):
433 assert not self.is_delaying
433 assert not self.is_delaying
434 self._revisioncache = None
434 self._revisioncache = None
435 if self._uncompressed_chunk_cache is not None:
435 if self._uncompressed_chunk_cache is not None:
436 self._uncompressed_chunk_cache.clear()
436 self._uncompressed_chunk_cache.clear()
437 self._segmentfile.clear_cache()
437 self._segmentfile.clear_cache()
438 self._segmentfile_sidedata.clear_cache()
438 self._segmentfile_sidedata.clear_cache()
439
439
440 @property
440 @property
441 def canonical_index_file(self):
441 def canonical_index_file(self):
442 if self._orig_index_file is not None:
442 if self._orig_index_file is not None:
443 return self._orig_index_file
443 return self._orig_index_file
444 return self.index_file
444 return self.index_file
445
445
446 @property
446 @property
447 def is_delaying(self):
447 def is_delaying(self):
448 """is the revlog is currently delaying the visibility of written data?
448 """is the revlog is currently delaying the visibility of written data?
449
449
450 The delaying mechanism can be either in-memory or written on disk in a
450 The delaying mechanism can be either in-memory or written on disk in a
451 side-file."""
451 side-file."""
452 return (self._delay_buffer is not None) or (
452 return (self._delay_buffer is not None) or (
453 self._orig_index_file is not None
453 self._orig_index_file is not None
454 )
454 )
455
455
456 # Derived from index values.
456 # Derived from index values.
457
457
458 def start(self, rev):
458 def start(self, rev):
459 """the offset of the data chunk for this revision"""
459 """the offset of the data chunk for this revision"""
460 return int(self.index[rev][0] >> 16)
460 return int(self.index[rev][0] >> 16)
461
461
462 def length(self, rev):
462 def length(self, rev):
463 """the length of the data chunk for this revision"""
463 """the length of the data chunk for this revision"""
464 return self.index[rev][1]
464 return self.index[rev][1]
465
465
466 def end(self, rev):
466 def end(self, rev):
467 """the end of the data chunk for this revision"""
467 """the end of the data chunk for this revision"""
468 return self.start(rev) + self.length(rev)
468 return self.start(rev) + self.length(rev)
469
469
470 def deltaparent(self, rev):
470 def deltaparent(self, rev):
471 """return deltaparent of the given revision"""
471 """return deltaparent of the given revision"""
472 base = self.index[rev][3]
472 base = self.index[rev][3]
473 if base == rev:
473 if base == rev:
474 return nullrev
474 return nullrev
475 elif self.delta_config.general_delta:
475 elif self.delta_config.general_delta:
476 return base
476 return base
477 else:
477 else:
478 return rev - 1
478 return rev - 1
479
479
480 def issnapshot(self, rev):
480 def issnapshot(self, rev):
481 """tells whether rev is a snapshot"""
481 """tells whether rev is a snapshot"""
482 if not self.delta_config.sparse_revlog:
482 if not self.delta_config.sparse_revlog:
483 return self.deltaparent(rev) == nullrev
483 return self.deltaparent(rev) == nullrev
484 elif hasattr(self.index, 'issnapshot'):
484 elif hasattr(self.index, 'issnapshot'):
485 # directly assign the method to cache the testing and access
485 # directly assign the method to cache the testing and access
486 self.issnapshot = self.index.issnapshot
486 self.issnapshot = self.index.issnapshot
487 return self.issnapshot(rev)
487 return self.issnapshot(rev)
488 if rev == nullrev:
488 if rev == nullrev:
489 return True
489 return True
490 entry = self.index[rev]
490 entry = self.index[rev]
491 base = entry[3]
491 base = entry[3]
492 if base == rev:
492 if base == rev:
493 return True
493 return True
494 if base == nullrev:
494 if base == nullrev:
495 return True
495 return True
496 p1 = entry[5]
496 p1 = entry[5]
497 while self.length(p1) == 0:
497 while self.length(p1) == 0:
498 b = self.deltaparent(p1)
498 b = self.deltaparent(p1)
499 if b == p1:
499 if b == p1:
500 break
500 break
501 p1 = b
501 p1 = b
502 p2 = entry[6]
502 p2 = entry[6]
503 while self.length(p2) == 0:
503 while self.length(p2) == 0:
504 b = self.deltaparent(p2)
504 b = self.deltaparent(p2)
505 if b == p2:
505 if b == p2:
506 break
506 break
507 p2 = b
507 p2 = b
508 if base == p1 or base == p2:
508 if base == p1 or base == p2:
509 return False
509 return False
510 return self.issnapshot(base)
510 return self.issnapshot(base)
511
511
512 def _deltachain(self, rev, stoprev=None):
512 def _deltachain(self, rev, stoprev=None):
513 """Obtain the delta chain for a revision.
513 """Obtain the delta chain for a revision.
514
514
515 ``stoprev`` specifies a revision to stop at. If not specified, we
515 ``stoprev`` specifies a revision to stop at. If not specified, we
516 stop at the base of the chain.
516 stop at the base of the chain.
517
517
518 Returns a 2-tuple of (chain, stopped) where ``chain`` is a list of
518 Returns a 2-tuple of (chain, stopped) where ``chain`` is a list of
519 revs in ascending order and ``stopped`` is a bool indicating whether
519 revs in ascending order and ``stopped`` is a bool indicating whether
520 ``stoprev`` was hit.
520 ``stoprev`` was hit.
521 """
521 """
522 generaldelta = self.delta_config.general_delta
522 generaldelta = self.delta_config.general_delta
523 # Try C implementation.
523 # Try C implementation.
524 try:
524 try:
525 return self.index.deltachain(rev, stoprev, generaldelta)
525 return self.index.deltachain(rev, stoprev, generaldelta)
526 except AttributeError:
526 except AttributeError:
527 pass
527 pass
528
528
529 chain = []
529 chain = []
530
530
531 # Alias to prevent attribute lookup in tight loop.
531 # Alias to prevent attribute lookup in tight loop.
532 index = self.index
532 index = self.index
533
533
534 iterrev = rev
534 iterrev = rev
535 e = index[iterrev]
535 e = index[iterrev]
536 while iterrev != e[3] and iterrev != stoprev:
536 while iterrev != e[3] and iterrev != stoprev:
537 chain.append(iterrev)
537 chain.append(iterrev)
538 if generaldelta:
538 if generaldelta:
539 iterrev = e[3]
539 iterrev = e[3]
540 else:
540 else:
541 iterrev -= 1
541 iterrev -= 1
542 e = index[iterrev]
542 e = index[iterrev]
543
543
544 if iterrev == stoprev:
544 if iterrev == stoprev:
545 stopped = True
545 stopped = True
546 else:
546 else:
547 chain.append(iterrev)
547 chain.append(iterrev)
548 stopped = False
548 stopped = False
549
549
550 chain.reverse()
550 chain.reverse()
551 return chain, stopped
551 return chain, stopped
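# A small worked example (hypothetical revlog, general delta enabled):
# if rev 5 is stored as a delta on 3, rev 3 as a delta on 1, and rev 1
# is a full snapshot (its base is itself), then:
#
#   self._deltachain(5)             # -> ([1, 3, 5], False)
#   self._deltachain(5, stoprev=3)  # -> ([5], True)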
552
552
553 @util.propertycache
553 @util.propertycache
554 def _compressor(self):
554 def _compressor(self):
555 engine = util.compengines[self.feature_config.compression_engine]
555 engine = util.compengines[self.feature_config.compression_engine]
556 return engine.revlogcompressor(
556 return engine.revlogcompressor(
557 self.feature_config.compression_engine_options
557 self.feature_config.compression_engine_options
558 )
558 )
559
559
560 @util.propertycache
560 @util.propertycache
561 def _decompressor(self):
561 def _decompressor(self):
562 """the default decompressor"""
562 """the default decompressor"""
563 if self._default_compression_header is None:
563 if self._default_compression_header is None:
564 return None
564 return None
565 t = self._default_compression_header
565 t = self._default_compression_header
566 c = self._get_decompressor(t)
566 c = self._get_decompressor(t)
567 return c.decompress
567 return c.decompress
568
568
569 def _get_decompressor(self, t):
569 def _get_decompressor(self, t):
570 try:
570 try:
571 compressor = self._decompressors[t]
571 compressor = self._decompressors[t]
572 except KeyError:
572 except KeyError:
573 try:
573 try:
574 engine = util.compengines.forrevlogheader(t)
574 engine = util.compengines.forrevlogheader(t)
575 compressor = engine.revlogcompressor(
575 compressor = engine.revlogcompressor(
576 self.feature_config.compression_engine_options
576 self.feature_config.compression_engine_options
577 )
577 )
578 self._decompressors[t] = compressor
578 self._decompressors[t] = compressor
579 except KeyError:
579 except KeyError:
580 raise error.RevlogError(
580 raise error.RevlogError(
581 _(b'unknown compression type %s') % binascii.hexlify(t)
581 _(b'unknown compression type %s') % binascii.hexlify(t)
582 )
582 )
583 return compressor
583 return compressor
584
584
585 def compress(self, data):
585 def compress(self, data):
586 """Generate a possibly-compressed representation of data."""
586 """Generate a possibly-compressed representation of data."""
587 if not data:
587 if not data:
588 return b'', data
588 return b'', data
589
589
590 compressed = self._compressor.compress(data)
590 compressed = self._compressor.compress(data)
591
591
592 if compressed:
592 if compressed:
593 # The revlog compressor added the header in the returned data.
593 # The revlog compressor added the header in the returned data.
594 return b'', compressed
594 return b'', compressed
595
595
596 if data[0:1] == b'\0':
596 if data[0:1] == b'\0':
597 return b'', data
597 return b'', data
598 return b'u', data
598 return b'u', data
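# A minimal sketch of the (header, payload) contract, assuming the
# default zlib engine and hypothetical inputs:
#
#   self.compress(b'x' * 4096)  # -> (b'', <zlib data>): payload is
#                               #    self-describing, empty header
#   self.compress(b'\0abc')     # -> (b'', b'\x00abc'): raw data already
#                               #    starting with NUL needs no marker
#   self.compress(b'ab')        # -> (b'u', b'ab'): stored uncompressed,
#                               #    marked with the explicit 'u' header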
599
599
600 def decompress(self, data):
600 def decompress(self, data):
601 """Decompress a revlog chunk.
601 """Decompress a revlog chunk.
602
602
603 The chunk is expected to begin with a header identifying the
603 The chunk is expected to begin with a header identifying the
604 format type so it can be routed to an appropriate decompressor.
604 format type so it can be routed to an appropriate decompressor.
605 """
605 """
606 if not data:
606 if not data:
607 return data
607 return data
608
608
609 # Revlogs are read much more frequently than they are written and many
609 # Revlogs are read much more frequently than they are written and many
610 # chunks only take microseconds to decompress, so performance is
610 # chunks only take microseconds to decompress, so performance is
611 # important here.
611 # important here.
612 #
612 #
613 # We can make a few assumptions about revlogs:
613 # We can make a few assumptions about revlogs:
614 #
614 #
615 # 1) the majority of chunks will be compressed (as opposed to inline
615 # 1) the majority of chunks will be compressed (as opposed to inline
616 # raw data).
616 # raw data).
617 # 2) decompressing *any* data will likely be at least 10x slower than
617 # 2) decompressing *any* data will likely be at least 10x slower than
618 # returning raw inline data.
618 # returning raw inline data.
619 # 3) we want to prioritize common and officially supported compression
619 # 3) we want to prioritize common and officially supported compression
620 # engines
620 # engines
621 #
621 #
622 # It follows that we want to optimize for the "decompress compressed data
622 # It follows that we want to optimize for the "decompress compressed data
623 # when encoded with common and officially supported compression engines"
623 # when encoded with common and officially supported compression engines"
624 # case over "raw data" and "data encoded by less common or non-official
624 # case over "raw data" and "data encoded by less common or non-official
625 # compression engines." That is why we have the inline lookup first
625 # compression engines." That is why we have the inline lookup first
626 # followed by the compengines lookup.
626 # followed by the compengines lookup.
627 #
627 #
628 # According to `hg perfrevlogchunks`, this is ~0.5% faster for zlib
628 # According to `hg perfrevlogchunks`, this is ~0.5% faster for zlib
629 # compressed chunks. And this matters for changelog and manifest reads.
629 # compressed chunks. And this matters for changelog and manifest reads.
630 t = data[0:1]
630 t = data[0:1]
631
631
632 if t == b'x':
632 if t == b'x':
633 try:
633 try:
634 return _zlibdecompress(data)
634 return _zlibdecompress(data)
635 except zlib.error as e:
635 except zlib.error as e:
636 raise error.RevlogError(
636 raise error.RevlogError(
637 _(b'revlog decompress error: %s')
637 _(b'revlog decompress error: %s')
638 % stringutil.forcebytestr(e)
638 % stringutil.forcebytestr(e)
639 )
639 )
640 # '\0' is more common than 'u' so it goes first.
640 # '\0' is more common than 'u' so it goes first.
641 elif t == b'\0':
641 elif t == b'\0':
642 return data
642 return data
643 elif t == b'u':
643 elif t == b'u':
644 return util.buffer(data, 1)
644 return util.buffer(data, 1)
645
645
646 compressor = self._get_decompressor(t)
646 compressor = self._get_decompressor(t)
647
647
648 return compressor.decompress(data)
648 return compressor.decompress(data)
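# A minimal sketch of the header dispatch, using the standard zlib
# module for illustration:
#
#   import zlib
#   chunk = zlib.compress(b'revision data')
#   chunk[0:1]                       # b'x' -> routed to zlib
#   self.decompress(b'u' + b'raw')   # -> buffer over b'raw', marker dropped
#   self.decompress(b'\0raw')        # -> b'\0raw', returned untouched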
649
649
650 @contextlib.contextmanager
650 @contextlib.contextmanager
651 def reading(self):
651 def reading(self):
652 """Context manager that keeps data and sidedata files open for reading"""
652 """Context manager that keeps data and sidedata files open for reading"""
653 if len(self.index) == 0:
653 if len(self.index) == 0:
654 yield # nothing to be read
654 yield # nothing to be read
655 else:
655 else:
656 with self._segmentfile.reading():
656 with self._segmentfile.reading():
657 with self._segmentfile_sidedata.reading():
657 with self._segmentfile_sidedata.reading():
658 yield
658 yield
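# A minimal usage sketch (``inner`` standing for an instance of this
# class): keep the underlying files open across many chunk reads.
#
#   with inner.reading():
#       for rev in range(len(inner)):
#           inner._chunk(rev)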
659
659
660 @property
660 @property
661 def is_writing(self):
661 def is_writing(self):
662 """True is a writing context is open"""
662 """True is a writing context is open"""
663 return self._writinghandles is not None
663 return self._writinghandles is not None
664
664
665 @property
665 @property
666 def is_open(self):
666 def is_open(self):
667 """True if any file handle is being held
667 """True if any file handle is being held
668
668
669 Used for assertions and debugging in the Python code"""
669 Used for assertions and debugging in the Python code"""
670 return self._segmentfile.is_open or self._segmentfile_sidedata.is_open
670 return self._segmentfile.is_open or self._segmentfile_sidedata.is_open
671
671
672 @contextlib.contextmanager
672 @contextlib.contextmanager
673 def writing(self, transaction, data_end=None, sidedata_end=None):
673 def writing(self, transaction, data_end=None, sidedata_end=None):
674 """Open the revlog files for writing
674 """Open the revlog files for writing
675
675
676 Adding content to a revlog should be done within such a context.
676 Adding content to a revlog should be done within such a context.
677 """
677 """
678 if self.is_writing:
678 if self.is_writing:
679 yield
679 yield
680 else:
680 else:
681 ifh = dfh = sdfh = None
681 ifh = dfh = sdfh = None
682 try:
682 try:
683 r = len(self.index)
683 r = len(self.index)
684 # opening the data file.
684 # opening the data file.
685 dsize = 0
685 dsize = 0
686 if r:
686 if r:
687 dsize = self.end(r - 1)
687 dsize = self.end(r - 1)
688 dfh = None
688 dfh = None
689 if not self.inline:
689 if not self.inline:
690 try:
690 try:
691 dfh = self.opener(self.data_file, mode=b"r+")
691 dfh = self.opener(self.data_file, mode=b"r+")
692 if data_end is None:
692 if data_end is None:
693 dfh.seek(0, os.SEEK_END)
693 dfh.seek(0, os.SEEK_END)
694 else:
694 else:
695 dfh.seek(data_end, os.SEEK_SET)
695 dfh.seek(data_end, os.SEEK_SET)
696 except FileNotFoundError:
696 except FileNotFoundError:
697 dfh = self.opener(self.data_file, mode=b"w+")
697 dfh = self.opener(self.data_file, mode=b"w+")
698 transaction.add(self.data_file, dsize)
698 transaction.add(self.data_file, dsize)
699 if self.sidedata_file is not None:
699 if self.sidedata_file is not None:
700 assert sidedata_end is not None
700 assert sidedata_end is not None
701 # revlog-v2 does not inline, help Pytype
701 # revlog-v2 does not inline, help Pytype
702 assert dfh is not None
702 assert dfh is not None
703 try:
703 try:
704 sdfh = self.opener(self.sidedata_file, mode=b"r+")
704 sdfh = self.opener(self.sidedata_file, mode=b"r+")
705 dfh.seek(sidedata_end, os.SEEK_SET)
705 dfh.seek(sidedata_end, os.SEEK_SET)
706 except FileNotFoundError:
706 except FileNotFoundError:
707 sdfh = self.opener(self.sidedata_file, mode=b"w+")
707 sdfh = self.opener(self.sidedata_file, mode=b"w+")
708 transaction.add(self.sidedata_file, sidedata_end)
708 transaction.add(self.sidedata_file, sidedata_end)
709
709
710 # opening the index file.
710 # opening the index file.
711 isize = r * self.index.entry_size
711 isize = r * self.index.entry_size
712 ifh = self.__index_write_fp()
712 ifh = self.__index_write_fp()
713 if self.inline:
713 if self.inline:
714 transaction.add(self.index_file, dsize + isize)
714 transaction.add(self.index_file, dsize + isize)
715 else:
715 else:
716 transaction.add(self.index_file, isize)
716 transaction.add(self.index_file, isize)
717 # exposing all file handles for writing.
717 # exposing all file handles for writing.
718 self._writinghandles = (ifh, dfh, sdfh)
718 self._writinghandles = (ifh, dfh, sdfh)
719 self._segmentfile.writing_handle = ifh if self.inline else dfh
719 self._segmentfile.writing_handle = ifh if self.inline else dfh
720 self._segmentfile_sidedata.writing_handle = sdfh
720 self._segmentfile_sidedata.writing_handle = sdfh
721 yield
721 yield
722 finally:
722 finally:
723 self._writinghandles = None
723 self._writinghandles = None
724 self._segmentfile.writing_handle = None
724 self._segmentfile.writing_handle = None
725 self._segmentfile_sidedata.writing_handle = None
725 self._segmentfile_sidedata.writing_handle = None
726 if dfh is not None:
726 if dfh is not None:
727 dfh.close()
727 dfh.close()
728 if sdfh is not None:
728 if sdfh is not None:
729 sdfh.close()
729 sdfh.close()
730 # closing the index file last to avoid exposing references to
730 # closing the index file last to avoid exposing references to
731 # potentially unflushed data content.
731 # potentially unflushed data content.
732 if ifh is not None:
732 if ifh is not None:
733 ifh.close()
733 ifh.close()
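# A minimal usage sketch (``inner`` and ``tr`` are hypothetical names
# for an instance of this class and an open transaction): revisions are
# added inside this context, ultimately through write_entry().
#
#   with inner.writing(tr):
#       pass  # calls that end up in write_entry() go here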
734
734
735 def __index_write_fp(self, index_end=None):
735 def __index_write_fp(self, index_end=None):
736 """internal method to open the index file for writing
736 """internal method to open the index file for writing
737
737
738 You should not use this directly; use `_writing` instead.
738 You should not use this directly; use `_writing` instead.
739 """
739 """
740 try:
740 try:
741 if self._delay_buffer is None:
741 if self._delay_buffer is None:
742 f = self.opener(
742 f = self.opener(
743 self.index_file,
743 self.index_file,
744 mode=b"r+",
744 mode=b"r+",
745 checkambig=self.data_config.check_ambig,
745 checkambig=self.data_config.check_ambig,
746 )
746 )
747 else:
747 else:
748 # check_ambig affects the way we open the file for writing. However,
748 # check_ambig affects the way we open the file for writing. However,
749 # here we do not actually open a file for writing, as writes are
749 # here we do not actually open a file for writing, as writes are
750 # appended to a delay_buffer. So check_ambig is not meaningful and
750 # appended to a delay_buffer. So check_ambig is not meaningful and
751 # is unneeded here.
751 # is unneeded here.
752 f = randomaccessfile.appender(
752 f = randomaccessfile.appender(
753 self.opener, self.index_file, b"r+", self._delay_buffer
753 self.opener, self.index_file, b"r+", self._delay_buffer
754 )
754 )
755 if index_end is None:
755 if index_end is None:
756 f.seek(0, os.SEEK_END)
756 f.seek(0, os.SEEK_END)
757 else:
757 else:
758 f.seek(index_end, os.SEEK_SET)
758 f.seek(index_end, os.SEEK_SET)
759 return f
759 return f
760 except FileNotFoundError:
760 except FileNotFoundError:
761 if self._delay_buffer is None:
761 if self._delay_buffer is None:
762 return self.opener(
762 return self.opener(
763 self.index_file,
763 self.index_file,
764 mode=b"w+",
764 mode=b"w+",
765 checkambig=self.data_config.check_ambig,
765 checkambig=self.data_config.check_ambig,
766 )
766 )
767 else:
767 else:
768 return randomaccessfile.appender(
768 return randomaccessfile.appender(
769 self.opener, self.index_file, b"w+", self._delay_buffer
769 self.opener, self.index_file, b"w+", self._delay_buffer
770 )
770 )
771
771
772 def __index_new_fp(self):
772 def __index_new_fp(self):
773 """internal method to create a new index file for writing
773 """internal method to create a new index file for writing
774
774
775 You should not use this unless you are upgrading from an inline revlog.
775 You should not use this unless you are upgrading from an inline revlog.
776 """
776 """
777 return self.opener(
777 return self.opener(
778 self.index_file,
778 self.index_file,
779 mode=b"w",
779 mode=b"w",
780 checkambig=self.data_config.check_ambig,
780 checkambig=self.data_config.check_ambig,
781 atomictemp=True,
781 atomictemp=True,
782 )
782 )
783
783
784 def split_inline(self, tr, header, new_index_file_path=None):
784 def split_inline(self, tr, header, new_index_file_path=None):
785 """split the data of an inline revlog into an index and a data file"""
785 """split the data of an inline revlog into an index and a data file"""
786 assert self._delay_buffer is None
786 assert self._delay_buffer is None
787 existing_handles = False
787 existing_handles = False
788 if self._writinghandles is not None:
788 if self._writinghandles is not None:
789 existing_handles = True
789 existing_handles = True
790 fp = self._writinghandles[0]
790 fp = self._writinghandles[0]
791 fp.flush()
791 fp.flush()
792 fp.close()
792 fp.close()
793 # We can't use the cached file handle after close(). So prevent
793 # We can't use the cached file handle after close(). So prevent
794 # its usage.
794 # its usage.
795 self._writinghandles = None
795 self._writinghandles = None
796 self._segmentfile.writing_handle = None
796 self._segmentfile.writing_handle = None
797 # No need to deal with the sidedata writing handle as it is only
797 # No need to deal with the sidedata writing handle as it is only
798 # relevant for revlog-v2, which is never inline and thus never
798 # relevant for revlog-v2, which is never inline and thus never
799 # reaches this code.
799 # reaches this code.
800
800
801 new_dfh = self.opener(self.data_file, mode=b"w+")
801 new_dfh = self.opener(self.data_file, mode=b"w+")
802 new_dfh.truncate(0) # drop any potentially existing data
802 new_dfh.truncate(0) # drop any potentially existing data
803 try:
803 try:
804 with self.reading():
804 with self.reading():
805 for r in range(len(self.index)):
805 for r in range(len(self.index)):
806 new_dfh.write(self.get_segment_for_revs(r, r)[1])
806 new_dfh.write(self.get_segment_for_revs(r, r)[1])
807 new_dfh.flush()
807 new_dfh.flush()
808
808
809 if new_index_file_path is not None:
809 if new_index_file_path is not None:
810 self.index_file = new_index_file_path
810 self.index_file = new_index_file_path
811 with self.__index_new_fp() as fp:
811 with self.__index_new_fp() as fp:
812 self.inline = False
812 self.inline = False
813 for i in range(len(self.index)):
813 for i in range(len(self.index)):
814 e = self.index.entry_binary(i)
814 e = self.index.entry_binary(i)
815 if i == 0:
815 if i == 0:
816 packed_header = self.index.pack_header(header)
816 packed_header = self.index.pack_header(header)
817 e = packed_header + e
817 e = packed_header + e
818 fp.write(e)
818 fp.write(e)
819
819
820 # If we don't use side-write, the temp file replaces the real
820 # If we don't use side-write, the temp file replaces the real
821 # index when we exit the context manager.
821 # index when we exit the context manager.
822
822
823 self._segmentfile = randomaccessfile.randomaccessfile(
823 self._segmentfile = randomaccessfile.randomaccessfile(
824 self.opener,
824 self.opener,
825 self.data_file,
825 self.data_file,
826 self.data_config.chunk_cache_size,
826 self.data_config.chunk_cache_size,
827 )
827 )
828
828
829 if existing_handles:
829 if existing_handles:
830 # switched from inline to conventional; reopen the index
830 # switched from inline to conventional; reopen the index
831 ifh = self.__index_write_fp()
831 ifh = self.__index_write_fp()
832 self._writinghandles = (ifh, new_dfh, None)
832 self._writinghandles = (ifh, new_dfh, None)
833 self._segmentfile.writing_handle = new_dfh
833 self._segmentfile.writing_handle = new_dfh
834 new_dfh = None
834 new_dfh = None
835 # No need to deal with the sidedata writing handle as it is only
835 # No need to deal with the sidedata writing handle as it is only
836 # relevant for revlog-v2, which is never inline and thus never
836 # relevant for revlog-v2, which is never inline and thus never
837 # reaches this code.
837 # reaches this code.
838 finally:
838 finally:
839 if new_dfh is not None:
839 if new_dfh is not None:
840 new_dfh.close()
840 new_dfh.close()
841 return self.index_file
841 return self.index_file
842
842
843 def get_segment_for_revs(self, startrev, endrev):
843 def get_segment_for_revs(self, startrev, endrev):
844 """Obtain a segment of raw data corresponding to a range of revisions.
844 """Obtain a segment of raw data corresponding to a range of revisions.
845
845
846 Accepts the start and end revisions and an optional already-open
846 Accepts the start and end revisions and an optional already-open
847 file handle to be used for reading. If the file handle is read, its
847 file handle to be used for reading. If the file handle is read, its
848 seek position will not be preserved.
848 seek position will not be preserved.
849
849
850 Requests for data may be satisfied by a cache.
850 Requests for data may be satisfied by a cache.
851
851
852 Returns a 2-tuple of (offset, data) for the requested range of
852 Returns a 2-tuple of (offset, data) for the requested range of
853 revisions. Offset is the integer offset from the beginning of the
853 revisions. Offset is the integer offset from the beginning of the
854 revlog and data is a str or buffer of the raw byte data.
854 revlog and data is a str or buffer of the raw byte data.
855
855
856 Callers will need to call ``self.start(rev)`` and ``self.length(rev)``
856 Callers will need to call ``self.start(rev)`` and ``self.length(rev)``
857 to determine where each revision's data begins and ends.
857 to determine where each revision's data begins and ends.
858
858
859 API: we should consider making this a private part of the InnerRevlog
859 API: we should consider making this a private part of the InnerRevlog
860 at some point.
860 at some point.
861 """
861 """
862 # Inlined self.start(startrev) & self.end(endrev) for perf reasons
862 # Inlined self.start(startrev) & self.end(endrev) for perf reasons
863 # (functions are expensive).
863 # (functions are expensive).
864 index = self.index
864 index = self.index
865 istart = index[startrev]
865 istart = index[startrev]
866 start = int(istart[0] >> 16)
866 start = int(istart[0] >> 16)
867 if startrev == endrev:
867 if startrev == endrev:
868 end = start + istart[1]
868 end = start + istart[1]
869 else:
869 else:
870 iend = index[endrev]
870 iend = index[endrev]
871 end = int(iend[0] >> 16) + iend[1]
871 end = int(iend[0] >> 16) + iend[1]
872
872
873 if self.inline:
873 if self.inline:
874 start += (startrev + 1) * self.index.entry_size
874 start += (startrev + 1) * self.index.entry_size
875 end += (endrev + 1) * self.index.entry_size
875 end += (endrev + 1) * self.index.entry_size
876 length = end - start
876 length = end - start
877
877
878 return start, self._segmentfile.read_chunk(start, length)
878 return start, self._segmentfile.read_chunk(start, length)
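# A minimal sketch of slicing individual chunks back out of the returned
# segment for a non-inline revlog (``lo`` and ``hi`` are hypothetical
# revision numbers):
#
#   offset, data = self.get_segment_for_revs(lo, hi)
#   for rev in range(lo, hi + 1):
#       chunk = data[self.start(rev) - offset : self.end(rev) - offset]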
879
879
880 def _chunk(self, rev):
880 def _chunk(self, rev):
881 """Obtain a single decompressed chunk for a revision.
881 """Obtain a single decompressed chunk for a revision.
882
882
883 Accepts an integer revision and an optional already-open file handle
883 Accepts an integer revision and an optional already-open file handle
884 to be used for reading. If used, the seek position of the file will not
884 to be used for reading. If used, the seek position of the file will not
885 be preserved.
885 be preserved.
886
886
887 Returns a str holding uncompressed data for the requested revision.
887 Returns a str holding uncompressed data for the requested revision.
888 """
888 """
889 if self._uncompressed_chunk_cache is not None:
889 if self._uncompressed_chunk_cache is not None:
890 uncomp = self._uncompressed_chunk_cache.get(rev)
890 uncomp = self._uncompressed_chunk_cache.get(rev)
891 if uncomp is not None:
891 if uncomp is not None:
892 return uncomp
892 return uncomp
893
893
894 compression_mode = self.index[rev][10]
894 compression_mode = self.index[rev][10]
895 data = self.get_segment_for_revs(rev, rev)[1]
895 data = self.get_segment_for_revs(rev, rev)[1]
896 if compression_mode == COMP_MODE_PLAIN:
896 if compression_mode == COMP_MODE_PLAIN:
897 uncomp = data
897 uncomp = data
898 elif compression_mode == COMP_MODE_DEFAULT:
898 elif compression_mode == COMP_MODE_DEFAULT:
899 uncomp = self._decompressor(data)
899 uncomp = self._decompressor(data)
900 elif compression_mode == COMP_MODE_INLINE:
900 elif compression_mode == COMP_MODE_INLINE:
901 uncomp = self.decompress(data)
901 uncomp = self.decompress(data)
902 else:
902 else:
903 msg = b'unknown compression mode %d'
903 msg = b'unknown compression mode %d'
904 msg %= compression_mode
904 msg %= compression_mode
905 raise error.RevlogError(msg)
905 raise error.RevlogError(msg)
906 if self._uncompressed_chunk_cache is not None:
906 if self._uncompressed_chunk_cache is not None:
907 self._uncompressed_chunk_cache.insert(rev, uncomp, cost=len(uncomp))
907 self._uncompressed_chunk_cache.insert(rev, uncomp, cost=len(uncomp))
908 return uncomp
908 return uncomp
909
909
910 def _chunks(self, revs, targetsize=None):
910 def _chunks(self, revs, targetsize=None):
911 """Obtain decompressed chunks for the specified revisions.
911 """Obtain decompressed chunks for the specified revisions.
912
912
913 Accepts an iterable of numeric revisions that are assumed to be in
913 Accepts an iterable of numeric revisions that are assumed to be in
914 ascending order. Also accepts an optional already-open file handle
914 ascending order. Also accepts an optional already-open file handle
915 to be used for reading. If used, the seek position of the file will
915 to be used for reading. If used, the seek position of the file will
916 not be preserved.
916 not be preserved.
917
917
918 This function is similar to calling ``self._chunk()`` multiple times,
918 This function is similar to calling ``self._chunk()`` multiple times,
919 but is faster.
919 but is faster.
920
920
921 Returns a list with decompressed data for each requested revision.
921 Returns a list with decompressed data for each requested revision.
922 """
922 """
923 if not revs:
923 if not revs:
924 return []
924 return []
925 start = self.start
925 start = self.start
926 length = self.length
926 length = self.length
927 inline = self.inline
927 inline = self.inline
928 iosize = self.index.entry_size
928 iosize = self.index.entry_size
929 buffer = util.buffer
929 buffer = util.buffer
930
930
931 fetched_revs = []
931 fetched_revs = []
932 fadd = fetched_revs.append
932 fadd = fetched_revs.append
933
933
934 chunks = []
934 chunks = []
935 ladd = chunks.append
935 ladd = chunks.append
936
936
937 if self._uncompressed_chunk_cache is None:
937 if self._uncompressed_chunk_cache is None:
938 fetched_revs = revs
938 fetched_revs = revs
939 else:
939 else:
940 for rev in revs:
940 for rev in revs:
941 cached_value = self._uncompressed_chunk_cache.get(rev)
941 cached_value = self._uncompressed_chunk_cache.get(rev)
942 if cached_value is None:
942 if cached_value is None:
943 fadd(rev)
943 fadd(rev)
944 else:
944 else:
945 ladd((rev, cached_value))
945 ladd((rev, cached_value))
946
946
947 if not fetched_revs:
947 if not fetched_revs:
948 slicedchunks = ()
948 slicedchunks = ()
949 elif not self.data_config.with_sparse_read:
949 elif not self.data_config.with_sparse_read:
950 slicedchunks = (fetched_revs,)
950 slicedchunks = (fetched_revs,)
951 else:
951 else:
952 slicedchunks = deltautil.slicechunk(
952 slicedchunks = deltautil.slicechunk(
953 self,
953 self,
954 fetched_revs,
954 fetched_revs,
955 targetsize=targetsize,
955 targetsize=targetsize,
956 )
956 )
957
957
958 for revschunk in slicedchunks:
958 for revschunk in slicedchunks:
959 firstrev = revschunk[0]
959 firstrev = revschunk[0]
960 # Skip trailing revisions with empty diff
960 # Skip trailing revisions with empty diff
961 for lastrev in revschunk[::-1]:
961 for lastrev in revschunk[::-1]:
962 if length(lastrev) != 0:
962 if length(lastrev) != 0:
963 break
963 break
964
964
965 try:
965 try:
966 offset, data = self.get_segment_for_revs(firstrev, lastrev)
966 offset, data = self.get_segment_for_revs(firstrev, lastrev)
967 except OverflowError:
967 except OverflowError:
968 # issue4215 - we can't cache a run of chunks greater than
968 # issue4215 - we can't cache a run of chunks greater than
969 # 2G on Windows
969 # 2G on Windows
970 for rev in revschunk:
970 for rev in revschunk:
971 ladd((rev, self._chunk(rev)))
971 ladd((rev, self._chunk(rev)))
972
972
973 decomp = self.decompress
973 decomp = self.decompress
974 # self._decompressor might be None, but will not be used in that case
974 # self._decompressor might be None, but will not be used in that case
975 def_decomp = self._decompressor
975 def_decomp = self._decompressor
976 for rev in revschunk:
976 for rev in revschunk:
977 chunkstart = start(rev)
977 chunkstart = start(rev)
978 if inline:
978 if inline:
979 chunkstart += (rev + 1) * iosize
979 chunkstart += (rev + 1) * iosize
980 chunklength = length(rev)
980 chunklength = length(rev)
981 comp_mode = self.index[rev][10]
981 comp_mode = self.index[rev][10]
982 c = buffer(data, chunkstart - offset, chunklength)
982 c = buffer(data, chunkstart - offset, chunklength)
983 if comp_mode == COMP_MODE_PLAIN:
983 if comp_mode == COMP_MODE_PLAIN:
984 c = c
984 c = c
985 elif comp_mode == COMP_MODE_INLINE:
985 elif comp_mode == COMP_MODE_INLINE:
986 c = decomp(c)
986 c = decomp(c)
987 elif comp_mode == COMP_MODE_DEFAULT:
987 elif comp_mode == COMP_MODE_DEFAULT:
988 c = def_decomp(c)
988 c = def_decomp(c)
989 else:
989 else:
990 msg = b'unknown compression mode %d'
990 msg = b'unknown compression mode %d'
991 msg %= comp_mode
991 msg %= comp_mode
992 raise error.RevlogError(msg)
992 raise error.RevlogError(msg)
993 ladd((rev, c))
993 ladd((rev, c))
994 if self._uncompressed_chunk_cache is not None:
994 if self._uncompressed_chunk_cache is not None:
995 self._uncompressed_chunk_cache.insert(rev, c, len(c))
995 self._uncompressed_chunk_cache.insert(rev, c, len(c))
996
996
997 chunks.sort()
997 chunks.sort()
998 return [x[1] for x in chunks]
998 return [x[1] for x in chunks]
999
999
1000 def raw_text(self, node, rev):
1000 def raw_text(self, node, rev):
1001 """return the possibly unvalidated rawtext for a revision
1001 """return the possibly unvalidated rawtext for a revision
1002
1002
1003 returns (rev, rawtext, validated)
1003 returns (rev, rawtext, validated)
1004 """
1004 """
1005
1005
1006 # revision in the cache (could be useful to apply delta)
1006 # revision in the cache (could be useful to apply delta)
1007 cachedrev = None
1007 cachedrev = None
1008 # An intermediate text to apply deltas to
1008 # An intermediate text to apply deltas to
1009 basetext = None
1009 basetext = None
1010
1010
1011 # Check if we have the entry in cache
1011 # Check if we have the entry in cache
1012 # The cache entry looks like (node, rev, rawtext)
1012 # The cache entry looks like (node, rev, rawtext)
1013 if self._revisioncache:
1013 if self._revisioncache:
1014 cachedrev = self._revisioncache[1]
1014 cachedrev = self._revisioncache[1]
1015
1015
1016 chain, stopped = self._deltachain(rev, stoprev=cachedrev)
1016 chain, stopped = self._deltachain(rev, stoprev=cachedrev)
1017 if stopped:
1017 if stopped:
1018 basetext = self._revisioncache[2]
1018 basetext = self._revisioncache[2]
1019
1019
1020 # drop cache to save memory, the caller is expected to
1020 # drop cache to save memory, the caller is expected to
1021 # update self._inner._revisioncache after validating the text
1021 # update self._inner._revisioncache after validating the text
1022 self._revisioncache = None
1022 self._revisioncache = None
1023
1023
1024 targetsize = None
1024 targetsize = None
1025 rawsize = self.index[rev][2]
1025 rawsize = self.index[rev][2]
1026 if 0 <= rawsize:
1026 if 0 <= rawsize:
1027 targetsize = 4 * rawsize
1027 targetsize = 4 * rawsize
1028
1028
1029 if self._uncompressed_chunk_cache is not None:
1029 if self._uncompressed_chunk_cache is not None:
1030 # dynamically update the uncompressed_chunk_cache size to the
1030 # dynamically update the uncompressed_chunk_cache size to the
1031 # largest revision we saw in this revlog.
1031 # largest revision we saw in this revlog.
1032 factor = self.data_config.uncompressed_cache_factor
1032 factor = self.data_config.uncompressed_cache_factor
1033 candidate_size = rawsize * factor
1033 candidate_size = rawsize * factor
1034 if candidate_size > self._uncompressed_chunk_cache.maxcost:
1034 if candidate_size > self._uncompressed_chunk_cache.maxcost:
1035 self._uncompressed_chunk_cache.maxcost = candidate_size
1035 self._uncompressed_chunk_cache.maxcost = candidate_size
1036
1036
1037 bins = self._chunks(chain, targetsize=targetsize)
1037 bins = self._chunks(chain, targetsize=targetsize)
1038 if basetext is None:
1038 if basetext is None:
1039 basetext = bytes(bins[0])
1039 basetext = bytes(bins[0])
1040 bins = bins[1:]
1040 bins = bins[1:]
1041
1041
1042 rawtext = mdiff.patches(basetext, bins)
1042 rawtext = mdiff.patches(basetext, bins)
1043 del basetext # let us have a chance to free memory early
1043 del basetext # let us have a chance to free memory early
1044 return (rev, rawtext, False)
1044 return (rev, rawtext, False)
1045
1045
1046 def sidedata(self, rev, sidedata_end):
1046 def sidedata(self, rev, sidedata_end):
1047 """Return the sidedata for a given revision number."""
1047 """Return the sidedata for a given revision number."""
1048 index_entry = self.index[rev]
1048 index_entry = self.index[rev]
1049 sidedata_offset = index_entry[8]
1049 sidedata_offset = index_entry[8]
1050 sidedata_size = index_entry[9]
1050 sidedata_size = index_entry[9]
1051
1051
1052 if self.inline:
1052 if self.inline:
1053 sidedata_offset += self.index.entry_size * (1 + rev)
1053 sidedata_offset += self.index.entry_size * (1 + rev)
1054 if sidedata_size == 0:
1054 if sidedata_size == 0:
1055 return {}
1055 return {}
1056
1056
1057 if sidedata_end < sidedata_offset + sidedata_size:
1057 if sidedata_end < sidedata_offset + sidedata_size:
1058 filename = self.sidedata_file
1058 filename = self.sidedata_file
1059 end = sidedata_end
1059 end = sidedata_end
1060 offset = sidedata_offset
1060 offset = sidedata_offset
1061 length = sidedata_size
1061 length = sidedata_size
1062 m = FILE_TOO_SHORT_MSG % (filename, length, offset, end)
1062 m = FILE_TOO_SHORT_MSG % (filename, length, offset, end)
1063 raise error.RevlogError(m)
1063 raise error.RevlogError(m)
1064
1064
1065 comp_segment = self._segmentfile_sidedata.read_chunk(
1065 comp_segment = self._segmentfile_sidedata.read_chunk(
1066 sidedata_offset, sidedata_size
1066 sidedata_offset, sidedata_size
1067 )
1067 )
1068
1068
1069 comp = self.index[rev][11]
1069 comp = self.index[rev][11]
1070 if comp == COMP_MODE_PLAIN:
1070 if comp == COMP_MODE_PLAIN:
1071 segment = comp_segment
1071 segment = comp_segment
1072 elif comp == COMP_MODE_DEFAULT:
1072 elif comp == COMP_MODE_DEFAULT:
1073 segment = self._decompressor(comp_segment)
1073 segment = self._decompressor(comp_segment)
1074 elif comp == COMP_MODE_INLINE:
1074 elif comp == COMP_MODE_INLINE:
1075 segment = self.decompress(comp_segment)
1075 segment = self.decompress(comp_segment)
1076 else:
1076 else:
1077 msg = b'unknown compression mode %d'
1077 msg = b'unknown compression mode %d'
1078 msg %= comp
1078 msg %= comp
1079 raise error.RevlogError(msg)
1079 raise error.RevlogError(msg)
1080
1080
1081 sidedata = sidedatautil.deserialize_sidedata(segment)
1081 sidedata = sidedatautil.deserialize_sidedata(segment)
1082 return sidedata
1082 return sidedata
1083
1083
1084 def write_entry(
1084 def write_entry(
1085 self,
1085 self,
1086 transaction,
1086 transaction,
1087 entry,
1087 entry,
1088 data,
1088 data,
1089 link,
1089 link,
1090 offset,
1090 offset,
1091 sidedata,
1091 sidedata,
1092 sidedata_offset,
1092 sidedata_offset,
1093 index_end,
1093 index_end,
1094 data_end,
1094 data_end,
1095 sidedata_end,
1095 sidedata_end,
1096 ):
1096 ):
1097 # Files opened in a+ mode have inconsistent behavior on various
1097 # Files opened in a+ mode have inconsistent behavior on various
1098 # platforms. Windows requires that a file positioning call be made
1098 # platforms. Windows requires that a file positioning call be made
1099 # when the file handle transitions between reads and writes. See
1099 # when the file handle transitions between reads and writes. See
1100 # 3686fa2b8eee and the mixedfilemodewrapper in windows.py. On other
1100 # 3686fa2b8eee and the mixedfilemodewrapper in windows.py. On other
1101 # platforms, Python or the platform itself can be buggy. Some versions
1101 # platforms, Python or the platform itself can be buggy. Some versions
1102 # of Solaris have been observed to not append at the end of the file
1102 # of Solaris have been observed to not append at the end of the file
1103 # if the file was seeked to before the end. See issue4943 for more.
1103 # if the file was seeked to before the end. See issue4943 for more.
1104 #
1104 #
1105 # We work around this issue by inserting a seek() before writing.
1105 # We work around this issue by inserting a seek() before writing.
1106 # Note: This is likely not necessary on Python 3. However, because
1106 # Note: This is likely not necessary on Python 3. However, because
1107 # the file handle is reused for reads and may be seeked there, we need
1107 # the file handle is reused for reads and may be seeked there, we need
1108 # to be careful before changing this.
1108 # to be careful before changing this.
1109 if self._writinghandles is None:
1109 if self._writinghandles is None:
1110 msg = b'adding revision outside `revlog._writing` context'
1110 msg = b'adding revision outside `revlog._writing` context'
1111 raise error.ProgrammingError(msg)
1111 raise error.ProgrammingError(msg)
1112 ifh, dfh, sdfh = self._writinghandles
1112 ifh, dfh, sdfh = self._writinghandles
1113 if index_end is None:
1113 if index_end is None:
1114 ifh.seek(0, os.SEEK_END)
1114 ifh.seek(0, os.SEEK_END)
1115 else:
1115 else:
1116 ifh.seek(index_end, os.SEEK_SET)
1116 ifh.seek(index_end, os.SEEK_SET)
1117 if dfh:
1117 if dfh:
1118 if data_end is None:
1118 if data_end is None:
1119 dfh.seek(0, os.SEEK_END)
1119 dfh.seek(0, os.SEEK_END)
1120 else:
1120 else:
1121 dfh.seek(data_end, os.SEEK_SET)
1121 dfh.seek(data_end, os.SEEK_SET)
1122 if sdfh:
1122 if sdfh:
1123 sdfh.seek(sidedata_end, os.SEEK_SET)
1123 sdfh.seek(sidedata_end, os.SEEK_SET)
1124
1124
1125 curr = len(self.index) - 1
1125 curr = len(self.index) - 1
1126 if not self.inline:
1126 if not self.inline:
1127 transaction.add(self.data_file, offset)
1127 transaction.add(self.data_file, offset)
1128 if self.sidedata_file:
1128 if self.sidedata_file:
1129 transaction.add(self.sidedata_file, sidedata_offset)
1129 transaction.add(self.sidedata_file, sidedata_offset)
1130 transaction.add(self.canonical_index_file, curr * len(entry))
1130 transaction.add(self.canonical_index_file, curr * len(entry))
1131 if data[0]:
1131 if data[0]:
1132 dfh.write(data[0])
1132 dfh.write(data[0])
1133 dfh.write(data[1])
1133 dfh.write(data[1])
1134 if sidedata:
1134 if sidedata:
1135 sdfh.write(sidedata)
1135 sdfh.write(sidedata)
1136 if self._delay_buffer is None:
1136 if self._delay_buffer is None:
1137 ifh.write(entry)
1137 ifh.write(entry)
1138 else:
1138 else:
1139 self._delay_buffer.append(entry)
1139 self._delay_buffer.append(entry)
1140 else:
1140 else:
1141 offset += curr * self.index.entry_size
1141 offset += curr * self.index.entry_size
1142 transaction.add(self.canonical_index_file, offset)
1142 transaction.add(self.canonical_index_file, offset)
1143 assert not sidedata
1143 assert not sidedata
1144 if self._delay_buffer is None:
1144 if self._delay_buffer is None:
1145 ifh.write(entry)
1145 ifh.write(entry)
1146 ifh.write(data[0])
1146 ifh.write(data[0])
1147 ifh.write(data[1])
1147 ifh.write(data[1])
1148 else:
1148 else:
1149 self._delay_buffer.append(entry)
1149 self._delay_buffer.append(entry)
1150 self._delay_buffer.append(data[0])
1150 self._delay_buffer.append(data[0])
1151 self._delay_buffer.append(data[1])
1151 self._delay_buffer.append(data[1])
1152 return (
1152 return (
1153 ifh.tell(),
1153 ifh.tell(),
1154 dfh.tell() if dfh else None,
1154 dfh.tell() if dfh else None,
1155 sdfh.tell() if sdfh else None,
1155 sdfh.tell() if sdfh else None,
1156 )
1156 )
1157
1157
1158 def _divert_index(self):
1158 def _divert_index(self):
1159 return self.index_file + b'.a'
1159 return self.index_file + b'.a'
1160
1160
1161 def delay(self):
1161 def delay(self):
1162 assert not self.is_open
1162 assert not self.is_open
1163 if self._delay_buffer is not None or self._orig_index_file is not None:
1163 if self._delay_buffer is not None or self._orig_index_file is not None:
1164 # delay or divert already in place
1164 # delay or divert already in place
1165 return None
1165 return None
1166 elif len(self.index) == 0:
1166 elif len(self.index) == 0:
1167 self._orig_index_file = self.index_file
1167 self._orig_index_file = self.index_file
1168 self.index_file = self._divert_index()
1168 self.index_file = self._divert_index()
1169 assert self._orig_index_file is not None
1169 assert self._orig_index_file is not None
1170 assert self.index_file is not None
1170 assert self.index_file is not None
1171 if self.opener.exists(self.index_file):
1171 if self.opener.exists(self.index_file):
1172 self.opener.unlink(self.index_file)
1172 self.opener.unlink(self.index_file)
1173 return self.index_file
1173 return self.index_file
1174 else:
1174 else:
1175 self._delay_buffer = []
1175 self._delay_buffer = []
1176 if self.inline:
1176 if self.inline:
1177 self._segmentfile._delay_buffer = self._delay_buffer
1177 self._segmentfile._delay_buffer = self._delay_buffer
1178 return None
1178 return None
1179
1179
1180 def write_pending(self):
1180 def write_pending(self):
1181 assert not self.is_open
1181 assert not self.is_open
1182 if self._orig_index_file is not None:
1182 if self._orig_index_file is not None:
1183 return None, True
1183 return None, True
1184 any_pending = False
1184 any_pending = False
1185 pending_index_file = self._divert_index()
1185 pending_index_file = self._divert_index()
1186 if self.opener.exists(pending_index_file):
1186 if self.opener.exists(pending_index_file):
1187 self.opener.unlink(pending_index_file)
1187 self.opener.unlink(pending_index_file)
1188 util.copyfile(
1188 util.copyfile(
1189 self.opener.join(self.index_file),
1189 self.opener.join(self.index_file),
1190 self.opener.join(pending_index_file),
1190 self.opener.join(pending_index_file),
1191 )
1191 )
1192 if self._delay_buffer:
1192 if self._delay_buffer:
1193 with self.opener(pending_index_file, b'r+') as ifh:
1193 with self.opener(pending_index_file, b'r+') as ifh:
1194 ifh.seek(0, os.SEEK_END)
1194 ifh.seek(0, os.SEEK_END)
1195 ifh.write(b"".join(self._delay_buffer))
1195 ifh.write(b"".join(self._delay_buffer))
1196 any_pending = True
1196 any_pending = True
1197 self._delay_buffer = None
1197 self._delay_buffer = None
1198 if self.inline:
1198 if self.inline:
1199 self._segmentfile._delay_buffer = self._delay_buffer
1199 self._segmentfile._delay_buffer = self._delay_buffer
1200 else:
1200 else:
1201 assert self._segmentfile._delay_buffer is None
1201 assert self._segmentfile._delay_buffer is None
1202 self._orig_index_file = self.index_file
1202 self._orig_index_file = self.index_file
1203 self.index_file = pending_index_file
1203 self.index_file = pending_index_file
1204 return self.index_file, any_pending
1204 return self.index_file, any_pending
1205
1205
1206 def finalize_pending(self):
1206 def finalize_pending(self):
1207 assert not self.is_open
1207 assert not self.is_open
1208
1208
1209 delay = self._delay_buffer is not None
1209 delay = self._delay_buffer is not None
1210 divert = self._orig_index_file is not None
1210 divert = self._orig_index_file is not None
1211
1211
1212 if delay and divert:
1212 if delay and divert:
1213 assert False, "unreachable"
1213 assert False, "unreachable"
1214 elif delay:
1214 elif delay:
1215 if self._delay_buffer:
1215 if self._delay_buffer:
1216 with self.opener(self.index_file, b'r+') as ifh:
1216 with self.opener(self.index_file, b'r+') as ifh:
1217 ifh.seek(0, os.SEEK_END)
1217 ifh.seek(0, os.SEEK_END)
1218 ifh.write(b"".join(self._delay_buffer))
1218 ifh.write(b"".join(self._delay_buffer))
1219 self._segmentfile._delay_buffer = self._delay_buffer = None
1219 self._segmentfile._delay_buffer = self._delay_buffer = None
1220 elif divert:
1220 elif divert:
1221 if self.opener.exists(self.index_file):
1221 if self.opener.exists(self.index_file):
1222 self.opener.rename(
1222 self.opener.rename(
1223 self.index_file,
1223 self.index_file,
1224 self._orig_index_file,
1224 self._orig_index_file,
1225 checkambig=True,
1225 checkambig=True,
1226 )
1226 )
1227 self.index_file = self._orig_index_file
1227 self.index_file = self._orig_index_file
1228 self._orig_index_file = None
1228 self._orig_index_file = None
1229 else:
1229 else:
1230 msg = b"not delay or divert found on this revlog"
1230 msg = b"not delay or divert found on this revlog"
1231 raise error.ProgrammingError(msg)
1231 raise error.ProgrammingError(msg)
1232 return self.canonical_index_file
1232 return self.canonical_index_file
1233
1233
1234
1234
1235 class revlog:
1235 class revlog:
1236 """
1236 """
1237 the underlying revision storage object
1237 the underlying revision storage object
1238
1238
1239 A revlog consists of two parts, an index and the revision data.
1239 A revlog consists of two parts, an index and the revision data.
1240
1240
1241 The index is a file with a fixed record size containing
1241 The index is a file with a fixed record size containing
1242 information on each revision, including its nodeid (hash), the
1242 information on each revision, including its nodeid (hash), the
1243 nodeids of its parents, the position and offset of its data within
1243 nodeids of its parents, the position and offset of its data within
1244 the data file, and the revision it's based on. Finally, each entry
1244 the data file, and the revision it's based on. Finally, each entry
1245 contains a linkrev entry that can serve as a pointer to external
1245 contains a linkrev entry that can serve as a pointer to external
1246 data.
1246 data.
1247
1247
1248 The revision data itself is a linear collection of data chunks.
1248 The revision data itself is a linear collection of data chunks.
1249 Each chunk represents a revision and is usually represented as a
1249 Each chunk represents a revision and is usually represented as a
1250 delta against the previous chunk. To bound lookup time, runs of
1250 delta against the previous chunk. To bound lookup time, runs of
1251 deltas are limited to about 2 times the length of the original
1251 deltas are limited to about 2 times the length of the original
1252 version data. This makes retrieval of a version proportional to
1252 version data. This makes retrieval of a version proportional to
1253 its size, or O(1) relative to the number of revisions.
1253 its size, or O(1) relative to the number of revisions.
1254
1254
1255 Both pieces of the revlog are written to in an append-only
1255 Both pieces of the revlog are written to in an append-only
1256 fashion, which means we never need to rewrite a file to insert or
1256 fashion, which means we never need to rewrite a file to insert or
1257 remove data, and can use some simple techniques to avoid the need
1257 remove data, and can use some simple techniques to avoid the need
1258 for locking while reading.
1258 for locking while reading.
1259
1259
1260 If checkambig, indexfile is opened with checkambig=True at
1260 If checkambig, indexfile is opened with checkambig=True at
1261 writing, to avoid file stat ambiguity.
1261 writing, to avoid file stat ambiguity.
1262
1262
1263 If mmaplargeindex is True, and an mmapindexthreshold is set, the
1263 If mmaplargeindex is True, and an mmapindexthreshold is set, the
1264 index will be mmapped rather than read if it is larger than the
1264 index will be mmapped rather than read if it is larger than the
1265 configured threshold.
1265 configured threshold.
1266
1266
1267 If censorable is True, the revlog can have censored revisions.
1267 If censorable is True, the revlog can have censored revisions.
1268
1268
1269 If `upperboundcomp` is not None, this is the expected maximal gain from
1269 If `upperboundcomp` is not None, this is the expected maximal gain from
1270 compression for the data content.
1270 compression for the data content.
1271
1271
1272 `concurrencychecker` is an optional function that receives 3 arguments: a
1272 `concurrencychecker` is an optional function that receives 3 arguments: a
1273 file handle, a filename, and an expected position. It should check whether
1273 file handle, a filename, and an expected position. It should check whether
1274 the current position in the file handle is valid, and log/warn/fail (by
1274 the current position in the file handle is valid, and log/warn/fail (by
1275 raising).
1275 raising).
1276
1276
1277 See mercurial/revlogutils/constants.py for details about the content of an
1277 See mercurial/revlogutils/constants.py for details about the content of an
1278 index entry.
1278 index entry.
1279 """
1279 """
1280
1280
1281 _flagserrorclass = error.RevlogError
1281 _flagserrorclass = error.RevlogError
1282
1282
1283 @staticmethod
1283 @staticmethod
1284 def is_inline_index(header_bytes):
1284 def is_inline_index(header_bytes):
1285 """Determine if a revlog is inline from the initial bytes of the index"""
1285 """Determine if a revlog is inline from the initial bytes of the index"""
1286 if len(header_bytes) == 0:
1286 if len(header_bytes) == 0:
1287 return True
1287 return True
1288
1288
1289 header = INDEX_HEADER.unpack(header_bytes)[0]
1289 header = INDEX_HEADER.unpack(header_bytes)[0]
1290
1290
1291 _format_flags = header & ~0xFFFF
1291 _format_flags = header & ~0xFFFF
1292 _format_version = header & 0xFFFF
1292 _format_version = header & 0xFFFF
1293
1293
1294 features = FEATURES_BY_VERSION[_format_version]
1294 features = FEATURES_BY_VERSION[_format_version]
1295 return features[b'inline'](_format_flags)
1295 return features[b'inline'](_format_flags)
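# A small worked example, assuming the v1 constants from
# mercurial/revlogutils/constants.py (REVLOGV1 = 1 and
# FLAG_INLINE_DATA = 1 << 16):
#
#   header = REVLOGV1 | FLAG_INLINE_DATA
#   header & 0xFFFF    # -> 1, the format version
#   header & ~0xFFFF   # -> 1 << 16, format flags with 'inline' set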
1296
1296
1297 def __init__(
1297 def __init__(
1298 self,
1298 self,
1299 opener,
1299 opener,
1300 target,
1300 target,
1301 radix,
1301 radix,
1302 postfix=None, # only exist for `tmpcensored` now
1302 postfix=None, # only exist for `tmpcensored` now
1303 checkambig=False,
1303 checkambig=False,
1304 mmaplargeindex=False,
1304 mmaplargeindex=False,
1305 censorable=False,
1305 censorable=False,
1306 upperboundcomp=None,
1306 upperboundcomp=None,
1307 persistentnodemap=False,
1307 persistentnodemap=False,
1308 concurrencychecker=None,
1308 concurrencychecker=None,
1309 trypending=False,
1309 trypending=False,
1310 try_split=False,
1310 try_split=False,
1311 canonical_parent_order=True,
1311 canonical_parent_order=True,
1312 data_config=None,
1312 data_config=None,
1313 delta_config=None,
1313 delta_config=None,
1314 feature_config=None,
1314 feature_config=None,
1315 may_inline=True, # may inline new revlog
1315 may_inline=True, # may inline new revlog
1316 ):
1316 ):
1317 """
1317 """
1318 create a revlog object
1318 create a revlog object
1319
1319
1320 opener is a function that abstracts the file opening operation
1320 opener is a function that abstracts the file opening operation
1321 and can be used to implement COW semantics or the like.
1321 and can be used to implement COW semantics or the like.
1322
1322
1323 `target`: a (KIND, ID) tuple that identifies the content stored in
1323 `target`: a (KIND, ID) tuple that identifies the content stored in
1324 this revlog. It helps the rest of the code understand what the revlog
1324 this revlog. It helps the rest of the code understand what the revlog
1325 is about without having to resort to heuristics and index filename
1325 is about without having to resort to heuristics and index filename
1326 analysis. Note that this must reliably be set by normal code, but
1326 analysis. Note that this must reliably be set by normal code, but
1327 test, debug, or performance measurement code might not set this to an
1327 test, debug, or performance measurement code might not set this to an
1328 accurate value.
1328 accurate value.
1329 """
1329 """

        self.radix = radix

        self._docket_file = None
        self._indexfile = None
        self._datafile = None
        self._sidedatafile = None
        self._nodemap_file = None
        self.postfix = postfix
        self._trypending = trypending
        self._try_split = try_split
        self._may_inline = may_inline
        self.opener = opener
        if persistentnodemap:
            self._nodemap_file = nodemaputil.get_nodemap_file(self)

        assert target[0] in ALL_KINDS
        assert len(target) == 2
        self.target = target
        if feature_config is not None:
            self.feature_config = feature_config.copy()
        elif b'feature-config' in self.opener.options:
            self.feature_config = self.opener.options[b'feature-config'].copy()
        else:
            self.feature_config = FeatureConfig()
        self.feature_config.censorable = censorable
        self.feature_config.canonical_parent_order = canonical_parent_order
        if data_config is not None:
            self.data_config = data_config.copy()
        elif b'data-config' in self.opener.options:
            self.data_config = self.opener.options[b'data-config'].copy()
        else:
            self.data_config = DataConfig()
        self.data_config.check_ambig = checkambig
        self.data_config.mmap_large_index = mmaplargeindex
        if delta_config is not None:
            self.delta_config = delta_config.copy()
        elif b'delta-config' in self.opener.options:
            self.delta_config = self.opener.options[b'delta-config'].copy()
        else:
            self.delta_config = DeltaConfig()
        self.delta_config.upper_bound_comp = upperboundcomp

        # Maps rev to chain base rev.
        self._chainbasecache = util.lrucachedict(100)

        self.index = None
        self._docket = None
        self._nodemap_docket = None
        # Mapping of partial identifiers to full nodes.
        self._pcache = {}

        # other optional features

        # Make copy of flag processors so each revlog instance can support
        # custom flags.
        self._flagprocessors = dict(flagutil.flagprocessors)
        # prevent nesting of addgroup
        self._adding_group = None

        chunk_cache = self._loadindex()
        self._load_inner(chunk_cache)
        self._concurrencychecker = concurrencychecker

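    # Illustrative sketch (hypothetical caller code, not taken from the
    # original file): the constructor resolves each of the three config
    # objects with the same precedence: an explicit *_config argument wins,
    # then a matching entry in opener.options, then a freshly built default.
    #
    #   cfg = DeltaConfig()
    #   rl = revlog(opener, (KIND_FILELOG, b'foo.txt'), b'data/foo.txt',
    #               delta_config=cfg)  # wins over opener.options[b'delta-config']
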
    @property
    def _generaldelta(self):
        """temporary compatibility proxy"""
        util.nouideprecwarn(
            b"use revlog.delta_config.general_delta", b"6.6", stacklevel=2
        )
        return self.delta_config.general_delta

    @property
    def _checkambig(self):
        """temporary compatibility proxy"""
        util.nouideprecwarn(
            b"use revlog.data_config.checkambig", b"6.6", stacklevel=2
        )
        return self.data_config.check_ambig

    @property
    def _mmaplargeindex(self):
        """temporary compatibility proxy"""
        util.nouideprecwarn(
            b"use revlog.data_config.mmap_large_index", b"6.6", stacklevel=2
        )
        return self.data_config.mmap_large_index

    @property
    def _censorable(self):
        """temporary compatibility proxy"""
        util.nouideprecwarn(
            b"use revlog.feature_config.censorable", b"6.6", stacklevel=2
        )
        return self.feature_config.censorable

    @property
    def _chunkcachesize(self):
        """temporary compatibility proxy"""
        util.nouideprecwarn(
            b"use revlog.data_config.chunk_cache_size", b"6.6", stacklevel=2
        )
        return self.data_config.chunk_cache_size

    @property
    def _maxchainlen(self):
        """temporary compatibility proxy"""
        util.nouideprecwarn(
            b"use revlog.delta_config.max_chain_len", b"6.6", stacklevel=2
        )
        return self.delta_config.max_chain_len

    @property
    def _deltabothparents(self):
        """temporary compatibility proxy"""
        util.nouideprecwarn(
            b"use revlog.delta_config.delta_both_parents", b"6.6", stacklevel=2
        )
        return self.delta_config.delta_both_parents

    @property
    def _candidate_group_chunk_size(self):
        """temporary compatibility proxy"""
        util.nouideprecwarn(
            b"use revlog.delta_config.candidate_group_chunk_size",
            b"6.6",
            stacklevel=2,
        )
        return self.delta_config.candidate_group_chunk_size

    @property
    def _debug_delta(self):
        """temporary compatibility proxy"""
        util.nouideprecwarn(
            b"use revlog.delta_config.debug_delta", b"6.6", stacklevel=2
        )
        return self.delta_config.debug_delta

    @property
    def _compengine(self):
        """temporary compatibility proxy"""
        util.nouideprecwarn(
            b"use revlog.feature_config.compression_engine",
            b"6.6",
            stacklevel=2,
        )
        return self.feature_config.compression_engine

    @property
    def upperboundcomp(self):
        """temporary compatibility proxy"""
        util.nouideprecwarn(
            b"use revlog.delta_config.upper_bound_comp",
            b"6.6",
            stacklevel=2,
        )
        return self.delta_config.upper_bound_comp

    @property
    def _compengineopts(self):
        """temporary compatibility proxy"""
        util.nouideprecwarn(
            b"use revlog.feature_config.compression_engine_options",
            b"6.6",
            stacklevel=2,
        )
        return self.feature_config.compression_engine_options

    @property
    def _maxdeltachainspan(self):
        """temporary compatibility proxy"""
        util.nouideprecwarn(
            b"use revlog.delta_config.max_deltachain_span", b"6.6", stacklevel=2
        )
        return self.delta_config.max_deltachain_span

    @property
    def _withsparseread(self):
        """temporary compatibility proxy"""
        util.nouideprecwarn(
            b"use revlog.data_config.with_sparse_read", b"6.6", stacklevel=2
        )
        return self.data_config.with_sparse_read

    @property
    def _sparserevlog(self):
        """temporary compatibility proxy"""
        util.nouideprecwarn(
            b"use revlog.delta_config.sparse_revlog", b"6.6", stacklevel=2
        )
        return self.delta_config.sparse_revlog

    @property
    def hassidedata(self):
        """temporary compatibility proxy"""
        util.nouideprecwarn(
            b"use revlog.feature_config.has_side_data", b"6.6", stacklevel=2
        )
        return self.feature_config.has_side_data

    @property
    def _srdensitythreshold(self):
        """temporary compatibility proxy"""
        util.nouideprecwarn(
            b"use revlog.data_config.sr_density_threshold",
            b"6.6",
            stacklevel=2,
        )
        return self.data_config.sr_density_threshold

    @property
    def _srmingapsize(self):
        """temporary compatibility proxy"""
        util.nouideprecwarn(
            b"use revlog.data_config.sr_min_gap_size", b"6.6", stacklevel=2
        )
        return self.data_config.sr_min_gap_size

    @property
    def _compute_rank(self):
        """temporary compatibility proxy"""
        util.nouideprecwarn(
            b"use revlog.feature_config.compute_rank", b"6.6", stacklevel=2
        )
        return self.feature_config.compute_rank

    @property
    def canonical_parent_order(self):
        """temporary compatibility proxy"""
        util.nouideprecwarn(
            b"use revlog.feature_config.canonical_parent_order",
            b"6.6",
            stacklevel=2,
        )
        return self.feature_config.canonical_parent_order

    @property
    def _lazydelta(self):
        """temporary compatibility proxy"""
        util.nouideprecwarn(
            b"use revlog.delta_config.lazy_delta", b"6.6", stacklevel=2
        )
        return self.delta_config.lazy_delta

    @property
    def _lazydeltabase(self):
        """temporary compatibility proxy"""
        util.nouideprecwarn(
            b"use revlog.delta_config.lazy_delta_base", b"6.6", stacklevel=2
        )
        return self.delta_config.lazy_delta_base

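    # The deprecation proxies above all follow the same pattern; a minimal
    # sketch of the intended migration for caller code (`rl` is hypothetical):
    #
    #   threshold = rl._srdensitythreshold                 # old spelling, warns
    #   threshold = rl.data_config.sr_density_threshold    # new spelling
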
    def _init_opts(self):
        """process options (from above/config) to set up the associated default revlog mode

        These values might be affected when actually reading on-disk information.

        The relevant values are returned for use in _loadindex().

        * newversionflags:
            version header to use if we need to create a new revlog

        * mmapindexthreshold:
            minimal index size at which to start using mmap

        * force_nodemap:
            force the usage of a "development" version of the nodemap code
        """
        opts = self.opener.options

        if b'changelogv2' in opts and self.revlog_kind == KIND_CHANGELOG:
            new_header = CHANGELOGV2
            compute_rank = opts.get(b'changelogv2.compute-rank', True)
            self.feature_config.compute_rank = compute_rank
        elif b'revlogv2' in opts:
            new_header = REVLOGV2
        elif b'revlogv1' in opts:
            new_header = REVLOGV1
            if self._may_inline:
                new_header |= FLAG_INLINE_DATA
            if b'generaldelta' in opts:
                new_header |= FLAG_GENERALDELTA
        elif b'revlogv0' in self.opener.options:
            new_header = REVLOGV0
        else:
            new_header = REVLOG_DEFAULT_VERSION

        mmapindexthreshold = None
        if self.data_config.mmap_large_index:
            mmapindexthreshold = self.data_config.mmap_index_threshold
        if self.feature_config.enable_ellipsis:
            self._flagprocessors[REVIDX_ELLIPSIS] = ellipsisprocessor

        # revlog v0 doesn't have flag processors
        for flag, processor in opts.get(b'flagprocessors', {}).items():
            flagutil.insertflagprocessor(flag, processor, self._flagprocessors)

        chunk_cache_size = self.data_config.chunk_cache_size
        if chunk_cache_size <= 0:
            raise error.RevlogError(
                _(b'revlog chunk cache size %r is not greater than 0')
                % chunk_cache_size
            )
        elif chunk_cache_size & (chunk_cache_size - 1):
            raise error.RevlogError(
                _(b'revlog chunk cache size %r is not a power of 2')
                % chunk_cache_size
            )
        force_nodemap = opts.get(b'devel-force-nodemap', False)
        return new_header, mmapindexthreshold, force_nodemap

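    # Worked example of the header assembled by _init_opts() above: the low
    # 16 bits carry the format version and the high bits carry feature flags,
    # e.g. for an inline, general-delta revlogv1:
    #
    #   new_header = REVLOGV1 | FLAG_INLINE_DATA | FLAG_GENERALDELTA
    #
    # _loadindex() later splits such a header back apart with
    # `header & 0xFFFF` (version) and `header & ~0xFFFF` (flags).
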
    def _get_data(self, filepath, mmap_threshold, size=None):
        """return the content of a file, with or without mmap

        If the file is missing, return the empty string"""
        try:
            with self.opener(filepath) as fp:
                if mmap_threshold is not None:
                    file_size = self.opener.fstat(fp).st_size
                    if file_size >= mmap_threshold:
                        if size is not None:
                            # avoid potential mmap crash
                            size = min(file_size, size)
                        # TODO: should .close() to release resources without
                        # relying on Python GC
                        if size is None:
                            return util.buffer(util.mmapread(fp))
                        else:
                            return util.buffer(util.mmapread(fp, size))
                if size is None:
                    return fp.read()
                else:
                    return fp.read(size)
        except FileNotFoundError:
            return b''

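    # Minimal usage sketch for _get_data(), assuming a 1 MiB mmap threshold
    # taken from the configuration (values are illustrative only):
    #
    #   data = self._get_data(self._indexfile, 1024 * 1024)
    #   # a file of >= 1 MiB is mmap-ed and wrapped in util.buffer(),
    #   # a smaller one is read into memory, and a missing one yields b''
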
    def get_streams(self, max_linkrev, force_inline=False):
        """return a list of streams that represent this revlog

        This is used by stream-clone to do byte-to-byte copies of a repository.

        This streams data for all revisions that refer to a changelog revision up
        to `max_linkrev`.

        If `force_inline` is set, it enforces that the stream will represent an inline revlog.

        It returns a list of three-tuples:

        [
            (filename, bytes_stream, stream_size),

        ]
        """
        n = len(self)
        index = self.index
        while n > 0:
            linkrev = index[n - 1][4]
            if linkrev < max_linkrev:
                break
            # note: this loop will rarely go through multiple iterations, since
            # it only traverses commits created during the current streaming
            # pull operation.
            #
            # If this becomes a problem, using a binary search should cap the
            # runtime of this.
            n = n - 1
        if n == 0:
            # no data to send
            return []
        index_size = n * index.entry_size
        data_size = self.end(n - 1)

        # XXX we might have been split (or stripped) since the object was
        # initialized. We need to close this race too, probably by having a
        # way to pre-open the files we feed to the revlog and never closing
        # them before we are done streaming.

        if self._inline:

            def get_stream():
                with self.opener(self._indexfile, mode=b"r") as fp:
                    yield None
                    size = index_size + data_size
                    if size <= 65536:
                        yield fp.read(size)
                    else:
                        yield from util.filechunkiter(fp, limit=size)

            inline_stream = get_stream()
            next(inline_stream)
            return [
                (self._indexfile, inline_stream, index_size + data_size),
            ]
        elif force_inline:

            def get_stream():
                with self.reading():
                    yield None

                    for rev in range(n):
                        idx = self.index.entry_binary(rev)
                        if rev == 0 and self._docket is None:
                            # re-inject the inline flag
                            header = self._format_flags
                            header |= self._format_version
                            header |= FLAG_INLINE_DATA
                            header = self.index.pack_header(header)
                            idx = header + idx
                        yield idx
                        yield self._inner.get_segment_for_revs(rev, rev)[1]

            inline_stream = get_stream()
            next(inline_stream)
            return [
                (self._indexfile, inline_stream, index_size + data_size),
            ]
        else:

            def get_index_stream():
                with self.opener(self._indexfile, mode=b"r") as fp:
                    yield None
                    if index_size <= 65536:
                        yield fp.read(index_size)
                    else:
                        yield from util.filechunkiter(fp, limit=index_size)

            def get_data_stream():
                with self._datafp() as fp:
                    yield None
                    if data_size <= 65536:
                        yield fp.read(data_size)
                    else:
                        yield from util.filechunkiter(fp, limit=data_size)

            index_stream = get_index_stream()
            next(index_stream)
            data_stream = get_data_stream()
            next(data_stream)
            return [
                (self._datafile, data_stream, data_size),
                (self._indexfile, index_stream, index_size),
            ]

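    # Sketch of how a stream-clone producer might consume get_streams();
    # `copy_stream` is a hypothetical helper, the tuple layout matches the
    # docstring above:
    #
    #   for name, stream, size in rl.get_streams(max_linkrev):
    #       copy_stream(name, stream, expected_size=size)
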
    def _loadindex(self, docket=None):

        new_header, mmapindexthreshold, force_nodemap = self._init_opts()

        if self.postfix is not None:
            entry_point = b'%s.i.%s' % (self.radix, self.postfix)
        elif self._trypending and self.opener.exists(b'%s.i.a' % self.radix):
            entry_point = b'%s.i.a' % self.radix
        elif self._try_split and self.opener.exists(self._split_index_file):
            entry_point = self._split_index_file
        else:
            entry_point = b'%s.i' % self.radix

        if docket is not None:
            self._docket = docket
            self._docket_file = entry_point
        else:
            self._initempty = True
            entry_data = self._get_data(entry_point, mmapindexthreshold)
            if len(entry_data) > 0:
                header = INDEX_HEADER.unpack(entry_data[:4])[0]
                self._initempty = False
            else:
                header = new_header

            self._format_flags = header & ~0xFFFF
            self._format_version = header & 0xFFFF

            supported_flags = SUPPORTED_FLAGS.get(self._format_version)
            if supported_flags is None:
                msg = _(b'unknown version (%d) in revlog %s')
                msg %= (self._format_version, self.display_id)
                raise error.RevlogError(msg)
            elif self._format_flags & ~supported_flags:
                msg = _(b'unknown flags (%#04x) in version %d revlog %s')
                display_flag = self._format_flags >> 16
                msg %= (display_flag, self._format_version, self.display_id)
                raise error.RevlogError(msg)

            features = FEATURES_BY_VERSION[self._format_version]
            self._inline = features[b'inline'](self._format_flags)
            self.delta_config.general_delta = features[b'generaldelta'](
                self._format_flags
            )
            self.feature_config.has_side_data = features[b'sidedata']

            if not features[b'docket']:
                self._indexfile = entry_point
                index_data = entry_data
            else:
                self._docket_file = entry_point
                if self._initempty:
                    self._docket = docketutil.default_docket(self, header)
                else:
                    self._docket = docketutil.parse_docket(
                        self, entry_data, use_pending=self._trypending
                    )

        if self._docket is not None:
            self._indexfile = self._docket.index_filepath()
            index_data = b''
            index_size = self._docket.index_end
            if index_size > 0:
                index_data = self._get_data(
                    self._indexfile, mmapindexthreshold, size=index_size
                )
                if len(index_data) < index_size:
                    msg = _(b'too few index data for %s: got %d, expected %d')
                    msg %= (self.display_id, len(index_data), index_size)
                    raise error.RevlogError(msg)

            self._inline = False
            # generaldelta implied by version 2 revlogs.
            self.delta_config.general_delta = True
            # the logic for persistent nodemap will be dealt with within the
            # main docket, so disable it for now.
            self._nodemap_file = None

        if self._docket is not None:
            self._datafile = self._docket.data_filepath()
            self._sidedatafile = self._docket.sidedata_filepath()
        elif self.postfix is None:
            self._datafile = b'%s.d' % self.radix
        else:
            self._datafile = b'%s.d.%s' % (self.radix, self.postfix)

        self.nodeconstants = sha1nodeconstants
        self.nullid = self.nodeconstants.nullid

        # sparse-revlog can't be on without general-delta (issue6056)
        if not self.delta_config.general_delta:
            self.delta_config.sparse_revlog = False

        self._storedeltachains = True

        devel_nodemap = (
            self._nodemap_file
            and force_nodemap
            and parse_index_v1_nodemap is not None
        )

        use_rust_index = False
        if rustrevlog is not None:
            if self._nodemap_file is not None:
                use_rust_index = True
            else:
                use_rust_index = self.opener.options.get(b'rust.index')

        self._parse_index = parse_index_v1
        if self._format_version == REVLOGV0:
            self._parse_index = revlogv0.parse_index_v0
        elif self._format_version == REVLOGV2:
            self._parse_index = parse_index_v2
        elif self._format_version == CHANGELOGV2:
            self._parse_index = parse_index_cl_v2
        elif devel_nodemap:
            self._parse_index = parse_index_v1_nodemap
        elif use_rust_index:
            self._parse_index = parse_index_v1_mixed
        try:
            d = self._parse_index(index_data, self._inline)
            index, chunkcache = d
            use_nodemap = (
                not self._inline
                and self._nodemap_file is not None
                and hasattr(index, 'update_nodemap_data')
            )
            if use_nodemap:
                nodemap_data = nodemaputil.persisted_data(self)
                if nodemap_data is not None:
                    docket = nodemap_data[0]
                    if (
                        len(d[0]) > docket.tip_rev
                        and d[0][docket.tip_rev][7] == docket.tip_node
                    ):
                        # no changelog tampering
                        self._nodemap_docket = docket
                        index.update_nodemap_data(*nodemap_data)
        except (ValueError, IndexError):
            raise error.RevlogError(
                _(b"index %s is corrupted") % self.display_id
            )
        self.index = index
        # revnum -> (chain-length, sum-delta-length)
        self._chaininfocache = util.lrucachedict(500)

        return chunkcache

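    # Worked example for the header decoding in _loadindex() above: a v1
    # inline revlog stores REVLOGV1 | FLAG_INLINE_DATA in its first four
    # bytes, so
    #
    #   header & 0xFFFF   -> REVLOGV1 (the format version)
    #   header & ~0xFFFF  -> FLAG_INLINE_DATA (the per-revlog feature flags)
    #
    # which FEATURES_BY_VERSION then turns into self._inline and the
    # general-delta setting.
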
    def _load_inner(self, chunk_cache):
        if self._docket is None:
            default_compression_header = None
        else:
            default_compression_header = self._docket.default_compression_header

        self._inner = _InnerRevlog(
            opener=self.opener,
            index=self.index,
            index_file=self._indexfile,
            data_file=self._datafile,
            sidedata_file=self._sidedatafile,
            inline=self._inline,
            data_config=self.data_config,
            delta_config=self.delta_config,
            feature_config=self.feature_config,
            chunk_cache=chunk_cache,
            default_compression_header=default_compression_header,
        )

    def get_revlog(self):
        """simple function to mirror the API of other not-really-revlog objects"""
        return self

    @util.propertycache
    def revlog_kind(self):
        return self.target[0]

    @util.propertycache
    def display_id(self):
        """The public-facing "ID" of the revlog that we use in messages"""
        if self.revlog_kind == KIND_FILELOG:
            # Reference the file without the "data/" prefix, so it is familiar
            # to the user.
            return self.target[1]
        else:
            return self.radix

    def _datafp(self, mode=b'r'):
        """file object for the revlog's data file"""
        return self.opener(self._datafile, mode=mode)

    def tiprev(self):
        return len(self.index) - 1

    def tip(self):
        return self.node(self.tiprev())

    def __contains__(self, rev):
        return 0 <= rev < len(self)

    def __len__(self):
        return len(self.index)

    def __iter__(self):
        return iter(range(len(self)))

    def revs(self, start=0, stop=None):
        """iterate over all revs in this revlog (from start to stop)"""
        return storageutil.iterrevs(len(self), start=start, stop=stop)

    def hasnode(self, node):
        try:
            self.rev(node)
            return True
        except KeyError:
            return False

    def _candelta(self, baserev, rev):
        """whether two revisions (baserev, rev) can be delta-ed or not"""
        # Disable delta if either rev requires a content-changing flag
        # processor (ex. LFS). This is because such a flag processor can alter
        # the rawtext content that the delta will be based on, and two clients
        # could have the same revlog node with different flags (i.e. different
        # rawtext contents) and the delta could be incompatible.
        if (self.flags(baserev) & REVIDX_RAWTEXT_CHANGING_FLAGS) or (
            self.flags(rev) & REVIDX_RAWTEXT_CHANGING_FLAGS
        ):
            return False
        return True

    def update_caches(self, transaction):
        """update the on-disk caches

        If a transaction is passed, the update may be delayed to transaction
        commit."""
        if self._nodemap_file is not None:
            if transaction is None:
                nodemaputil.update_persistent_nodemap(self)
            else:
                nodemaputil.setup_persistent_nodemap(transaction, self)

    def clearcaches(self):
        """Clear in-memory caches"""
        self._chainbasecache.clear()
        self._inner.clear_cache()
        self._pcache = {}
        self._nodemap_docket = None
        self.index.clearcaches()
        # The Python code is responsible for validating the docket, so we
        # end up having to refresh it here.
        use_nodemap = (
            not self._inline
            and self._nodemap_file is not None
            and hasattr(self.index, 'update_nodemap_data')
        )
        if use_nodemap:
            nodemap_data = nodemaputil.persisted_data(self)
            if nodemap_data is not None:
                self._nodemap_docket = nodemap_data[0]
                self.index.update_nodemap_data(*nodemap_data)

    def rev(self, node):
        """return the revision number associated with a <nodeid>"""
        try:
            return self.index.rev(node)
        except TypeError:
            raise
        except error.RevlogError:
            # parsers.c radix tree lookup failed
            if (
                node == self.nodeconstants.wdirid
                or node in self.nodeconstants.wdirfilenodeids
            ):
                raise error.WdirUnsupported
            raise error.LookupError(node, self.display_id, _(b'no node'))

    # Accessors for index entries.

    # First tuple entry is 8 bytes. First 6 bytes are offset. Last 2 bytes
    # are flags.
    def start(self, rev):
        return int(self.index[rev][0] >> 16)

    def sidedata_cut_off(self, rev):
        sd_cut_off = self.index[rev][8]
        if sd_cut_off != 0:
            return sd_cut_off
        # This is some annoying dance, because entries without sidedata
        # currently use 0 as their offset (instead of previous-offset +
        # previous-size).
        #
        # We should reconsider this sidedata → 0 sidedata_offset policy.
        # In the meantime, we need this.
        while 0 <= rev:
            e = self.index[rev]
            if e[9] != 0:
                return e[8] + e[9]
            rev -= 1
        return 0

    def flags(self, rev):
        return self.index[rev][0] & 0xFFFF

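    # Worked example of the packing described above: the first index field
    # stores (offset << 16) | flags, so for an entry value `entry0`:
    #
    #   self.start(rev) == entry0 >> 16      # the 6-byte offset into the data
    #   self.flags(rev) == entry0 & 0xFFFF   # the 2-byte flag field
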
    def length(self, rev):
        return self.index[rev][1]

    def sidedata_length(self, rev):
        if not self.feature_config.has_side_data:
            return 0
        return self.index[rev][9]

    def rawsize(self, rev):
        """return the length of the uncompressed text for a given revision"""
        l = self.index[rev][2]
        if l >= 0:
            return l

        t = self.rawdata(rev)
        return len(t)

    def size(self, rev):
        """length of non-raw text (processed by a "read" flag processor)"""
        # fast path: if no "read" flag processor could change the content,
        # size is rawsize. note: ELLIPSIS is known to not change the content.
        flags = self.flags(rev)
        if flags & (flagutil.REVIDX_KNOWN_FLAGS ^ REVIDX_ELLIPSIS) == 0:
            return self.rawsize(rev)

        return len(self.revision(rev))

    def fast_rank(self, rev):
        """Return the rank of a revision if already known, or None otherwise.

        The rank of a revision is the size of the sub-graph it defines as a
        head. Equivalently, the rank of a revision `r` is the size of the set
        `ancestors(r)`, `r` included.

        This method returns the rank retrieved from the revlog in constant
        time. It makes no attempt at computing unknown values for versions of
        the revlog which do not persist the rank.
        """
        rank = self.index[rev][ENTRY_RANK]
        if self._format_version != CHANGELOGV2 or rank == RANK_UNKNOWN:
            return None
        if rev == nullrev:
            return 0  # convention
        return rank

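    # Worked example for fast_rank(): in a purely linear history the
    # sub-graph headed by rev r contains r + 1 revisions, so a CHANGELOGV2
    # revlog that persisted the value would report fast_rank(0) == 1 and
    # fast_rank(5) == 6, while any other format (or an entry whose rank was
    # never computed) reports None.
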
    def chainbase(self, rev):
        base = self._chainbasecache.get(rev)
        if base is not None:
            return base

        index = self.index
        iterrev = rev
        base = index[iterrev][3]
        while base != iterrev:
            iterrev = base
            base = index[iterrev][3]

        self._chainbasecache[rev] = base
        return base

    def linkrev(self, rev):
        return self.index[rev][4]

    def parentrevs(self, rev):
        try:
            entry = self.index[rev]
        except IndexError:
            if rev == wdirrev:
                raise error.WdirUnsupported
            raise

        if self.feature_config.canonical_parent_order and entry[5] == nullrev:
            return entry[6], entry[5]
        else:
            return entry[5], entry[6]

    # fast parentrevs(rev) where rev isn't filtered
    _uncheckedparentrevs = parentrevs

    def node(self, rev):
        try:
            return self.index[rev][7]
        except IndexError:
            if rev == wdirrev:
                raise error.WdirUnsupported
            raise

    # Derived from index values.

    def end(self, rev):
        return self.start(rev) + self.length(rev)

    def parents(self, node):
        i = self.index
        d = i[self.rev(node)]
        # inline node() to avoid function call overhead
        if self.feature_config.canonical_parent_order and d[5] == self.nullid:
            return i[d[6]][7], i[d[5]][7]
        else:
            return i[d[5]][7], i[d[6]][7]

    def chainlen(self, rev):
        return self._chaininfo(rev)[0]

    def _chaininfo(self, rev):
        chaininfocache = self._chaininfocache
        if rev in chaininfocache:
            return chaininfocache[rev]
        index = self.index
        generaldelta = self.delta_config.general_delta
        iterrev = rev
        e = index[iterrev]
        clen = 0
        compresseddeltalen = 0
        while iterrev != e[3]:
            clen += 1
            compresseddeltalen += e[1]
            if generaldelta:
                iterrev = e[3]
            else:
                iterrev -= 1
            if iterrev in chaininfocache:
                t = chaininfocache[iterrev]
                clen += t[0]
                compresseddeltalen += t[1]
                break
            e = index[iterrev]
        else:
            # Add text length of base since decompressing that also takes
            # work. For cache hits the length is already included.
            compresseddeltalen += e[1]
        r = (clen, compresseddeltalen)
        chaininfocache[rev] = r
        return r

    def _deltachain(self, rev, stoprev=None):
        return self._inner._deltachain(rev, stoprev=stoprev)

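    # Sketch of what _chaininfo() accumulates, assuming general delta (where
    # entry[3] is the delta parent) and a chain 7 -> 5 -> 2 in which rev 2
    # stores a full snapshot (its entry[3] points at itself):
    #
    #   chainlen(7) == 2   # two deltas applied on top of the base at rev 2
    #   # the second element of _chaininfo(7) sums the compressed lengths
    #   # of revs 7, 5 and 2 (the base is added in the `else:` branch above)
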
    def ancestors(self, revs, stoprev=0, inclusive=False):
        """Generate the ancestors of 'revs' in reverse revision order.
        Does not generate revs lower than stoprev.

        See the documentation for ancestor.lazyancestors for more details."""

        # first, make sure start revisions aren't filtered
        revs = list(revs)
        checkrev = self.node
        for r in revs:
            checkrev(r)
        # and we're sure ancestors aren't filtered as well

        if rustancestor is not None and self.index.rust_ext_compat:
            lazyancestors = rustancestor.LazyAncestors
            arg = self.index
        else:
            lazyancestors = ancestor.lazyancestors
            arg = self._uncheckedparentrevs
        return lazyancestors(arg, revs, stoprev=stoprev, inclusive=inclusive)

    def descendants(self, revs):
        return dagop.descendantrevs(revs, self.revs, self.parentrevs)

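    # Minimal usage sketch for ancestors() (rev numbers are hypothetical):
    # iterate the ancestors of two heads, ignore anything below rev 1000 and
    # include the starting revisions themselves:
    #
    #   for r in rl.ancestors([head_a, head_b], stoprev=1000, inclusive=True):
    #       process(r)
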
    def findcommonmissing(self, common=None, heads=None):
        """Return a tuple of the ancestors of common and the ancestors of heads
        that are not ancestors of common. In revset terminology, we return the
        tuple:

          ::common, (::heads) - (::common)

        The list is sorted by revision number, meaning it is
        topologically sorted.

        'heads' and 'common' are both lists of node IDs. If heads is
        not supplied, uses all of the revlog's heads. If common is not
        supplied, uses nullid."""
        if common is None:
            common = [self.nullid]
        if heads is None:
            heads = self.heads()

        common = [self.rev(n) for n in common]
        heads = [self.rev(n) for n in heads]

        # we want the ancestors, but inclusive
        class lazyset:
            def __init__(self, lazyvalues):
                self.addedvalues = set()
                self.lazyvalues = lazyvalues

            def __contains__(self, value):
                return value in self.addedvalues or value in self.lazyvalues

            def __iter__(self):
                added = self.addedvalues
                for r in added:
                    yield r
                for r in self.lazyvalues:
                    if r not in added:
                        yield r

            def add(self, value):
                self.addedvalues.add(value)

            def update(self, values):
                self.addedvalues.update(values)

        has = lazyset(self.ancestors(common))
        has.add(nullrev)
        has.update(common)

        # take all ancestors from heads that aren't in has
        missing = set()
        visit = collections.deque(r for r in heads if r not in has)
        while visit:
            r = visit.popleft()
            if r in missing:
                continue
            else:
                missing.add(r)
                for p in self.parentrevs(r):
                    if p not in has:
                        visit.append(p)
        missing = list(missing)
        missing.sort()
        return has, [self.node(miss) for miss in missing]

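    # Tiny worked example for findcommonmissing() on a linear history
    # 0 -> 1 -> 2 -> 3, with common=[node(1)] and heads=[node(3)]:
    #
    #   has      -> lazy set containing {nullrev, 0, 1}    (::common)
    #   missing  -> [node(2), node(3)]                     ((::heads) - (::common))
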
    def incrementalmissingrevs(self, common=None):
        """Return an object that can be used to incrementally compute the
        revision numbers of the ancestors of arbitrary sets that are not
        ancestors of common. This is an ancestor.incrementalmissingancestors
        object.

        'common' is a list of revision numbers. If common is not supplied, uses
        nullrev.
        """
        if common is None:
            common = [nullrev]

        if rustancestor is not None and self.index.rust_ext_compat:
            return rustancestor.MissingAncestors(self.index, common)
        return ancestor.incrementalmissingancestors(self.parentrevs, common)

    def findmissingrevs(self, common=None, heads=None):
        """Return the revision numbers of the ancestors of heads that
        are not ancestors of common.

        More specifically, return a list of revision numbers corresponding to
        nodes N such that every N satisfies the following constraints:

          1. N is an ancestor of some node in 'heads'
          2. N is not an ancestor of any node in 'common'

        The list is sorted by revision number, meaning it is
        topologically sorted.

        'heads' and 'common' are both lists of revision numbers. If heads is
        not supplied, uses all of the revlog's heads. If common is not
        supplied, uses nullid."""
        if common is None:
            common = [nullrev]
        if heads is None:
            heads = self.headrevs()

        inc = self.incrementalmissingrevs(common=common)
        return inc.missingancestors(heads)

    def findmissing(self, common=None, heads=None):
        """Return the ancestors of heads that are not ancestors of common.

        More specifically, return a list of nodes N such that every N
        satisfies the following constraints:

          1. N is an ancestor of some node in 'heads'
          2. N is not an ancestor of any node in 'common'

        The list is sorted by revision number, meaning it is
        topologically sorted.

        'heads' and 'common' are both lists of node IDs. If heads is
        not supplied, uses all of the revlog's heads. If common is not
        supplied, uses nullid."""
        if common is None:
            common = [self.nullid]
        if heads is None:
            heads = self.heads()

        common = [self.rev(n) for n in common]
        heads = [self.rev(n) for n in heads]

        inc = self.incrementalmissingrevs(common=common)
        return [self.node(r) for r in inc.missingancestors(heads)]

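    # The two helpers above differ only in their currency: findmissingrevs()
    # takes and returns revision numbers while findmissing() takes and
    # returns node IDs. A hypothetical discovery-style call:
    #
    #   missing_nodes = rl.findmissing(common=remote_heads, heads=local_heads)
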
    def nodesbetween(self, roots=None, heads=None):
        """Return a topological path from 'roots' to 'heads'.

        Return a tuple (nodes, outroots, outheads) where 'nodes' is a
        topologically sorted list of all nodes N that satisfy both of
        these constraints:

          1. N is a descendant of some node in 'roots'
          2. N is an ancestor of some node in 'heads'

        Every node is considered to be both a descendant and an ancestor
        of itself, so every reachable node in 'roots' and 'heads' will be
        included in 'nodes'.

        'outroots' is the list of reachable nodes in 'roots', i.e., the
        subset of 'roots' that is returned in 'nodes'. Likewise,
        'outheads' is the subset of 'heads' that is also in 'nodes'.

        'roots' and 'heads' are both lists of node IDs. If 'roots' is
        unspecified, uses nullid as the only root. If 'heads' is
        unspecified, uses list of all of the revlog's heads."""
        nonodes = ([], [], [])
        if roots is not None:
            roots = list(roots)
            if not roots:
                return nonodes
            lowestrev = min([self.rev(n) for n in roots])
        else:
            roots = [self.nullid]  # Everybody's a descendant of nullid
            lowestrev = nullrev
        if (lowestrev == nullrev) and (heads is None):
            # We want _all_ the nodes!
            return (
                [self.node(r) for r in self],
                [self.nullid],
                list(self.heads()),
            )
        if heads is None:
            # All nodes are ancestors, so the latest ancestor is the last
            # node.
            highestrev = len(self) - 1
            # Set ancestors to None to signal that every node is an ancestor.
            ancestors = None
            # Set heads to an empty dictionary for later discovery of heads
            heads = {}
        else:
            heads = list(heads)
            if not heads:
                return nonodes
            ancestors = set()
            # Turn heads into a dictionary so we can remove 'fake' heads.
2229 # Turn heads into a dictionary so we can remove 'fake' heads.
2418 # Also, later we will be using it to filter out the heads we can't
2230 # Also, later we will be using it to filter out the heads we can't
2419 # find from roots.
2231 # find from roots.
2420 heads = dict.fromkeys(heads, False)
2232 heads = dict.fromkeys(heads, False)
2421 # Start at the top and keep marking parents until we're done.
2233 # Start at the top and keep marking parents until we're done.
2422 nodestotag = set(heads)
2234 nodestotag = set(heads)
2423 # Remember where the top was so we can use it as a limit later.
2235 # Remember where the top was so we can use it as a limit later.
2424 highestrev = max([self.rev(n) for n in nodestotag])
2236 highestrev = max([self.rev(n) for n in nodestotag])
2425 while nodestotag:
2237 while nodestotag:
2426 # grab a node to tag
2238 # grab a node to tag
2427 n = nodestotag.pop()
2239 n = nodestotag.pop()
2428 # Never tag nullid
2240 # Never tag nullid
2429 if n == self.nullid:
2241 if n == self.nullid:
2430 continue
2242 continue
2431 # A node's revision number represents its place in a
2243 # A node's revision number represents its place in a
2432 # topologically sorted list of nodes.
2244 # topologically sorted list of nodes.
2433 r = self.rev(n)
2245 r = self.rev(n)
2434 if r >= lowestrev:
2246 if r >= lowestrev:
2435 if n not in ancestors:
2247 if n not in ancestors:
2436 # If we are possibly a descendant of one of the roots
2248 # If we are possibly a descendant of one of the roots
2437 # and we haven't already been marked as an ancestor
2249 # and we haven't already been marked as an ancestor
2438 ancestors.add(n) # Mark as ancestor
2250 ancestors.add(n) # Mark as ancestor
2439 # Add non-nullid parents to list of nodes to tag.
2251 # Add non-nullid parents to list of nodes to tag.
2440 nodestotag.update(
2252 nodestotag.update(
2441 [p for p in self.parents(n) if p != self.nullid]
2253 [p for p in self.parents(n) if p != self.nullid]
2442 )
2254 )
2443 elif n in heads: # We've seen it before, is it a fake head?
2255 elif n in heads: # We've seen it before, is it a fake head?
2444 # So it is, real heads should not be the ancestors of
2256 # So it is, real heads should not be the ancestors of
2445 # any other heads.
2257 # any other heads.
2446 heads.pop(n)
2258 heads.pop(n)
2447 if not ancestors:
2259 if not ancestors:
2448 return nonodes
2260 return nonodes
2449 # Now that we have our set of ancestors, we want to remove any
2261 # Now that we have our set of ancestors, we want to remove any
2450 # roots that are not ancestors.
2262 # roots that are not ancestors.
2451
2263
2452 # If one of the roots was nullid, everything is included anyway.
2264 # If one of the roots was nullid, everything is included anyway.
2453 if lowestrev > nullrev:
2265 if lowestrev > nullrev:
2454 # But, since we weren't, let's recompute the lowest rev to not
2266 # But, since we weren't, let's recompute the lowest rev to not
2455 # include roots that aren't ancestors.
2267 # include roots that aren't ancestors.
2456
2268
2457 # Filter out roots that aren't ancestors of heads
2269 # Filter out roots that aren't ancestors of heads
2458 roots = [root for root in roots if root in ancestors]
2270 roots = [root for root in roots if root in ancestors]
2459 # Recompute the lowest revision
2271 # Recompute the lowest revision
2460 if roots:
2272 if roots:
2461 lowestrev = min([self.rev(root) for root in roots])
2273 lowestrev = min([self.rev(root) for root in roots])
2462 else:
2274 else:
2463 # No more roots? Return empty list
2275 # No more roots? Return empty list
2464 return nonodes
2276 return nonodes
2465 else:
2277 else:
2466 # We are descending from nullid, and don't need to care about
2278 # We are descending from nullid, and don't need to care about
2467 # any other roots.
2279 # any other roots.
2468 lowestrev = nullrev
2280 lowestrev = nullrev
2469 roots = [self.nullid]
2281 roots = [self.nullid]
2470 # Transform our roots list into a set.
2282 # Transform our roots list into a set.
2471 descendants = set(roots)
2283 descendants = set(roots)
2472 # Also, keep the original roots so we can filter out roots that aren't
2284 # Also, keep the original roots so we can filter out roots that aren't
2473 # 'real' roots (i.e. are descended from other roots).
2285 # 'real' roots (i.e. are descended from other roots).
2474 roots = descendants.copy()
2286 roots = descendants.copy()
2475 # Our topologically sorted list of output nodes.
2287 # Our topologically sorted list of output nodes.
2476 orderedout = []
2288 orderedout = []
2477 # Don't start at nullid since we don't want nullid in our output list,
2289 # Don't start at nullid since we don't want nullid in our output list,
2478 # and if nullid shows up in descendants, empty parents will look like
2290 # and if nullid shows up in descendants, empty parents will look like
2479 # they're descendants.
2291 # they're descendants.
2480 for r in self.revs(start=max(lowestrev, 0), stop=highestrev + 1):
2292 for r in self.revs(start=max(lowestrev, 0), stop=highestrev + 1):
2481 n = self.node(r)
2293 n = self.node(r)
2482 isdescendant = False
2294 isdescendant = False
2483 if lowestrev == nullrev: # Everybody is a descendant of nullid
2295 if lowestrev == nullrev: # Everybody is a descendant of nullid
2484 isdescendant = True
2296 isdescendant = True
2485 elif n in descendants:
2297 elif n in descendants:
2486 # n is already a descendant
2298 # n is already a descendant
2487 isdescendant = True
2299 isdescendant = True
2488 # This check only needs to be done here because all the roots
2300 # This check only needs to be done here because all the roots
2489 # will start being marked is descendants before the loop.
2301 # will start being marked is descendants before the loop.
2490 if n in roots:
2302 if n in roots:
2491 # If n was a root, check if it's a 'real' root.
2303 # If n was a root, check if it's a 'real' root.
2492 p = tuple(self.parents(n))
2304 p = tuple(self.parents(n))
2493 # If any of its parents are descendants, it's not a root.
2305 # If any of its parents are descendants, it's not a root.
2494 if (p[0] in descendants) or (p[1] in descendants):
2306 if (p[0] in descendants) or (p[1] in descendants):
2495 roots.remove(n)
2307 roots.remove(n)
2496 else:
2308 else:
2497 p = tuple(self.parents(n))
2309 p = tuple(self.parents(n))
2498 # A node is a descendant if either of its parents are
2310 # A node is a descendant if either of its parents are
2499 # descendants. (We seeded the dependents list with the roots
2311 # descendants. (We seeded the dependents list with the roots
2500 # up there, remember?)
2312 # up there, remember?)
2501 if (p[0] in descendants) or (p[1] in descendants):
2313 if (p[0] in descendants) or (p[1] in descendants):
2502 descendants.add(n)
2314 descendants.add(n)
2503 isdescendant = True
2315 isdescendant = True
2504 if isdescendant and ((ancestors is None) or (n in ancestors)):
2316 if isdescendant and ((ancestors is None) or (n in ancestors)):
2505 # Only include nodes that are both descendants and ancestors.
2317 # Only include nodes that are both descendants and ancestors.
2506 orderedout.append(n)
2318 orderedout.append(n)
2507 if (ancestors is not None) and (n in heads):
2319 if (ancestors is not None) and (n in heads):
2508 # We're trying to figure out which heads are reachable
2320 # We're trying to figure out which heads are reachable
2509 # from roots.
2321 # from roots.
2510 # Mark this head as having been reached
2322 # Mark this head as having been reached
2511 heads[n] = True
2323 heads[n] = True
2512 elif ancestors is None:
2324 elif ancestors is None:
2513 # Otherwise, we're trying to discover the heads.
2325 # Otherwise, we're trying to discover the heads.
2514 # Assume this is a head because if it isn't, the next step
2326 # Assume this is a head because if it isn't, the next step
2515 # will eventually remove it.
2327 # will eventually remove it.
2516 heads[n] = True
2328 heads[n] = True
2517 # But, obviously its parents aren't.
2329 # But, obviously its parents aren't.
2518 for p in self.parents(n):
2330 for p in self.parents(n):
2519 heads.pop(p, None)
2331 heads.pop(p, None)
2520 heads = [head for head, flag in heads.items() if flag]
2332 heads = [head for head, flag in heads.items() if flag]
2521 roots = list(roots)
2333 roots = list(roots)
2522 assert orderedout
2334 assert orderedout
2523 assert roots
2335 assert roots
2524 assert heads
2336 assert heads
2525 return (orderedout, roots, heads)
2337 return (orderedout, roots, heads)
2526
2338
2527 def headrevs(self, revs=None):
2339 def headrevs(self, revs=None):
2528 if revs is None:
2340 if revs is None:
2529 try:
2341 try:
2530 return self.index.headrevs()
2342 return self.index.headrevs()
2531 except AttributeError:
2343 except AttributeError:
2532 return self._headrevs()
2344 return self._headrevs()
2533 if rustdagop is not None and self.index.rust_ext_compat:
2345 if rustdagop is not None and self.index.rust_ext_compat:
2534 return rustdagop.headrevs(self.index, revs)
2346 return rustdagop.headrevs(self.index, revs)
2535 return dagop.headrevs(revs, self._uncheckedparentrevs)
2347 return dagop.headrevs(revs, self._uncheckedparentrevs)
2536
2348
2537 def computephases(self, roots):
2349 def computephases(self, roots):
2538 return self.index.computephasesmapsets(roots)
2350 return self.index.computephasesmapsets(roots)
2539
2351
2540 def _headrevs(self):
2352 def _headrevs(self):
2541 count = len(self)
2353 count = len(self)
2542 if not count:
2354 if not count:
2543 return [nullrev]
2355 return [nullrev]
2544 # we won't iter over filtered rev so nobody is a head at start
2356 # we won't iter over filtered rev so nobody is a head at start
2545 ishead = [0] * (count + 1)
2357 ishead = [0] * (count + 1)
2546 index = self.index
2358 index = self.index
2547 for r in self:
2359 for r in self:
2548 ishead[r] = 1 # I may be an head
2360 ishead[r] = 1 # I may be an head
2549 e = index[r]
2361 e = index[r]
2550 ishead[e[5]] = ishead[e[6]] = 0 # my parent are not
2362 ishead[e[5]] = ishead[e[6]] = 0 # my parent are not
2551 return [r for r, val in enumerate(ishead) if val]
2363 return [r for r, val in enumerate(ishead) if val]
2552
2364
2553 def heads(self, start=None, stop=None):
2365 def heads(self, start=None, stop=None):
2554 """return the list of all nodes that have no children
2366 """return the list of all nodes that have no children
2555
2367
2556 if start is specified, only heads that are descendants of
2368 if start is specified, only heads that are descendants of
2557 start will be returned
2369 start will be returned
2558 if stop is specified, it will consider all the revs from stop
2370 if stop is specified, it will consider all the revs from stop
2559 as if they had no children
2371 as if they had no children
2560 """
2372 """
2561 if start is None and stop is None:
2373 if start is None and stop is None:
2562 if not len(self):
2374 if not len(self):
2563 return [self.nullid]
2375 return [self.nullid]
2564 return [self.node(r) for r in self.headrevs()]
2376 return [self.node(r) for r in self.headrevs()]
2565
2377
2566 if start is None:
2378 if start is None:
2567 start = nullrev
2379 start = nullrev
2568 else:
2380 else:
2569 start = self.rev(start)
2381 start = self.rev(start)
2570
2382
2571 stoprevs = {self.rev(n) for n in stop or []}
2383 stoprevs = {self.rev(n) for n in stop or []}
2572
2384
2573 revs = dagop.headrevssubset(
2385 revs = dagop.headrevssubset(
2574 self.revs, self.parentrevs, startrev=start, stoprevs=stoprevs
2386 self.revs, self.parentrevs, startrev=start, stoprevs=stoprevs
2575 )
2387 )
2576
2388
2577 return [self.node(rev) for rev in revs]
2389 return [self.node(rev) for rev in revs]
2578
2390
2579 def children(self, node):
2391 def children(self, node):
2580 """find the children of a given node"""
2392 """find the children of a given node"""
2581 c = []
2393 c = []
2582 p = self.rev(node)
2394 p = self.rev(node)
2583 for r in self.revs(start=p + 1):
2395 for r in self.revs(start=p + 1):
2584 prevs = [pr for pr in self.parentrevs(r) if pr != nullrev]
2396 prevs = [pr for pr in self.parentrevs(r) if pr != nullrev]
2585 if prevs:
2397 if prevs:
2586 for pr in prevs:
2398 for pr in prevs:
2587 if pr == p:
2399 if pr == p:
2588 c.append(self.node(r))
2400 c.append(self.node(r))
2589 elif p == nullrev:
2401 elif p == nullrev:
2590 c.append(self.node(r))
2402 c.append(self.node(r))
2591 return c
2403 return c
2592
2404
2593 def commonancestorsheads(self, a, b):
2405 def commonancestorsheads(self, a, b):
2594 """calculate all the heads of the common ancestors of nodes a and b"""
2406 """calculate all the heads of the common ancestors of nodes a and b"""
2595 a, b = self.rev(a), self.rev(b)
2407 a, b = self.rev(a), self.rev(b)
2596 ancs = self._commonancestorsheads(a, b)
2408 ancs = self._commonancestorsheads(a, b)
2597 return pycompat.maplist(self.node, ancs)
2409 return pycompat.maplist(self.node, ancs)
2598
2410
2599 def _commonancestorsheads(self, *revs):
2411 def _commonancestorsheads(self, *revs):
2600 """calculate all the heads of the common ancestors of revs"""
2412 """calculate all the heads of the common ancestors of revs"""
2601 try:
2413 try:
2602 ancs = self.index.commonancestorsheads(*revs)
2414 ancs = self.index.commonancestorsheads(*revs)
2603 except (AttributeError, OverflowError): # C implementation failed
2415 except (AttributeError, OverflowError): # C implementation failed
2604 ancs = ancestor.commonancestorsheads(self.parentrevs, *revs)
2416 ancs = ancestor.commonancestorsheads(self.parentrevs, *revs)
2605 return ancs
2417 return ancs
2606
2418
2607 def isancestor(self, a, b):
2419 def isancestor(self, a, b):
2608 """return True if node a is an ancestor of node b
2420 """return True if node a is an ancestor of node b
2609
2421
2610 A revision is considered an ancestor of itself."""
2422 A revision is considered an ancestor of itself."""
2611 a, b = self.rev(a), self.rev(b)
2423 a, b = self.rev(a), self.rev(b)
2612 return self.isancestorrev(a, b)
2424 return self.isancestorrev(a, b)
2613
2425
2614 def isancestorrev(self, a, b):
2426 def isancestorrev(self, a, b):
2615 """return True if revision a is an ancestor of revision b
2427 """return True if revision a is an ancestor of revision b
2616
2428
2617 A revision is considered an ancestor of itself.
2429 A revision is considered an ancestor of itself.
2618
2430
2619 The implementation of this is trivial but the use of
2431 The implementation of this is trivial but the use of
2620 reachableroots is not."""
2432 reachableroots is not."""
2621 if a == nullrev:
2433 if a == nullrev:
2622 return True
2434 return True
2623 elif a == b:
2435 elif a == b:
2624 return True
2436 return True
2625 elif a > b:
2437 elif a > b:
2626 return False
2438 return False
2627 return bool(self.reachableroots(a, [b], [a], includepath=False))
2439 return bool(self.reachableroots(a, [b], [a], includepath=False))
2628
2440
2629 def reachableroots(self, minroot, heads, roots, includepath=False):
2441 def reachableroots(self, minroot, heads, roots, includepath=False):
2630 """return (heads(::(<roots> and <roots>::<heads>)))
2442 """return (heads(::(<roots> and <roots>::<heads>)))
2631
2443
2632 If includepath is True, return (<roots>::<heads>)."""
2444 If includepath is True, return (<roots>::<heads>)."""
2633 try:
2445 try:
2634 return self.index.reachableroots2(
2446 return self.index.reachableroots2(
2635 minroot, heads, roots, includepath
2447 minroot, heads, roots, includepath
2636 )
2448 )
2637 except AttributeError:
2449 except AttributeError:
2638 return dagop._reachablerootspure(
2450 return dagop._reachablerootspure(
2639 self.parentrevs, minroot, roots, heads, includepath
2451 self.parentrevs, minroot, roots, heads, includepath
2640 )
2452 )
2641
2453
2642 def ancestor(self, a, b):
2454 def ancestor(self, a, b):
2643 """calculate the "best" common ancestor of nodes a and b"""
2455 """calculate the "best" common ancestor of nodes a and b"""
2644
2456
2645 a, b = self.rev(a), self.rev(b)
2457 a, b = self.rev(a), self.rev(b)
2646 try:
2458 try:
2647 ancs = self.index.ancestors(a, b)
2459 ancs = self.index.ancestors(a, b)
2648 except (AttributeError, OverflowError):
2460 except (AttributeError, OverflowError):
2649 ancs = ancestor.ancestors(self.parentrevs, a, b)
2461 ancs = ancestor.ancestors(self.parentrevs, a, b)
2650 if ancs:
2462 if ancs:
2651 # choose a consistent winner when there's a tie
2463 # choose a consistent winner when there's a tie
2652 return min(map(self.node, ancs))
2464 return min(map(self.node, ancs))
2653 return self.nullid
2465 return self.nullid
2654
2466
2655 def _match(self, id):
2467 def _match(self, id):
2656 if isinstance(id, int):
2468 if isinstance(id, int):
2657 # rev
2469 # rev
2658 return self.node(id)
2470 return self.node(id)
2659 if len(id) == self.nodeconstants.nodelen:
2471 if len(id) == self.nodeconstants.nodelen:
2660 # possibly a binary node
2472 # possibly a binary node
2661 # odds of a binary node being all hex in ASCII are 1 in 10**25
2473 # odds of a binary node being all hex in ASCII are 1 in 10**25
2662 try:
2474 try:
2663 node = id
2475 node = id
2664 self.rev(node) # quick search the index
2476 self.rev(node) # quick search the index
2665 return node
2477 return node
2666 except error.LookupError:
2478 except error.LookupError:
2667 pass # may be partial hex id
2479 pass # may be partial hex id
2668 try:
2480 try:
2669 # str(rev)
2481 # str(rev)
2670 rev = int(id)
2482 rev = int(id)
2671 if b"%d" % rev != id:
2483 if b"%d" % rev != id:
2672 raise ValueError
2484 raise ValueError
2673 if rev < 0:
2485 if rev < 0:
2674 rev = len(self) + rev
2486 rev = len(self) + rev
2675 if rev < 0 or rev >= len(self):
2487 if rev < 0 or rev >= len(self):
2676 raise ValueError
2488 raise ValueError
2677 return self.node(rev)
2489 return self.node(rev)
2678 except (ValueError, OverflowError):
2490 except (ValueError, OverflowError):
2679 pass
2491 pass
2680 if len(id) == 2 * self.nodeconstants.nodelen:
2492 if len(id) == 2 * self.nodeconstants.nodelen:
2681 try:
2493 try:
2682 # a full hex nodeid?
2494 # a full hex nodeid?
2683 node = bin(id)
2495 node = bin(id)
2684 self.rev(node)
2496 self.rev(node)
2685 return node
2497 return node
2686 except (binascii.Error, error.LookupError):
2498 except (binascii.Error, error.LookupError):
2687 pass
2499 pass
2688
2500
2689 def _partialmatch(self, id):
2501 def _partialmatch(self, id):
2690 # we don't care wdirfilenodeids as they should be always full hash
2502 # we don't care wdirfilenodeids as they should be always full hash
2691 maybewdir = self.nodeconstants.wdirhex.startswith(id)
2503 maybewdir = self.nodeconstants.wdirhex.startswith(id)
2692 ambiguous = False
2504 ambiguous = False
2693 try:
2505 try:
2694 partial = self.index.partialmatch(id)
2506 partial = self.index.partialmatch(id)
2695 if partial and self.hasnode(partial):
2507 if partial and self.hasnode(partial):
2696 if maybewdir:
2508 if maybewdir:
2697 # single 'ff...' match in radix tree, ambiguous with wdir
2509 # single 'ff...' match in radix tree, ambiguous with wdir
2698 ambiguous = True
2510 ambiguous = True
2699 else:
2511 else:
2700 return partial
2512 return partial
2701 elif maybewdir:
2513 elif maybewdir:
2702 # no 'ff...' match in radix tree, wdir identified
2514 # no 'ff...' match in radix tree, wdir identified
2703 raise error.WdirUnsupported
2515 raise error.WdirUnsupported
2704 else:
2516 else:
2705 return None
2517 return None
2706 except error.RevlogError:
2518 except error.RevlogError:
2707 # parsers.c radix tree lookup gave multiple matches
2519 # parsers.c radix tree lookup gave multiple matches
2708 # fast path: for unfiltered changelog, radix tree is accurate
2520 # fast path: for unfiltered changelog, radix tree is accurate
2709 if not getattr(self, 'filteredrevs', None):
2521 if not getattr(self, 'filteredrevs', None):
2710 ambiguous = True
2522 ambiguous = True
2711 # fall through to slow path that filters hidden revisions
2523 # fall through to slow path that filters hidden revisions
2712 except (AttributeError, ValueError):
2524 except (AttributeError, ValueError):
2713 # we are pure python, or key is not hex
2525 # we are pure python, or key is not hex
2714 pass
2526 pass
2715 if ambiguous:
2527 if ambiguous:
2716 raise error.AmbiguousPrefixLookupError(
2528 raise error.AmbiguousPrefixLookupError(
2717 id, self.display_id, _(b'ambiguous identifier')
2529 id, self.display_id, _(b'ambiguous identifier')
2718 )
2530 )
2719
2531
2720 if id in self._pcache:
2532 if id in self._pcache:
2721 return self._pcache[id]
2533 return self._pcache[id]
2722
2534
2723 if len(id) <= 40:
2535 if len(id) <= 40:
2724 # hex(node)[:...]
2536 # hex(node)[:...]
2725 l = len(id) // 2 * 2 # grab an even number of digits
2537 l = len(id) // 2 * 2 # grab an even number of digits
2726 try:
2538 try:
2727 # we're dropping the last digit, so let's check that it's hex,
2539 # we're dropping the last digit, so let's check that it's hex,
2728 # to avoid the expensive computation below if it's not
2540 # to avoid the expensive computation below if it's not
2729 if len(id) % 2 > 0:
2541 if len(id) % 2 > 0:
2730 if not (id[-1] in hexdigits):
2542 if not (id[-1] in hexdigits):
2731 return None
2543 return None
2732 prefix = bin(id[:l])
2544 prefix = bin(id[:l])
2733 except binascii.Error:
2545 except binascii.Error:
2734 pass
2546 pass
2735 else:
2547 else:
2736 nl = [e[7] for e in self.index if e[7].startswith(prefix)]
2548 nl = [e[7] for e in self.index if e[7].startswith(prefix)]
2737 nl = [
2549 nl = [
2738 n for n in nl if hex(n).startswith(id) and self.hasnode(n)
2550 n for n in nl if hex(n).startswith(id) and self.hasnode(n)
2739 ]
2551 ]
2740 if self.nodeconstants.nullhex.startswith(id):
2552 if self.nodeconstants.nullhex.startswith(id):
2741 nl.append(self.nullid)
2553 nl.append(self.nullid)
2742 if len(nl) > 0:
2554 if len(nl) > 0:
2743 if len(nl) == 1 and not maybewdir:
2555 if len(nl) == 1 and not maybewdir:
2744 self._pcache[id] = nl[0]
2556 self._pcache[id] = nl[0]
2745 return nl[0]
2557 return nl[0]
2746 raise error.AmbiguousPrefixLookupError(
2558 raise error.AmbiguousPrefixLookupError(
2747 id, self.display_id, _(b'ambiguous identifier')
2559 id, self.display_id, _(b'ambiguous identifier')
2748 )
2560 )
2749 if maybewdir:
2561 if maybewdir:
2750 raise error.WdirUnsupported
2562 raise error.WdirUnsupported
2751 return None
2563 return None
2752
2564
2753 def lookup(self, id):
2565 def lookup(self, id):
2754 """locate a node based on:
2566 """locate a node based on:
2755 - revision number or str(revision number)
2567 - revision number or str(revision number)
2756 - nodeid or subset of hex nodeid
2568 - nodeid or subset of hex nodeid
2757 """
2569 """
2758 n = self._match(id)
2570 n = self._match(id)
2759 if n is not None:
2571 if n is not None:
2760 return n
2572 return n
2761 n = self._partialmatch(id)
2573 n = self._partialmatch(id)
2762 if n:
2574 if n:
2763 return n
2575 return n
2764
2576
2765 raise error.LookupError(id, self.display_id, _(b'no match found'))
2577 raise error.LookupError(id, self.display_id, _(b'no match found'))
2766
2578
2767 def shortest(self, node, minlength=1):
2579 def shortest(self, node, minlength=1):
2768 """Find the shortest unambiguous prefix that matches node."""
2580 """Find the shortest unambiguous prefix that matches node."""
2769
2581
2770 def isvalid(prefix):
2582 def isvalid(prefix):
2771 try:
2583 try:
2772 matchednode = self._partialmatch(prefix)
2584 matchednode = self._partialmatch(prefix)
2773 except error.AmbiguousPrefixLookupError:
2585 except error.AmbiguousPrefixLookupError:
2774 return False
2586 return False
2775 except error.WdirUnsupported:
2587 except error.WdirUnsupported:
2776 # single 'ff...' match
2588 # single 'ff...' match
2777 return True
2589 return True
2778 if matchednode is None:
2590 if matchednode is None:
2779 raise error.LookupError(node, self.display_id, _(b'no node'))
2591 raise error.LookupError(node, self.display_id, _(b'no node'))
2780 return True
2592 return True
2781
2593
2782 def maybewdir(prefix):
2594 def maybewdir(prefix):
2783 return all(c == b'f' for c in pycompat.iterbytestr(prefix))
2595 return all(c == b'f' for c in pycompat.iterbytestr(prefix))
2784
2596
2785 hexnode = hex(node)
2597 hexnode = hex(node)
2786
2598
2787 def disambiguate(hexnode, minlength):
2599 def disambiguate(hexnode, minlength):
2788 """Disambiguate against wdirid."""
2600 """Disambiguate against wdirid."""
2789 for length in range(minlength, len(hexnode) + 1):
2601 for length in range(minlength, len(hexnode) + 1):
2790 prefix = hexnode[:length]
2602 prefix = hexnode[:length]
2791 if not maybewdir(prefix):
2603 if not maybewdir(prefix):
2792 return prefix
2604 return prefix
2793
2605
2794 if not getattr(self, 'filteredrevs', None):
2606 if not getattr(self, 'filteredrevs', None):
2795 try:
2607 try:
2796 length = max(self.index.shortest(node), minlength)
2608 length = max(self.index.shortest(node), minlength)
2797 return disambiguate(hexnode, length)
2609 return disambiguate(hexnode, length)
2798 except error.RevlogError:
2610 except error.RevlogError:
2799 if node != self.nodeconstants.wdirid:
2611 if node != self.nodeconstants.wdirid:
2800 raise error.LookupError(
2612 raise error.LookupError(
2801 node, self.display_id, _(b'no node')
2613 node, self.display_id, _(b'no node')
2802 )
2614 )
2803 except AttributeError:
2615 except AttributeError:
2804 # Fall through to pure code
2616 # Fall through to pure code
2805 pass
2617 pass
2806
2618
2807 if node == self.nodeconstants.wdirid:
2619 if node == self.nodeconstants.wdirid:
2808 for length in range(minlength, len(hexnode) + 1):
2620 for length in range(minlength, len(hexnode) + 1):
2809 prefix = hexnode[:length]
2621 prefix = hexnode[:length]
2810 if isvalid(prefix):
2622 if isvalid(prefix):
2811 return prefix
2623 return prefix
2812
2624
2813 for length in range(minlength, len(hexnode) + 1):
2625 for length in range(minlength, len(hexnode) + 1):
2814 prefix = hexnode[:length]
2626 prefix = hexnode[:length]
2815 if isvalid(prefix):
2627 if isvalid(prefix):
2816 return disambiguate(hexnode, length)
2628 return disambiguate(hexnode, length)
2817
2629
2818 def cmp(self, node, text):
2630 def cmp(self, node, text):
2819 """compare text with a given file revision
2631 """compare text with a given file revision
2820
2632
2821 returns True if text is different than what is stored.
2633 returns True if text is different than what is stored.
2822 """
2634 """
2823 p1, p2 = self.parents(node)
2635 p1, p2 = self.parents(node)
2824 return storageutil.hashrevisionsha1(text, p1, p2) != node
2636 return storageutil.hashrevisionsha1(text, p1, p2) != node
2825
2637
2826 def deltaparent(self, rev):
2638 def deltaparent(self, rev):
2827 """return deltaparent of the given revision"""
2639 """return deltaparent of the given revision"""
2828 base = self.index[rev][3]
2640 base = self.index[rev][3]
2829 if base == rev:
2641 if base == rev:
2830 return nullrev
2642 return nullrev
2831 elif self.delta_config.general_delta:
2643 elif self.delta_config.general_delta:
2832 return base
2644 return base
2833 else:
2645 else:
2834 return rev - 1
2646 return rev - 1
2835
2647
2836 def issnapshot(self, rev):
2648 def issnapshot(self, rev):
2837 """tells whether rev is a snapshot"""
2649 """tells whether rev is a snapshot"""
2838 ret = self._inner.issnapshot(rev)
2650 ret = self._inner.issnapshot(rev)
2839 self.issnapshot = self._inner.issnapshot
2651 self.issnapshot = self._inner.issnapshot
2840 return ret
2652 return ret
2841
2653
2842 def snapshotdepth(self, rev):
2654 def snapshotdepth(self, rev):
2843 """number of snapshot in the chain before this one"""
2655 """number of snapshot in the chain before this one"""
2844 if not self.issnapshot(rev):
2656 if not self.issnapshot(rev):
2845 raise error.ProgrammingError(b'revision %d not a snapshot')
2657 raise error.ProgrammingError(b'revision %d not a snapshot')
2846 return len(self._inner._deltachain(rev)[0]) - 1
2658 return len(self._inner._deltachain(rev)[0]) - 1
2847
2659
2848 def revdiff(self, rev1, rev2):
2660 def revdiff(self, rev1, rev2):
2849 """return or calculate a delta between two revisions
2661 """return or calculate a delta between two revisions
2850
2662
2851 The delta calculated is in binary form and is intended to be written to
2663 The delta calculated is in binary form and is intended to be written to
2852 revlog data directly. So this function needs raw revision data.
2664 revlog data directly. So this function needs raw revision data.
2853 """
2665 """
2854 if rev1 != nullrev and self.deltaparent(rev2) == rev1:
2666 if rev1 != nullrev and self.deltaparent(rev2) == rev1:
2855 return bytes(self._inner._chunk(rev2))
2667 return bytes(self._inner._chunk(rev2))
2856
2668
2857 return mdiff.textdiff(self.rawdata(rev1), self.rawdata(rev2))
2669 return mdiff.textdiff(self.rawdata(rev1), self.rawdata(rev2))
2858
2670
2859 def revision(self, nodeorrev):
2671 def revision(self, nodeorrev):
2860 """return an uncompressed revision of a given node or revision
2672 """return an uncompressed revision of a given node or revision
2861 number.
2673 number.
2862 """
2674 """
2863 return self._revisiondata(nodeorrev)
2675 return self._revisiondata(nodeorrev)
2864
2676
2865 def sidedata(self, nodeorrev):
2677 def sidedata(self, nodeorrev):
2866 """a map of extra data related to the changeset but not part of the hash
2678 """a map of extra data related to the changeset but not part of the hash
2867
2679
2868 This function currently return a dictionary. However, more advanced
2680 This function currently return a dictionary. However, more advanced
2869 mapping object will likely be used in the future for a more
2681 mapping object will likely be used in the future for a more
2870 efficient/lazy code.
2682 efficient/lazy code.
2871 """
2683 """
2872 # deal with <nodeorrev> argument type
2684 # deal with <nodeorrev> argument type
2873 if isinstance(nodeorrev, int):
2685 if isinstance(nodeorrev, int):
2874 rev = nodeorrev
2686 rev = nodeorrev
2875 else:
2687 else:
2876 rev = self.rev(nodeorrev)
2688 rev = self.rev(nodeorrev)
2877 return self._sidedata(rev)
2689 return self._sidedata(rev)
2878
2690
2879 def _rawtext(self, node, rev):
2691 def _rawtext(self, node, rev):
2880 """return the possibly unvalidated rawtext for a revision
2692 """return the possibly unvalidated rawtext for a revision
2881
2693
2882 returns (rev, rawtext, validated)
2694 returns (rev, rawtext, validated)
2883 """
2695 """
2884 # Check if we have the entry in cache
2696 # Check if we have the entry in cache
2885 # The cache entry looks like (node, rev, rawtext)
2697 # The cache entry looks like (node, rev, rawtext)
2886 if self._inner._revisioncache:
2698 if self._inner._revisioncache:
2887 if self._inner._revisioncache[0] == node:
2699 if self._inner._revisioncache[0] == node:
2888 return (rev, self._inner._revisioncache[2], True)
2700 return (rev, self._inner._revisioncache[2], True)
2889
2701
2890 if rev is None:
2702 if rev is None:
2891 rev = self.rev(node)
2703 rev = self.rev(node)
2892
2704
2893 return self._inner.raw_text(node, rev)
2705 return self._inner.raw_text(node, rev)
2894
2706
2895 def _revisiondata(self, nodeorrev, raw=False):
2707 def _revisiondata(self, nodeorrev, raw=False):
2896 # deal with <nodeorrev> argument type
2708 # deal with <nodeorrev> argument type
2897 if isinstance(nodeorrev, int):
2709 if isinstance(nodeorrev, int):
2898 rev = nodeorrev
2710 rev = nodeorrev
2899 node = self.node(rev)
2711 node = self.node(rev)
2900 else:
2712 else:
2901 node = nodeorrev
2713 node = nodeorrev
2902 rev = None
2714 rev = None
2903
2715
2904 # fast path the special `nullid` rev
2716 # fast path the special `nullid` rev
2905 if node == self.nullid:
2717 if node == self.nullid:
2906 return b""
2718 return b""
2907
2719
2908 # ``rawtext`` is the text as stored inside the revlog. Might be the
2720 # ``rawtext`` is the text as stored inside the revlog. Might be the
2909 # revision or might need to be processed to retrieve the revision.
2721 # revision or might need to be processed to retrieve the revision.
2910 rev, rawtext, validated = self._rawtext(node, rev)
2722 rev, rawtext, validated = self._rawtext(node, rev)
2911
2723
2912 if raw and validated:
2724 if raw and validated:
2913 # if we don't want to process the raw text and that raw
2725 # if we don't want to process the raw text and that raw
2914 # text is cached, we can exit early.
2726 # text is cached, we can exit early.
2915 return rawtext
2727 return rawtext
2916 if rev is None:
2728 if rev is None:
2917 rev = self.rev(node)
2729 rev = self.rev(node)
2918 # the revlog's flag for this revision
2730 # the revlog's flag for this revision
2919 # (usually alter its state or content)
2731 # (usually alter its state or content)
2920 flags = self.flags(rev)
2732 flags = self.flags(rev)
2921
2733
2922 if validated and flags == REVIDX_DEFAULT_FLAGS:
2734 if validated and flags == REVIDX_DEFAULT_FLAGS:
2923 # no extra flags set, no flag processor runs, text = rawtext
2735 # no extra flags set, no flag processor runs, text = rawtext
2924 return rawtext
2736 return rawtext
2925
2737
2926 if raw:
2738 if raw:
2927 validatehash = flagutil.processflagsraw(self, rawtext, flags)
2739 validatehash = flagutil.processflagsraw(self, rawtext, flags)
2928 text = rawtext
2740 text = rawtext
2929 else:
2741 else:
2930 r = flagutil.processflagsread(self, rawtext, flags)
2742 r = flagutil.processflagsread(self, rawtext, flags)
2931 text, validatehash = r
2743 text, validatehash = r
2932 if validatehash:
2744 if validatehash:
2933 self.checkhash(text, node, rev=rev)
2745 self.checkhash(text, node, rev=rev)
2934 if not validated:
2746 if not validated:
2935 self._inner._revisioncache = (node, rev, rawtext)
2747 self._inner._revisioncache = (node, rev, rawtext)
2936
2748
2937 return text
2749 return text
2938
2750
2939 def _sidedata(self, rev):
2751 def _sidedata(self, rev):
2940 """Return the sidedata for a given revision number."""
2752 """Return the sidedata for a given revision number."""
2941 sidedata_end = None
2753 sidedata_end = None
2942 if self._docket is not None:
2754 if self._docket is not None:
2943 sidedata_end = self._docket.sidedata_end
2755 sidedata_end = self._docket.sidedata_end
2944 return self._inner.sidedata(rev, sidedata_end)
2756 return self._inner.sidedata(rev, sidedata_end)
2945
2757
2946 def rawdata(self, nodeorrev):
2758 def rawdata(self, nodeorrev):
2947 """return an uncompressed raw data of a given node or revision number."""
2759 """return an uncompressed raw data of a given node or revision number."""
2948 return self._revisiondata(nodeorrev, raw=True)
2760 return self._revisiondata(nodeorrev, raw=True)
2949
2761
2950 def hash(self, text, p1, p2):
2762 def hash(self, text, p1, p2):
2951 """Compute a node hash.
2763 """Compute a node hash.
2952
2764
2953 Available as a function so that subclasses can replace the hash
2765 Available as a function so that subclasses can replace the hash
2954 as needed.
2766 as needed.
2955 """
2767 """
2956 return storageutil.hashrevisionsha1(text, p1, p2)
2768 return storageutil.hashrevisionsha1(text, p1, p2)
2957
2769
2958 def checkhash(self, text, node, p1=None, p2=None, rev=None):
2770 def checkhash(self, text, node, p1=None, p2=None, rev=None):
2959 """Check node hash integrity.
2771 """Check node hash integrity.
2960
2772
2961 Available as a function so that subclasses can extend hash mismatch
2773 Available as a function so that subclasses can extend hash mismatch
2962 behaviors as needed.
2774 behaviors as needed.
2963 """
2775 """
2964 try:
2776 try:
2965 if p1 is None and p2 is None:
2777 if p1 is None and p2 is None:
2966 p1, p2 = self.parents(node)
2778 p1, p2 = self.parents(node)
2967 if node != self.hash(text, p1, p2):
2779 if node != self.hash(text, p1, p2):
2968 # Clear the revision cache on hash failure. The revision cache
2780 # Clear the revision cache on hash failure. The revision cache
2969 # only stores the raw revision and clearing the cache does have
2781 # only stores the raw revision and clearing the cache does have
2970 # the side-effect that we won't have a cache hit when the raw
2782 # the side-effect that we won't have a cache hit when the raw
2971 # revision data is accessed. But this case should be rare and
2783 # revision data is accessed. But this case should be rare and
2972 # it is extra work to teach the cache about the hash
2784 # it is extra work to teach the cache about the hash
2973 # verification state.
2785 # verification state.
2974 if (
2786 if (
2975 self._inner._revisioncache
2787 self._inner._revisioncache
2976 and self._inner._revisioncache[0] == node
2788 and self._inner._revisioncache[0] == node
2977 ):
2789 ):
2978 self._inner._revisioncache = None
2790 self._inner._revisioncache = None
2979
2791
2980 revornode = rev
2792 revornode = rev
2981 if revornode is None:
2793 if revornode is None:
2982 revornode = templatefilters.short(hex(node))
2794 revornode = templatefilters.short(hex(node))
2983 raise error.RevlogError(
2795 raise error.RevlogError(
2984 _(b"integrity check failed on %s:%s")
2796 _(b"integrity check failed on %s:%s")
2985 % (self.display_id, pycompat.bytestr(revornode))
2797 % (self.display_id, pycompat.bytestr(revornode))
2986 )
2798 )
2987 except error.RevlogError:
2799 except error.RevlogError:
2988 if self.feature_config.censorable and storageutil.iscensoredtext(
2800 if self.feature_config.censorable and storageutil.iscensoredtext(
2989 text
2801 text
2990 ):
2802 ):
2991 raise error.CensoredNodeError(self.display_id, node, text)
2803 raise error.CensoredNodeError(self.display_id, node, text)
2992 raise
2804 raise
2993
2805
2994 @property
2806 @property
2995 def _split_index_file(self):
2807 def _split_index_file(self):
2996 """the path where to expect the index of an ongoing splitting operation
2808 """the path where to expect the index of an ongoing splitting operation
2997
2809
2998 The file will only exist if a splitting operation is in progress, but
2810 The file will only exist if a splitting operation is in progress, but
2999 it is always expected at the same location."""
2811 it is always expected at the same location."""
3000 parts = self.radix.split(b'/')
2812 parts = self.radix.split(b'/')
3001 if len(parts) > 1:
2813 if len(parts) > 1:
3002 # adds a '-s' prefix to the ``data/` or `meta/` base
2814 # adds a '-s' prefix to the ``data/` or `meta/` base
3003 head = parts[0] + b'-s'
2815 head = parts[0] + b'-s'
3004 mids = parts[1:-1]
2816 mids = parts[1:-1]
3005 tail = parts[-1] + b'.i'
2817 tail = parts[-1] + b'.i'
3006 pieces = [head] + mids + [tail]
2818 pieces = [head] + mids + [tail]
3007 return b'/'.join(pieces)
2819 return b'/'.join(pieces)
3008 else:
2820 else:
3009 # the revlog is stored at the root of the store (changelog or
2821 # the revlog is stored at the root of the store (changelog or
3010 # manifest), no risk of collision.
2822 # manifest), no risk of collision.
3011 return self.radix + b'.i.s'
2823 return self.radix + b'.i.s'
3012
2824
3013 def _enforceinlinesize(self, tr, side_write=True):
2825 def _enforceinlinesize(self, tr, side_write=True):
3014 """Check if the revlog is too big for inline and convert if so.
2826 """Check if the revlog is too big for inline and convert if so.
3015
2827
3016 This should be called after revisions are added to the revlog. If the
2828 This should be called after revisions are added to the revlog. If the
3017 revlog has grown too large to be an inline revlog, it will convert it
2829 revlog has grown too large to be an inline revlog, it will convert it
3018 to use multiple index and data files.
2830 to use multiple index and data files.
3019 """
2831 """
3020 tiprev = len(self) - 1
2832 tiprev = len(self) - 1
3021 total_size = self.start(tiprev) + self.length(tiprev)
2833 total_size = self.start(tiprev) + self.length(tiprev)
3022 if not self._inline or total_size < _maxinline:
2834 if not self._inline or total_size < _maxinline:
3023 return
2835 return
3024
2836
3025 if self._docket is not None:
2837 if self._docket is not None:
3026 msg = b"inline revlog should not have a docket"
2838 msg = b"inline revlog should not have a docket"
3027 raise error.ProgrammingError(msg)
2839 raise error.ProgrammingError(msg)
3028
2840
2841 # In the common case, we enforce inline size because the revlog has
2842 # been appened too. And in such case, it must have an initial offset
2843 # recorded in the transaction.
3029 troffset = tr.findoffset(self._inner.canonical_index_file)
2844 troffset = tr.findoffset(self._inner.canonical_index_file)
3030 if troffset is None:
2845 pre_touched = troffset is not None
2846 if not pre_touched and self.target[0] != KIND_CHANGELOG:
3031 raise error.RevlogError(
2847 raise error.RevlogError(
3032 _(b"%s not found in the transaction") % self._indexfile
2848 _(b"%s not found in the transaction") % self._indexfile
3033 )
2849 )
3034 if troffset:
2850
3035 tr.addbackup(self._inner.canonical_index_file, for_offset=True)
2851 tr.addbackup(self._inner.canonical_index_file, for_offset=pre_touched)
3036 tr.add(self._datafile, 0)
2852 tr.add(self._datafile, 0)
3037
2853
3038 new_index_file_path = None
2854 new_index_file_path = None
3039 if side_write:
2855 if side_write:
3040 old_index_file_path = self._indexfile
2856 old_index_file_path = self._indexfile
3041 new_index_file_path = self._split_index_file
2857 new_index_file_path = self._split_index_file
3042 opener = self.opener
2858 opener = self.opener
3043 weak_self = weakref.ref(self)
2859 weak_self = weakref.ref(self)
3044
2860
3045 # the "split" index replace the real index when the transaction is
2861 # the "split" index replace the real index when the transaction is
3046 # finalized
2862 # finalized
3047 def finalize_callback(tr):
2863 def finalize_callback(tr):
3048 opener.rename(
2864 opener.rename(
3049 new_index_file_path,
2865 new_index_file_path,
3050 old_index_file_path,
2866 old_index_file_path,
3051 checkambig=True,
2867 checkambig=True,
3052 )
2868 )
3053 maybe_self = weak_self()
2869 maybe_self = weak_self()
3054 if maybe_self is not None:
2870 if maybe_self is not None:
3055 maybe_self._indexfile = old_index_file_path
2871 maybe_self._indexfile = old_index_file_path
3056 maybe_self._inner.index_file = maybe_self._indexfile
2872 maybe_self._inner.index_file = maybe_self._indexfile
3057
2873
3058 def abort_callback(tr):
2874 def abort_callback(tr):
3059 maybe_self = weak_self()
2875 maybe_self = weak_self()
3060 if maybe_self is not None:
2876 if maybe_self is not None:
3061 maybe_self._indexfile = old_index_file_path
2877 maybe_self._indexfile = old_index_file_path
3062 maybe_self._inner.inline = True
2878 maybe_self._inner.inline = True
3063 maybe_self._inner.index_file = old_index_file_path
2879 maybe_self._inner.index_file = old_index_file_path
3064
2880
3065 tr.registertmp(new_index_file_path)
2881 tr.registertmp(new_index_file_path)
3066 if self.target[1] is not None:
2882 if self.target[1] is not None:
3067 callback_id = b'000-revlog-split-%d-%s' % self.target
2883 callback_id = b'000-revlog-split-%d-%s' % self.target
3068 else:
2884 else:
3069 callback_id = b'000-revlog-split-%d' % self.target[0]
2885 callback_id = b'000-revlog-split-%d' % self.target[0]
3070 tr.addfinalize(callback_id, finalize_callback)
2886 tr.addfinalize(callback_id, finalize_callback)
3071 tr.addabort(callback_id, abort_callback)
2887 tr.addabort(callback_id, abort_callback)
3072
2888
3073 self._format_flags &= ~FLAG_INLINE_DATA
2889 self._format_flags &= ~FLAG_INLINE_DATA
3074 self._inner.split_inline(
2890 self._inner.split_inline(
3075 tr,
2891 tr,
3076 self._format_flags | self._format_version,
2892 self._format_flags | self._format_version,
3077 new_index_file_path=new_index_file_path,
2893 new_index_file_path=new_index_file_path,
3078 )
2894 )
3079
2895
3080 self._inline = False
2896 self._inline = False
3081 if new_index_file_path is not None:
2897 if new_index_file_path is not None:
3082 self._indexfile = new_index_file_path
2898 self._indexfile = new_index_file_path
3083
2899
3084 nodemaputil.setup_persistent_nodemap(tr, self)
2900 nodemaputil.setup_persistent_nodemap(tr, self)
3085
2901
3086 def _nodeduplicatecallback(self, transaction, node):
2902 def _nodeduplicatecallback(self, transaction, node):
3087 """called when trying to add a node already stored."""
2903 """called when trying to add a node already stored."""
3088
2904
3089 @contextlib.contextmanager
2905 @contextlib.contextmanager
3090 def reading(self):
2906 def reading(self):
3091 with self._inner.reading():
2907 with self._inner.reading():
3092 yield
2908 yield
3093
2909
3094 @contextlib.contextmanager
2910 @contextlib.contextmanager
3095 def _writing(self, transaction):
2911 def _writing(self, transaction):
3096 if self._trypending:
2912 if self._trypending:
3097 msg = b'try to write in a `trypending` revlog: %s'
2913 msg = b'try to write in a `trypending` revlog: %s'
3098 msg %= self.display_id
2914 msg %= self.display_id
3099 raise error.ProgrammingError(msg)
2915 raise error.ProgrammingError(msg)
3100 if self._inner.is_writing:
2916 if self._inner.is_writing:
3101 yield
2917 yield
3102 else:
2918 else:
3103 data_end = None
2919 data_end = None
3104 sidedata_end = None
2920 sidedata_end = None
3105 if self._docket is not None:
2921 if self._docket is not None:
3106 data_end = self._docket.data_end
2922 data_end = self._docket.data_end
3107 sidedata_end = self._docket.sidedata_end
2923 sidedata_end = self._docket.sidedata_end
3108 with self._inner.writing(
2924 with self._inner.writing(
3109 transaction,
2925 transaction,
3110 data_end=data_end,
2926 data_end=data_end,
3111 sidedata_end=sidedata_end,
2927 sidedata_end=sidedata_end,
3112 ):
2928 ):
3113 yield
2929 yield
3114 if self._docket is not None:
2930 if self._docket is not None:
3115 self._write_docket(transaction)
2931 self._write_docket(transaction)
3116
2932
3117 @property
2933 @property
3118 def is_delaying(self):
2934 def is_delaying(self):
3119 return self._inner.is_delaying
2935 return self._inner.is_delaying
3120
2936
3121 def _write_docket(self, transaction):
2937 def _write_docket(self, transaction):
3122 """write the current docket on disk
2938 """write the current docket on disk
3123
2939
3124 Exist as a method to help changelog to implement transaction logic
2940 Exist as a method to help changelog to implement transaction logic
3125
2941
3126 We could also imagine using the same transaction logic for all revlog
2942 We could also imagine using the same transaction logic for all revlog
3127 since docket are cheap."""
2943 since docket are cheap."""
3128 self._docket.write(transaction)
2944 self._docket.write(transaction)
3129
2945
3130 def addrevision(
2946 def addrevision(
3131 self,
2947 self,
3132 text,
2948 text,
3133 transaction,
2949 transaction,
3134 link,
2950 link,
3135 p1,
2951 p1,
3136 p2,
2952 p2,
3137 cachedelta=None,
2953 cachedelta=None,
3138 node=None,
2954 node=None,
3139 flags=REVIDX_DEFAULT_FLAGS,
2955 flags=REVIDX_DEFAULT_FLAGS,
3140 deltacomputer=None,
2956 deltacomputer=None,
3141 sidedata=None,
2957 sidedata=None,
3142 ):
2958 ):
3143 """add a revision to the log
2959 """add a revision to the log
3144
2960
3145 text - the revision data to add
2961 text - the revision data to add
3146 transaction - the transaction object used for rollback
2962 transaction - the transaction object used for rollback
3147 link - the linkrev data to add
2963 link - the linkrev data to add
3148 p1, p2 - the parent nodeids of the revision
2964 p1, p2 - the parent nodeids of the revision
3149 cachedelta - an optional precomputed delta
2965 cachedelta - an optional precomputed delta
3150 node - nodeid of revision; typically node is not specified, and it is
2966 node - nodeid of revision; typically node is not specified, and it is
3151 computed by default as hash(text, p1, p2), however subclasses might
2967 computed by default as hash(text, p1, p2), however subclasses might
3152 use different hashing method (and override checkhash() in such case)
2968 use different hashing method (and override checkhash() in such case)
3153 flags - the known flags to set on the revision
2969 flags - the known flags to set on the revision
3154 deltacomputer - an optional deltacomputer instance shared between
2970 deltacomputer - an optional deltacomputer instance shared between
3155 multiple calls
2971 multiple calls
3156 """
2972 """
3157 if link == nullrev:
2973 if link == nullrev:
3158 raise error.RevlogError(
2974 raise error.RevlogError(
3159 _(b"attempted to add linkrev -1 to %s") % self.display_id
2975 _(b"attempted to add linkrev -1 to %s") % self.display_id
3160 )
2976 )
3161
2977
3162 if sidedata is None:
2978 if sidedata is None:
3163 sidedata = {}
2979 sidedata = {}
3164 elif sidedata and not self.feature_config.has_side_data:
2980 elif sidedata and not self.feature_config.has_side_data:
3165 raise error.ProgrammingError(
2981 raise error.ProgrammingError(
3166 _(b"trying to add sidedata to a revlog who don't support them")
2982 _(b"trying to add sidedata to a revlog who don't support them")
3167 )
2983 )
3168
2984
3169 if flags:
2985 if flags:
3170 node = node or self.hash(text, p1, p2)
2986 node = node or self.hash(text, p1, p2)
3171
2987
3172 rawtext, validatehash = flagutil.processflagswrite(self, text, flags)
2988 rawtext, validatehash = flagutil.processflagswrite(self, text, flags)
3173
2989
3174 # If the flag processor modifies the revision data, ignore any provided
2990 # If the flag processor modifies the revision data, ignore any provided
3175 # cachedelta.
2991 # cachedelta.
3176 if rawtext != text:
2992 if rawtext != text:
3177 cachedelta = None
2993 cachedelta = None
3178
2994
3179 if len(rawtext) > _maxentrysize:
2995 if len(rawtext) > _maxentrysize:
3180 raise error.RevlogError(
2996 raise error.RevlogError(
3181 _(
2997 _(
3182 b"%s: size of %d bytes exceeds maximum revlog storage of 2GiB"
2998 b"%s: size of %d bytes exceeds maximum revlog storage of 2GiB"
3183 )
2999 )
3184 % (self.display_id, len(rawtext))
3000 % (self.display_id, len(rawtext))
3185 )
3001 )
3186
3002
3187 node = node or self.hash(rawtext, p1, p2)
3003 node = node or self.hash(rawtext, p1, p2)
3188 rev = self.index.get_rev(node)
3004 rev = self.index.get_rev(node)
3189 if rev is not None:
3005 if rev is not None:
3190 return rev
3006 return rev
3191
3007
3192 if validatehash:
3008 if validatehash:
3193 self.checkhash(rawtext, node, p1=p1, p2=p2)
3009 self.checkhash(rawtext, node, p1=p1, p2=p2)
3194
3010
3195 return self.addrawrevision(
3011 return self.addrawrevision(
3196 rawtext,
3012 rawtext,
3197 transaction,
3013 transaction,
3198 link,
3014 link,
3199 p1,
3015 p1,
3200 p2,
3016 p2,
3201 node,
3017 node,
3202 flags,
3018 flags,
3203 cachedelta=cachedelta,
3019 cachedelta=cachedelta,
3204 deltacomputer=deltacomputer,
3020 deltacomputer=deltacomputer,
3205 sidedata=sidedata,
3021 sidedata=sidedata,
3206 )
3022 )
3207
3023
3208 def addrawrevision(
3024 def addrawrevision(
3209 self,
3025 self,
3210 rawtext,
3026 rawtext,
3211 transaction,
3027 transaction,
3212 link,
3028 link,
3213 p1,
3029 p1,
3214 p2,
3030 p2,
3215 node,
3031 node,
3216 flags,
3032 flags,
3217 cachedelta=None,
3033 cachedelta=None,
3218 deltacomputer=None,
3034 deltacomputer=None,
3219 sidedata=None,
3035 sidedata=None,
3220 ):
3036 ):
3221 """add a raw revision with known flags, node and parents
3037 """add a raw revision with known flags, node and parents
3222 useful when reusing a revision not stored in this revlog (ex: received
3038 useful when reusing a revision not stored in this revlog (ex: received
3223 over wire, or read from an external bundle).
3039 over wire, or read from an external bundle).
3224 """
3040 """
3225 with self._writing(transaction):
3041 with self._writing(transaction):
3226 return self._addrevision(
3042 return self._addrevision(
3227 node,
3043 node,
3228 rawtext,
3044 rawtext,
3229 transaction,
3045 transaction,
3230 link,
3046 link,
3231 p1,
3047 p1,
3232 p2,
3048 p2,
3233 flags,
3049 flags,
3234 cachedelta,
3050 cachedelta,
3235 deltacomputer=deltacomputer,
3051 deltacomputer=deltacomputer,
3236 sidedata=sidedata,
3052 sidedata=sidedata,
3237 )
3053 )
3238
3054
3239 def compress(self, data):
3055 def compress(self, data):
3240 return self._inner.compress(data)
3056 return self._inner.compress(data)
3241
3057
3242 def decompress(self, data):
3058 def decompress(self, data):
3243 return self._inner.decompress(data)
3059 return self._inner.decompress(data)
3244
3060
3245 def _addrevision(
3061 def _addrevision(
3246 self,
3062 self,
3247 node,
3063 node,
3248 rawtext,
3064 rawtext,
3249 transaction,
3065 transaction,
3250 link,
3066 link,
3251 p1,
3067 p1,
3252 p2,
3068 p2,
3253 flags,
3069 flags,
3254 cachedelta,
3070 cachedelta,
3255 alwayscache=False,
3071 alwayscache=False,
3256 deltacomputer=None,
3072 deltacomputer=None,
3257 sidedata=None,
3073 sidedata=None,
3258 ):
3074 ):
3259 """internal function to add revisions to the log
3075 """internal function to add revisions to the log
3260
3076
3261 see addrevision for argument descriptions.
3077 see addrevision for argument descriptions.
3262
3078
3263 note: "addrevision" takes non-raw text, "_addrevision" takes raw text.
3079 note: "addrevision" takes non-raw text, "_addrevision" takes raw text.
3264
3080
3265 if "deltacomputer" is not provided or None, a defaultdeltacomputer will
3081 if "deltacomputer" is not provided or None, a defaultdeltacomputer will
3266 be used.
3082 be used.
3267
3083
3268 invariants:
3084 invariants:
3269 - rawtext is optional (can be None); if not set, cachedelta must be set.
3085 - rawtext is optional (can be None); if not set, cachedelta must be set.
3270 if both are set, they must correspond to each other.
3086 if both are set, they must correspond to each other.
3271 """
3087 """
3272 if node == self.nullid:
3088 if node == self.nullid:
3273 raise error.RevlogError(
3089 raise error.RevlogError(
3274 _(b"%s: attempt to add null revision") % self.display_id
3090 _(b"%s: attempt to add null revision") % self.display_id
3275 )
3091 )
3276 if (
3092 if (
3277 node == self.nodeconstants.wdirid
3093 node == self.nodeconstants.wdirid
3278 or node in self.nodeconstants.wdirfilenodeids
3094 or node in self.nodeconstants.wdirfilenodeids
3279 ):
3095 ):
3280 raise error.RevlogError(
3096 raise error.RevlogError(
3281 _(b"%s: attempt to add wdir revision") % self.display_id
3097 _(b"%s: attempt to add wdir revision") % self.display_id
3282 )
3098 )
3283 if self._inner._writinghandles is None:
3099 if self._inner._writinghandles is None:
3284 msg = b'adding revision outside `revlog._writing` context'
3100 msg = b'adding revision outside `revlog._writing` context'
3285 raise error.ProgrammingError(msg)
3101 raise error.ProgrammingError(msg)
3286
3102
3287 btext = [rawtext]
3103 btext = [rawtext]
3288
3104
3289 curr = len(self)
3105 curr = len(self)
3290 prev = curr - 1
3106 prev = curr - 1
3291
3107
3292 offset = self._get_data_offset(prev)
3108 offset = self._get_data_offset(prev)
3293
3109
3294 if self._concurrencychecker:
3110 if self._concurrencychecker:
3295 ifh, dfh, sdfh = self._inner._writinghandles
3111 ifh, dfh, sdfh = self._inner._writinghandles
3296 # XXX no checking for the sidedata file
3112 # XXX no checking for the sidedata file
3297 if self._inline:
3113 if self._inline:
3298 # offset is "as if" it were in the .d file, so we need to add on
3114 # offset is "as if" it were in the .d file, so we need to add on
3299 # the size of the entry metadata.
3115 # the size of the entry metadata.
3300 self._concurrencychecker(
3116 self._concurrencychecker(
3301 ifh, self._indexfile, offset + curr * self.index.entry_size
3117 ifh, self._indexfile, offset + curr * self.index.entry_size
3302 )
3118 )
3303 else:
3119 else:
3304 # Entries in the .i are a consistent size.
3120 # Entries in the .i are a consistent size.
3305 self._concurrencychecker(
3121 self._concurrencychecker(
3306 ifh, self._indexfile, curr * self.index.entry_size
3122 ifh, self._indexfile, curr * self.index.entry_size
3307 )
3123 )
3308 self._concurrencychecker(dfh, self._datafile, offset)
3124 self._concurrencychecker(dfh, self._datafile, offset)
3309
3125
3310 p1r, p2r = self.rev(p1), self.rev(p2)
3126 p1r, p2r = self.rev(p1), self.rev(p2)
3311
3127
3312 # full versions are inserted when the needed deltas
3128 # full versions are inserted when the needed deltas
3313 # become comparable to the uncompressed text
3129 # become comparable to the uncompressed text
3314 if rawtext is None:
3130 if rawtext is None:
3315 # we need the rawtext size before it is changed by flag processors, which is
3131 # we need the rawtext size before it is changed by flag processors, which is
3316 # the non-raw size. use revlog explicitly to avoid filelog's extra
3132 # the non-raw size. use revlog explicitly to avoid filelog's extra
3317 # logic that might remove metadata size.
3133 # logic that might remove metadata size.
3318 textlen = mdiff.patchedsize(
3134 textlen = mdiff.patchedsize(
3319 revlog.size(self, cachedelta[0]), cachedelta[1]
3135 revlog.size(self, cachedelta[0]), cachedelta[1]
3320 )
3136 )
3321 else:
3137 else:
3322 textlen = len(rawtext)
3138 textlen = len(rawtext)
3323
3139
3324 if deltacomputer is None:
3140 if deltacomputer is None:
3325 write_debug = None
3141 write_debug = None
3326 if self.delta_config.debug_delta:
3142 if self.delta_config.debug_delta:
3327 write_debug = transaction._report
3143 write_debug = transaction._report
3328 deltacomputer = deltautil.deltacomputer(
3144 deltacomputer = deltautil.deltacomputer(
3329 self, write_debug=write_debug
3145 self, write_debug=write_debug
3330 )
3146 )
3331
3147
3332 if cachedelta is not None and len(cachedelta) == 2:
3148 if cachedelta is not None and len(cachedelta) == 2:
3333 # If the cached delta has no information about how it should be
3149 # If the cached delta has no information about how it should be
3334 # reused, add the default reuse instruction according to the
3150 # reused, add the default reuse instruction according to the
3335 # revlog's configuration.
3151 # revlog's configuration.
3336 if (
3152 if (
3337 self.delta_config.general_delta
3153 self.delta_config.general_delta
3338 and self.delta_config.lazy_delta_base
3154 and self.delta_config.lazy_delta_base
3339 ):
3155 ):
3340 delta_base_reuse = DELTA_BASE_REUSE_TRY
3156 delta_base_reuse = DELTA_BASE_REUSE_TRY
3341 else:
3157 else:
3342 delta_base_reuse = DELTA_BASE_REUSE_NO
3158 delta_base_reuse = DELTA_BASE_REUSE_NO
3343 cachedelta = (cachedelta[0], cachedelta[1], delta_base_reuse)
3159 cachedelta = (cachedelta[0], cachedelta[1], delta_base_reuse)
3344
3160
3345 revinfo = revlogutils.revisioninfo(
3161 revinfo = revlogutils.revisioninfo(
3346 node,
3162 node,
3347 p1,
3163 p1,
3348 p2,
3164 p2,
3349 btext,
3165 btext,
3350 textlen,
3166 textlen,
3351 cachedelta,
3167 cachedelta,
3352 flags,
3168 flags,
3353 )
3169 )
3354
3170
3355 deltainfo = deltacomputer.finddeltainfo(revinfo)
3171 deltainfo = deltacomputer.finddeltainfo(revinfo)
3356
3172
3357 compression_mode = COMP_MODE_INLINE
3173 compression_mode = COMP_MODE_INLINE
3358 if self._docket is not None:
3174 if self._docket is not None:
3359 default_comp = self._docket.default_compression_header
3175 default_comp = self._docket.default_compression_header
3360 r = deltautil.delta_compression(default_comp, deltainfo)
3176 r = deltautil.delta_compression(default_comp, deltainfo)
3361 compression_mode, deltainfo = r
3177 compression_mode, deltainfo = r
3362
3178
3363 sidedata_compression_mode = COMP_MODE_INLINE
3179 sidedata_compression_mode = COMP_MODE_INLINE
3364 if sidedata and self.feature_config.has_side_data:
3180 if sidedata and self.feature_config.has_side_data:
3365 sidedata_compression_mode = COMP_MODE_PLAIN
3181 sidedata_compression_mode = COMP_MODE_PLAIN
3366 serialized_sidedata = sidedatautil.serialize_sidedata(sidedata)
3182 serialized_sidedata = sidedatautil.serialize_sidedata(sidedata)
3367 sidedata_offset = self._docket.sidedata_end
3183 sidedata_offset = self._docket.sidedata_end
3368 h, comp_sidedata = self._inner.compress(serialized_sidedata)
3184 h, comp_sidedata = self._inner.compress(serialized_sidedata)
3369 if (
3185 if (
3370 h != b'u'
3186 h != b'u'
3371 and comp_sidedata[0:1] != b'\0'
3187 and comp_sidedata[0:1] != b'\0'
3372 and len(comp_sidedata) < len(serialized_sidedata)
3188 and len(comp_sidedata) < len(serialized_sidedata)
3373 ):
3189 ):
3374 assert not h
3190 assert not h
3375 if (
3191 if (
3376 comp_sidedata[0:1]
3192 comp_sidedata[0:1]
3377 == self._docket.default_compression_header
3193 == self._docket.default_compression_header
3378 ):
3194 ):
3379 sidedata_compression_mode = COMP_MODE_DEFAULT
3195 sidedata_compression_mode = COMP_MODE_DEFAULT
3380 serialized_sidedata = comp_sidedata
3196 serialized_sidedata = comp_sidedata
3381 else:
3197 else:
3382 sidedata_compression_mode = COMP_MODE_INLINE
3198 sidedata_compression_mode = COMP_MODE_INLINE
3383 serialized_sidedata = comp_sidedata
3199 serialized_sidedata = comp_sidedata
3384 else:
3200 else:
3385 serialized_sidedata = b""
3201 serialized_sidedata = b""
3386 # Don't store the offset if the sidedata is empty; that way
3202 # Don't store the offset if the sidedata is empty; that way
3387 # we can easily detect empty sidedata, and it will be no different
3203 # we can easily detect empty sidedata, and it will be no different
3388 # from sidedata we add manually.
3204 # from sidedata we add manually.
3389 sidedata_offset = 0
3205 sidedata_offset = 0
3390
3206
3391 rank = RANK_UNKNOWN
3207 rank = RANK_UNKNOWN
3392 if self.feature_config.compute_rank:
3208 if self.feature_config.compute_rank:
3393 if (p1r, p2r) == (nullrev, nullrev):
3209 if (p1r, p2r) == (nullrev, nullrev):
3394 rank = 1
3210 rank = 1
3395 elif p1r != nullrev and p2r == nullrev:
3211 elif p1r != nullrev and p2r == nullrev:
3396 rank = 1 + self.fast_rank(p1r)
3212 rank = 1 + self.fast_rank(p1r)
3397 elif p1r == nullrev and p2r != nullrev:
3213 elif p1r == nullrev and p2r != nullrev:
3398 rank = 1 + self.fast_rank(p2r)
3214 rank = 1 + self.fast_rank(p2r)
3399 else: # merge node
3215 else: # merge node
3400 if rustdagop is not None and self.index.rust_ext_compat:
3216 if rustdagop is not None and self.index.rust_ext_compat:
3401 rank = rustdagop.rank(self.index, p1r, p2r)
3217 rank = rustdagop.rank(self.index, p1r, p2r)
3402 else:
3218 else:
3403 pmin, pmax = sorted((p1r, p2r))
3219 pmin, pmax = sorted((p1r, p2r))
3404 rank = 1 + self.fast_rank(pmax)
3220 rank = 1 + self.fast_rank(pmax)
3405 rank += sum(1 for _ in self.findmissingrevs([pmax], [pmin]))
3221 rank += sum(1 for _ in self.findmissingrevs([pmax], [pmin]))
3406
3222
3407 e = revlogutils.entry(
3223 e = revlogutils.entry(
3408 flags=flags,
3224 flags=flags,
3409 data_offset=offset,
3225 data_offset=offset,
3410 data_compressed_length=deltainfo.deltalen,
3226 data_compressed_length=deltainfo.deltalen,
3411 data_uncompressed_length=textlen,
3227 data_uncompressed_length=textlen,
3412 data_compression_mode=compression_mode,
3228 data_compression_mode=compression_mode,
3413 data_delta_base=deltainfo.base,
3229 data_delta_base=deltainfo.base,
3414 link_rev=link,
3230 link_rev=link,
3415 parent_rev_1=p1r,
3231 parent_rev_1=p1r,
3416 parent_rev_2=p2r,
3232 parent_rev_2=p2r,
3417 node_id=node,
3233 node_id=node,
3418 sidedata_offset=sidedata_offset,
3234 sidedata_offset=sidedata_offset,
3419 sidedata_compressed_length=len(serialized_sidedata),
3235 sidedata_compressed_length=len(serialized_sidedata),
3420 sidedata_compression_mode=sidedata_compression_mode,
3236 sidedata_compression_mode=sidedata_compression_mode,
3421 rank=rank,
3237 rank=rank,
3422 )
3238 )
3423
3239
3424 self.index.append(e)
3240 self.index.append(e)
3425 entry = self.index.entry_binary(curr)
3241 entry = self.index.entry_binary(curr)
3426 if curr == 0 and self._docket is None:
3242 if curr == 0 and self._docket is None:
3427 header = self._format_flags | self._format_version
3243 header = self._format_flags | self._format_version
3428 header = self.index.pack_header(header)
3244 header = self.index.pack_header(header)
3429 entry = header + entry
3245 entry = header + entry
3430 self._writeentry(
3246 self._writeentry(
3431 transaction,
3247 transaction,
3432 entry,
3248 entry,
3433 deltainfo.data,
3249 deltainfo.data,
3434 link,
3250 link,
3435 offset,
3251 offset,
3436 serialized_sidedata,
3252 serialized_sidedata,
3437 sidedata_offset,
3253 sidedata_offset,
3438 )
3254 )
3439
3255
3440 rawtext = btext[0]
3256 rawtext = btext[0]
3441
3257
3442 if alwayscache and rawtext is None:
3258 if alwayscache and rawtext is None:
3443 rawtext = deltacomputer.buildtext(revinfo)
3259 rawtext = deltacomputer.buildtext(revinfo)
3444
3260
3445 if type(rawtext) == bytes: # only accept immutable objects
3261 if type(rawtext) == bytes: # only accept immutable objects
3446 self._inner._revisioncache = (node, curr, rawtext)
3262 self._inner._revisioncache = (node, curr, rawtext)
3447 self._chainbasecache[curr] = deltainfo.chainbase
3263 self._chainbasecache[curr] = deltainfo.chainbase
3448 return curr
3264 return curr
3449
3265
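
A minimal sketch of the cachedelta normalization performed above; the policy values here are hypothetical stand-ins, not the real DELTA_BASE_REUSE_* constants.

def widen_cachedelta(cachedelta, lazy_delta_base):
    # A bare (baserev, delta) pair gains an explicit delta-base reuse policy:
    # reuse is attempted when general-delta plus lazy-delta-base allow it,
    # and refused otherwise.
    if cachedelta is not None and len(cachedelta) == 2:
        policy = b'try' if lazy_delta_base else b'no'  # stand-in values
        cachedelta = (cachedelta[0], cachedelta[1], policy)
    return cachedelta
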
3450 def _get_data_offset(self, prev):
3266 def _get_data_offset(self, prev):
3451 """Returns the current offset in the (in-transaction) data file.
3267 """Returns the current offset in the (in-transaction) data file.
3452 Versions < 2 of the revlog can compute this in O(1); revlog v2 needs a docket
3268 Versions < 2 of the revlog can compute this in O(1); revlog v2 needs a docket
3453 file to store that information: since sidedata can be rewritten to the
3269 file to store that information: since sidedata can be rewritten to the
3454 end of the data file within a transaction, you can have cases where, for
3270 end of the data file within a transaction, you can have cases where, for
3455 example, rev `n` does not have sidedata while rev `n - 1` does, leading
3271 example, rev `n` does not have sidedata while rev `n - 1` does, leading
3456 to `n - 1`'s sidedata being written after `n`'s data.
3272 to `n - 1`'s sidedata being written after `n`'s data.
3457
3273
3458 TODO cache this in a docket file before getting out of experimental."""
3274 TODO cache this in a docket file before getting out of experimental."""
3459 if self._docket is None:
3275 if self._docket is None:
3460 return self.end(prev)
3276 return self.end(prev)
3461 else:
3277 else:
3462 return self._docket.data_end
3278 return self._docket.data_end
3463
3279
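
An illustrative paraphrase of the branch above, assuming access to the same private attributes (`_docket`, `end`); it is not an API callers would normally use.

def data_offset_sketch(rl, prev):
    # Without a docket the next write simply goes at the end of the previous
    # revision; with a docket (revlog v2) the data file's in-transaction end
    # is tracked separately, because sidedata rewrites can reorder writes.
    if rl._docket is None:
        return rl.end(prev)
    return rl._docket.data_end
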
3464 def _writeentry(
3280 def _writeentry(
3465 self,
3281 self,
3466 transaction,
3282 transaction,
3467 entry,
3283 entry,
3468 data,
3284 data,
3469 link,
3285 link,
3470 offset,
3286 offset,
3471 sidedata,
3287 sidedata,
3472 sidedata_offset,
3288 sidedata_offset,
3473 ):
3289 ):
3474 # Files opened in a+ mode have inconsistent behavior on various
3290 # Files opened in a+ mode have inconsistent behavior on various
3475 # platforms. Windows requires that a file positioning call be made
3291 # platforms. Windows requires that a file positioning call be made
3476 # when the file handle transitions between reads and writes. See
3292 # when the file handle transitions between reads and writes. See
3477 # 3686fa2b8eee and the mixedfilemodewrapper in windows.py. On other
3293 # 3686fa2b8eee and the mixedfilemodewrapper in windows.py. On other
3478 # platforms, Python or the platform itself can be buggy. Some versions
3294 # platforms, Python or the platform itself can be buggy. Some versions
3479 # of Solaris have been observed to not append at the end of the file
3295 # of Solaris have been observed to not append at the end of the file
3480 # if the file was seeked to before the end. See issue4943 for more.
3296 # if the file was seeked to before the end. See issue4943 for more.
3481 #
3297 #
3482 # We work around this issue by inserting a seek() before writing.
3298 # We work around this issue by inserting a seek() before writing.
3483 # Note: This is likely not necessary on Python 3. However, because
3299 # Note: This is likely not necessary on Python 3. However, because
3484 # the file handle is reused for reads and may be seeked there, we need
3300 # the file handle is reused for reads and may be seeked there, we need
3485 # to be careful before changing this.
3301 # to be careful before changing this.
3486 index_end = data_end = sidedata_end = None
3302 index_end = data_end = sidedata_end = None
3487 if self._docket is not None:
3303 if self._docket is not None:
3488 index_end = self._docket.index_end
3304 index_end = self._docket.index_end
3489 data_end = self._docket.data_end
3305 data_end = self._docket.data_end
3490 sidedata_end = self._docket.sidedata_end
3306 sidedata_end = self._docket.sidedata_end
3491
3307
3492 files_end = self._inner.write_entry(
3308 files_end = self._inner.write_entry(
3493 transaction,
3309 transaction,
3494 entry,
3310 entry,
3495 data,
3311 data,
3496 link,
3312 link,
3497 offset,
3313 offset,
3498 sidedata,
3314 sidedata,
3499 sidedata_offset,
3315 sidedata_offset,
3500 index_end,
3316 index_end,
3501 data_end,
3317 data_end,
3502 sidedata_end,
3318 sidedata_end,
3503 )
3319 )
3504 self._enforceinlinesize(transaction)
3320 self._enforceinlinesize(transaction)
3505 if self._docket is not None:
3321 if self._docket is not None:
3506 self._docket.index_end = files_end[0]
3322 self._docket.index_end = files_end[0]
3507 self._docket.data_end = files_end[1]
3323 self._docket.data_end = files_end[1]
3508 self._docket.sidedata_end = files_end[2]
3324 self._docket.sidedata_end = files_end[2]
3509
3325
3510 nodemaputil.setup_persistent_nodemap(transaction, self)
3326 nodemaputil.setup_persistent_nodemap(transaction, self)
3511
3327
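
A generic illustration of the seek-before-write workaround described in the comment above; this is ordinary file handling, not revlog code.

import os

def append_safely(fh, payload):
    # Explicitly position at EOF before writing on a handle that is also used
    # for reads, so a stale read position cannot corrupt the append.
    fh.seek(0, os.SEEK_END)
    fh.write(payload)
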
3512 def addgroup(
3328 def addgroup(
3513 self,
3329 self,
3514 deltas,
3330 deltas,
3515 linkmapper,
3331 linkmapper,
3516 transaction,
3332 transaction,
3517 alwayscache=False,
3333 alwayscache=False,
3518 addrevisioncb=None,
3334 addrevisioncb=None,
3519 duplicaterevisioncb=None,
3335 duplicaterevisioncb=None,
3520 debug_info=None,
3336 debug_info=None,
3521 delta_base_reuse_policy=None,
3337 delta_base_reuse_policy=None,
3522 ):
3338 ):
3523 """
3339 """
3524 add a delta group
3340 add a delta group
3525
3341
3526 given a set of deltas, add them to the revision log. the
3342 given a set of deltas, add them to the revision log. the
3527 first delta is against its parent, which should be in our
3343 first delta is against its parent, which should be in our
3528 log, the rest are against the previous delta.
3344 log, the rest are against the previous delta.
3529
3345
3530 If ``addrevisioncb`` is defined, it will be called with arguments of
3346 If ``addrevisioncb`` is defined, it will be called with arguments of
3531 this revlog and the revision number that was added.
3347 this revlog and the revision number that was added.
3532 """
3348 """
3533
3349
3534 if self._adding_group:
3350 if self._adding_group:
3535 raise error.ProgrammingError(b'cannot nest addgroup() calls')
3351 raise error.ProgrammingError(b'cannot nest addgroup() calls')
3536
3352
3537 # read the default delta-base reuse policy from revlog config if the
3353 # read the default delta-base reuse policy from revlog config if the
3538 # group did not specify one.
3354 # group did not specify one.
3539 if delta_base_reuse_policy is None:
3355 if delta_base_reuse_policy is None:
3540 if (
3356 if (
3541 self.delta_config.general_delta
3357 self.delta_config.general_delta
3542 and self.delta_config.lazy_delta_base
3358 and self.delta_config.lazy_delta_base
3543 ):
3359 ):
3544 delta_base_reuse_policy = DELTA_BASE_REUSE_TRY
3360 delta_base_reuse_policy = DELTA_BASE_REUSE_TRY
3545 else:
3361 else:
3546 delta_base_reuse_policy = DELTA_BASE_REUSE_NO
3362 delta_base_reuse_policy = DELTA_BASE_REUSE_NO
3547
3363
3548 self._adding_group = True
3364 self._adding_group = True
3549 empty = True
3365 empty = True
3550 try:
3366 try:
3551 with self._writing(transaction):
3367 with self._writing(transaction):
3552 write_debug = None
3368 write_debug = None
3553 if self.delta_config.debug_delta:
3369 if self.delta_config.debug_delta:
3554 write_debug = transaction._report
3370 write_debug = transaction._report
3555 deltacomputer = deltautil.deltacomputer(
3371 deltacomputer = deltautil.deltacomputer(
3556 self,
3372 self,
3557 write_debug=write_debug,
3373 write_debug=write_debug,
3558 debug_info=debug_info,
3374 debug_info=debug_info,
3559 )
3375 )
3560 # loop through our set of deltas
3376 # loop through our set of deltas
3561 for data in deltas:
3377 for data in deltas:
3562 (
3378 (
3563 node,
3379 node,
3564 p1,
3380 p1,
3565 p2,
3381 p2,
3566 linknode,
3382 linknode,
3567 deltabase,
3383 deltabase,
3568 delta,
3384 delta,
3569 flags,
3385 flags,
3570 sidedata,
3386 sidedata,
3571 ) = data
3387 ) = data
3572 link = linkmapper(linknode)
3388 link = linkmapper(linknode)
3573 flags = flags or REVIDX_DEFAULT_FLAGS
3389 flags = flags or REVIDX_DEFAULT_FLAGS
3574
3390
3575 rev = self.index.get_rev(node)
3391 rev = self.index.get_rev(node)
3576 if rev is not None:
3392 if rev is not None:
3577 # this can happen if two branches make the same change
3393 # this can happen if two branches make the same change
3578 self._nodeduplicatecallback(transaction, rev)
3394 self._nodeduplicatecallback(transaction, rev)
3579 if duplicaterevisioncb:
3395 if duplicaterevisioncb:
3580 duplicaterevisioncb(self, rev)
3396 duplicaterevisioncb(self, rev)
3581 empty = False
3397 empty = False
3582 continue
3398 continue
3583
3399
3584 for p in (p1, p2):
3400 for p in (p1, p2):
3585 if not self.index.has_node(p):
3401 if not self.index.has_node(p):
3586 raise error.LookupError(
3402 raise error.LookupError(
3587 p, self.radix, _(b'unknown parent')
3403 p, self.radix, _(b'unknown parent')
3588 )
3404 )
3589
3405
3590 if not self.index.has_node(deltabase):
3406 if not self.index.has_node(deltabase):
3591 raise error.LookupError(
3407 raise error.LookupError(
3592 deltabase, self.display_id, _(b'unknown delta base')
3408 deltabase, self.display_id, _(b'unknown delta base')
3593 )
3409 )
3594
3410
3595 baserev = self.rev(deltabase)
3411 baserev = self.rev(deltabase)
3596
3412
3597 if baserev != nullrev and self.iscensored(baserev):
3413 if baserev != nullrev and self.iscensored(baserev):
3598 # if base is censored, delta must be full replacement in a
3414 # if base is censored, delta must be full replacement in a
3599 # single patch operation
3415 # single patch operation
3600 hlen = struct.calcsize(b">lll")
3416 hlen = struct.calcsize(b">lll")
3601 oldlen = self.rawsize(baserev)
3417 oldlen = self.rawsize(baserev)
3602 newlen = len(delta) - hlen
3418 newlen = len(delta) - hlen
3603 if delta[:hlen] != mdiff.replacediffheader(
3419 if delta[:hlen] != mdiff.replacediffheader(
3604 oldlen, newlen
3420 oldlen, newlen
3605 ):
3421 ):
3606 raise error.CensoredBaseError(
3422 raise error.CensoredBaseError(
3607 self.display_id, self.node(baserev)
3423 self.display_id, self.node(baserev)
3608 )
3424 )
3609
3425
3610 if not flags and self._peek_iscensored(baserev, delta):
3426 if not flags and self._peek_iscensored(baserev, delta):
3611 flags |= REVIDX_ISCENSORED
3427 flags |= REVIDX_ISCENSORED
3612
3428
3613 # We assume consumers of addrevisioncb will want to retrieve
3429 # We assume consumers of addrevisioncb will want to retrieve
3614 # the added revision, which will require a call to
3430 # the added revision, which will require a call to
3615 # revision(). revision() will fast path if there is a cache
3431 # revision(). revision() will fast path if there is a cache
3616 # hit. So, we tell _addrevision() to always cache in this case.
3432 # hit. So, we tell _addrevision() to always cache in this case.
3617 # We're only using addgroup() in the context of changegroup
3433 # We're only using addgroup() in the context of changegroup
3618 # generation so the revision data can always be handled as raw
3434 # generation so the revision data can always be handled as raw
3619 # by the flagprocessor.
3435 # by the flagprocessor.
3620 rev = self._addrevision(
3436 rev = self._addrevision(
3621 node,
3437 node,
3622 None,
3438 None,
3623 transaction,
3439 transaction,
3624 link,
3440 link,
3625 p1,
3441 p1,
3626 p2,
3442 p2,
3627 flags,
3443 flags,
3628 (baserev, delta, delta_base_reuse_policy),
3444 (baserev, delta, delta_base_reuse_policy),
3629 alwayscache=alwayscache,
3445 alwayscache=alwayscache,
3630 deltacomputer=deltacomputer,
3446 deltacomputer=deltacomputer,
3631 sidedata=sidedata,
3447 sidedata=sidedata,
3632 )
3448 )
3633
3449
3634 if addrevisioncb:
3450 if addrevisioncb:
3635 addrevisioncb(self, rev)
3451 addrevisioncb(self, rev)
3636 empty = False
3452 empty = False
3637 finally:
3453 finally:
3638 self._adding_group = False
3454 self._adding_group = False
3639 return not empty
3455 return not empty
3640
3456
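
A hypothetical wrapper showing the shape of the tuples addgroup() consumes and how addrevisioncb can be used to count what was actually added; `rl`, `tr`, `deltas` and `linkmapper` are assumed to come from the caller.

def count_incoming(rl, tr, deltas, linkmapper):
    # Each item of `deltas` unpacks to
    # (node, p1, p2, linknode, deltabase, delta, flags, sidedata).
    added = []
    rl.addgroup(
        deltas,
        linkmapper,
        tr,
        addrevisioncb=lambda revlog, rev: added.append(rev),
    )
    return len(added)
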
3641 def iscensored(self, rev):
3457 def iscensored(self, rev):
3642 """Check if a file revision is censored."""
3458 """Check if a file revision is censored."""
3643 if not self.feature_config.censorable:
3459 if not self.feature_config.censorable:
3644 return False
3460 return False
3645
3461
3646 return self.flags(rev) & REVIDX_ISCENSORED
3462 return self.flags(rev) & REVIDX_ISCENSORED
3647
3463
3648 def _peek_iscensored(self, baserev, delta):
3464 def _peek_iscensored(self, baserev, delta):
3649 """Quickly check if a delta produces a censored revision."""
3465 """Quickly check if a delta produces a censored revision."""
3650 if not self.feature_config.censorable:
3466 if not self.feature_config.censorable:
3651 return False
3467 return False
3652
3468
3653 return storageutil.deltaiscensored(delta, baserev, self.rawsize)
3469 return storageutil.deltaiscensored(delta, baserev, self.rawsize)
3654
3470
3655 def getstrippoint(self, minlink):
3471 def getstrippoint(self, minlink):
3656 """find the minimum rev that must be stripped to strip the linkrev
3472 """find the minimum rev that must be stripped to strip the linkrev
3657
3473
3658 Returns a tuple containing the minimum rev and a set of all revs that
3474 Returns a tuple containing the minimum rev and a set of all revs that
3659 have linkrevs that will be broken by this strip.
3475 have linkrevs that will be broken by this strip.
3660 """
3476 """
3661 return storageutil.resolvestripinfo(
3477 return storageutil.resolvestripinfo(
3662 minlink,
3478 minlink,
3663 len(self) - 1,
3479 len(self) - 1,
3664 self.headrevs(),
3480 self.headrevs(),
3665 self.linkrev,
3481 self.linkrev,
3666 self.parentrevs,
3482 self.parentrevs,
3667 )
3483 )
3668
3484
3669 def strip(self, minlink, transaction):
3485 def strip(self, minlink, transaction):
3670 """truncate the revlog on the first revision with a linkrev >= minlink
3486 """truncate the revlog on the first revision with a linkrev >= minlink
3671
3487
3672 This function is called when we're stripping revision minlink and
3488 This function is called when we're stripping revision minlink and
3673 its descendants from the repository.
3489 its descendants from the repository.
3674
3490
3675 We have to remove all revisions with linkrev >= minlink, because
3491 We have to remove all revisions with linkrev >= minlink, because
3676 the equivalent changelog revisions will be renumbered after the
3492 the equivalent changelog revisions will be renumbered after the
3677 strip.
3493 strip.
3678
3494
3679 So we truncate the revlog on the first of these revisions, and
3495 So we truncate the revlog on the first of these revisions, and
3680 trust that the caller has saved the revisions that shouldn't be
3496 trust that the caller has saved the revisions that shouldn't be
3681 removed and that it'll re-add them after this truncation.
3497 removed and that it'll re-add them after this truncation.
3682 """
3498 """
3683 if len(self) == 0:
3499 if len(self) == 0:
3684 return
3500 return
3685
3501
3686 rev, _ = self.getstrippoint(minlink)
3502 rev, _ = self.getstrippoint(minlink)
3687 if rev == len(self):
3503 if rev == len(self):
3688 return
3504 return
3689
3505
3690 # first truncate the files on disk
3506 # first truncate the files on disk
3691 data_end = self.start(rev)
3507 data_end = self.start(rev)
3692 if not self._inline:
3508 if not self._inline:
3693 transaction.add(self._datafile, data_end)
3509 transaction.add(self._datafile, data_end)
3694 end = rev * self.index.entry_size
3510 end = rev * self.index.entry_size
3695 else:
3511 else:
3696 end = data_end + (rev * self.index.entry_size)
3512 end = data_end + (rev * self.index.entry_size)
3697
3513
3698 if self._sidedatafile:
3514 if self._sidedatafile:
3699 sidedata_end = self.sidedata_cut_off(rev)
3515 sidedata_end = self.sidedata_cut_off(rev)
3700 transaction.add(self._sidedatafile, sidedata_end)
3516 transaction.add(self._sidedatafile, sidedata_end)
3701
3517
3702 transaction.add(self._indexfile, end)
3518 transaction.add(self._indexfile, end)
3703 if self._docket is not None:
3519 if self._docket is not None:
3704 # XXX we could leverage the docket while stripping. However, it is
3520 # XXX we could leverage the docket while stripping. However, it is
3705 # not powerful enough at the time of this comment
3521 # not powerful enough at the time of this comment
3706 self._docket.index_end = end
3522 self._docket.index_end = end
3707 self._docket.data_end = data_end
3523 self._docket.data_end = data_end
3708 self._docket.sidedata_end = sidedata_end
3524 self._docket.sidedata_end = sidedata_end
3709 self._docket.write(transaction, stripping=True)
3525 self._docket.write(transaction, stripping=True)
3710
3526
3711 # then reset internal state in memory to forget those revisions
3527 # then reset internal state in memory to forget those revisions
3712 self._chaininfocache = util.lrucachedict(500)
3528 self._chaininfocache = util.lrucachedict(500)
3713 self._inner.clear_cache()
3529 self._inner.clear_cache()
3714
3530
3715 del self.index[rev:-1]
3531 del self.index[rev:-1]
3716
3532
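
A hedged sketch of the contract described in the docstring above: the caller decides which revisions to keep, truncates, and is responsible for re-adding anything it saved.

def strip_from_linkrev(rl, tr, minlink):
    rev, broken = rl.getstrippoint(minlink)
    if rev == len(rl):
        return broken  # nothing to truncate
    # callers are expected to have saved any revisions they still want
    rl.strip(minlink, tr)
    return broken
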
3717 def checksize(self):
3533 def checksize(self):
3718 """Check size of index and data files
3534 """Check size of index and data files
3719
3535
3720 return a (dd, di) tuple.
3536 return a (dd, di) tuple.
3721 - dd: extra bytes for the "data" file
3537 - dd: extra bytes for the "data" file
3722 - di: extra bytes for the "index" file
3538 - di: extra bytes for the "index" file
3723
3539
3724 A healthy revlog will return (0, 0).
3540 A healthy revlog will return (0, 0).
3725 """
3541 """
3726 expected = 0
3542 expected = 0
3727 if len(self):
3543 if len(self):
3728 expected = max(0, self.end(len(self) - 1))
3544 expected = max(0, self.end(len(self) - 1))
3729
3545
3730 try:
3546 try:
3731 with self._datafp() as f:
3547 with self._datafp() as f:
3732 f.seek(0, io.SEEK_END)
3548 f.seek(0, io.SEEK_END)
3733 actual = f.tell()
3549 actual = f.tell()
3734 dd = actual - expected
3550 dd = actual - expected
3735 except FileNotFoundError:
3551 except FileNotFoundError:
3736 dd = 0
3552 dd = 0
3737
3553
3738 try:
3554 try:
3739 f = self.opener(self._indexfile)
3555 f = self.opener(self._indexfile)
3740 f.seek(0, io.SEEK_END)
3556 f.seek(0, io.SEEK_END)
3741 actual = f.tell()
3557 actual = f.tell()
3742 f.close()
3558 f.close()
3743 s = self.index.entry_size
3559 s = self.index.entry_size
3744 i = max(0, actual // s)
3560 i = max(0, actual // s)
3745 di = actual - (i * s)
3561 di = actual - (i * s)
3746 if self._inline:
3562 if self._inline:
3747 databytes = 0
3563 databytes = 0
3748 for r in self:
3564 for r in self:
3749 databytes += max(0, self.length(r))
3565 databytes += max(0, self.length(r))
3750 dd = 0
3566 dd = 0
3751 di = actual - len(self) * s - databytes
3567 di = actual - len(self) * s - databytes
3752 except FileNotFoundError:
3568 except FileNotFoundError:
3753 di = 0
3569 di = 0
3754
3570
3755 return (dd, di)
3571 return (dd, di)
3756
3572
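
A small sketch of how a caller might interpret the (dd, di) pair returned above.

def is_consistent(rl):
    # A healthy revlog reports no stray bytes in either the data or the
    # index file; anything else means truncated or trailing data.
    dd, di = rl.checksize()
    return dd == 0 and di == 0
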
3757 def files(self):
3573 def files(self):
3758 """return list of files that compose this revlog"""
3574 """return list of files that compose this revlog"""
3759 res = [self._indexfile]
3575 res = [self._indexfile]
3760 if self._docket_file is None:
3576 if self._docket_file is None:
3761 if not self._inline:
3577 if not self._inline:
3762 res.append(self._datafile)
3578 res.append(self._datafile)
3763 else:
3579 else:
3764 res.append(self._docket_file)
3580 res.append(self._docket_file)
3765 res.extend(self._docket.old_index_filepaths(include_empty=False))
3581 res.extend(self._docket.old_index_filepaths(include_empty=False))
3766 if self._docket.data_end:
3582 if self._docket.data_end:
3767 res.append(self._datafile)
3583 res.append(self._datafile)
3768 res.extend(self._docket.old_data_filepaths(include_empty=False))
3584 res.extend(self._docket.old_data_filepaths(include_empty=False))
3769 if self._docket.sidedata_end:
3585 if self._docket.sidedata_end:
3770 res.append(self._sidedatafile)
3586 res.append(self._sidedatafile)
3771 res.extend(self._docket.old_sidedata_filepaths(include_empty=False))
3587 res.extend(self._docket.old_sidedata_filepaths(include_empty=False))
3772 return res
3588 return res
3773
3589
3774 def emitrevisions(
3590 def emitrevisions(
3775 self,
3591 self,
3776 nodes,
3592 nodes,
3777 nodesorder=None,
3593 nodesorder=None,
3778 revisiondata=False,
3594 revisiondata=False,
3779 assumehaveparentrevisions=False,
3595 assumehaveparentrevisions=False,
3780 deltamode=repository.CG_DELTAMODE_STD,
3596 deltamode=repository.CG_DELTAMODE_STD,
3781 sidedata_helpers=None,
3597 sidedata_helpers=None,
3782 debug_info=None,
3598 debug_info=None,
3783 ):
3599 ):
3784 if nodesorder not in (b'nodes', b'storage', b'linear', None):
3600 if nodesorder not in (b'nodes', b'storage', b'linear', None):
3785 raise error.ProgrammingError(
3601 raise error.ProgrammingError(
3786 b'unhandled value for nodesorder: %s' % nodesorder
3602 b'unhandled value for nodesorder: %s' % nodesorder
3787 )
3603 )
3788
3604
3789 if nodesorder is None and not self.delta_config.general_delta:
3605 if nodesorder is None and not self.delta_config.general_delta:
3790 nodesorder = b'storage'
3606 nodesorder = b'storage'
3791
3607
3792 if (
3608 if (
3793 not self._storedeltachains
3609 not self._storedeltachains
3794 and deltamode != repository.CG_DELTAMODE_PREV
3610 and deltamode != repository.CG_DELTAMODE_PREV
3795 ):
3611 ):
3796 deltamode = repository.CG_DELTAMODE_FULL
3612 deltamode = repository.CG_DELTAMODE_FULL
3797
3613
3798 return storageutil.emitrevisions(
3614 return storageutil.emitrevisions(
3799 self,
3615 self,
3800 nodes,
3616 nodes,
3801 nodesorder,
3617 nodesorder,
3802 revlogrevisiondelta,
3618 revlogrevisiondelta,
3803 deltaparentfn=self.deltaparent,
3619 deltaparentfn=self.deltaparent,
3804 candeltafn=self._candelta,
3620 candeltafn=self._candelta,
3805 rawsizefn=self.rawsize,
3621 rawsizefn=self.rawsize,
3806 revdifffn=self.revdiff,
3622 revdifffn=self.revdiff,
3807 flagsfn=self.flags,
3623 flagsfn=self.flags,
3808 deltamode=deltamode,
3624 deltamode=deltamode,
3809 revisiondata=revisiondata,
3625 revisiondata=revisiondata,
3810 assumehaveparentrevisions=assumehaveparentrevisions,
3626 assumehaveparentrevisions=assumehaveparentrevisions,
3811 sidedata_helpers=sidedata_helpers,
3627 sidedata_helpers=sidedata_helpers,
3812 debug_info=debug_info,
3628 debug_info=debug_info,
3813 )
3629 )
3814
3630
3815 DELTAREUSEALWAYS = b'always'
3631 DELTAREUSEALWAYS = b'always'
3816 DELTAREUSESAMEREVS = b'samerevs'
3632 DELTAREUSESAMEREVS = b'samerevs'
3817 DELTAREUSENEVER = b'never'
3633 DELTAREUSENEVER = b'never'
3818
3634
3819 DELTAREUSEFULLADD = b'fulladd'
3635 DELTAREUSEFULLADD = b'fulladd'
3820
3636
3821 DELTAREUSEALL = {b'always', b'samerevs', b'never', b'fulladd'}
3637 DELTAREUSEALL = {b'always', b'samerevs', b'never', b'fulladd'}
3822
3638
3823 def clone(
3639 def clone(
3824 self,
3640 self,
3825 tr,
3641 tr,
3826 destrevlog,
3642 destrevlog,
3827 addrevisioncb=None,
3643 addrevisioncb=None,
3828 deltareuse=DELTAREUSESAMEREVS,
3644 deltareuse=DELTAREUSESAMEREVS,
3829 forcedeltabothparents=None,
3645 forcedeltabothparents=None,
3830 sidedata_helpers=None,
3646 sidedata_helpers=None,
3831 ):
3647 ):
3832 """Copy this revlog to another, possibly with format changes.
3648 """Copy this revlog to another, possibly with format changes.
3833
3649
3834 The destination revlog will contain the same revisions and nodes.
3650 The destination revlog will contain the same revisions and nodes.
3835 However, it may not be bit-for-bit identical due to e.g. delta encoding
3651 However, it may not be bit-for-bit identical due to e.g. delta encoding
3836 differences.
3652 differences.
3837
3653
3838 The ``deltareuse`` argument controls how deltas from the existing revlog
3654 The ``deltareuse`` argument controls how deltas from the existing revlog
3839 are preserved in the destination revlog. The argument can have the
3655 are preserved in the destination revlog. The argument can have the
3840 following values:
3656 following values:
3841
3657
3842 DELTAREUSEALWAYS
3658 DELTAREUSEALWAYS
3843 Deltas will always be reused (if possible), even if the destination
3659 Deltas will always be reused (if possible), even if the destination
3844 revlog would not select the same revisions for the delta. This is the
3660 revlog would not select the same revisions for the delta. This is the
3845 fastest mode of operation.
3661 fastest mode of operation.
3846 DELTAREUSESAMEREVS
3662 DELTAREUSESAMEREVS
3847 Deltas will be reused if the destination revlog would pick the same
3663 Deltas will be reused if the destination revlog would pick the same
3848 revisions for the delta. This mode strikes a balance between speed
3664 revisions for the delta. This mode strikes a balance between speed
3849 and optimization.
3665 and optimization.
3850 DELTAREUSENEVER
3666 DELTAREUSENEVER
3851 Deltas will never be reused. This is the slowest mode of execution.
3667 Deltas will never be reused. This is the slowest mode of execution.
3852 This mode can be used to recompute deltas (e.g. if the diff/delta
3668 This mode can be used to recompute deltas (e.g. if the diff/delta
3853 algorithm changes).
3669 algorithm changes).
3854 DELTAREUSEFULLADD
3670 DELTAREUSEFULLADD
3855 Revisions will be re-added as if they were new content. This is
3671 Revisions will be re-added as if they were new content. This is
3856 slower than DELTAREUSEALWAYS but allows more mechanisms to kick in,
3672 slower than DELTAREUSEALWAYS but allows more mechanisms to kick in,
3857 e.g. large file detection and handling.
3673 e.g. large file detection and handling.
3858
3674
3859 Delta computation can be slow, so the choice of delta reuse policy can
3675 Delta computation can be slow, so the choice of delta reuse policy can
3860 significantly affect run time.
3676 significantly affect run time.
3861
3677
3862 The default policy (``DELTAREUSESAMEREVS``) strikes a balance between
3678 The default policy (``DELTAREUSESAMEREVS``) strikes a balance between
3863 two extremes. Deltas will be reused if they are appropriate. But if the
3679 two extremes. Deltas will be reused if they are appropriate. But if the
3864 delta computation could choose a better base revision, it will do so. This means if you
3680 delta computation could choose a better base revision, it will do so. This means if you
3865 are converting a non-generaldelta revlog to a generaldelta revlog,
3681 are converting a non-generaldelta revlog to a generaldelta revlog,
3866 deltas will be recomputed if the delta's parent isn't a parent of the
3682 deltas will be recomputed if the delta's parent isn't a parent of the
3867 revision.
3683 revision.
3868
3684
3869 In addition to the delta policy, the ``forcedeltabothparents``
3685 In addition to the delta policy, the ``forcedeltabothparents``
3869 argument controls whether to force computing deltas against both parents
3685 argument controls whether to force computing deltas against both parents
3870 for merges. If not set, the destination revlog's existing setting is used.
3686 for merges. If not set, the destination revlog's existing setting is used.
3872
3688
3873 See `revlogutil.sidedata.get_sidedata_helpers` for the doc on
3689 See `revlogutil.sidedata.get_sidedata_helpers` for the doc on
3874 `sidedata_helpers`.
3690 `sidedata_helpers`.
3875 """
3691 """
3876 if deltareuse not in self.DELTAREUSEALL:
3692 if deltareuse not in self.DELTAREUSEALL:
3877 raise ValueError(
3693 raise ValueError(
3878 _(b'value for deltareuse invalid: %s') % deltareuse
3694 _(b'value for deltareuse invalid: %s') % deltareuse
3879 )
3695 )
3880
3696
3881 if len(destrevlog):
3697 if len(destrevlog):
3882 raise ValueError(_(b'destination revlog is not empty'))
3698 raise ValueError(_(b'destination revlog is not empty'))
3883
3699
3884 if getattr(self, 'filteredrevs', None):
3700 if getattr(self, 'filteredrevs', None):
3885 raise ValueError(_(b'source revlog has filtered revisions'))
3701 raise ValueError(_(b'source revlog has filtered revisions'))
3886 if getattr(destrevlog, 'filteredrevs', None):
3702 if getattr(destrevlog, 'filteredrevs', None):
3887 raise ValueError(_(b'destination revlog has filtered revisions'))
3703 raise ValueError(_(b'destination revlog has filtered revisions'))
3888
3704
3889 # lazydelta and lazydeltabase control whether to reuse a cached delta,
3705 # lazydelta and lazydeltabase control whether to reuse a cached delta,
3890 # if possible.
3706 # if possible.
3891 old_delta_config = destrevlog.delta_config
3707 old_delta_config = destrevlog.delta_config
3892 destrevlog.delta_config = destrevlog.delta_config.copy()
3708 destrevlog.delta_config = destrevlog.delta_config.copy()
3893
3709
3894 try:
3710 try:
3895 if deltareuse == self.DELTAREUSEALWAYS:
3711 if deltareuse == self.DELTAREUSEALWAYS:
3896 destrevlog.delta_config.lazy_delta_base = True
3712 destrevlog.delta_config.lazy_delta_base = True
3897 destrevlog.delta_config.lazy_delta = True
3713 destrevlog.delta_config.lazy_delta = True
3898 elif deltareuse == self.DELTAREUSESAMEREVS:
3714 elif deltareuse == self.DELTAREUSESAMEREVS:
3899 destrevlog.delta_config.lazy_delta_base = False
3715 destrevlog.delta_config.lazy_delta_base = False
3900 destrevlog.delta_config.lazy_delta = True
3716 destrevlog.delta_config.lazy_delta = True
3901 elif deltareuse == self.DELTAREUSENEVER:
3717 elif deltareuse == self.DELTAREUSENEVER:
3902 destrevlog.delta_config.lazy_delta_base = False
3718 destrevlog.delta_config.lazy_delta_base = False
3903 destrevlog.delta_config.lazy_delta = False
3719 destrevlog.delta_config.lazy_delta = False
3904
3720
3905 delta_both_parents = (
3721 delta_both_parents = (
3906 forcedeltabothparents or old_delta_config.delta_both_parents
3722 forcedeltabothparents or old_delta_config.delta_both_parents
3907 )
3723 )
3908 destrevlog.delta_config.delta_both_parents = delta_both_parents
3724 destrevlog.delta_config.delta_both_parents = delta_both_parents
3909
3725
3910 with self.reading(), destrevlog._writing(tr):
3726 with self.reading(), destrevlog._writing(tr):
3911 self._clone(
3727 self._clone(
3912 tr,
3728 tr,
3913 destrevlog,
3729 destrevlog,
3914 addrevisioncb,
3730 addrevisioncb,
3915 deltareuse,
3731 deltareuse,
3916 forcedeltabothparents,
3732 forcedeltabothparents,
3917 sidedata_helpers,
3733 sidedata_helpers,
3918 )
3734 )
3919
3735
3920 finally:
3736 finally:
3921 destrevlog.delta_config = old_delta_config
3737 destrevlog.delta_config = old_delta_config
3922
3738
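
A hypothetical use of the policies documented above: forcing every delta to be recomputed in the destination, the slowest but most thorough option.

def recompress_into(rl, tr, destrevlog):
    # DELTAREUSENEVER recomputes all deltas, e.g. after a delta algorithm
    # change; destrevlog must be empty, as clone() enforces.
    rl.clone(tr, destrevlog, deltareuse=rl.DELTAREUSENEVER)
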
3923 def _clone(
3739 def _clone(
3924 self,
3740 self,
3925 tr,
3741 tr,
3926 destrevlog,
3742 destrevlog,
3927 addrevisioncb,
3743 addrevisioncb,
3928 deltareuse,
3744 deltareuse,
3929 forcedeltabothparents,
3745 forcedeltabothparents,
3930 sidedata_helpers,
3746 sidedata_helpers,
3931 ):
3747 ):
3932 """perform the core duty of `revlog.clone` after parameter processing"""
3748 """perform the core duty of `revlog.clone` after parameter processing"""
3933 write_debug = None
3749 write_debug = None
3934 if self.delta_config.debug_delta:
3750 if self.delta_config.debug_delta:
3935 write_debug = tr._report
3751 write_debug = tr._report
3936 deltacomputer = deltautil.deltacomputer(
3752 deltacomputer = deltautil.deltacomputer(
3937 destrevlog,
3753 destrevlog,
3938 write_debug=write_debug,
3754 write_debug=write_debug,
3939 )
3755 )
3940 index = self.index
3756 index = self.index
3941 for rev in self:
3757 for rev in self:
3942 entry = index[rev]
3758 entry = index[rev]
3943
3759
3944 # Some classes override linkrev to take filtered revs into
3760 # Some classes override linkrev to take filtered revs into
3945 # account. Use raw entry from index.
3761 # account. Use raw entry from index.
3946 flags = entry[0] & 0xFFFF
3762 flags = entry[0] & 0xFFFF
3947 linkrev = entry[4]
3763 linkrev = entry[4]
3948 p1 = index[entry[5]][7]
3764 p1 = index[entry[5]][7]
3949 p2 = index[entry[6]][7]
3765 p2 = index[entry[6]][7]
3950 node = entry[7]
3766 node = entry[7]
3951
3767
3952 # (Possibly) reuse the delta from the revlog if allowed and
3768 # (Possibly) reuse the delta from the revlog if allowed and
3953 # the revlog chunk is a delta.
3769 # the revlog chunk is a delta.
3954 cachedelta = None
3770 cachedelta = None
3955 rawtext = None
3771 rawtext = None
3956 if deltareuse == self.DELTAREUSEFULLADD:
3772 if deltareuse == self.DELTAREUSEFULLADD:
3957 text = self._revisiondata(rev)
3773 text = self._revisiondata(rev)
3958 sidedata = self.sidedata(rev)
3774 sidedata = self.sidedata(rev)
3959
3775
3960 if sidedata_helpers is not None:
3776 if sidedata_helpers is not None:
3961 (sidedata, new_flags) = sidedatautil.run_sidedata_helpers(
3777 (sidedata, new_flags) = sidedatautil.run_sidedata_helpers(
3962 self, sidedata_helpers, sidedata, rev
3778 self, sidedata_helpers, sidedata, rev
3963 )
3779 )
3964 flags = flags | new_flags[0] & ~new_flags[1]
3780 flags = flags | new_flags[0] & ~new_flags[1]
3965
3781
3966 destrevlog.addrevision(
3782 destrevlog.addrevision(
3967 text,
3783 text,
3968 tr,
3784 tr,
3969 linkrev,
3785 linkrev,
3970 p1,
3786 p1,
3971 p2,
3787 p2,
3972 cachedelta=cachedelta,
3788 cachedelta=cachedelta,
3973 node=node,
3789 node=node,
3974 flags=flags,
3790 flags=flags,
3975 deltacomputer=deltacomputer,
3791 deltacomputer=deltacomputer,
3976 sidedata=sidedata,
3792 sidedata=sidedata,
3977 )
3793 )
3978 else:
3794 else:
3979 if destrevlog.delta_config.lazy_delta:
3795 if destrevlog.delta_config.lazy_delta:
3980 dp = self.deltaparent(rev)
3796 dp = self.deltaparent(rev)
3981 if dp != nullrev:
3797 if dp != nullrev:
3982 cachedelta = (dp, bytes(self._inner._chunk(rev)))
3798 cachedelta = (dp, bytes(self._inner._chunk(rev)))
3983
3799
3984 sidedata = None
3800 sidedata = None
3985 if not cachedelta:
3801 if not cachedelta:
3986 try:
3802 try:
3987 rawtext = self._revisiondata(rev)
3803 rawtext = self._revisiondata(rev)
3988 except error.CensoredNodeError as censored:
3804 except error.CensoredNodeError as censored:
3989 assert flags & REVIDX_ISCENSORED
3805 assert flags & REVIDX_ISCENSORED
3990 rawtext = censored.tombstone
3806 rawtext = censored.tombstone
3991 sidedata = self.sidedata(rev)
3807 sidedata = self.sidedata(rev)
3992 if sidedata is None:
3808 if sidedata is None:
3993 sidedata = self.sidedata(rev)
3809 sidedata = self.sidedata(rev)
3994
3810
3995 if sidedata_helpers is not None:
3811 if sidedata_helpers is not None:
3996 (sidedata, new_flags) = sidedatautil.run_sidedata_helpers(
3812 (sidedata, new_flags) = sidedatautil.run_sidedata_helpers(
3997 self, sidedata_helpers, sidedata, rev
3813 self, sidedata_helpers, sidedata, rev
3998 )
3814 )
3999 flags = flags | new_flags[0] & ~new_flags[1]
3815 flags = flags | new_flags[0] & ~new_flags[1]
4000
3816
4001 destrevlog._addrevision(
3817 destrevlog._addrevision(
4002 node,
3818 node,
4003 rawtext,
3819 rawtext,
4004 tr,
3820 tr,
4005 linkrev,
3821 linkrev,
4006 p1,
3822 p1,
4007 p2,
3823 p2,
4008 flags,
3824 flags,
4009 cachedelta,
3825 cachedelta,
4010 deltacomputer=deltacomputer,
3826 deltacomputer=deltacomputer,
4011 sidedata=sidedata,
3827 sidedata=sidedata,
4012 )
3828 )
4013
3829
4014 if addrevisioncb:
3830 if addrevisioncb:
4015 addrevisioncb(self, rev, node)
3831 addrevisioncb(self, rev, node)
4016
3832
4017 def censorrevision(self, tr, censornode, tombstone=b''):
3833 def censorrevision(self, tr, censornode, tombstone=b''):
4018 if self._format_version == REVLOGV0:
3834 if self._format_version == REVLOGV0:
4019 raise error.RevlogError(
3835 raise error.RevlogError(
4020 _(b'cannot censor with version %d revlogs')
3836 _(b'cannot censor with version %d revlogs')
4021 % self._format_version
3837 % self._format_version
4022 )
3838 )
4023 elif self._format_version == REVLOGV1:
3839 elif self._format_version == REVLOGV1:
4024 rewrite.v1_censor(self, tr, censornode, tombstone)
3840 rewrite.v1_censor(self, tr, censornode, tombstone)
4025 else:
3841 else:
4026 rewrite.v2_censor(self, tr, censornode, tombstone)
3842 rewrite.v2_censor(self, tr, censornode, tombstone)
4027
3843
4028 def verifyintegrity(self, state):
3844 def verifyintegrity(self, state):
4029 """Verifies the integrity of the revlog.
3845 """Verifies the integrity of the revlog.
4030
3846
4031 Yields ``revlogproblem`` instances describing problems that are
3847 Yields ``revlogproblem`` instances describing problems that are
4032 found.
3848 found.
4033 """
3849 """
4034 dd, di = self.checksize()
3850 dd, di = self.checksize()
4035 if dd:
3851 if dd:
4036 yield revlogproblem(error=_(b'data length off by %d bytes') % dd)
3852 yield revlogproblem(error=_(b'data length off by %d bytes') % dd)
4037 if di:
3853 if di:
4038 yield revlogproblem(error=_(b'index contains %d extra bytes') % di)
3854 yield revlogproblem(error=_(b'index contains %d extra bytes') % di)
4039
3855
4040 version = self._format_version
3856 version = self._format_version
4041
3857
4042 # The verifier tells us what version revlog we should be.
3858 # The verifier tells us what version revlog we should be.
4043 if version != state[b'expectedversion']:
3859 if version != state[b'expectedversion']:
4044 yield revlogproblem(
3860 yield revlogproblem(
4045 warning=_(b"warning: '%s' uses revlog format %d; expected %d")
3861 warning=_(b"warning: '%s' uses revlog format %d; expected %d")
4046 % (self.display_id, version, state[b'expectedversion'])
3862 % (self.display_id, version, state[b'expectedversion'])
4047 )
3863 )
4048
3864
4049 state[b'skipread'] = set()
3865 state[b'skipread'] = set()
4050 state[b'safe_renamed'] = set()
3866 state[b'safe_renamed'] = set()
4051
3867
4052 for rev in self:
3868 for rev in self:
4053 node = self.node(rev)
3869 node = self.node(rev)
4054
3870
4055 # Verify contents. 4 cases to care about:
3871 # Verify contents. 4 cases to care about:
4056 #
3872 #
4057 # common: the most common case
3873 # common: the most common case
4058 # rename: with a rename
3874 # rename: with a rename
4059 # meta: file content starts with b'\1\n', the metadata
3875 # meta: file content starts with b'\1\n', the metadata
4060 # header defined in filelog.py, but without a rename
3876 # header defined in filelog.py, but without a rename
4061 # ext: content stored externally
3877 # ext: content stored externally
4062 #
3878 #
4063 # More formally, their differences are shown below:
3879 # More formally, their differences are shown below:
4064 #
3880 #
4065 # | common | rename | meta | ext
3881 # | common | rename | meta | ext
4066 # -------------------------------------------------------
3882 # -------------------------------------------------------
4067 # flags() | 0 | 0 | 0 | not 0
3883 # flags() | 0 | 0 | 0 | not 0
4068 # renamed() | False | True | False | ?
3884 # renamed() | False | True | False | ?
4069 # rawtext[0:2]=='\1\n'| False | True | True | ?
3885 # rawtext[0:2]=='\1\n'| False | True | True | ?
4070 #
3886 #
4071 # "rawtext" means the raw text stored in revlog data, which
3887 # "rawtext" means the raw text stored in revlog data, which
4072 # could be retrieved by "rawdata(rev)". "text"
3888 # could be retrieved by "rawdata(rev)". "text"
4073 # mentioned below is "revision(rev)".
3889 # mentioned below is "revision(rev)".
4074 #
3890 #
4075 # There are 3 different lengths stored physically:
3891 # There are 3 different lengths stored physically:
4076 # 1. L1: rawsize, stored in revlog index
3892 # 1. L1: rawsize, stored in revlog index
4077 # 2. L2: len(rawtext), stored in revlog data
3893 # 2. L2: len(rawtext), stored in revlog data
4078 # 3. L3: len(text), stored in revlog data if flags==0, or
3894 # 3. L3: len(text), stored in revlog data if flags==0, or
4079 # possibly somewhere else if flags!=0
3895 # possibly somewhere else if flags!=0
4080 #
3896 #
4081 # L1 should be equal to L2. L3 could be different from them.
3897 # L1 should be equal to L2. L3 could be different from them.
4082 # "text" may or may not affect commit hash depending on flag
3898 # "text" may or may not affect commit hash depending on flag
4083 # processors (see flagutil.addflagprocessor).
3899 # processors (see flagutil.addflagprocessor).
4084 #
3900 #
4085 # | common | rename | meta | ext
3901 # | common | rename | meta | ext
4086 # -------------------------------------------------
3902 # -------------------------------------------------
4087 # rawsize() | L1 | L1 | L1 | L1
3903 # rawsize() | L1 | L1 | L1 | L1
4088 # size() | L1 | L2-LM | L1(*) | L1 (?)
3904 # size() | L1 | L2-LM | L1(*) | L1 (?)
4089 # len(rawtext) | L2 | L2 | L2 | L2
3905 # len(rawtext) | L2 | L2 | L2 | L2
4090 # len(text) | L2 | L2 | L2 | L3
3906 # len(text) | L2 | L2 | L2 | L3
4091 # len(read()) | L2 | L2-LM | L2-LM | L3 (?)
3907 # len(read()) | L2 | L2-LM | L2-LM | L3 (?)
4092 #
3908 #
4093 # LM: length of metadata, depending on rawtext
3909 # LM: length of metadata, depending on rawtext
4094 # (*): not ideal, see comment in filelog.size
3910 # (*): not ideal, see comment in filelog.size
4095 # (?): could be "- len(meta)" if the resolved content has
3911 # (?): could be "- len(meta)" if the resolved content has
4096 # rename metadata
3912 # rename metadata
4097 #
3913 #
4098 # Checks needed to be done:
3914 # Checks needed to be done:
4099 # 1. length check: L1 == L2, in all cases.
3915 # 1. length check: L1 == L2, in all cases.
4100 # 2. hash check: depending on flag processor, we may need to
3916 # 2. hash check: depending on flag processor, we may need to
4101 # use either "text" (external), or "rawtext" (in revlog).
3917 # use either "text" (external), or "rawtext" (in revlog).
4102
3918
4103 try:
3919 try:
4104 skipflags = state.get(b'skipflags', 0)
3920 skipflags = state.get(b'skipflags', 0)
4105 if skipflags:
3921 if skipflags:
4106 skipflags &= self.flags(rev)
3922 skipflags &= self.flags(rev)
4107
3923
4108 _verify_revision(self, skipflags, state, node)
3924 _verify_revision(self, skipflags, state, node)
4109
3925
4110 l1 = self.rawsize(rev)
3926 l1 = self.rawsize(rev)
4111 l2 = len(self.rawdata(node))
3927 l2 = len(self.rawdata(node))
4112
3928
4113 if l1 != l2:
3929 if l1 != l2:
4114 yield revlogproblem(
3930 yield revlogproblem(
4115 error=_(b'unpacked size is %d, %d expected') % (l2, l1),
3931 error=_(b'unpacked size is %d, %d expected') % (l2, l1),
4116 node=node,
3932 node=node,
4117 )
3933 )
4118
3934
4119 except error.CensoredNodeError:
3935 except error.CensoredNodeError:
4120 if state[b'erroroncensored']:
3936 if state[b'erroroncensored']:
4121 yield revlogproblem(
3937 yield revlogproblem(
4122 error=_(b'censored file data'), node=node
3938 error=_(b'censored file data'), node=node
4123 )
3939 )
4124 state[b'skipread'].add(node)
3940 state[b'skipread'].add(node)
4125 except Exception as e:
3941 except Exception as e:
4126 yield revlogproblem(
3942 yield revlogproblem(
4127 error=_(b'unpacking %s: %s')
3943 error=_(b'unpacking %s: %s')
4128 % (short(node), stringutil.forcebytestr(e)),
3944 % (short(node), stringutil.forcebytestr(e)),
4129 node=node,
3945 node=node,
4130 )
3946 )
4131 state[b'skipread'].add(node)
3947 state[b'skipread'].add(node)
4132
3948
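
A sketch of consuming the generator above, assuming `state` has been prepared by the caller (e.g. with b'expectedversion' and b'erroroncensored') and that revlogproblem exposes the `error` field used in the yields.

def collect_errors(rl, state):
    return [p.error for p in rl.verifyintegrity(state) if p.error]
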
4133 def storageinfo(
3949 def storageinfo(
4134 self,
3950 self,
4135 exclusivefiles=False,
3951 exclusivefiles=False,
4136 sharedfiles=False,
3952 sharedfiles=False,
4137 revisionscount=False,
3953 revisionscount=False,
4138 trackedsize=False,
3954 trackedsize=False,
4139 storedsize=False,
3955 storedsize=False,
4140 ):
3956 ):
4141 d = {}
3957 d = {}
4142
3958
4143 if exclusivefiles:
3959 if exclusivefiles:
4144 d[b'exclusivefiles'] = [(self.opener, self._indexfile)]
3960 d[b'exclusivefiles'] = [(self.opener, self._indexfile)]
4145 if not self._inline:
3961 if not self._inline:
4146 d[b'exclusivefiles'].append((self.opener, self._datafile))
3962 d[b'exclusivefiles'].append((self.opener, self._datafile))
4147
3963
4148 if sharedfiles:
3964 if sharedfiles:
4149 d[b'sharedfiles'] = []
3965 d[b'sharedfiles'] = []
4150
3966
4151 if revisionscount:
3967 if revisionscount:
4152 d[b'revisionscount'] = len(self)
3968 d[b'revisionscount'] = len(self)
4153
3969
4154 if trackedsize:
3970 if trackedsize:
4155 d[b'trackedsize'] = sum(map(self.rawsize, iter(self)))
3971 d[b'trackedsize'] = sum(map(self.rawsize, iter(self)))
4156
3972
4157 if storedsize:
3973 if storedsize:
4158 d[b'storedsize'] = sum(
3974 d[b'storedsize'] = sum(
4159 self.opener.stat(path).st_size for path in self.files()
3975 self.opener.stat(path).st_size for path in self.files()
4160 )
3976 )
4161
3977
4162 return d
3978 return d
4163
3979
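
A minimal sketch: storageinfo() only fills in what is requested, so a caller interested in on-disk size asks for just that.

def on_disk_size(rl):
    return rl.storageinfo(storedsize=True)[b'storedsize']
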
4164 def rewrite_sidedata(self, transaction, helpers, startrev, endrev):
3980 def rewrite_sidedata(self, transaction, helpers, startrev, endrev):
4165 if not self.feature_config.has_side_data:
3981 if not self.feature_config.has_side_data:
4166 return
3982 return
4167 # revlog formats with sidedata support do not support inline storage
3983 # revlog formats with sidedata support do not support inline storage
4168 assert not self._inline
3984 assert not self._inline
4169 if not helpers[1] and not helpers[2]:
3985 if not helpers[1] and not helpers[2]:
4170 # Nothing to generate or remove
3986 # Nothing to generate or remove
4171 return
3987 return
4172
3988
4173 new_entries = []
3989 new_entries = []
4174 # append the new sidedata
3990 # append the new sidedata
4175 with self._writing(transaction):
3991 with self._writing(transaction):
4176 ifh, dfh, sdfh = self._inner._writinghandles
3992 ifh, dfh, sdfh = self._inner._writinghandles
4177 dfh.seek(self._docket.sidedata_end, os.SEEK_SET)
3993 dfh.seek(self._docket.sidedata_end, os.SEEK_SET)
4178
3994
4179 current_offset = sdfh.tell()
3995 current_offset = sdfh.tell()
4180 for rev in range(startrev, endrev + 1):
3996 for rev in range(startrev, endrev + 1):
4181 entry = self.index[rev]
3997 entry = self.index[rev]
4182 new_sidedata, flags = sidedatautil.run_sidedata_helpers(
3998 new_sidedata, flags = sidedatautil.run_sidedata_helpers(
4183 store=self,
3999 store=self,
4184 sidedata_helpers=helpers,
4000 sidedata_helpers=helpers,
4185 sidedata={},
4001 sidedata={},
4186 rev=rev,
4002 rev=rev,
4187 )
4003 )
4188
4004
4189 serialized_sidedata = sidedatautil.serialize_sidedata(
4005 serialized_sidedata = sidedatautil.serialize_sidedata(
4190 new_sidedata
4006 new_sidedata
4191 )
4007 )
4192
4008
4193 sidedata_compression_mode = COMP_MODE_INLINE
4009 sidedata_compression_mode = COMP_MODE_INLINE
4194 if serialized_sidedata and self.feature_config.has_side_data:
4010 if serialized_sidedata and self.feature_config.has_side_data:
4195 sidedata_compression_mode = COMP_MODE_PLAIN
4011 sidedata_compression_mode = COMP_MODE_PLAIN
4196 h, comp_sidedata = self._inner.compress(serialized_sidedata)
4012 h, comp_sidedata = self._inner.compress(serialized_sidedata)
4197 if (
4013 if (
4198 h != b'u'
4014 h != b'u'
4199 and comp_sidedata[0] != b'\0'
4015 and comp_sidedata[0] != b'\0'
4200 and len(comp_sidedata) < len(serialized_sidedata)
4016 and len(comp_sidedata) < len(serialized_sidedata)
4201 ):
4017 ):
4202 assert not h
4018 assert not h
4203 if (
4019 if (
4204 comp_sidedata[0]
4020 comp_sidedata[0]
4205 == self._docket.default_compression_header
4021 == self._docket.default_compression_header
4206 ):
4022 ):
4207 sidedata_compression_mode = COMP_MODE_DEFAULT
4023 sidedata_compression_mode = COMP_MODE_DEFAULT
4208 serialized_sidedata = comp_sidedata
4024 serialized_sidedata = comp_sidedata
4209 else:
4025 else:
4210 sidedata_compression_mode = COMP_MODE_INLINE
4026 sidedata_compression_mode = COMP_MODE_INLINE
4211 serialized_sidedata = comp_sidedata
4027 serialized_sidedata = comp_sidedata
4212 if entry[8] != 0 or entry[9] != 0:
4028 if entry[8] != 0 or entry[9] != 0:
4213 # rewriting entries that already have sidedata is not
4029 # rewriting entries that already have sidedata is not
4214 # supported yet, because it introduces garbage data in the
4030 # supported yet, because it introduces garbage data in the
4215 # revlog.
4031 # revlog.
4216 msg = b"rewriting existing sidedata is not supported yet"
4032 msg = b"rewriting existing sidedata is not supported yet"
4217 raise error.Abort(msg)
4033 raise error.Abort(msg)
4218
4034
4219 # Apply (potential) flags to add and to remove after running
4035 # Apply (potential) flags to add and to remove after running
4220 # the sidedata helpers
4036 # the sidedata helpers
4221 new_offset_flags = entry[0] | flags[0] & ~flags[1]
4037 new_offset_flags = entry[0] | flags[0] & ~flags[1]
4222 entry_update = (
4038 entry_update = (
4223 current_offset,
4039 current_offset,
4224 len(serialized_sidedata),
4040 len(serialized_sidedata),
4225 new_offset_flags,
4041 new_offset_flags,
4226 sidedata_compression_mode,
4042 sidedata_compression_mode,
4227 )
4043 )
4228
4044
4229 # the sidedata computation might have move the file cursors around
4045 # the sidedata computation might have move the file cursors around
4230 sdfh.seek(current_offset, os.SEEK_SET)
4046 sdfh.seek(current_offset, os.SEEK_SET)
4231 sdfh.write(serialized_sidedata)
4047 sdfh.write(serialized_sidedata)
4232 new_entries.append(entry_update)
4048 new_entries.append(entry_update)
4233 current_offset += len(serialized_sidedata)
4049 current_offset += len(serialized_sidedata)
4234 self._docket.sidedata_end = sdfh.tell()
4050 self._docket.sidedata_end = sdfh.tell()
4235
4051
4236 # rewrite the new index entries
4052 # rewrite the new index entries
4237 ifh.seek(startrev * self.index.entry_size)
4053 ifh.seek(startrev * self.index.entry_size)
4238 for i, e in enumerate(new_entries):
4054 for i, e in enumerate(new_entries):
4239 rev = startrev + i
4055 rev = startrev + i
4240 self.index.replace_sidedata_info(rev, *e)
4056 self.index.replace_sidedata_info(rev, *e)
4241 packed = self.index.entry_binary(rev)
4057 packed = self.index.entry_binary(rev)
4242 if rev == 0 and self._docket is None:
4058 if rev == 0 and self._docket is None:
4243 header = self._format_flags | self._format_version
4059 header = self._format_flags | self._format_version
4244 header = self.index.pack_header(header)
4060 header = self.index.pack_header(header)
4245 packed = header + packed
4061 packed = header + packed
4246 ifh.write(packed)
4062 ifh.write(packed)
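
To make the compression-mode branching in rewrite_sidedata() easier to follow, here is a self-contained sketch of the same decision. It uses zlib as a stand-in for the revlog's configured compressor, and the mode constants and helper name are assumptions for this illustration:

import zlib

COMP_MODE_PLAIN = 0    # assumed value: payload stored as-is, no header
COMP_MODE_DEFAULT = 1  # assumed value: compressed, header implied by the docket
COMP_MODE_INLINE = 2   # assumed value: compression header kept with the payload


def choose_sidedata_storage(serialized, default_header=b'x'):
    """Pick a storage mode for a serialized sidedata blob.

    Mirrors the flow above: start from "plain", switch to the compressed
    representation only when it is unambiguous and strictly smaller, and
    drop the engine header when it matches the docket's default one.
    (The real compressor can also return an explicit 'u' marker, which
    forces the plain path; zlib has no such concept.)
    """
    if not serialized:
        return COMP_MODE_INLINE, serialized

    mode = COMP_MODE_PLAIN
    payload = serialized
    compressed = zlib.compress(serialized)  # stand-in for self._inner.compress()
    if compressed[0:1] != b'\0' and len(compressed) < len(serialized):
        if compressed[0:1] == default_header:
            mode = COMP_MODE_DEFAULT  # header byte reconstructed from the docket
        else:
            mode = COMP_MODE_INLINE   # keep the engine header with the data
        payload = compressed
    return mode, payload
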
@@ -1,1107 +1,1118 b''
1 Setting up test
1 Setting up test
2
2
3 $ hg init test
3 $ hg init test
4 $ cd test
4 $ cd test
5 $ echo 0 > afile
5 $ echo 0 > afile
6 $ hg add afile
6 $ hg add afile
7 $ hg commit -m "0.0"
7 $ hg commit -m "0.0"
8 $ echo 1 >> afile
8 $ echo 1 >> afile
9 $ hg commit -m "0.1"
9 $ hg commit -m "0.1"
10 $ echo 2 >> afile
10 $ echo 2 >> afile
11 $ hg commit -m "0.2"
11 $ hg commit -m "0.2"
12 $ echo 3 >> afile
12 $ echo 3 >> afile
13 $ hg commit -m "0.3"
13 $ hg commit -m "0.3"
14 $ hg update -C 0
14 $ hg update -C 0
15 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
15 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
16 $ echo 1 >> afile
16 $ echo 1 >> afile
17 $ hg commit -m "1.1"
17 $ hg commit -m "1.1"
18 created new head
18 created new head
19 $ echo 2 >> afile
19 $ echo 2 >> afile
20 $ hg commit -m "1.2"
20 $ hg commit -m "1.2"
21 $ echo "a line" > fred
21 $ echo "a line" > fred
22 $ echo 3 >> afile
22 $ echo 3 >> afile
23 $ hg add fred
23 $ hg add fred
24 $ hg commit -m "1.3"
24 $ hg commit -m "1.3"
25 $ hg mv afile adifferentfile
25 $ hg mv afile adifferentfile
26 $ hg commit -m "1.3m"
26 $ hg commit -m "1.3m"
27 $ hg update -C 3
27 $ hg update -C 3
28 1 files updated, 0 files merged, 2 files removed, 0 files unresolved
28 1 files updated, 0 files merged, 2 files removed, 0 files unresolved
29 $ hg mv afile anotherfile
29 $ hg mv afile anotherfile
30 $ hg commit -m "0.3m"
30 $ hg commit -m "0.3m"
31 $ hg verify -q
31 $ hg verify -q
32 $ cd ..
32 $ cd ..
33 $ hg init empty
33 $ hg init empty
34
34
35 Bundle and phase
35 Bundle and phase
36
36
37 $ hg -R test phase --force --secret 0
37 $ hg -R test phase --force --secret 0
38 $ hg -R test bundle phase.hg empty
38 $ hg -R test bundle phase.hg empty
39 searching for changes
39 searching for changes
40 no changes found (ignored 9 secret changesets)
40 no changes found (ignored 9 secret changesets)
41 [1]
41 [1]
42 $ hg -R test phase --draft -r 'head()'
42 $ hg -R test phase --draft -r 'head()'
43
43
44 Bundle --all
44 Bundle --all
45
45
46 $ hg -R test bundle --all all.hg
46 $ hg -R test bundle --all all.hg
47 9 changesets found
47 9 changesets found
48
48
49 Bundle test to full.hg
49 Bundle test to full.hg
50
50
51 $ hg -R test bundle full.hg empty
51 $ hg -R test bundle full.hg empty
52 searching for changes
52 searching for changes
53 9 changesets found
53 9 changesets found
54
54
55 Unbundle full.hg in test
55 Unbundle full.hg in test
56
56
57 $ hg -R test unbundle full.hg
57 $ hg -R test unbundle full.hg
58 adding changesets
58 adding changesets
59 adding manifests
59 adding manifests
60 adding file changes
60 adding file changes
61 added 0 changesets with 0 changes to 4 files
61 added 0 changesets with 0 changes to 4 files
62 (run 'hg update' to get a working copy)
62 (run 'hg update' to get a working copy)
63
63
64 Verify empty
64 Verify empty
65
65
66 $ hg -R empty heads
66 $ hg -R empty heads
67 [1]
67 [1]
68 $ hg -R empty verify -q
68 $ hg -R empty verify -q
69
69
70 #if repobundlerepo
70 #if repobundlerepo
71
71
72 Pull full.hg into test (using --cwd)
72 Pull full.hg into test (using --cwd)
73
73
74 $ hg --cwd test pull ../full.hg
74 $ hg --cwd test pull ../full.hg
75 pulling from ../full.hg
75 pulling from ../full.hg
76 searching for changes
76 searching for changes
77 no changes found
77 no changes found
78
78
79 Verify that there are no leaked temporary files after pull (issue2797)
79 Verify that there are no leaked temporary files after pull (issue2797)
80
80
81 $ ls test/.hg | grep .hg10un
81 $ ls test/.hg | grep .hg10un
82 [1]
82 [1]
83
83
84 Pull full.hg into empty (using --cwd)
84 Pull full.hg into empty (using --cwd)
85
85
86 $ hg --cwd empty pull ../full.hg
86 $ hg --cwd empty pull ../full.hg
87 pulling from ../full.hg
87 pulling from ../full.hg
88 requesting all changes
88 requesting all changes
89 adding changesets
89 adding changesets
90 adding manifests
90 adding manifests
91 adding file changes
91 adding file changes
92 added 9 changesets with 7 changes to 4 files (+1 heads)
92 added 9 changesets with 7 changes to 4 files (+1 heads)
93 new changesets f9ee2f85a263:aa35859c02ea (9 drafts)
93 new changesets f9ee2f85a263:aa35859c02ea (9 drafts)
94 (run 'hg heads' to see heads, 'hg merge' to merge)
94 (run 'hg heads' to see heads, 'hg merge' to merge)
95
95
96 Rollback empty
96 Rollback empty
97
97
98 $ hg -R empty rollback
98 $ hg -R empty rollback
99 repository tip rolled back to revision -1 (undo pull)
99 repository tip rolled back to revision -1 (undo pull)
100
100
101 Pull full.hg into empty again (using --cwd)
101 Pull full.hg into empty again (using --cwd)
102
102
103 $ hg --cwd empty pull ../full.hg
103 $ hg --cwd empty pull ../full.hg
104 pulling from ../full.hg
104 pulling from ../full.hg
105 requesting all changes
105 requesting all changes
106 adding changesets
106 adding changesets
107 adding manifests
107 adding manifests
108 adding file changes
108 adding file changes
109 added 9 changesets with 7 changes to 4 files (+1 heads)
109 added 9 changesets with 7 changes to 4 files (+1 heads)
110 new changesets f9ee2f85a263:aa35859c02ea (9 drafts)
110 new changesets f9ee2f85a263:aa35859c02ea (9 drafts)
111 (run 'hg heads' to see heads, 'hg merge' to merge)
111 (run 'hg heads' to see heads, 'hg merge' to merge)
112
112
113 Pull full.hg into test (using -R)
113 Pull full.hg into test (using -R)
114
114
115 $ hg -R test pull full.hg
115 $ hg -R test pull full.hg
116 pulling from full.hg
116 pulling from full.hg
117 searching for changes
117 searching for changes
118 no changes found
118 no changes found
119
119
120 Pull full.hg into empty (using -R)
120 Pull full.hg into empty (using -R)
121
121
122 $ hg -R empty pull full.hg
122 $ hg -R empty pull full.hg
123 pulling from full.hg
123 pulling from full.hg
124 searching for changes
124 searching for changes
125 no changes found
125 no changes found
126
126
127 Rollback empty
127 Rollback empty
128
128
129 $ hg -R empty rollback
129 $ hg -R empty rollback
130 repository tip rolled back to revision -1 (undo pull)
130 repository tip rolled back to revision -1 (undo pull)
131
131
132 Pull full.hg into empty again (using -R)
132 Pull full.hg into empty again (using -R)
133
133
134 $ hg -R empty pull full.hg
134 $ hg -R empty pull full.hg
135 pulling from full.hg
135 pulling from full.hg
136 requesting all changes
136 requesting all changes
137 adding changesets
137 adding changesets
138 adding manifests
138 adding manifests
139 adding file changes
139 adding file changes
140 added 9 changesets with 7 changes to 4 files (+1 heads)
140 added 9 changesets with 7 changes to 4 files (+1 heads)
141 new changesets f9ee2f85a263:aa35859c02ea (9 drafts)
141 new changesets f9ee2f85a263:aa35859c02ea (9 drafts)
142 (run 'hg heads' to see heads, 'hg merge' to merge)
142 (run 'hg heads' to see heads, 'hg merge' to merge)
143
143
144 Log -R full.hg in fresh empty
144 Log -R full.hg in fresh empty
145
145
146 $ rm -r empty
146 $ rm -r empty
147 $ hg init empty
147 $ hg init empty
148 $ cd empty
148 $ cd empty
149 $ hg -R bundle://../full.hg log
149 $ hg -R bundle://../full.hg log
150 changeset: 8:aa35859c02ea
150 changeset: 8:aa35859c02ea
151 tag: tip
151 tag: tip
152 parent: 3:eebf5a27f8ca
152 parent: 3:eebf5a27f8ca
153 user: test
153 user: test
154 date: Thu Jan 01 00:00:00 1970 +0000
154 date: Thu Jan 01 00:00:00 1970 +0000
155 summary: 0.3m
155 summary: 0.3m
156
156
157 changeset: 7:a6a34bfa0076
157 changeset: 7:a6a34bfa0076
158 user: test
158 user: test
159 date: Thu Jan 01 00:00:00 1970 +0000
159 date: Thu Jan 01 00:00:00 1970 +0000
160 summary: 1.3m
160 summary: 1.3m
161
161
162 changeset: 6:7373c1169842
162 changeset: 6:7373c1169842
163 user: test
163 user: test
164 date: Thu Jan 01 00:00:00 1970 +0000
164 date: Thu Jan 01 00:00:00 1970 +0000
165 summary: 1.3
165 summary: 1.3
166
166
167 changeset: 5:1bb50a9436a7
167 changeset: 5:1bb50a9436a7
168 user: test
168 user: test
169 date: Thu Jan 01 00:00:00 1970 +0000
169 date: Thu Jan 01 00:00:00 1970 +0000
170 summary: 1.2
170 summary: 1.2
171
171
172 changeset: 4:095197eb4973
172 changeset: 4:095197eb4973
173 parent: 0:f9ee2f85a263
173 parent: 0:f9ee2f85a263
174 user: test
174 user: test
175 date: Thu Jan 01 00:00:00 1970 +0000
175 date: Thu Jan 01 00:00:00 1970 +0000
176 summary: 1.1
176 summary: 1.1
177
177
178 changeset: 3:eebf5a27f8ca
178 changeset: 3:eebf5a27f8ca
179 user: test
179 user: test
180 date: Thu Jan 01 00:00:00 1970 +0000
180 date: Thu Jan 01 00:00:00 1970 +0000
181 summary: 0.3
181 summary: 0.3
182
182
183 changeset: 2:e38ba6f5b7e0
183 changeset: 2:e38ba6f5b7e0
184 user: test
184 user: test
185 date: Thu Jan 01 00:00:00 1970 +0000
185 date: Thu Jan 01 00:00:00 1970 +0000
186 summary: 0.2
186 summary: 0.2
187
187
188 changeset: 1:34c2bf6b0626
188 changeset: 1:34c2bf6b0626
189 user: test
189 user: test
190 date: Thu Jan 01 00:00:00 1970 +0000
190 date: Thu Jan 01 00:00:00 1970 +0000
191 summary: 0.1
191 summary: 0.1
192
192
193 changeset: 0:f9ee2f85a263
193 changeset: 0:f9ee2f85a263
194 user: test
194 user: test
195 date: Thu Jan 01 00:00:00 1970 +0000
195 date: Thu Jan 01 00:00:00 1970 +0000
196 summary: 0.0
196 summary: 0.0
197
197
198 Make sure bundlerepo doesn't leak tempfiles (issue2491)
198 Make sure bundlerepo doesn't leak tempfiles (issue2491)
199
199
200 $ ls .hg
200 $ ls .hg
201 00changelog.i
201 00changelog.i
202 cache
202 cache
203 requires
203 requires
204 store
204 store
205 wcache
205 wcache
206
206
207 Pull ../full.hg into empty (with hook)
207 Pull ../full.hg into empty (with hook)
208
208
209 $ cat >> .hg/hgrc <<EOF
209 $ cat >> .hg/hgrc <<EOF
210 > [hooks]
210 > [hooks]
211 > changegroup = sh -c "printenv.py --line changegroup"
211 > changegroup = sh -c "printenv.py --line changegroup"
212 > EOF
212 > EOF
213
213
214 doesn't work (yet ?)
214 doesn't work (yet ?)
215 NOTE: msys is mangling the URL below
215 NOTE: msys is mangling the URL below
216
216
217 hg -R bundle://../full.hg verify
217 hg -R bundle://../full.hg verify
218
218
219 $ hg pull bundle://../full.hg
219 $ hg pull bundle://../full.hg
220 pulling from bundle:../full.hg
220 pulling from bundle:../full.hg
221 requesting all changes
221 requesting all changes
222 adding changesets
222 adding changesets
223 adding manifests
223 adding manifests
224 adding file changes
224 adding file changes
225 added 9 changesets with 7 changes to 4 files (+1 heads)
225 added 9 changesets with 7 changes to 4 files (+1 heads)
226 new changesets f9ee2f85a263:aa35859c02ea (9 drafts)
226 new changesets f9ee2f85a263:aa35859c02ea (9 drafts)
227 changegroup hook: HG_HOOKNAME=changegroup
227 changegroup hook: HG_HOOKNAME=changegroup
228 HG_HOOKTYPE=changegroup
228 HG_HOOKTYPE=changegroup
229 HG_NODE=f9ee2f85a263049e9ae6d37a0e67e96194ffb735
229 HG_NODE=f9ee2f85a263049e9ae6d37a0e67e96194ffb735
230 HG_NODE_LAST=aa35859c02ea8bd48da5da68cd2740ac71afcbaf
230 HG_NODE_LAST=aa35859c02ea8bd48da5da68cd2740ac71afcbaf
231 HG_SOURCE=pull
231 HG_SOURCE=pull
232 HG_TXNID=TXN:$ID$
232 HG_TXNID=TXN:$ID$
233 HG_TXNNAME=pull
233 HG_TXNNAME=pull
234 bundle:../full.hg (no-msys !)
234 bundle:../full.hg (no-msys !)
235 bundle;../full.hg (msys !)
235 bundle;../full.hg (msys !)
236 HG_URL=bundle:../full.hg (no-msys !)
236 HG_URL=bundle:../full.hg (no-msys !)
237 HG_URL=bundle;../full.hg (msys !)
237 HG_URL=bundle;../full.hg (msys !)
238
238
239 (run 'hg heads' to see heads, 'hg merge' to merge)
239 (run 'hg heads' to see heads, 'hg merge' to merge)
240
240
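The HG_NODE / HG_NODE_LAST pair printed above brackets exactly the changesets added by the incoming changegroup. An equivalent in-process Python hook could walk that range; the module path and hook name below are hypothetical, and the keyword arguments mirror the environment variables shown above:

# Hypothetical $TESTTMP/show_added.py, enabled with:
#   [hooks]
#   changegroup.show_added = python:$TESTTMP/show_added.py:hook
def hook(ui, repo, hooktype, node=None, node_last=None, **kwargs):
    # node / node_last correspond to HG_NODE / HG_NODE_LAST above.
    first = repo[node].rev()
    last = repo[node_last].rev() if node_last else first
    for rev in range(first, last + 1):
        ui.write(b'added %d:%s\n' % (rev, repo[rev].hex()[:12]))
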
241 Rollback empty
241 Rollback empty
242
242
243 $ hg rollback
243 $ hg rollback
244 repository tip rolled back to revision -1 (undo pull)
244 repository tip rolled back to revision -1 (undo pull)
245 $ cd ..
245 $ cd ..
246
246
247 Log -R bundle:empty+full.hg
247 Log -R bundle:empty+full.hg
248
248
249 $ hg -R bundle:empty+full.hg log --template="{rev} "; echo ""
249 $ hg -R bundle:empty+full.hg log --template="{rev} "; echo ""
250 8 7 6 5 4 3 2 1 0
250 8 7 6 5 4 3 2 1 0
251
251
252 Pull full.hg into empty again (using -R; with hook)
252 Pull full.hg into empty again (using -R; with hook)
253
253
254 $ hg -R empty pull full.hg
254 $ hg -R empty pull full.hg
255 pulling from full.hg
255 pulling from full.hg
256 requesting all changes
256 requesting all changes
257 adding changesets
257 adding changesets
258 adding manifests
258 adding manifests
259 adding file changes
259 adding file changes
260 added 9 changesets with 7 changes to 4 files (+1 heads)
260 added 9 changesets with 7 changes to 4 files (+1 heads)
261 new changesets f9ee2f85a263:aa35859c02ea (9 drafts)
261 new changesets f9ee2f85a263:aa35859c02ea (9 drafts)
262 changegroup hook: HG_HOOKNAME=changegroup
262 changegroup hook: HG_HOOKNAME=changegroup
263 HG_HOOKTYPE=changegroup
263 HG_HOOKTYPE=changegroup
264 HG_NODE=f9ee2f85a263049e9ae6d37a0e67e96194ffb735
264 HG_NODE=f9ee2f85a263049e9ae6d37a0e67e96194ffb735
265 HG_NODE_LAST=aa35859c02ea8bd48da5da68cd2740ac71afcbaf
265 HG_NODE_LAST=aa35859c02ea8bd48da5da68cd2740ac71afcbaf
266 HG_SOURCE=pull
266 HG_SOURCE=pull
267 HG_TXNID=TXN:$ID$
267 HG_TXNID=TXN:$ID$
268 HG_TXNNAME=pull
268 HG_TXNNAME=pull
269 bundle:empty+full.hg
269 bundle:empty+full.hg
270 HG_URL=bundle:empty+full.hg
270 HG_URL=bundle:empty+full.hg
271
271
272 (run 'hg heads' to see heads, 'hg merge' to merge)
272 (run 'hg heads' to see heads, 'hg merge' to merge)
273
273
274 #endif
274 #endif
275
275
276 Cannot produce streaming clone bundles with "hg bundle"
276 Cannot produce streaming clone bundles with "hg bundle"
277
277
278 $ hg -R test bundle -t packed1 packed.hg
278 $ hg -R test bundle -t packed1 packed.hg
279 abort: packed bundles cannot be produced by "hg bundle"
279 abort: packed bundles cannot be produced by "hg bundle"
280 (use 'hg debugcreatestreamclonebundle')
280 (use 'hg debugcreatestreamclonebundle')
281 [10]
281 [10]
282
282
283 packed1 is produced properly
283 packed1 is produced properly
284
284
285
285
286 #if reporevlogstore rust
286 #if reporevlogstore rust
287
287
288 $ hg -R test debugcreatestreamclonebundle packed.hg
288 $ hg -R test debugcreatestreamclonebundle packed.hg
289 writing 2665 bytes for 6 files
289 writing 2665 bytes for 6 files (no-rust !)
290 writing 2919 bytes for 9 files (rust !)
290 bundle requirements: generaldelta, revlog-compression-zstd, revlogv1, sparserevlog
291 bundle requirements: generaldelta, revlog-compression-zstd, revlogv1, sparserevlog
291
292
292 $ f -B 64 --size --sha1 --hexdump packed.hg
293 $ f -B 64 --size --sha1 --hexdump packed.hg
293 packed.hg: size=2865, sha1=353d10311f4befa195d9a1ca4b8e26518115c702
294 packed.hg: size=2865, sha1=353d10311f4befa195d9a1ca4b8e26518115c702 (no-rust !)
294 0000: 48 47 53 31 55 4e 00 00 00 00 00 00 00 06 00 00 |HGS1UN..........|
295 0000: 48 47 53 31 55 4e 00 00 00 00 00 00 00 06 00 00 |HGS1UN..........| (no-rust !)
295 0010: 00 00 00 00 0a 69 00 3b 67 65 6e 65 72 61 6c 64 |.....i.;generald|
296 0010: 00 00 00 00 0a 69 00 3b 67 65 6e 65 72 61 6c 64 |.....i.;generald| (no-rust !)
297 packed.hg: size=3181, sha1=b202787710a1c109246554be589506cd2916acb7 (rust !)
298 0000: 48 47 53 31 55 4e 00 00 00 00 00 00 00 09 00 00 |HGS1UN..........| (rust !)
299 0010: 00 00 00 00 0b 67 00 3b 67 65 6e 65 72 61 6c 64 |.....g.;generald| (rust !)
296 0020: 65 6c 74 61 2c 72 65 76 6c 6f 67 2d 63 6f 6d 70 |elta,revlog-comp|
300 0020: 65 6c 74 61 2c 72 65 76 6c 6f 67 2d 63 6f 6d 70 |elta,revlog-comp|
297 0030: 72 65 73 73 69 6f 6e 2d 7a 73 74 64 2c 72 65 76 |ression-zstd,rev|
301 0030: 72 65 73 73 69 6f 6e 2d 7a 73 74 64 2c 72 65 76 |ression-zstd,rev|
298 $ hg debugbundle --spec packed.hg
302 $ hg debugbundle --spec packed.hg
299 none-packed1;requirements%3Dgeneraldelta%2Crevlog-compression-zstd%2Crevlogv1%2Csparserevlog
303 none-packed1;requirements%3Dgeneraldelta%2Crevlog-compression-zstd%2Crevlogv1%2Csparserevlog
300 #endif
304 #endif
301
305
302 #if reporevlogstore no-rust zstd
306 #if reporevlogstore no-rust zstd
303
307
304 $ hg -R test debugcreatestreamclonebundle packed.hg
308 $ hg -R test debugcreatestreamclonebundle packed.hg
305 writing 2665 bytes for 6 files
309 writing 2665 bytes for 7 files
306 bundle requirements: generaldelta, revlog-compression-zstd, revlogv1, sparserevlog
310 bundle requirements: generaldelta, revlog-compression-zstd, revlogv1, sparserevlog
307
311
308 $ f -B 64 --size --sha1 --hexdump packed.hg
312 $ f -B 64 --size --sha1 --hexdump packed.hg
309 packed.hg: size=2865, sha1=353d10311f4befa195d9a1ca4b8e26518115c702
313 packed.hg: size=2882, sha1=6525b07e6bfced4b6c2319cb58c6ff76ca72fa13
310 0000: 48 47 53 31 55 4e 00 00 00 00 00 00 00 06 00 00 |HGS1UN..........|
314 0000: 48 47 53 31 55 4e 00 00 00 00 00 00 00 07 00 00 |HGS1UN..........|
311 0010: 00 00 00 00 0a 69 00 3b 67 65 6e 65 72 61 6c 64 |.....i.;generald|
315 0010: 00 00 00 00 0a 69 00 3b 67 65 6e 65 72 61 6c 64 |.....i.;generald|
312 0020: 65 6c 74 61 2c 72 65 76 6c 6f 67 2d 63 6f 6d 70 |elta,revlog-comp|
316 0020: 65 6c 74 61 2c 72 65 76 6c 6f 67 2d 63 6f 6d 70 |elta,revlog-comp|
313 0030: 72 65 73 73 69 6f 6e 2d 7a 73 74 64 2c 72 65 76 |ression-zstd,rev|
317 0030: 72 65 73 73 69 6f 6e 2d 7a 73 74 64 2c 72 65 76 |ression-zstd,rev|
314 $ hg debugbundle --spec packed.hg
318 $ hg debugbundle --spec packed.hg
315 none-packed1;requirements%3Dgeneraldelta%2Crevlog-compression-zstd%2Crevlogv1%2Csparserevlog
319 none-packed1;requirements%3Dgeneraldelta%2Crevlog-compression-zstd%2Crevlogv1%2Csparserevlog
316 #endif
320 #endif
317
321
318 #if reporevlogstore no-rust no-zstd
322 #if reporevlogstore no-rust no-zstd
319
323
320 $ hg -R test debugcreatestreamclonebundle packed.hg
324 $ hg -R test debugcreatestreamclonebundle packed.hg
321 writing 2664 bytes for 6 files
325 writing 2664 bytes for 7 files
322 bundle requirements: generaldelta, revlogv1, sparserevlog
326 bundle requirements: generaldelta, revlogv1, sparserevlog
323
327
324 $ f -B 64 --size --sha1 --hexdump packed.hg
328 $ f -B 64 --size --sha1 --hexdump packed.hg
325 packed.hg: size=2840, sha1=12bf3eee3eb8a04c503ce2d29b48f0135c7edff5
329 packed.hg: size=2857, sha1=3a7353323915b095baa6f2ee0a5aed588f11f5f0
326 0000: 48 47 53 31 55 4e 00 00 00 00 00 00 00 06 00 00 |HGS1UN..........|
330 0000: 48 47 53 31 55 4e 00 00 00 00 00 00 00 07 00 00 |HGS1UN..........|
327 0010: 00 00 00 00 0a 68 00 23 67 65 6e 65 72 61 6c 64 |.....h.#generald|
331 0010: 00 00 00 00 0a 68 00 23 67 65 6e 65 72 61 6c 64 |.....h.#generald|
328 0020: 65 6c 74 61 2c 72 65 76 6c 6f 67 76 31 2c 73 70 |elta,revlogv1,sp|
332 0020: 65 6c 74 61 2c 72 65 76 6c 6f 67 76 31 2c 73 70 |elta,revlogv1,sp|
329 0030: 61 72 73 65 72 65 76 6c 6f 67 00 64 61 74 61 2f |arserevlog.data/|
333 0030: 61 72 73 65 72 65 76 6c 6f 67 00 64 61 74 61 2f |arserevlog.data/|
330 $ hg debugbundle --spec packed.hg
334 $ hg debugbundle --spec packed.hg
331 none-packed1;requirements%3Dgeneraldelta%2Crevlogv1%2Csparserevlog
335 none-packed1;requirements%3Dgeneraldelta%2Crevlogv1%2Csparserevlog
332 #endif
336 #endif
333
337
334 #if reporevlogstore
338 #if reporevlogstore
335
339
336 generaldelta requirement is not listed in stream clone bundles unless used
340 generaldelta requirement is not listed in stream clone bundles unless used
337
341
338 $ hg --config format.usegeneraldelta=false init testnongd
342 $ hg --config format.usegeneraldelta=false init testnongd
339 $ cd testnongd
343 $ cd testnongd
340 $ touch foo
344 $ touch foo
341 $ hg -q commit -A -m initial
345 $ hg -q commit -A -m initial
342 $ cd ..
346 $ cd ..
343
347
344 #endif
348 #endif
345
349
346 #if reporevlogstore rust
350 #if reporevlogstore rust
347
351
348 $ hg -R testnongd debugcreatestreamclonebundle packednongd.hg
352 $ hg -R testnongd debugcreatestreamclonebundle packednongd.hg
349 writing 301 bytes for 3 files
353 writing 301 bytes for 3 files (no-rust !)
354 writing 427 bytes for 6 files (rust !)
350 bundle requirements: revlog-compression-zstd, revlogv1
355 bundle requirements: revlog-compression-zstd, revlogv1
351
356
352 $ f -B 64 --size --sha1 --hexdump packednongd.hg
357 $ f -B 64 --size --sha1 --hexdump packednongd.hg
353 packednongd.hg: size=407, sha1=0b8714422b785ba8eb98c916b41ffd5fb994c9b5
358 packednongd.hg: size=407, sha1=0b8714422b785ba8eb98c916b41ffd5fb994c9b5 (no-rust !)
354 0000: 48 47 53 31 55 4e 00 00 00 00 00 00 00 03 00 00 |HGS1UN..........|
359 0000: 48 47 53 31 55 4e 00 00 00 00 00 00 00 03 00 00 |HGS1UN..........| (no-rust !)
355 0010: 00 00 00 00 01 2d 00 21 72 65 76 6c 6f 67 2d 63 |.....-.!revlog-c|
360 0010: 00 00 00 00 01 2d 00 21 72 65 76 6c 6f 67 2d 63 |.....-.!revlog-c| (no-rust !)
361 packednongd.hg: size=593, sha1=1ad0cbea11b5dd7b0437e54ae20fc5f8df118521 (rust !)
362 0000: 48 47 53 31 55 4e 00 00 00 00 00 00 00 06 00 00 |HGS1UN..........| (rust !)
363 0010: 00 00 00 00 01 ab 00 21 72 65 76 6c 6f 67 2d 63 |.......!revlog-c| (rust !)
356 0020: 6f 6d 70 72 65 73 73 69 6f 6e 2d 7a 73 74 64 2c |ompression-zstd,|
364 0020: 6f 6d 70 72 65 73 73 69 6f 6e 2d 7a 73 74 64 2c |ompression-zstd,|
357 0030: 72 65 76 6c 6f 67 76 31 00 64 61 74 61 2f 66 6f |revlogv1.data/fo|
365 0030: 72 65 76 6c 6f 67 76 31 00 64 61 74 61 2f 66 6f |revlogv1.data/fo|
358
366
359 $ hg debugbundle --spec packednongd.hg
367 $ hg debugbundle --spec packednongd.hg
360 none-packed1;requirements%3Drevlog-compression-zstd%2Crevlogv1
368 none-packed1;requirements%3Drevlog-compression-zstd%2Crevlogv1
361
369
362 #endif
370 #endif
363
371
364 #if reporevlogstore no-rust zstd
372 #if reporevlogstore no-rust zstd
365
373
366 $ hg -R testnongd debugcreatestreamclonebundle packednongd.hg
374 $ hg -R testnongd debugcreatestreamclonebundle packednongd.hg
367 writing 301 bytes for 3 files
375 writing 301 bytes for 4 files
368 bundle requirements: revlog-compression-zstd, revlogv1
376 bundle requirements: revlog-compression-zstd, revlogv1
369
377
370 $ f -B 64 --size --sha1 --hexdump packednongd.hg
378 $ f -B 64 --size --sha1 --hexdump packednongd.hg
371 packednongd.hg: size=407, sha1=0b8714422b785ba8eb98c916b41ffd5fb994c9b5
379 packednongd.hg: size=423, sha1=4269c89cf64b6a4377be75a3983771c4153362bf
372 0000: 48 47 53 31 55 4e 00 00 00 00 00 00 00 03 00 00 |HGS1UN..........|
380 0000: 48 47 53 31 55 4e 00 00 00 00 00 00 00 04 00 00 |HGS1UN..........|
373 0010: 00 00 00 00 01 2d 00 21 72 65 76 6c 6f 67 2d 63 |.....-.!revlog-c|
381 0010: 00 00 00 00 01 2d 00 21 72 65 76 6c 6f 67 2d 63 |.....-.!revlog-c|
374 0020: 6f 6d 70 72 65 73 73 69 6f 6e 2d 7a 73 74 64 2c |ompression-zstd,|
382 0020: 6f 6d 70 72 65 73 73 69 6f 6e 2d 7a 73 74 64 2c |ompression-zstd,|
375 0030: 72 65 76 6c 6f 67 76 31 00 64 61 74 61 2f 66 6f |revlogv1.data/fo|
383 0030: 72 65 76 6c 6f 67 76 31 00 64 61 74 61 2f 66 6f |revlogv1.data/fo|
376
384
377 $ hg debugbundle --spec packednongd.hg
385 $ hg debugbundle --spec packednongd.hg
378 none-packed1;requirements%3Drevlog-compression-zstd%2Crevlogv1
386 none-packed1;requirements%3Drevlog-compression-zstd%2Crevlogv1
379
387
380
388
381 #endif
389 #endif
382
390
383 #if reporevlogstore no-rust no-zstd
391 #if reporevlogstore no-rust no-zstd
384
392
385 $ hg -R testnongd debugcreatestreamclonebundle packednongd.hg
393 $ hg -R testnongd debugcreatestreamclonebundle packednongd.hg
386 writing 301 bytes for 3 files
394 writing 301 bytes for 4 files
387 bundle requirements: revlogv1
395 bundle requirements: revlogv1
388
396
389 $ f -B 64 --size --sha1 --hexdump packednongd.hg
397 $ f -B 64 --size --sha1 --hexdump packednongd.hg
390 packednongd.hg: size=383, sha1=1d9c230238edd5d38907100b729ba72b1831fe6f
398 packednongd.hg: size=399, sha1=99bb89decfc6674a3cf2cc87accc8c5332ede7fd
391 0000: 48 47 53 31 55 4e 00 00 00 00 00 00 00 03 00 00 |HGS1UN..........|
399 0000: 48 47 53 31 55 4e 00 00 00 00 00 00 00 04 00 00 |HGS1UN..........|
392 0010: 00 00 00 00 01 2d 00 09 72 65 76 6c 6f 67 76 31 |.....-..revlogv1|
400 0010: 00 00 00 00 01 2d 00 09 72 65 76 6c 6f 67 76 31 |.....-..revlogv1|
393 0020: 00 64 61 74 61 2f 66 6f 6f 2e 69 00 36 34 0a 00 |.data/foo.i.64..|
401 0020: 00 64 61 74 61 2f 66 6f 6f 2e 69 00 36 34 0a 00 |.data/foo.i.64..|
394 0030: 01 00 01 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
402 0030: 01 00 01 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
395
403
396 $ hg debugbundle --spec packednongd.hg
404 $ hg debugbundle --spec packednongd.hg
397 none-packed1;requirements%3Drevlogv1
405 none-packed1;requirements%3Drevlogv1
398
406
399
407
400 #endif
408 #endif
401
409
402 #if reporevlogstore
410 #if reporevlogstore
403
411
404 Warning emitted when packed bundles contain secret changesets
412 Warning emitted when packed bundles contain secret changesets
405
413
406 $ hg init testsecret
414 $ hg init testsecret
407 $ cd testsecret
415 $ cd testsecret
408 $ touch foo
416 $ touch foo
409 $ hg -q commit -A -m initial
417 $ hg -q commit -A -m initial
410 $ hg phase --force --secret -r .
418 $ hg phase --force --secret -r .
411 $ cd ..
419 $ cd ..
412
420
413 #endif
421 #endif
414
422
415 #if reporevlogstore rust
423 #if reporevlogstore rust
416
424
417 $ hg -R testsecret debugcreatestreamclonebundle packedsecret.hg
425 $ hg -R testsecret debugcreatestreamclonebundle packedsecret.hg
418 (warning: stream clone bundle will contain secret revisions)
426 (warning: stream clone bundle will contain secret revisions)
419 writing 301 bytes for 3 files
427 writing 301 bytes for 3 files (no-rust !)
428 writing 427 bytes for 6 files (rust !)
420 bundle requirements: generaldelta, revlog-compression-zstd, revlogv1, sparserevlog
429 bundle requirements: generaldelta, revlog-compression-zstd, revlogv1, sparserevlog
421
430
422 #endif
431 #endif
423
432
424 #if reporevlogstore no-rust zstd
433 #if reporevlogstore no-rust zstd
425
434
426 $ hg -R testsecret debugcreatestreamclonebundle packedsecret.hg
435 $ hg -R testsecret debugcreatestreamclonebundle packedsecret.hg
427 (warning: stream clone bundle will contain secret revisions)
436 (warning: stream clone bundle will contain secret revisions)
428 writing 301 bytes for 3 files
437 writing 301 bytes for 4 files
429 bundle requirements: generaldelta, revlog-compression-zstd, revlogv1, sparserevlog
438 bundle requirements: generaldelta, revlog-compression-zstd, revlogv1, sparserevlog
430
439
431 #endif
440 #endif
432
441
433 #if reporevlogstore no-rust no-zstd
442 #if reporevlogstore no-rust no-zstd
434
443
435 $ hg -R testsecret debugcreatestreamclonebundle packedsecret.hg
444 $ hg -R testsecret debugcreatestreamclonebundle packedsecret.hg
436 (warning: stream clone bundle will contain secret revisions)
445 (warning: stream clone bundle will contain secret revisions)
437 writing 301 bytes for 3 files
446 writing 301 bytes for 4 files
438 bundle requirements: generaldelta, revlogv1, sparserevlog
447 bundle requirements: generaldelta, revlogv1, sparserevlog
439
448
440 #endif
449 #endif
441
450
442 #if reporevlogstore
451 #if reporevlogstore
443
452
444 Unpacking packed1 bundles with "hg unbundle" isn't allowed
453 Unpacking packed1 bundles with "hg unbundle" isn't allowed
445
454
446 $ hg init packed
455 $ hg init packed
447 $ hg -R packed unbundle packed.hg
456 $ hg -R packed unbundle packed.hg
448 abort: packed bundles cannot be applied with "hg unbundle"
457 abort: packed bundles cannot be applied with "hg unbundle"
449 (use "hg debugapplystreamclonebundle")
458 (use "hg debugapplystreamclonebundle")
450 [10]
459 [10]
451
460
452 packed1 can be consumed from debug command
461 packed1 can be consumed from debug command
453
462
454 (this also confirms that streamclone-ed changes are visible via
463 (this also confirms that streamclone-ed changes are visible via
455 @filecache properties to in-process procedures before closing
464 @filecache properties to in-process procedures before closing
456 transaction)
465 transaction)
457
466
458 $ cat > $TESTTMP/showtip.py <<EOF
467 $ cat > $TESTTMP/showtip.py <<EOF
459 >
468 >
460 > def showtip(ui, repo, hooktype, **kwargs):
469 > def showtip(ui, repo, hooktype, **kwargs):
461 > ui.warn(b'%s: %s\n' % (hooktype, repo[b'tip'].hex()[:12]))
470 > ui.warn(b'%s: %s\n' % (hooktype, repo[b'tip'].hex()[:12]))
462 >
471 >
463 > def reposetup(ui, repo):
472 > def reposetup(ui, repo):
464 > # this confirms (and ensures) that (empty) 00changelog.i
473 > # this confirms (and ensures) that (empty) 00changelog.i
465 > # before streamclone is already cached as repo.changelog
474 > # before streamclone is already cached as repo.changelog
466 > ui.setconfig(b'hooks', b'pretxnopen.showtip', showtip)
475 > ui.setconfig(b'hooks', b'pretxnopen.showtip', showtip)
467 >
476 >
468 > # this confirms that streamclone-ed changes are visible to
477 > # this confirms that streamclone-ed changes are visible to
469 > # in-process procedures before closing transaction
478 > # in-process procedures before closing transaction
470 > ui.setconfig(b'hooks', b'pretxnclose.showtip', showtip)
479 > ui.setconfig(b'hooks', b'pretxnclose.showtip', showtip)
471 >
480 >
472 > # this confirms that streamclone-ed changes are still visible
481 > # this confirms that streamclone-ed changes are still visible
473 > # after closing transaction
482 > # after closing transaction
474 > ui.setconfig(b'hooks', b'txnclose.showtip', showtip)
483 > ui.setconfig(b'hooks', b'txnclose.showtip', showtip)
475 > EOF
484 > EOF
476 $ cat >> $HGRCPATH <<EOF
485 $ cat >> $HGRCPATH <<EOF
477 > [extensions]
486 > [extensions]
478 > showtip = $TESTTMP/showtip.py
487 > showtip = $TESTTMP/showtip.py
479 > EOF
488 > EOF
480
489
481 $ hg -R packed debugapplystreamclonebundle packed.hg
490 $ hg -R packed debugapplystreamclonebundle packed.hg
482 6 files to transfer, 2.60 KB of data
491 7 files to transfer, 2.60 KB of data (no-rust !)
492 9 files to transfer, 2.85 KB of data (rust !)
483 pretxnopen: 000000000000
493 pretxnopen: 000000000000
484 pretxnclose: aa35859c02ea
494 pretxnclose: aa35859c02ea
485 transferred 2.60 KB in * seconds (* */sec) (glob)
495 transferred 2.60 KB in * seconds (* */sec) (glob) (no-rust !)
496 transferred 2.85 KB in * seconds (* */sec) (glob) (rust !)
486 txnclose: aa35859c02ea
497 txnclose: aa35859c02ea
487
498
488 (for safety, confirm visibility of streamclone-ed changes by another
499 (for safety, confirm visibility of streamclone-ed changes by another
489 process, too)
500 process, too)
490
501
491 $ hg -R packed tip -T "{node|short}\n"
502 $ hg -R packed tip -T "{node|short}\n"
492 aa35859c02ea
503 aa35859c02ea
493
504
494 $ cat >> $HGRCPATH <<EOF
505 $ cat >> $HGRCPATH <<EOF
495 > [extensions]
506 > [extensions]
496 > showtip = !
507 > showtip = !
497 > EOF
508 > EOF
498
509
499 Does not work on non-empty repo
510 Does not work on non-empty repo
500
511
501 $ hg -R packed debugapplystreamclonebundle packed.hg
512 $ hg -R packed debugapplystreamclonebundle packed.hg
502 abort: cannot apply stream clone bundle on non-empty repo
513 abort: cannot apply stream clone bundle on non-empty repo
503 [255]
514 [255]
504
515
505 #endif
516 #endif
506
517
507 Create partial clones
518 Create partial clones
508
519
509 $ rm -r empty
520 $ rm -r empty
510 $ hg init empty
521 $ hg init empty
511 $ hg clone -r 3 test partial
522 $ hg clone -r 3 test partial
512 adding changesets
523 adding changesets
513 adding manifests
524 adding manifests
514 adding file changes
525 adding file changes
515 added 4 changesets with 4 changes to 1 files
526 added 4 changesets with 4 changes to 1 files
516 new changesets f9ee2f85a263:eebf5a27f8ca
527 new changesets f9ee2f85a263:eebf5a27f8ca
517 updating to branch default
528 updating to branch default
518 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
529 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
519 $ hg clone partial partial2
530 $ hg clone partial partial2
520 updating to branch default
531 updating to branch default
521 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
532 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
522 $ cd partial
533 $ cd partial
523
534
524 #if repobundlerepo
535 #if repobundlerepo
525
536
526 Log -R full.hg in partial
537 Log -R full.hg in partial
527
538
528 $ hg -R bundle://../full.hg log -T phases
539 $ hg -R bundle://../full.hg log -T phases
529 changeset: 8:aa35859c02ea
540 changeset: 8:aa35859c02ea
530 tag: tip
541 tag: tip
531 phase: draft
542 phase: draft
532 parent: 3:eebf5a27f8ca
543 parent: 3:eebf5a27f8ca
533 user: test
544 user: test
534 date: Thu Jan 01 00:00:00 1970 +0000
545 date: Thu Jan 01 00:00:00 1970 +0000
535 summary: 0.3m
546 summary: 0.3m
536
547
537 changeset: 7:a6a34bfa0076
548 changeset: 7:a6a34bfa0076
538 phase: draft
549 phase: draft
539 user: test
550 user: test
540 date: Thu Jan 01 00:00:00 1970 +0000
551 date: Thu Jan 01 00:00:00 1970 +0000
541 summary: 1.3m
552 summary: 1.3m
542
553
543 changeset: 6:7373c1169842
554 changeset: 6:7373c1169842
544 phase: draft
555 phase: draft
545 user: test
556 user: test
546 date: Thu Jan 01 00:00:00 1970 +0000
557 date: Thu Jan 01 00:00:00 1970 +0000
547 summary: 1.3
558 summary: 1.3
548
559
549 changeset: 5:1bb50a9436a7
560 changeset: 5:1bb50a9436a7
550 phase: draft
561 phase: draft
551 user: test
562 user: test
552 date: Thu Jan 01 00:00:00 1970 +0000
563 date: Thu Jan 01 00:00:00 1970 +0000
553 summary: 1.2
564 summary: 1.2
554
565
555 changeset: 4:095197eb4973
566 changeset: 4:095197eb4973
556 phase: draft
567 phase: draft
557 parent: 0:f9ee2f85a263
568 parent: 0:f9ee2f85a263
558 user: test
569 user: test
559 date: Thu Jan 01 00:00:00 1970 +0000
570 date: Thu Jan 01 00:00:00 1970 +0000
560 summary: 1.1
571 summary: 1.1
561
572
562 changeset: 3:eebf5a27f8ca
573 changeset: 3:eebf5a27f8ca
563 phase: public
574 phase: public
564 user: test
575 user: test
565 date: Thu Jan 01 00:00:00 1970 +0000
576 date: Thu Jan 01 00:00:00 1970 +0000
566 summary: 0.3
577 summary: 0.3
567
578
568 changeset: 2:e38ba6f5b7e0
579 changeset: 2:e38ba6f5b7e0
569 phase: public
580 phase: public
570 user: test
581 user: test
571 date: Thu Jan 01 00:00:00 1970 +0000
582 date: Thu Jan 01 00:00:00 1970 +0000
572 summary: 0.2
583 summary: 0.2
573
584
574 changeset: 1:34c2bf6b0626
585 changeset: 1:34c2bf6b0626
575 phase: public
586 phase: public
576 user: test
587 user: test
577 date: Thu Jan 01 00:00:00 1970 +0000
588 date: Thu Jan 01 00:00:00 1970 +0000
578 summary: 0.1
589 summary: 0.1
579
590
580 changeset: 0:f9ee2f85a263
591 changeset: 0:f9ee2f85a263
581 phase: public
592 phase: public
582 user: test
593 user: test
583 date: Thu Jan 01 00:00:00 1970 +0000
594 date: Thu Jan 01 00:00:00 1970 +0000
584 summary: 0.0
595 summary: 0.0
585
596
586
597
587 Incoming full.hg in partial
598 Incoming full.hg in partial
588
599
589 $ hg incoming bundle://../full.hg
600 $ hg incoming bundle://../full.hg
590 comparing with bundle:../full.hg
601 comparing with bundle:../full.hg
591 searching for changes
602 searching for changes
592 changeset: 4:095197eb4973
603 changeset: 4:095197eb4973
593 parent: 0:f9ee2f85a263
604 parent: 0:f9ee2f85a263
594 user: test
605 user: test
595 date: Thu Jan 01 00:00:00 1970 +0000
606 date: Thu Jan 01 00:00:00 1970 +0000
596 summary: 1.1
607 summary: 1.1
597
608
598 changeset: 5:1bb50a9436a7
609 changeset: 5:1bb50a9436a7
599 user: test
610 user: test
600 date: Thu Jan 01 00:00:00 1970 +0000
611 date: Thu Jan 01 00:00:00 1970 +0000
601 summary: 1.2
612 summary: 1.2
602
613
603 changeset: 6:7373c1169842
614 changeset: 6:7373c1169842
604 user: test
615 user: test
605 date: Thu Jan 01 00:00:00 1970 +0000
616 date: Thu Jan 01 00:00:00 1970 +0000
606 summary: 1.3
617 summary: 1.3
607
618
608 changeset: 7:a6a34bfa0076
619 changeset: 7:a6a34bfa0076
609 user: test
620 user: test
610 date: Thu Jan 01 00:00:00 1970 +0000
621 date: Thu Jan 01 00:00:00 1970 +0000
611 summary: 1.3m
622 summary: 1.3m
612
623
613 changeset: 8:aa35859c02ea
624 changeset: 8:aa35859c02ea
614 tag: tip
625 tag: tip
615 parent: 3:eebf5a27f8ca
626 parent: 3:eebf5a27f8ca
616 user: test
627 user: test
617 date: Thu Jan 01 00:00:00 1970 +0000
628 date: Thu Jan 01 00:00:00 1970 +0000
618 summary: 0.3m
629 summary: 0.3m
619
630
620
631
621 Outgoing -R full.hg vs partial2 in partial
632 Outgoing -R full.hg vs partial2 in partial
622
633
623 $ hg -R bundle://../full.hg outgoing ../partial2
634 $ hg -R bundle://../full.hg outgoing ../partial2
624 comparing with ../partial2
635 comparing with ../partial2
625 searching for changes
636 searching for changes
626 changeset: 4:095197eb4973
637 changeset: 4:095197eb4973
627 parent: 0:f9ee2f85a263
638 parent: 0:f9ee2f85a263
628 user: test
639 user: test
629 date: Thu Jan 01 00:00:00 1970 +0000
640 date: Thu Jan 01 00:00:00 1970 +0000
630 summary: 1.1
641 summary: 1.1
631
642
632 changeset: 5:1bb50a9436a7
643 changeset: 5:1bb50a9436a7
633 user: test
644 user: test
634 date: Thu Jan 01 00:00:00 1970 +0000
645 date: Thu Jan 01 00:00:00 1970 +0000
635 summary: 1.2
646 summary: 1.2
636
647
637 changeset: 6:7373c1169842
648 changeset: 6:7373c1169842
638 user: test
649 user: test
639 date: Thu Jan 01 00:00:00 1970 +0000
650 date: Thu Jan 01 00:00:00 1970 +0000
640 summary: 1.3
651 summary: 1.3
641
652
642 changeset: 7:a6a34bfa0076
653 changeset: 7:a6a34bfa0076
643 user: test
654 user: test
644 date: Thu Jan 01 00:00:00 1970 +0000
655 date: Thu Jan 01 00:00:00 1970 +0000
645 summary: 1.3m
656 summary: 1.3m
646
657
647 changeset: 8:aa35859c02ea
658 changeset: 8:aa35859c02ea
648 tag: tip
659 tag: tip
649 parent: 3:eebf5a27f8ca
660 parent: 3:eebf5a27f8ca
650 user: test
661 user: test
651 date: Thu Jan 01 00:00:00 1970 +0000
662 date: Thu Jan 01 00:00:00 1970 +0000
652 summary: 0.3m
663 summary: 0.3m
653
664
654
665
655 Outgoing -R does-not-exist.hg vs partial2 in partial
666 Outgoing -R does-not-exist.hg vs partial2 in partial
656
667
657 $ hg -R bundle://../does-not-exist.hg outgoing ../partial2
668 $ hg -R bundle://../does-not-exist.hg outgoing ../partial2
658 abort: *../does-not-exist.hg* (glob)
669 abort: *../does-not-exist.hg* (glob)
659 [255]
670 [255]
660
671
661 #endif
672 #endif
662
673
663 $ cd ..
674 $ cd ..
664
675
665 hide outer repo
676 hide outer repo
666 $ hg init
677 $ hg init
667
678
668 Direct clone from bundle (all-history)
679 Direct clone from bundle (all-history)
669
680
670 #if repobundlerepo
681 #if repobundlerepo
671
682
672 $ hg clone full.hg full-clone
683 $ hg clone full.hg full-clone
673 requesting all changes
684 requesting all changes
674 adding changesets
685 adding changesets
675 adding manifests
686 adding manifests
676 adding file changes
687 adding file changes
677 added 9 changesets with 7 changes to 4 files (+1 heads)
688 added 9 changesets with 7 changes to 4 files (+1 heads)
678 new changesets f9ee2f85a263:aa35859c02ea (9 drafts)
689 new changesets f9ee2f85a263:aa35859c02ea (9 drafts)
679 updating to branch default
690 updating to branch default
680 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
691 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
681 $ hg -R full-clone heads
692 $ hg -R full-clone heads
682 changeset: 8:aa35859c02ea
693 changeset: 8:aa35859c02ea
683 tag: tip
694 tag: tip
684 parent: 3:eebf5a27f8ca
695 parent: 3:eebf5a27f8ca
685 user: test
696 user: test
686 date: Thu Jan 01 00:00:00 1970 +0000
697 date: Thu Jan 01 00:00:00 1970 +0000
687 summary: 0.3m
698 summary: 0.3m
688
699
689 changeset: 7:a6a34bfa0076
700 changeset: 7:a6a34bfa0076
690 user: test
701 user: test
691 date: Thu Jan 01 00:00:00 1970 +0000
702 date: Thu Jan 01 00:00:00 1970 +0000
692 summary: 1.3m
703 summary: 1.3m
693
704
694 $ rm -r full-clone
705 $ rm -r full-clone
695
706
696 When cloning from a non-copiable repository into '', do not
707 When cloning from a non-copiable repository into '', do not
697 recurse infinitely (issue2528)
708 recurse infinitely (issue2528)
698
709
699 $ hg clone full.hg ''
710 $ hg clone full.hg ''
700 abort: empty destination path is not valid
711 abort: empty destination path is not valid
701 [10]
712 [10]
702
713
703 test for https://bz.mercurial-scm.org/216
714 test for https://bz.mercurial-scm.org/216
704
715
705 Unbundle incremental bundles into fresh empty in one go
716 Unbundle incremental bundles into fresh empty in one go
706
717
707 $ rm -r empty
718 $ rm -r empty
708 $ hg init empty
719 $ hg init empty
709 $ hg -R test bundle --base null -r 0 ../0.hg
720 $ hg -R test bundle --base null -r 0 ../0.hg
710 1 changesets found
721 1 changesets found
711 $ hg -R test bundle --exact -r 1 ../1.hg
722 $ hg -R test bundle --exact -r 1 ../1.hg
712 1 changesets found
723 1 changesets found
713 $ hg -R empty unbundle -u ../0.hg ../1.hg
724 $ hg -R empty unbundle -u ../0.hg ../1.hg
714 adding changesets
725 adding changesets
715 adding manifests
726 adding manifests
716 adding file changes
727 adding file changes
717 added 1 changesets with 1 changes to 1 files
728 added 1 changesets with 1 changes to 1 files
718 new changesets f9ee2f85a263 (1 drafts)
729 new changesets f9ee2f85a263 (1 drafts)
719 adding changesets
730 adding changesets
720 adding manifests
731 adding manifests
721 adding file changes
732 adding file changes
722 added 1 changesets with 1 changes to 1 files
733 added 1 changesets with 1 changes to 1 files
723 new changesets 34c2bf6b0626 (1 drafts)
734 new changesets 34c2bf6b0626 (1 drafts)
724 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
735 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
725
736
726 View full contents of the bundle
737 View full contents of the bundle
727 $ hg -R test bundle --base null -r 3 ../partial.hg
738 $ hg -R test bundle --base null -r 3 ../partial.hg
728 4 changesets found
739 4 changesets found
729 $ cd test
740 $ cd test
730 $ hg -R ../../partial.hg log -r "bundle()"
741 $ hg -R ../../partial.hg log -r "bundle()"
731 changeset: 0:f9ee2f85a263
742 changeset: 0:f9ee2f85a263
732 user: test
743 user: test
733 date: Thu Jan 01 00:00:00 1970 +0000
744 date: Thu Jan 01 00:00:00 1970 +0000
734 summary: 0.0
745 summary: 0.0
735
746
736 changeset: 1:34c2bf6b0626
747 changeset: 1:34c2bf6b0626
737 user: test
748 user: test
738 date: Thu Jan 01 00:00:00 1970 +0000
749 date: Thu Jan 01 00:00:00 1970 +0000
739 summary: 0.1
750 summary: 0.1
740
751
741 changeset: 2:e38ba6f5b7e0
752 changeset: 2:e38ba6f5b7e0
742 user: test
753 user: test
743 date: Thu Jan 01 00:00:00 1970 +0000
754 date: Thu Jan 01 00:00:00 1970 +0000
744 summary: 0.2
755 summary: 0.2
745
756
746 changeset: 3:eebf5a27f8ca
757 changeset: 3:eebf5a27f8ca
747 user: test
758 user: test
748 date: Thu Jan 01 00:00:00 1970 +0000
759 date: Thu Jan 01 00:00:00 1970 +0000
749 summary: 0.3
760 summary: 0.3
750
761
751 $ cd ..
762 $ cd ..
752
763
753 #endif
764 #endif
754
765
755 test for 540d1059c802
766 test for 540d1059c802
756
767
757 $ hg init orig
768 $ hg init orig
758 $ cd orig
769 $ cd orig
759 $ echo foo > foo
770 $ echo foo > foo
760 $ hg add foo
771 $ hg add foo
761 $ hg ci -m 'add foo'
772 $ hg ci -m 'add foo'
762
773
763 $ hg clone . ../copy
774 $ hg clone . ../copy
764 updating to branch default
775 updating to branch default
765 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
776 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
766 $ hg tag foo
777 $ hg tag foo
767
778
768 $ cd ../copy
779 $ cd ../copy
769 $ echo >> foo
780 $ echo >> foo
770 $ hg ci -m 'change foo'
781 $ hg ci -m 'change foo'
771 $ hg bundle ../bundle.hg ../orig
782 $ hg bundle ../bundle.hg ../orig
772 searching for changes
783 searching for changes
773 1 changesets found
784 1 changesets found
774
785
775 $ cd ..
786 $ cd ..
776
787
777 #if repobundlerepo
788 #if repobundlerepo
778 $ cd orig
789 $ cd orig
779 $ hg incoming ../bundle.hg
790 $ hg incoming ../bundle.hg
780 comparing with ../bundle.hg
791 comparing with ../bundle.hg
781 searching for changes
792 searching for changes
782 changeset: 2:ed1b79f46b9a
793 changeset: 2:ed1b79f46b9a
783 tag: tip
794 tag: tip
784 parent: 0:bbd179dfa0a7
795 parent: 0:bbd179dfa0a7
785 user: test
796 user: test
786 date: Thu Jan 01 00:00:00 1970 +0000
797 date: Thu Jan 01 00:00:00 1970 +0000
787 summary: change foo
798 summary: change foo
788
799
789 $ cd ..
800 $ cd ..
790
801
791 test bundle with # in the filename (issue2154):
802 test bundle with # in the filename (issue2154):
792
803
793 $ cp bundle.hg 'test#bundle.hg'
804 $ cp bundle.hg 'test#bundle.hg'
794 $ cd orig
805 $ cd orig
795 $ hg incoming '../test#bundle.hg'
806 $ hg incoming '../test#bundle.hg'
796 comparing with ../test
807 comparing with ../test
797 abort: unknown revision 'bundle.hg'
808 abort: unknown revision 'bundle.hg'
798 [10]
809 [10]
799
810
800 note that percent encoding is not handled:
811 note that percent encoding is not handled:
801
812
802 $ hg incoming ../test%23bundle.hg
813 $ hg incoming ../test%23bundle.hg
803 abort: repository ../test%23bundle.hg not found
814 abort: repository ../test%23bundle.hg not found
804 [255]
815 [255]
805 $ cd ..
816 $ cd ..
806
817
807 #endif
818 #endif
808
819
809 test to bundle revisions on the newly created branch (issue3828):
820 test to bundle revisions on the newly created branch (issue3828):
810
821
811 $ hg -q clone -U test test-clone
822 $ hg -q clone -U test test-clone
812 $ cd test
823 $ cd test
813
824
814 $ hg -q branch foo
825 $ hg -q branch foo
815 $ hg commit -m "create foo branch"
826 $ hg commit -m "create foo branch"
816 $ hg -q outgoing ../test-clone
827 $ hg -q outgoing ../test-clone
817 9:b4f5acb1ee27
828 9:b4f5acb1ee27
818 $ hg -q bundle --branch foo foo.hg ../test-clone
829 $ hg -q bundle --branch foo foo.hg ../test-clone
819 #if repobundlerepo
830 #if repobundlerepo
820 $ hg -R foo.hg -q log -r "bundle()"
831 $ hg -R foo.hg -q log -r "bundle()"
821 9:b4f5acb1ee27
832 9:b4f5acb1ee27
822 #endif
833 #endif
823
834
824 $ cd ..
835 $ cd ..
825
836
826 test for https://bz.mercurial-scm.org/1144
837 test for https://bz.mercurial-scm.org/1144
827
838
828 test that verify bundle does not traceback
839 test that verify bundle does not traceback
829
840
830 partial history bundle, fails w/ unknown parent
841 partial history bundle, fails w/ unknown parent
831
842
832 $ hg -R bundle.hg verify
843 $ hg -R bundle.hg verify
833 abort: 00changelog@bbd179dfa0a71671c253b3ae0aa1513b60d199fa: unknown parent
844 abort: 00changelog@bbd179dfa0a71671c253b3ae0aa1513b60d199fa: unknown parent
834 [50]
845 [50]
835
846
836 full history bundle, refuses to verify non-local repo
847 full history bundle, refuses to verify non-local repo
837
848
838 #if repobundlerepo
849 #if repobundlerepo
839 $ hg -R all.hg verify
850 $ hg -R all.hg verify
840 abort: cannot verify bundle or remote repos
851 abort: cannot verify bundle or remote repos
841 [255]
852 [255]
842 #endif
853 #endif
843
854
844 but, regular verify must continue to work
855 but, regular verify must continue to work
845
856
846 $ hg -R orig verify -q
857 $ hg -R orig verify -q
847
858
848 #if repobundlerepo
859 #if repobundlerepo
849 diff against bundle
860 diff against bundle
850
861
851 $ hg init b
862 $ hg init b
852 $ cd b
863 $ cd b
853 $ hg -R ../all.hg diff -r tip
864 $ hg -R ../all.hg diff -r tip
854 diff -r aa35859c02ea anotherfile
865 diff -r aa35859c02ea anotherfile
855 --- a/anotherfile Thu Jan 01 00:00:00 1970 +0000
866 --- a/anotherfile Thu Jan 01 00:00:00 1970 +0000
856 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000
867 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000
857 @@ -1,4 +0,0 @@
868 @@ -1,4 +0,0 @@
858 -0
869 -0
859 -1
870 -1
860 -2
871 -2
861 -3
872 -3
862 $ cd ..
873 $ cd ..
863 #endif
874 #endif
864
875
865 bundle single branch
876 bundle single branch
866
877
867 $ hg init branchy
878 $ hg init branchy
868 $ cd branchy
879 $ cd branchy
869 $ echo a >a
880 $ echo a >a
870 $ echo x >x
881 $ echo x >x
871 $ hg ci -Ama
882 $ hg ci -Ama
872 adding a
883 adding a
873 adding x
884 adding x
874 $ echo c >c
885 $ echo c >c
875 $ echo xx >x
886 $ echo xx >x
876 $ hg ci -Amc
887 $ hg ci -Amc
877 adding c
888 adding c
878 $ echo c1 >c1
889 $ echo c1 >c1
879 $ hg ci -Amc1
890 $ hg ci -Amc1
880 adding c1
891 adding c1
881 $ hg up 0
892 $ hg up 0
882 1 files updated, 0 files merged, 2 files removed, 0 files unresolved
893 1 files updated, 0 files merged, 2 files removed, 0 files unresolved
883 $ echo b >b
894 $ echo b >b
884 $ hg ci -Amb
895 $ hg ci -Amb
885 adding b
896 adding b
886 created new head
897 created new head
887 $ echo b1 >b1
898 $ echo b1 >b1
888 $ echo xx >x
899 $ echo xx >x
889 $ hg ci -Amb1
900 $ hg ci -Amb1
890 adding b1
901 adding b1
891 $ hg clone -q -r2 . part
902 $ hg clone -q -r2 . part
892
903
893 == bundling via incoming
904 == bundling via incoming
894
905
895 $ hg in -R part --bundle incoming.hg --template "{node}\n" .
906 $ hg in -R part --bundle incoming.hg --template "{node}\n" .
896 comparing with .
907 comparing with .
897 searching for changes
908 searching for changes
898 1a38c1b849e8b70c756d2d80b0b9a3ac0b7ea11a
909 1a38c1b849e8b70c756d2d80b0b9a3ac0b7ea11a
899 057f4db07f61970e1c11e83be79e9d08adc4dc31
910 057f4db07f61970e1c11e83be79e9d08adc4dc31
900
911
901 == bundling
912 == bundling
902
913
903 $ hg bundle bundle.hg part --debug --config progress.debug=true
914 $ hg bundle bundle.hg part --debug --config progress.debug=true
904 query 1; heads
915 query 1; heads
905 searching for changes
916 searching for changes
906 all remote heads known locally
917 all remote heads known locally
907 2 changesets found
918 2 changesets found
908 list of changesets:
919 list of changesets:
909 1a38c1b849e8b70c756d2d80b0b9a3ac0b7ea11a
920 1a38c1b849e8b70c756d2d80b0b9a3ac0b7ea11a
910 057f4db07f61970e1c11e83be79e9d08adc4dc31
921 057f4db07f61970e1c11e83be79e9d08adc4dc31
911 bundle2-output-bundle: "HG20", (1 params) 2 parts total
922 bundle2-output-bundle: "HG20", (1 params) 2 parts total
912 bundle2-output-part: "changegroup" (params: 1 mandatory 1 advisory) streamed payload
923 bundle2-output-part: "changegroup" (params: 1 mandatory 1 advisory) streamed payload
913 changesets: 1/2 chunks (50.00%)
924 changesets: 1/2 chunks (50.00%)
914 changesets: 2/2 chunks (100.00%)
925 changesets: 2/2 chunks (100.00%)
915 manifests: 1/2 chunks (50.00%)
926 manifests: 1/2 chunks (50.00%)
916 manifests: 2/2 chunks (100.00%)
927 manifests: 2/2 chunks (100.00%)
917 files: b 1/3 files (33.33%)
928 files: b 1/3 files (33.33%)
918 files: b1 2/3 files (66.67%)
929 files: b1 2/3 files (66.67%)
919 files: x 3/3 files (100.00%)
930 files: x 3/3 files (100.00%)
920 bundle2-output-part: "cache:rev-branch-cache" (advisory) streamed payload
931 bundle2-output-part: "cache:rev-branch-cache" (advisory) streamed payload
921
932
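For readers who want to look at the two bundle2 parts listed in the --debug output above, a small sketch using mercurial's own reader; the attribute access is an assumption about internal API, not a documented interface:

from mercurial import exchange, ui as uimod

ui = uimod.ui.load()
with open('bundle.hg', 'rb') as fh:
    gen = exchange.readbundle(ui, fh, b'bundle.hg')
    # For an HG20 bundle this walks the parts listed above
    # (changegroup, cache:rev-branch-cache).
    for part in gen.iterparts():
        ui.write(b'part: %s\n' % part.type)
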
922 #if repobundlerepo
933 #if repobundlerepo
923 == Test for issue3441
934 == Test for issue3441
924
935
925 $ hg clone -q -r0 . part2
936 $ hg clone -q -r0 . part2
926 $ hg -q -R part2 pull bundle.hg
937 $ hg -q -R part2 pull bundle.hg
927 $ hg -R part2 verify -q
938 $ hg -R part2 verify -q
928 #endif
939 #endif
929
940
930 == Test bundling no commits
941 == Test bundling no commits
931
942
932 $ hg bundle -r 'public()' no-output.hg
943 $ hg bundle -r 'public()' no-output.hg
933 abort: no commits to bundle
944 abort: no commits to bundle
934 [10]
945 [10]

$ cd ..

When the user merges to a revision existing only in the bundle,
it should show a warning that the second parent of the working
directory does not exist

$ hg init update2bundled
$ cd update2bundled
$ cat <<EOF >> .hg/hgrc
> [extensions]
> strip =
> EOF
$ echo "aaa" >> a
$ hg commit -A -m 0
adding a
$ echo "bbb" >> b
$ hg commit -A -m 1
adding b
$ echo "ccc" >> c
$ hg commit -A -m 2
adding c
$ hg update -r 1
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ echo "ddd" >> d
$ hg commit -A -m 3
adding d
created new head
$ hg update -r 2
1 files updated, 0 files merged, 1 files removed, 0 files unresolved
$ hg log -G
o changeset: 3:8bd3e1f196af
| tag: tip
| parent: 1:a01eca7af26d
| user: test
| date: Thu Jan 01 00:00:00 1970 +0000
| summary: 3
|
| @ changeset: 2:4652c276ac4f
|/ user: test
| date: Thu Jan 01 00:00:00 1970 +0000
| summary: 2
|
o changeset: 1:a01eca7af26d
| user: test
| date: Thu Jan 01 00:00:00 1970 +0000
| summary: 1
|
o changeset: 0:4fe08cd4693e
user: test
date: Thu Jan 01 00:00:00 1970 +0000
summary: 0


#if repobundlerepo
$ hg bundle --base 1 -r 3 ../update2bundled.hg
1 changesets found
$ hg strip -r 3
saved backup bundle to $TESTTMP/update2bundled/.hg/strip-backup/8bd3e1f196af-017e56d8-backup.hg
$ hg merge -R ../update2bundled.hg -r 3
setting parent to node 8bd3e1f196af289b2b121be08031e76d7ae92098 that only exists in the bundle
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
(branch merge, don't forget to commit)

When the user updates to a revision existing only in the bundle,
it should show a warning

$ hg update -R ../update2bundled.hg --clean -r 3
setting parent to node 8bd3e1f196af289b2b121be08031e76d7ae92098 that only exists in the bundle
1 files updated, 0 files merged, 1 files removed, 0 files unresolved

When the user updates to a revision existing in the local repository,
the warning shouldn't be emitted

$ hg update -R ../update2bundled.hg -r 0
0 files updated, 0 files merged, 2 files removed, 0 files unresolved
#endif

Test the option that creates a slim bundle

$ hg bundle -a --config devel.bundle.delta=p1 ./slim.hg
3 changesets found

Test the option that creates a full, no-delta bundle
$ hg bundle -a --config devel.bundle.delta=full ./full.hg
3 changesets found
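(As the setting names suggest, devel.bundle.delta=p1 stores each revision as a delta against its first parent, while devel.bundle.delta=full stores every revision as a full snapshot; both selections cover the same three changesets.)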


Test the debug statistic when building a bundle
-----------------------------------------------

$ hg bundle -a ./default.hg --config debug.bundling-stats=yes
3 changesets found
DEBUG-BUNDLING: revisions: 9
DEBUG-BUNDLING: changelog: 3
DEBUG-BUNDLING: manifest: 3
DEBUG-BUNDLING: files: 3 (for 3 revlogs)
DEBUG-BUNDLING: deltas:
DEBUG-BUNDLING: from-storage: 2 (100% of available 2)
DEBUG-BUNDLING: computed: 7
DEBUG-BUNDLING: full: 7 (100% of native 7)
DEBUG-BUNDLING: changelog: 3 (100% of native 3)
DEBUG-BUNDLING: manifests: 1 (100% of native 1)
DEBUG-BUNDLING: files: 3 (100% of native 3)

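(The nine revisions above break down into three changelog, three manifest, and three filelog revisions; two deltas could be reused straight from storage and the remaining seven were recomputed, all of them ending up as full snapshots.)
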
Test the debug output when applying delta
-----------------------------------------

$ hg init foo
$ hg -R foo unbundle ./slim.hg \
> --config debug.revlog.debug-delta=yes \
> --config storage.revlog.reuse-external-delta=no \
> --config storage.revlog.reuse-external-delta-parent=no
adding changesets
DBG-DELTAS: CHANGELOG: rev=0: delta-base=0 is-cached=1 - search-rounds=0 try-count=0 - delta-type=full snap-depth=0 - p1-chain-length=-1 p2-chain-length=-1 - duration=* (glob)
DBG-DELTAS: CHANGELOG: rev=1: delta-base=1 is-cached=1 - search-rounds=0 try-count=0 - delta-type=full snap-depth=0 - p1-chain-length=0 p2-chain-length=-1 - duration=* (glob)
DBG-DELTAS: CHANGELOG: rev=2: delta-base=2 is-cached=1 - search-rounds=0 try-count=0 - delta-type=full snap-depth=0 - p1-chain-length=0 p2-chain-length=-1 - duration=* (glob)
adding manifests
DBG-DELTAS: MANIFESTLOG: rev=0: delta-base=0 is-cached=1 - search-rounds=0 try-count=0 - delta-type=full snap-depth=0 - p1-chain-length=-1 p2-chain-length=-1 - duration=* (glob)
DBG-DELTAS: MANIFESTLOG: rev=1: delta-base=0 is-cached=1 - search-rounds=1 try-count=1 - delta-type=delta snap-depth=0 - p1-chain-length=0 p2-chain-length=-1 - duration=* (glob)
DBG-DELTAS: MANIFESTLOG: rev=2: delta-base=1 is-cached=1 - search-rounds=1 try-count=1 - delta-type=delta snap-depth=0 - p1-chain-length=1 p2-chain-length=-1 - duration=* (glob)
adding file changes
DBG-DELTAS: FILELOG:a: rev=0: delta-base=0 is-cached=1 - search-rounds=0 try-count=0 - delta-type=full snap-depth=0 - p1-chain-length=-1 p2-chain-length=-1 - duration=* (glob)
DBG-DELTAS: FILELOG:b: rev=0: delta-base=0 is-cached=1 - search-rounds=0 try-count=0 - delta-type=full snap-depth=0 - p1-chain-length=-1 p2-chain-length=-1 - duration=* (glob)
DBG-DELTAS: FILELOG:c: rev=0: delta-base=0 is-cached=1 - search-rounds=0 try-count=0 - delta-type=full snap-depth=0 - p1-chain-length=-1 p2-chain-length=-1 - duration=* (glob)
added 3 changesets with 3 changes to 3 files
new changesets 4fe08cd4693e:4652c276ac4f (3 drafts)
(run 'hg update' to get a working copy)
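(Roughly speaking, each DBG-DELTAS line reports, for one stored revision, which delta base was chosen, whether a pre-computed delta was available (is-cached), and whether the revision ended up stored as a delta or a full snapshot; search counts and durations are matched loosely via the (glob) markers.)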


Test the debug statistic when applying a bundle
-----------------------------------------------

$ hg init bar
$ hg -R bar unbundle ./default.hg --config debug.unbundling-stats=yes
adding changesets
adding manifests
adding file changes
DEBUG-UNBUNDLING: revisions: 9
DEBUG-UNBUNDLING: changelog: 3 ( 33%)
DEBUG-UNBUNDLING: manifests: 3 ( 33%)
DEBUG-UNBUNDLING: files: 3 ( 33%)
DEBUG-UNBUNDLING: total-time: ?????????????? seconds (glob)
DEBUG-UNBUNDLING: changelog: ?????????????? seconds (???%) (glob)
DEBUG-UNBUNDLING: manifests: ?????????????? seconds (???%) (glob)
DEBUG-UNBUNDLING: files: ?????????????? seconds (???%) (glob)
DEBUG-UNBUNDLING: type-count:
DEBUG-UNBUNDLING: changelog:
DEBUG-UNBUNDLING: full: 3
DEBUG-UNBUNDLING: cached: 3 (100%)
DEBUG-UNBUNDLING: manifests:
DEBUG-UNBUNDLING: full: 1
DEBUG-UNBUNDLING: cached: 1 (100%)
DEBUG-UNBUNDLING: delta: 2
DEBUG-UNBUNDLING: cached: 2 (100%)
DEBUG-UNBUNDLING: files:
DEBUG-UNBUNDLING: full: 3
DEBUG-UNBUNDLING: cached: 3 (100%)
DEBUG-UNBUNDLING: type-time:
DEBUG-UNBUNDLING: changelog:
DEBUG-UNBUNDLING: full: ?????????????? seconds (???% of total) (glob)
DEBUG-UNBUNDLING: cached: ?????????????? seconds (???% of total) (glob)
DEBUG-UNBUNDLING: manifests:
DEBUG-UNBUNDLING: full: ?????????????? seconds (???% of total) (glob)
DEBUG-UNBUNDLING: cached: ?????????????? seconds (???% of total) (glob)
DEBUG-UNBUNDLING: delta: ?????????????? seconds (???% of total) (glob)
DEBUG-UNBUNDLING: cached: ?????????????? seconds (???% of total) (glob)
DEBUG-UNBUNDLING: files:
DEBUG-UNBUNDLING: full: ?????????????? seconds (???% of total) (glob)
DEBUG-UNBUNDLING: cached: ?????????????? seconds (???% of total) (glob)
added 3 changesets with 3 changes to 3 files
new changesets 4fe08cd4693e:4652c276ac4f (3 drafts)
(run 'hg update' to get a working copy)
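(The unbundling statistics mirror the bundling ones: per revlog type they count how many revisions arrived as full texts versus deltas and how many could be applied directly from the data cached in the bundle, plus rough timings matched with globs.)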
@@ -1,1134 +1,1136 @@
Test exchange of common information using bundle2


$ getmainid() {
> hg -R main log --template '{node}\n' --rev "$1"
> }

enable obsolescence

$ cp $HGRCPATH $TESTTMP/hgrc.orig
$ cat > $TESTTMP/bundle2-pushkey-hook.sh << EOF
> echo pushkey: lock state after \"\$HG_NAMESPACE\"
> hg debuglock
> EOF

$ cat >> $HGRCPATH << EOF
> [experimental]
> evolution.createmarkers=True
> evolution.exchange=True
> bundle2-output-capture=True
> [command-templates]
> log={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline}
> [web]
> push_ssl = false
> allow_push = *
> [phases]
> publish=False
> [hooks]
> pretxnclose.tip = hg log -r tip -T "pre-close-tip:{node|short} {phase} {bookmarks}\n"
> txnclose.tip = hg log -r tip -T "postclose-tip:{node|short} {phase} {bookmarks}\n"
> txnclose.env = sh -c "HG_LOCAL= printenv.py txnclose"
> pushkey= sh "$TESTTMP/bundle2-pushkey-hook.sh"
> EOF
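
(The pretxnclose/txnclose hooks configured above print the repository tip before and after every transaction, and the pushkey hook dumps the lock state; this is why each command below is followed by pre-close-tip/postclose-tip and txnclose hook lines.)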

The extension requires a repo (currently unused)

$ hg init main
$ cd main
$ touch a
$ hg add a
$ hg commit -m 'a'
pre-close-tip:3903775176ed draft
postclose-tip:3903775176ed draft
txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_PHASES_MOVED=1 HG_TXNID=TXN:$ID$ HG_TXNNAME=commit

$ hg unbundle $TESTDIR/bundles/rebase.hg
adding changesets
adding manifests
adding file changes
pre-close-tip:02de42196ebe draft
added 8 changesets with 7 changes to 7 files (+3 heads)
new changesets cd010b8cd998:02de42196ebe (8 drafts)
postclose-tip:02de42196ebe draft
txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_NODE=cd010b8cd998f3981a5a8115f94f8da4ab506089 HG_NODE_LAST=02de42196ebee42ef284b6780a87cdc96e8eaab6 HG_PHASES_MOVED=1 HG_SOURCE=unbundle HG_TXNID=TXN:$ID$ HG_TXNNAME=unbundle
bundle:*/tests/bundles/rebase.hg HG_URL=bundle:*/tests/bundles/rebase.hg (glob)
(run 'hg heads' to see heads, 'hg merge' to merge)

$ cd ..

Real world exchange
=====================

Add more obsolescence information

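(hg debugobsolete PREDECESSOR SUCCESSOR records a marker stating that the first node was rewritten into the second; the 1111... and 2222... predecessors used below are placeholder hashes that do not correspond to real changesets.)
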
$ hg -R main debugobsolete -d '0 0' 1111111111111111111111111111111111111111 `getmainid 9520eea781bc`
pre-close-tip:02de42196ebe draft
1 new obsolescence markers
postclose-tip:02de42196ebe draft
txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:$ID$ HG_TXNNAME=debugobsolete
$ hg -R main debugobsolete -d '0 0' 2222222222222222222222222222222222222222 `getmainid 24b6387c8c8c`
pre-close-tip:02de42196ebe draft
1 new obsolescence markers
postclose-tip:02de42196ebe draft
txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:$ID$ HG_TXNNAME=debugobsolete

clone --pull

$ hg -R main phase --public cd010b8cd998
pre-close-tip:02de42196ebe draft
postclose-tip:02de42196ebe draft
txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_PHASES_MOVED=1 HG_TXNID=TXN:$ID$ HG_TXNNAME=phase
$ hg clone main other --pull --rev 9520eea781bc
adding changesets
adding manifests
adding file changes
pre-close-tip:9520eea781bc draft
added 2 changesets with 2 changes to 2 files
1 new obsolescence markers
new changesets cd010b8cd998:9520eea781bc (1 drafts)
postclose-tip:9520eea781bc draft
txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_NEW_OBSMARKERS=1 HG_NODE=cd010b8cd998f3981a5a8115f94f8da4ab506089 HG_NODE_LAST=9520eea781bcca16c1e15acc0ba14335a0e8e5ba HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_TXNNAME=pull
file:/*/$TESTTMP/main HG_URL=file:$TESTTMP/main (glob)
updating to branch default
2 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg -R other log -G
@ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
|
o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A

$ hg -R other debugobsolete
1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}

pull

$ hg -R main phase --public 9520eea781bc
pre-close-tip:02de42196ebe draft
postclose-tip:02de42196ebe draft
txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_PHASES_MOVED=1 HG_TXNID=TXN:$ID$ HG_TXNNAME=phase
$ hg -R other pull -r 24b6387c8c8c
pulling from $TESTTMP/main
searching for changes
adding changesets
adding manifests
adding file changes
pre-close-tip:24b6387c8c8c draft
added 1 changesets with 1 changes to 1 files (+1 heads)
1 new obsolescence markers
new changesets 24b6387c8c8c (1 drafts)
postclose-tip:24b6387c8c8c draft
txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_NEW_OBSMARKERS=1 HG_NODE=24b6387c8c8cae37178880f3fa95ded3cb1cf785 HG_NODE_LAST=24b6387c8c8cae37178880f3fa95ded3cb1cf785 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_TXNNAME=pull
file:/*/$TESTTMP/main HG_URL=file:$TESTTMP/main (glob)
(run 'hg heads' to see heads, 'hg merge' to merge)
$ hg -R other log -G
o 2:24b6387c8c8c draft Nicolas Dumazet <nicdumz.commits@gmail.com> F
|
| @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
|/
o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A

$ hg -R other debugobsolete
1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}

pull empty (with phase movement)

$ hg -R main phase --public 24b6387c8c8c
pre-close-tip:02de42196ebe draft
postclose-tip:02de42196ebe draft
txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_PHASES_MOVED=1 HG_TXNID=TXN:$ID$ HG_TXNNAME=phase
$ hg -R other pull -r 24b6387c8c8c
pulling from $TESTTMP/main
no changes found
pre-close-tip:24b6387c8c8c public
1 local changesets published
postclose-tip:24b6387c8c8c public
txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_NEW_OBSMARKERS=0 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_TXNNAME=pull
file:/*/$TESTTMP/main HG_URL=file:$TESTTMP/main (glob)
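(Even though no changesets were found, the pull still exchanged phase information: 24b6387c8c8c had been published in main, so the local copy is moved to public, as reported by "1 local changesets published".)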
$ hg -R other log -G
o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
|
| @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
|/
o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A

$ hg -R other debugobsolete
1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}

pull empty

$ hg -R other pull -r 24b6387c8c8c
pulling from $TESTTMP/main
no changes found
pre-close-tip:24b6387c8c8c public
postclose-tip:24b6387c8c8c public
txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_NEW_OBSMARKERS=0 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_TXNNAME=pull
file:/*/$TESTTMP/main HG_URL=file:$TESTTMP/main (glob)
$ hg -R other log -G
o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
|
| @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
|/
o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A

$ hg -R other debugobsolete
1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}

add extra data to test their exchange during push

$ hg -R main bookmark --rev eea13746799a book_eea1
pre-close-tip:02de42196ebe draft
postclose-tip:02de42196ebe draft
txnclose hook: HG_BOOKMARK_MOVED=1 HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_TXNID=TXN:$ID$ HG_TXNNAME=bookmark
$ hg -R main debugobsolete -d '0 0' 3333333333333333333333333333333333333333 `getmainid eea13746799a`
pre-close-tip:02de42196ebe draft
1 new obsolescence markers
postclose-tip:02de42196ebe draft
txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:$ID$ HG_TXNNAME=debugobsolete
$ hg -R main bookmark --rev 02de42196ebe book_02de
pre-close-tip:02de42196ebe draft book_02de
postclose-tip:02de42196ebe draft book_02de
txnclose hook: HG_BOOKMARK_MOVED=1 HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_TXNID=TXN:$ID$ HG_TXNNAME=bookmark
$ hg -R main debugobsolete -d '0 0' 4444444444444444444444444444444444444444 `getmainid 02de42196ebe`
pre-close-tip:02de42196ebe draft book_02de
1 new obsolescence markers
postclose-tip:02de42196ebe draft book_02de
txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:$ID$ HG_TXNNAME=debugobsolete
$ hg -R main bookmark --rev 42ccdea3bb16 book_42cc
pre-close-tip:02de42196ebe draft book_02de
postclose-tip:02de42196ebe draft book_02de
txnclose hook: HG_BOOKMARK_MOVED=1 HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_TXNID=TXN:$ID$ HG_TXNNAME=bookmark
$ hg -R main debugobsolete -d '0 0' 5555555555555555555555555555555555555555 `getmainid 42ccdea3bb16`
pre-close-tip:02de42196ebe draft book_02de
1 new obsolescence markers
postclose-tip:02de42196ebe draft book_02de
txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:$ID$ HG_TXNNAME=debugobsolete
$ hg -R main bookmark --rev 5fddd98957c8 book_5fdd
pre-close-tip:02de42196ebe draft book_02de
postclose-tip:02de42196ebe draft book_02de
txnclose hook: HG_BOOKMARK_MOVED=1 HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_TXNID=TXN:$ID$ HG_TXNNAME=bookmark
$ hg -R main debugobsolete -d '0 0' 6666666666666666666666666666666666666666 `getmainid 5fddd98957c8`
pre-close-tip:02de42196ebe draft book_02de
1 new obsolescence markers
postclose-tip:02de42196ebe draft book_02de
txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:$ID$ HG_TXNNAME=debugobsolete
$ hg -R main bookmark --rev 32af7686d403 book_32af
pre-close-tip:02de42196ebe draft book_02de
postclose-tip:02de42196ebe draft book_02de
txnclose hook: HG_BOOKMARK_MOVED=1 HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_TXNID=TXN:$ID$ HG_TXNNAME=bookmark
$ hg -R main debugobsolete -d '0 0' 7777777777777777777777777777777777777777 `getmainid 32af7686d403`
pre-close-tip:02de42196ebe draft book_02de
1 new obsolescence markers
postclose-tip:02de42196ebe draft book_02de
txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:$ID$ HG_TXNNAME=debugobsolete

$ hg -R other bookmark --rev cd010b8cd998 book_eea1
pre-close-tip:24b6387c8c8c public
postclose-tip:24b6387c8c8c public
txnclose hook: HG_BOOKMARK_MOVED=1 HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_TXNID=TXN:$ID$ HG_TXNNAME=bookmark
$ hg -R other bookmark --rev cd010b8cd998 book_02de
pre-close-tip:24b6387c8c8c public
postclose-tip:24b6387c8c8c public
txnclose hook: HG_BOOKMARK_MOVED=1 HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_TXNID=TXN:$ID$ HG_TXNNAME=bookmark
$ hg -R other bookmark --rev cd010b8cd998 book_42cc
pre-close-tip:24b6387c8c8c public
postclose-tip:24b6387c8c8c public
txnclose hook: HG_BOOKMARK_MOVED=1 HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_TXNID=TXN:$ID$ HG_TXNNAME=bookmark
$ hg -R other bookmark --rev cd010b8cd998 book_5fdd
pre-close-tip:24b6387c8c8c public
postclose-tip:24b6387c8c8c public
txnclose hook: HG_BOOKMARK_MOVED=1 HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_TXNID=TXN:$ID$ HG_TXNNAME=bookmark
$ hg -R other bookmark --rev cd010b8cd998 book_32af
pre-close-tip:24b6387c8c8c public
postclose-tip:24b6387c8c8c public
txnclose hook: HG_BOOKMARK_MOVED=1 HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_TXNID=TXN:$ID$ HG_TXNNAME=bookmark

$ hg -R main phase --public eea13746799a
pre-close-tip:02de42196ebe draft book_02de
postclose-tip:02de42196ebe draft book_02de
txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_PHASES_MOVED=1 HG_TXNID=TXN:$ID$ HG_TXNNAME=phase

push
$ hg -R main push other --rev eea13746799a --bookmark book_eea1
pushing to other
searching for changes
remote: adding changesets
remote: adding manifests
remote: adding file changes
remote: pre-close-tip:eea13746799a public book_eea1
remote: added 1 changesets with 0 changes to 0 files (-1 heads)
remote: 1 new obsolescence markers
remote: pushkey: lock state after "bookmarks"
remote: lock: free
remote: wlock: free
remote: postclose-tip:eea13746799a public book_eea1
remote: txnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_NEW_OBSMARKERS=1 HG_NODE=eea13746799a9e0bfd88f29d3c2e9dc9389f524f HG_NODE_LAST=eea13746799a9e0bfd88f29d3c2e9dc9389f524f HG_PHASES_MOVED=1 HG_SOURCE=push HG_TXNID=TXN:$ID$ HG_TXNNAME=push HG_URL=file:$TESTTMP/other
updating bookmark book_eea1
pre-close-tip:02de42196ebe draft book_02de
postclose-tip:02de42196ebe draft book_02de
txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_SOURCE=push-response HG_TXNID=TXN:$ID$ HG_TXNNAME=push-response
file:/*/$TESTTMP/other HG_URL=file:$TESTTMP/other (glob)
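(For a push the transaction runs on the receiving side, so the hook output from the target repository comes back prefixed with "remote:", followed by the push-response transaction in the source repository.)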
$ hg -R other log -G
o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
|\
| o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
| |
@ | 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
|/
o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_02de book_32af book_42cc book_5fdd A

$ hg -R other debugobsolete
1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}

pull over ssh

$ hg -R other pull ssh://user@dummy/main -r 02de42196ebe --bookmark book_02de
pulling from ssh://user@dummy/main
searching for changes
adding changesets
adding manifests
adding file changes
updating bookmark book_02de
pre-close-tip:02de42196ebe draft book_02de
added 1 changesets with 1 changes to 1 files (+1 heads)
1 new obsolescence markers
new changesets 02de42196ebe (1 drafts)
postclose-tip:02de42196ebe draft book_02de
txnclose hook: HG_BOOKMARK_MOVED=1 HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_NEW_OBSMARKERS=1 HG_NODE=02de42196ebee42ef284b6780a87cdc96e8eaab6 HG_NODE_LAST=02de42196ebee42ef284b6780a87cdc96e8eaab6 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_TXNNAME=pull
ssh://user@dummy/main HG_URL=ssh://user@dummy/main
(run 'hg heads' to see heads, 'hg merge' to merge)
$ hg -R other debugobsolete
1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}

pull over http

$ hg serve -R main -p $HGPORT -d --pid-file=main.pid -E main-error.log
$ cat main.pid >> $DAEMON_PIDS

$ hg -R other pull http://localhost:$HGPORT/ -r 42ccdea3bb16 --bookmark book_42cc
pulling from http://localhost:$HGPORT/
searching for changes
adding changesets
adding manifests
adding file changes
updating bookmark book_42cc
pre-close-tip:42ccdea3bb16 draft book_42cc
added 1 changesets with 1 changes to 1 files (+1 heads)
1 new obsolescence markers
new changesets 42ccdea3bb16 (1 drafts)
postclose-tip:42ccdea3bb16 draft book_42cc
txnclose hook: HG_BOOKMARK_MOVED=1 HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_NEW_OBSMARKERS=1 HG_NODE=42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 HG_NODE_LAST=42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_TXNNAME=pull
http://localhost:$HGPORT/ HG_URL=http://localhost:$HGPORT/
(run 'hg heads .' to see heads, 'hg merge' to merge)
$ cat main-error.log
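(An empty main-error.log confirms that serving the pull over HTTP produced no server-side errors.)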
$ hg -R other debugobsolete
1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}

push over ssh

$ hg -R main push ssh://user@dummy/other -r 5fddd98957c8 --bookmark book_5fdd
pushing to ssh://user@dummy/other
searching for changes
remote: adding changesets
remote: adding manifests
remote: adding file changes
remote: pre-close-tip:5fddd98957c8 draft book_5fdd
remote: added 1 changesets with 1 changes to 1 files
remote: 1 new obsolescence markers
remote: pushkey: lock state after "bookmarks"
remote: lock: free
remote: wlock: free
remote: postclose-tip:5fddd98957c8 draft book_5fdd
remote: txnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_NEW_OBSMARKERS=1 HG_NODE=5fddd98957c8a54a4d436dfe1da9d87f21a1b97b HG_NODE_LAST=5fddd98957c8a54a4d436dfe1da9d87f21a1b97b HG_SOURCE=serve HG_TXNID=TXN:$ID$ HG_TXNNAME=serve HG_URL=remote:ssh:$LOCALIP
updating bookmark book_5fdd
pre-close-tip:02de42196ebe draft book_02de
postclose-tip:02de42196ebe draft book_02de
txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_SOURCE=push-response HG_TXNID=TXN:$ID$ HG_TXNNAME=push-response
ssh://user@dummy/other HG_URL=ssh://user@dummy/other
$ hg -R other log -G
o 6:5fddd98957c8 draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_5fdd C
|
o 5:42ccdea3bb16 draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_42cc B
|
| o 4:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_02de H
| |
| | o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
| |/|
| o | 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
|/ /
| @ 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
|/
o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_32af A

$ hg -R other debugobsolete
1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
6666666666666666666666666666666666666666 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}

push over http

$ hg serve -R other -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
$ cat other.pid >> $DAEMON_PIDS

$ hg -R main phase --public 32af7686d403
pre-close-tip:02de42196ebe draft book_02de
postclose-tip:02de42196ebe draft book_02de
txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_PHASES_MOVED=1 HG_TXNID=TXN:$ID$ HG_TXNNAME=phase
$ hg -R main push http://localhost:$HGPORT2/ -r 32af7686d403 --bookmark book_32af
pushing to http://localhost:$HGPORT2/
searching for changes
remote: adding changesets
remote: adding manifests
remote: adding file changes
remote: pre-close-tip:32af7686d403 public book_32af
remote: added 1 changesets with 1 changes to 1 files
remote: 1 new obsolescence markers
remote: pushkey: lock state after "bookmarks"
remote: lock: free
remote: wlock: free
remote: postclose-tip:32af7686d403 public book_32af
remote: txnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_NEW_OBSMARKERS=1 HG_NODE=32af7686d403cf45b5d95f2d70cebea587ac806a HG_NODE_LAST=32af7686d403cf45b5d95f2d70cebea587ac806a HG_PHASES_MOVED=1 HG_SOURCE=serve HG_TXNID=TXN:$ID$ HG_TXNNAME=serve HG_URL=remote:http:$LOCALIP: (glob)
updating bookmark book_32af
pre-close-tip:02de42196ebe draft book_02de
postclose-tip:02de42196ebe draft book_02de
txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_SOURCE=push-response HG_TXNID=TXN:$ID$ HG_TXNNAME=push-response
http://localhost:$HGPORT2/ HG_URL=http://localhost:$HGPORT2/
$ cat other-error.log

Check final content.

$ hg -R other log -G
o 7:32af7686d403 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_32af D
|
o 6:5fddd98957c8 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_5fdd C
|
o 5:42ccdea3bb16 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_42cc B
|
| o 4:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_02de H
| |
| | o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
| |/|
| o | 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
|/ /
| @ 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
|/
o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A

$ hg -R other debugobsolete
1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
433 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
433 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
434 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
434 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
435 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
435 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
436 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
436 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
437 6666666666666666666666666666666666666666 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
437 6666666666666666666666666666666666666666 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
438 7777777777777777777777777777777777777777 32af7686d403cf45b5d95f2d70cebea587ac806a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
438 7777777777777777777777777777777777777777 32af7686d403cf45b5d95f2d70cebea587ac806a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
439
439
440 (check that no 'pending' files remain)
440 (check that no 'pending' files remain)
441
441
442 $ ls -1 other/.hg/bookmarks*
442 $ ls -1 other/.hg/bookmarks*
443 other/.hg/bookmarks
443 other/.hg/bookmarks
444 $ ls -1 other/.hg/store/phaseroots*
444 $ ls -1 other/.hg/store/phaseroots*
445 other/.hg/store/phaseroots
445 other/.hg/store/phaseroots
446 $ ls -1 other/.hg/store/00changelog.i*
446 $ ls -1 other/.hg/store/00changelog.i*
447 other/.hg/store/00changelog.i
447 other/.hg/store/00changelog.i
448
448
449 Error Handling
449 Error Handling
450 ==============
450 ==============
451
451
452 Check that errors are properly returned to the client during push.
452 Check that errors are properly returned to the client during push.
453
453
454 Setting up
454 Setting up
455
455
456 $ cat > failpush.py << EOF
456 $ cat > failpush.py << EOF
457 > """A small extension that makes push fail when using bundle2
457 > """A small extension that makes push fail when using bundle2
458 >
458 >
459 > used to test error handling in bundle2
459 > used to test error handling in bundle2
460 > """
460 > """
461 >
461 >
462 > from mercurial import error
462 > from mercurial import error
463 > from mercurial import bundle2
463 > from mercurial import bundle2
464 > from mercurial import exchange
464 > from mercurial import exchange
465 > from mercurial import extensions
465 > from mercurial import extensions
466 > from mercurial import registrar
466 > from mercurial import registrar
467 > cmdtable = {}
467 > cmdtable = {}
468 > command = registrar.command(cmdtable)
468 > command = registrar.command(cmdtable)
469 >
469 >
470 > configtable = {}
470 > configtable = {}
471 > configitem = registrar.configitem(configtable)
471 > configitem = registrar.configitem(configtable)
472 > configitem(b'failpush', b'reason',
472 > configitem(b'failpush', b'reason',
473 > default=None,
473 > default=None,
474 > )
474 > )
475 >
475 >
476 > def _pushbundle2failpart(pushop, bundler):
476 > def _pushbundle2failpart(pushop, bundler):
477 > reason = pushop.ui.config(b'failpush', b'reason')
477 > reason = pushop.ui.config(b'failpush', b'reason')
478 > part = None
478 > part = None
479 > if reason == b'abort':
479 > if reason == b'abort':
480 > bundler.newpart(b'test:abort')
480 > bundler.newpart(b'test:abort')
481 > if reason == b'unknown':
481 > if reason == b'unknown':
482 > bundler.newpart(b'test:unknown')
482 > bundler.newpart(b'test:unknown')
483 > if reason == b'race':
483 > if reason == b'race':
484 > # 20 Bytes of crap
484 > # 20 Bytes of crap
485 > bundler.newpart(b'check:heads', data=b'01234567890123456789')
485 > bundler.newpart(b'check:heads', data=b'01234567890123456789')
486 >
486 >
487 > @bundle2.parthandler(b"test:abort")
487 > @bundle2.parthandler(b"test:abort")
488 > def handleabort(op, part):
488 > def handleabort(op, part):
489 > raise error.Abort(b'Abandon ship!', hint=b"don't panic")
489 > raise error.Abort(b'Abandon ship!', hint=b"don't panic")
490 >
490 >
491 > def uisetup(ui):
491 > def uisetup(ui):
492 > exchange.b2partsgenmapping[b'failpart'] = _pushbundle2failpart
492 > exchange.b2partsgenmapping[b'failpart'] = _pushbundle2failpart
493 > exchange.b2partsgenorder.insert(0, b'failpart')
493 > exchange.b2partsgenorder.insert(0, b'failpart')
494 >
494 >
495 > EOF
495 > EOF
496
496
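The b'check:heads' part built above for the 'race' case deliberately carries 20 bytes of
garbage: the receiving side splits that payload into 20-byte node IDs and compares them with
its current heads, so any mismatch is reported as a concurrent push. A rough sketch of that
comparison (illustrative only, invented name, not the actual bundle2 handler):

    def heads_part_matches(part_data, current_heads):
        # Split the payload into 20-byte binary node IDs, the same
        # granularity used by the b'check:heads' part above.
        advertised = [part_data[i:i + 20]
                      for i in range(0, len(part_data), 20)]
        # The push is only accepted while the advertised heads still match
        # the repository's heads; otherwise the client sees
        # "remote repository changed while pushing - please try again".
        return sorted(advertised) == sorted(current_heads)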
497 $ cd main
497 $ cd main
498 $ hg up tip
498 $ hg up tip
499 3 files updated, 0 files merged, 1 files removed, 0 files unresolved
499 3 files updated, 0 files merged, 1 files removed, 0 files unresolved
500 $ echo 'I' > I
500 $ echo 'I' > I
501 $ hg add I
501 $ hg add I
502 $ hg ci -m 'I'
502 $ hg ci -m 'I'
503 pre-close-tip:e7ec4e813ba6 draft
503 pre-close-tip:e7ec4e813ba6 draft
504 postclose-tip:e7ec4e813ba6 draft
504 postclose-tip:e7ec4e813ba6 draft
505 txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_TXNID=TXN:$ID$ HG_TXNNAME=commit
505 txnclose hook: HG_HOOKNAME=txnclose.env HG_HOOKTYPE=txnclose HG_TXNID=TXN:$ID$ HG_TXNNAME=commit
506 $ hg id
506 $ hg id
507 e7ec4e813ba6 tip
507 e7ec4e813ba6 tip
508 $ cd ..
508 $ cd ..
509
509
510 $ cat << EOF >> $HGRCPATH
510 $ cat << EOF >> $HGRCPATH
511 > [extensions]
511 > [extensions]
512 > failpush=$TESTTMP/failpush.py
512 > failpush=$TESTTMP/failpush.py
513 > EOF
513 > EOF
514
514
515 $ killdaemons.py
515 $ killdaemons.py
516 $ hg serve -R other -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
516 $ hg serve -R other -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
517 $ cat other.pid >> $DAEMON_PIDS
517 $ cat other.pid >> $DAEMON_PIDS
518
518
519 Doing the actual push: Abort error
519 Doing the actual push: Abort error
520
520
521 $ cat << EOF >> $HGRCPATH
521 $ cat << EOF >> $HGRCPATH
522 > [failpush]
522 > [failpush]
523 > reason = abort
523 > reason = abort
524 > EOF
524 > EOF
525
525
526 $ hg -R main push other -r e7ec4e813ba6
526 $ hg -R main push other -r e7ec4e813ba6
527 pushing to other
527 pushing to other
528 searching for changes
528 searching for changes
529 abort: Abandon ship!
529 abort: Abandon ship!
530 (don't panic)
530 (don't panic)
531 [255]
531 [255]
532
532
533 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
533 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
534 pushing to ssh://user@dummy/other
534 pushing to ssh://user@dummy/other
535 searching for changes
535 searching for changes
536 remote: Abandon ship!
536 remote: Abandon ship!
537 remote: (don't panic)
537 remote: (don't panic)
538 abort: push failed on remote
538 abort: push failed on remote
539 [100]
539 [100]
540
540
541 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
541 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
542 pushing to http://localhost:$HGPORT2/
542 pushing to http://localhost:$HGPORT2/
543 searching for changes
543 searching for changes
544 remote: Abandon ship!
544 remote: Abandon ship!
545 remote: (don't panic)
545 remote: (don't panic)
546 abort: push failed on remote
546 abort: push failed on remote
547 [100]
547 [100]
548
548
549
549
550 Doing the actual push: unknown mandatory parts
550 Doing the actual push: unknown mandatory parts
551
551
552 $ cat << EOF >> $HGRCPATH
552 $ cat << EOF >> $HGRCPATH
553 > [failpush]
553 > [failpush]
554 > reason = unknown
554 > reason = unknown
555 > EOF
555 > EOF
556
556
557 $ hg -R main push other -r e7ec4e813ba6
557 $ hg -R main push other -r e7ec4e813ba6
558 pushing to other
558 pushing to other
559 searching for changes
559 searching for changes
560 abort: missing support for test:unknown
560 abort: missing support for test:unknown
561 [100]
561 [100]
562
562
563 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
563 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
564 pushing to ssh://user@dummy/other
564 pushing to ssh://user@dummy/other
565 searching for changes
565 searching for changes
566 abort: missing support for test:unknown
566 abort: missing support for test:unknown
567 [100]
567 [100]
568
568
569 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
569 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
570 pushing to http://localhost:$HGPORT2/
570 pushing to http://localhost:$HGPORT2/
571 searching for changes
571 searching for changes
572 abort: missing support for test:unknown
572 abort: missing support for test:unknown
573 [100]
573 [100]
574
574
575 Doing the actual push: race
575 Doing the actual push: race
576
576
577 $ cat << EOF >> $HGRCPATH
577 $ cat << EOF >> $HGRCPATH
578 > [failpush]
578 > [failpush]
579 > reason = race
579 > reason = race
580 > EOF
580 > EOF
581
581
582 $ hg -R main push other -r e7ec4e813ba6
582 $ hg -R main push other -r e7ec4e813ba6
583 pushing to other
583 pushing to other
584 searching for changes
584 searching for changes
585 abort: push failed:
585 abort: push failed:
586 'remote repository changed while pushing - please try again'
586 'remote repository changed while pushing - please try again'
587 [255]
587 [255]
588
588
589 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
589 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
590 pushing to ssh://user@dummy/other
590 pushing to ssh://user@dummy/other
591 searching for changes
591 searching for changes
592 abort: push failed:
592 abort: push failed:
593 'remote repository changed while pushing - please try again'
593 'remote repository changed while pushing - please try again'
594 [255]
594 [255]
595
595
596 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
596 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
597 pushing to http://localhost:$HGPORT2/
597 pushing to http://localhost:$HGPORT2/
598 searching for changes
598 searching for changes
599 abort: push failed:
599 abort: push failed:
600 'remote repository changed while pushing - please try again'
600 'remote repository changed while pushing - please try again'
601 [255]
601 [255]
602
602
603 Doing the actual push: hook abort
603 Doing the actual push: hook abort
604
604
605 $ cat << EOF >> $HGRCPATH
605 $ cat << EOF >> $HGRCPATH
606 > [failpush]
606 > [failpush]
607 > reason =
607 > reason =
608 > [hooks]
608 > [hooks]
609 > pretxnclose.failpush = sh -c "echo 'You shall not pass!'; false"
609 > pretxnclose.failpush = sh -c "echo 'You shall not pass!'; false"
610 > txnabort.failpush = sh -c "echo 'Cleaning up the mess...'"
610 > txnabort.failpush = sh -c "echo 'Cleaning up the mess...'"
611 > EOF
611 > EOF
612
612
613 $ killdaemons.py
613 $ killdaemons.py
614 $ hg serve -R other -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
614 $ hg serve -R other -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
615 $ cat other.pid >> $DAEMON_PIDS
615 $ cat other.pid >> $DAEMON_PIDS
616
616
617 $ hg -R main push other -r e7ec4e813ba6
617 $ hg -R main push other -r e7ec4e813ba6
618 pushing to other
618 pushing to other
619 searching for changes
619 searching for changes
620 remote: adding changesets
620 remote: adding changesets
621 remote: adding manifests
621 remote: adding manifests
622 remote: adding file changes
622 remote: adding file changes
623 remote: pre-close-tip:e7ec4e813ba6 draft
623 remote: pre-close-tip:e7ec4e813ba6 draft
624 remote: You shall not pass!
624 remote: You shall not pass!
625 remote: transaction abort!
625 remote: transaction abort!
626 remote: Cleaning up the mess...
626 remote: Cleaning up the mess...
627 remote: rollback completed
627 remote: rollback completed
628 abort: pretxnclose.failpush hook exited with status 1
628 abort: pretxnclose.failpush hook exited with status 1
629 [40]
629 [40]
630
630
631 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
631 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
632 pushing to ssh://user@dummy/other
632 pushing to ssh://user@dummy/other
633 searching for changes
633 searching for changes
634 remote: adding changesets
634 remote: adding changesets
635 remote: adding manifests
635 remote: adding manifests
636 remote: adding file changes
636 remote: adding file changes
637 remote: pre-close-tip:e7ec4e813ba6 draft
637 remote: pre-close-tip:e7ec4e813ba6 draft
638 remote: You shall not pass!
638 remote: You shall not pass!
639 remote: transaction abort!
639 remote: transaction abort!
640 remote: Cleaning up the mess...
640 remote: Cleaning up the mess...
641 remote: rollback completed
641 remote: rollback completed
642 remote: pretxnclose.failpush hook exited with status 1
642 remote: pretxnclose.failpush hook exited with status 1
643 abort: push failed on remote
643 abort: push failed on remote
644 [100]
644 [100]
645
645
646 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
646 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
647 pushing to http://localhost:$HGPORT2/
647 pushing to http://localhost:$HGPORT2/
648 searching for changes
648 searching for changes
649 remote: adding changesets
649 remote: adding changesets
650 remote: adding manifests
650 remote: adding manifests
651 remote: adding file changes
651 remote: adding file changes
652 remote: pre-close-tip:e7ec4e813ba6 draft
652 remote: pre-close-tip:e7ec4e813ba6 draft
653 remote: You shall not pass!
653 remote: You shall not pass!
654 remote: transaction abort!
654 remote: transaction abort!
655 remote: Cleaning up the mess...
655 remote: Cleaning up the mess...
656 remote: rollback completed
656 remote: rollback completed
657 remote: pretxnclose.failpush hook exited with status 1
657 remote: pretxnclose.failpush hook exited with status 1
658 abort: push failed on remote
658 abort: push failed on remote
659 [100]
659 [100]
660
660
661 (check that no 'pending' files remain)
661 (check that no 'pending' files remain)
662
662
663 $ ls -1 other/.hg/bookmarks*
663 $ ls -1 other/.hg/bookmarks*
664 other/.hg/bookmarks
664 other/.hg/bookmarks
665 $ ls -1 other/.hg/store/phaseroots*
665 $ ls -1 other/.hg/store/phaseroots*
666 other/.hg/store/phaseroots
666 other/.hg/store/phaseroots
667 $ ls -1 other/.hg/store/00changelog.i*
667 $ ls -1 other/.hg/store/00changelog.i*
668 other/.hg/store/00changelog.i
668 other/.hg/store/00changelog.i
669
669
670 Check error from hook during the unbundling process itself
670 Check error from hook during the unbundling process itself
671
671
672 $ cat << EOF >> $HGRCPATH
672 $ cat << EOF >> $HGRCPATH
673 > pretxnchangegroup = sh -c "echo 'Fail early!'; false"
673 > pretxnchangegroup = sh -c "echo 'Fail early!'; false"
674 > EOF
674 > EOF
675 $ killdaemons.py # reload http config
675 $ killdaemons.py # reload http config
676 $ hg serve -R other -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
676 $ hg serve -R other -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
677 $ cat other.pid >> $DAEMON_PIDS
677 $ cat other.pid >> $DAEMON_PIDS
678
678
679 $ hg -R main push other -r e7ec4e813ba6
679 $ hg -R main push other -r e7ec4e813ba6
680 pushing to other
680 pushing to other
681 searching for changes
681 searching for changes
682 remote: adding changesets
682 remote: adding changesets
683 remote: adding manifests
683 remote: adding manifests
684 remote: adding file changes
684 remote: adding file changes
685 remote: Fail early!
685 remote: Fail early!
686 remote: transaction abort!
686 remote: transaction abort!
687 remote: Cleaning up the mess...
687 remote: Cleaning up the mess...
688 remote: rollback completed
688 remote: rollback completed
689 abort: pretxnchangegroup hook exited with status 1
689 abort: pretxnchangegroup hook exited with status 1
690 [40]
690 [40]
691 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
691 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
692 pushing to ssh://user@dummy/other
692 pushing to ssh://user@dummy/other
693 searching for changes
693 searching for changes
694 remote: adding changesets
694 remote: adding changesets
695 remote: adding manifests
695 remote: adding manifests
696 remote: adding file changes
696 remote: adding file changes
697 remote: Fail early!
697 remote: Fail early!
698 remote: transaction abort!
698 remote: transaction abort!
699 remote: Cleaning up the mess...
699 remote: Cleaning up the mess...
700 remote: rollback completed
700 remote: rollback completed
701 remote: pretxnchangegroup hook exited with status 1
701 remote: pretxnchangegroup hook exited with status 1
702 abort: push failed on remote
702 abort: push failed on remote
703 [100]
703 [100]
704 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
704 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
705 pushing to http://localhost:$HGPORT2/
705 pushing to http://localhost:$HGPORT2/
706 searching for changes
706 searching for changes
707 remote: adding changesets
707 remote: adding changesets
708 remote: adding manifests
708 remote: adding manifests
709 remote: adding file changes
709 remote: adding file changes
710 remote: Fail early!
710 remote: Fail early!
711 remote: transaction abort!
711 remote: transaction abort!
712 remote: Cleaning up the mess...
712 remote: Cleaning up the mess...
713 remote: rollback completed
713 remote: rollback completed
714 remote: pretxnchangegroup hook exited with status 1
714 remote: pretxnchangegroup hook exited with status 1
715 abort: push failed on remote
715 abort: push failed on remote
716 [100]
716 [100]
717
717
718 Check output capture control.
718 Check output capture control.
719
719
720 (should still be forced for http, disabled for local and ssh)
720 (should still be forced for http, disabled for local and ssh)
721
721
722 $ cat >> $HGRCPATH << EOF
722 $ cat >> $HGRCPATH << EOF
723 > [experimental]
723 > [experimental]
724 > bundle2-output-capture=False
724 > bundle2-output-capture=False
725 > EOF
725 > EOF
726
726
727 $ hg -R main push other -r e7ec4e813ba6
727 $ hg -R main push other -r e7ec4e813ba6
728 pushing to other
728 pushing to other
729 searching for changes
729 searching for changes
730 adding changesets
730 adding changesets
731 adding manifests
731 adding manifests
732 adding file changes
732 adding file changes
733 Fail early!
733 Fail early!
734 transaction abort!
734 transaction abort!
735 Cleaning up the mess...
735 Cleaning up the mess...
736 rollback completed
736 rollback completed
737 abort: pretxnchangegroup hook exited with status 1
737 abort: pretxnchangegroup hook exited with status 1
738 [40]
738 [40]
739 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
739 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
740 pushing to ssh://user@dummy/other
740 pushing to ssh://user@dummy/other
741 searching for changes
741 searching for changes
742 remote: adding changesets
742 remote: adding changesets
743 remote: adding manifests
743 remote: adding manifests
744 remote: adding file changes
744 remote: adding file changes
745 remote: Fail early!
745 remote: Fail early!
746 remote: transaction abort!
746 remote: transaction abort!
747 remote: Cleaning up the mess...
747 remote: Cleaning up the mess...
748 remote: rollback completed
748 remote: rollback completed
749 remote: pretxnchangegroup hook exited with status 1
749 remote: pretxnchangegroup hook exited with status 1
750 abort: push failed on remote
750 abort: push failed on remote
751 [100]
751 [100]
752 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
752 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
753 pushing to http://localhost:$HGPORT2/
753 pushing to http://localhost:$HGPORT2/
754 searching for changes
754 searching for changes
755 remote: adding changesets
755 remote: adding changesets
756 remote: adding manifests
756 remote: adding manifests
757 remote: adding file changes
757 remote: adding file changes
758 remote: Fail early!
758 remote: Fail early!
759 remote: transaction abort!
759 remote: transaction abort!
760 remote: Cleaning up the mess...
760 remote: Cleaning up the mess...
761 remote: rollback completed
761 remote: rollback completed
762 remote: pretxnchangegroup hook exited with status 1
762 remote: pretxnchangegroup hook exited with status 1
763 abort: push failed on remote
763 abort: push failed on remote
764 [100]
764 [100]
765
765
766 Check abort from mandatory pushkey
766 Check abort from mandatory pushkey
767
767
768 $ cat > mandatorypart.py << EOF
768 $ cat > mandatorypart.py << EOF
769 > from mercurial import exchange
769 > from mercurial import exchange
770 > from mercurial import pushkey
770 > from mercurial import pushkey
771 > from mercurial import node
771 > from mercurial import node
772 > from mercurial import error
772 > from mercurial import error
773 > @exchange.b2partsgenerator(b'failingpuskey')
773 > @exchange.b2partsgenerator(b'failingpuskey')
774 > def addfailingpushey(pushop, bundler):
774 > def addfailingpushey(pushop, bundler):
775 > enc = pushkey.encode
775 > enc = pushkey.encode
776 > part = bundler.newpart(b'pushkey')
776 > part = bundler.newpart(b'pushkey')
777 > part.addparam(b'namespace', enc(b'phases'))
777 > part.addparam(b'namespace', enc(b'phases'))
778 > part.addparam(b'key', enc(b'cd010b8cd998f3981a5a8115f94f8da4ab506089'))
778 > part.addparam(b'key', enc(b'cd010b8cd998f3981a5a8115f94f8da4ab506089'))
779 > part.addparam(b'old', enc(b'0')) # successful update
779 > part.addparam(b'old', enc(b'0')) # successful update
780 > part.addparam(b'new', enc(b'0'))
780 > part.addparam(b'new', enc(b'0'))
781 > def fail(pushop, exc):
781 > def fail(pushop, exc):
782 > raise error.Abort(b'Correct phase push failed (because hooks)')
782 > raise error.Abort(b'Correct phase push failed (because hooks)')
783 > pushop.pkfailcb[part.id] = fail
783 > pushop.pkfailcb[part.id] = fail
784 > EOF
784 > EOF
785 $ cat >> $HGRCPATH << EOF
785 $ cat >> $HGRCPATH << EOF
786 > [hooks]
786 > [hooks]
787 > pretxnchangegroup=
787 > pretxnchangegroup=
788 > pretxnclose.failpush=
788 > pretxnclose.failpush=
789 > prepushkey.failpush = sh -c "echo 'do not push the key !'; false"
789 > prepushkey.failpush = sh -c "echo 'do not push the key !'; false"
790 > [extensions]
790 > [extensions]
791 > mandatorypart=$TESTTMP/mandatorypart.py
791 > mandatorypart=$TESTTMP/mandatorypart.py
792 > EOF
792 > EOF
793 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS # reload http config
793 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS # reload http config
794 $ hg serve -R other -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
794 $ hg serve -R other -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
795 $ cat other.pid >> $DAEMON_PIDS
795 $ cat other.pid >> $DAEMON_PIDS
796
796
797 (Failure from a hook)
797 (Failure from a hook)
798
798
799 $ hg -R main push other -r e7ec4e813ba6
799 $ hg -R main push other -r e7ec4e813ba6
800 pushing to other
800 pushing to other
801 searching for changes
801 searching for changes
802 adding changesets
802 adding changesets
803 adding manifests
803 adding manifests
804 adding file changes
804 adding file changes
805 do not push the key !
805 do not push the key !
806 pushkey-abort: prepushkey.failpush hook exited with status 1
806 pushkey-abort: prepushkey.failpush hook exited with status 1
807 transaction abort!
807 transaction abort!
808 Cleaning up the mess...
808 Cleaning up the mess...
809 rollback completed
809 rollback completed
810 abort: Correct phase push failed (because hooks)
810 abort: Correct phase push failed (because hooks)
811 [255]
811 [255]
812 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
812 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
813 pushing to ssh://user@dummy/other
813 pushing to ssh://user@dummy/other
814 searching for changes
814 searching for changes
815 remote: adding changesets
815 remote: adding changesets
816 remote: adding manifests
816 remote: adding manifests
817 remote: adding file changes
817 remote: adding file changes
818 remote: do not push the key !
818 remote: do not push the key !
819 remote: pushkey-abort: prepushkey.failpush hook exited with status 1
819 remote: pushkey-abort: prepushkey.failpush hook exited with status 1
820 remote: transaction abort!
820 remote: transaction abort!
821 remote: Cleaning up the mess...
821 remote: Cleaning up the mess...
822 remote: rollback completed
822 remote: rollback completed
823 abort: Correct phase push failed (because hooks)
823 abort: Correct phase push failed (because hooks)
824 [255]
824 [255]
825 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
825 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
826 pushing to http://localhost:$HGPORT2/
826 pushing to http://localhost:$HGPORT2/
827 searching for changes
827 searching for changes
828 remote: adding changesets
828 remote: adding changesets
829 remote: adding manifests
829 remote: adding manifests
830 remote: adding file changes
830 remote: adding file changes
831 remote: do not push the key !
831 remote: do not push the key !
832 remote: pushkey-abort: prepushkey.failpush hook exited with status 1
832 remote: pushkey-abort: prepushkey.failpush hook exited with status 1
833 remote: transaction abort!
833 remote: transaction abort!
834 remote: Cleaning up the mess...
834 remote: Cleaning up the mess...
835 remote: rollback completed
835 remote: rollback completed
836 abort: Correct phase push failed (because hooks)
836 abort: Correct phase push failed (because hooks)
837 [255]
837 [255]
838
838
839 (Failure from the pushkey)
839 (Failure from the pushkey)
840
840
841 $ cat > mandatorypart.py << EOF
841 $ cat > mandatorypart.py << EOF
842 > from mercurial import exchange
842 > from mercurial import exchange
843 > from mercurial import pushkey
843 > from mercurial import pushkey
844 > from mercurial import node
844 > from mercurial import node
845 > from mercurial import error
845 > from mercurial import error
846 > @exchange.b2partsgenerator(b'failingpuskey')
846 > @exchange.b2partsgenerator(b'failingpuskey')
847 > def addfailingpushey(pushop, bundler):
847 > def addfailingpushey(pushop, bundler):
848 > enc = pushkey.encode
848 > enc = pushkey.encode
849 > part = bundler.newpart(b'pushkey')
849 > part = bundler.newpart(b'pushkey')
850 > part.addparam(b'namespace', enc(b'phases'))
850 > part.addparam(b'namespace', enc(b'phases'))
851 > part.addparam(b'key', enc(b'cd010b8cd998f3981a5a8115f94f8da4ab506089'))
851 > part.addparam(b'key', enc(b'cd010b8cd998f3981a5a8115f94f8da4ab506089'))
852 > part.addparam(b'old', enc(b'4')) # will fail
852 > part.addparam(b'old', enc(b'4')) # will fail
853 > part.addparam(b'new', enc(b'3'))
853 > part.addparam(b'new', enc(b'3'))
854 > def fail(pushop, exc):
854 > def fail(pushop, exc):
855 > raise error.Abort(b'Clown phase push failed')
855 > raise error.Abort(b'Clown phase push failed')
856 > pushop.pkfailcb[part.id] = fail
856 > pushop.pkfailcb[part.id] = fail
857 > EOF
857 > EOF
858 $ cat >> $HGRCPATH << EOF
858 $ cat >> $HGRCPATH << EOF
859 > [hooks]
859 > [hooks]
860 > prepushkey.failpush =
860 > prepushkey.failpush =
861 > EOF
861 > EOF
862 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS # reload http config
862 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS # reload http config
863 $ hg serve -R other -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
863 $ hg serve -R other -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
864 $ cat other.pid >> $DAEMON_PIDS
864 $ cat other.pid >> $DAEMON_PIDS
865
865
866 $ hg -R main push other -r e7ec4e813ba6
866 $ hg -R main push other -r e7ec4e813ba6
867 pushing to other
867 pushing to other
868 searching for changes
868 searching for changes
869 adding changesets
869 adding changesets
870 adding manifests
870 adding manifests
871 adding file changes
871 adding file changes
872 transaction abort!
872 transaction abort!
873 Cleaning up the mess...
873 Cleaning up the mess...
874 rollback completed
874 rollback completed
875 pushkey: lock state after "phases"
875 pushkey: lock state after "phases"
876 lock: free
876 lock: free
877 wlock: free
877 wlock: free
878 abort: Clown phase push failed
878 abort: Clown phase push failed
879 [255]
879 [255]
880 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
880 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
881 pushing to ssh://user@dummy/other
881 pushing to ssh://user@dummy/other
882 searching for changes
882 searching for changes
883 remote: adding changesets
883 remote: adding changesets
884 remote: adding manifests
884 remote: adding manifests
885 remote: adding file changes
885 remote: adding file changes
886 remote: transaction abort!
886 remote: transaction abort!
887 remote: Cleaning up the mess...
887 remote: Cleaning up the mess...
888 remote: rollback completed
888 remote: rollback completed
889 remote: pushkey: lock state after "phases"
889 remote: pushkey: lock state after "phases"
890 remote: lock: free
890 remote: lock: free
891 remote: wlock: free
891 remote: wlock: free
892 abort: Clown phase push failed
892 abort: Clown phase push failed
893 [255]
893 [255]
894 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
894 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
895 pushing to http://localhost:$HGPORT2/
895 pushing to http://localhost:$HGPORT2/
896 searching for changes
896 searching for changes
897 remote: adding changesets
897 remote: adding changesets
898 remote: adding manifests
898 remote: adding manifests
899 remote: adding file changes
899 remote: adding file changes
900 remote: transaction abort!
900 remote: transaction abort!
901 remote: Cleaning up the mess...
901 remote: Cleaning up the mess...
902 remote: rollback completed
902 remote: rollback completed
903 remote: pushkey: lock state after "phases"
903 remote: pushkey: lock state after "phases"
904 remote: lock: free
904 remote: lock: free
905 remote: wlock: free
905 remote: wlock: free
906 abort: Clown phase push failed
906 abort: Clown phase push failed
907 [255]
907 [255]
908
908
909 Test lazily acquiring the lock during unbundle
909 Test lazily acquiring the lock during unbundle
910 $ cp $TESTTMP/hgrc.orig $HGRCPATH
910 $ cp $TESTTMP/hgrc.orig $HGRCPATH
911
911
912 $ cat >> $TESTTMP/locktester.py <<EOF
912 $ cat >> $TESTTMP/locktester.py <<EOF
913 > import os
913 > import os
914 > from mercurial import bundle2, error, extensions
914 > from mercurial import bundle2, error, extensions
915 > def checklock(orig, repo, *args, **kwargs):
915 > def checklock(orig, repo, *args, **kwargs):
916 > if repo.svfs.lexists(b"lock"):
916 > if repo.svfs.lexists(b"lock"):
917 > raise error.Abort(b"Lock should not be taken")
917 > raise error.Abort(b"Lock should not be taken")
918 > return orig(repo, *args, **kwargs)
918 > return orig(repo, *args, **kwargs)
919 > def extsetup(ui):
919 > def extsetup(ui):
920 > extensions.wrapfunction(bundle2, 'processbundle', checklock)
920 > extensions.wrapfunction(bundle2, 'processbundle', checklock)
921 > EOF
921 > EOF
922
922
923 $ hg init lazylock
923 $ hg init lazylock
924 $ cat >> lazylock/.hg/hgrc <<EOF
924 $ cat >> lazylock/.hg/hgrc <<EOF
925 > [extensions]
925 > [extensions]
926 > locktester=$TESTTMP/locktester.py
926 > locktester=$TESTTMP/locktester.py
927 > EOF
927 > EOF
928
928
929 $ hg clone -q ssh://user@dummy/lazylock lazylockclient
929 $ hg clone -q ssh://user@dummy/lazylock lazylockclient
930 $ cd lazylockclient
930 $ cd lazylockclient
931 $ touch a && hg ci -Aqm a
931 $ touch a && hg ci -Aqm a
932 $ hg push
932 $ hg push
933 pushing to ssh://user@dummy/lazylock
933 pushing to ssh://user@dummy/lazylock
934 searching for changes
934 searching for changes
935 remote: Lock should not be taken
935 remote: Lock should not be taken
936 abort: push failed on remote
936 abort: push failed on remote
937 [100]
937 [100]
938
938
939 $ cat >> ../lazylock/.hg/hgrc <<EOF
939 $ cat >> ../lazylock/.hg/hgrc <<EOF
940 > [experimental]
940 > [experimental]
941 > bundle2lazylocking=True
941 > bundle2lazylocking=True
942 > EOF
942 > EOF
943 $ hg push
943 $ hg push
944 pushing to ssh://user@dummy/lazylock
944 pushing to ssh://user@dummy/lazylock
945 searching for changes
945 searching for changes
946 remote: adding changesets
946 remote: adding changesets
947 remote: adding manifests
947 remote: adding manifests
948 remote: adding file changes
948 remote: adding file changes
949 remote: added 1 changesets with 1 changes to 1 files
949 remote: added 1 changesets with 1 changes to 1 files
950
950
951 $ cd ..
951 $ cd ..
952
952
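The bundle2lazylocking behaviour exercised above boils down to deferring an expensive lock
until something actually needs it. A generic illustration of that pattern in plain Python
(invented names, not Mercurial's implementation):

    class LazyLock(object):
        """Acquire the real lock only on first use."""

        def __init__(self, acquire):
            self._acquire = acquire   # callable that takes the real lock
            self._lock = None

        def __call__(self):
            if self._lock is None:    # nothing has been locked so far
                self._lock = self._acquire()
            return self._lock

    # A read-only bundle2 part never invokes the callable, so the store
    # lock is never taken -- which is what the locktester extension above
    # asserts once experimental.bundle2lazylocking is enabled.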
953 Servers can disable bundle1 for clone/pull operations
953 Servers can disable bundle1 for clone/pull operations
954
954
955 $ killdaemons.py
955 $ killdaemons.py
956 $ hg init bundle2onlyserver
956 $ hg init bundle2onlyserver
957 $ cd bundle2onlyserver
957 $ cd bundle2onlyserver
958 $ cat > .hg/hgrc << EOF
958 $ cat > .hg/hgrc << EOF
959 > [server]
959 > [server]
960 > bundle1.pull = false
960 > bundle1.pull = false
961 > EOF
961 > EOF
962
962
963 $ touch foo
963 $ touch foo
964 $ hg -q commit -A -m initial
964 $ hg -q commit -A -m initial
965
965
966 $ hg serve -p $HGPORT -d --pid-file=hg.pid
966 $ hg serve -p $HGPORT -d --pid-file=hg.pid
967 $ cat hg.pid >> $DAEMON_PIDS
967 $ cat hg.pid >> $DAEMON_PIDS
968
968
969 $ hg --config devel.legacy.exchange=bundle1 clone http://localhost:$HGPORT/ not-bundle2
969 $ hg --config devel.legacy.exchange=bundle1 clone http://localhost:$HGPORT/ not-bundle2
970 requesting all changes
970 requesting all changes
971 abort: remote error:
971 abort: remote error:
972 incompatible Mercurial client; bundle2 required
972 incompatible Mercurial client; bundle2 required
973 (see https://www.mercurial-scm.org/wiki/IncompatibleClient)
973 (see https://www.mercurial-scm.org/wiki/IncompatibleClient)
974 [100]
974 [100]
975 $ killdaemons.py
975 $ killdaemons.py
976 $ cd ..
976 $ cd ..
977
977
978 bundle1 can still pull non-generaldelta repos when generaldelta bundle1 is disabled
978 bundle1 can still pull non-generaldelta repos when generaldelta bundle1 is disabled
979
979
980 $ hg --config format.usegeneraldelta=false init notgdserver
980 $ hg --config format.usegeneraldelta=false init notgdserver
981 $ cd notgdserver
981 $ cd notgdserver
982 $ cat > .hg/hgrc << EOF
982 $ cat > .hg/hgrc << EOF
983 > [server]
983 > [server]
984 > bundle1gd.pull = false
984 > bundle1gd.pull = false
985 > EOF
985 > EOF
986
986
987 $ touch foo
987 $ touch foo
988 $ hg -q commit -A -m initial
988 $ hg -q commit -A -m initial
989 $ hg serve -p $HGPORT -d --pid-file=hg.pid
989 $ hg serve -p $HGPORT -d --pid-file=hg.pid
990 $ cat hg.pid >> $DAEMON_PIDS
990 $ cat hg.pid >> $DAEMON_PIDS
991
991
992 $ hg --config devel.legacy.exchange=bundle1 clone http://localhost:$HGPORT/ not-bundle2-1
992 $ hg --config devel.legacy.exchange=bundle1 clone http://localhost:$HGPORT/ not-bundle2-1
993 requesting all changes
993 requesting all changes
994 adding changesets
994 adding changesets
995 adding manifests
995 adding manifests
996 adding file changes
996 adding file changes
997 added 1 changesets with 1 changes to 1 files
997 added 1 changesets with 1 changes to 1 files
998 new changesets 96ee1d7354c4
998 new changesets 96ee1d7354c4
999 updating to branch default
999 updating to branch default
1000 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1000 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1001
1001
1002 $ killdaemons.py
1002 $ killdaemons.py
1003 $ cd ../bundle2onlyserver
1003 $ cd ../bundle2onlyserver
1004
1004
1005 bundle1 pull can be disabled for generaldelta repos only
1005 bundle1 pull can be disabled for generaldelta repos only
1006
1006
1007 $ cat > .hg/hgrc << EOF
1007 $ cat > .hg/hgrc << EOF
1008 > [server]
1008 > [server]
1009 > bundle1gd.pull = false
1009 > bundle1gd.pull = false
1010 > EOF
1010 > EOF
1011
1011
1012 $ hg serve -p $HGPORT -d --pid-file=hg.pid
1012 $ hg serve -p $HGPORT -d --pid-file=hg.pid
1013 $ cat hg.pid >> $DAEMON_PIDS
1013 $ cat hg.pid >> $DAEMON_PIDS
1014 $ hg --config devel.legacy.exchange=bundle1 clone http://localhost:$HGPORT/ not-bundle2
1014 $ hg --config devel.legacy.exchange=bundle1 clone http://localhost:$HGPORT/ not-bundle2
1015 requesting all changes
1015 requesting all changes
1016 abort: remote error:
1016 abort: remote error:
1017 incompatible Mercurial client; bundle2 required
1017 incompatible Mercurial client; bundle2 required
1018 (see https://www.mercurial-scm.org/wiki/IncompatibleClient)
1018 (see https://www.mercurial-scm.org/wiki/IncompatibleClient)
1019 [100]
1019 [100]
1020
1020
1021 $ killdaemons.py
1021 $ killdaemons.py
1022
1022
1023 Verify the global server.bundle1 option works
1023 Verify the global server.bundle1 option works
1024
1024
1025 $ cd ..
1025 $ cd ..
1026 $ cat > bundle2onlyserver/.hg/hgrc << EOF
1026 $ cat > bundle2onlyserver/.hg/hgrc << EOF
1027 > [server]
1027 > [server]
1028 > bundle1 = false
1028 > bundle1 = false
1029 > EOF
1029 > EOF
1030 $ hg serve -R bundle2onlyserver -p $HGPORT -d --pid-file=hg.pid
1030 $ hg serve -R bundle2onlyserver -p $HGPORT -d --pid-file=hg.pid
1031 $ cat hg.pid >> $DAEMON_PIDS
1031 $ cat hg.pid >> $DAEMON_PIDS
1032 $ hg --config devel.legacy.exchange=bundle1 clone http://localhost:$HGPORT not-bundle2
1032 $ hg --config devel.legacy.exchange=bundle1 clone http://localhost:$HGPORT not-bundle2
1033 requesting all changes
1033 requesting all changes
1034 abort: remote error:
1034 abort: remote error:
1035 incompatible Mercurial client; bundle2 required
1035 incompatible Mercurial client; bundle2 required
1036 (see https://www.mercurial-scm.org/wiki/IncompatibleClient)
1036 (see https://www.mercurial-scm.org/wiki/IncompatibleClient)
1037 [100]
1037 [100]
1038 $ killdaemons.py
1038 $ killdaemons.py
1039
1039
1040 $ hg --config devel.legacy.exchange=bundle1 clone ssh://user@dummy/bundle2onlyserver not-bundle2-ssh
1040 $ hg --config devel.legacy.exchange=bundle1 clone ssh://user@dummy/bundle2onlyserver not-bundle2-ssh
1041 requesting all changes
1041 requesting all changes
1042 adding changesets
1042 adding changesets
1043 remote: abort: incompatible Mercurial client; bundle2 required
1043 remote: abort: incompatible Mercurial client; bundle2 required
1044 remote: (see https://www.mercurial-scm.org/wiki/IncompatibleClient)
1044 remote: (see https://www.mercurial-scm.org/wiki/IncompatibleClient)
1045 transaction abort!
1046 rollback completed
1045 abort: stream ended unexpectedly (got 0 bytes, expected 4)
1047 abort: stream ended unexpectedly (got 0 bytes, expected 4)
1046 [255]
1048 [255]
1047
1049
1048 $ cat > bundle2onlyserver/.hg/hgrc << EOF
1050 $ cat > bundle2onlyserver/.hg/hgrc << EOF
1049 > [server]
1051 > [server]
1050 > bundle1gd = false
1052 > bundle1gd = false
1051 > EOF
1053 > EOF
1052 $ hg serve -R bundle2onlyserver -p $HGPORT -d --pid-file=hg.pid
1054 $ hg serve -R bundle2onlyserver -p $HGPORT -d --pid-file=hg.pid
1053 $ cat hg.pid >> $DAEMON_PIDS
1055 $ cat hg.pid >> $DAEMON_PIDS
1054
1056
1055 $ hg --config devel.legacy.exchange=bundle1 clone http://localhost:$HGPORT/ not-bundle2
1057 $ hg --config devel.legacy.exchange=bundle1 clone http://localhost:$HGPORT/ not-bundle2
1056 requesting all changes
1058 requesting all changes
1057 abort: remote error:
1059 abort: remote error:
1058 incompatible Mercurial client; bundle2 required
1060 incompatible Mercurial client; bundle2 required
1059 (see https://www.mercurial-scm.org/wiki/IncompatibleClient)
1061 (see https://www.mercurial-scm.org/wiki/IncompatibleClient)
1060 [100]
1062 [100]
1061
1063
1062 $ killdaemons.py
1064 $ killdaemons.py
1063
1065
1064 $ cd notgdserver
1066 $ cd notgdserver
1065 $ cat > .hg/hgrc << EOF
1067 $ cat > .hg/hgrc << EOF
1066 > [server]
1068 > [server]
1067 > bundle1gd = false
1069 > bundle1gd = false
1068 > EOF
1070 > EOF
1069 $ hg serve -p $HGPORT -d --pid-file=hg.pid
1071 $ hg serve -p $HGPORT -d --pid-file=hg.pid
1070 $ cat hg.pid >> $DAEMON_PIDS
1072 $ cat hg.pid >> $DAEMON_PIDS
1071
1073
1072 $ hg --config devel.legacy.exchange=bundle1 clone http://localhost:$HGPORT/ not-bundle2-2
1074 $ hg --config devel.legacy.exchange=bundle1 clone http://localhost:$HGPORT/ not-bundle2-2
1073 requesting all changes
1075 requesting all changes
1074 adding changesets
1076 adding changesets
1075 adding manifests
1077 adding manifests
1076 adding file changes
1078 adding file changes
1077 added 1 changesets with 1 changes to 1 files
1079 added 1 changesets with 1 changes to 1 files
1078 new changesets 96ee1d7354c4
1080 new changesets 96ee1d7354c4
1079 updating to branch default
1081 updating to branch default
1080 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1082 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1081
1083
1082 $ killdaemons.py
1084 $ killdaemons.py
1083 $ cd ../bundle2onlyserver
1085 $ cd ../bundle2onlyserver
1084
1086
1085 Verify bundle1 pushes can be disabled
1087 Verify bundle1 pushes can be disabled
1086
1088
1087 $ cat > .hg/hgrc << EOF
1089 $ cat > .hg/hgrc << EOF
1088 > [server]
1090 > [server]
1089 > bundle1.push = false
1091 > bundle1.push = false
1090 > [web]
1092 > [web]
1091 > allow_push = *
1093 > allow_push = *
1092 > push_ssl = false
1094 > push_ssl = false
1093 > EOF
1095 > EOF
1094
1096
1095 $ hg serve -p $HGPORT -d --pid-file=hg.pid -E error.log
1097 $ hg serve -p $HGPORT -d --pid-file=hg.pid -E error.log
1096 $ cat hg.pid >> $DAEMON_PIDS
1098 $ cat hg.pid >> $DAEMON_PIDS
1097 $ cd ..
1099 $ cd ..
1098
1100
1099 $ hg clone http://localhost:$HGPORT bundle2-only
1101 $ hg clone http://localhost:$HGPORT bundle2-only
1100 requesting all changes
1102 requesting all changes
1101 adding changesets
1103 adding changesets
1102 adding manifests
1104 adding manifests
1103 adding file changes
1105 adding file changes
1104 added 1 changesets with 1 changes to 1 files
1106 added 1 changesets with 1 changes to 1 files
1105 new changesets 96ee1d7354c4
1107 new changesets 96ee1d7354c4
1106 updating to branch default
1108 updating to branch default
1107 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1109 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1108 $ cd bundle2-only
1110 $ cd bundle2-only
1109 $ echo commit > foo
1111 $ echo commit > foo
1110 $ hg commit -m commit
1112 $ hg commit -m commit
1111 $ hg --config devel.legacy.exchange=bundle1 push
1113 $ hg --config devel.legacy.exchange=bundle1 push
1112 pushing to http://localhost:$HGPORT/
1114 pushing to http://localhost:$HGPORT/
1113 searching for changes
1115 searching for changes
1114 abort: remote error:
1116 abort: remote error:
1115 incompatible Mercurial client; bundle2 required
1117 incompatible Mercurial client; bundle2 required
1116 (see https://www.mercurial-scm.org/wiki/IncompatibleClient)
1118 (see https://www.mercurial-scm.org/wiki/IncompatibleClient)
1117 [100]
1119 [100]
1118
1120
1119 (also check with ssh)
1121 (also check with ssh)
1120
1122
1121 $ hg --config devel.legacy.exchange=bundle1 push ssh://user@dummy/bundle2onlyserver
1123 $ hg --config devel.legacy.exchange=bundle1 push ssh://user@dummy/bundle2onlyserver
1122 pushing to ssh://user@dummy/bundle2onlyserver
1124 pushing to ssh://user@dummy/bundle2onlyserver
1123 searching for changes
1125 searching for changes
1124 remote: abort: incompatible Mercurial client; bundle2 required
1126 remote: abort: incompatible Mercurial client; bundle2 required
1125 remote: (see https://www.mercurial-scm.org/wiki/IncompatibleClient)
1127 remote: (see https://www.mercurial-scm.org/wiki/IncompatibleClient)
1126 [1]
1128 [1]
1127
1129
1128 $ hg push
1130 $ hg push
1129 pushing to http://localhost:$HGPORT/
1131 pushing to http://localhost:$HGPORT/
1130 searching for changes
1132 searching for changes
1131 remote: adding changesets
1133 remote: adding changesets
1132 remote: adding manifests
1134 remote: adding manifests
1133 remote: adding file changes
1135 remote: adding file changes
1134 remote: added 1 changesets with 1 changes to 1 files
1136 remote: added 1 changesets with 1 changes to 1 files
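The server-side knobs exercised in the bundle1 tests above combine from most specific to
least specific. The helper below is only a sketch of the behaviour those tests observe
(invented name, simplified precedence), not Mercurial's actual code:

    def bundle1_allowed(config, action, generaldelta):
        """config: dict for the [server] section, e.g. {'bundle1.pull': False}.

        action is 'pull' or 'push'; generaldelta says whether the served
        repository uses the generaldelta format.
        """
        checks = [('bundle1gd.%s' % action, True),
                  ('bundle1.%s' % action, False),
                  ('bundle1gd', True),
                  ('bundle1', False)]
        for key, gd_only in checks:
            if gd_only and not generaldelta:
                continue          # bundle1gd* only applies to generaldelta repos
            if key in config:
                return config[key]
        return True               # allowed unless something above said no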
@@ -1,179 +1,186 b''
1 Test stream cloning while a revlog split happens
1 Test stream cloning while a revlog split happens
2 ------------------------------------------------
2 ------------------------------------------------
3
3
4 #testcases stream-bundle2-v2 stream-bundle2-v3
4 #testcases stream-bundle2-v2 stream-bundle2-v3
5
5
6 #if stream-bundle2-v3
6 #if stream-bundle2-v3
7 $ cat << EOF >> $HGRCPATH
7 $ cat << EOF >> $HGRCPATH
8 > [experimental]
8 > [experimental]
9 > stream-v3 = yes
9 > stream-v3 = yes
10 > EOF
10 > EOF
11 #endif
11 #endif
12
12
13 setup a repository for tests
13 setup a repository for tests
14 ----------------------------
14 ----------------------------
15
15
16 $ cat >> $HGRCPATH << EOF
16 $ cat >> $HGRCPATH << EOF
17 > [format]
17 > [format]
18 > # skip compression to make it easy to trigger a split
18 > # skip compression to make it easy to trigger a split
19 > revlog-compression=none
19 > revlog-compression=none
20 > [phases]
20 > [phases]
21 > publish=no
21 > publish=no
22 > EOF
22 > EOF
23
23
24 $ hg init server
24 $ hg init server
25 $ cd server
25 $ cd server
26 $ file="some-file"
26 $ file="some-file"
27 $ printf '%20d' '1' > $file
27 $ printf '%20d' '1' > $file
28 $ hg commit -Aqma
28 $ hg commit -Aqma
29 $ printf '%1024d' '1' > $file
29 $ printf '%1024d' '1' > $file
30 $ hg commit -Aqmb
30 $ hg commit -Aqmb
31 $ printf '%20d' '1' > $file
31 $ printf '%20d' '1' > $file
32 $ hg commit -Aqmc
32 $ hg commit -Aqmc
33
33
34 check the revlog is inline
34 check the revlog is inline
35
35
36 $ f -s .hg/store/data/some-file*
36 $ f -s .hg/store/data/some-file*
37 .hg/store/data/some-file.i: size=1259
37 .hg/store/data/some-file.i: size=1259
38 $ hg debug-revlog-index some-file
38 $ hg debug-revlog-index some-file
39 rev linkrev nodeid p1-nodeid p2-nodeid
39 rev linkrev nodeid p1-nodeid p2-nodeid
40 0 0 ed70cecbc103 000000000000 000000000000
40 0 0 ed70cecbc103 000000000000 000000000000
41 1 1 7241018db64c ed70cecbc103 000000000000
41 1 1 7241018db64c ed70cecbc103 000000000000
42 2 2 fa1120531cc1 7241018db64c 000000000000
42 2 2 fa1120531cc1 7241018db64c 000000000000
43 $ cd ..
43 $ cd ..
44
44
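An "inline" revlog keeps its revision data inside the .i file; once the data outgrows the
inline threshold, Mercurial splits it into a separate .d file, which is exactly the event
this test races against. A minimal way to observe the split from outside (illustrative
sketch, invented helper name):

    import os

    def is_inline(store_path, name='some-file'):
        # Before the split only data/<name>.i exists; afterwards the
        # revision data lives in data/<name>.d next to a small index.
        d_file = os.path.join(store_path, 'data', name + '.d')
        return not os.path.exists(d_file)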
45 setup synchronisation file
45 setup synchronisation file
46
46
47 $ HG_TEST_STREAM_WALKED_FILE_1="$TESTTMP/sync_file_walked_1"
47 $ HG_TEST_STREAM_WALKED_FILE_1="$TESTTMP/sync_file_walked_1"
48 $ export HG_TEST_STREAM_WALKED_FILE_1
48 $ export HG_TEST_STREAM_WALKED_FILE_1
49 $ HG_TEST_STREAM_WALKED_FILE_2="$TESTTMP/sync_file_walked_2"
49 $ HG_TEST_STREAM_WALKED_FILE_2="$TESTTMP/sync_file_walked_2"
50 $ export HG_TEST_STREAM_WALKED_FILE_2
50 $ export HG_TEST_STREAM_WALKED_FILE_2
51 $ HG_TEST_STREAM_WALKED_FILE_3="$TESTTMP/sync_file_walked_3"
51 $ HG_TEST_STREAM_WALKED_FILE_3="$TESTTMP/sync_file_walked_3"
52 $ export HG_TEST_STREAM_WALKED_FILE_3
52 $ export HG_TEST_STREAM_WALKED_FILE_3
53
53
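These exported HG_TEST_STREAM_WALKED_FILE_* paths act as crude cross-process barriers: one
side touches a file when it reaches a pause point, the other polls until the file appears
(the real helpers are testlib/wait-on-file and testlib/ext-stream-clone-steps.py, not shown
here). A minimal sketch of that touch/poll pattern, with invented names:

    import os
    import time

    def signal_step(path):
        open(path, 'ab').close()          # "touch" the barrier file

    def wait_on_file(path, timeout=10.0, poll=0.01):
        deadline = time.time() + timeout
        while not os.path.exists(path):   # spin until the other side signals
            if time.time() > deadline:
                raise RuntimeError('timed out waiting for %s' % path)
            time.sleep(poll)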
54
54
55 Test stream-clone raced by a revlog-split
55 Test stream-clone raced by a revlog-split
56 =========================================
56 =========================================
57
57
58 Test stream-clone where the file is split right after the lock section is done
58 Test stream-clone where the file is split right after the lock section is done
59
59
60 Start the server
60 Start the server
61
61
62 $ hg serve -R server \
62 $ hg serve -R server \
63 > -p $HGPORT1 -d --error errors.log --pid-file=hg.pid \
63 > -p $HGPORT1 -d --error errors.log --pid-file=hg.pid \
64 > --config extensions.stream_steps="$RUNTESTDIR/testlib/ext-stream-clone-steps.py"
64 > --config extensions.stream_steps="$RUNTESTDIR/testlib/ext-stream-clone-steps.py"
65 $ cat hg.pid >> $DAEMON_PIDS
65 $ cat hg.pid >> $DAEMON_PIDS
66
66
67 Start a client doing a streaming clone
67 Start a client doing a streaming clone
68
68
69 $ ( \
69 $ ( \
70 > hg clone --debug --stream -U http://localhost:$HGPORT1 \
70 > hg clone --debug --stream -U http://localhost:$HGPORT1 \
71 > clone-while-split > client.log 2>&1; \
71 > clone-while-split > client.log 2>&1; \
72 > touch "$HG_TEST_STREAM_WALKED_FILE_3" \
72 > touch "$HG_TEST_STREAM_WALKED_FILE_3" \
73 > ) &
73 > ) &
74
74
75 Wait for the server to be done collecting data
75 Wait for the server to be done collecting data
76
76
77 $ $RUNTESTDIR/testlib/wait-on-file 10 $HG_TEST_STREAM_WALKED_FILE_1
77 $ $RUNTESTDIR/testlib/wait-on-file 10 $HG_TEST_STREAM_WALKED_FILE_1
78
78
79 trigger a split
79 trigger a split
80
80
81 $ dd if=/dev/zero of=server/$file bs=1k count=128 > /dev/null 2>&1
81 $ dd if=/dev/zero of=server/$file bs=1k count=128 > /dev/null 2>&1
82 $ hg -R server ci -m "triggering a split" --config ui.timeout.warn=-1
82 $ hg -R server ci -m "triggering a split" --config ui.timeout.warn=-1
83
83
84 unlock the stream generation
84 unlock the stream generation
85
85
86 $ touch $HG_TEST_STREAM_WALKED_FILE_2
86 $ touch $HG_TEST_STREAM_WALKED_FILE_2
87
87
88 wait for the client to be done cloning.
88 wait for the client to be done cloning.
89
89
90 $ $RUNTESTDIR/testlib/wait-on-file 10 $HG_TEST_STREAM_WALKED_FILE_3
90 $ $RUNTESTDIR/testlib/wait-on-file 10 $HG_TEST_STREAM_WALKED_FILE_3
91
91
92 Check everything is fine
92 Check everything is fine
93
93
94 $ cat client.log
94 $ cat client.log
95 using http://localhost:$HGPORT1/
95 using http://localhost:$HGPORT1/
96 sending capabilities command
96 sending capabilities command
97 query 1; heads
97 query 1; heads
98 sending batch command
98 sending batch command
99 streaming all changes
99 streaming all changes
100 sending getbundle command
100 sending getbundle command
101 bundle2-input-bundle: with-transaction
101 bundle2-input-bundle: with-transaction
102 bundle2-input-part: "stream2" (params: 3 mandatory) supported (stream-bundle2-v2 !)
102 bundle2-input-part: "stream2" (params: 3 mandatory) supported (stream-bundle2-v2 !)
103 bundle2-input-part: "stream3-exp" (params: 1 mandatory) supported (stream-bundle2-v3 !)
103 bundle2-input-part: "stream3-exp" (params: 1 mandatory) supported (stream-bundle2-v3 !)
104 applying stream bundle
104 applying stream bundle
105 7 files to transfer, 2.11 KB of data (stream-bundle2-v2 !)
105 8 files to transfer, 2.11 KB of data (stream-bundle2-v2 no-rust !)
106 10 files to transfer, 2.29 KB of data (stream-bundle2-v2 rust !)
106 adding [s] data/some-file.i (1.23 KB) (stream-bundle2-v2 !)
107 adding [s] data/some-file.i (1.23 KB) (stream-bundle2-v2 !)
107 7 entries to transfer (stream-bundle2-v3 !)
108 7 entries to transfer (stream-bundle2-v3 !)
108 adding [s] data/some-file.d (1.04 KB) (stream-bundle2-v3 !)
109 adding [s] data/some-file.d (1.04 KB) (stream-bundle2-v3 !)
109 adding [s] data/some-file.i (192 bytes) (stream-bundle2-v3 !)
110 adding [s] data/some-file.i (192 bytes) (stream-bundle2-v3 !)
110 adding [s] phaseroots (43 bytes)
111 adding [s] phaseroots (43 bytes)
111 adding [s] 00manifest.i (348 bytes)
112 adding [s] 00manifest.i (348 bytes)
112 adding [s] 00changelog.i (381 bytes)
113 adding [s] 00changelog.n (62 bytes) (rust !)
114 adding [s] 00changelog-88698448.nd (128 bytes) (rust !)
115 adding [s] 00changelog.d (189 bytes)
116 adding [s] 00changelog.i (192 bytes)
113 adding [c] branch2-served (94 bytes)
117 adding [c] branch2-served (94 bytes)
114 adding [c] rbc-names-v1 (7 bytes)
118 adding [c] rbc-names-v1 (7 bytes)
115 adding [c] rbc-revs-v1 (24 bytes)
119 adding [c] rbc-revs-v1 (24 bytes)
116 updating the branch cache
120 updating the branch cache
117 transferred 2.11 KB in * seconds (* */sec) (glob)
121 transferred 2.11 KB in * seconds (* */sec) (glob) (no-rust !)
118 bundle2-input-part: total payload size 2268 (stream-bundle2-v2 !)
122 transferred 2.29 KB in * seconds (* */sec) (glob) (rust !)
119 bundle2-input-part: total payload size 2296 (stream-bundle2-v3 !)
123 bundle2-input-part: total payload size 2285 (stream-bundle2-v2 no-rust !)
124 bundle2-input-part: total payload size 2518 (stream-bundle2-v2 rust !)
125 bundle2-input-part: total payload size 2313 (stream-bundle2-v3 no-rust !)
126 bundle2-input-part: total payload size 2546 (stream-bundle2-v3 rust !)
120 bundle2-input-part: "listkeys" (params: 1 mandatory) supported
127 bundle2-input-part: "listkeys" (params: 1 mandatory) supported
121 bundle2-input-bundle: 2 parts total
128 bundle2-input-bundle: 2 parts total
122 checking for updated bookmarks
129 checking for updated bookmarks
123 updating the branch cache
130 updating the branch cache
124 (sent 3 HTTP requests and * bytes; received * bytes in responses) (glob)
131 (sent 3 HTTP requests and * bytes; received * bytes in responses) (glob)
125 $ tail -2 errors.log
132 $ tail -2 errors.log
126 $ hg -R clone-while-split verify
133 $ hg -R clone-while-split verify
127 checking changesets
134 checking changesets
128 checking manifests
135 checking manifests
129 crosschecking files in changesets and manifests
136 crosschecking files in changesets and manifests
130 checking files
137 checking files
131 checking dirstate
138 checking dirstate
132 checked 3 changesets with 3 changes to 1 files
139 checked 3 changesets with 3 changes to 1 files
133 $ hg -R clone-while-split tip
140 $ hg -R clone-while-split tip
134 changeset: 2:dbd9854c38a6
141 changeset: 2:dbd9854c38a6
135 tag: tip
142 tag: tip
136 user: test
143 user: test
137 date: Thu Jan 01 00:00:00 1970 +0000
144 date: Thu Jan 01 00:00:00 1970 +0000
138 summary: c
145 summary: c
139
146
140 $ hg -R clone-while-split debug-revlog-index some-file
147 $ hg -R clone-while-split debug-revlog-index some-file
141 rev linkrev nodeid p1-nodeid p2-nodeid
148 rev linkrev nodeid p1-nodeid p2-nodeid
142 0 0 ed70cecbc103 000000000000 000000000000
149 0 0 ed70cecbc103 000000000000 000000000000
143 1 1 7241018db64c ed70cecbc103 000000000000
150 1 1 7241018db64c ed70cecbc103 000000000000
144 2 2 fa1120531cc1 7241018db64c 000000000000
151 2 2 fa1120531cc1 7241018db64c 000000000000
145 $ hg -R server phase --rev 'all()'
152 $ hg -R server phase --rev 'all()'
146 0: draft
153 0: draft
147 1: draft
154 1: draft
148 2: draft
155 2: draft
149 3: draft
156 3: draft
150 $ hg -R clone-while-split phase --rev 'all()'
157 $ hg -R clone-while-split phase --rev 'all()'
151 0: draft
158 0: draft
152 1: draft
159 1: draft
153 2: draft
160 2: draft
154
161
155 subsequent pulls work
162 subsequent pulls work
156
163
157 $ hg -R clone-while-split pull
164 $ hg -R clone-while-split pull
158 pulling from http://localhost:$HGPORT1/
165 pulling from http://localhost:$HGPORT1/
159 searching for changes
166 searching for changes
160 adding changesets
167 adding changesets
161 adding manifests
168 adding manifests
162 adding file changes
169 adding file changes
163 added 1 changesets with 1 changes to 1 files
170 added 1 changesets with 1 changes to 1 files
164 new changesets df05c6cb1406 (1 drafts)
171 new changesets df05c6cb1406 (1 drafts)
165 (run 'hg update' to get a working copy)
172 (run 'hg update' to get a working copy)
166
173
167 $ hg -R clone-while-split debug-revlog-index some-file
174 $ hg -R clone-while-split debug-revlog-index some-file
168 rev linkrev nodeid p1-nodeid p2-nodeid
175 rev linkrev nodeid p1-nodeid p2-nodeid
169 0 0 ed70cecbc103 000000000000 000000000000
176 0 0 ed70cecbc103 000000000000 000000000000
170 1 1 7241018db64c ed70cecbc103 000000000000
177 1 1 7241018db64c ed70cecbc103 000000000000
171 2 2 fa1120531cc1 7241018db64c 000000000000
178 2 2 fa1120531cc1 7241018db64c 000000000000
172 3 3 a631378adaa3 fa1120531cc1 000000000000
179 3 3 a631378adaa3 fa1120531cc1 000000000000
173 $ hg -R clone-while-split verify
180 $ hg -R clone-while-split verify
174 checking changesets
181 checking changesets
175 checking manifests
182 checking manifests
176 crosschecking files in changesets and manifests
183 crosschecking files in changesets and manifests
177 checking files
184 checking files
178 checking dirstate
185 checking dirstate
179 checked 4 changesets with 4 changes to 1 files
186 checked 4 changesets with 4 changes to 1 files
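
In the stream-bundle2-v2 output above, some-file arrives as a single inline
some-file.i, while in the stream-bundle2-v3 output it arrives already split into
some-file.d plus a small some-file.i. To inspect the resulting layout on either
side after the run, a quick sketch (plain ls, not part of the test) is:

  $ ls -l server/.hg/store/data/some-file.*
  $ ls -l clone-while-split/.hg/store/data/some-file.*
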
@@ -1,994 +1,1036 b''
1 #require serve no-reposimplestore no-chg
1 #require serve no-reposimplestore no-chg
2
2
3 #testcases stream-legacy stream-bundle2-v2 stream-bundle2-v3
3 #testcases stream-legacy stream-bundle2-v2 stream-bundle2-v3
4
4
5 #if stream-legacy
5 #if stream-legacy
6 $ cat << EOF >> $HGRCPATH
6 $ cat << EOF >> $HGRCPATH
7 > [server]
7 > [server]
8 > bundle2.stream = no
8 > bundle2.stream = no
9 > [format]
10 > # the persistent nodemap is too broken with the legacy format;
11 > # however, clients with nodemap support will have better stream support.
12 > use-persistent-nodemap=no
9 > EOF
13 > EOF
10 #endif
14 #endif
11 #if stream-bundle2-v3
15 #if stream-bundle2-v3
12 $ cat << EOF >> $HGRCPATH
16 $ cat << EOF >> $HGRCPATH
13 > [experimental]
17 > [experimental]
14 > stream-v3 = yes
18 > stream-v3 = yes
15 > EOF
19 > EOF
16 #endif
20 #endif
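
The three test cases above only differ in the configuration written to $HGRCPATH.
To exercise a single variant, recent versions of the test runner accept a case
suffix on the test name (a sketch; the file name shown is assumed, and a plain
"run-tests.py <test-file>" runs every case):

  $ cd tests
  $ ./run-tests.py test-clone-stream.t#stream-bundle2-v3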
17
21
18 Initialize repository
22 Initialize repository
19
23
20 $ hg init server
24 $ hg init server
21 $ cd server
25 $ cd server
22 $ sh $TESTDIR/testlib/stream_clone_setup.sh
26 $ sh $TESTDIR/testlib/stream_clone_setup.sh
23 adding 00changelog-ab349180a0405010.nd
27 adding 00changelog-ab349180a0405010.nd
24 adding 00changelog.d
28 adding 00changelog.d
25 adding 00changelog.i
29 adding 00changelog.i
26 adding 00changelog.n
30 adding 00changelog.n
27 adding 00manifest.d
31 adding 00manifest.d
28 adding 00manifest.i
32 adding 00manifest.i
29 adding container/isam-build-centos7/bazel-coverage-generator-sandboxfs-compatibility-0758e3e4f6057904d44399bd666faba9e7f40686.patch
33 adding container/isam-build-centos7/bazel-coverage-generator-sandboxfs-compatibility-0758e3e4f6057904d44399bd666faba9e7f40686.patch
30 adding data/foo.d
34 adding data/foo.d
31 adding data/foo.i
35 adding data/foo.i
32 adding data/foo.n
36 adding data/foo.n
33 adding data/undo.babar
37 adding data/undo.babar
34 adding data/undo.d
38 adding data/undo.d
35 adding data/undo.foo.d
39 adding data/undo.foo.d
36 adding data/undo.foo.i
40 adding data/undo.foo.i
37 adding data/undo.foo.n
41 adding data/undo.foo.n
38 adding data/undo.i
42 adding data/undo.i
39 adding data/undo.n
43 adding data/undo.n
40 adding data/undo.py
44 adding data/undo.py
41 adding foo.d
45 adding foo.d
42 adding foo.i
46 adding foo.i
43 adding foo.n
47 adding foo.n
44 adding meta/foo.d
48 adding meta/foo.d
45 adding meta/foo.i
49 adding meta/foo.i
46 adding meta/foo.n
50 adding meta/foo.n
47 adding meta/undo.babar
51 adding meta/undo.babar
48 adding meta/undo.d
52 adding meta/undo.d
49 adding meta/undo.foo.d
53 adding meta/undo.foo.d
50 adding meta/undo.foo.i
54 adding meta/undo.foo.i
51 adding meta/undo.foo.n
55 adding meta/undo.foo.n
52 adding meta/undo.i
56 adding meta/undo.i
53 adding meta/undo.n
57 adding meta/undo.n
54 adding meta/undo.py
58 adding meta/undo.py
55 adding savanah/foo.d
59 adding savanah/foo.d
56 adding savanah/foo.i
60 adding savanah/foo.i
57 adding savanah/foo.n
61 adding savanah/foo.n
58 adding savanah/undo.babar
62 adding savanah/undo.babar
59 adding savanah/undo.d
63 adding savanah/undo.d
60 adding savanah/undo.foo.d
64 adding savanah/undo.foo.d
61 adding savanah/undo.foo.i
65 adding savanah/undo.foo.i
62 adding savanah/undo.foo.n
66 adding savanah/undo.foo.n
63 adding savanah/undo.i
67 adding savanah/undo.i
64 adding savanah/undo.n
68 adding savanah/undo.n
65 adding savanah/undo.py
69 adding savanah/undo.py
66 adding store/C\xc3\xa9lesteVille_is_a_Capital_City (esc)
70 adding store/C\xc3\xa9lesteVille_is_a_Capital_City (esc)
67 adding store/foo.d
71 adding store/foo.d
68 adding store/foo.i
72 adding store/foo.i
69 adding store/foo.n
73 adding store/foo.n
70 adding store/undo.babar
74 adding store/undo.babar
71 adding store/undo.d
75 adding store/undo.d
72 adding store/undo.foo.d
76 adding store/undo.foo.d
73 adding store/undo.foo.i
77 adding store/undo.foo.i
74 adding store/undo.foo.n
78 adding store/undo.foo.n
75 adding store/undo.i
79 adding store/undo.i
76 adding store/undo.n
80 adding store/undo.n
77 adding store/undo.py
81 adding store/undo.py
78 adding undo.babar
82 adding undo.babar
79 adding undo.d
83 adding undo.d
80 adding undo.foo.d
84 adding undo.foo.d
81 adding undo.foo.i
85 adding undo.foo.i
82 adding undo.foo.n
86 adding undo.foo.n
83 adding undo.i
87 adding undo.i
84 adding undo.n
88 adding undo.n
85 adding undo.py
89 adding undo.py
86
90
87 $ hg --config server.uncompressed=false serve -p $HGPORT -d --pid-file=hg.pid
91 $ hg --config server.uncompressed=false serve -p $HGPORT -d --pid-file=hg.pid
88 $ cat hg.pid > $DAEMON_PIDS
92 $ cat hg.pid > $DAEMON_PIDS
89 $ cd ..
93 $ cd ..
90
94
91 Check local clone
95 Check local clone
92 ==================
96 ==================
93
97
94 The logic is close enough to the uncompressed case.
98 The logic is close enough to the uncompressed case.
95 This is present here to reuse the testing around files with "special" names.
99 This is present here to reuse the testing around files with "special" names.
96
100
97 $ hg clone server local-clone
101 $ hg clone server local-clone
98 updating to branch default
102 updating to branch default
99 1088 files updated, 0 files merged, 0 files removed, 0 files unresolved
103 1088 files updated, 0 files merged, 0 files removed, 0 files unresolved
100
104
101 Check that the clone went well
105 Check that the clone went well
102
106
103 $ hg verify -R local-clone -q
107 $ hg verify -R local-clone -q
104
108
105 Check uncompressed
109 Check uncompressed
106 ==================
110 ==================
107
111
108 Cannot stream clone when server.uncompressed is set
112 Cannot stream clone when server.uncompressed is set
109
113
110 $ get-with-headers.py $LOCALIP:$HGPORT '?cmd=stream_out'
114 $ get-with-headers.py $LOCALIP:$HGPORT '?cmd=stream_out'
111 200 Script output follows
115 200 Script output follows
112
116
113 1
117 1
114
118
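The server above was started with --config server.uncompressed=false. The
persistent form of the same switch, for a server that should always refuse
stream clones, is a sketch like:

  $ cat << EOF >> server/.hg/hgrc
  > [server]
  > # refuse to serve stream ("uncompressed") clones
  > uncompressed = false
  > EOF
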
115 #if stream-legacy
119 #if stream-legacy
116 $ hg debugcapabilities http://localhost:$HGPORT
120 $ hg debugcapabilities http://localhost:$HGPORT
117 Main capabilities:
121 Main capabilities:
118 batch
122 batch
119 branchmap
123 branchmap
120 $USUAL_BUNDLE2_CAPS_SERVER$
124 $USUAL_BUNDLE2_CAPS_SERVER$
121 changegroupsubset
125 changegroupsubset
122 compression=$BUNDLE2_COMPRESSIONS$
126 compression=$BUNDLE2_COMPRESSIONS$
123 getbundle
127 getbundle
124 httpheader=1024
128 httpheader=1024
125 httpmediatype=0.1rx,0.1tx,0.2tx
129 httpmediatype=0.1rx,0.1tx,0.2tx
126 known
130 known
127 lookup
131 lookup
128 pushkey
132 pushkey
129 unbundle=HG10GZ,HG10BZ,HG10UN
133 unbundle=HG10GZ,HG10BZ,HG10UN
130 unbundlehash
134 unbundlehash
131 Bundle2 capabilities:
135 Bundle2 capabilities:
132 HG20
136 HG20
133 bookmarks
137 bookmarks
134 changegroup
138 changegroup
135 01
139 01
136 02
140 02
137 03
141 03
138 checkheads
142 checkheads
139 related
143 related
140 digests
144 digests
141 md5
145 md5
142 sha1
146 sha1
143 sha512
147 sha512
144 error
148 error
145 abort
149 abort
146 unsupportedcontent
150 unsupportedcontent
147 pushraced
151 pushraced
148 pushkey
152 pushkey
149 hgtagsfnodes
153 hgtagsfnodes
150 listkeys
154 listkeys
151 phases
155 phases
152 heads
156 heads
153 pushkey
157 pushkey
154 remote-changegroup
158 remote-changegroup
155 http
159 http
156 https
160 https
157
161
158 $ hg clone --stream -U http://localhost:$HGPORT server-disabled
162 $ hg clone --stream -U http://localhost:$HGPORT server-disabled
159 warning: stream clone requested but server has them disabled
163 warning: stream clone requested but server has them disabled
160 requesting all changes
164 requesting all changes
161 adding changesets
165 adding changesets
162 adding manifests
166 adding manifests
163 adding file changes
167 adding file changes
164 added 3 changesets with 1088 changes to 1088 files
168 added 3 changesets with 1088 changes to 1088 files
165 new changesets 96ee1d7354c4:5223b5e3265f
169 new changesets 96ee1d7354c4:5223b5e3265f
166
170
167 $ get-with-headers.py $LOCALIP:$HGPORT '?cmd=getbundle' content-type --bodyfile body --hgproto 0.2 --requestheader "x-hgarg-1=bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=0&common=0000000000000000000000000000000000000000&heads=c17445101a72edac06facd130d14808dfbd5c7c2&stream=1"
171 $ get-with-headers.py $LOCALIP:$HGPORT '?cmd=getbundle' content-type --bodyfile body --hgproto 0.2 --requestheader "x-hgarg-1=bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=0&common=0000000000000000000000000000000000000000&heads=c17445101a72edac06facd130d14808dfbd5c7c2&stream=1"
168 200 Script output follows
172 200 Script output follows
169 content-type: application/mercurial-0.2
173 content-type: application/mercurial-0.2
170
174
171
175
172 $ f --size body --hexdump --bytes 100
176 $ f --size body --hexdump --bytes 100
173 body: size=140
177 body: size=140
174 0000: 04 6e 6f 6e 65 48 47 32 30 00 00 00 00 00 00 00 |.noneHG20.......|
178 0000: 04 6e 6f 6e 65 48 47 32 30 00 00 00 00 00 00 00 |.noneHG20.......|
175 0010: 73 0b 45 52 52 4f 52 3a 41 42 4f 52 54 00 00 00 |s.ERROR:ABORT...|
179 0010: 73 0b 45 52 52 4f 52 3a 41 42 4f 52 54 00 00 00 |s.ERROR:ABORT...|
176 0020: 00 01 01 07 3c 04 16 6d 65 73 73 61 67 65 73 74 |....<..messagest|
180 0020: 00 01 01 07 3c 04 16 6d 65 73 73 61 67 65 73 74 |....<..messagest|
177 0030: 72 65 61 6d 20 64 61 74 61 20 72 65 71 75 65 73 |ream data reques|
181 0030: 72 65 61 6d 20 64 61 74 61 20 72 65 71 75 65 73 |ream data reques|
178 0040: 74 65 64 20 62 75 74 20 73 65 72 76 65 72 20 64 |ted but server d|
182 0040: 74 65 64 20 62 75 74 20 73 65 72 76 65 72 20 64 |ted but server d|
179 0050: 6f 65 73 20 6e 6f 74 20 61 6c 6c 6f 77 20 74 68 |oes not allow th|
183 0050: 6f 65 73 20 6e 6f 74 20 61 6c 6c 6f 77 20 74 68 |oes not allow th|
180 0060: 69 73 20 66 |is f|
184 0060: 69 73 20 66 |is f|
181
185
182 #endif
186 #endif
183 #if stream-bundle2-v2
187 #if stream-bundle2-v2
184 $ hg debugcapabilities http://localhost:$HGPORT
188 $ hg debugcapabilities http://localhost:$HGPORT
185 Main capabilities:
189 Main capabilities:
186 batch
190 batch
187 branchmap
191 branchmap
188 $USUAL_BUNDLE2_CAPS_SERVER$
192 $USUAL_BUNDLE2_CAPS_SERVER$
189 changegroupsubset
193 changegroupsubset
190 compression=$BUNDLE2_COMPRESSIONS$
194 compression=$BUNDLE2_COMPRESSIONS$
191 getbundle
195 getbundle
192 httpheader=1024
196 httpheader=1024
193 httpmediatype=0.1rx,0.1tx,0.2tx
197 httpmediatype=0.1rx,0.1tx,0.2tx
194 known
198 known
195 lookup
199 lookup
196 pushkey
200 pushkey
197 unbundle=HG10GZ,HG10BZ,HG10UN
201 unbundle=HG10GZ,HG10BZ,HG10UN
198 unbundlehash
202 unbundlehash
199 Bundle2 capabilities:
203 Bundle2 capabilities:
200 HG20
204 HG20
201 bookmarks
205 bookmarks
202 changegroup
206 changegroup
203 01
207 01
204 02
208 02
205 03
209 03
206 checkheads
210 checkheads
207 related
211 related
208 digests
212 digests
209 md5
213 md5
210 sha1
214 sha1
211 sha512
215 sha512
212 error
216 error
213 abort
217 abort
214 unsupportedcontent
218 unsupportedcontent
215 pushraced
219 pushraced
216 pushkey
220 pushkey
217 hgtagsfnodes
221 hgtagsfnodes
218 listkeys
222 listkeys
219 phases
223 phases
220 heads
224 heads
221 pushkey
225 pushkey
222 remote-changegroup
226 remote-changegroup
223 http
227 http
224 https
228 https
225
229
226 $ hg clone --stream -U http://localhost:$HGPORT server-disabled
230 $ hg clone --stream -U http://localhost:$HGPORT server-disabled
227 warning: stream clone requested but server has them disabled
231 warning: stream clone requested but server has them disabled
228 requesting all changes
232 requesting all changes
229 adding changesets
233 adding changesets
230 adding manifests
234 adding manifests
231 adding file changes
235 adding file changes
232 added 3 changesets with 1088 changes to 1088 files
236 added 3 changesets with 1088 changes to 1088 files
233 new changesets 96ee1d7354c4:5223b5e3265f
237 new changesets 96ee1d7354c4:5223b5e3265f
234
238
235 $ get-with-headers.py $LOCALIP:$HGPORT '?cmd=getbundle' content-type --bodyfile body --hgproto 0.2 --requestheader "x-hgarg-1=bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=0&common=0000000000000000000000000000000000000000&heads=c17445101a72edac06facd130d14808dfbd5c7c2&stream=1"
239 $ get-with-headers.py $LOCALIP:$HGPORT '?cmd=getbundle' content-type --bodyfile body --hgproto 0.2 --requestheader "x-hgarg-1=bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=0&common=0000000000000000000000000000000000000000&heads=c17445101a72edac06facd130d14808dfbd5c7c2&stream=1"
236 200 Script output follows
240 200 Script output follows
237 content-type: application/mercurial-0.2
241 content-type: application/mercurial-0.2
238
242
239
243
240 $ f --size body --hexdump --bytes 100
244 $ f --size body --hexdump --bytes 100
241 body: size=140
245 body: size=140
242 0000: 04 6e 6f 6e 65 48 47 32 30 00 00 00 00 00 00 00 |.noneHG20.......|
246 0000: 04 6e 6f 6e 65 48 47 32 30 00 00 00 00 00 00 00 |.noneHG20.......|
243 0010: 73 0b 45 52 52 4f 52 3a 41 42 4f 52 54 00 00 00 |s.ERROR:ABORT...|
247 0010: 73 0b 45 52 52 4f 52 3a 41 42 4f 52 54 00 00 00 |s.ERROR:ABORT...|
244 0020: 00 01 01 07 3c 04 16 6d 65 73 73 61 67 65 73 74 |....<..messagest|
248 0020: 00 01 01 07 3c 04 16 6d 65 73 73 61 67 65 73 74 |....<..messagest|
245 0030: 72 65 61 6d 20 64 61 74 61 20 72 65 71 75 65 73 |ream data reques|
249 0030: 72 65 61 6d 20 64 61 74 61 20 72 65 71 75 65 73 |ream data reques|
246 0040: 74 65 64 20 62 75 74 20 73 65 72 76 65 72 20 64 |ted but server d|
250 0040: 74 65 64 20 62 75 74 20 73 65 72 76 65 72 20 64 |ted but server d|
247 0050: 6f 65 73 20 6e 6f 74 20 61 6c 6c 6f 77 20 74 68 |oes not allow th|
251 0050: 6f 65 73 20 6e 6f 74 20 61 6c 6c 6f 77 20 74 68 |oes not allow th|
248 0060: 69 73 20 66 |is f|
252 0060: 69 73 20 66 |is f|
249
253
250 #endif
254 #endif
251 #if stream-bundle2-v3
255 #if stream-bundle2-v3
252 $ hg debugcapabilities http://localhost:$HGPORT
256 $ hg debugcapabilities http://localhost:$HGPORT
253 Main capabilities:
257 Main capabilities:
254 batch
258 batch
255 branchmap
259 branchmap
256 $USUAL_BUNDLE2_CAPS_SERVER$
260 $USUAL_BUNDLE2_CAPS_SERVER$
257 changegroupsubset
261 changegroupsubset
258 compression=$BUNDLE2_COMPRESSIONS$
262 compression=$BUNDLE2_COMPRESSIONS$
259 getbundle
263 getbundle
260 httpheader=1024
264 httpheader=1024
261 httpmediatype=0.1rx,0.1tx,0.2tx
265 httpmediatype=0.1rx,0.1tx,0.2tx
262 known
266 known
263 lookup
267 lookup
264 pushkey
268 pushkey
265 unbundle=HG10GZ,HG10BZ,HG10UN
269 unbundle=HG10GZ,HG10BZ,HG10UN
266 unbundlehash
270 unbundlehash
267 Bundle2 capabilities:
271 Bundle2 capabilities:
268 HG20
272 HG20
269 bookmarks
273 bookmarks
270 changegroup
274 changegroup
271 01
275 01
272 02
276 02
273 03
277 03
274 checkheads
278 checkheads
275 related
279 related
276 digests
280 digests
277 md5
281 md5
278 sha1
282 sha1
279 sha512
283 sha512
280 error
284 error
281 abort
285 abort
282 unsupportedcontent
286 unsupportedcontent
283 pushraced
287 pushraced
284 pushkey
288 pushkey
285 hgtagsfnodes
289 hgtagsfnodes
286 listkeys
290 listkeys
287 phases
291 phases
288 heads
292 heads
289 pushkey
293 pushkey
290 remote-changegroup
294 remote-changegroup
291 http
295 http
292 https
296 https
293
297
294 $ hg clone --stream -U http://localhost:$HGPORT server-disabled
298 $ hg clone --stream -U http://localhost:$HGPORT server-disabled
295 warning: stream clone requested but server has them disabled
299 warning: stream clone requested but server has them disabled
296 requesting all changes
300 requesting all changes
297 adding changesets
301 adding changesets
298 adding manifests
302 adding manifests
299 adding file changes
303 adding file changes
300 added 3 changesets with 1088 changes to 1088 files
304 added 3 changesets with 1088 changes to 1088 files
301 new changesets 96ee1d7354c4:5223b5e3265f
305 new changesets 96ee1d7354c4:5223b5e3265f
302
306
303 $ get-with-headers.py $LOCALIP:$HGPORT '?cmd=getbundle' content-type --bodyfile body --hgproto 0.2 --requestheader "x-hgarg-1=bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=0&common=0000000000000000000000000000000000000000&heads=c17445101a72edac06facd130d14808dfbd5c7c2&stream=1"
307 $ get-with-headers.py $LOCALIP:$HGPORT '?cmd=getbundle' content-type --bodyfile body --hgproto 0.2 --requestheader "x-hgarg-1=bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=0&common=0000000000000000000000000000000000000000&heads=c17445101a72edac06facd130d14808dfbd5c7c2&stream=1"
304 200 Script output follows
308 200 Script output follows
305 content-type: application/mercurial-0.2
309 content-type: application/mercurial-0.2
306
310
307
311
308 $ f --size body --hexdump --bytes 100
312 $ f --size body --hexdump --bytes 100
309 body: size=140
313 body: size=140
310 0000: 04 6e 6f 6e 65 48 47 32 30 00 00 00 00 00 00 00 |.noneHG20.......|
314 0000: 04 6e 6f 6e 65 48 47 32 30 00 00 00 00 00 00 00 |.noneHG20.......|
311 0010: 73 0b 45 52 52 4f 52 3a 41 42 4f 52 54 00 00 00 |s.ERROR:ABORT...|
315 0010: 73 0b 45 52 52 4f 52 3a 41 42 4f 52 54 00 00 00 |s.ERROR:ABORT...|
312 0020: 00 01 01 07 3c 04 16 6d 65 73 73 61 67 65 73 74 |....<..messagest|
316 0020: 00 01 01 07 3c 04 16 6d 65 73 73 61 67 65 73 74 |....<..messagest|
313 0030: 72 65 61 6d 20 64 61 74 61 20 72 65 71 75 65 73 |ream data reques|
317 0030: 72 65 61 6d 20 64 61 74 61 20 72 65 71 75 65 73 |ream data reques|
314 0040: 74 65 64 20 62 75 74 20 73 65 72 76 65 72 20 64 |ted but server d|
318 0040: 74 65 64 20 62 75 74 20 73 65 72 76 65 72 20 64 |ted but server d|
315 0050: 6f 65 73 20 6e 6f 74 20 61 6c 6c 6f 77 20 74 68 |oes not allow th|
319 0050: 6f 65 73 20 6e 6f 74 20 61 6c 6c 6f 77 20 74 68 |oes not allow th|
316 0060: 69 73 20 66 |is f|
320 0060: 69 73 20 66 |is f|
317
321
318 #endif
322 #endif
319
323
320 $ killdaemons.py
324 $ killdaemons.py
321 $ cd server
325 $ cd server
322 $ hg serve -p $HGPORT -d --pid-file=hg.pid --error errors.txt
326 $ hg serve -p $HGPORT -d --pid-file=hg.pid --error errors.txt
323 $ cat hg.pid > $DAEMON_PIDS
327 $ cat hg.pid > $DAEMON_PIDS
324 $ cd ..
328 $ cd ..
325
329
326 Basic clone
330 Basic clone
327
331
328 #if stream-legacy
332 #if stream-legacy
329 $ hg clone --stream -U http://localhost:$HGPORT clone1
333 $ hg clone --stream -U http://localhost:$HGPORT clone1
330 streaming all changes
334 streaming all changes
331 1090 files to transfer, 102 KB of data (no-zstd !)
335 1091 files to transfer, 102 KB of data (no-zstd !)
332 transferred 102 KB in * seconds (* */sec) (glob) (no-zstd !)
336 transferred 102 KB in * seconds (* */sec) (glob) (no-zstd !)
333 1090 files to transfer, 98.8 KB of data (zstd !)
337 1091 files to transfer, 98.8 KB of data (zstd !)
334 transferred 98.8 KB in * seconds (* */sec) (glob) (zstd !)
338 transferred 98.8 KB in * seconds (* */sec) (glob) (zstd !)
335 searching for changes
339 searching for changes
336 no changes found
340 no changes found
337 $ cat server/errors.txt
341 $ cat server/errors.txt
338 #endif
342 #endif
339 #if stream-bundle2-v2
343 #if stream-bundle2-v2
340 $ hg clone --stream -U http://localhost:$HGPORT clone1
344 $ hg clone --stream -U http://localhost:$HGPORT clone1
341 streaming all changes
345 streaming all changes
342 1093 files to transfer, 102 KB of data (no-zstd !)
346 1094 files to transfer, 102 KB of data (no-zstd !)
343 transferred 102 KB in * seconds (* */sec) (glob) (no-zstd !)
347 transferred 102 KB in * seconds (* */sec) (glob) (no-zstd !)
344 1093 files to transfer, 98.9 KB of data (zstd !)
348 1094 files to transfer, 98.9 KB of data (zstd no-rust !)
345 transferred 98.9 KB in * seconds (* */sec) (glob) (zstd !)
349 transferred 98.9 KB in * seconds (* */sec) (glob) (zstd no-rust !)
350 1096 files to transfer, 99.0 KB of data (zstd rust !)
351 transferred 99.0 KB in * seconds (* */sec) (glob) (zstd rust !)
346
352
347 $ ls -1 clone1/.hg/cache
353 $ ls -1 clone1/.hg/cache
348 branch2-base
354 branch2-base
349 branch2-immutable
355 branch2-immutable
350 branch2-served
356 branch2-served
351 branch2-served.hidden
357 branch2-served.hidden
352 branch2-visible
358 branch2-visible
353 branch2-visible-hidden
359 branch2-visible-hidden
354 rbc-names-v1
360 rbc-names-v1
355 rbc-revs-v1
361 rbc-revs-v1
356 tags2
362 tags2
357 tags2-served
363 tags2-served
358 $ cat server/errors.txt
364 $ cat server/errors.txt
359 #endif
365 #endif
360 #if stream-bundle2-v3
366 #if stream-bundle2-v3
361 $ hg clone --stream -U http://localhost:$HGPORT clone1
367 $ hg clone --stream -U http://localhost:$HGPORT clone1
362 streaming all changes
368 streaming all changes
363 1093 entries to transfer
369 1093 entries to transfer
364 transferred 102 KB in * seconds (* */sec) (glob) (no-zstd !)
370 transferred 102 KB in * seconds (* */sec) (glob) (no-zstd !)
365 transferred 98.9 KB in * seconds (* */sec) (glob) (zstd !)
371 transferred 98.9 KB in * seconds (* */sec) (glob) (zstd no-rust !)
372 transferred 99.0 KB in * seconds (* */sec) (glob) (zstd rust !)
366
373
367 $ ls -1 clone1/.hg/cache
374 $ ls -1 clone1/.hg/cache
368 branch2-base
375 branch2-base
369 branch2-immutable
376 branch2-immutable
370 branch2-served
377 branch2-served
371 branch2-served.hidden
378 branch2-served.hidden
372 branch2-visible
379 branch2-visible
373 branch2-visible-hidden
380 branch2-visible-hidden
374 rbc-names-v1
381 rbc-names-v1
375 rbc-revs-v1
382 rbc-revs-v1
376 tags2
383 tags2
377 tags2-served
384 tags2-served
378 $ cat server/errors.txt
385 $ cat server/errors.txt
379 #endif
386 #endif
380
387
381 getbundle requests with stream=1 are uncompressed
388 getbundle requests with stream=1 are uncompressed
382
389
383 $ get-with-headers.py $LOCALIP:$HGPORT '?cmd=getbundle' content-type --bodyfile body --hgproto '0.1 0.2 comp=zlib,none' --requestheader "x-hgarg-1=bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Astream%253Dv2&cg=0&common=0000000000000000000000000000000000000000&heads=c17445101a72edac06facd130d14808dfbd5c7c2&stream=1"
390 $ get-with-headers.py $LOCALIP:$HGPORT '?cmd=getbundle' content-type --bodyfile body --hgproto '0.1 0.2 comp=zlib,none' --requestheader "x-hgarg-1=bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Astream%253Dv2&cg=0&common=0000000000000000000000000000000000000000&heads=c17445101a72edac06facd130d14808dfbd5c7c2&stream=1"
384 200 Script output follows
391 200 Script output follows
385 content-type: application/mercurial-0.2
392 content-type: application/mercurial-0.2
386
393
387
394
388 #if no-zstd no-rust
395 #if no-zstd no-rust
389 $ f --size --hex --bytes 256 body
396 $ f --size --hex --bytes 256 body
390 body: size=119123
397 body: size=119140
391 0000: 04 6e 6f 6e 65 48 47 32 30 00 00 00 00 00 00 00 |.noneHG20.......|
398 0000: 04 6e 6f 6e 65 48 47 32 30 00 00 00 00 00 00 00 |.noneHG20.......|
392 0010: 62 07 53 54 52 45 41 4d 32 00 00 00 00 03 00 09 |b.STREAM2.......|
399 0010: 62 07 53 54 52 45 41 4d 32 00 00 00 00 03 00 09 |b.STREAM2.......|
393 0020: 06 09 04 0c 26 62 79 74 65 63 6f 75 6e 74 31 30 |....&bytecount10|
400 0020: 06 09 04 0c 26 62 79 74 65 63 6f 75 6e 74 31 30 |....&bytecount10|
394 0030: 34 31 31 35 66 69 6c 65 63 6f 75 6e 74 31 30 39 |4115filecount109|
401 0030: 34 31 31 35 66 69 6c 65 63 6f 75 6e 74 31 30 39 |4115filecount109|
395 0040: 33 72 65 71 75 69 72 65 6d 65 6e 74 73 67 65 6e |3requirementsgen|
402 0040: 34 72 65 71 75 69 72 65 6d 65 6e 74 73 67 65 6e |4requirementsgen|
396 0050: 65 72 61 6c 64 65 6c 74 61 25 32 43 72 65 76 6c |eraldelta%2Crevl|
403 0050: 65 72 61 6c 64 65 6c 74 61 25 32 43 72 65 76 6c |eraldelta%2Crevl|
397 0060: 6f 67 76 31 25 32 43 73 70 61 72 73 65 72 65 76 |ogv1%2Csparserev|
404 0060: 6f 67 76 31 25 32 43 73 70 61 72 73 65 72 65 76 |ogv1%2Csparserev|
398 0070: 6c 6f 67 00 00 80 00 73 08 42 64 61 74 61 2f 30 |log....s.Bdata/0|
405 0070: 6c 6f 67 00 00 80 00 73 08 42 64 61 74 61 2f 30 |log....s.Bdata/0|
399 0080: 2e 69 00 03 00 01 00 00 00 00 00 00 00 02 00 00 |.i..............|
406 0080: 2e 69 00 03 00 01 00 00 00 00 00 00 00 02 00 00 |.i..............|
400 0090: 00 01 00 00 00 00 00 00 00 01 ff ff ff ff ff ff |................|
407 0090: 00 01 00 00 00 00 00 00 00 01 ff ff ff ff ff ff |................|
401 00a0: ff ff 80 29 63 a0 49 d3 23 87 bf ce fe 56 67 92 |...)c.I.#....Vg.|
408 00a0: ff ff 80 29 63 a0 49 d3 23 87 bf ce fe 56 67 92 |...)c.I.#....Vg.|
402 00b0: 67 2c 69 d1 ec 39 00 00 00 00 00 00 00 00 00 00 |g,i..9..........|
409 00b0: 67 2c 69 d1 ec 39 00 00 00 00 00 00 00 00 00 00 |g,i..9..........|
403 00c0: 00 00 75 30 73 26 45 64 61 74 61 2f 30 30 63 68 |..u0s&Edata/00ch|
410 00c0: 00 00 75 30 73 26 45 64 61 74 61 2f 30 30 63 68 |..u0s&Edata/00ch|
404 00d0: 61 6e 67 65 6c 6f 67 2d 61 62 33 34 39 31 38 30 |angelog-ab349180|
411 00d0: 61 6e 67 65 6c 6f 67 2d 61 62 33 34 39 31 38 30 |angelog-ab349180|
405 00e0: 61 30 34 30 35 30 31 30 2e 6e 64 2e 69 00 03 00 |a0405010.nd.i...|
412 00e0: 61 30 34 30 35 30 31 30 2e 6e 64 2e 69 00 03 00 |a0405010.nd.i...|
406 00f0: 01 00 00 00 00 00 00 00 05 00 00 00 04 00 00 00 |................|
413 00f0: 01 00 00 00 00 00 00 00 05 00 00 00 04 00 00 00 |................|
407 #endif
414 #endif
408 #if zstd no-rust
415 #if zstd no-rust
409 $ f --size --hex --bytes 256 body
416 $ f --size --hex --bytes 256 body
410 body: size=116310 (no-bigendian !)
417 body: size=116327 (no-bigendian !)
411 body: size=116305 (bigendian !)
418 body: size=116322 (bigendian !)
412 0000: 04 6e 6f 6e 65 48 47 32 30 00 00 00 00 00 00 00 |.noneHG20.......|
419 0000: 04 6e 6f 6e 65 48 47 32 30 00 00 00 00 00 00 00 |.noneHG20.......|
413 0010: 7c 07 53 54 52 45 41 4d 32 00 00 00 00 03 00 09 ||.STREAM2.......|
420 0010: 7c 07 53 54 52 45 41 4d 32 00 00 00 00 03 00 09 ||.STREAM2.......|
414 0020: 06 09 04 0c 40 62 79 74 65 63 6f 75 6e 74 31 30 |....@bytecount10|
421 0020: 06 09 04 0c 40 62 79 74 65 63 6f 75 6e 74 31 30 |....@bytecount10|
415 0030: 31 32 37 36 66 69 6c 65 63 6f 75 6e 74 31 30 39 |1276filecount109| (no-bigendian !)
422 0030: 31 32 37 36 66 69 6c 65 63 6f 75 6e 74 31 30 39 |1276filecount109| (no-bigendian !)
416 0030: 31 32 37 31 66 69 6c 65 63 6f 75 6e 74 31 30 39 |1271filecount109| (bigendian !)
423 0030: 31 32 37 31 66 69 6c 65 63 6f 75 6e 74 31 30 39 |1271filecount109| (bigendian !)
417 0040: 33 72 65 71 75 69 72 65 6d 65 6e 74 73 67 65 6e |3requirementsgen|
424 0040: 34 72 65 71 75 69 72 65 6d 65 6e 74 73 67 65 6e |4requirementsgen|
418 0050: 65 72 61 6c 64 65 6c 74 61 25 32 43 72 65 76 6c |eraldelta%2Crevl|
425 0050: 65 72 61 6c 64 65 6c 74 61 25 32 43 72 65 76 6c |eraldelta%2Crevl|
419 0060: 6f 67 2d 63 6f 6d 70 72 65 73 73 69 6f 6e 2d 7a |og-compression-z|
426 0060: 6f 67 2d 63 6f 6d 70 72 65 73 73 69 6f 6e 2d 7a |og-compression-z|
420 0070: 73 74 64 25 32 43 72 65 76 6c 6f 67 76 31 25 32 |std%2Crevlogv1%2|
427 0070: 73 74 64 25 32 43 72 65 76 6c 6f 67 76 31 25 32 |std%2Crevlogv1%2|
421 0080: 43 73 70 61 72 73 65 72 65 76 6c 6f 67 00 00 80 |Csparserevlog...|
428 0080: 43 73 70 61 72 73 65 72 65 76 6c 6f 67 00 00 80 |Csparserevlog...|
422 0090: 00 73 08 42 64 61 74 61 2f 30 2e 69 00 03 00 01 |.s.Bdata/0.i....|
429 0090: 00 73 08 42 64 61 74 61 2f 30 2e 69 00 03 00 01 |.s.Bdata/0.i....|
423 00a0: 00 00 00 00 00 00 00 02 00 00 00 01 00 00 00 00 |................|
430 00a0: 00 00 00 00 00 00 00 02 00 00 00 01 00 00 00 00 |................|
424 00b0: 00 00 00 01 ff ff ff ff ff ff ff ff 80 29 63 a0 |.............)c.|
431 00b0: 00 00 00 01 ff ff ff ff ff ff ff ff 80 29 63 a0 |.............)c.|
425 00c0: 49 d3 23 87 bf ce fe 56 67 92 67 2c 69 d1 ec 39 |I.#....Vg.g,i..9|
432 00c0: 49 d3 23 87 bf ce fe 56 67 92 67 2c 69 d1 ec 39 |I.#....Vg.g,i..9|
426 00d0: 00 00 00 00 00 00 00 00 00 00 00 00 75 30 73 26 |............u0s&|
433 00d0: 00 00 00 00 00 00 00 00 00 00 00 00 75 30 73 26 |............u0s&|
427 00e0: 45 64 61 74 61 2f 30 30 63 68 61 6e 67 65 6c 6f |Edata/00changelo|
434 00e0: 45 64 61 74 61 2f 30 30 63 68 61 6e 67 65 6c 6f |Edata/00changelo|
428 00f0: 67 2d 61 62 33 34 39 31 38 30 61 30 34 30 35 30 |g-ab349180a04050|
435 00f0: 67 2d 61 62 33 34 39 31 38 30 61 30 34 30 35 30 |g-ab349180a04050|
429 #endif
436 #endif
430 #if zstd rust no-dirstate-v2
437 #if zstd rust no-dirstate-v2
431 $ f --size --hex --bytes 256 body
438 $ f --size --hex --bytes 256 body
432 body: size=116310
439 body: size=116310 (no-rust !)
440 body: size=116495 (rust no-stream-legacy no-bigendian !)
441 body: size=116490 (rust no-stream-legacy bigendian !)
442 body: size=116327 (rust stream-legacy no-bigendian !)
443 body: size=116322 (rust stream-legacy bigendian !)
433 0000: 04 6e 6f 6e 65 48 47 32 30 00 00 00 00 00 00 00 |.noneHG20.......|
444 0000: 04 6e 6f 6e 65 48 47 32 30 00 00 00 00 00 00 00 |.noneHG20.......|
434 0010: 7c 07 53 54 52 45 41 4d 32 00 00 00 00 03 00 09 ||.STREAM2.......|
445 0010: 7c 07 53 54 52 45 41 4d 32 00 00 00 00 03 00 09 ||.STREAM2.......|
435 0020: 06 09 04 0c 40 62 79 74 65 63 6f 75 6e 74 31 30 |....@bytecount10|
446 0020: 06 09 04 0c 40 62 79 74 65 63 6f 75 6e 74 31 30 |....@bytecount10|
436 0030: 31 32 37 36 66 69 6c 65 63 6f 75 6e 74 31 30 39 |1276filecount109|
447 0030: 31 32 37 36 66 69 6c 65 63 6f 75 6e 74 31 30 39 |1276filecount109| (no-rust !)
437 0040: 33 72 65 71 75 69 72 65 6d 65 6e 74 73 67 65 6e |3requirementsgen|
448 0040: 33 72 65 71 75 69 72 65 6d 65 6e 74 73 67 65 6e |3requirementsgen| (no-rust !)
449 0030: 31 34 30 32 66 69 6c 65 63 6f 75 6e 74 31 30 39 |1402filecount109| (rust no-stream-legacy no-bigendian !)
450 0030: 31 33 39 37 66 69 6c 65 63 6f 75 6e 74 31 30 39 |1397filecount109| (rust no-stream-legacy bigendian !)
451 0040: 36 72 65 71 75 69 72 65 6d 65 6e 74 73 67 65 6e |6requirementsgen| (rust no-stream-legacy !)
452 0030: 31 32 37 36 66 69 6c 65 63 6f 75 6e 74 31 30 39 |1276filecount109| (rust stream-legacy no-bigendian !)
453 0030: 31 32 37 31 66 69 6c 65 63 6f 75 6e 74 31 30 39 |1271filecount109| (rust stream-legacy bigendian !)
454 0040: 34 72 65 71 75 69 72 65 6d 65 6e 74 73 67 65 6e |4requirementsgen| (rust stream-legacy !)
438 0050: 65 72 61 6c 64 65 6c 74 61 25 32 43 72 65 76 6c |eraldelta%2Crevl|
455 0050: 65 72 61 6c 64 65 6c 74 61 25 32 43 72 65 76 6c |eraldelta%2Crevl|
439 0060: 6f 67 2d 63 6f 6d 70 72 65 73 73 69 6f 6e 2d 7a |og-compression-z|
456 0060: 6f 67 2d 63 6f 6d 70 72 65 73 73 69 6f 6e 2d 7a |og-compression-z|
440 0070: 73 74 64 25 32 43 72 65 76 6c 6f 67 76 31 25 32 |std%2Crevlogv1%2|
457 0070: 73 74 64 25 32 43 72 65 76 6c 6f 67 76 31 25 32 |std%2Crevlogv1%2|
441 0080: 43 73 70 61 72 73 65 72 65 76 6c 6f 67 00 00 80 |Csparserevlog...|
458 0080: 43 73 70 61 72 73 65 72 65 76 6c 6f 67 00 00 80 |Csparserevlog...|
442 0090: 00 73 08 42 64 61 74 61 2f 30 2e 69 00 03 00 01 |.s.Bdata/0.i....|
459 0090: 00 73 08 42 64 61 74 61 2f 30 2e 69 00 03 00 01 |.s.Bdata/0.i....|
443 00a0: 00 00 00 00 00 00 00 02 00 00 00 01 00 00 00 00 |................|
460 00a0: 00 00 00 00 00 00 00 02 00 00 00 01 00 00 00 00 |................|
444 00b0: 00 00 00 01 ff ff ff ff ff ff ff ff 80 29 63 a0 |.............)c.|
461 00b0: 00 00 00 01 ff ff ff ff ff ff ff ff 80 29 63 a0 |.............)c.|
445 00c0: 49 d3 23 87 bf ce fe 56 67 92 67 2c 69 d1 ec 39 |I.#....Vg.g,i..9|
462 00c0: 49 d3 23 87 bf ce fe 56 67 92 67 2c 69 d1 ec 39 |I.#....Vg.g,i..9|
446 00d0: 00 00 00 00 00 00 00 00 00 00 00 00 75 30 73 26 |............u0s&|
463 00d0: 00 00 00 00 00 00 00 00 00 00 00 00 75 30 73 26 |............u0s&|
447 00e0: 45 64 61 74 61 2f 30 30 63 68 61 6e 67 65 6c 6f |Edata/00changelo|
464 00e0: 45 64 61 74 61 2f 30 30 63 68 61 6e 67 65 6c 6f |Edata/00changelo|
448 00f0: 67 2d 61 62 33 34 39 31 38 30 61 30 34 30 35 30 |g-ab349180a04050|
465 00f0: 67 2d 61 62 33 34 39 31 38 30 61 30 34 30 35 30 |g-ab349180a04050|
449 #endif
466 #endif
450 #if zstd dirstate-v2
467 #if zstd dirstate-v2
451 $ f --size --hex --bytes 256 body
468 $ f --size --hex --bytes 256 body
452 body: size=109549
469 body: size=109549
453 0000: 04 6e 6f 6e 65 48 47 32 30 00 00 00 00 00 00 00 |.noneHG20.......|
470 0000: 04 6e 6f 6e 65 48 47 32 30 00 00 00 00 00 00 00 |.noneHG20.......|
454 0010: c0 07 53 54 52 45 41 4d 32 00 00 00 00 03 00 09 |..STREAM2.......|
471 0010: c0 07 53 54 52 45 41 4d 32 00 00 00 00 03 00 09 |..STREAM2.......|
455 0020: 05 09 04 0c 85 62 79 74 65 63 6f 75 6e 74 39 35 |.....bytecount95|
472 0020: 05 09 04 0c 85 62 79 74 65 63 6f 75 6e 74 39 35 |.....bytecount95|
456 0030: 38 39 37 66 69 6c 65 63 6f 75 6e 74 31 30 33 30 |897filecount1030|
473 0030: 38 39 37 66 69 6c 65 63 6f 75 6e 74 31 30 33 30 |897filecount1030|
457 0040: 72 65 71 75 69 72 65 6d 65 6e 74 73 64 6f 74 65 |requirementsdote|
474 0040: 72 65 71 75 69 72 65 6d 65 6e 74 73 64 6f 74 65 |requirementsdote|
458 0050: 6e 63 6f 64 65 25 32 43 65 78 70 2d 64 69 72 73 |ncode%2Cexp-dirs|
475 0050: 6e 63 6f 64 65 25 32 43 65 78 70 2d 64 69 72 73 |ncode%2Cexp-dirs|
459 0060: 74 61 74 65 2d 76 32 25 32 43 66 6e 63 61 63 68 |tate-v2%2Cfncach|
476 0060: 74 61 74 65 2d 76 32 25 32 43 66 6e 63 61 63 68 |tate-v2%2Cfncach|
460 0070: 65 25 32 43 67 65 6e 65 72 61 6c 64 65 6c 74 61 |e%2Cgeneraldelta|
477 0070: 65 25 32 43 67 65 6e 65 72 61 6c 64 65 6c 74 61 |e%2Cgeneraldelta|
461 0080: 25 32 43 70 65 72 73 69 73 74 65 6e 74 2d 6e 6f |%2Cpersistent-no|
478 0080: 25 32 43 70 65 72 73 69 73 74 65 6e 74 2d 6e 6f |%2Cpersistent-no|
462 0090: 64 65 6d 61 70 25 32 43 72 65 76 6c 6f 67 2d 63 |demap%2Crevlog-c|
479 0090: 64 65 6d 61 70 25 32 43 72 65 76 6c 6f 67 2d 63 |demap%2Crevlog-c|
463 00a0: 6f 6d 70 72 65 73 73 69 6f 6e 2d 7a 73 74 64 25 |ompression-zstd%|
480 00a0: 6f 6d 70 72 65 73 73 69 6f 6e 2d 7a 73 74 64 25 |ompression-zstd%|
464 00b0: 32 43 72 65 76 6c 6f 67 76 31 25 32 43 73 70 61 |2Crevlogv1%2Cspa|
481 00b0: 32 43 72 65 76 6c 6f 67 76 31 25 32 43 73 70 61 |2Crevlogv1%2Cspa|
465 00c0: 72 73 65 72 65 76 6c 6f 67 25 32 43 73 74 6f 72 |rserevlog%2Cstor|
482 00c0: 72 73 65 72 65 76 6c 6f 67 25 32 43 73 74 6f 72 |rserevlog%2Cstor|
466 00d0: 65 00 00 80 00 73 08 42 64 61 74 61 2f 30 2e 69 |e....s.Bdata/0.i|
483 00d0: 65 00 00 80 00 73 08 42 64 61 74 61 2f 30 2e 69 |e....s.Bdata/0.i|
467 00e0: 00 03 00 01 00 00 00 00 00 00 00 02 00 00 00 01 |................|
484 00e0: 00 03 00 01 00 00 00 00 00 00 00 02 00 00 00 01 |................|
468 00f0: 00 00 00 00 00 00 00 01 ff ff ff ff ff ff ff ff |................|
485 00f0: 00 00 00 00 00 00 00 01 ff ff ff ff ff ff ff ff |................|
469 #endif
486 #endif
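
In every dump above the body starts with 04 6e 6f 6e 65 48 47 32 30, i.e. a
length-prefixed compression name (\x04 followed by "none") and then the HG20
bundle2 magic, which is how an uncompressed reply shows up on the wire. A quick
spot check of just that header on a saved body (an illustrative one-liner with
standard tools, not part of the test) could be:

  $ head -c 9 body | od -An -c    # should spell out "none" then "HG20" after the length byte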
470
487
471 --uncompressed is an alias for --stream
488 --uncompressed is an alias for --stream
472
489
473 #if stream-legacy
490 #if stream-legacy
474 $ hg clone --uncompressed -U http://localhost:$HGPORT clone1-uncompressed
491 $ hg clone --uncompressed -U http://localhost:$HGPORT clone1-uncompressed
475 streaming all changes
492 streaming all changes
476 1090 files to transfer, 102 KB of data (no-zstd !)
493 1091 files to transfer, 102 KB of data (no-zstd !)
477 transferred 102 KB in * seconds (* */sec) (glob) (no-zstd !)
494 transferred 102 KB in * seconds (* */sec) (glob) (no-zstd !)
478 1090 files to transfer, 98.8 KB of data (zstd !)
495 1091 files to transfer, 98.8 KB of data (zstd !)
479 transferred 98.8 KB in * seconds (* */sec) (glob) (zstd !)
496 transferred 98.8 KB in * seconds (* */sec) (glob) (zstd !)
480 searching for changes
497 searching for changes
481 no changes found
498 no changes found
482 #endif
499 #endif
483 #if stream-bundle2-v2
500 #if stream-bundle2-v2
484 $ hg clone --uncompressed -U http://localhost:$HGPORT clone1-uncompressed
501 $ hg clone --uncompressed -U http://localhost:$HGPORT clone1-uncompressed
485 streaming all changes
502 streaming all changes
486 1093 files to transfer, 102 KB of data (no-zstd !)
503 1094 files to transfer, 102 KB of data (no-zstd !)
487 transferred 102 KB in * seconds (* */sec) (glob) (no-zstd !)
504 transferred 102 KB in * seconds (* */sec) (glob) (no-zstd !)
488 1093 files to transfer, 98.9 KB of data (zstd !)
505 1094 files to transfer, 98.9 KB of data (zstd no-rust !)
489 transferred 98.9 KB in * seconds (* */sec) (glob) (zstd !)
506 transferred 98.9 KB in * seconds (* */sec) (glob) (zstd no-rust !)
507 1096 files to transfer, 99.0 KB of data (zstd rust !)
508 transferred 99.0 KB in * seconds (* */sec) (glob) (zstd rust !)
490 #endif
509 #endif
491 #if stream-bundle2-v3
510 #if stream-bundle2-v3
492 $ hg clone --uncompressed -U http://localhost:$HGPORT clone1-uncompressed
511 $ hg clone --uncompressed -U http://localhost:$HGPORT clone1-uncompressed
493 streaming all changes
512 streaming all changes
494 1093 entries to transfer
513 1093 entries to transfer
495 transferred 102 KB in * seconds (* */sec) (glob) (no-zstd !)
514 transferred 102 KB in * seconds (* */sec) (glob) (no-zstd !)
496 transferred 98.9 KB in * seconds (* */sec) (glob) (zstd !)
515 transferred 98.9 KB in * seconds (* */sec) (glob) (zstd no-rust !)
516 transferred 99.0 KB in * seconds (* */sec) (glob) (zstd rust !)
497 #endif
517 #endif
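
The three variants above show that both spellings behave identically; outside
the harness the equivalence is simply (hypothetical destination names):

  $ hg clone --stream       -U http://localhost:$HGPORT via-stream
  $ hg clone --uncompressed -U http://localhost:$HGPORT via-uncompressed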
498
518
499 Clone with background file closing enabled
519 Clone with background file closing enabled
500
520
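The runs below enable this through --config flags; the equivalent hgrc settings
(a sketch of the worker.* options exercised here) are:

  $ cat << EOF >> $HGRCPATH
  > [worker]
  > # close written files from background threads during clone/update
  > backgroundclose = true
  > # background closing normally only kicks in above a minimum number of files;
  > # lower the threshold so it triggers even on this small repository
  > backgroundcloseminfilecount = 1
  > EOF
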
501 #if stream-legacy
521 #if stream-legacy
502 $ hg --debug --config worker.backgroundclose=true --config worker.backgroundcloseminfilecount=1 clone --stream -U http://localhost:$HGPORT clone-background | grep -v adding
522 $ hg --debug --config worker.backgroundclose=true --config worker.backgroundcloseminfilecount=1 clone --stream -U http://localhost:$HGPORT clone-background | grep -v adding
503 using http://localhost:$HGPORT/
523 using http://localhost:$HGPORT/
504 sending capabilities command
524 sending capabilities command
505 sending branchmap command
525 sending branchmap command
506 streaming all changes
526 streaming all changes
507 sending stream_out command
527 sending stream_out command
508 1090 files to transfer, 102 KB of data (no-zstd !)
528 1091 files to transfer, 102 KB of data (no-zstd !)
509 1090 files to transfer, 98.8 KB of data (zstd !)
529 1091 files to transfer, 98.8 KB of data (zstd !)
510 starting 4 threads for background file closing
530 starting 4 threads for background file closing
511 updating the branch cache
531 updating the branch cache
512 transferred 102 KB in * seconds (* */sec) (glob) (no-zstd !)
532 transferred 102 KB in * seconds (* */sec) (glob) (no-zstd !)
513 transferred 98.8 KB in * seconds (* */sec) (glob) (zstd !)
533 transferred 98.8 KB in * seconds (* */sec) (glob) (zstd !)
514 query 1; heads
534 query 1; heads
515 sending batch command
535 sending batch command
516 searching for changes
536 searching for changes
517 all remote heads known locally
537 all remote heads known locally
518 no changes found
538 no changes found
519 sending getbundle command
539 sending getbundle command
520 bundle2-input-bundle: with-transaction
540 bundle2-input-bundle: with-transaction
521 bundle2-input-part: "listkeys" (params: 1 mandatory) supported
541 bundle2-input-part: "listkeys" (params: 1 mandatory) supported
522 bundle2-input-part: "phase-heads" supported
542 bundle2-input-part: "phase-heads" supported
523 bundle2-input-part: total payload size 24
543 bundle2-input-part: total payload size 24
524 bundle2-input-bundle: 2 parts total
544 bundle2-input-bundle: 2 parts total
525 checking for updated bookmarks
545 checking for updated bookmarks
526 updating the branch cache
546 updating the branch cache
527 (sent 5 HTTP requests and * bytes; received * bytes in responses) (glob)
547 (sent 5 HTTP requests and * bytes; received * bytes in responses) (glob)
528 #endif
548 #endif
529 #if stream-bundle2-v2
549 #if stream-bundle2-v2
530 $ hg --debug --config worker.backgroundclose=true --config worker.backgroundcloseminfilecount=1 clone --stream -U http://localhost:$HGPORT clone-background | grep -v adding
550 $ hg --debug --config worker.backgroundclose=true --config worker.backgroundcloseminfilecount=1 clone --stream -U http://localhost:$HGPORT clone-background | grep -v adding
531 using http://localhost:$HGPORT/
551 using http://localhost:$HGPORT/
532 sending capabilities command
552 sending capabilities command
533 query 1; heads
553 query 1; heads
534 sending batch command
554 sending batch command
535 streaming all changes
555 streaming all changes
536 sending getbundle command
556 sending getbundle command
537 bundle2-input-bundle: with-transaction
557 bundle2-input-bundle: with-transaction
538 bundle2-input-part: "stream2" (params: 3 mandatory) supported
558 bundle2-input-part: "stream2" (params: 3 mandatory) supported
539 applying stream bundle
559 applying stream bundle
540 1093 files to transfer, 102 KB of data (no-zstd !)
560 1094 files to transfer, 102 KB of data (no-zstd !)
541 1093 files to transfer, 98.9 KB of data (zstd !)
561 1094 files to transfer, 98.9 KB of data (zstd no-rust !)
562 1096 files to transfer, 99.0 KB of data (zstd rust !)
542 starting 4 threads for background file closing
563 starting 4 threads for background file closing
543 starting 4 threads for background file closing
564 starting 4 threads for background file closing
544 updating the branch cache
565 updating the branch cache
545 transferred 102 KB in * seconds (* */sec) (glob) (no-zstd !)
566 transferred 102 KB in * seconds (* */sec) (glob) (no-zstd !)
546 bundle2-input-part: total payload size 118984 (no-zstd !)
567 bundle2-input-part: total payload size 119001 (no-zstd !)
547 transferred 98.9 KB in * seconds (* */sec) (glob) (zstd !)
568 transferred 98.9 KB in * seconds (* */sec) (glob) (zstd no-rust !)
548 bundle2-input-part: total payload size 116145 (zstd no-bigendian !)
569 transferred 99.0 KB in * seconds (* */sec) (glob) (zstd rust !)
549 bundle2-input-part: total payload size 116140 (zstd bigendian !)
570 bundle2-input-part: total payload size 116162 (zstd no-bigendian no-rust !)
571 bundle2-input-part: total payload size 116330 (zstd no-bigendian rust !)
572 bundle2-input-part: total payload size 116157 (zstd bigendian no-rust !)
573 bundle2-input-part: total payload size 116325 (zstd bigendian rust !)
550 bundle2-input-part: "listkeys" (params: 1 mandatory) supported
574 bundle2-input-part: "listkeys" (params: 1 mandatory) supported
551 bundle2-input-bundle: 2 parts total
575 bundle2-input-bundle: 2 parts total
552 checking for updated bookmarks
576 checking for updated bookmarks
553 updating the branch cache
577 updating the branch cache
554 (sent 3 HTTP requests and * bytes; received * bytes in responses) (glob)
578 (sent 3 HTTP requests and * bytes; received * bytes in responses) (glob)
555 #endif
579 #endif
556 #if stream-bundle2-v3
580 #if stream-bundle2-v3
557 $ hg --debug --config worker.backgroundclose=true --config worker.backgroundcloseminfilecount=1 clone --stream -U http://localhost:$HGPORT clone-background | grep -v adding
581 $ hg --debug --config worker.backgroundclose=true --config worker.backgroundcloseminfilecount=1 clone --stream -U http://localhost:$HGPORT clone-background | grep -v adding
558 using http://localhost:$HGPORT/
582 using http://localhost:$HGPORT/
559 sending capabilities command
583 sending capabilities command
560 query 1; heads
584 query 1; heads
561 sending batch command
585 sending batch command
562 streaming all changes
586 streaming all changes
563 sending getbundle command
587 sending getbundle command
564 bundle2-input-bundle: with-transaction
588 bundle2-input-bundle: with-transaction
565 bundle2-input-part: "stream3-exp" (params: 1 mandatory) supported
589 bundle2-input-part: "stream3-exp" (params: 1 mandatory) supported
566 applying stream bundle
590 applying stream bundle
567 1093 entries to transfer
591 1093 entries to transfer
568 starting 4 threads for background file closing
592 starting 4 threads for background file closing
569 starting 4 threads for background file closing
593 starting 4 threads for background file closing
570 updating the branch cache
594 updating the branch cache
571 transferred 102 KB in * seconds (* */sec) (glob) (no-zstd !)
595 transferred 102 KB in * seconds (* */sec) (glob) (no-zstd !)
572 bundle2-input-part: total payload size 120079 (no-zstd !)
596 bundle2-input-part: total payload size 120096 (no-zstd !)
573 transferred 98.9 KB in * seconds (* */sec) (glob) (zstd !)
597 transferred 98.9 KB in * seconds (* */sec) (glob) (zstd no-rust !)
574 bundle2-input-part: total payload size 117240 (zstd no-bigendian !)
598 transferred 99.0 KB in * seconds (* */sec) (glob) (zstd rust !)
575 bundle2-input-part: total payload size 116138 (zstd bigendian !)
599 bundle2-input-part: total payload size 117257 (zstd no-rust no-bigendian !)
600 bundle2-input-part: total payload size 117425 (zstd rust no-bigendian !)
601 bundle2-input-part: total payload size 117252 (zstd bigendian no-rust !)
602 bundle2-input-part: total payload size 117420 (zstd bigendian rust !)
576 bundle2-input-part: "listkeys" (params: 1 mandatory) supported
603 bundle2-input-part: "listkeys" (params: 1 mandatory) supported
577 bundle2-input-bundle: 2 parts total
604 bundle2-input-bundle: 2 parts total
578 checking for updated bookmarks
605 checking for updated bookmarks
579 updating the branch cache
606 updating the branch cache
580 (sent 3 HTTP requests and * bytes; received * bytes in responses) (glob)
607 (sent 3 HTTP requests and * bytes; received * bytes in responses) (glob)
581 #endif
608 #endif
582
609
583 Cannot stream clone when there are secret changesets
610 Cannot stream clone when there are secret changesets
584
611
585 $ hg -R server phase --force --secret -r tip
612 $ hg -R server phase --force --secret -r tip
586 $ hg clone --stream -U http://localhost:$HGPORT secret-denied
613 $ hg clone --stream -U http://localhost:$HGPORT secret-denied
587 warning: stream clone requested but server has them disabled
614 warning: stream clone requested but server has them disabled
588 requesting all changes
615 requesting all changes
589 adding changesets
616 adding changesets
590 adding manifests
617 adding manifests
591 adding file changes
618 adding file changes
592 added 2 changesets with 1025 changes to 1025 files
619 added 2 changesets with 1025 changes to 1025 files
593 new changesets 96ee1d7354c4:c17445101a72
620 new changesets 96ee1d7354c4:c17445101a72
594
621
595 $ killdaemons.py
622 $ killdaemons.py
596
623
597 Streaming of secrets can be overridden by server config
624 Streaming of secrets can be overridden by server config
598
625
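The serve command below passes the override on its command line; the persistent
form (a sketch of the same server.uncompressedallowsecret option) is:

  $ cat << EOF >> server/.hg/hgrc
  > [server]
  > # allow stream clones even when secret changesets are present
  > uncompressedallowsecret = true
  > EOF
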
599 $ cd server
626 $ cd server
600 $ hg serve --config server.uncompressedallowsecret=true -p $HGPORT -d --pid-file=hg.pid
627 $ hg serve --config server.uncompressedallowsecret=true -p $HGPORT -d --pid-file=hg.pid
601 $ cat hg.pid > $DAEMON_PIDS
628 $ cat hg.pid > $DAEMON_PIDS
602 $ cd ..
629 $ cd ..
603
630
604 #if stream-legacy
631 #if stream-legacy
605 $ hg clone --stream -U http://localhost:$HGPORT secret-allowed
632 $ hg clone --stream -U http://localhost:$HGPORT secret-allowed
606 streaming all changes
633 streaming all changes
607 1090 files to transfer, 102 KB of data (no-zstd !)
634 1091 files to transfer, 102 KB of data (no-zstd !)
608 transferred 102 KB in * seconds (* */sec) (glob) (no-zstd !)
635 transferred 102 KB in * seconds (* */sec) (glob) (no-zstd !)
609 1090 files to transfer, 98.8 KB of data (zstd !)
636 1091 files to transfer, 98.8 KB of data (zstd !)
610 transferred 98.8 KB in * seconds (* */sec) (glob) (zstd !)
637 transferred 98.8 KB in * seconds (* */sec) (glob) (zstd !)
611 searching for changes
638 searching for changes
612 no changes found
639 no changes found
613 #endif
640 #endif
614 #if stream-bundle2-v2
641 #if stream-bundle2-v2
615 $ hg clone --stream -U http://localhost:$HGPORT secret-allowed
642 $ hg clone --stream -U http://localhost:$HGPORT secret-allowed
616 streaming all changes
643 streaming all changes
617 1093 files to transfer, 102 KB of data (no-zstd !)
644 1094 files to transfer, 102 KB of data (no-zstd !)
618 transferred 102 KB in * seconds (* */sec) (glob) (no-zstd !)
645 transferred 102 KB in * seconds (* */sec) (glob) (no-zstd !)
619 1093 files to transfer, 98.9 KB of data (zstd !)
646 1094 files to transfer, 98.9 KB of data (zstd no-rust !)
620 transferred 98.9 KB in * seconds (* */sec) (glob) (zstd !)
647 transferred 98.9 KB in * seconds (* */sec) (glob) (zstd no-rust !)
648 1096 files to transfer, 99.0 KB of data (zstd rust !)
649 transferred 99.0 KB in * seconds (* */sec) (glob) (zstd rust !)
621 #endif
650 #endif
622 #if stream-bundle2-v3
651 #if stream-bundle2-v3
623 $ hg clone --stream -U http://localhost:$HGPORT secret-allowed
652 $ hg clone --stream -U http://localhost:$HGPORT secret-allowed
624 streaming all changes
653 streaming all changes
625 1093 entries to transfer
654 1093 entries to transfer
626 transferred 102 KB in * seconds (* */sec) (glob) (no-zstd !)
655 transferred 102 KB in * seconds (* */sec) (glob) (no-zstd !)
627 transferred 98.9 KB in * seconds (* */sec) (glob) (zstd !)
656 transferred 98.9 KB in * seconds (* */sec) (glob) (zstd no-rust !)
657 transferred 99.0 KB in * seconds (* */sec) (glob) (zstd rust !)
628 #endif
658 #endif
629
659
630 $ killdaemons.py
660 $ killdaemons.py
631
661
632 Verify interaction between preferuncompressed and secret presence
662 Verify interaction between preferuncompressed and secret presence
633
663
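server.preferuncompressed makes the server advertise that clients should favor
a stream clone when they are able to. The hgrc form of the option passed via
--config below is, as a sketch:

  $ cat << EOF >> server/.hg/hgrc
  > [server]
  > # ask capable clients to prefer stream ("uncompressed") clones
  > preferuncompressed = true
  > EOF
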
634 $ cd server
664 $ cd server
635 $ hg serve --config server.preferuncompressed=true -p $HGPORT -d --pid-file=hg.pid
665 $ hg serve --config server.preferuncompressed=true -p $HGPORT -d --pid-file=hg.pid
636 $ cat hg.pid > $DAEMON_PIDS
666 $ cat hg.pid > $DAEMON_PIDS
637 $ cd ..
667 $ cd ..
638
668
639 $ hg clone -U http://localhost:$HGPORT preferuncompressed-secret
669 $ hg clone -U http://localhost:$HGPORT preferuncompressed-secret
640 requesting all changes
670 requesting all changes
641 adding changesets
671 adding changesets
642 adding manifests
672 adding manifests
643 adding file changes
673 adding file changes
644 added 2 changesets with 1025 changes to 1025 files
674 added 2 changesets with 1025 changes to 1025 files
645 new changesets 96ee1d7354c4:c17445101a72
675 new changesets 96ee1d7354c4:c17445101a72
646
676
647 $ killdaemons.py
677 $ killdaemons.py
648
678
649 Clone not allowed when full bundles are disabled and secrets can't be served
679 Clone not allowed when full bundles are disabled and secrets can't be served
650
680
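server.disablefullbundle makes the server reject requests for a full
changegroup, which is why the fallback below aborts with "pull-based clones
disabled" instead of degrading to a regular pull. Its hgrc form (a sketch of
the option used via --config below) is:

  $ cat << EOF >> server/.hg/hgrc
  > [server]
  > # refuse pull-based clones that would require sending a full changegroup
  > disablefullbundle = true
  > EOF
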
651 $ cd server
681 $ cd server
652 $ hg serve --config server.disablefullbundle=true -p $HGPORT -d --pid-file=hg.pid
682 $ hg serve --config server.disablefullbundle=true -p $HGPORT -d --pid-file=hg.pid
653 $ cat hg.pid > $DAEMON_PIDS
683 $ cat hg.pid > $DAEMON_PIDS
654 $ cd ..
684 $ cd ..
655
685
656 $ hg clone --stream http://localhost:$HGPORT secret-full-disabled
686 $ hg clone --stream http://localhost:$HGPORT secret-full-disabled
657 warning: stream clone requested but server has them disabled
687 warning: stream clone requested but server has them disabled
658 requesting all changes
688 requesting all changes
659 remote: abort: server has pull-based clones disabled
689 remote: abort: server has pull-based clones disabled
660 abort: pull failed on remote
690 abort: pull failed on remote
661 (remove --pull if specified or upgrade Mercurial)
691 (remove --pull if specified or upgrade Mercurial)
662 [100]
692 [100]
663
693
664 Local stream clone with secrets involved
694 Local stream clone with secrets involved
665 (This only tests the behavior: if you have access to the repo's files,
695 (This only tests the behavior: if you have access to the repo's files,
666 there is no security boundary, so it isn't important to prevent a clone here.)
696 there is no security boundary, so it isn't important to prevent a clone here.)
667
697
668 $ hg clone -U --stream server local-secret
698 $ hg clone -U --stream server local-secret
669 warning: stream clone requested but server has them disabled
699 warning: stream clone requested but server has them disabled
670 requesting all changes
700 requesting all changes
671 adding changesets
701 adding changesets
672 adding manifests
702 adding manifests
673 adding file changes
703 adding file changes
674 added 2 changesets with 1025 changes to 1025 files
704 added 2 changesets with 1025 changes to 1025 files
675 new changesets 96ee1d7354c4:c17445101a72
705 new changesets 96ee1d7354c4:c17445101a72
676
706
677 Stream clone while repo is changing:
707 Stream clone while repo is changing:
678
708
679 $ mkdir changing
709 $ mkdir changing
680 $ cd changing
710 $ cd changing
681
711
682 prepare repo with small and big file to cover both code paths in emitrevlogdata
712 prepare repo with small and big file to cover both code paths in emitrevlogdata
683
713
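For context on why the test commits both a tiny f1 and a 50000-line f2: the comment above refers to emitrevlogdata having two code paths, presumably one that hands small payloads over in a single piece and one that streams large payloads in bounded chunks, so the test wants a file on each side of that split. The sketch below is a hypothetical illustration of such a split only; the names, threshold, and structure are made up and are not Mercurial's actual emitrevlogdata implementation.

  # illustrative only: emit a file whole when it is small, chunked when it is big,
  # mirroring the kind of small-file / big-file split this test tries to cover
  import os

  CHUNK_SIZE = 64 * 1024  # arbitrary threshold chosen for this sketch

  def emit_file_data(path):
      """Yield the bytes of `path`: one read for small files, chunked for big ones."""
      size = os.path.getsize(path)
      with open(path, 'rb') as fp:
          if size <= CHUNK_SIZE:
              yield fp.read()  # small path: the whole file in one piece
          else:
              while True:  # big path: stream with bounded memory use
                  chunk = fp.read(CHUNK_SIZE)
                  if not chunk:
                      break
                  yield chunk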
684 $ hg init repo
714 $ hg init repo
685 $ touch repo/f1
715 $ touch repo/f1
686 $ $TESTDIR/seq.py 50000 > repo/f2
716 $ $TESTDIR/seq.py 50000 > repo/f2
687 $ hg -R repo ci -Aqm "0"
717 $ hg -R repo ci -Aqm "0"
688 $ HG_TEST_STREAM_WALKED_FILE_1="$TESTTMP/sync_file_walked_1"
718 $ HG_TEST_STREAM_WALKED_FILE_1="$TESTTMP/sync_file_walked_1"
689 $ export HG_TEST_STREAM_WALKED_FILE_1
719 $ export HG_TEST_STREAM_WALKED_FILE_1
690 $ HG_TEST_STREAM_WALKED_FILE_2="$TESTTMP/sync_file_walked_2"
720 $ HG_TEST_STREAM_WALKED_FILE_2="$TESTTMP/sync_file_walked_2"
691 $ export HG_TEST_STREAM_WALKED_FILE_2
721 $ export HG_TEST_STREAM_WALKED_FILE_2
692 $ HG_TEST_STREAM_WALKED_FILE_3="$TESTTMP/sync_file_walked_3"
722 $ HG_TEST_STREAM_WALKED_FILE_3="$TESTTMP/sync_file_walked_3"
693 $ export HG_TEST_STREAM_WALKED_FILE_3
723 $ export HG_TEST_STREAM_WALKED_FILE_3
694 # $ cat << EOF >> $HGRCPATH
724 # $ cat << EOF >> $HGRCPATH
695 # > [hooks]
725 # > [hooks]
696 # > pre-clone=rm -f "$TESTTMP/sync_file_walked_*"
726 # > pre-clone=rm -f "$TESTTMP/sync_file_walked_*"
697 # > EOF
727 # > EOF
698 $ hg serve -R repo -p $HGPORT1 -d --error errors.log --pid-file=hg.pid --config extensions.stream_steps="$RUNTESTDIR/testlib/ext-stream-clone-steps.py"
728 $ hg serve -R repo -p $HGPORT1 -d --error errors.log --pid-file=hg.pid --config extensions.stream_steps="$RUNTESTDIR/testlib/ext-stream-clone-steps.py"
699 $ cat hg.pid >> $DAEMON_PIDS
729 $ cat hg.pid >> $DAEMON_PIDS
700
730
701 clone while modifying the repo between stat'ing the files with the write lock held and
731 clone while modifying the repo between stat'ing the files with the write lock held and
702 actually serving the file content
732 actually serving the file content
703
733
704 $ (hg clone -q --stream -U http://localhost:$HGPORT1 clone; touch "$HG_TEST_STREAM_WALKED_FILE_3") &
734 $ (hg clone -q --stream -U http://localhost:$HGPORT1 clone; touch "$HG_TEST_STREAM_WALKED_FILE_3") &
705 $ $RUNTESTDIR/testlib/wait-on-file 10 $HG_TEST_STREAM_WALKED_FILE_1
735 $ $RUNTESTDIR/testlib/wait-on-file 10 $HG_TEST_STREAM_WALKED_FILE_1
706 $ echo >> repo/f1
736 $ echo >> repo/f1
707 $ echo >> repo/f2
737 $ echo >> repo/f2
708 $ hg -R repo ci -m "1" --config ui.timeout.warn=-1
738 $ hg -R repo ci -m "1" --config ui.timeout.warn=-1
709 $ touch $HG_TEST_STREAM_WALKED_FILE_2
739 $ touch $HG_TEST_STREAM_WALKED_FILE_2
710 $ $RUNTESTDIR/testlib/wait-on-file 10 $HG_TEST_STREAM_WALKED_FILE_3
740 $ $RUNTESTDIR/testlib/wait-on-file 10 $HG_TEST_STREAM_WALKED_FILE_3
711 $ hg -R clone id
741 $ hg -R clone id
712 000000000000
742 000000000000
713 $ cat errors.log
743 $ cat errors.log
714 $ cd ..
744 $ cd ..
715
745
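The synchronization in the block above works purely through marker files: the helper extension and the test signal each other by creating marker files on disk and polling for them. As a rough, hypothetical illustration of the waiting side only, and not the actual $RUNTESTDIR/testlib/wait-on-file script or the ext-stream-clone-steps.py extension, the polling logic can be as small as:

  # wait_on_file.py - hypothetical sketch of a file-based synchronization barrier
  # usage: python wait_on_file.py <timeout-in-seconds> <path-to-wait-for>
  import os
  import sys
  import time

  def wait_on_file(path, timeout):
      """Poll until `path` exists; return True on success, False on timeout."""
      deadline = time.monotonic() + timeout
      while time.monotonic() < deadline:
          if os.path.exists(path):
              return True
          time.sleep(0.1)
      return False

  if __name__ == '__main__':
      ok = wait_on_file(sys.argv[2], float(sys.argv[1]))
      sys.exit(0 if ok else 1)

The signalling side is just a "touch" of the marker, as the test itself does with $HG_TEST_STREAM_WALKED_FILE_2 and $HG_TEST_STREAM_WALKED_FILE_3, which is why three marker files are enough to order the stat step, the concurrent commit, and the final check.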
716 Stream repository with bookmarks
746 Stream repository with bookmarks
717 --------------------------------
747 --------------------------------
718
748
719 (revert introduction of secret changeset)
749 (revert introduction of secret changeset)
720
750
721 $ hg -R server phase --draft 'secret()'
751 $ hg -R server phase --draft 'secret()'
722
752
723 add a bookmark
753 add a bookmark
724
754
725 $ hg -R server bookmark -r tip some-bookmark
755 $ hg -R server bookmark -r tip some-bookmark
726
756
727 clone it
757 clone it
728
758
729 #if stream-legacy
759 #if stream-legacy
730 $ hg clone --stream http://localhost:$HGPORT with-bookmarks
760 $ hg clone --stream http://localhost:$HGPORT with-bookmarks
731 streaming all changes
761 streaming all changes
732 1090 files to transfer, 102 KB of data (no-zstd !)
762 1091 files to transfer, 102 KB of data (no-zstd !)
733 transferred 102 KB in * seconds (* */sec) (glob) (no-zstd !)
763 transferred 102 KB in * seconds (* */sec) (glob) (no-zstd !)
734 1090 files to transfer, 98.8 KB of data (zstd !)
764 1091 files to transfer, 98.8 KB of data (zstd !)
735 transferred 98.8 KB in * seconds (* */sec) (glob) (zstd !)
765 transferred 98.8 KB in * seconds (* */sec) (glob) (zstd !)
736 searching for changes
766 searching for changes
737 no changes found
767 no changes found
738 updating to branch default
768 updating to branch default
739 1088 files updated, 0 files merged, 0 files removed, 0 files unresolved
769 1088 files updated, 0 files merged, 0 files removed, 0 files unresolved
740 #endif
770 #endif
741 #if stream-bundle2-v2
771 #if stream-bundle2-v2
742 $ hg clone --stream http://localhost:$HGPORT with-bookmarks
772 $ hg clone --stream http://localhost:$HGPORT with-bookmarks
743 streaming all changes
773 streaming all changes
744 1096 files to transfer, 102 KB of data (no-zstd !)
774 1097 files to transfer, 102 KB of data (no-zstd !)
745 transferred 102 KB in * seconds (* */sec) (glob) (no-zstd !)
775 transferred 102 KB in * seconds (* */sec) (glob) (no-zstd !)
746 1096 files to transfer, 99.1 KB of data (zstd !)
776 1097 files to transfer, 99.1 KB of data (zstd no-rust !)
747 transferred 99.1 KB in * seconds (* */sec) (glob) (zstd !)
777 transferred 99.1 KB in * seconds (* */sec) (glob) (zstd no-rust !)
778 1099 files to transfer, 99.2 KB of data (zstd rust !)
779 transferred 99.2 KB in * seconds (* */sec) (glob) (zstd rust !)
748 updating to branch default
780 updating to branch default
749 1088 files updated, 0 files merged, 0 files removed, 0 files unresolved
781 1088 files updated, 0 files merged, 0 files removed, 0 files unresolved
750 #endif
782 #endif
751 #if stream-bundle2-v3
783 #if stream-bundle2-v3
752 $ hg clone --stream http://localhost:$HGPORT with-bookmarks
784 $ hg clone --stream http://localhost:$HGPORT with-bookmarks
753 streaming all changes
785 streaming all changes
754 1096 entries to transfer
786 1096 entries to transfer
755 transferred 102 KB in * seconds (* */sec) (glob) (no-zstd !)
787 transferred 102 KB in * seconds (* */sec) (glob) (no-zstd !)
756 transferred 99.1 KB in * seconds (* */sec) (glob) (zstd !)
788 transferred 99.1 KB in * seconds (* */sec) (glob) (zstd no-rust !)
789 transferred 99.2 KB in * seconds (* */sec) (glob) (zstd rust !)
757 updating to branch default
790 updating to branch default
758 1088 files updated, 0 files merged, 0 files removed, 0 files unresolved
791 1088 files updated, 0 files merged, 0 files removed, 0 files unresolved
759 #endif
792 #endif
760 $ hg verify -R with-bookmarks -q
793 $ hg verify -R with-bookmarks -q
761 $ hg -R with-bookmarks bookmarks
794 $ hg -R with-bookmarks bookmarks
762 some-bookmark 2:5223b5e3265f
795 some-bookmark 2:5223b5e3265f
763
796
764 Stream repository with phases
797 Stream repository with phases
765 -----------------------------
798 -----------------------------
766
799
767 Clone as publishing
800 Clone as publishing
768
801
769 $ hg -R server phase -r 'all()'
802 $ hg -R server phase -r 'all()'
770 0: draft
803 0: draft
771 1: draft
804 1: draft
772 2: draft
805 2: draft
773
806
774 #if stream-legacy
807 #if stream-legacy
775 $ hg clone --stream http://localhost:$HGPORT phase-publish
808 $ hg clone --stream http://localhost:$HGPORT phase-publish
776 streaming all changes
809 streaming all changes
777 1090 files to transfer, 102 KB of data (no-zstd !)
810 1091 files to transfer, 102 KB of data (no-zstd !)
778 transferred 102 KB in * seconds (* */sec) (glob) (no-zstd !)
811 transferred 102 KB in * seconds (* */sec) (glob) (no-zstd !)
779 1090 files to transfer, 98.8 KB of data (zstd !)
812 1091 files to transfer, 98.8 KB of data (zstd !)
780 transferred 98.8 KB in * seconds (* */sec) (glob) (zstd !)
813 transferred 98.8 KB in * seconds (* */sec) (glob) (zstd !)
781 searching for changes
814 searching for changes
782 no changes found
815 no changes found
783 updating to branch default
816 updating to branch default
784 1088 files updated, 0 files merged, 0 files removed, 0 files unresolved
817 1088 files updated, 0 files merged, 0 files removed, 0 files unresolved
785 #endif
818 #endif
786 #if stream-bundle2-v2
819 #if stream-bundle2-v2
787 $ hg clone --stream http://localhost:$HGPORT phase-publish
820 $ hg clone --stream http://localhost:$HGPORT phase-publish
788 streaming all changes
821 streaming all changes
789 1096 files to transfer, 102 KB of data (no-zstd !)
822 1097 files to transfer, 102 KB of data (no-zstd !)
790 transferred 102 KB in * seconds (* */sec) (glob) (no-zstd !)
823 transferred 102 KB in * seconds (* */sec) (glob) (no-zstd !)
791 1096 files to transfer, 99.1 KB of data (zstd !)
824 1097 files to transfer, 99.1 KB of data (zstd no-rust !)
792 transferred 99.1 KB in * seconds (* */sec) (glob) (zstd !)
825 transferred 99.1 KB in * seconds (* */sec) (glob) (zstd no-rust !)
826 1099 files to transfer, 99.2 KB of data (zstd rust !)
827 transferred 99.2 KB in * seconds (* */sec) (glob) (zstd rust !)
793 updating to branch default
828 updating to branch default
794 1088 files updated, 0 files merged, 0 files removed, 0 files unresolved
829 1088 files updated, 0 files merged, 0 files removed, 0 files unresolved
795 #endif
830 #endif
796 #if stream-bundle2-v3
831 #if stream-bundle2-v3
797 $ hg clone --stream http://localhost:$HGPORT phase-publish
832 $ hg clone --stream http://localhost:$HGPORT phase-publish
798 streaming all changes
833 streaming all changes
799 1096 entries to transfer
834 1096 entries to transfer
800 transferred 102 KB in * seconds (* */sec) (glob) (no-zstd !)
835 transferred 102 KB in * seconds (* */sec) (glob) (no-zstd !)
801 transferred 99.1 KB in * seconds (* */sec) (glob) (zstd !)
836 transferred 99.1 KB in * seconds (* */sec) (glob) (zstd no-rust !)
837 transferred 99.2 KB in * seconds (* */sec) (glob) (zstd rust !)
802 updating to branch default
838 updating to branch default
803 1088 files updated, 0 files merged, 0 files removed, 0 files unresolved
839 1088 files updated, 0 files merged, 0 files removed, 0 files unresolved
804 #endif
840 #endif
805 $ hg verify -R phase-publish -q
841 $ hg verify -R phase-publish -q
806 $ hg -R phase-publish phase -r 'all()'
842 $ hg -R phase-publish phase -r 'all()'
807 0: public
843 0: public
808 1: public
844 1: public
809 2: public
845 2: public
810
846
811 Clone as non-publishing
847 Clone as non-publishing
812
848
813 $ cat << EOF >> server/.hg/hgrc
849 $ cat << EOF >> server/.hg/hgrc
814 > [phases]
850 > [phases]
815 > publish = False
851 > publish = False
816 > EOF
852 > EOF
817 $ killdaemons.py
853 $ killdaemons.py
818 $ hg -R server serve -p $HGPORT -d --pid-file=hg.pid
854 $ hg -R server serve -p $HGPORT -d --pid-file=hg.pid
819 $ cat hg.pid > $DAEMON_PIDS
855 $ cat hg.pid > $DAEMON_PIDS
820
856
821 #if stream-legacy
857 #if stream-legacy
822
858
823 With v1 of the stream protocol, changesets are always cloned as public. This makes
859 With v1 of the stream protocol, changesets are always cloned as public. This makes
824 stream v1 unsuitable for non-publishing repositories.
860 stream v1 unsuitable for non-publishing repositories.
825
861
826 $ hg clone --stream http://localhost:$HGPORT phase-no-publish
862 $ hg clone --stream http://localhost:$HGPORT phase-no-publish
827 streaming all changes
863 streaming all changes
828 1090 files to transfer, 102 KB of data (no-zstd !)
864 1091 files to transfer, 102 KB of data (no-zstd !)
829 transferred 102 KB in * seconds (* */sec) (glob) (no-zstd !)
865 transferred 102 KB in * seconds (* */sec) (glob) (no-zstd !)
830 1090 files to transfer, 98.8 KB of data (zstd !)
866 1091 files to transfer, 98.8 KB of data (zstd !)
831 transferred 98.8 KB in * seconds (* */sec) (glob) (zstd !)
867 transferred 98.8 KB in * seconds (* */sec) (glob) (zstd !)
832 searching for changes
868 searching for changes
833 no changes found
869 no changes found
834 updating to branch default
870 updating to branch default
835 1088 files updated, 0 files merged, 0 files removed, 0 files unresolved
871 1088 files updated, 0 files merged, 0 files removed, 0 files unresolved
836 $ hg -R phase-no-publish phase -r 'all()'
872 $ hg -R phase-no-publish phase -r 'all()'
837 0: public
873 0: public
838 1: public
874 1: public
839 2: public
875 2: public
840 #endif
876 #endif
841 #if stream-bundle2-v2
877 #if stream-bundle2-v2
842 $ hg clone --stream http://localhost:$HGPORT phase-no-publish
878 $ hg clone --stream http://localhost:$HGPORT phase-no-publish
843 streaming all changes
879 streaming all changes
844 1097 files to transfer, 102 KB of data (no-zstd !)
880 1098 files to transfer, 102 KB of data (no-zstd !)
845 transferred 102 KB in * seconds (* */sec) (glob) (no-zstd !)
881 transferred 102 KB in * seconds (* */sec) (glob) (no-zstd !)
846 1097 files to transfer, 99.1 KB of data (zstd !)
882 1098 files to transfer, 99.1 KB of data (zstd no-rust !)
847 transferred 99.1 KB in * seconds (* */sec) (glob) (zstd !)
883 transferred 99.1 KB in * seconds (* */sec) (glob) (zstd no-rust !)
884 1100 files to transfer, 99.2 KB of data (zstd rust !)
885 transferred 99.2 KB in * seconds (* */sec) (glob) (zstd rust !)
848 updating to branch default
886 updating to branch default
849 1088 files updated, 0 files merged, 0 files removed, 0 files unresolved
887 1088 files updated, 0 files merged, 0 files removed, 0 files unresolved
850 $ hg -R phase-no-publish phase -r 'all()'
888 $ hg -R phase-no-publish phase -r 'all()'
851 0: draft
889 0: draft
852 1: draft
890 1: draft
853 2: draft
891 2: draft
854 #endif
892 #endif
855 #if stream-bundle2-v3
893 #if stream-bundle2-v3
856 $ hg clone --stream http://localhost:$HGPORT phase-no-publish
894 $ hg clone --stream http://localhost:$HGPORT phase-no-publish
857 streaming all changes
895 streaming all changes
858 1097 entries to transfer
896 1097 entries to transfer
859 transferred 102 KB in * seconds (* */sec) (glob) (no-zstd !)
897 transferred 102 KB in * seconds (* */sec) (glob) (no-zstd !)
860 transferred 99.1 KB in * seconds (* */sec) (glob) (zstd !)
898 transferred 99.1 KB in * seconds (* */sec) (glob) (zstd no-rust !)
899 transferred 99.2 KB in * seconds (* */sec) (glob) (zstd rust !)
861 updating to branch default
900 updating to branch default
862 1088 files updated, 0 files merged, 0 files removed, 0 files unresolved
901 1088 files updated, 0 files merged, 0 files removed, 0 files unresolved
863 $ hg -R phase-no-publish phase -r 'all()'
902 $ hg -R phase-no-publish phase -r 'all()'
864 0: draft
903 0: draft
865 1: draft
904 1: draft
866 2: draft
905 2: draft
867 #endif
906 #endif
868 $ hg verify -R phase-no-publish -q
907 $ hg verify -R phase-no-publish -q
869
908
870 $ killdaemons.py
909 $ killdaemons.py
871
910
872 #if stream-legacy
911 #if stream-legacy
873
912
874 With v1 of the stream protocol, changesets are always cloned as public. There is
913 With v1 of the stream protocol, changesets are always cloned as public. There is
875 no obsolescence marker exchange in stream v1.
914 no obsolescence marker exchange in stream v1.
876
915
877 #endif
916 #endif
878 #if stream-bundle2-v2
917 #if stream-bundle2-v2
879
918
880 Stream repository with obsolescence
919 Stream repository with obsolescence
881 -----------------------------------
920 -----------------------------------
882
921
883 Clone non-publishing with obsolescence
922 Clone non-publishing with obsolescence
884
923
885 $ cat >> $HGRCPATH << EOF
924 $ cat >> $HGRCPATH << EOF
886 > [experimental]
925 > [experimental]
887 > evolution=all
926 > evolution=all
888 > EOF
927 > EOF
889
928
890 $ cd server
929 $ cd server
891 $ echo foo > foo
930 $ echo foo > foo
892 $ hg -q commit -m 'about to be pruned'
931 $ hg -q commit -m 'about to be pruned'
893 $ hg debugobsolete `hg log -r . -T '{node}'` -d '0 0' -u test --record-parents
932 $ hg debugobsolete `hg log -r . -T '{node}'` -d '0 0' -u test --record-parents
894 1 new obsolescence markers
933 1 new obsolescence markers
895 obsoleted 1 changesets
934 obsoleted 1 changesets
896 $ hg up null -q
935 $ hg up null -q
897 $ hg log -T '{rev}: {phase}\n'
936 $ hg log -T '{rev}: {phase}\n'
898 2: draft
937 2: draft
899 1: draft
938 1: draft
900 0: draft
939 0: draft
901 $ hg serve -p $HGPORT -d --pid-file=hg.pid
940 $ hg serve -p $HGPORT -d --pid-file=hg.pid
902 $ cat hg.pid > $DAEMON_PIDS
941 $ cat hg.pid > $DAEMON_PIDS
903 $ cd ..
942 $ cd ..
904
943
905 $ hg clone -U --stream http://localhost:$HGPORT with-obsolescence
944 $ hg clone -U --stream http://localhost:$HGPORT with-obsolescence
906 streaming all changes
945 streaming all changes
907 1098 files to transfer, 102 KB of data (no-zstd !)
946 1099 files to transfer, 102 KB of data (no-zstd !)
908 transferred 102 KB in * seconds (* */sec) (glob) (no-zstd !)
947 transferred 102 KB in * seconds (* */sec) (glob) (no-zstd !)
909 1098 files to transfer, 99.5 KB of data (zstd !)
948 1099 files to transfer, 99.5 KB of data (zstd no-rust !)
910 transferred 99.5 KB in * seconds (* */sec) (glob) (zstd !)
949 transferred 99.5 KB in * seconds (* */sec) (glob) (zstd no-rust !)
950 1101 files to transfer, 99.6 KB of data (zstd rust !)
951 transferred 99.6 KB in * seconds (* */sec) (glob) (zstd rust !)
911 $ hg -R with-obsolescence log -T '{rev}: {phase}\n'
952 $ hg -R with-obsolescence log -T '{rev}: {phase}\n'
912 2: draft
953 2: draft
913 1: draft
954 1: draft
914 0: draft
955 0: draft
915 $ hg debugobsolete -R with-obsolescence
956 $ hg debugobsolete -R with-obsolescence
916 8c206a663911c1f97f2f9d7382e417ae55872cfa 0 {5223b5e3265f0df40bb743da62249413d74ac70f} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
957 8c206a663911c1f97f2f9d7382e417ae55872cfa 0 {5223b5e3265f0df40bb743da62249413d74ac70f} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
917 $ hg verify -R with-obsolescence -q
958 $ hg verify -R with-obsolescence -q
918
959
919 $ hg clone -U --stream --config experimental.evolution=0 http://localhost:$HGPORT with-obsolescence-no-evolution
960 $ hg clone -U --stream --config experimental.evolution=0 http://localhost:$HGPORT with-obsolescence-no-evolution
920 streaming all changes
961 streaming all changes
921 remote: abort: server has obsolescence markers, but client cannot receive them via stream clone
962 remote: abort: server has obsolescence markers, but client cannot receive them via stream clone
922 abort: pull failed on remote
963 abort: pull failed on remote
923 [100]
964 [100]
924
965
925 $ killdaemons.py
966 $ killdaemons.py
926
967
927 #endif
968 #endif
928 #if stream-bundle2-v3
969 #if stream-bundle2-v3
929
970
930 Stream repository with obsolescence
971 Stream repository with obsolescence
931 -----------------------------------
972 -----------------------------------
932
973
933 Clone non-publishing with obsolescence
974 Clone non-publishing with obsolescence
934
975
935 $ cat >> $HGRCPATH << EOF
976 $ cat >> $HGRCPATH << EOF
936 > [experimental]
977 > [experimental]
937 > evolution=all
978 > evolution=all
938 > EOF
979 > EOF
939
980
940 $ cd server
981 $ cd server
941 $ echo foo > foo
982 $ echo foo > foo
942 $ hg -q commit -m 'about to be pruned'
983 $ hg -q commit -m 'about to be pruned'
943 $ hg debugobsolete `hg log -r . -T '{node}'` -d '0 0' -u test --record-parents
984 $ hg debugobsolete `hg log -r . -T '{node}'` -d '0 0' -u test --record-parents
944 1 new obsolescence markers
985 1 new obsolescence markers
945 obsoleted 1 changesets
986 obsoleted 1 changesets
946 $ hg up null -q
987 $ hg up null -q
947 $ hg log -T '{rev}: {phase}\n'
988 $ hg log -T '{rev}: {phase}\n'
948 2: draft
989 2: draft
949 1: draft
990 1: draft
950 0: draft
991 0: draft
951 $ hg serve -p $HGPORT -d --pid-file=hg.pid
992 $ hg serve -p $HGPORT -d --pid-file=hg.pid
952 $ cat hg.pid > $DAEMON_PIDS
993 $ cat hg.pid > $DAEMON_PIDS
953 $ cd ..
994 $ cd ..
954
995
955 $ hg clone -U --stream http://localhost:$HGPORT with-obsolescence
996 $ hg clone -U --stream http://localhost:$HGPORT with-obsolescence
956 streaming all changes
997 streaming all changes
957 1098 entries to transfer
998 1098 entries to transfer
958 transferred 102 KB in * seconds (* */sec) (glob) (no-zstd !)
999 transferred 102 KB in * seconds (* */sec) (glob) (no-zstd !)
959 transferred 99.5 KB in * seconds (* */sec) (glob) (zstd !)
1000 transferred 99.5 KB in * seconds (* */sec) (glob) (zstd no-rust !)
1001 transferred 99.6 KB in * seconds (* */sec) (glob) (zstd rust !)
960 $ hg -R with-obsolescence log -T '{rev}: {phase}\n'
1002 $ hg -R with-obsolescence log -T '{rev}: {phase}\n'
961 2: draft
1003 2: draft
962 1: draft
1004 1: draft
963 0: draft
1005 0: draft
964 $ hg debugobsolete -R with-obsolescence
1006 $ hg debugobsolete -R with-obsolescence
965 8c206a663911c1f97f2f9d7382e417ae55872cfa 0 {5223b5e3265f0df40bb743da62249413d74ac70f} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
1007 8c206a663911c1f97f2f9d7382e417ae55872cfa 0 {5223b5e3265f0df40bb743da62249413d74ac70f} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
966 $ hg verify -R with-obsolescence -q
1008 $ hg verify -R with-obsolescence -q
967
1009
968 $ hg clone -U --stream --config experimental.evolution=0 http://localhost:$HGPORT with-obsolescence-no-evolution
1010 $ hg clone -U --stream --config experimental.evolution=0 http://localhost:$HGPORT with-obsolescence-no-evolution
969 streaming all changes
1011 streaming all changes
970 remote: abort: server has obsolescence markers, but client cannot receive them via stream clone
1012 remote: abort: server has obsolescence markers, but client cannot receive them via stream clone
971 abort: pull failed on remote
1013 abort: pull failed on remote
972 [100]
1014 [100]
973
1015
974 $ killdaemons.py
1016 $ killdaemons.py
975
1017
976 #endif
1018 #endif
977
1019
978 Cloning a repo with no requirements doesn't give some obscure error
1020 Cloning a repo with no requirements doesn't give some obscure error
979
1021
980 $ mkdir -p empty-repo/.hg
1022 $ mkdir -p empty-repo/.hg
981 $ hg clone -q --stream ssh://user@dummy/empty-repo empty-repo2
1023 $ hg clone -q --stream ssh://user@dummy/empty-repo empty-repo2
982 $ hg --cwd empty-repo2 verify -q
1024 $ hg --cwd empty-repo2 verify -q
983
1025
984 Cloning a repo with an empty manifestlog doesn't give some weird error
1026 Cloning a repo with an empty manifestlog doesn't give some weird error
985
1027
986 $ rm -r empty-repo; hg init empty-repo
1028 $ rm -r empty-repo; hg init empty-repo
987 $ (cd empty-repo; touch x; hg commit -Am empty; hg debugstrip -r 0) > /dev/null
1029 $ (cd empty-repo; touch x; hg commit -Am empty; hg debugstrip -r 0) > /dev/null
988 $ hg clone -q --stream ssh://user@dummy/empty-repo empty-repo3
1030 $ hg clone -q --stream ssh://user@dummy/empty-repo empty-repo3
989 $ hg --cwd empty-repo3 verify -q 2>&1 | grep -v warning
1031 $ hg --cwd empty-repo3 verify -q 2>&1 | grep -v warning
990 [1]
1032 [1]
991
1033
992 The warnings filtered out here are talking about zero-length 'orphan' data files.
1034 The warnings filtered out here are talking about zero-length 'orphan' data files.
993 Those are harmless, so that's fine.
1035 Those are harmless, so that's fine.
994
1036
@@ -1,1296 +1,1316 b''
1 Prepare repo a:
1 Prepare repo a:
2
2
3 $ hg init a
3 $ hg init a
4 $ cd a
4 $ cd a
5 $ echo a > a
5 $ echo a > a
6 $ hg add a
6 $ hg add a
7 $ hg commit -m test
7 $ hg commit -m test
8 $ echo first line > b
8 $ echo first line > b
9 $ hg add b
9 $ hg add b
10
10
11 Create a non-inlined filelog (append enough data that the revlog splits into separate .i and .d files):
11 Create a non-inlined filelog (append enough data that the revlog splits into separate .i and .d files):
12
12
13 $ "$PYTHON" -c 'open("data1", "wb").write(b"".join(b"%d\n" % x for x in range(10000)))'
13 $ "$PYTHON" -c 'open("data1", "wb").write(b"".join(b"%d\n" % x for x in range(10000)))'
14 $ for j in 0 1 2 3 4 5 6 7 8 9; do
14 $ for j in 0 1 2 3 4 5 6 7 8 9; do
15 > cat data1 >> b
15 > cat data1 >> b
16 > hg commit -m test
16 > hg commit -m test
17 > done
17 > done
18
18
19 List files in store/data (should show a 'b.d'):
19 List files in store/data (should show a 'b.d'):
20
20
21 #if reporevlogstore
21 #if reporevlogstore
22 $ for i in .hg/store/data/*; do
22 $ for i in .hg/store/data/*; do
23 > echo $i
23 > echo $i
24 > done
24 > done
25 .hg/store/data/a.i
25 .hg/store/data/a.i
26 .hg/store/data/b.d
26 .hg/store/data/b.d
27 .hg/store/data/b.i
27 .hg/store/data/b.i
28 #endif
28 #endif
29
29
30 Trigger branchcache creation:
30 Trigger branchcache creation:
31
31
32 $ hg branches
32 $ hg branches
33 default 10:a7949464abda
33 default 10:a7949464abda
34 $ ls .hg/cache
34 $ ls .hg/cache
35 branch2-served
35 branch2-served
36 rbc-names-v1
36 rbc-names-v1
37 rbc-revs-v1
37 rbc-revs-v1
38
38
39 Default operation:
39 Default operation:
40
40
41 $ hg clone . ../b
41 $ hg clone . ../b
42 updating to branch default
42 updating to branch default
43 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
43 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
44 $ cd ../b
44 $ cd ../b
45
45
46 Ensure branchcache got copied over:
46 Ensure branchcache got copied over:
47
47
48 $ ls .hg/cache
48 $ ls .hg/cache
49 branch2-base
49 branch2-base
50 branch2-immutable
50 branch2-immutable
51 branch2-served
51 branch2-served
52 branch2-served.hidden
52 branch2-served.hidden
53 branch2-visible
53 branch2-visible
54 branch2-visible-hidden
54 branch2-visible-hidden
55 rbc-names-v1
55 rbc-names-v1
56 rbc-revs-v1
56 rbc-revs-v1
57 tags2
57 tags2
58 tags2-served
58 tags2-served
59
59
60 $ cat a
60 $ cat a
61 a
61 a
62 $ hg verify -q
62 $ hg verify -q
63
63
64 Invalid dest '' must abort:
64 Invalid dest '' must abort:
65
65
66 $ hg clone . ''
66 $ hg clone . ''
67 abort: empty destination path is not valid
67 abort: empty destination path is not valid
68 [10]
68 [10]
69
69
70 No update, with debug option:
70 No update, with debug option:
71
71
72 #if hardlink
72 #if hardlink
73 $ hg --debug clone -U . ../c --config progress.debug=true
73 $ hg --debug clone -U . ../c --config progress.debug=true
74 linking: 1/15 files (6.67%)
74 linking: 1/16 files (6.25%) (no-rust !)
75 linking: 2/15 files (13.33%)
75 linking: 2/16 files (12.50%) (no-rust !)
76 linking: 3/15 files (20.00%)
76 linking: 3/16 files (18.75%) (no-rust !)
77 linking: 4/15 files (26.67%)
77 linking: 4/16 files (25.00%) (no-rust !)
78 linking: 5/15 files (33.33%)
78 linking: 5/16 files (31.25%) (no-rust !)
79 linking: 6/15 files (40.00%)
79 linking: 6/16 files (37.50%) (no-rust !)
80 linking: 7/15 files (46.67%)
80 linking: 7/16 files (43.75%) (no-rust !)
81 linking: 8/15 files (53.33%)
81 linking: 8/16 files (50.00%) (no-rust !)
82 linking: 9/15 files (60.00%)
82 linking: 9/16 files (56.25%) (no-rust !)
83 linking: 10/15 files (66.67%)
83 linking: 10/16 files (62.50%) (no-rust !)
84 linking: 11/15 files (73.33%)
84 linking: 11/16 files (68.75%) (no-rust !)
85 linking: 12/15 files (80.00%)
85 linking: 12/16 files (75.00%) (no-rust !)
86 linking: 13/15 files (86.67%)
86 linking: 13/16 files (81.25%) (no-rust !)
87 linking: 14/15 files (93.33%)
87 linking: 14/16 files (87.50%) (no-rust !)
88 linking: 15/15 files (100.00%)
88 linking: 15/16 files (93.75%) (no-rust !)
89 linked 15 files
89 linking: 16/16 files (100.00%) (no-rust !)
90 linked 16 files (no-rust !)
91 linking: 1/18 files (5.56%) (rust !)
92 linking: 2/18 files (11.11%) (rust !)
93 linking: 3/18 files (16.67%) (rust !)
94 linking: 4/18 files (22.22%) (rust !)
95 linking: 5/18 files (27.78%) (rust !)
96 linking: 6/18 files (33.33%) (rust !)
97 linking: 7/18 files (38.89%) (rust !)
98 linking: 8/18 files (44.44%) (rust !)
99 linking: 9/18 files (50.00%) (rust !)
100 linking: 10/18 files (55.56%) (rust !)
101 linking: 11/18 files (61.11%) (rust !)
102 linking: 12/18 files (66.67%) (rust !)
103 linking: 13/18 files (72.22%) (rust !)
104 linking: 14/18 files (77.78%) (rust !)
105 linking: 15/18 files (83.33%) (rust !)
106 linking: 16/18 files (88.89%) (rust !)
107 linking: 17/18 files (94.44%) (rust !)
108 linking: 18/18 files (100.00%) (rust !)
109 linked 18 files (rust !)
90 updating the branch cache
110 updating the branch cache
91 #else
111 #else
92 $ hg --debug clone -U . ../c --config progress.debug=true
112 $ hg --debug clone -U . ../c --config progress.debug=true
93 linking: 1 files
113 linking: 1 files
94 copying: 2 files
114 copying: 2 files
95 copying: 3 files
115 copying: 3 files
96 copying: 4 files
116 copying: 4 files
97 copying: 5 files
117 copying: 5 files
98 copying: 6 files
118 copying: 6 files
99 copying: 7 files
119 copying: 7 files
100 copying: 8 files
120 copying: 8 files
101 #endif
121 #endif
102 $ cd ../c
122 $ cd ../c
103
123
104 Ensure branchcache got copied over:
124 Ensure branchcache got copied over:
105
125
106 $ ls .hg/cache
126 $ ls .hg/cache
107 branch2-base
127 branch2-base
108 branch2-immutable
128 branch2-immutable
109 branch2-served
129 branch2-served
110 branch2-served.hidden
130 branch2-served.hidden
111 branch2-visible
131 branch2-visible
112 branch2-visible-hidden
132 branch2-visible-hidden
113 rbc-names-v1
133 rbc-names-v1
114 rbc-revs-v1
134 rbc-revs-v1
115 tags2
135 tags2
116 tags2-served
136 tags2-served
117
137
118 $ cat a 2>/dev/null || echo "a not present"
138 $ cat a 2>/dev/null || echo "a not present"
119 a not present
139 a not present
120 $ hg verify -q
140 $ hg verify -q
121
141
122 Default destination:
142 Default destination:
123
143
124 $ mkdir ../d
144 $ mkdir ../d
125 $ cd ../d
145 $ cd ../d
126 $ hg clone ../a
146 $ hg clone ../a
127 destination directory: a
147 destination directory: a
128 updating to branch default
148 updating to branch default
129 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
149 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
130 $ cd a
150 $ cd a
131 $ hg cat a
151 $ hg cat a
132 a
152 a
133 $ cd ../..
153 $ cd ../..
134
154
135 Check that we drop the 'file:' from the path before writing the .hgrc:
155 Check that we drop the 'file:' from the path before writing the .hgrc:
136
156
137 $ hg clone file:a e
157 $ hg clone file:a e
138 updating to branch default
158 updating to branch default
139 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
159 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
140 $ grep 'file:' e/.hg/hgrc
160 $ grep 'file:' e/.hg/hgrc
141 [1]
161 [1]
142
162
143 Check that path aliases are expanded:
163 Check that path aliases are expanded:
144
164
145 $ hg clone -q -U --config 'paths.foobar=a#0' foobar f
165 $ hg clone -q -U --config 'paths.foobar=a#0' foobar f
146 $ hg -R f showconfig paths.default
166 $ hg -R f showconfig paths.default
147 $TESTTMP/a#0
167 $TESTTMP/a#0
148
168
149 Use --pull:
169 Use --pull:
150
170
151 $ hg clone --pull a g
171 $ hg clone --pull a g
152 requesting all changes
172 requesting all changes
153 adding changesets
173 adding changesets
154 adding manifests
174 adding manifests
155 adding file changes
175 adding file changes
156 added 11 changesets with 11 changes to 2 files
176 added 11 changesets with 11 changes to 2 files
157 new changesets acb14030fe0a:a7949464abda
177 new changesets acb14030fe0a:a7949464abda
158 updating to branch default
178 updating to branch default
159 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
179 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
160 $ hg -R g verify -q
180 $ hg -R g verify -q
161
181
162 Invalid dest '' with --pull must abort (issue2528):
182 Invalid dest '' with --pull must abort (issue2528):
163
183
164 $ hg clone --pull a ''
184 $ hg clone --pull a ''
165 abort: empty destination path is not valid
185 abort: empty destination path is not valid
166 [10]
186 [10]
167
187
168 Clone to '.':
188 Clone to '.':
169
189
170 $ mkdir h
190 $ mkdir h
171 $ cd h
191 $ cd h
172 $ hg clone ../a .
192 $ hg clone ../a .
173 updating to branch default
193 updating to branch default
174 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
194 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
175 $ cd ..
195 $ cd ..
176
196
177
197
178 *** Tests for option -u ***
198 *** Tests for option -u ***
179
199
180 Adding some more history to repo a:
200 Adding some more history to repo a:
181
201
182 $ cd a
202 $ cd a
183 $ hg tag ref1
203 $ hg tag ref1
184 $ echo the quick brown fox >a
204 $ echo the quick brown fox >a
185 $ hg ci -m "hacked default"
205 $ hg ci -m "hacked default"
186 $ hg up ref1
206 $ hg up ref1
187 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
207 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
188 $ hg branch stable
208 $ hg branch stable
189 marked working directory as branch stable
209 marked working directory as branch stable
190 (branches are permanent and global, did you want a bookmark?)
210 (branches are permanent and global, did you want a bookmark?)
191 $ echo some text >a
211 $ echo some text >a
192 $ hg ci -m "starting branch stable"
212 $ hg ci -m "starting branch stable"
193 $ hg tag ref2
213 $ hg tag ref2
194 $ echo some more text >a
214 $ echo some more text >a
195 $ hg ci -m "another change for branch stable"
215 $ hg ci -m "another change for branch stable"
196 $ hg up ref2
216 $ hg up ref2
197 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
217 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
198 $ hg parents
218 $ hg parents
199 changeset: 13:e8ece76546a6
219 changeset: 13:e8ece76546a6
200 branch: stable
220 branch: stable
201 tag: ref2
221 tag: ref2
202 parent: 10:a7949464abda
222 parent: 10:a7949464abda
203 user: test
223 user: test
204 date: Thu Jan 01 00:00:00 1970 +0000
224 date: Thu Jan 01 00:00:00 1970 +0000
205 summary: starting branch stable
225 summary: starting branch stable
206
226
207
227
208 Repo a has two heads:
228 Repo a has two heads:
209
229
210 $ hg heads
230 $ hg heads
211 changeset: 15:0aae7cf88f0d
231 changeset: 15:0aae7cf88f0d
212 branch: stable
232 branch: stable
213 tag: tip
233 tag: tip
214 user: test
234 user: test
215 date: Thu Jan 01 00:00:00 1970 +0000
235 date: Thu Jan 01 00:00:00 1970 +0000
216 summary: another change for branch stable
236 summary: another change for branch stable
217
237
218 changeset: 12:f21241060d6a
238 changeset: 12:f21241060d6a
219 user: test
239 user: test
220 date: Thu Jan 01 00:00:00 1970 +0000
240 date: Thu Jan 01 00:00:00 1970 +0000
221 summary: hacked default
241 summary: hacked default
222
242
223
243
224 $ cd ..
244 $ cd ..
225
245
226
246
227 Testing --noupdate with --updaterev (must abort):
247 Testing --noupdate with --updaterev (must abort):
228
248
229 $ hg clone --noupdate --updaterev 1 a ua
249 $ hg clone --noupdate --updaterev 1 a ua
230 abort: cannot specify both --noupdate and --updaterev
250 abort: cannot specify both --noupdate and --updaterev
231 [10]
251 [10]
232
252
233
253
234 Testing clone -u:
254 Testing clone -u:
235
255
236 $ hg clone -u . a ua
256 $ hg clone -u . a ua
237 updating to branch stable
257 updating to branch stable
238 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
258 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
239
259
240 Repo ua has both heads:
260 Repo ua has both heads:
241
261
242 $ hg -R ua heads
262 $ hg -R ua heads
243 changeset: 15:0aae7cf88f0d
263 changeset: 15:0aae7cf88f0d
244 branch: stable
264 branch: stable
245 tag: tip
265 tag: tip
246 user: test
266 user: test
247 date: Thu Jan 01 00:00:00 1970 +0000
267 date: Thu Jan 01 00:00:00 1970 +0000
248 summary: another change for branch stable
268 summary: another change for branch stable
249
269
250 changeset: 12:f21241060d6a
270 changeset: 12:f21241060d6a
251 user: test
271 user: test
252 date: Thu Jan 01 00:00:00 1970 +0000
272 date: Thu Jan 01 00:00:00 1970 +0000
253 summary: hacked default
273 summary: hacked default
254
274
255
275
256 Same revision checked out in repo a and ua:
276 Same revision checked out in repo a and ua:
257
277
258 $ hg -R a parents --template "{node|short}\n"
278 $ hg -R a parents --template "{node|short}\n"
259 e8ece76546a6
279 e8ece76546a6
260 $ hg -R ua parents --template "{node|short}\n"
280 $ hg -R ua parents --template "{node|short}\n"
261 e8ece76546a6
281 e8ece76546a6
262
282
263 $ rm -r ua
283 $ rm -r ua
264
284
265
285
266 Testing clone --pull -u:
286 Testing clone --pull -u:
267
287
268 $ hg clone --pull -u . a ua
288 $ hg clone --pull -u . a ua
269 requesting all changes
289 requesting all changes
270 adding changesets
290 adding changesets
271 adding manifests
291 adding manifests
272 adding file changes
292 adding file changes
273 added 16 changesets with 16 changes to 3 files (+1 heads)
293 added 16 changesets with 16 changes to 3 files (+1 heads)
274 new changesets acb14030fe0a:0aae7cf88f0d
294 new changesets acb14030fe0a:0aae7cf88f0d
275 updating to branch stable
295 updating to branch stable
276 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
296 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
277
297
278 Repo ua has both heads:
298 Repo ua has both heads:
279
299
280 $ hg -R ua heads
300 $ hg -R ua heads
281 changeset: 15:0aae7cf88f0d
301 changeset: 15:0aae7cf88f0d
282 branch: stable
302 branch: stable
283 tag: tip
303 tag: tip
284 user: test
304 user: test
285 date: Thu Jan 01 00:00:00 1970 +0000
305 date: Thu Jan 01 00:00:00 1970 +0000
286 summary: another change for branch stable
306 summary: another change for branch stable
287
307
288 changeset: 12:f21241060d6a
308 changeset: 12:f21241060d6a
289 user: test
309 user: test
290 date: Thu Jan 01 00:00:00 1970 +0000
310 date: Thu Jan 01 00:00:00 1970 +0000
291 summary: hacked default
311 summary: hacked default
292
312
293
313
294 Same revision checked out in repo a and ua:
314 Same revision checked out in repo a and ua:
295
315
296 $ hg -R a parents --template "{node|short}\n"
316 $ hg -R a parents --template "{node|short}\n"
297 e8ece76546a6
317 e8ece76546a6
298 $ hg -R ua parents --template "{node|short}\n"
318 $ hg -R ua parents --template "{node|short}\n"
299 e8ece76546a6
319 e8ece76546a6
300
320
301 $ rm -r ua
321 $ rm -r ua
302
322
303
323
304 Testing clone -u <branch>:
324 Testing clone -u <branch>:
305
325
306 $ hg clone -u stable a ua
326 $ hg clone -u stable a ua
307 updating to branch stable
327 updating to branch stable
308 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
328 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
309
329
310 Repo ua has both heads:
330 Repo ua has both heads:
311
331
312 $ hg -R ua heads
332 $ hg -R ua heads
313 changeset: 15:0aae7cf88f0d
333 changeset: 15:0aae7cf88f0d
314 branch: stable
334 branch: stable
315 tag: tip
335 tag: tip
316 user: test
336 user: test
317 date: Thu Jan 01 00:00:00 1970 +0000
337 date: Thu Jan 01 00:00:00 1970 +0000
318 summary: another change for branch stable
338 summary: another change for branch stable
319
339
320 changeset: 12:f21241060d6a
340 changeset: 12:f21241060d6a
321 user: test
341 user: test
322 date: Thu Jan 01 00:00:00 1970 +0000
342 date: Thu Jan 01 00:00:00 1970 +0000
323 summary: hacked default
343 summary: hacked default
324
344
325
345
326 Branch 'stable' is checked out:
346 Branch 'stable' is checked out:
327
347
328 $ hg -R ua parents
348 $ hg -R ua parents
329 changeset: 15:0aae7cf88f0d
349 changeset: 15:0aae7cf88f0d
330 branch: stable
350 branch: stable
331 tag: tip
351 tag: tip
332 user: test
352 user: test
333 date: Thu Jan 01 00:00:00 1970 +0000
353 date: Thu Jan 01 00:00:00 1970 +0000
334 summary: another change for branch stable
354 summary: another change for branch stable
335
355
336
356
337 $ rm -r ua
357 $ rm -r ua
338
358
339
359
340 Testing default checkout:
360 Testing default checkout:
341
361
342 $ hg clone a ua
362 $ hg clone a ua
343 updating to branch default
363 updating to branch default
344 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
364 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
345
365
346 Repo ua has both heads:
366 Repo ua has both heads:
347
367
348 $ hg -R ua heads
368 $ hg -R ua heads
349 changeset: 15:0aae7cf88f0d
369 changeset: 15:0aae7cf88f0d
350 branch: stable
370 branch: stable
351 tag: tip
371 tag: tip
352 user: test
372 user: test
353 date: Thu Jan 01 00:00:00 1970 +0000
373 date: Thu Jan 01 00:00:00 1970 +0000
354 summary: another change for branch stable
374 summary: another change for branch stable
355
375
356 changeset: 12:f21241060d6a
376 changeset: 12:f21241060d6a
357 user: test
377 user: test
358 date: Thu Jan 01 00:00:00 1970 +0000
378 date: Thu Jan 01 00:00:00 1970 +0000
359 summary: hacked default
379 summary: hacked default
360
380
361
381
362 Branch 'default' is checked out:
382 Branch 'default' is checked out:
363
383
364 $ hg -R ua parents
384 $ hg -R ua parents
365 changeset: 12:f21241060d6a
385 changeset: 12:f21241060d6a
366 user: test
386 user: test
367 date: Thu Jan 01 00:00:00 1970 +0000
387 date: Thu Jan 01 00:00:00 1970 +0000
368 summary: hacked default
388 summary: hacked default
369
389
370 Test clone with a branch named "@" (issue3677)
390 Test clone with a branch named "@" (issue3677)
371
391
372 $ hg -R ua branch @
392 $ hg -R ua branch @
373 marked working directory as branch @
393 marked working directory as branch @
374 $ hg -R ua commit -m 'created branch @'
394 $ hg -R ua commit -m 'created branch @'
375 $ hg clone ua atbranch
395 $ hg clone ua atbranch
376 updating to branch default
396 updating to branch default
377 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
397 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
378 $ hg -R atbranch heads
398 $ hg -R atbranch heads
379 changeset: 16:798b6d97153e
399 changeset: 16:798b6d97153e
380 branch: @
400 branch: @
381 tag: tip
401 tag: tip
382 parent: 12:f21241060d6a
402 parent: 12:f21241060d6a
383 user: test
403 user: test
384 date: Thu Jan 01 00:00:00 1970 +0000
404 date: Thu Jan 01 00:00:00 1970 +0000
385 summary: created branch @
405 summary: created branch @
386
406
387 changeset: 15:0aae7cf88f0d
407 changeset: 15:0aae7cf88f0d
388 branch: stable
408 branch: stable
389 user: test
409 user: test
390 date: Thu Jan 01 00:00:00 1970 +0000
410 date: Thu Jan 01 00:00:00 1970 +0000
391 summary: another change for branch stable
411 summary: another change for branch stable
392
412
393 changeset: 12:f21241060d6a
413 changeset: 12:f21241060d6a
394 user: test
414 user: test
395 date: Thu Jan 01 00:00:00 1970 +0000
415 date: Thu Jan 01 00:00:00 1970 +0000
396 summary: hacked default
416 summary: hacked default
397
417
398 $ hg -R atbranch parents
418 $ hg -R atbranch parents
399 changeset: 12:f21241060d6a
419 changeset: 12:f21241060d6a
400 user: test
420 user: test
401 date: Thu Jan 01 00:00:00 1970 +0000
421 date: Thu Jan 01 00:00:00 1970 +0000
402 summary: hacked default
422 summary: hacked default
403
423
404
424
405 $ rm -r ua atbranch
425 $ rm -r ua atbranch
406
426
407
427
408 Testing #<branch>:
428 Testing #<branch>:
409
429
410 $ hg clone -u . a#stable ua
430 $ hg clone -u . a#stable ua
411 adding changesets
431 adding changesets
412 adding manifests
432 adding manifests
413 adding file changes
433 adding file changes
414 added 14 changesets with 14 changes to 3 files
434 added 14 changesets with 14 changes to 3 files
415 new changesets acb14030fe0a:0aae7cf88f0d
435 new changesets acb14030fe0a:0aae7cf88f0d
416 updating to branch stable
436 updating to branch stable
417 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
437 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
418
438
419 Repo ua has branches 'stable' and 'default' (this was changed in fd511e9eeea6):
439 Repo ua has branches 'stable' and 'default' (this was changed in fd511e9eeea6):
420
440
421 $ hg -R ua heads
441 $ hg -R ua heads
422 changeset: 13:0aae7cf88f0d
442 changeset: 13:0aae7cf88f0d
423 branch: stable
443 branch: stable
424 tag: tip
444 tag: tip
425 user: test
445 user: test
426 date: Thu Jan 01 00:00:00 1970 +0000
446 date: Thu Jan 01 00:00:00 1970 +0000
427 summary: another change for branch stable
447 summary: another change for branch stable
428
448
429 changeset: 10:a7949464abda
449 changeset: 10:a7949464abda
430 user: test
450 user: test
431 date: Thu Jan 01 00:00:00 1970 +0000
451 date: Thu Jan 01 00:00:00 1970 +0000
432 summary: test
452 summary: test
433
453
434
454
435 Same revision checked out in repo a and ua:
455 Same revision checked out in repo a and ua:
436
456
437 $ hg -R a parents --template "{node|short}\n"
457 $ hg -R a parents --template "{node|short}\n"
438 e8ece76546a6
458 e8ece76546a6
439 $ hg -R ua parents --template "{node|short}\n"
459 $ hg -R ua parents --template "{node|short}\n"
440 e8ece76546a6
460 e8ece76546a6
441
461
442 $ rm -r ua
462 $ rm -r ua
443
463
444
464
445 Testing -u -r <branch>:
465 Testing -u -r <branch>:
446
466
447 $ hg clone -u . -r stable a ua
467 $ hg clone -u . -r stable a ua
448 adding changesets
468 adding changesets
449 adding manifests
469 adding manifests
450 adding file changes
470 adding file changes
451 added 14 changesets with 14 changes to 3 files
471 added 14 changesets with 14 changes to 3 files
452 new changesets acb14030fe0a:0aae7cf88f0d
472 new changesets acb14030fe0a:0aae7cf88f0d
453 updating to branch stable
473 updating to branch stable
454 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
474 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
455
475
456 Repo ua has branches 'stable' and 'default' (this was changed in fd511e9eeea6):
476 Repo ua has branches 'stable' and 'default' (this was changed in fd511e9eeea6):
457
477
458 $ hg -R ua heads
478 $ hg -R ua heads
459 changeset: 13:0aae7cf88f0d
479 changeset: 13:0aae7cf88f0d
460 branch: stable
480 branch: stable
461 tag: tip
481 tag: tip
462 user: test
482 user: test
463 date: Thu Jan 01 00:00:00 1970 +0000
483 date: Thu Jan 01 00:00:00 1970 +0000
464 summary: another change for branch stable
484 summary: another change for branch stable
465
485
466 changeset: 10:a7949464abda
486 changeset: 10:a7949464abda
467 user: test
487 user: test
468 date: Thu Jan 01 00:00:00 1970 +0000
488 date: Thu Jan 01 00:00:00 1970 +0000
469 summary: test
489 summary: test
470
490
471
491
472 Same revision checked out in repo a and ua:
492 Same revision checked out in repo a and ua:
473
493
474 $ hg -R a parents --template "{node|short}\n"
494 $ hg -R a parents --template "{node|short}\n"
475 e8ece76546a6
495 e8ece76546a6
476 $ hg -R ua parents --template "{node|short}\n"
496 $ hg -R ua parents --template "{node|short}\n"
477 e8ece76546a6
497 e8ece76546a6
478
498
479 $ rm -r ua
499 $ rm -r ua
480
500
481
501
482 Testing -r <branch>:
502 Testing -r <branch>:
483
503
484 $ hg clone -r stable a ua
504 $ hg clone -r stable a ua
485 adding changesets
505 adding changesets
486 adding manifests
506 adding manifests
487 adding file changes
507 adding file changes
488 added 14 changesets with 14 changes to 3 files
508 added 14 changesets with 14 changes to 3 files
489 new changesets acb14030fe0a:0aae7cf88f0d
509 new changesets acb14030fe0a:0aae7cf88f0d
490 updating to branch stable
510 updating to branch stable
491 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
511 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
492
512
493 Repo ua has branches 'stable' and 'default' (this was changed in fd511e9eeea6):
513 Repo ua has branches 'stable' and 'default' (this was changed in fd511e9eeea6):
494
514
495 $ hg -R ua heads
515 $ hg -R ua heads
496 changeset: 13:0aae7cf88f0d
516 changeset: 13:0aae7cf88f0d
497 branch: stable
517 branch: stable
498 tag: tip
518 tag: tip
499 user: test
519 user: test
500 date: Thu Jan 01 00:00:00 1970 +0000
520 date: Thu Jan 01 00:00:00 1970 +0000
501 summary: another change for branch stable
521 summary: another change for branch stable
502
522
503 changeset: 10:a7949464abda
523 changeset: 10:a7949464abda
504 user: test
524 user: test
505 date: Thu Jan 01 00:00:00 1970 +0000
525 date: Thu Jan 01 00:00:00 1970 +0000
506 summary: test
526 summary: test
507
527
508
528
509 Branch 'stable' is checked out:
529 Branch 'stable' is checked out:
510
530
511 $ hg -R ua parents
531 $ hg -R ua parents
512 changeset: 13:0aae7cf88f0d
532 changeset: 13:0aae7cf88f0d
513 branch: stable
533 branch: stable
514 tag: tip
534 tag: tip
515 user: test
535 user: test
516 date: Thu Jan 01 00:00:00 1970 +0000
536 date: Thu Jan 01 00:00:00 1970 +0000
517 summary: another change for branch stable
537 summary: another change for branch stable
518
538
519
539
520 $ rm -r ua
540 $ rm -r ua
521
541
522
542
523 Issue2267: Error in 1.6 hg.py: TypeError: 'NoneType' object is not
543 Issue2267: Error in 1.6 hg.py: TypeError: 'NoneType' object is not
524 iterable in addbranchrevs()
544 iterable in addbranchrevs()
525
545
526 $ cat <<EOF > simpleclone.py
546 $ cat <<EOF > simpleclone.py
527 > from mercurial import hg, ui as uimod
547 > from mercurial import hg, ui as uimod
528 > myui = uimod.ui.load()
548 > myui = uimod.ui.load()
529 > repo = hg.repository(myui, b'a')
549 > repo = hg.repository(myui, b'a')
530 > hg.clone(myui, {}, repo, dest=b"ua")
550 > hg.clone(myui, {}, repo, dest=b"ua")
531 > EOF
551 > EOF
532
552
533 $ "$PYTHON" simpleclone.py
553 $ "$PYTHON" simpleclone.py
534 updating to branch default
554 updating to branch default
535 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
555 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
536
556
537 $ rm -r ua
557 $ rm -r ua
538
558
539 $ cat <<EOF > branchclone.py
559 $ cat <<EOF > branchclone.py
540 > from mercurial import extensions, hg, ui as uimod
560 > from mercurial import extensions, hg, ui as uimod
541 > myui = uimod.ui.load()
561 > myui = uimod.ui.load()
542 > extensions.loadall(myui)
562 > extensions.loadall(myui)
543 > extensions.populateui(myui)
563 > extensions.populateui(myui)
544 > repo = hg.repository(myui, b'a')
564 > repo = hg.repository(myui, b'a')
545 > hg.clone(myui, {}, repo, dest=b"ua", branch=[b"stable"])
565 > hg.clone(myui, {}, repo, dest=b"ua", branch=[b"stable"])
546 > EOF
566 > EOF
547
567
548 $ "$PYTHON" branchclone.py
568 $ "$PYTHON" branchclone.py
549 adding changesets
569 adding changesets
550 adding manifests
570 adding manifests
551 adding file changes
571 adding file changes
552 added 14 changesets with 14 changes to 3 files
572 added 14 changesets with 14 changes to 3 files
553 new changesets acb14030fe0a:0aae7cf88f0d
573 new changesets acb14030fe0a:0aae7cf88f0d
554 updating to branch stable
574 updating to branch stable
555 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
575 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
556 $ rm -r ua
576 $ rm -r ua
557
577
558 Local clones don't get confused by unusual experimental.evolution options
578 Local clones don't get confused by unusual experimental.evolution options
559
579
560 $ hg clone \
580 $ hg clone \
561 > --config experimental.evolution=allowunstable,allowdivergence,exchange \
581 > --config experimental.evolution=allowunstable,allowdivergence,exchange \
562 > a ua
582 > a ua
563 updating to branch default
583 updating to branch default
564 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
584 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
565 $ rm -r ua
585 $ rm -r ua
566
586
567 $ hg clone \
587 $ hg clone \
568 > --config experimental.evolution.createmarkers=no \
588 > --config experimental.evolution.createmarkers=no \
569 > --config experimental.evolution.allowunstable=yes \
589 > --config experimental.evolution.allowunstable=yes \
570 > --config experimental.evolution.allowdivergence=yes \
590 > --config experimental.evolution.allowdivergence=yes \
571 > --config experimental.evolution.exchange=yes \
591 > --config experimental.evolution.exchange=yes \
572 > a ua
592 > a ua
573 updating to branch default
593 updating to branch default
574 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
594 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
575 $ rm -r ua
595 $ rm -r ua
576
596
577 Test clone with special '@' bookmark:
597 Test clone with special '@' bookmark:
578 $ cd a
598 $ cd a
579 $ hg bookmark -r a7949464abda @ # branch point of stable from default
599 $ hg bookmark -r a7949464abda @ # branch point of stable from default
580 $ hg clone . ../i
600 $ hg clone . ../i
581 updating to bookmark @
601 updating to bookmark @
582 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
602 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
583 $ hg id -i ../i
603 $ hg id -i ../i
584 a7949464abda
604 a7949464abda
585 $ rm -r ../i
605 $ rm -r ../i
586
606
587 $ hg bookmark -f -r stable @
607 $ hg bookmark -f -r stable @
588 $ hg bookmarks
608 $ hg bookmarks
589 @ 15:0aae7cf88f0d
609 @ 15:0aae7cf88f0d
590 $ hg clone . ../i
610 $ hg clone . ../i
591 updating to bookmark @ on branch stable
611 updating to bookmark @ on branch stable
592 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
612 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
593 $ hg id -i ../i
613 $ hg id -i ../i
594 0aae7cf88f0d
614 0aae7cf88f0d
595 $ cd "$TESTTMP"
615 $ cd "$TESTTMP"
596
616
597
617
598 Testing failures:
618 Testing failures:
599
619
600 $ mkdir fail
620 $ mkdir fail
601 $ cd fail
621 $ cd fail
602
622
603 No local source
623 No local source
604
624
605 $ hg clone a b
625 $ hg clone a b
606 abort: repository a not found
626 abort: repository a not found
607 [255]
627 [255]
608
628
609 Invalid URL
629 Invalid URL
610
630
611 $ hg clone http://invalid:url/a b
631 $ hg clone http://invalid:url/a b
612 abort: error: nonnumeric port: 'url'
632 abort: error: nonnumeric port: 'url'
613 [100]
633 [100]
614
634
615 No remote source
635 No remote source
616
636
617 #if windows
637 #if windows
618 $ hg clone http://$LOCALIP:3121/a b
638 $ hg clone http://$LOCALIP:3121/a b
619 abort: error: * (glob)
639 abort: error: * (glob)
620 [100]
640 [100]
621 #else
641 #else
622 $ hg clone http://$LOCALIP:3121/a b
642 $ hg clone http://$LOCALIP:3121/a b
623 abort: error: *refused* (glob)
643 abort: error: *refused* (glob)
624 [100]
644 [100]
625 #endif
645 #endif
626 $ rm -rf b # work around bug with http clone
646 $ rm -rf b # work around bug with http clone
627
647
628
648
629 #if unix-permissions no-root
649 #if unix-permissions no-root
630
650
631 Inaccessible source
651 Inaccessible source
632
652
633 $ mkdir a
653 $ mkdir a
634 $ chmod 000 a
654 $ chmod 000 a
635 $ hg clone a b
655 $ hg clone a b
636 abort: $EACCES$: *$TESTTMP/fail/a/.hg* (glob)
656 abort: $EACCES$: *$TESTTMP/fail/a/.hg* (glob)
637 [255]
657 [255]
638
658
639 Inaccessible destination
659 Inaccessible destination
640
660
641 $ hg init b
661 $ hg init b
642 $ cd b
662 $ cd b
643 $ hg clone . ../a
663 $ hg clone . ../a
644 abort: $EACCES$: *../a* (glob)
664 abort: $EACCES$: *../a* (glob)
645 [255]
665 [255]
646 $ cd ..
666 $ cd ..
647 $ chmod 700 a
667 $ chmod 700 a
648 $ rm -r a b
668 $ rm -r a b
649
669
650 #endif
670 #endif
651
671
652
672
653 #if fifo
673 #if fifo
654
674
655 Source of wrong type
675 Source of wrong type
656
676
657 $ mkfifo a
677 $ mkfifo a
658 $ hg clone a b
678 $ hg clone a b
659 abort: $ENOTDIR$: *$TESTTMP/fail/a/.hg* (glob)
679 abort: $ENOTDIR$: *$TESTTMP/fail/a/.hg* (glob)
660 [255]
680 [255]
661 $ rm a
681 $ rm a
662
682
663 #endif
683 #endif
664
684
665 Default destination, same directory
685 Default destination, same directory
666
686
667 $ hg init q
687 $ hg init q
668 $ hg clone q
688 $ hg clone q
669 destination directory: q
689 destination directory: q
670 abort: destination 'q' is not empty
690 abort: destination 'q' is not empty
671 [10]
691 [10]
672
692
673 destination directory not empty
693 destination directory not empty
674
694
675 $ mkdir a
695 $ mkdir a
676 $ echo stuff > a/a
696 $ echo stuff > a/a
677 $ hg clone q a
697 $ hg clone q a
678 abort: destination 'a' is not empty
698 abort: destination 'a' is not empty
679 [10]
699 [10]
680
700
681
701
682 #if unix-permissions no-root
702 #if unix-permissions no-root
683
703
684 leave existing directory in place after clone failure
704 leave existing directory in place after clone failure
685
705
686 $ hg init c
706 $ hg init c
687 $ cd c
707 $ cd c
688 $ echo c > c
708 $ echo c > c
689 $ hg commit -A -m test
709 $ hg commit -A -m test
690 adding c
710 adding c
691 $ chmod -rx .hg/store/data
711 $ chmod -rx .hg/store/data
692 $ cd ..
712 $ cd ..
693 $ mkdir d
713 $ mkdir d
694 $ hg clone c d 2> err
714 $ hg clone c d 2> err
695 [255]
715 [255]
696 $ test -d d
716 $ test -d d
697 $ test -d d/.hg
717 $ test -d d/.hg
698 [1]
718 [1]
699
719
700 re-enable perm to allow deletion
720 re-enable perm to allow deletion
701
721
702 $ chmod +rx c/.hg/store/data
722 $ chmod +rx c/.hg/store/data
703
723
704 #endif
724 #endif
705
725
706 $ cd ..
726 $ cd ..
707
727
708 Test clone from a repository in (emulated) revlog format 0 (issue4203):
728 Test clone from a repository in (emulated) revlog format 0 (issue4203):
709
729
710 $ mkdir issue4203
730 $ mkdir issue4203
711 $ mkdir -p src/.hg
731 $ mkdir -p src/.hg
712 $ echo foo > src/foo
732 $ echo foo > src/foo
713 $ hg -R src add src/foo
733 $ hg -R src add src/foo
714 $ hg -R src commit -m '#0'
734 $ hg -R src commit -m '#0'
715 $ hg -R src log -q
735 $ hg -R src log -q
716 0:e1bab28bca43
736 0:e1bab28bca43
717 $ hg -R src debugrevlog -c | grep -E 'format|flags'
737 $ hg -R src debugrevlog -c | grep -E 'format|flags'
718 format : 0
738 format : 0
719 flags : (none)
739 flags : (none)
720 $ hg root -R src -T json | sed 's|\\\\|\\|g'
740 $ hg root -R src -T json | sed 's|\\\\|\\|g'
721 [
741 [
722 {
742 {
723 "hgpath": "$TESTTMP/src/.hg",
743 "hgpath": "$TESTTMP/src/.hg",
724 "reporoot": "$TESTTMP/src",
744 "reporoot": "$TESTTMP/src",
725 "storepath": "$TESTTMP/src/.hg"
745 "storepath": "$TESTTMP/src/.hg"
726 }
746 }
727 ]
747 ]
728 $ hg clone -U -q src dst
748 $ hg clone -U -q src dst
729 $ hg -R dst log -q
749 $ hg -R dst log -q
730 0:e1bab28bca43
750 0:e1bab28bca43
731
751
732 Create repositories to test auto sharing functionality
752 Create repositories to test auto sharing functionality
733
753
734 $ cat >> $HGRCPATH << EOF
754 $ cat >> $HGRCPATH << EOF
735 > [extensions]
755 > [extensions]
736 > share=
756 > share=
737 > EOF
757 > EOF
738
758
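For context, the pooled clones exercised below pass share.pool and share.poolnaming
on the command line; the equivalent permanent client configuration would be an hgrc
along these lines (a sketch only, with an illustrative pool path):

  [extensions]
  share =
  [share]
  # directory that holds the pooled stores (illustrative path)
  pool = /path/to/pool
  # 'identity' (the default) names pool entries after the root changeset;
  # 'remote' names them after the clone source
  poolnaming = identity
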
739 $ hg init empty
759 $ hg init empty
740 $ hg init source1a
760 $ hg init source1a
741 $ cd source1a
761 $ cd source1a
742 $ echo initial1 > foo
762 $ echo initial1 > foo
743 $ hg -q commit -A -m initial
763 $ hg -q commit -A -m initial
744 $ echo second > foo
764 $ echo second > foo
745 $ hg commit -m second
765 $ hg commit -m second
746 $ cd ..
766 $ cd ..
747
767
748 $ hg init filteredrev0
768 $ hg init filteredrev0
749 $ cd filteredrev0
769 $ cd filteredrev0
750 $ cat >> .hg/hgrc << EOF
770 $ cat >> .hg/hgrc << EOF
751 > [experimental]
771 > [experimental]
752 > evolution.createmarkers=True
772 > evolution.createmarkers=True
753 > EOF
773 > EOF
754 $ echo initial1 > foo
774 $ echo initial1 > foo
755 $ hg -q commit -A -m initial0
775 $ hg -q commit -A -m initial0
756 $ hg -q up -r null
776 $ hg -q up -r null
757 $ echo initial2 > foo
777 $ echo initial2 > foo
758 $ hg -q commit -A -m initial1
778 $ hg -q commit -A -m initial1
759 $ hg debugobsolete c05d5c47a5cf81401869999f3d05f7d699d2b29a e082c1832e09a7d1e78b7fd49a592d372de854c8
779 $ hg debugobsolete c05d5c47a5cf81401869999f3d05f7d699d2b29a e082c1832e09a7d1e78b7fd49a592d372de854c8
760 1 new obsolescence markers
780 1 new obsolescence markers
761 obsoleted 1 changesets
781 obsoleted 1 changesets
762 $ cd ..
782 $ cd ..
763
783
764 $ hg -q clone --pull source1a source1b
784 $ hg -q clone --pull source1a source1b
765 $ cd source1a
785 $ cd source1a
766 $ hg bookmark bookA
786 $ hg bookmark bookA
767 $ echo 1a > foo
787 $ echo 1a > foo
768 $ hg commit -m 1a
788 $ hg commit -m 1a
769 $ cd ../source1b
789 $ cd ../source1b
770 $ hg -q up -r 0
790 $ hg -q up -r 0
771 $ echo head1 > foo
791 $ echo head1 > foo
772 $ hg commit -m head1
792 $ hg commit -m head1
773 created new head
793 created new head
774 $ hg bookmark head1
794 $ hg bookmark head1
775 $ hg -q up -r 0
795 $ hg -q up -r 0
776 $ echo head2 > foo
796 $ echo head2 > foo
777 $ hg commit -m head2
797 $ hg commit -m head2
778 created new head
798 created new head
779 $ hg bookmark head2
799 $ hg bookmark head2
780 $ hg -q up -r 0
800 $ hg -q up -r 0
781 $ hg branch branch1
801 $ hg branch branch1
782 marked working directory as branch branch1
802 marked working directory as branch branch1
783 (branches are permanent and global, did you want a bookmark?)
803 (branches are permanent and global, did you want a bookmark?)
784 $ echo branch1 > foo
804 $ echo branch1 > foo
785 $ hg commit -m branch1
805 $ hg commit -m branch1
786 $ hg -q up -r 0
806 $ hg -q up -r 0
787 $ hg branch branch2
807 $ hg branch branch2
788 marked working directory as branch branch2
808 marked working directory as branch branch2
789 $ echo branch2 > foo
809 $ echo branch2 > foo
790 $ hg commit -m branch2
810 $ hg commit -m branch2
791 $ cd ..
811 $ cd ..
792 $ hg init source2
812 $ hg init source2
793 $ cd source2
813 $ cd source2
794 $ echo initial2 > foo
814 $ echo initial2 > foo
795 $ hg -q commit -A -m initial2
815 $ hg -q commit -A -m initial2
796 $ echo second > foo
816 $ echo second > foo
797 $ hg commit -m second
817 $ hg commit -m second
798 $ cd ..
818 $ cd ..
799
819
800 Clone with auto share from an empty repo should not result in share
820 Clone with auto share from an empty repo should not result in share
801
821
802 $ mkdir share
822 $ mkdir share
803 $ hg --config share.pool=share clone empty share-empty
823 $ hg --config share.pool=share clone empty share-empty
804 (not using pooled storage: remote appears to be empty)
824 (not using pooled storage: remote appears to be empty)
805 updating to branch default
825 updating to branch default
806 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
826 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
807 $ ls share
827 $ ls share
808 $ test -d share-empty/.hg/store
828 $ test -d share-empty/.hg/store
809 $ test -f share-empty/.hg/sharedpath
829 $ test -f share-empty/.hg/sharedpath
810 [1]
830 [1]
811
831
812 Clone with auto share from a repo with filtered revision 0 should not result in share
832 Clone with auto share from a repo with filtered revision 0 should not result in share
813
833
814 $ hg --config share.pool=share clone filteredrev0 share-filtered
834 $ hg --config share.pool=share clone filteredrev0 share-filtered
815 (not using pooled storage: unable to resolve identity of remote)
835 (not using pooled storage: unable to resolve identity of remote)
816 requesting all changes
836 requesting all changes
817 adding changesets
837 adding changesets
818 adding manifests
838 adding manifests
819 adding file changes
839 adding file changes
820 added 1 changesets with 1 changes to 1 files
840 added 1 changesets with 1 changes to 1 files
821 new changesets e082c1832e09
841 new changesets e082c1832e09
822 updating to branch default
842 updating to branch default
823 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
843 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
824
844
825 Clone from repo with content should result in shared store being created
845 Clone from repo with content should result in shared store being created
826
846
827 $ hg --config share.pool=share clone source1a share-dest1a
847 $ hg --config share.pool=share clone source1a share-dest1a
828 (sharing from new pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
848 (sharing from new pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
829 requesting all changes
849 requesting all changes
830 adding changesets
850 adding changesets
831 adding manifests
851 adding manifests
832 adding file changes
852 adding file changes
833 added 3 changesets with 3 changes to 1 files
853 added 3 changesets with 3 changes to 1 files
834 new changesets b5f04eac9d8f:e5bfe23c0b47
854 new changesets b5f04eac9d8f:e5bfe23c0b47
835 searching for changes
855 searching for changes
836 no changes found
856 no changes found
837 adding remote bookmark bookA
857 adding remote bookmark bookA
838 updating working directory
858 updating working directory
839 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
859 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
840
860
841 The shared repo should have been created
861 The shared repo should have been created
842
862
843 $ ls share
863 $ ls share
844 b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1
864 b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1
845
865
846 The destination should point to it
866 The destination should point to it
847
867
848 $ cat share-dest1a/.hg/sharedpath; echo
868 $ cat share-dest1a/.hg/sharedpath; echo
849 $TESTTMP/share/b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1/.hg
869 $TESTTMP/share/b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1/.hg
850
870
851 The destination should have bookmarks
871 The destination should have bookmarks
852
872
853 $ hg -R share-dest1a bookmarks
873 $ hg -R share-dest1a bookmarks
854 bookA 2:e5bfe23c0b47
874 bookA 2:e5bfe23c0b47
855
875
856 The default path should be the remote, not the share
876 The default path should be the remote, not the share
857
877
858 $ hg -R share-dest1a config paths.default
878 $ hg -R share-dest1a config paths.default
859 $TESTTMP/source1a
879 $TESTTMP/source1a
860
880
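For context, that value comes from the clone writing its source into the new
repository's .hg/hgrc; a minimal sketch of the resulting file (path as seen in
this test):

  [paths]
  default = $TESTTMP/source1a
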
861 Clone with existing share dir should result in pull + share
881 Clone with existing share dir should result in pull + share
862
882
863 $ hg --config share.pool=share clone source1b share-dest1b
883 $ hg --config share.pool=share clone source1b share-dest1b
864 (sharing from existing pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
884 (sharing from existing pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
865 searching for changes
885 searching for changes
866 adding changesets
886 adding changesets
867 adding manifests
887 adding manifests
868 adding file changes
888 adding file changes
869 adding remote bookmark head1
889 adding remote bookmark head1
870 adding remote bookmark head2
890 adding remote bookmark head2
871 added 4 changesets with 4 changes to 1 files (+4 heads)
891 added 4 changesets with 4 changes to 1 files (+4 heads)
872 new changesets 4a8dc1ab4c13:6bacf4683960
892 new changesets 4a8dc1ab4c13:6bacf4683960
873 updating working directory
893 updating working directory
874 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
894 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
875
895
876 $ ls share
896 $ ls share
877 b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1
897 b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1
878
898
879 $ cat share-dest1b/.hg/sharedpath; echo
899 $ cat share-dest1b/.hg/sharedpath; echo
880 $TESTTMP/share/b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1/.hg
900 $TESTTMP/share/b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1/.hg
881
901
882 We only get bookmarks from the remote, not everything in the share
902 We only get bookmarks from the remote, not everything in the share
883
903
884 $ hg -R share-dest1b bookmarks
904 $ hg -R share-dest1b bookmarks
885 head1 3:4a8dc1ab4c13
905 head1 3:4a8dc1ab4c13
886 head2 4:99f71071f117
906 head2 4:99f71071f117
887
907
888 Default path should be source, not share.
908 Default path should be source, not share.
889
909
890 $ hg -R share-dest1b config paths.default
910 $ hg -R share-dest1b config paths.default
891 $TESTTMP/source1b
911 $TESTTMP/source1b
892
912
893 Checked out revision should be head of default branch
913 Checked out revision should be head of default branch
894
914
895 $ hg -R share-dest1b log -r .
915 $ hg -R share-dest1b log -r .
896 changeset: 4:99f71071f117
916 changeset: 4:99f71071f117
897 bookmark: head2
917 bookmark: head2
898 parent: 0:b5f04eac9d8f
918 parent: 0:b5f04eac9d8f
899 user: test
919 user: test
900 date: Thu Jan 01 00:00:00 1970 +0000
920 date: Thu Jan 01 00:00:00 1970 +0000
901 summary: head2
921 summary: head2
902
922
903
923
904 Clone from unrelated repo should result in new share
924 Clone from unrelated repo should result in new share
905
925
906 $ hg --config share.pool=share clone source2 share-dest2
926 $ hg --config share.pool=share clone source2 share-dest2
907 (sharing from new pooled repository 22aeff664783fd44c6d9b435618173c118c3448e)
927 (sharing from new pooled repository 22aeff664783fd44c6d9b435618173c118c3448e)
908 requesting all changes
928 requesting all changes
909 adding changesets
929 adding changesets
910 adding manifests
930 adding manifests
911 adding file changes
931 adding file changes
912 added 2 changesets with 2 changes to 1 files
932 added 2 changesets with 2 changes to 1 files
913 new changesets 22aeff664783:63cf6c3dba4a
933 new changesets 22aeff664783:63cf6c3dba4a
914 searching for changes
934 searching for changes
915 no changes found
935 no changes found
916 updating working directory
936 updating working directory
917 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
937 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
918
938
919 $ ls share
939 $ ls share
920 22aeff664783fd44c6d9b435618173c118c3448e
940 22aeff664783fd44c6d9b435618173c118c3448e
921 b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1
941 b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1
922
942
923 remote naming mode works as advertised
943 remote naming mode works as advertised
924
944
925 $ hg --config share.pool=shareremote --config share.poolnaming=remote clone source1a share-remote1a
945 $ hg --config share.pool=shareremote --config share.poolnaming=remote clone source1a share-remote1a
926 (sharing from new pooled repository 195bb1fcdb595c14a6c13e0269129ed78f6debde)
946 (sharing from new pooled repository 195bb1fcdb595c14a6c13e0269129ed78f6debde)
927 requesting all changes
947 requesting all changes
928 adding changesets
948 adding changesets
929 adding manifests
949 adding manifests
930 adding file changes
950 adding file changes
931 added 3 changesets with 3 changes to 1 files
951 added 3 changesets with 3 changes to 1 files
932 new changesets b5f04eac9d8f:e5bfe23c0b47
952 new changesets b5f04eac9d8f:e5bfe23c0b47
933 searching for changes
953 searching for changes
934 no changes found
954 no changes found
935 adding remote bookmark bookA
955 adding remote bookmark bookA
936 updating working directory
956 updating working directory
937 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
957 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
938
958
939 $ ls shareremote
959 $ ls shareremote
940 195bb1fcdb595c14a6c13e0269129ed78f6debde
960 195bb1fcdb595c14a6c13e0269129ed78f6debde
941
961
942 $ hg --config share.pool=shareremote --config share.poolnaming=remote clone source1b share-remote1b
962 $ hg --config share.pool=shareremote --config share.poolnaming=remote clone source1b share-remote1b
943 (sharing from new pooled repository c0d4f83847ca2a873741feb7048a45085fd47c46)
963 (sharing from new pooled repository c0d4f83847ca2a873741feb7048a45085fd47c46)
944 requesting all changes
964 requesting all changes
945 adding changesets
965 adding changesets
946 adding manifests
966 adding manifests
947 adding file changes
967 adding file changes
948 added 6 changesets with 6 changes to 1 files (+4 heads)
968 added 6 changesets with 6 changes to 1 files (+4 heads)
949 new changesets b5f04eac9d8f:6bacf4683960
969 new changesets b5f04eac9d8f:6bacf4683960
950 searching for changes
970 searching for changes
951 no changes found
971 no changes found
952 adding remote bookmark head1
972 adding remote bookmark head1
953 adding remote bookmark head2
973 adding remote bookmark head2
954 updating working directory
974 updating working directory
955 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
975 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
956
976
957 $ ls shareremote
977 $ ls shareremote
958 195bb1fcdb595c14a6c13e0269129ed78f6debde
978 195bb1fcdb595c14a6c13e0269129ed78f6debde
959 c0d4f83847ca2a873741feb7048a45085fd47c46
979 c0d4f83847ca2a873741feb7048a45085fd47c46
960
980
961 request to clone a single revision is respected in sharing mode
981 request to clone a single revision is respected in sharing mode
962
982
963 $ hg --config share.pool=sharerevs clone -r 4a8dc1ab4c13 source1b share-1arev
983 $ hg --config share.pool=sharerevs clone -r 4a8dc1ab4c13 source1b share-1arev
964 (sharing from new pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
984 (sharing from new pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
965 adding changesets
985 adding changesets
966 adding manifests
986 adding manifests
967 adding file changes
987 adding file changes
968 added 2 changesets with 2 changes to 1 files
988 added 2 changesets with 2 changes to 1 files
969 new changesets b5f04eac9d8f:4a8dc1ab4c13
989 new changesets b5f04eac9d8f:4a8dc1ab4c13
970 no changes found
990 no changes found
971 adding remote bookmark head1
991 adding remote bookmark head1
972 updating working directory
992 updating working directory
973 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
993 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
974
994
975 $ hg -R share-1arev log -G
995 $ hg -R share-1arev log -G
976 @ changeset: 1:4a8dc1ab4c13
996 @ changeset: 1:4a8dc1ab4c13
977 | bookmark: head1
997 | bookmark: head1
978 | tag: tip
998 | tag: tip
979 | user: test
999 | user: test
980 | date: Thu Jan 01 00:00:00 1970 +0000
1000 | date: Thu Jan 01 00:00:00 1970 +0000
981 | summary: head1
1001 | summary: head1
982 |
1002 |
983 o changeset: 0:b5f04eac9d8f
1003 o changeset: 0:b5f04eac9d8f
984 user: test
1004 user: test
985 date: Thu Jan 01 00:00:00 1970 +0000
1005 date: Thu Jan 01 00:00:00 1970 +0000
986 summary: initial
1006 summary: initial
987
1007
988
1008
989 making another clone should only pull down requested rev
1009 making another clone should only pull down requested rev
990
1010
991 $ hg --config share.pool=sharerevs clone -r 99f71071f117 source1b share-1brev
1011 $ hg --config share.pool=sharerevs clone -r 99f71071f117 source1b share-1brev
992 (sharing from existing pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
1012 (sharing from existing pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
993 searching for changes
1013 searching for changes
994 adding changesets
1014 adding changesets
995 adding manifests
1015 adding manifests
996 adding file changes
1016 adding file changes
997 adding remote bookmark head1
1017 adding remote bookmark head1
998 adding remote bookmark head2
1018 adding remote bookmark head2
999 added 1 changesets with 1 changes to 1 files (+1 heads)
1019 added 1 changesets with 1 changes to 1 files (+1 heads)
1000 new changesets 99f71071f117
1020 new changesets 99f71071f117
1001 updating working directory
1021 updating working directory
1002 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1022 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1003
1023
1004 $ hg -R share-1brev log -G
1024 $ hg -R share-1brev log -G
1005 @ changeset: 2:99f71071f117
1025 @ changeset: 2:99f71071f117
1006 | bookmark: head2
1026 | bookmark: head2
1007 | tag: tip
1027 | tag: tip
1008 | parent: 0:b5f04eac9d8f
1028 | parent: 0:b5f04eac9d8f
1009 | user: test
1029 | user: test
1010 | date: Thu Jan 01 00:00:00 1970 +0000
1030 | date: Thu Jan 01 00:00:00 1970 +0000
1011 | summary: head2
1031 | summary: head2
1012 |
1032 |
1013 | o changeset: 1:4a8dc1ab4c13
1033 | o changeset: 1:4a8dc1ab4c13
1014 |/ bookmark: head1
1034 |/ bookmark: head1
1015 | user: test
1035 | user: test
1016 | date: Thu Jan 01 00:00:00 1970 +0000
1036 | date: Thu Jan 01 00:00:00 1970 +0000
1017 | summary: head1
1037 | summary: head1
1018 |
1038 |
1019 o changeset: 0:b5f04eac9d8f
1039 o changeset: 0:b5f04eac9d8f
1020 user: test
1040 user: test
1021 date: Thu Jan 01 00:00:00 1970 +0000
1041 date: Thu Jan 01 00:00:00 1970 +0000
1022 summary: initial
1042 summary: initial
1023
1043
1024
1044
1025 Request to clone a single branch is respected in sharing mode
1045 Request to clone a single branch is respected in sharing mode
1026
1046
1027 $ hg --config share.pool=sharebranch clone -b branch1 source1b share-1bbranch1
1047 $ hg --config share.pool=sharebranch clone -b branch1 source1b share-1bbranch1
1028 (sharing from new pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
1048 (sharing from new pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
1029 adding changesets
1049 adding changesets
1030 adding manifests
1050 adding manifests
1031 adding file changes
1051 adding file changes
1032 added 2 changesets with 2 changes to 1 files
1052 added 2 changesets with 2 changes to 1 files
1033 new changesets b5f04eac9d8f:5f92a6c1a1b1
1053 new changesets b5f04eac9d8f:5f92a6c1a1b1
1034 no changes found
1054 no changes found
1035 updating working directory
1055 updating working directory
1036 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1056 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1037
1057
1038 $ hg -R share-1bbranch1 log -G
1058 $ hg -R share-1bbranch1 log -G
1039 o changeset: 1:5f92a6c1a1b1
1059 o changeset: 1:5f92a6c1a1b1
1040 | branch: branch1
1060 | branch: branch1
1041 | tag: tip
1061 | tag: tip
1042 | user: test
1062 | user: test
1043 | date: Thu Jan 01 00:00:00 1970 +0000
1063 | date: Thu Jan 01 00:00:00 1970 +0000
1044 | summary: branch1
1064 | summary: branch1
1045 |
1065 |
1046 @ changeset: 0:b5f04eac9d8f
1066 @ changeset: 0:b5f04eac9d8f
1047 user: test
1067 user: test
1048 date: Thu Jan 01 00:00:00 1970 +0000
1068 date: Thu Jan 01 00:00:00 1970 +0000
1049 summary: initial
1069 summary: initial
1050
1070
1051
1071
1052 $ hg --config share.pool=sharebranch clone -b branch2 source1b share-1bbranch2
1072 $ hg --config share.pool=sharebranch clone -b branch2 source1b share-1bbranch2
1053 (sharing from existing pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
1073 (sharing from existing pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
1054 searching for changes
1074 searching for changes
1055 adding changesets
1075 adding changesets
1056 adding manifests
1076 adding manifests
1057 adding file changes
1077 adding file changes
1058 added 1 changesets with 1 changes to 1 files (+1 heads)
1078 added 1 changesets with 1 changes to 1 files (+1 heads)
1059 new changesets 6bacf4683960
1079 new changesets 6bacf4683960
1060 updating working directory
1080 updating working directory
1061 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1081 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1062
1082
1063 $ hg -R share-1bbranch2 log -G
1083 $ hg -R share-1bbranch2 log -G
1064 o changeset: 2:6bacf4683960
1084 o changeset: 2:6bacf4683960
1065 | branch: branch2
1085 | branch: branch2
1066 | tag: tip
1086 | tag: tip
1067 | parent: 0:b5f04eac9d8f
1087 | parent: 0:b5f04eac9d8f
1068 | user: test
1088 | user: test
1069 | date: Thu Jan 01 00:00:00 1970 +0000
1089 | date: Thu Jan 01 00:00:00 1970 +0000
1070 | summary: branch2
1090 | summary: branch2
1071 |
1091 |
1072 | o changeset: 1:5f92a6c1a1b1
1092 | o changeset: 1:5f92a6c1a1b1
1073 |/ branch: branch1
1093 |/ branch: branch1
1074 | user: test
1094 | user: test
1075 | date: Thu Jan 01 00:00:00 1970 +0000
1095 | date: Thu Jan 01 00:00:00 1970 +0000
1076 | summary: branch1
1096 | summary: branch1
1077 |
1097 |
1078 @ changeset: 0:b5f04eac9d8f
1098 @ changeset: 0:b5f04eac9d8f
1079 user: test
1099 user: test
1080 date: Thu Jan 01 00:00:00 1970 +0000
1100 date: Thu Jan 01 00:00:00 1970 +0000
1081 summary: initial
1101 summary: initial
1082
1102
1083
1103
1084 -U is respected in share clone mode
1104 -U is respected in share clone mode
1085
1105
1086 $ hg --config share.pool=share clone -U source1a share-1anowc
1106 $ hg --config share.pool=share clone -U source1a share-1anowc
1087 (sharing from existing pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
1107 (sharing from existing pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
1088 searching for changes
1108 searching for changes
1089 no changes found
1109 no changes found
1090 adding remote bookmark bookA
1110 adding remote bookmark bookA
1091
1111
1092 $ ls -A share-1anowc
1112 $ ls -A share-1anowc
1093 .hg
1113 .hg
1094
1114
1095 Test that auto sharing doesn't cause failure of "hg clone local remote"
1115 Test that auto sharing doesn't cause failure of "hg clone local remote"
1096
1116
1097 $ cd $TESTTMP
1117 $ cd $TESTTMP
1098 $ hg -R a id -r 0
1118 $ hg -R a id -r 0
1099 acb14030fe0a
1119 acb14030fe0a
1100 $ hg id -R remote -r 0
1120 $ hg id -R remote -r 0
1101 abort: repository remote not found
1121 abort: repository remote not found
1102 [255]
1122 [255]
1103 $ hg --config share.pool=share -q clone a ssh://user@dummy/remote
1123 $ hg --config share.pool=share -q clone a ssh://user@dummy/remote
1104 $ hg -R remote id -r 0
1124 $ hg -R remote id -r 0
1105 acb14030fe0a
1125 acb14030fe0a
1106
1126
1107 Cloning into pooled storage doesn't race (issue5104)
1127 Cloning into pooled storage doesn't race (issue5104)
1108
1128
1109 $ HGPOSTLOCKDELAY=2.0 hg --config share.pool=racepool --config extensions.lockdelay=$TESTDIR/lockdelay.py clone source1a share-destrace1 > race1.log 2>&1 &
1129 $ HGPOSTLOCKDELAY=2.0 hg --config share.pool=racepool --config extensions.lockdelay=$TESTDIR/lockdelay.py clone source1a share-destrace1 > race1.log 2>&1 &
1110 $ HGPRELOCKDELAY=1.0 hg --config share.pool=racepool --config extensions.lockdelay=$TESTDIR/lockdelay.py clone source1a share-destrace2 > race2.log 2>&1
1130 $ HGPRELOCKDELAY=1.0 hg --config share.pool=racepool --config extensions.lockdelay=$TESTDIR/lockdelay.py clone source1a share-destrace2 > race2.log 2>&1
1111 $ wait
1131 $ wait
1112
1132
1113 $ hg -R share-destrace1 log -r tip
1133 $ hg -R share-destrace1 log -r tip
1114 changeset: 2:e5bfe23c0b47
1134 changeset: 2:e5bfe23c0b47
1115 bookmark: bookA
1135 bookmark: bookA
1116 tag: tip
1136 tag: tip
1117 user: test
1137 user: test
1118 date: Thu Jan 01 00:00:00 1970 +0000
1138 date: Thu Jan 01 00:00:00 1970 +0000
1119 summary: 1a
1139 summary: 1a
1120
1140
1121
1141
1122 $ hg -R share-destrace2 log -r tip
1142 $ hg -R share-destrace2 log -r tip
1123 changeset: 2:e5bfe23c0b47
1143 changeset: 2:e5bfe23c0b47
1124 bookmark: bookA
1144 bookmark: bookA
1125 tag: tip
1145 tag: tip
1126 user: test
1146 user: test
1127 date: Thu Jan 01 00:00:00 1970 +0000
1147 date: Thu Jan 01 00:00:00 1970 +0000
1128 summary: 1a
1148 summary: 1a
1129
1149
1130 One repo should be new, the other should be shared from the pool. We
1150 One repo should be new, the other should be shared from the pool. We
1131 don't care which is which, so we just make sure we always print the
1151 don't care which is which, so we just make sure we always print the
1132 one containing "new pooled" first, then one one containing "existing
1152 one containing "new pooled" first, then one one containing "existing
1133 pooled".
1153 pooled".
1134
1154
1135 $ (grep 'new pooled' race1.log > /dev/null && cat race1.log || cat race2.log) | grep -v lock
1155 $ (grep 'new pooled' race1.log > /dev/null && cat race1.log || cat race2.log) | grep -v lock
1136 (sharing from new pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
1156 (sharing from new pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
1137 requesting all changes
1157 requesting all changes
1138 adding changesets
1158 adding changesets
1139 adding manifests
1159 adding manifests
1140 adding file changes
1160 adding file changes
1141 added 3 changesets with 3 changes to 1 files
1161 added 3 changesets with 3 changes to 1 files
1142 new changesets b5f04eac9d8f:e5bfe23c0b47
1162 new changesets b5f04eac9d8f:e5bfe23c0b47
1143 searching for changes
1163 searching for changes
1144 no changes found
1164 no changes found
1145 adding remote bookmark bookA
1165 adding remote bookmark bookA
1146 updating working directory
1166 updating working directory
1147 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1167 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1148
1168
1149 $ (grep 'existing pooled' race1.log > /dev/null && cat race1.log || cat race2.log) | grep -v lock
1169 $ (grep 'existing pooled' race1.log > /dev/null && cat race1.log || cat race2.log) | grep -v lock
1150 (sharing from existing pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
1170 (sharing from existing pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
1151 searching for changes
1171 searching for changes
1152 no changes found
1172 no changes found
1153 adding remote bookmark bookA
1173 adding remote bookmark bookA
1154 updating working directory
1174 updating working directory
1155 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1175 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1156
1176
1157 SEC: check for unsafe ssh url
1177 SEC: check for unsafe ssh url
1158
1178
1159 $ cat >> $HGRCPATH << EOF
1179 $ cat >> $HGRCPATH << EOF
1160 > [ui]
1180 > [ui]
1161 > ssh = sh -c "read l; read l; read l"
1181 > ssh = sh -c "read l; read l; read l"
1162 > EOF
1182 > EOF
1163
1183
1164 $ hg clone 'ssh://-oProxyCommand=touch${IFS}owned/path'
1184 $ hg clone 'ssh://-oProxyCommand=touch${IFS}owned/path'
1165 abort: potentially unsafe url: 'ssh://-oProxyCommand=touch${IFS}owned/path'
1185 abort: potentially unsafe url: 'ssh://-oProxyCommand=touch${IFS}owned/path'
1166 [255]
1186 [255]
1167 $ hg clone 'ssh://%2DoProxyCommand=touch${IFS}owned/path'
1187 $ hg clone 'ssh://%2DoProxyCommand=touch${IFS}owned/path'
1168 abort: potentially unsafe url: 'ssh://-oProxyCommand=touch${IFS}owned/path'
1188 abort: potentially unsafe url: 'ssh://-oProxyCommand=touch${IFS}owned/path'
1169 [255]
1189 [255]
1170 $ hg clone 'ssh://fakehost|touch%20owned/path'
1190 $ hg clone 'ssh://fakehost|touch%20owned/path'
1171 abort: no suitable response from remote hg
1191 abort: no suitable response from remote hg
1172 [255]
1192 [255]
1173 $ hg clone 'ssh://fakehost%7Ctouch%20owned/path'
1193 $ hg clone 'ssh://fakehost%7Ctouch%20owned/path'
1174 abort: no suitable response from remote hg
1194 abort: no suitable response from remote hg
1175 [255]
1195 [255]
1176
1196
1177 $ hg clone 'ssh://-oProxyCommand=touch owned%20foo@example.com/nonexistent/path'
1197 $ hg clone 'ssh://-oProxyCommand=touch owned%20foo@example.com/nonexistent/path'
1178 abort: potentially unsafe url: 'ssh://-oProxyCommand=touch owned foo@example.com/nonexistent/path'
1198 abort: potentially unsafe url: 'ssh://-oProxyCommand=touch owned foo@example.com/nonexistent/path'
1179 [255]
1199 [255]
1180
1200
1181 #if windows
1201 #if windows
1182 $ hg clone "ssh://%26touch%20owned%20/" --debug
1202 $ hg clone "ssh://%26touch%20owned%20/" --debug
1183 running sh -c "read l; read l; read l" "&touch owned " "hg -R . serve --stdio"
1203 running sh -c "read l; read l; read l" "&touch owned " "hg -R . serve --stdio"
1184 sending hello command
1204 sending hello command
1185 sending between command
1205 sending between command
1186 abort: no suitable response from remote hg
1206 abort: no suitable response from remote hg
1187 [255]
1207 [255]
1188 $ hg clone "ssh://example.com:%26touch%20owned%20/" --debug
1208 $ hg clone "ssh://example.com:%26touch%20owned%20/" --debug
1189 running sh -c "read l; read l; read l" -p "&touch owned " example.com "hg -R . serve --stdio"
1209 running sh -c "read l; read l; read l" -p "&touch owned " example.com "hg -R . serve --stdio"
1190 sending hello command
1210 sending hello command
1191 sending between command
1211 sending between command
1192 abort: no suitable response from remote hg
1212 abort: no suitable response from remote hg
1193 [255]
1213 [255]
1194 #else
1214 #else
1195 $ hg clone "ssh://%3btouch%20owned%20/" --debug
1215 $ hg clone "ssh://%3btouch%20owned%20/" --debug
1196 running sh -c "read l; read l; read l" ';touch owned ' 'hg -R . serve --stdio'
1216 running sh -c "read l; read l; read l" ';touch owned ' 'hg -R . serve --stdio'
1197 sending hello command
1217 sending hello command
1198 sending between command
1218 sending between command
1199 abort: no suitable response from remote hg
1219 abort: no suitable response from remote hg
1200 [255]
1220 [255]
1201 $ hg clone "ssh://example.com:%3btouch%20owned%20/" --debug
1221 $ hg clone "ssh://example.com:%3btouch%20owned%20/" --debug
1202 running sh -c "read l; read l; read l" -p ';touch owned ' example.com 'hg -R . serve --stdio'
1222 running sh -c "read l; read l; read l" -p ';touch owned ' example.com 'hg -R . serve --stdio'
1203 sending hello command
1223 sending hello command
1204 sending between command
1224 sending between command
1205 abort: no suitable response from remote hg
1225 abort: no suitable response from remote hg
1206 [255]
1226 [255]
1207 #endif
1227 #endif
1208
1228
1209 $ hg clone "ssh://v-alid.example.com/" --debug
1229 $ hg clone "ssh://v-alid.example.com/" --debug
1210 running sh -c "read l; read l; read l" v-alid\.example\.com ['"]hg -R \. serve --stdio['"] (re)
1230 running sh -c "read l; read l; read l" v-alid\.example\.com ['"]hg -R \. serve --stdio['"] (re)
1211 sending hello command
1231 sending hello command
1212 sending between command
1232 sending between command
1213 abort: no suitable response from remote hg
1233 abort: no suitable response from remote hg
1214 [255]
1234 [255]
1215
1235
1216 We should not have created a file named owned - if it exists, the
1236 We should not have created a file named owned - if it exists, the
1217 attack succeeded.
1237 attack succeeded.
1218 $ if test -f owned; then echo 'you got owned'; fi
1238 $ if test -f owned; then echo 'you got owned'; fi
1219
1239
1220 Cloning without fsmonitor enabled does not print a warning for small repos
1240 Cloning without fsmonitor enabled does not print a warning for small repos
1221
1241
1222 $ hg clone a fsmonitor-default
1242 $ hg clone a fsmonitor-default
1223 updating to bookmark @ on branch stable
1243 updating to bookmark @ on branch stable
1224 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
1244 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
1225
1245
1226 Lower the warning threshold to simulate a large repo
1246 Lower the warning threshold to simulate a large repo
1227
1247
1228 $ cat >> $HGRCPATH << EOF
1248 $ cat >> $HGRCPATH << EOF
1229 > [fsmonitor]
1249 > [fsmonitor]
1230 > warn_update_file_count = 2
1250 > warn_update_file_count = 2
1231 > warn_update_file_count_rust = 2
1251 > warn_update_file_count_rust = 2
1232 > EOF
1252 > EOF
1233
1253
1234 We should see a warning about fsmonitor not being enabled on supported platforms
1254 We should see a warning about fsmonitor not being enabled on supported platforms
1235
1255
1236 #if linuxormacos no-fsmonitor
1256 #if linuxormacos no-fsmonitor
1237 $ hg clone a nofsmonitor
1257 $ hg clone a nofsmonitor
1238 updating to bookmark @ on branch stable
1258 updating to bookmark @ on branch stable
1239 (warning: large working directory being used without fsmonitor enabled; enable fsmonitor to improve performance; see "hg help -e fsmonitor")
1259 (warning: large working directory being used without fsmonitor enabled; enable fsmonitor to improve performance; see "hg help -e fsmonitor")
1240 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
1260 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
1241 #else
1261 #else
1242 $ hg clone a nofsmonitor
1262 $ hg clone a nofsmonitor
1243 updating to bookmark @ on branch stable
1263 updating to bookmark @ on branch stable
1244 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
1264 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
1245 #endif
1265 #endif
1246
1266
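The remedy the warning points at is simply enabling the extension; a minimal
hgrc sketch:

  [extensions]
  fsmonitor =
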
1247 We should not see a warning about fsmonitor when it is enabled
1267 We should not see a warning about fsmonitor when it is enabled
1248
1268
1249 #if fsmonitor
1269 #if fsmonitor
1250 $ hg clone a fsmonitor-enabled
1270 $ hg clone a fsmonitor-enabled
1251 updating to bookmark @ on branch stable
1271 updating to bookmark @ on branch stable
1252 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
1272 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
1253 #endif
1273 #endif
1254
1274
1255 We can disable the fsmonitor warning
1275 We can disable the fsmonitor warning
1256
1276
1257 $ hg --config fsmonitor.warn_when_unused=false clone a fsmonitor-disable-warning
1277 $ hg --config fsmonitor.warn_when_unused=false clone a fsmonitor-disable-warning
1258 updating to bookmark @ on branch stable
1278 updating to bookmark @ on branch stable
1259 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
1279 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
1260
1280
1261 fsmonitor loaded but disabled in the config should still print the warning
1281 fsmonitor loaded but disabled in the config should still print the warning
1262
1282
1263 #if linuxormacos fsmonitor
1283 #if linuxormacos fsmonitor
1264 $ hg --config fsmonitor.mode=off clone a fsmonitor-mode-off
1284 $ hg --config fsmonitor.mode=off clone a fsmonitor-mode-off
1265 updating to bookmark @ on branch stable
1285 updating to bookmark @ on branch stable
1266 (warning: large working directory being used without fsmonitor enabled; enable fsmonitor to improve performance; see "hg help -e fsmonitor") (fsmonitor !)
1286 (warning: large working directory being used without fsmonitor enabled; enable fsmonitor to improve performance; see "hg help -e fsmonitor") (fsmonitor !)
1267 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
1287 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
1268 #endif
1288 #endif
1269
1289
1270 Warning not printed if working directory isn't empty
1290 Warning not printed if working directory isn't empty
1271
1291
1272 $ hg -q clone a fsmonitor-update
1292 $ hg -q clone a fsmonitor-update
1273 (warning: large working directory being used without fsmonitor enabled; enable fsmonitor to improve performance; see "hg help -e fsmonitor") (?)
1293 (warning: large working directory being used without fsmonitor enabled; enable fsmonitor to improve performance; see "hg help -e fsmonitor") (?)
1274 $ cd fsmonitor-update
1294 $ cd fsmonitor-update
1275 $ hg up acb14030fe0a
1295 $ hg up acb14030fe0a
1276 1 files updated, 0 files merged, 2 files removed, 0 files unresolved
1296 1 files updated, 0 files merged, 2 files removed, 0 files unresolved
1277 (leaving bookmark @)
1297 (leaving bookmark @)
1278 $ hg up cf0fe1914066
1298 $ hg up cf0fe1914066
1279 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1299 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1280
1300
1281 `hg update` from the null revision also prints the warning
1301 `hg update` from the null revision also prints the warning
1282
1302
1283 $ hg up null
1303 $ hg up null
1284 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
1304 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
1285
1305
1286 #if linuxormacos no-fsmonitor
1306 #if linuxormacos no-fsmonitor
1287 $ hg up cf0fe1914066
1307 $ hg up cf0fe1914066
1288 (warning: large working directory being used without fsmonitor enabled; enable fsmonitor to improve performance; see "hg help -e fsmonitor")
1308 (warning: large working directory being used without fsmonitor enabled; enable fsmonitor to improve performance; see "hg help -e fsmonitor")
1289 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
1309 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
1290 #else
1310 #else
1291 $ hg up cf0fe1914066
1311 $ hg up cf0fe1914066
1292 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
1312 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
1293 #endif
1313 #endif
1294
1314
1295 $ cd ..
1315 $ cd ..
1296
1316
@@ -1,852 +1,869 b''
1 #require no-reposimplestore no-chg
1 #require no-reposimplestore no-chg
2
2
3 Set up a server
3 Set up a server
4
4
5 $ hg init server
5 $ hg init server
6 $ cd server
6 $ cd server
7 $ cat >> .hg/hgrc << EOF
7 $ cat >> .hg/hgrc << EOF
8 > [extensions]
8 > [extensions]
9 > clonebundles =
9 > clonebundles =
10 > EOF
10 > EOF
11
11
12 $ touch foo
12 $ touch foo
13 $ hg -q commit -A -m 'add foo'
13 $ hg -q commit -A -m 'add foo'
14 $ touch bar
14 $ touch bar
15 $ hg -q commit -A -m 'add bar'
15 $ hg -q commit -A -m 'add bar'
16
16
17 $ hg serve -d -p $HGPORT --pid-file hg.pid --accesslog access.log
17 $ hg serve -d -p $HGPORT --pid-file hg.pid --accesslog access.log
18 $ cat hg.pid >> $DAEMON_PIDS
18 $ cat hg.pid >> $DAEMON_PIDS
19 $ cd ..
19 $ cd ..
20
20
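As background for the scenarios below: the clonebundles extension advertises
pre-generated bundles through a .hg/clonebundles.manifest file in the server
repository. Each line is a bundle URL optionally followed by key=value
attributes; a sketch of a typical entry (the URL is illustrative):

  https://example.com/full.hg BUNDLESPEC=gzip-v2
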
21 Missing manifest should not result in server lookup
21 Missing manifest should not result in server lookup
22
22
23 $ hg --verbose clone -U http://localhost:$HGPORT no-manifest
23 $ hg --verbose clone -U http://localhost:$HGPORT no-manifest
24 requesting all changes
24 requesting all changes
25 adding changesets
25 adding changesets
26 adding manifests
26 adding manifests
27 adding file changes
27 adding file changes
28 added 2 changesets with 2 changes to 2 files
28 added 2 changesets with 2 changes to 2 files
29 new changesets 53245c60e682:aaff8d2ffbbf
29 new changesets 53245c60e682:aaff8d2ffbbf
30 (sent 3 HTTP requests and * bytes; received * bytes in responses) (glob)
30 (sent 3 HTTP requests and * bytes; received * bytes in responses) (glob)
31
31
32 $ cat server/access.log
32 $ cat server/access.log
33 * - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
33 * - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
34 $LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
34 $LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
35 $LOCALIP - - [$LOGDATE$] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&$USUAL_BUNDLE_CAPS$&cg=1&common=0000000000000000000000000000000000000000&heads=aaff8d2ffbbf07a46dd1f05d8ae7877e3f56e2a2&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
35 $LOCALIP - - [$LOGDATE$] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&$USUAL_BUNDLE_CAPS$&cg=1&common=0000000000000000000000000000000000000000&heads=aaff8d2ffbbf07a46dd1f05d8ae7877e3f56e2a2&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
36
36
37 Empty manifest file results in retrieval
37 Empty manifest file results in retrieval
38 (the extension only checks if the manifest file exists)
38 (the extension only checks if the manifest file exists)
39
39
40 $ touch server/.hg/clonebundles.manifest
40 $ touch server/.hg/clonebundles.manifest
41 $ hg --verbose clone -U http://localhost:$HGPORT empty-manifest
41 $ hg --verbose clone -U http://localhost:$HGPORT empty-manifest
42 no clone bundles available on remote; falling back to regular clone
42 no clone bundles available on remote; falling back to regular clone
43 requesting all changes
43 requesting all changes
44 adding changesets
44 adding changesets
45 adding manifests
45 adding manifests
46 adding file changes
46 adding file changes
47 added 2 changesets with 2 changes to 2 files
47 added 2 changesets with 2 changes to 2 files
48 new changesets 53245c60e682:aaff8d2ffbbf
48 new changesets 53245c60e682:aaff8d2ffbbf
49 (sent 4 HTTP requests and * bytes; received * bytes in responses) (glob)
49 (sent 4 HTTP requests and * bytes; received * bytes in responses) (glob)
50
50
51 Manifest file with invalid URL aborts
51 Manifest file with invalid URL aborts
52
52
53 $ echo 'http://does.not.exist/bundle.hg' > server/.hg/clonebundles.manifest
53 $ echo 'http://does.not.exist/bundle.hg' > server/.hg/clonebundles.manifest
54 $ hg clone http://localhost:$HGPORT 404-url
54 $ hg clone http://localhost:$HGPORT 404-url
55 applying clone bundle from http://does.not.exist/bundle.hg
55 applying clone bundle from http://does.not.exist/bundle.hg
56 error fetching bundle: (.* not known|(\[Errno -?\d+] )?([Nn]o address associated with (host)?name|Temporary failure in name resolution|Name does not resolve)) (re) (no-windows !)
56 error fetching bundle: (.* not known|(\[Errno -?\d+] )?([Nn]o address associated with (host)?name|Temporary failure in name resolution|Name does not resolve)) (re) (no-windows !)
57 error fetching bundle: [Errno 1100*] getaddrinfo failed (glob) (windows !)
57 error fetching bundle: [Errno 1100*] getaddrinfo failed (glob) (windows !)
58 abort: error applying bundle
58 abort: error applying bundle
59 (if this error persists, consider contacting the server operator or disable clone bundles via "--config ui.clonebundles=false")
59 (if this error persists, consider contacting the server operator or disable clone bundles via "--config ui.clonebundles=false")
60 [255]
60 [255]
61
61
62 Manifest file with a URL with an unknown scheme skips the URL
62 Manifest file with a URL with an unknown scheme skips the URL
63 $ echo 'weirdscheme://does.not.exist/bundle.hg' > server/.hg/clonebundles.manifest
63 $ echo 'weirdscheme://does.not.exist/bundle.hg' > server/.hg/clonebundles.manifest
64 $ hg clone http://localhost:$HGPORT unknown-scheme
64 $ hg clone http://localhost:$HGPORT unknown-scheme
65 no compatible clone bundles available on server; falling back to regular clone
65 no compatible clone bundles available on server; falling back to regular clone
66 (you may want to report this to the server operator)
66 (you may want to report this to the server operator)
67 requesting all changes
67 requesting all changes
68 adding changesets
68 adding changesets
69 adding manifests
69 adding manifests
70 adding file changes
70 adding file changes
71 added 2 changesets with 2 changes to 2 files
71 added 2 changesets with 2 changes to 2 files
72 new changesets 53245c60e682:aaff8d2ffbbf
72 new changesets 53245c60e682:aaff8d2ffbbf
73 updating to branch default
73 updating to branch default
74 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
74 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
75
75
76 A bundle server that is not running results in an abort
76 A bundle server that is not running results in an abort
77
77
78 $ echo "http://localhost:$HGPORT1/bundle.hg" > server/.hg/clonebundles.manifest
78 $ echo "http://localhost:$HGPORT1/bundle.hg" > server/.hg/clonebundles.manifest
79 $ hg clone http://localhost:$HGPORT server-not-runner
79 $ hg clone http://localhost:$HGPORT server-not-runner
80 applying clone bundle from http://localhost:$HGPORT1/bundle.hg
80 applying clone bundle from http://localhost:$HGPORT1/bundle.hg
81 error fetching bundle: (.* refused.*|Protocol not supported|(.* )?\$EADDRNOTAVAIL\$|.* No route to host) (re)
81 error fetching bundle: (.* refused.*|Protocol not supported|(.* )?\$EADDRNOTAVAIL\$|.* No route to host) (re)
82 abort: error applying bundle
82 abort: error applying bundle
83 (if this error persists, consider contacting the server operator or disable clone bundles via "--config ui.clonebundles=false")
83 (if this error persists, consider contacting the server operator or disable clone bundles via "--config ui.clonebundles=false")
84 [255]
84 [255]
85
85
86 Server returns 404
86 Server returns 404
87
87
88 $ "$PYTHON" $TESTDIR/dumbhttp.py -p $HGPORT1 --pid http.pid
88 $ "$PYTHON" $TESTDIR/dumbhttp.py -p $HGPORT1 --pid http.pid
89 $ cat http.pid >> $DAEMON_PIDS
89 $ cat http.pid >> $DAEMON_PIDS
90 $ hg clone http://localhost:$HGPORT running-404
90 $ hg clone http://localhost:$HGPORT running-404
91 applying clone bundle from http://localhost:$HGPORT1/bundle.hg
91 applying clone bundle from http://localhost:$HGPORT1/bundle.hg
92 HTTP error fetching bundle: HTTP Error 404: File not found
92 HTTP error fetching bundle: HTTP Error 404: File not found
93 abort: error applying bundle
93 abort: error applying bundle
94 (if this error persists, consider contacting the server operator or disable clone bundles via "--config ui.clonebundles=false")
94 (if this error persists, consider contacting the server operator or disable clone bundles via "--config ui.clonebundles=false")
95 [255]
95 [255]
96
96
97 We can override failure to fall back to regular clone
97 We can override failure to fall back to regular clone
98
98
99 $ hg --config ui.clonebundlefallback=true clone -U http://localhost:$HGPORT 404-fallback
99 $ hg --config ui.clonebundlefallback=true clone -U http://localhost:$HGPORT 404-fallback
100 applying clone bundle from http://localhost:$HGPORT1/bundle.hg
100 applying clone bundle from http://localhost:$HGPORT1/bundle.hg
101 HTTP error fetching bundle: HTTP Error 404: File not found
101 HTTP error fetching bundle: HTTP Error 404: File not found
102 falling back to normal clone
102 falling back to normal clone
103 requesting all changes
103 requesting all changes
104 adding changesets
104 adding changesets
105 adding manifests
105 adding manifests
106 adding file changes
106 adding file changes
107 added 2 changesets with 2 changes to 2 files
107 added 2 changesets with 2 changes to 2 files
108 new changesets 53245c60e682:aaff8d2ffbbf
108 new changesets 53245c60e682:aaff8d2ffbbf
109
109
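The fallback behaviour used above can also be made permanent on the client; a
minimal hgrc sketch:

  [ui]
  clonebundlefallback = true
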
110 Bundle with partial content works
110 Bundle with partial content works
111
111
112 $ hg -R server bundle --type gzip-v1 --base null -r 53245c60e682 partial.hg
112 $ hg -R server bundle --type gzip-v1 --base null -r 53245c60e682 partial.hg
113 1 changesets found
113 1 changesets found
114
114
115 We verify exact bundle content as an extra check against accidental future
115 We verify exact bundle content as an extra check against accidental future
116 changes. If this output changes, we could break old clients.
116 changes. If this output changes, we could break old clients.
117
117
118 $ f --size --hexdump partial.hg
118 $ f --size --hexdump partial.hg
119 partial.hg: size=207
119 partial.hg: size=207
120 0000: 48 47 31 30 47 5a 78 9c 63 60 60 98 17 ac 12 93 |HG10GZx.c``.....|
120 0000: 48 47 31 30 47 5a 78 9c 63 60 60 98 17 ac 12 93 |HG10GZx.c``.....|
121 0010: f0 ac a9 23 45 70 cb bf 0d 5f 59 4e 4a 7f 79 21 |...#Ep..._YNJ.y!|
121 0010: f0 ac a9 23 45 70 cb bf 0d 5f 59 4e 4a 7f 79 21 |...#Ep..._YNJ.y!|
122 0020: 9b cc 40 24 20 a0 d7 ce 2c d1 38 25 cd 24 25 d5 |..@$ ...,.8%.$%.|
122 0020: 9b cc 40 24 20 a0 d7 ce 2c d1 38 25 cd 24 25 d5 |..@$ ...,.8%.$%.|
123 0030: d8 c2 22 cd 38 d9 24 cd 22 d5 c8 22 cd 24 cd 32 |..".8.$."..".$.2|
123 0030: d8 c2 22 cd 38 d9 24 cd 22 d5 c8 22 cd 24 cd 32 |..".8.$."..".$.2|
124 0040: d1 c2 d0 c4 c8 d2 32 d1 38 39 29 c9 34 cd d4 80 |......2.89).4...|
124 0040: d1 c2 d0 c4 c8 d2 32 d1 38 39 29 c9 34 cd d4 80 |......2.89).4...|
125 0050: ab 24 b5 b8 84 cb 40 c1 80 2b 2d 3f 9f 8b 2b 31 |.$....@..+-?..+1|
125 0050: ab 24 b5 b8 84 cb 40 c1 80 2b 2d 3f 9f 8b 2b 31 |.$....@..+-?..+1|
126 0060: 25 45 01 c8 80 9a d2 9b 65 fb e5 9e 45 bf 8d 7f |%E......e...E...|
126 0060: 25 45 01 c8 80 9a d2 9b 65 fb e5 9e 45 bf 8d 7f |%E......e...E...|
127 0070: 9f c6 97 9f 2b 44 34 67 d9 ec 8e 0f a0 92 0b 75 |....+D4g.......u|
127 0070: 9f c6 97 9f 2b 44 34 67 d9 ec 8e 0f a0 92 0b 75 |....+D4g.......u|
128 0080: 41 d6 24 59 18 a4 a4 9a a6 18 1a 5b 98 9b 5a 98 |A.$Y.......[..Z.|
128 0080: 41 d6 24 59 18 a4 a4 9a a6 18 1a 5b 98 9b 5a 98 |A.$Y.......[..Z.|
129 0090: 9a 18 26 9b a6 19 98 1a 99 99 26 a6 18 9a 98 24 |..&.......&....$|
129 0090: 9a 18 26 9b a6 19 98 1a 99 99 26 a6 18 9a 98 24 |..&.......&....$|
130 00a0: 26 59 a6 25 5a 98 a5 18 a6 24 71 41 35 b1 43 dc |&Y.%Z....$qA5.C.|
130 00a0: 26 59 a6 25 5a 98 a5 18 a6 24 71 41 35 b1 43 dc |&Y.%Z....$qA5.C.|
131 00b0: 16 b2 83 f7 e9 45 8b d2 56 c7 a3 1f 82 52 d7 8a |.....E..V....R..|
131 00b0: 16 b2 83 f7 e9 45 8b d2 56 c7 a3 1f 82 52 d7 8a |.....E..V....R..|
132 00c0: 78 ed fc d5 76 f1 36 35 dc 05 00 36 ed 5e c7 |x...v.65...6.^.|
132 00c0: 78 ed fc d5 76 f1 36 35 dc 05 00 36 ed 5e c7 |x...v.65...6.^.|
133
133
134 $ echo "http://localhost:$HGPORT1/partial.hg" > server/.hg/clonebundles.manifest
134 $ echo "http://localhost:$HGPORT1/partial.hg" > server/.hg/clonebundles.manifest
135 $ hg clone -U http://localhost:$HGPORT partial-bundle
135 $ hg clone -U http://localhost:$HGPORT partial-bundle
136 applying clone bundle from http://localhost:$HGPORT1/partial.hg
136 applying clone bundle from http://localhost:$HGPORT1/partial.hg
137 adding changesets
137 adding changesets
138 adding manifests
138 adding manifests
139 adding file changes
139 adding file changes
140 added 1 changesets with 1 changes to 1 files
140 added 1 changesets with 1 changes to 1 files
141 finished applying clone bundle
141 finished applying clone bundle
142 searching for changes
142 searching for changes
143 adding changesets
143 adding changesets
144 adding manifests
144 adding manifests
145 adding file changes
145 adding file changes
146 added 1 changesets with 1 changes to 1 files
146 added 1 changesets with 1 changes to 1 files
147 new changesets aaff8d2ffbbf
147 new changesets aaff8d2ffbbf
148 1 local changesets published
148 1 local changesets published
149
149
150 Incremental pull doesn't fetch bundle
150 Incremental pull doesn't fetch bundle
151
151
152 $ hg clone -r 53245c60e682 -U http://localhost:$HGPORT partial-clone
152 $ hg clone -r 53245c60e682 -U http://localhost:$HGPORT partial-clone
153 adding changesets
153 adding changesets
154 adding manifests
154 adding manifests
155 adding file changes
155 adding file changes
156 added 1 changesets with 1 changes to 1 files
156 added 1 changesets with 1 changes to 1 files
157 new changesets 53245c60e682
157 new changesets 53245c60e682
158
158
159 $ cd partial-clone
159 $ cd partial-clone
160 $ hg pull
160 $ hg pull
161 pulling from http://localhost:$HGPORT/
161 pulling from http://localhost:$HGPORT/
162 searching for changes
162 searching for changes
163 adding changesets
163 adding changesets
164 adding manifests
164 adding manifests
165 adding file changes
165 adding file changes
166 added 1 changesets with 1 changes to 1 files
166 added 1 changesets with 1 changes to 1 files
167 new changesets aaff8d2ffbbf
167 new changesets aaff8d2ffbbf
168 (run 'hg update' to get a working copy)
168 (run 'hg update' to get a working copy)
169 $ cd ..
169 $ cd ..
170
170
171 Bundle with full content works
171 Bundle with full content works
172
172
173 $ hg -R server bundle --type gzip-v2 --base null -r tip full.hg
173 $ hg -R server bundle --type gzip-v2 --base null -r tip full.hg
174 2 changesets found
174 2 changesets found
175
175
176 Again, we perform an extra check against bundle content changes. If this content
176 Again, we perform an extra check against bundle content changes. If this content
177 changes, clone bundles produced by new Mercurial versions may not be readable
177 changes, clone bundles produced by new Mercurial versions may not be readable
178 by old clients.
178 by old clients.
179
179
180 $ f --size --hexdump full.hg
180 $ f --size --hexdump full.hg
181 full.hg: size=442
181 full.hg: size=442
182 0000: 48 47 32 30 00 00 00 0e 43 6f 6d 70 72 65 73 73 |HG20....Compress|
182 0000: 48 47 32 30 00 00 00 0e 43 6f 6d 70 72 65 73 73 |HG20....Compress|
183 0010: 69 6f 6e 3d 47 5a 78 9c 63 60 60 d0 e4 76 f6 70 |ion=GZx.c``..v.p|
183 0010: 69 6f 6e 3d 47 5a 78 9c 63 60 60 d0 e4 76 f6 70 |ion=GZx.c``..v.p|
184 0020: f4 73 77 75 0f f2 0f 0d 60 00 02 46 46 76 26 4e |.swu....`..FFv&N|
184 0020: f4 73 77 75 0f f2 0f 0d 60 00 02 46 46 76 26 4e |.swu....`..FFv&N|
185 0030: c6 b2 d4 a2 e2 cc fc 3c 03 a3 bc a4 e4 8c c4 bc |.......<........|
185 0030: c6 b2 d4 a2 e2 cc fc 3c 03 a3 bc a4 e4 8c c4 bc |.......<........|
186 0040: f4 d4 62 23 06 06 e6 19 40 f9 4d c1 2a 31 09 cf |..b#....@.M.*1..|
186 0040: f4 d4 62 23 06 06 e6 19 40 f9 4d c1 2a 31 09 cf |..b#....@.M.*1..|
187 0050: 9a 3a 52 04 b7 fc db f0 95 e5 a4 f4 97 17 b2 c9 |.:R.............|
187 0050: 9a 3a 52 04 b7 fc db f0 95 e5 a4 f4 97 17 b2 c9 |.:R.............|
188 0060: 0c 14 00 02 e6 d9 99 25 1a a7 a4 99 a4 a4 1a 5b |.......%.......[|
188 0060: 0c 14 00 02 e6 d9 99 25 1a a7 a4 99 a4 a4 1a 5b |.......%.......[|
189 0070: 58 a4 19 27 9b a4 59 a4 1a 59 a4 99 a4 59 26 5a |X..'..Y..Y...Y&Z|
189 0070: 58 a4 19 27 9b a4 59 a4 1a 59 a4 99 a4 59 26 5a |X..'..Y..Y...Y&Z|
190 0080: 18 9a 18 59 5a 26 1a 27 27 25 99 a6 99 1a 70 95 |...YZ&.''%....p.|
190 0080: 18 9a 18 59 5a 26 1a 27 27 25 99 a6 99 1a 70 95 |...YZ&.''%....p.|
191 0090: a4 16 97 70 19 28 18 70 a5 e5 e7 73 71 25 a6 a4 |...p.(.p...sq%..|
191 0090: a4 16 97 70 19 28 18 70 a5 e5 e7 73 71 25 a6 a4 |...p.(.p...sq%..|
192 00a0: 28 00 19 20 17 af fa df ab ff 7b 3f fb 92 dc 8b |(.. ......{?....|
192 00a0: 28 00 19 20 17 af fa df ab ff 7b 3f fb 92 dc 8b |(.. ......{?....|
193 00b0: 1f 62 bb 9e b7 d7 d9 87 3d 5a 44 89 2f b0 99 87 |.b......=ZD./...|
193 00b0: 1f 62 bb 9e b7 d7 d9 87 3d 5a 44 89 2f b0 99 87 |.b......=ZD./...|
194 00c0: ec e2 54 63 43 e3 b4 64 43 73 23 33 43 53 0b 63 |..TcC..dCs#3CS.c|
194 00c0: ec e2 54 63 43 e3 b4 64 43 73 23 33 43 53 0b 63 |..TcC..dCs#3CS.c|
195 00d0: d3 14 23 03 a0 fb 2c 2c 0c d3 80 1e 30 49 49 b1 |..#...,,....0II.|
195 00d0: d3 14 23 03 a0 fb 2c 2c 0c d3 80 1e 30 49 49 b1 |..#...,,....0II.|
196 00e0: 4c 4a 32 48 33 30 b0 34 42 b8 38 29 b1 08 e2 62 |LJ2H30.4B.8)...b|
196 00e0: 4c 4a 32 48 33 30 b0 34 42 b8 38 29 b1 08 e2 62 |LJ2H30.4B.8)...b|
197 00f0: 20 03 6a ca c2 2c db 2f f7 2c fa 6d fc fb 34 be | .j..,./.,.m..4.|
197 00f0: 20 03 6a ca c2 2c db 2f f7 2c fa 6d fc fb 34 be | .j..,./.,.m..4.|
198 0100: fc 5c 21 a2 39 cb 66 77 7c 00 0d c3 59 17 14 58 |.\!.9.fw|...Y..X|
198 0100: fc 5c 21 a2 39 cb 66 77 7c 00 0d c3 59 17 14 58 |.\!.9.fw|...Y..X|
199 0110: 49 16 06 29 a9 a6 29 86 c6 16 e6 a6 16 a6 26 86 |I..)..).......&.|
199 0110: 49 16 06 29 a9 a6 29 86 c6 16 e6 a6 16 a6 26 86 |I..)..).......&.|
200 0120: c9 a6 69 06 a6 46 66 a6 89 29 86 26 26 89 49 96 |..i..Ff..).&&.I.|
200 0120: c9 a6 69 06 a6 46 66 a6 89 29 86 26 26 89 49 96 |..i..Ff..).&&.I.|
201 0130: 69 89 16 66 29 86 29 49 5c 20 07 3e 16 fe 23 ae |i..f).)I\ .>..#.|
201 0130: 69 89 16 66 29 86 29 49 5c 20 07 3e 16 fe 23 ae |i..f).)I\ .>..#.|
202 0140: 26 da 1c ab 10 1f d1 f8 e3 b3 ef cd dd fc 0c 93 |&...............|
202 0140: 26 da 1c ab 10 1f d1 f8 e3 b3 ef cd dd fc 0c 93 |&...............|
203 0150: 88 75 34 36 75 04 82 55 17 14 36 a4 38 10 04 d8 |.u46u..U..6.8...|
203 0150: 88 75 34 36 75 04 82 55 17 14 36 a4 38 10 04 d8 |.u46u..U..6.8...|
204 0160: 21 01 9a b1 83 f7 e9 45 8b d2 56 c7 a3 1f 82 52 |!......E..V....R|
204 0160: 21 01 9a b1 83 f7 e9 45 8b d2 56 c7 a3 1f 82 52 |!......E..V....R|
205 0170: d7 8a 78 ed fc d5 76 f1 36 25 81 89 c7 ad ec 90 |..x...v.6%......|
205 0170: d7 8a 78 ed fc d5 76 f1 36 25 81 89 c7 ad ec 90 |..x...v.6%......|
206 0180: 54 47 75 2b 89 48 b1 b2 62 c9 89 c9 19 a9 56 45 |TGu+.H..b.....VE|
206 0180: 54 47 75 2b 89 48 b1 b2 62 c9 89 c9 19 a9 56 45 |TGu+.H..b.....VE|
207 0190: a9 65 ba 49 45 89 79 c9 19 ba 60 01 a0 14 23 58 |.e.IE.y...`...#X|
207 0190: a9 65 ba 49 45 89 79 c9 19 ba 60 01 a0 14 23 58 |.e.IE.y...`...#X|
208 01a0: 81 35 c8 7d 40 cc 04 e2 a4 a4 a6 25 96 e6 94 60 |.5.}@......%...`|
208 01a0: 81 35 c8 7d 40 cc 04 e2 a4 a4 a6 25 96 e6 94 60 |.5.}@......%...`|
209 01b0: 33 17 5f 54 00 00 d3 1b 0d 4c |3._T.....L|
209 01b0: 33 17 5f 54 00 00 d3 1b 0d 4c |3._T.....L|
210
210
211 $ echo "http://localhost:$HGPORT1/full.hg" > server/.hg/clonebundles.manifest
211 $ echo "http://localhost:$HGPORT1/full.hg" > server/.hg/clonebundles.manifest
212 $ hg clone -U http://localhost:$HGPORT full-bundle
212 $ hg clone -U http://localhost:$HGPORT full-bundle
213 applying clone bundle from http://localhost:$HGPORT1/full.hg
213 applying clone bundle from http://localhost:$HGPORT1/full.hg
214 adding changesets
214 adding changesets
215 adding manifests
215 adding manifests
216 adding file changes
216 adding file changes
217 added 2 changesets with 2 changes to 2 files
217 added 2 changesets with 2 changes to 2 files
218 finished applying clone bundle
218 finished applying clone bundle
219 searching for changes
219 searching for changes
220 no changes found
220 no changes found
221 2 local changesets published
221 2 local changesets published
222
222
223 Feature works over SSH
223 Feature works over SSH
224
224
225 $ hg clone -U ssh://user@dummy/server ssh-full-clone
225 $ hg clone -U ssh://user@dummy/server ssh-full-clone
226 applying clone bundle from http://localhost:$HGPORT1/full.hg
226 applying clone bundle from http://localhost:$HGPORT1/full.hg
227 adding changesets
227 adding changesets
228 adding manifests
228 adding manifests
229 adding file changes
229 adding file changes
230 added 2 changesets with 2 changes to 2 files
230 added 2 changesets with 2 changes to 2 files
231 finished applying clone bundle
231 finished applying clone bundle
232 searching for changes
232 searching for changes
233 no changes found
233 no changes found
234 2 local changesets published
234 2 local changesets published
235
235
236 Inline bundle
236 Inline bundle
237 =============
237 =============
238
238
239 Checking bundle retrieved over the wireprotocol
239 Checking bundle retrieved over the wireprotocol
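
A peer-bundle-cache:// entry names a file kept under the serving repository's .hg/bundle-cache/ directory, which is streamed to the client over the wire protocol instead of being fetched from a separate HTTP server. The following Python sketch only illustrates the path resolution implied by the setup below; the function name and validation are illustrative, not Mercurial's internal API.

    # Illustrative sketch: map a peer-bundle-cache:// entry to the file the
    # server would stream from its .hg/bundle-cache/ directory.
    import os

    SCHEME = "peer-bundle-cache://"

    def resolve_inline_bundle(repo_root, url):
        assert url.startswith(SCHEME)
        name = url[len(SCHEME):]
        # keep the lookup confined to the cache directory
        if "/" in name or name.startswith("."):
            raise ValueError("invalid inline bundle name: %r" % name)
        return os.path.join(repo_root, ".hg", "bundle-cache", name)

    print(resolve_inline_bundle("server", "peer-bundle-cache://full.hg"))
    # -> server/.hg/bundle-cache/full.hg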
240
240
241 Feature works over SSH with inline bundle
241 Feature works over SSH with inline bundle
242 -----------------------------------------
242 -----------------------------------------
243
243
244 $ mkdir server/.hg/bundle-cache/
244 $ mkdir server/.hg/bundle-cache/
245 $ cp full.hg server/.hg/bundle-cache/
245 $ cp full.hg server/.hg/bundle-cache/
246 $ echo "peer-bundle-cache://full.hg" > server/.hg/clonebundles.manifest
246 $ echo "peer-bundle-cache://full.hg" > server/.hg/clonebundles.manifest
247 $ hg clone -U ssh://user@dummy/server ssh-inline-clone
247 $ hg clone -U ssh://user@dummy/server ssh-inline-clone
248 applying clone bundle from peer-bundle-cache://full.hg
248 applying clone bundle from peer-bundle-cache://full.hg
249 adding changesets
249 adding changesets
250 adding manifests
250 adding manifests
251 adding file changes
251 adding file changes
252 added 2 changesets with 2 changes to 2 files
252 added 2 changesets with 2 changes to 2 files
253 finished applying clone bundle
253 finished applying clone bundle
254 searching for changes
254 searching for changes
255 no changes found
255 no changes found
256 2 local changesets published
256 2 local changesets published
257
257
258 HTTP Support
258 HTTP Support
259 -------------
259 -------------
260
260
261 $ hg clone -U http://localhost:$HGPORT http-inline-clone
261 $ hg clone -U http://localhost:$HGPORT http-inline-clone
262 applying clone bundle from peer-bundle-cache://full.hg
262 applying clone bundle from peer-bundle-cache://full.hg
263 adding changesets
263 adding changesets
264 adding manifests
264 adding manifests
265 adding file changes
265 adding file changes
266 added 2 changesets with 2 changes to 2 files
266 added 2 changesets with 2 changes to 2 files
267 finished applying clone bundle
267 finished applying clone bundle
268 searching for changes
268 searching for changes
269 no changes found
269 no changes found
270 2 local changesets published
270 2 local changesets published
271
271
272
272
273 Check local behavior
273 Check local behavior
274 --------------------
274 --------------------
275
275
276 We don't use the clone bundle, but we do not crash either.
276 We don't use the clone bundle, but we do not crash either.
277
277
278 $ hg clone -U ./server local-inline-clone-default
278 $ hg clone -U ./server local-inline-clone-default
279 $ hg clone -U ./server local-inline-clone-pull --pull
279 $ hg clone -U ./server local-inline-clone-pull --pull
280 requesting all changes
280 requesting all changes
281 adding changesets
281 adding changesets
282 adding manifests
282 adding manifests
283 adding file changes
283 adding file changes
284 added 2 changesets with 2 changes to 2 files
284 added 2 changesets with 2 changes to 2 files
285 new changesets 53245c60e682:aaff8d2ffbbf
285 new changesets 53245c60e682:aaff8d2ffbbf
286
286
287 Pre-transmit Hook
287 Pre-transmit Hook
288 -----------------
288 -----------------
289
289
290 Hooks work with inline bundle
290 Hooks work with inline bundle
291
291
292 $ cp server/.hg/hgrc server/.hg/hgrc-beforeinlinehooks
292 $ cp server/.hg/hgrc server/.hg/hgrc-beforeinlinehooks
293 $ echo "[hooks]" >> server/.hg/hgrc
293 $ echo "[hooks]" >> server/.hg/hgrc
294 $ echo "pretransmit-inline-clone-bundle=echo foo" >> server/.hg/hgrc
294 $ echo "pretransmit-inline-clone-bundle=echo foo" >> server/.hg/hgrc
295 $ hg clone -U ssh://user@dummy/server ssh-inline-clone-hook
295 $ hg clone -U ssh://user@dummy/server ssh-inline-clone-hook
296 applying clone bundle from peer-bundle-cache://full.hg
296 applying clone bundle from peer-bundle-cache://full.hg
297 remote: foo
297 remote: foo
298 adding changesets
298 adding changesets
299 adding manifests
299 adding manifests
300 adding file changes
300 adding file changes
301 added 2 changesets with 2 changes to 2 files
301 added 2 changesets with 2 changes to 2 files
302 finished applying clone bundle
302 finished applying clone bundle
303 searching for changes
303 searching for changes
304 no changes found
304 no changes found
305 2 local changesets published
305 2 local changesets published
306
306
307 Hooks can make an inline bundle fail
307 Hooks can make an inline bundle fail
308
308
309 $ cp server/.hg/hgrc-beforeinlinehooks server/.hg/hgrc
309 $ cp server/.hg/hgrc-beforeinlinehooks server/.hg/hgrc
310 $ echo "[hooks]" >> server/.hg/hgrc
310 $ echo "[hooks]" >> server/.hg/hgrc
311 $ echo "pretransmit-inline-clone-bundle=echo bar && false" >> server/.hg/hgrc
311 $ echo "pretransmit-inline-clone-bundle=echo bar && false" >> server/.hg/hgrc
312 $ hg clone -U ssh://user@dummy/server ssh-inline-clone-hook-fail
312 $ hg clone -U ssh://user@dummy/server ssh-inline-clone-hook-fail
313 applying clone bundle from peer-bundle-cache://full.hg
313 applying clone bundle from peer-bundle-cache://full.hg
314 remote: bar
314 remote: bar
315 remote: abort: pretransmit-inline-clone-bundle hook exited with status 1
315 remote: abort: pretransmit-inline-clone-bundle hook exited with status 1
316 abort: stream ended unexpectedly (got 0 bytes, expected 1)
316 abort: stream ended unexpectedly (got 0 bytes, expected 1)
317 [255]
317 [255]
318 $ cp server/.hg/hgrc-beforeinlinehooks server/.hg/hgrc
318 $ cp server/.hg/hgrc-beforeinlinehooks server/.hg/hgrc
319
319
320 Other tests
320 Other tests
321 ===========
321 ===========
322
322
323 Entry with unknown BUNDLESPEC is filtered and not used
323 Entry with unknown BUNDLESPEC is filtered and not used
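
The filtering can be pictured as the client parsing the manifest into URL-plus-attribute pairs and dropping every entry whose BUNDLESPEC names a bundle type it cannot apply. The Python sketch below is a simplification with an example set of known specs; it is not the code Mercurial actually uses.

    # Simplified sketch of manifest filtering: keep entries with no BUNDLESPEC
    # or with a BUNDLESPEC whose base name this client knows how to apply.
    KNOWN_SPECS = {"gzip-v2", "bzip2-v2", "zstd-v2", "none-v2", "none-packed1"}  # example set

    def parse_manifest(text):
        entries = []
        for line in text.splitlines():
            if not line.strip():
                continue
            url, *fields = line.split()
            attrs = dict(field.split("=", 1) for field in fields)
            entries.append((url, attrs))
        return entries

    def usable(entries):
        keep = []
        for url, attrs in entries:
            spec = attrs.get("BUNDLESPEC")
            if spec is None or spec.split(";")[0] in KNOWN_SPECS:
                keep.append((url, attrs))
        return keep

    manifest = "http://bad.entry1 BUNDLESPEC=UNKNOWN\nhttp://good.entry BUNDLESPEC=gzip-v2\n"
    print([url for url, _ in usable(parse_manifest(manifest))])  # only the gzip-v2 entry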
324
324
325 $ cat > server/.hg/clonebundles.manifest << EOF
325 $ cat > server/.hg/clonebundles.manifest << EOF
326 > http://bad.entry1 BUNDLESPEC=UNKNOWN
326 > http://bad.entry1 BUNDLESPEC=UNKNOWN
327 > http://bad.entry2 BUNDLESPEC=xz-v1
327 > http://bad.entry2 BUNDLESPEC=xz-v1
328 > http://bad.entry3 BUNDLESPEC=none-v100
328 > http://bad.entry3 BUNDLESPEC=none-v100
329 > http://localhost:$HGPORT1/full.hg BUNDLESPEC=gzip-v2
329 > http://localhost:$HGPORT1/full.hg BUNDLESPEC=gzip-v2
330 > EOF
330 > EOF
331
331
332 $ hg clone -U http://localhost:$HGPORT filter-unknown-type
332 $ hg clone -U http://localhost:$HGPORT filter-unknown-type
333 applying clone bundle from http://localhost:$HGPORT1/full.hg
333 applying clone bundle from http://localhost:$HGPORT1/full.hg
334 adding changesets
334 adding changesets
335 adding manifests
335 adding manifests
336 adding file changes
336 adding file changes
337 added 2 changesets with 2 changes to 2 files
337 added 2 changesets with 2 changes to 2 files
338 finished applying clone bundle
338 finished applying clone bundle
339 searching for changes
339 searching for changes
340 no changes found
340 no changes found
341 2 local changesets published
341 2 local changesets published
342
342
343 Automatic fallback when all entries are filtered
343 Automatic fallback when all entries are filtered
344
344
345 $ cat > server/.hg/clonebundles.manifest << EOF
345 $ cat > server/.hg/clonebundles.manifest << EOF
346 > http://bad.entry BUNDLESPEC=UNKNOWN
346 > http://bad.entry BUNDLESPEC=UNKNOWN
347 > EOF
347 > EOF
348
348
349 $ hg clone -U http://localhost:$HGPORT filter-all
349 $ hg clone -U http://localhost:$HGPORT filter-all
350 no compatible clone bundles available on server; falling back to regular clone
350 no compatible clone bundles available on server; falling back to regular clone
351 (you may want to report this to the server operator)
351 (you may want to report this to the server operator)
352 requesting all changes
352 requesting all changes
353 adding changesets
353 adding changesets
354 adding manifests
354 adding manifests
355 adding file changes
355 adding file changes
356 added 2 changesets with 2 changes to 2 files
356 added 2 changesets with 2 changes to 2 files
357 new changesets 53245c60e682:aaff8d2ffbbf
357 new changesets 53245c60e682:aaff8d2ffbbf
358
358
359 We require a Python version that supports SNI. Therefore, URLs requiring SNI
359 We require a Python version that supports SNI. Therefore, URLs requiring SNI
360 are not filtered.
360 are not filtered.
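
A rough sketch of how such a REQUIRESNI attribute could be honoured; the attribute handling shown is illustrative, and only the ssl.HAS_SNI flag is a real Python API:

    # Sketch: a REQUIRESNI=true entry would only be dropped on a client whose
    # TLS stack lacks SNI support; modern Python always reports support.
    import ssl

    def keep_entry(attrs):
        if attrs.get("REQUIRESNI", "").lower() == "true" and not ssl.HAS_SNI:
            return False
        return True

    print(keep_entry({"REQUIRESNI": "true"}))  # True on any SNI-capable Python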
361
361
362 $ cp full.hg sni.hg
362 $ cp full.hg sni.hg
363 $ cat > server/.hg/clonebundles.manifest << EOF
363 $ cat > server/.hg/clonebundles.manifest << EOF
364 > http://localhost:$HGPORT1/sni.hg REQUIRESNI=true
364 > http://localhost:$HGPORT1/sni.hg REQUIRESNI=true
365 > http://localhost:$HGPORT1/full.hg
365 > http://localhost:$HGPORT1/full.hg
366 > EOF
366 > EOF
367
367
368 $ hg clone -U http://localhost:$HGPORT sni-supported
368 $ hg clone -U http://localhost:$HGPORT sni-supported
369 applying clone bundle from http://localhost:$HGPORT1/sni.hg
369 applying clone bundle from http://localhost:$HGPORT1/sni.hg
370 adding changesets
370 adding changesets
371 adding manifests
371 adding manifests
372 adding file changes
372 adding file changes
373 added 2 changesets with 2 changes to 2 files
373 added 2 changesets with 2 changes to 2 files
374 finished applying clone bundle
374 finished applying clone bundle
375 searching for changes
375 searching for changes
376 no changes found
376 no changes found
377 2 local changesets published
377 2 local changesets published
378
378
379 Stream clone bundles are supported
379 Stream clone bundles are supported
380
380
381 $ hg -R server debugcreatestreamclonebundle packed.hg
381 $ hg -R server debugcreatestreamclonebundle packed.hg
382 writing 613 bytes for 4 files
382 writing 613 bytes for 5 files (no-rust !)
383 writing 739 bytes for 7 files (rust !)
383 bundle requirements: generaldelta, revlogv1, sparserevlog (no-rust no-zstd !)
384 bundle requirements: generaldelta, revlogv1, sparserevlog (no-rust no-zstd !)
384 bundle requirements: generaldelta, revlog-compression-zstd, revlogv1, sparserevlog (no-rust zstd !)
385 bundle requirements: generaldelta, revlog-compression-zstd, revlogv1, sparserevlog (no-rust zstd !)
385 bundle requirements: generaldelta, revlog-compression-zstd, revlogv1, sparserevlog (rust !)
386 bundle requirements: generaldelta, revlog-compression-zstd, revlogv1, sparserevlog (rust !)
386
387
387 No bundle spec should work
388 No bundle spec should work
388
389
389 $ cat > server/.hg/clonebundles.manifest << EOF
390 $ cat > server/.hg/clonebundles.manifest << EOF
390 > http://localhost:$HGPORT1/packed.hg
391 > http://localhost:$HGPORT1/packed.hg
391 > EOF
392 > EOF
392
393
393 $ hg clone -U http://localhost:$HGPORT stream-clone-no-spec
394 $ hg clone -U http://localhost:$HGPORT stream-clone-no-spec
394 applying clone bundle from http://localhost:$HGPORT1/packed.hg
395 applying clone bundle from http://localhost:$HGPORT1/packed.hg
395 4 files to transfer, 613 bytes of data
396 5 files to transfer, 613 bytes of data (no-rust !)
396 transferred 613 bytes in *.* seconds (*) (glob)
397 transferred 613 bytes in *.* seconds (*) (glob) (no-rust !)
398 7 files to transfer, 739 bytes of data (rust !)
399 transferred 739 bytes in *.* seconds (*) (glob) (rust !)
397 finished applying clone bundle
400 finished applying clone bundle
398 searching for changes
401 searching for changes
399 no changes found
402 no changes found
400
403
401 Bundle spec without parameters should work
404 Bundle spec without parameters should work
402
405
403 $ cat > server/.hg/clonebundles.manifest << EOF
406 $ cat > server/.hg/clonebundles.manifest << EOF
404 > http://localhost:$HGPORT1/packed.hg BUNDLESPEC=none-packed1
407 > http://localhost:$HGPORT1/packed.hg BUNDLESPEC=none-packed1
405 > EOF
408 > EOF
406
409
407 $ hg clone -U http://localhost:$HGPORT stream-clone-vanilla-spec
410 $ hg clone -U http://localhost:$HGPORT stream-clone-vanilla-spec
408 applying clone bundle from http://localhost:$HGPORT1/packed.hg
411 applying clone bundle from http://localhost:$HGPORT1/packed.hg
409 4 files to transfer, 613 bytes of data
412 5 files to transfer, 613 bytes of data (no-rust !)
410 transferred 613 bytes in *.* seconds (*) (glob)
413 transferred 613 bytes in *.* seconds (*) (glob) (no-rust !)
414 7 files to transfer, 739 bytes of data (rust !)
415 transferred 739 bytes in *.* seconds (*) (glob) (rust !)
411 finished applying clone bundle
416 finished applying clone bundle
412 searching for changes
417 searching for changes
413 no changes found
418 no changes found
414
419
415 Bundle spec with format requirements should work
420 Bundle spec with format requirements should work
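
The %3D in such an entry is a percent-encoded '=', so a BUNDLESPEC value carries a base spec followed by ';'-separated, percent-encoded parameters. A minimal decoding sketch, for illustration only:

    # Minimal sketch: decode "none-packed1;requirements%3Drevlogv1" into a base
    # spec plus a parameter dictionary.
    from urllib.parse import unquote

    def parse_bundlespec(value):
        base, *raw_params = value.split(";")
        params = {}
        for raw in raw_params:
            key, _, val = unquote(raw).partition("=")
            params[key] = val
        return base, params

    print(parse_bundlespec("none-packed1;requirements%3Drevlogv1"))
    # -> ('none-packed1', {'requirements': 'revlogv1'})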
416
421
417 $ cat > server/.hg/clonebundles.manifest << EOF
422 $ cat > server/.hg/clonebundles.manifest << EOF
418 > http://localhost:$HGPORT1/packed.hg BUNDLESPEC=none-packed1;requirements%3Drevlogv1
423 > http://localhost:$HGPORT1/packed.hg BUNDLESPEC=none-packed1;requirements%3Drevlogv1
419 > EOF
424 > EOF
420
425
421 $ hg clone -U http://localhost:$HGPORT stream-clone-supported-requirements
426 $ hg clone -U http://localhost:$HGPORT stream-clone-supported-requirements
422 applying clone bundle from http://localhost:$HGPORT1/packed.hg
427 applying clone bundle from http://localhost:$HGPORT1/packed.hg
423 4 files to transfer, 613 bytes of data
428 5 files to transfer, 613 bytes of data (no-rust !)
424 transferred 613 bytes in *.* seconds (*) (glob)
429 transferred 613 bytes in *.* seconds (*) (glob) (no-rust !)
430 7 files to transfer, 739 bytes of data (rust !)
431 transferred 739 bytes in *.* seconds (*) (glob) (rust !)
425 finished applying clone bundle
432 finished applying clone bundle
426 searching for changes
433 searching for changes
427 no changes found
434 no changes found
428
435
429 Stream bundle spec with unknown requirements should be filtered out
436 Stream bundle spec with unknown requirements should be filtered out
430
437
431 $ cat > server/.hg/clonebundles.manifest << EOF
438 $ cat > server/.hg/clonebundles.manifest << EOF
432 > http://localhost:$HGPORT1/packed.hg BUNDLESPEC=none-packed1;requirements%3Drevlogv42
439 > http://localhost:$HGPORT1/packed.hg BUNDLESPEC=none-packed1;requirements%3Drevlogv42
433 > EOF
440 > EOF
434
441
435 $ hg clone -U http://localhost:$HGPORT stream-clone-unsupported-requirements
442 $ hg clone -U http://localhost:$HGPORT stream-clone-unsupported-requirements
436 no compatible clone bundles available on server; falling back to regular clone
443 no compatible clone bundles available on server; falling back to regular clone
437 (you may want to report this to the server operator)
444 (you may want to report this to the server operator)
438 requesting all changes
445 requesting all changes
439 adding changesets
446 adding changesets
440 adding manifests
447 adding manifests
441 adding file changes
448 adding file changes
442 added 2 changesets with 2 changes to 2 files
449 added 2 changesets with 2 changes to 2 files
443 new changesets 53245c60e682:aaff8d2ffbbf
450 new changesets 53245c60e682:aaff8d2ffbbf
444
451
445 Set up manifest for testing preferences
452 Set up manifest for testing preferences
446 (Remember, the TYPE does not have to match reality - the URL is
453 (Remember, the TYPE does not have to match reality - the URL is
447 important)
454 important)
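
The preference handling tested below behaves as if entries were sorted by a lexicographic key built from the ui.clonebundleprefers list: for each KEY=VALUE preference, matching entries rank ahead of non-matching ones, and manifest order breaks remaining ties. The Python sketch below illustrates only that idea; attributes that Mercurial derives from the BUNDLESPEC (such as COMPRESSION) are left out.

    # Rough sketch: order manifest entries by a list of "KEY=VALUE" preferences.
    # Entries matching an earlier preference sort first; sorted() is stable, so
    # manifest order decides between entries with identical match patterns.
    def sort_by_prefs(entries, prefs):
        pairs = [pref.split("=", 1) for pref in prefs]

        def key(entry):
            _url, attrs = entry
            return tuple(0 if attrs.get(k) == v else 1 for k, v in pairs)

        return sorted(entries, key=key)

    entries = [
        ("gz-a.hg", {"BUNDLESPEC": "gzip-v2", "extra": "a"}),
        ("bz2-a.hg", {"BUNDLESPEC": "bzip2-v2", "extra": "a"}),
        ("gz-b.hg", {"BUNDLESPEC": "gzip-v2", "extra": "b"}),
        ("bz2-b.hg", {"BUNDLESPEC": "bzip2-v2", "extra": "b"}),
    ]
    print(sort_by_prefs(entries, ["extra=b", "BUNDLESPEC=bzip2-v2"])[0][0])  # bz2-b.hg
    print(sort_by_prefs(entries, ["foo=bar"])[0][0])                         # gz-a.hg (first entry)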
448
455
449 $ cp full.hg gz-a.hg
456 $ cp full.hg gz-a.hg
450 $ cp full.hg gz-b.hg
457 $ cp full.hg gz-b.hg
451 $ cp full.hg bz2-a.hg
458 $ cp full.hg bz2-a.hg
452 $ cp full.hg bz2-b.hg
459 $ cp full.hg bz2-b.hg
453 $ cat > server/.hg/clonebundles.manifest << EOF
460 $ cat > server/.hg/clonebundles.manifest << EOF
454 > http://localhost:$HGPORT1/gz-a.hg BUNDLESPEC=gzip-v2 extra=a
461 > http://localhost:$HGPORT1/gz-a.hg BUNDLESPEC=gzip-v2 extra=a
455 > http://localhost:$HGPORT1/bz2-a.hg BUNDLESPEC=bzip2-v2 extra=a
462 > http://localhost:$HGPORT1/bz2-a.hg BUNDLESPEC=bzip2-v2 extra=a
456 > http://localhost:$HGPORT1/gz-b.hg BUNDLESPEC=gzip-v2 extra=b
463 > http://localhost:$HGPORT1/gz-b.hg BUNDLESPEC=gzip-v2 extra=b
457 > http://localhost:$HGPORT1/bz2-b.hg BUNDLESPEC=bzip2-v2 extra=b
464 > http://localhost:$HGPORT1/bz2-b.hg BUNDLESPEC=bzip2-v2 extra=b
458 > EOF
465 > EOF
459
466
460 Preferring an undefined attribute will take first entry
467 Preferring an undefined attribute will take first entry
461
468
462 $ hg --config ui.clonebundleprefers=foo=bar clone -U http://localhost:$HGPORT prefer-foo
469 $ hg --config ui.clonebundleprefers=foo=bar clone -U http://localhost:$HGPORT prefer-foo
463 applying clone bundle from http://localhost:$HGPORT1/gz-a.hg
470 applying clone bundle from http://localhost:$HGPORT1/gz-a.hg
464 adding changesets
471 adding changesets
465 adding manifests
472 adding manifests
466 adding file changes
473 adding file changes
467 added 2 changesets with 2 changes to 2 files
474 added 2 changesets with 2 changes to 2 files
468 finished applying clone bundle
475 finished applying clone bundle
469 searching for changes
476 searching for changes
470 no changes found
477 no changes found
471 2 local changesets published
478 2 local changesets published
472
479
473 Preferring bz2 type will download first entry of that type
480 Preferring bz2 type will download first entry of that type
474
481
475 $ hg --config ui.clonebundleprefers=COMPRESSION=bzip2 clone -U http://localhost:$HGPORT prefer-bz
482 $ hg --config ui.clonebundleprefers=COMPRESSION=bzip2 clone -U http://localhost:$HGPORT prefer-bz
476 applying clone bundle from http://localhost:$HGPORT1/bz2-a.hg
483 applying clone bundle from http://localhost:$HGPORT1/bz2-a.hg
477 adding changesets
484 adding changesets
478 adding manifests
485 adding manifests
479 adding file changes
486 adding file changes
480 added 2 changesets with 2 changes to 2 files
487 added 2 changesets with 2 changes to 2 files
481 finished applying clone bundle
488 finished applying clone bundle
482 searching for changes
489 searching for changes
483 no changes found
490 no changes found
484 2 local changesets published
491 2 local changesets published
485
492
486 Preferring multiple values of an option works
493 Preferring multiple values of an option works
487
494
488 $ hg --config ui.clonebundleprefers=COMPRESSION=unknown,COMPRESSION=bzip2 clone -U http://localhost:$HGPORT prefer-multiple-bz
495 $ hg --config ui.clonebundleprefers=COMPRESSION=unknown,COMPRESSION=bzip2 clone -U http://localhost:$HGPORT prefer-multiple-bz
489 applying clone bundle from http://localhost:$HGPORT1/bz2-a.hg
496 applying clone bundle from http://localhost:$HGPORT1/bz2-a.hg
490 adding changesets
497 adding changesets
491 adding manifests
498 adding manifests
492 adding file changes
499 adding file changes
493 added 2 changesets with 2 changes to 2 files
500 added 2 changesets with 2 changes to 2 files
494 finished applying clone bundle
501 finished applying clone bundle
495 searching for changes
502 searching for changes
496 no changes found
503 no changes found
497 2 local changesets published
504 2 local changesets published
498
505
499 Sorting multiple values should get us back to original first entry
506 Sorting multiple values should get us back to original first entry
500
507
501 $ hg --config ui.clonebundleprefers=BUNDLESPEC=unknown,BUNDLESPEC=gzip-v2,BUNDLESPEC=bzip2-v2 clone -U http://localhost:$HGPORT prefer-multiple-gz
508 $ hg --config ui.clonebundleprefers=BUNDLESPEC=unknown,BUNDLESPEC=gzip-v2,BUNDLESPEC=bzip2-v2 clone -U http://localhost:$HGPORT prefer-multiple-gz
502 applying clone bundle from http://localhost:$HGPORT1/gz-a.hg
509 applying clone bundle from http://localhost:$HGPORT1/gz-a.hg
503 adding changesets
510 adding changesets
504 adding manifests
511 adding manifests
505 adding file changes
512 adding file changes
506 added 2 changesets with 2 changes to 2 files
513 added 2 changesets with 2 changes to 2 files
507 finished applying clone bundle
514 finished applying clone bundle
508 searching for changes
515 searching for changes
509 no changes found
516 no changes found
510 2 local changesets published
517 2 local changesets published
511
518
512 Preferring multiple attributes has correct order
519 Preferring multiple attributes has correct order
513
520
514 $ hg --config ui.clonebundleprefers=extra=b,BUNDLESPEC=bzip2-v2 clone -U http://localhost:$HGPORT prefer-separate-attributes
521 $ hg --config ui.clonebundleprefers=extra=b,BUNDLESPEC=bzip2-v2 clone -U http://localhost:$HGPORT prefer-separate-attributes
515 applying clone bundle from http://localhost:$HGPORT1/bz2-b.hg
522 applying clone bundle from http://localhost:$HGPORT1/bz2-b.hg
516 adding changesets
523 adding changesets
517 adding manifests
524 adding manifests
518 adding file changes
525 adding file changes
519 added 2 changesets with 2 changes to 2 files
526 added 2 changesets with 2 changes to 2 files
520 finished applying clone bundle
527 finished applying clone bundle
521 searching for changes
528 searching for changes
522 no changes found
529 no changes found
523 2 local changesets published
530 2 local changesets published
524
531
525 Test where attribute is missing from some entries
532 Test where attribute is missing from some entries
526
533
527 $ cat > server/.hg/clonebundles.manifest << EOF
534 $ cat > server/.hg/clonebundles.manifest << EOF
528 > http://localhost:$HGPORT1/gz-a.hg BUNDLESPEC=gzip-v2
535 > http://localhost:$HGPORT1/gz-a.hg BUNDLESPEC=gzip-v2
529 > http://localhost:$HGPORT1/bz2-a.hg BUNDLESPEC=bzip2-v2
536 > http://localhost:$HGPORT1/bz2-a.hg BUNDLESPEC=bzip2-v2
530 > http://localhost:$HGPORT1/gz-b.hg BUNDLESPEC=gzip-v2 extra=b
537 > http://localhost:$HGPORT1/gz-b.hg BUNDLESPEC=gzip-v2 extra=b
531 > http://localhost:$HGPORT1/bz2-b.hg BUNDLESPEC=bzip2-v2 extra=b
538 > http://localhost:$HGPORT1/bz2-b.hg BUNDLESPEC=bzip2-v2 extra=b
532 > EOF
539 > EOF
533
540
534 $ hg --config ui.clonebundleprefers=extra=b clone -U http://localhost:$HGPORT prefer-partially-defined-attribute
541 $ hg --config ui.clonebundleprefers=extra=b clone -U http://localhost:$HGPORT prefer-partially-defined-attribute
535 applying clone bundle from http://localhost:$HGPORT1/gz-b.hg
542 applying clone bundle from http://localhost:$HGPORT1/gz-b.hg
536 adding changesets
543 adding changesets
537 adding manifests
544 adding manifests
538 adding file changes
545 adding file changes
539 added 2 changesets with 2 changes to 2 files
546 added 2 changesets with 2 changes to 2 files
540 finished applying clone bundle
547 finished applying clone bundle
541 searching for changes
548 searching for changes
542 no changes found
549 no changes found
543 2 local changesets published
550 2 local changesets published
544
551
545 Test a bad attribute list
552 Test a bad attribute list
546
553
547 $ hg --config ui.clonebundleprefers=bad clone -U http://localhost:$HGPORT bad-input
554 $ hg --config ui.clonebundleprefers=bad clone -U http://localhost:$HGPORT bad-input
548 abort: invalid ui.clonebundleprefers item: bad
555 abort: invalid ui.clonebundleprefers item: bad
549 (each comma separated item should be key=value pairs)
556 (each comma separated item should be key=value pairs)
550 [255]
557 [255]
551 $ hg --config ui.clonebundleprefers=key=val,bad,key2=val2 clone \
558 $ hg --config ui.clonebundleprefers=key=val,bad,key2=val2 clone \
552 > -U http://localhost:$HGPORT bad-input
559 > -U http://localhost:$HGPORT bad-input
553 abort: invalid ui.clonebundleprefers item: bad
560 abort: invalid ui.clonebundleprefers item: bad
554 (each comma separated item should be key=value pairs)
561 (each comma separated item should be key=value pairs)
555 [255]
562 [255]
556
563
557
564
558 Test interaction between clone bundles and --stream
565 Test interaction between clone bundles and --stream
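
The tests below show that when --stream is requested, only manifest entries that explicitly advertise a stream bundle are considered; plain gzip entries, and entries without a BUNDLESPEC, are filtered out. A rough sketch of that acceptance check (the real set of accepted stream specs may be broader than the single value shown):

    # Sketch: with --stream, accept an entry only if its BUNDLESPEC explicitly
    # names a stream/packed bundle; anything else falls back to a regular clone.
    def acceptable_for_stream(attrs):
        spec = attrs.get("BUNDLESPEC", "")
        return spec.split(";")[0] == "none-packed1"

    print(acceptable_for_stream({"BUNDLESPEC": "gzip-v2"}))       # False
    print(acceptable_for_stream({}))                              # False (unknown type)
    print(acceptable_for_stream({"BUNDLESPEC": "none-packed1"}))  # True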
559
566
560 A manifest with just a gzip bundle
567 A manifest with just a gzip bundle
561
568
562 $ cat > server/.hg/clonebundles.manifest << EOF
569 $ cat > server/.hg/clonebundles.manifest << EOF
563 > http://localhost:$HGPORT1/gz-a.hg BUNDLESPEC=gzip-v2
570 > http://localhost:$HGPORT1/gz-a.hg BUNDLESPEC=gzip-v2
564 > EOF
571 > EOF
565
572
566 $ hg clone -U --stream http://localhost:$HGPORT uncompressed-gzip
573 $ hg clone -U --stream http://localhost:$HGPORT uncompressed-gzip
567 no compatible clone bundles available on server; falling back to regular clone
574 no compatible clone bundles available on server; falling back to regular clone
568 (you may want to report this to the server operator)
575 (you may want to report this to the server operator)
569 streaming all changes
576 streaming all changes
570 9 files to transfer, 816 bytes of data
577 10 files to transfer, 816 bytes of data (no-rust !)
571 transferred 816 bytes in * seconds (*) (glob)
578 transferred 816 bytes in * seconds (*) (glob) (no-rust !)
579 12 files to transfer, 942 bytes of data (rust !)
580 transferred 942 bytes in *.* seconds (*) (glob) (rust !)
572
581
573 A manifest with a stream clone but no BUNDLESPEC
582 A manifest with a stream clone but no BUNDLESPEC
574
583
575 $ cat > server/.hg/clonebundles.manifest << EOF
584 $ cat > server/.hg/clonebundles.manifest << EOF
576 > http://localhost:$HGPORT1/packed.hg
585 > http://localhost:$HGPORT1/packed.hg
577 > EOF
586 > EOF
578
587
579 $ hg clone -U --stream http://localhost:$HGPORT uncompressed-no-bundlespec
588 $ hg clone -U --stream http://localhost:$HGPORT uncompressed-no-bundlespec
580 no compatible clone bundles available on server; falling back to regular clone
589 no compatible clone bundles available on server; falling back to regular clone
581 (you may want to report this to the server operator)
590 (you may want to report this to the server operator)
582 streaming all changes
591 streaming all changes
583 9 files to transfer, 816 bytes of data
592 10 files to transfer, 816 bytes of data (no-rust !)
584 transferred 816 bytes in * seconds (*) (glob)
593 transferred 816 bytes in * seconds (*) (glob) (no-rust !)
594 12 files to transfer, 942 bytes of data (rust !)
595 transferred 942 bytes in *.* seconds (*) (glob) (rust !)
585
596
586 A manifest with a gzip bundle and a stream clone
597 A manifest with a gzip bundle and a stream clone
587
598
588 $ cat > server/.hg/clonebundles.manifest << EOF
599 $ cat > server/.hg/clonebundles.manifest << EOF
589 > http://localhost:$HGPORT1/gz-a.hg BUNDLESPEC=gzip-v2
600 > http://localhost:$HGPORT1/gz-a.hg BUNDLESPEC=gzip-v2
590 > http://localhost:$HGPORT1/packed.hg BUNDLESPEC=none-packed1
601 > http://localhost:$HGPORT1/packed.hg BUNDLESPEC=none-packed1
591 > EOF
602 > EOF
592
603
593 $ hg clone -U --stream http://localhost:$HGPORT uncompressed-gzip-packed
604 $ hg clone -U --stream http://localhost:$HGPORT uncompressed-gzip-packed
594 applying clone bundle from http://localhost:$HGPORT1/packed.hg
605 applying clone bundle from http://localhost:$HGPORT1/packed.hg
595 4 files to transfer, 613 bytes of data
606 5 files to transfer, 613 bytes of data (no-rust !)
596 transferred 613 bytes in * seconds (*) (glob)
607 transferred 613 bytes in *.* seconds (*) (glob) (no-rust !)
608 7 files to transfer, 739 bytes of data (rust !)
609 transferred 739 bytes in *.* seconds (*) (glob) (rust !)
597 finished applying clone bundle
610 finished applying clone bundle
598 searching for changes
611 searching for changes
599 no changes found
612 no changes found
600
613
601 A manifest with a gzip bundle and stream clone with supported requirements
614 A manifest with a gzip bundle and stream clone with supported requirements
602
615
603 $ cat > server/.hg/clonebundles.manifest << EOF
616 $ cat > server/.hg/clonebundles.manifest << EOF
604 > http://localhost:$HGPORT1/gz-a.hg BUNDLESPEC=gzip-v2
617 > http://localhost:$HGPORT1/gz-a.hg BUNDLESPEC=gzip-v2
605 > http://localhost:$HGPORT1/packed.hg BUNDLESPEC=none-packed1;requirements%3Drevlogv1
618 > http://localhost:$HGPORT1/packed.hg BUNDLESPEC=none-packed1;requirements%3Drevlogv1
606 > EOF
619 > EOF
607
620
608 $ hg clone -U --stream http://localhost:$HGPORT uncompressed-gzip-packed-requirements
621 $ hg clone -U --stream http://localhost:$HGPORT uncompressed-gzip-packed-requirements
609 applying clone bundle from http://localhost:$HGPORT1/packed.hg
622 applying clone bundle from http://localhost:$HGPORT1/packed.hg
610 4 files to transfer, 613 bytes of data
623 5 files to transfer, 613 bytes of data (no-rust !)
611 transferred 613 bytes in * seconds (*) (glob)
624 transferred 613 bytes in *.* seconds (*) (glob) (no-rust !)
625 7 files to transfer, 739 bytes of data (rust !)
626 transferred 739 bytes in *.* seconds (*) (glob) (rust !)
612 finished applying clone bundle
627 finished applying clone bundle
613 searching for changes
628 searching for changes
614 no changes found
629 no changes found
615
630
616 A manifest with a gzip bundle and a stream clone with unsupported requirements
631 A manifest with a gzip bundle and a stream clone with unsupported requirements
617
632
618 $ cat > server/.hg/clonebundles.manifest << EOF
633 $ cat > server/.hg/clonebundles.manifest << EOF
619 > http://localhost:$HGPORT1/gz-a.hg BUNDLESPEC=gzip-v2
634 > http://localhost:$HGPORT1/gz-a.hg BUNDLESPEC=gzip-v2
620 > http://localhost:$HGPORT1/packed.hg BUNDLESPEC=none-packed1;requirements%3Drevlogv42
635 > http://localhost:$HGPORT1/packed.hg BUNDLESPEC=none-packed1;requirements%3Drevlogv42
621 > EOF
636 > EOF
622
637
623 $ hg clone -U --stream http://localhost:$HGPORT uncompressed-gzip-packed-unsupported-requirements
638 $ hg clone -U --stream http://localhost:$HGPORT uncompressed-gzip-packed-unsupported-requirements
624 no compatible clone bundles available on server; falling back to regular clone
639 no compatible clone bundles available on server; falling back to regular clone
625 (you may want to report this to the server operator)
640 (you may want to report this to the server operator)
626 streaming all changes
641 streaming all changes
627 9 files to transfer, 816 bytes of data
642 10 files to transfer, 816 bytes of data (no-rust !)
628 transferred 816 bytes in * seconds (*) (glob)
643 transferred 816 bytes in * seconds (*) (glob) (no-rust !)
644 12 files to transfer, 942 bytes of data (rust !)
645 transferred 942 bytes in *.* seconds (*) (glob) (rust !)
629
646
630 Test clone bundle retrieved through bundle2
647 Test clone bundle retrieved through bundle2
631
648
632 $ cat << EOF >> $HGRCPATH
649 $ cat << EOF >> $HGRCPATH
633 > [extensions]
650 > [extensions]
634 > largefiles=
651 > largefiles=
635 > EOF
652 > EOF
636 $ killdaemons.py
653 $ killdaemons.py
637 $ hg -R server serve -d -p $HGPORT --pid-file hg.pid --accesslog access.log
654 $ hg -R server serve -d -p $HGPORT --pid-file hg.pid --accesslog access.log
638 $ cat hg.pid >> $DAEMON_PIDS
655 $ cat hg.pid >> $DAEMON_PIDS
639
656
640 $ hg -R server debuglfput gz-a.hg
657 $ hg -R server debuglfput gz-a.hg
641 1f74b3d08286b9b3a16fb3fa185dd29219cbc6ae
658 1f74b3d08286b9b3a16fb3fa185dd29219cbc6ae
642
659
643 $ cat > server/.hg/clonebundles.manifest << EOF
660 $ cat > server/.hg/clonebundles.manifest << EOF
644 > largefile://1f74b3d08286b9b3a16fb3fa185dd29219cbc6ae BUNDLESPEC=gzip-v2
661 > largefile://1f74b3d08286b9b3a16fb3fa185dd29219cbc6ae BUNDLESPEC=gzip-v2
645 > EOF
662 > EOF
646
663
647 $ hg clone -U http://localhost:$HGPORT largefile-provided --traceback
664 $ hg clone -U http://localhost:$HGPORT largefile-provided --traceback
648 applying clone bundle from largefile://1f74b3d08286b9b3a16fb3fa185dd29219cbc6ae
665 applying clone bundle from largefile://1f74b3d08286b9b3a16fb3fa185dd29219cbc6ae
649 adding changesets
666 adding changesets
650 adding manifests
667 adding manifests
651 adding file changes
668 adding file changes
652 added 2 changesets with 2 changes to 2 files
669 added 2 changesets with 2 changes to 2 files
653 finished applying clone bundle
670 finished applying clone bundle
654 searching for changes
671 searching for changes
655 no changes found
672 no changes found
656 2 local changesets published
673 2 local changesets published
657 $ killdaemons.py
674 $ killdaemons.py
658
675
659 A manifest with a gzip bundle requiring too much memory for a 16MB system and working
676 A manifest with a gzip bundle requiring too much memory for a 16MB system and working
660 on a 32MB system.
677 on a 32MB system.
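
As the debug output below shows, an entry is skipped when its REQUIREDRAM value needs more than two thirds of the memory the client believes is available (ui.available-memory in these runs). A simplified sketch of that check, with a hand-rolled size parser used purely for illustration:

    # Simplified sketch of the REQUIREDRAM filter: keep an entry only if its
    # declared requirement fits in 2/3 of the available memory.
    _UNITS = {"GB": 1024 ** 3, "MB": 1024 ** 2, "KB": 1024, "B": 1}

    def parse_size(text):
        for suffix, factor in _UNITS.items():
            if text.upper().endswith(suffix):
                return int(float(text[: -len(suffix)]) * factor)
        return int(text)

    def fits_in_memory(required, available):
        return parse_size(required) <= (2 * parse_size(available)) // 3

    print(fits_in_memory("12MB", "16MB"))  # False -> entry filtered, regular clone
    print(fits_in_memory("12MB", "32MB"))  # True  -> clone bundle is used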
661
678
662 $ "$PYTHON" $TESTDIR/dumbhttp.py -p $HGPORT1 --pid http.pid
679 $ "$PYTHON" $TESTDIR/dumbhttp.py -p $HGPORT1 --pid http.pid
663 $ cat http.pid >> $DAEMON_PIDS
680 $ cat http.pid >> $DAEMON_PIDS
664 $ hg -R server serve -d -p $HGPORT --pid-file hg.pid --accesslog access.log
681 $ hg -R server serve -d -p $HGPORT --pid-file hg.pid --accesslog access.log
665 $ cat hg.pid >> $DAEMON_PIDS
682 $ cat hg.pid >> $DAEMON_PIDS
666
683
667 $ cat > server/.hg/clonebundles.manifest << EOF
684 $ cat > server/.hg/clonebundles.manifest << EOF
668 > http://localhost:$HGPORT1/gz-a.hg BUNDLESPEC=gzip-v2 REQUIREDRAM=12MB
685 > http://localhost:$HGPORT1/gz-a.hg BUNDLESPEC=gzip-v2 REQUIREDRAM=12MB
669 > EOF
686 > EOF
670
687
671 $ hg clone -U --debug --config ui.available-memory=16MB http://localhost:$HGPORT gzip-too-large
688 $ hg clone -U --debug --config ui.available-memory=16MB http://localhost:$HGPORT gzip-too-large
672 using http://localhost:$HGPORT/
689 using http://localhost:$HGPORT/
673 sending capabilities command
690 sending capabilities command
674 sending clonebundles_manifest command
691 sending clonebundles_manifest command
675 filtering http://localhost:$HGPORT1/gz-a.hg as it needs more than 2/3 of system memory
692 filtering http://localhost:$HGPORT1/gz-a.hg as it needs more than 2/3 of system memory
676 no compatible clone bundles available on server; falling back to regular clone
693 no compatible clone bundles available on server; falling back to regular clone
677 (you may want to report this to the server operator)
694 (you may want to report this to the server operator)
678 query 1; heads
695 query 1; heads
679 sending batch command
696 sending batch command
680 requesting all changes
697 requesting all changes
681 sending getbundle command
698 sending getbundle command
682 bundle2-input-bundle: with-transaction
699 bundle2-input-bundle: with-transaction
683 bundle2-input-part: "changegroup" (params: 1 mandatory 1 advisory) supported
700 bundle2-input-part: "changegroup" (params: 1 mandatory 1 advisory) supported
684 adding changesets
701 adding changesets
685 add changeset 53245c60e682
702 add changeset 53245c60e682
686 add changeset aaff8d2ffbbf
703 add changeset aaff8d2ffbbf
687 adding manifests
704 adding manifests
688 adding file changes
705 adding file changes
689 adding bar revisions
706 adding bar revisions
690 adding foo revisions
707 adding foo revisions
691 bundle2-input-part: total payload size 936
708 bundle2-input-part: total payload size 936
692 bundle2-input-part: "listkeys" (params: 1 mandatory) supported
709 bundle2-input-part: "listkeys" (params: 1 mandatory) supported
693 bundle2-input-part: "phase-heads" supported
710 bundle2-input-part: "phase-heads" supported
694 bundle2-input-part: total payload size 24
711 bundle2-input-part: total payload size 24
695 bundle2-input-bundle: 3 parts total
712 bundle2-input-bundle: 3 parts total
696 checking for updated bookmarks
713 checking for updated bookmarks
697 updating the branch cache
714 updating the branch cache
698 added 2 changesets with 2 changes to 2 files
715 added 2 changesets with 2 changes to 2 files
699 new changesets 53245c60e682:aaff8d2ffbbf
716 new changesets 53245c60e682:aaff8d2ffbbf
700 calling hook changegroup.lfiles: hgext.largefiles.reposetup.checkrequireslfiles
717 calling hook changegroup.lfiles: hgext.largefiles.reposetup.checkrequireslfiles
701 updating the branch cache
718 updating the branch cache
702 (sent 4 HTTP requests and * bytes; received * bytes in responses) (glob)
719 (sent 4 HTTP requests and * bytes; received * bytes in responses) (glob)
703
720
704 $ hg clone -U --debug --config ui.available-memory=32MB http://localhost:$HGPORT gzip-too-large2
721 $ hg clone -U --debug --config ui.available-memory=32MB http://localhost:$HGPORT gzip-too-large2
705 using http://localhost:$HGPORT/
722 using http://localhost:$HGPORT/
706 sending capabilities command
723 sending capabilities command
707 sending clonebundles_manifest command
724 sending clonebundles_manifest command
708 applying clone bundle from http://localhost:$HGPORT1/gz-a.hg
725 applying clone bundle from http://localhost:$HGPORT1/gz-a.hg
709 bundle2-input-bundle: 1 params with-transaction
726 bundle2-input-bundle: 1 params with-transaction
710 bundle2-input-part: "changegroup" (params: 1 mandatory 1 advisory) supported
727 bundle2-input-part: "changegroup" (params: 1 mandatory 1 advisory) supported
711 adding changesets
728 adding changesets
712 add changeset 53245c60e682
729 add changeset 53245c60e682
713 add changeset aaff8d2ffbbf
730 add changeset aaff8d2ffbbf
714 adding manifests
731 adding manifests
715 adding file changes
732 adding file changes
716 adding bar revisions
733 adding bar revisions
717 adding foo revisions
734 adding foo revisions
718 bundle2-input-part: total payload size 920
735 bundle2-input-part: total payload size 920
719 bundle2-input-part: "cache:rev-branch-cache" (advisory) supported
736 bundle2-input-part: "cache:rev-branch-cache" (advisory) supported
720 bundle2-input-part: total payload size 59
737 bundle2-input-part: total payload size 59
721 bundle2-input-bundle: 2 parts total
738 bundle2-input-bundle: 2 parts total
722 updating the branch cache
739 updating the branch cache
723 added 2 changesets with 2 changes to 2 files
740 added 2 changesets with 2 changes to 2 files
724 finished applying clone bundle
741 finished applying clone bundle
725 query 1; heads
742 query 1; heads
726 sending batch command
743 sending batch command
727 searching for changes
744 searching for changes
728 all remote heads known locally
745 all remote heads known locally
729 no changes found
746 no changes found
730 sending getbundle command
747 sending getbundle command
731 bundle2-input-bundle: with-transaction
748 bundle2-input-bundle: with-transaction
732 bundle2-input-part: "listkeys" (params: 1 mandatory) supported
749 bundle2-input-part: "listkeys" (params: 1 mandatory) supported
733 bundle2-input-part: "phase-heads" supported
750 bundle2-input-part: "phase-heads" supported
734 bundle2-input-part: total payload size 24
751 bundle2-input-part: total payload size 24
735 bundle2-input-bundle: 2 parts total
752 bundle2-input-bundle: 2 parts total
736 checking for updated bookmarks
753 checking for updated bookmarks
737 2 local changesets published
754 2 local changesets published
738 calling hook changegroup.lfiles: hgext.largefiles.reposetup.checkrequireslfiles
755 calling hook changegroup.lfiles: hgext.largefiles.reposetup.checkrequireslfiles
739 updating the branch cache
756 updating the branch cache
740 (sent 4 HTTP requests and * bytes; received * bytes in responses) (glob)
757 (sent 4 HTTP requests and * bytes; received * bytes in responses) (glob)
741 $ killdaemons.py
758 $ killdaemons.py
742
759
743 Testing a clone bundle that involves revlog splitting (issue6811)
760 Testing a clone bundle that involves revlog splitting (issue6811)
744 ==================================================================
761 ==================================================================
745
762
746 $ cat >> $HGRCPATH << EOF
763 $ cat >> $HGRCPATH << EOF
747 > [format]
764 > [format]
748 > revlog-compression=none
765 > revlog-compression=none
749 > use-persistent-nodemap=no
766 > use-persistent-nodemap=no
750 > EOF
767 > EOF
751
768
752 $ hg init server-revlog-split/
769 $ hg init server-revlog-split/
753 $ cd server-revlog-split
770 $ cd server-revlog-split
754 $ cat >> .hg/hgrc << EOF
771 $ cat >> .hg/hgrc << EOF
755 > [extensions]
772 > [extensions]
756 > clonebundles =
773 > clonebundles =
757 > EOF
774 > EOF
758 $ echo foo > A
775 $ echo foo > A
759 $ hg add A
776 $ hg add A
760 $ hg commit -m 'initial commit'
777 $ hg commit -m 'initial commit'
761 IMPORTANT: the revlogs must not be split
778 IMPORTANT: the revlogs must not be split
762 $ ls -1 .hg/store/00manifest.*
779 $ ls -1 .hg/store/00manifest.*
763 .hg/store/00manifest.i
780 .hg/store/00manifest.i
764 $ ls -1 .hg/store/data/_a.*
781 $ ls -1 .hg/store/data/_a.*
765 .hg/store/data/_a.i
782 .hg/store/data/_a.i
766
783
767 do a big enough update to split the revlogs
784 do a big enough update to split the revlogs
768
785
769 $ $TESTDIR/seq.py 100000 > A
786 $ $TESTDIR/seq.py 100000 > A
770 $ mkdir foo
787 $ mkdir foo
771 $ cd foo
788 $ cd foo
772 $ touch `$TESTDIR/seq.py 10000`
789 $ touch `$TESTDIR/seq.py 10000`
773 $ cd ..
790 $ cd ..
774 $ hg add -q foo
791 $ hg add -q foo
775 $ hg commit -m 'split the manifest and one filelog'
792 $ hg commit -m 'split the manifest and one filelog'
776
793
777 IMPORTANT: now the revlogs must be split
794 IMPORTANT: now the revlogs must be split
778 $ ls -1 .hg/store/00manifest.*
795 $ ls -1 .hg/store/00manifest.*
779 .hg/store/00manifest.d
796 .hg/store/00manifest.d
780 .hg/store/00manifest.i
797 .hg/store/00manifest.i
781 $ ls -1 .hg/store/data/_a.*
798 $ ls -1 .hg/store/data/_a.*
782 .hg/store/data/_a.d
799 .hg/store/data/_a.d
783 .hg/store/data/_a.i
800 .hg/store/data/_a.i
784
801
785 Add an extra commit on top of that
802 Add an extra commit on top of that
786
803
787 $ echo foo >> A
804 $ echo foo >> A
788 $ hg commit -m 'one extra commit'
805 $ hg commit -m 'one extra commit'
789
806
790 $ cd ..
807 $ cd ..
791
808
792 Do a bundle that contains the split, but not the update
809 Do a bundle that contains the split, but not the update
793
810
794 $ hg bundle --exact --rev '::(default~1)' -R server-revlog-split/ --type gzip-v2 split-test.hg
811 $ hg bundle --exact --rev '::(default~1)' -R server-revlog-split/ --type gzip-v2 split-test.hg
795 2 changesets found
812 2 changesets found
796
813
797 $ cat > server-revlog-split/.hg/clonebundles.manifest << EOF
814 $ cat > server-revlog-split/.hg/clonebundles.manifest << EOF
798 > http://localhost:$HGPORT1/split-test.hg BUNDLESPEC=gzip-v2
815 > http://localhost:$HGPORT1/split-test.hg BUNDLESPEC=gzip-v2
799 > EOF
816 > EOF
800
817
801 start the necessary servers
818 start the necessary servers
802
819
803 $ "$PYTHON" $TESTDIR/dumbhttp.py -p $HGPORT1 --pid http.pid
820 $ "$PYTHON" $TESTDIR/dumbhttp.py -p $HGPORT1 --pid http.pid
804 $ cat http.pid >> $DAEMON_PIDS
821 $ cat http.pid >> $DAEMON_PIDS
805 $ hg -R server-revlog-split serve -d -p $HGPORT --pid-file hg.pid --accesslog access.log
822 $ hg -R server-revlog-split serve -d -p $HGPORT --pid-file hg.pid --accesslog access.log
806 $ cat hg.pid >> $DAEMON_PIDS
823 $ cat hg.pid >> $DAEMON_PIDS
807
824
808 Check that clone works fine
825 Check that clone works fine
809 ===========================
826 ===========================
810
827
811 Here, the initial clone will trigger a revlog split (which is a bit clowny in
828 Here, the initial clone will trigger a revlog split (which is a bit clowny in
812 itself, but whatever). The split revlogs will see additional data added to
829 itself, but whatever). The split revlogs will see additional data added to
813 them in the subsequent pull. This should not be a problem.
830 them in the subsequent pull. This should not be a problem.
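
Whether a revlog has been split can be read off the store layout checked above and below: an inline revlog keeps index and data together in its .i file, while a split revlog gains a separate .d data file. A small illustrative check:

    # Illustrative check: a revlog counts as split once a .d data file exists
    # next to its .i index; inline revlogs have only the .i file.
    import os

    def is_split(revlog_path_without_suffix):
        return os.path.exists(revlog_path_without_suffix + ".d")

    for revlog in (".hg/store/00manifest", ".hg/store/data/_a"):
        state = "split" if is_split(revlog) else "inline"
        print(revlog, "->", state)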
814
831
815 $ hg clone http://localhost:$HGPORT revlog-split-in-the-bundle
832 $ hg clone http://localhost:$HGPORT revlog-split-in-the-bundle
816 applying clone bundle from http://localhost:$HGPORT1/split-test.hg
833 applying clone bundle from http://localhost:$HGPORT1/split-test.hg
817 adding changesets
834 adding changesets
818 adding manifests
835 adding manifests
819 adding file changes
836 adding file changes
820 added 2 changesets with 10002 changes to 10001 files
837 added 2 changesets with 10002 changes to 10001 files
821 finished applying clone bundle
838 finished applying clone bundle
822 searching for changes
839 searching for changes
823 adding changesets
840 adding changesets
824 adding manifests
841 adding manifests
825 adding file changes
842 adding file changes
826 added 1 changesets with 1 changes to 1 files
843 added 1 changesets with 1 changes to 1 files
827 new changesets e3879eaa1db7
844 new changesets e3879eaa1db7
828 2 local changesets published
845 2 local changesets published
829 updating to branch default
846 updating to branch default
830 10001 files updated, 0 files merged, 0 files removed, 0 files unresolved
847 10001 files updated, 0 files merged, 0 files removed, 0 files unresolved
831
848
832 check the results
849 check the results
833
850
834 $ cd revlog-split-in-the-bundle
851 $ cd revlog-split-in-the-bundle
835 $ f --size .hg/store/00manifest.*
852 $ f --size .hg/store/00manifest.*
836 .hg/store/00manifest.d: size=499037
853 .hg/store/00manifest.d: size=499037
837 .hg/store/00manifest.i: size=192
854 .hg/store/00manifest.i: size=192
838 $ f --size .hg/store/data/_a.*
855 $ f --size .hg/store/data/_a.*
839 .hg/store/data/_a.d: size=588917
856 .hg/store/data/_a.d: size=588917
840 .hg/store/data/_a.i: size=192
857 .hg/store/data/_a.i: size=192
841
858
842 manifest should work
859 manifest should work
843
860
844 $ hg files -r tip | wc -l
861 $ hg files -r tip | wc -l
845 \s*10001 (re)
862 \s*10001 (re)
846
863
847 file content should work
864 file content should work
848
865
849 $ hg cat -r tip A | wc -l
866 $ hg cat -r tip A | wc -l
850 \s*100001 (re)
867 \s*100001 (re)
851
868
852
869
@@ -1,39 +1,39 b''
1 #require rust
1 #require rust
2
2
3 $ cat >> $HGRCPATH << EOF
3 $ cat >> $HGRCPATH << EOF
4 > [format]
4 > [format]
5 > use-dirstate-v2=1
5 > use-dirstate-v2=1
6 > [storage]
6 > [storage]
7 > dirstate-v2.slow-path=allow
7 > dirstate-v2.slow-path=allow
8 > EOF
8 > EOF
9
9
10 $ hg init t
10 $ hg init t
11 $ cd t
11 $ cd t
12
12
13 $ for i in 1 2 3 4 5 6 7 8 9 10; do touch foobar$i; done
13 $ for i in 1 2 3 4 5 6 7 8 9 10; do touch foobar$i; done
14 $ hg add .
14 $ hg add .
15 adding foobar1
15 adding foobar1
16 adding foobar10
16 adding foobar10
17 adding foobar2
17 adding foobar2
18 adding foobar3
18 adding foobar3
19 adding foobar4
19 adding foobar4
20 adding foobar5
20 adding foobar5
21 adding foobar6
21 adding foobar6
22 adding foobar7
22 adding foobar7
23 adding foobar8
23 adding foobar8
24 adding foobar9
24 adding foobar9
25 $ hg commit -m "1"
25 $ hg commit -m "1"
26
26
27 Check that there's no space leak on debugrebuilddirstate
27 Check that there's no space leak on debugrebuilddirstate
28
28
29 $ f --size .hg/dirstate*
29 $ f --size .hg/dirstate*
30 .hg/dirstate: size=133
30 .hg/dirstate: size=133
31 .hg/dirstate.b870a51b: size=511
32 $ hg debugrebuilddirstate
33 $ f --size .hg/dirstate*
34 .hg/dirstate: size=133
35 .hg/dirstate.88698448: size=511
31 .hg/dirstate.88698448: size=511
36 $ hg debugrebuilddirstate
32 $ hg debugrebuilddirstate
37 $ f --size .hg/dirstate*
33 $ f --size .hg/dirstate*
38 .hg/dirstate: size=133
34 .hg/dirstate: size=133
39 .hg/dirstate.6b8ab34b: size=511
35 .hg/dirstate.6b8ab34b: size=511
36 $ hg debugrebuilddirstate
37 $ f --size .hg/dirstate*
38 .hg/dirstate: size=133
39 .hg/dirstate.b875dfc5: size=511
@@ -1,818 +1,818 b''
1 $ cat << EOF >> $HGRCPATH
1 $ cat << EOF >> $HGRCPATH
2 > [ui]
2 > [ui]
3 > interactive=yes
3 > interactive=yes
4 > EOF
4 > EOF
5
5
6 $ hg init debugrevlog
6 $ hg init debugrevlog
7 $ cd debugrevlog
7 $ cd debugrevlog
8 $ echo a > a
8 $ echo a > a
9 $ hg ci -Am adda
9 $ hg ci -Am adda
10 adding a
10 adding a
11 $ hg rm .
11 $ hg rm .
12 removing a
12 removing a
13 $ hg ci -Am make-it-empty
13 $ hg ci -Am make-it-empty
14 $ hg revert --all -r 0
14 $ hg revert --all -r 0
15 adding a
15 adding a
16 $ hg ci -Am make-it-full
16 $ hg ci -Am make-it-full
17 #if reporevlogstore
17 #if reporevlogstore
18 $ hg debugrevlog -c
18 $ hg debugrevlog -c
19 format : 1
19 format : 1
20 flags : inline
20 flags : (none)
21
21
22 revisions : 3
22 revisions : 3
23 merges : 0 ( 0.00%)
23 merges : 0 ( 0.00%)
24 normal : 3 (100.00%)
24 normal : 3 (100.00%)
25 revisions : 3
25 revisions : 3
26 empty : 0 ( 0.00%)
26 empty : 0 ( 0.00%)
27 text : 0 (100.00%)
27 text : 0 (100.00%)
28 delta : 0 (100.00%)
28 delta : 0 (100.00%)
29 snapshot : 3 (100.00%)
29 snapshot : 3 (100.00%)
30 lvl-0 : 3 (100.00%)
30 lvl-0 : 3 (100.00%)
31 deltas : 0 ( 0.00%)
31 deltas : 0 ( 0.00%)
32 revision size : 191
32 revision size : 191
33 snapshot : 191 (100.00%)
33 snapshot : 191 (100.00%)
34 lvl-0 : 191 (100.00%)
34 lvl-0 : 191 (100.00%)
35 deltas : 0 ( 0.00%)
35 deltas : 0 ( 0.00%)
36
36
37 chunks : 3
37 chunks : 3
38 0x75 (u) : 3 (100.00%)
38 0x75 (u) : 3 (100.00%)
39 chunks size : 191
39 chunks size : 191
40 0x75 (u) : 191 (100.00%)
40 0x75 (u) : 191 (100.00%)
41
41
42
42
43 total-stored-content: 188 bytes
43 total-stored-content: 188 bytes
44
44
45 avg chain length : 0
45 avg chain length : 0
46 max chain length : 0
46 max chain length : 0
47 max chain reach : 67
47 max chain reach : 67
48 compression ratio : 0
48 compression ratio : 0
49
49
50 uncompressed data size (min/max/avg) : 57 / 66 / 62
50 uncompressed data size (min/max/avg) : 57 / 66 / 62
51 full revision size (min/max/avg) : 58 / 67 / 63
51 full revision size (min/max/avg) : 58 / 67 / 63
52 inter-snapshot size (min/max/avg) : 0 / 0 / 0
52 inter-snapshot size (min/max/avg) : 0 / 0 / 0
53 delta size (min/max/avg) : 0 / 0 / 0
53 delta size (min/max/avg) : 0 / 0 / 0
54 $ hg debugrevlog -m
54 $ hg debugrevlog -m
55 format : 1
55 format : 1
56 flags : inline, generaldelta
56 flags : inline, generaldelta
57
57
58 revisions : 3
58 revisions : 3
59 merges : 0 ( 0.00%)
59 merges : 0 ( 0.00%)
60 normal : 3 (100.00%)
60 normal : 3 (100.00%)
61 revisions : 3
61 revisions : 3
62 empty : 1 (33.33%)
62 empty : 1 (33.33%)
63 text : 1 (100.00%)
63 text : 1 (100.00%)
64 delta : 0 ( 0.00%)
64 delta : 0 ( 0.00%)
65 snapshot : 2 (66.67%)
65 snapshot : 2 (66.67%)
66 lvl-0 : 2 (66.67%)
66 lvl-0 : 2 (66.67%)
67 deltas : 0 ( 0.00%)
67 deltas : 0 ( 0.00%)
68 revision size : 88
68 revision size : 88
69 snapshot : 88 (100.00%)
69 snapshot : 88 (100.00%)
70 lvl-0 : 88 (100.00%)
70 lvl-0 : 88 (100.00%)
71 deltas : 0 ( 0.00%)
71 deltas : 0 ( 0.00%)
72
72
73 chunks : 3
73 chunks : 3
74 empty : 1 (33.33%)
74 empty : 1 (33.33%)
75 0x75 (u) : 2 (66.67%)
75 0x75 (u) : 2 (66.67%)
76 chunks size : 88
76 chunks size : 88
77 empty : 0 ( 0.00%)
77 empty : 0 ( 0.00%)
78 0x75 (u) : 88 (100.00%)
78 0x75 (u) : 88 (100.00%)
79
79
80
80
81 total-stored-content: 86 bytes
81 total-stored-content: 86 bytes
82
82
83 avg chain length : 0
83 avg chain length : 0
84 max chain length : 0
84 max chain length : 0
85 max chain reach : 44
85 max chain reach : 44
86 compression ratio : 0
86 compression ratio : 0
87
87
88 uncompressed data size (min/max/avg) : 0 / 43 / 28
88 uncompressed data size (min/max/avg) : 0 / 43 / 28
89 full revision size (min/max/avg) : 44 / 44 / 44
89 full revision size (min/max/avg) : 44 / 44 / 44
90 inter-snapshot size (min/max/avg) : 0 / 0 / 0
90 inter-snapshot size (min/max/avg) : 0 / 0 / 0
91 delta size (min/max/avg) : 0 / 0 / 0
91 delta size (min/max/avg) : 0 / 0 / 0
92 $ hg debugrevlog a
92 $ hg debugrevlog a
93 format : 1
93 format : 1
94 flags : inline, generaldelta
94 flags : inline, generaldelta
95
95
96 revisions : 1
96 revisions : 1
97 merges : 0 ( 0.00%)
97 merges : 0 ( 0.00%)
98 normal : 1 (100.00%)
98 normal : 1 (100.00%)
99 revisions : 1
99 revisions : 1
100 empty : 0 ( 0.00%)
100 empty : 0 ( 0.00%)
101 text : 0 (100.00%)
101 text : 0 (100.00%)
102 delta : 0 (100.00%)
102 delta : 0 (100.00%)
103 snapshot : 1 (100.00%)
103 snapshot : 1 (100.00%)
104 lvl-0 : 1 (100.00%)
104 lvl-0 : 1 (100.00%)
105 deltas : 0 ( 0.00%)
105 deltas : 0 ( 0.00%)
106 revision size : 3
106 revision size : 3
107 snapshot : 3 (100.00%)
107 snapshot : 3 (100.00%)
108 lvl-0 : 3 (100.00%)
108 lvl-0 : 3 (100.00%)
109 deltas : 0 ( 0.00%)
109 deltas : 0 ( 0.00%)
110
110
111 chunks : 1
111 chunks : 1
112 0x75 (u) : 1 (100.00%)
112 0x75 (u) : 1 (100.00%)
113 chunks size : 3
113 chunks size : 3
114 0x75 (u) : 3 (100.00%)
114 0x75 (u) : 3 (100.00%)
115
115
116
116
117 total-stored-content: 2 bytes
117 total-stored-content: 2 bytes
118
118
119 avg chain length : 0
119 avg chain length : 0
120 max chain length : 0
120 max chain length : 0
121 max chain reach : 3
121 max chain reach : 3
122 compression ratio : 0
122 compression ratio : 0
123
123
124 uncompressed data size (min/max/avg) : 2 / 2 / 2
124 uncompressed data size (min/max/avg) : 2 / 2 / 2
125 full revision size (min/max/avg) : 3 / 3 / 3
125 full revision size (min/max/avg) : 3 / 3 / 3
126 inter-snapshot size (min/max/avg) : 0 / 0 / 0
126 inter-snapshot size (min/max/avg) : 0 / 0 / 0
127 delta size (min/max/avg) : 0 / 0 / 0
127 delta size (min/max/avg) : 0 / 0 / 0
128 #endif
128 #endif
129
129
130 Test debugindex, with and without the --verbose/--debug flag
130 Test debugindex, with and without the --verbose/--debug flag
131 $ hg debugrevlogindex a
131 $ hg debugrevlogindex a
132 rev linkrev nodeid p1 p2
132 rev linkrev nodeid p1 p2
133 0 0 b789fdd96dc2 000000000000 000000000000
133 0 0 b789fdd96dc2 000000000000 000000000000
134
134
135 #if no-reposimplestore
135 #if no-reposimplestore
136 $ hg --verbose debugrevlogindex a
136 $ hg --verbose debugrevlogindex a
137 rev offset length linkrev nodeid p1 p2
137 rev offset length linkrev nodeid p1 p2
138 0 0 3 0 b789fdd96dc2 000000000000 000000000000
138 0 0 3 0 b789fdd96dc2 000000000000 000000000000
139
139
140 $ hg --debug debugrevlogindex a
140 $ hg --debug debugrevlogindex a
141 rev offset length linkrev nodeid p1 p2
141 rev offset length linkrev nodeid p1 p2
142 0 0 3 0 b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000
142 0 0 3 0 b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000
143 #endif
143 #endif
144
144
145 $ hg debugrevlogindex -f 1 a
145 $ hg debugrevlogindex -f 1 a
146 rev flag size link p1 p2 nodeid
146 rev flag size link p1 p2 nodeid
147 0 0000 2 0 -1 -1 b789fdd96dc2
147 0 0000 2 0 -1 -1 b789fdd96dc2
148
148
149 #if no-reposimplestore
149 #if no-reposimplestore
150 $ hg --verbose debugrevlogindex -f 1 a
150 $ hg --verbose debugrevlogindex -f 1 a
151 rev flag offset length size link p1 p2 nodeid
151 rev flag offset length size link p1 p2 nodeid
152 0 0000 0 3 2 0 -1 -1 b789fdd96dc2
152 0 0000 0 3 2 0 -1 -1 b789fdd96dc2
153
153
154 $ hg --debug debugrevlogindex -f 1 a
154 $ hg --debug debugrevlogindex -f 1 a
155 rev flag offset length size link p1 p2 nodeid
155 rev flag offset length size link p1 p2 nodeid
156 0 0000 0 3 2 0 -1 -1 b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3
156 0 0000 0 3 2 0 -1 -1 b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3
157 #endif
157 #endif
158
158
159 $ hg debugindex -c
159 $ hg debugindex -c
160 rev linkrev nodeid p1-nodeid p2-nodeid
160 rev linkrev nodeid p1-nodeid p2-nodeid
161 0 0 07f494440405 000000000000 000000000000
161 0 0 07f494440405 000000000000 000000000000
162 1 1 8cccb4b5fec2 07f494440405 000000000000
162 1 1 8cccb4b5fec2 07f494440405 000000000000
163 2 2 b1e228c512c5 8cccb4b5fec2 000000000000
163 2 2 b1e228c512c5 8cccb4b5fec2 000000000000
164 $ hg debugindex -c --debug
164 $ hg debugindex -c --debug
165 rev rank linkrev nodeid p1-rev p1-nodeid p2-rev p2-nodeid full-size delta-base flags comp-mode data-offset chunk-size sd-comp-mode sidedata-offset sd-chunk-size
165 rev rank linkrev nodeid p1-rev p1-nodeid p2-rev p2-nodeid full-size delta-base flags comp-mode data-offset chunk-size sd-comp-mode sidedata-offset sd-chunk-size
166 0 -1 0 07f4944404050f47db2e5c5071e0e84e7a27bba9 -1 0000000000000000000000000000000000000000 -1 0000000000000000000000000000000000000000 57 0 0 2 0 58 inline 0 0
166 0 -1 0 07f4944404050f47db2e5c5071e0e84e7a27bba9 -1 0000000000000000000000000000000000000000 -1 0000000000000000000000000000000000000000 57 0 0 2 0 58 inline 0 0
167 1 -1 1 8cccb4b5fec20cafeb99dd01c26d4dee8ea4388a 0 07f4944404050f47db2e5c5071e0e84e7a27bba9 -1 0000000000000000000000000000000000000000 66 1 0 2 58 67 inline 0 0
167 1 -1 1 8cccb4b5fec20cafeb99dd01c26d4dee8ea4388a 0 07f4944404050f47db2e5c5071e0e84e7a27bba9 -1 0000000000000000000000000000000000000000 66 1 0 2 58 67 inline 0 0
168 2 -1 2 b1e228c512c5d7066d70562ed839c3323a62d6d2 1 8cccb4b5fec20cafeb99dd01c26d4dee8ea4388a -1 0000000000000000000000000000000000000000 65 2 0 2 125 66 inline 0 0
168 2 -1 2 b1e228c512c5d7066d70562ed839c3323a62d6d2 1 8cccb4b5fec20cafeb99dd01c26d4dee8ea4388a -1 0000000000000000000000000000000000000000 65 2 0 2 125 66 inline 0 0
169 $ hg debugindex -m
169 $ hg debugindex -m
170 rev linkrev nodeid p1-nodeid p2-nodeid
170 rev linkrev nodeid p1-nodeid p2-nodeid
171 0 0 a0c8bcbbb45c 000000000000 000000000000
171 0 0 a0c8bcbbb45c 000000000000 000000000000
172 1 1 57faf8a737ae a0c8bcbbb45c 000000000000
172 1 1 57faf8a737ae a0c8bcbbb45c 000000000000
173 2 2 a35b10320954 57faf8a737ae 000000000000
173 2 2 a35b10320954 57faf8a737ae 000000000000
174 $ hg debugindex -m --debug
174 $ hg debugindex -m --debug
175 rev rank linkrev nodeid p1-rev p1-nodeid p2-rev p2-nodeid full-size delta-base flags comp-mode data-offset chunk-size sd-comp-mode sidedata-offset sd-chunk-size
175 rev rank linkrev nodeid p1-rev p1-nodeid p2-rev p2-nodeid full-size delta-base flags comp-mode data-offset chunk-size sd-comp-mode sidedata-offset sd-chunk-size
176 0 -1 0 a0c8bcbbb45c63b90b70ad007bf38961f64f2af0 -1 0000000000000000000000000000000000000000 -1 0000000000000000000000000000000000000000 43 0 0 2 0 44 inline 0 0
176 0 -1 0 a0c8bcbbb45c63b90b70ad007bf38961f64f2af0 -1 0000000000000000000000000000000000000000 -1 0000000000000000000000000000000000000000 43 0 0 2 0 44 inline 0 0
177 1 -1 1 57faf8a737ae7faf490582941a82319ba6529dca 0 a0c8bcbbb45c63b90b70ad007bf38961f64f2af0 -1 0000000000000000000000000000000000000000 0 1 0 2 44 0 inline 0 0
177 1 -1 1 57faf8a737ae7faf490582941a82319ba6529dca 0 a0c8bcbbb45c63b90b70ad007bf38961f64f2af0 -1 0000000000000000000000000000000000000000 0 1 0 2 44 0 inline 0 0
178 2 -1 2 a35b103209548032201c16c7688cb2657f037a38 1 57faf8a737ae7faf490582941a82319ba6529dca -1 0000000000000000000000000000000000000000 43 2 0 2 44 44 inline 0 0
178 2 -1 2 a35b103209548032201c16c7688cb2657f037a38 1 57faf8a737ae7faf490582941a82319ba6529dca -1 0000000000000000000000000000000000000000 43 2 0 2 44 44 inline 0 0
179 $ hg debugindex a
179 $ hg debugindex a
180 rev linkrev nodeid p1-nodeid p2-nodeid
180 rev linkrev nodeid p1-nodeid p2-nodeid
181 0 0 b789fdd96dc2 000000000000 000000000000
181 0 0 b789fdd96dc2 000000000000 000000000000
182 $ hg debugindex --debug a
182 $ hg debugindex --debug a
183 rev rank linkrev nodeid p1-rev p1-nodeid p2-rev p2-nodeid full-size delta-base flags comp-mode data-offset chunk-size sd-comp-mode sidedata-offset sd-chunk-size
183 rev rank linkrev nodeid p1-rev p1-nodeid p2-rev p2-nodeid full-size delta-base flags comp-mode data-offset chunk-size sd-comp-mode sidedata-offset sd-chunk-size
184 0 -1 0 b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 -1 0000000000000000000000000000000000000000 -1 0000000000000000000000000000000000000000 2 0 0 2 0 3 inline 0 0
184 0 -1 0 b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 -1 0000000000000000000000000000000000000000 -1 0000000000000000000000000000000000000000 2 0 0 2 0 3 inline 0 0
185
185
186 debugdeltachain basic output
187
188 #if reporevlogstore pure
189 $ hg debugindexstats
189 $ hg debugindexstats
190 abort: debugindexstats only works with native code
190 abort: debugindexstats only works with native code
191 [255]
191 [255]
192 #endif
192 #endif
193 #if reporevlogstore no-pure
193 #if reporevlogstore no-pure
194 $ hg debugindexstats
194 $ hg debugindexstats
195 node trie capacity: 4
195 node trie capacity: 4
196 node trie count: 2
196 node trie count: 2
197 node trie depth: 1
197 node trie depth: 1
198 node trie last rev scanned: -1 (no-rust !)
198 node trie last rev scanned: -1 (no-rust !)
199 node trie last rev scanned: 3 (rust !)
199 node trie last rev scanned: 3 (rust !)
200 node trie lookups: 4 (no-rust !)
200 node trie lookups: 4 (no-rust !)
201 node trie lookups: 2 (rust !)
201 node trie lookups: 2 (rust !)
202 node trie misses: 1
202 node trie misses: 1
203 node trie splits: 1
203 node trie splits: 1
204 revs in memory: 3
204 revs in memory: 3
205 #endif
205 #endif
206
206
207 #if reporevlogstore no-pure
207 #if reporevlogstore no-pure
208 $ hg debugdeltachain -m --all-info
208 $ hg debugdeltachain -m --all-info
209 rev p1 p2 chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio readsize largestblk rddensity srchunks
209 rev p1 p2 chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio readsize largestblk rddensity srchunks
210 0 -1 -1 1 1 -1 base 44 43 44 1.02326 44 0 0.00000 44 44 1.00000 1
210 0 -1 -1 1 1 -1 base 44 43 44 1.02326 44 0 0.00000 44 44 1.00000 1
211 1 0 -1 2 1 -1 base 0 0 0 0.00000 0 0 0.00000 0 0 1.00000 1
211 1 0 -1 2 1 -1 base 0 0 0 0.00000 0 0 0.00000 0 0 1.00000 1
212 2 1 -1 3 1 -1 base 44 43 44 1.02326 44 0 0.00000 44 44 1.00000 1
212 2 1 -1 3 1 -1 base 44 43 44 1.02326 44 0 0.00000 44 44 1.00000 1
213
213
214 $ hg debugdeltachain -m -T '{rev} {chainid} {chainlen}\n'
214 $ hg debugdeltachain -m -T '{rev} {chainid} {chainlen}\n'
215 0 1 1
215 0 1 1
216 1 2 1
216 1 2 1
217 2 3 1
217 2 3 1
218
218
219 $ hg debugdeltachain -m -Tjson --size-info
219 $ hg debugdeltachain -m -Tjson --size-info
220 [
220 [
221 {
221 {
222 "chainid": 1,
222 "chainid": 1,
223 "chainlen": 1,
223 "chainlen": 1,
224 "chainratio": 1.0232558139534884,
224 "chainratio": 1.0232558139534884,
225 "chainsize": 44,
225 "chainsize": 44,
226 "compsize": 44,
226 "compsize": 44,
227 "deltatype": "base",
227 "deltatype": "base",
228 "p1": -1,
228 "p1": -1,
229 "p2": -1,
229 "p2": -1,
230 "prevrev": -1,
230 "prevrev": -1,
231 "rev": 0,
231 "rev": 0,
232 "uncompsize": 43
232 "uncompsize": 43
233 },
233 },
234 {
234 {
235 "chainid": 2,
235 "chainid": 2,
236 "chainlen": 1,
236 "chainlen": 1,
237 "chainratio": 0,
237 "chainratio": 0,
238 "chainsize": 0,
238 "chainsize": 0,
239 "compsize": 0,
239 "compsize": 0,
240 "deltatype": "base",
240 "deltatype": "base",
241 "p1": 0,
241 "p1": 0,
242 "p2": -1,
242 "p2": -1,
243 "prevrev": -1,
243 "prevrev": -1,
244 "rev": 1,
244 "rev": 1,
245 "uncompsize": 0
245 "uncompsize": 0
246 },
246 },
247 {
247 {
248 "chainid": 3,
248 "chainid": 3,
249 "chainlen": 1,
249 "chainlen": 1,
250 "chainratio": 1.0232558139534884,
250 "chainratio": 1.0232558139534884,
251 "chainsize": 44,
251 "chainsize": 44,
252 "compsize": 44,
252 "compsize": 44,
253 "deltatype": "base",
253 "deltatype": "base",
254 "p1": 1,
254 "p1": 1,
255 "p2": -1,
255 "p2": -1,
256 "prevrev": -1,
256 "prevrev": -1,
257 "rev": 2,
257 "rev": 2,
258 "uncompsize": 43
258 "uncompsize": 43
259 }
259 }
260 ]
260 ]
261
261
262 $ hg debugdeltachain -m -Tjson --all-info
262 $ hg debugdeltachain -m -Tjson --all-info
263 [
263 [
264 {
264 {
265 "chainid": 1,
265 "chainid": 1,
266 "chainlen": 1,
266 "chainlen": 1,
267 "chainratio": 1.0232558139534884,
267 "chainratio": 1.0232558139534884,
268 "chainsize": 44,
268 "chainsize": 44,
269 "compsize": 44,
269 "compsize": 44,
270 "deltatype": "base",
270 "deltatype": "base",
271 "extradist": 0,
271 "extradist": 0,
272 "extraratio": 0.0,
272 "extraratio": 0.0,
273 "largestblock": 44,
273 "largestblock": 44,
274 "lindist": 44,
274 "lindist": 44,
275 "p1": -1,
275 "p1": -1,
276 "p2": -1,
276 "p2": -1,
277 "prevrev": -1,
277 "prevrev": -1,
278 "readdensity": 1.0,
278 "readdensity": 1.0,
279 "readsize": 44,
279 "readsize": 44,
280 "rev": 0,
280 "rev": 0,
281 "srchunks": 1,
281 "srchunks": 1,
282 "uncompsize": 43
282 "uncompsize": 43
283 },
283 },
284 {
284 {
285 "chainid": 2,
285 "chainid": 2,
286 "chainlen": 1,
286 "chainlen": 1,
287 "chainratio": 0,
287 "chainratio": 0,
288 "chainsize": 0,
288 "chainsize": 0,
289 "compsize": 0,
289 "compsize": 0,
290 "deltatype": "base",
290 "deltatype": "base",
291 "extradist": 0,
291 "extradist": 0,
292 "extraratio": 0,
292 "extraratio": 0,
293 "largestblock": 0,
293 "largestblock": 0,
294 "lindist": 0,
294 "lindist": 0,
295 "p1": 0,
295 "p1": 0,
296 "p2": -1,
296 "p2": -1,
297 "prevrev": -1,
297 "prevrev": -1,
298 "readdensity": 1,
298 "readdensity": 1,
299 "readsize": 0,
299 "readsize": 0,
300 "rev": 1,
300 "rev": 1,
301 "srchunks": 1,
301 "srchunks": 1,
302 "uncompsize": 0
302 "uncompsize": 0
303 },
303 },
304 {
304 {
305 "chainid": 3,
305 "chainid": 3,
306 "chainlen": 1,
306 "chainlen": 1,
307 "chainratio": 1.0232558139534884,
307 "chainratio": 1.0232558139534884,
308 "chainsize": 44,
308 "chainsize": 44,
309 "compsize": 44,
309 "compsize": 44,
310 "deltatype": "base",
310 "deltatype": "base",
311 "extradist": 0,
311 "extradist": 0,
312 "extraratio": 0.0,
312 "extraratio": 0.0,
313 "largestblock": 44,
313 "largestblock": 44,
314 "lindist": 44,
314 "lindist": 44,
315 "p1": 1,
315 "p1": 1,
316 "p2": -1,
316 "p2": -1,
317 "prevrev": -1,
317 "prevrev": -1,
318 "readdensity": 1.0,
318 "readdensity": 1.0,
319 "readsize": 44,
319 "readsize": 44,
320 "rev": 2,
320 "rev": 2,
321 "srchunks": 1,
321 "srchunks": 1,
322 "uncompsize": 43
322 "uncompsize": 43
323 }
323 }
324 ]
324 ]
325
325
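The -Tjson form above makes the chain data easy to post-process outside the test harness. A minimal sketch of such post-processing (not part of this test suite; it only assumes the keys shown in the JSON output above and that hg is available on PATH):

  import json
  import subprocess

  # Run debugdeltachain on the manifest and parse the JSON template output.
  raw = subprocess.check_output(
      ["hg", "debugdeltachain", "-m", "-Tjson", "--size-info"]
  )
  entries = json.loads(raw)

  # Flag revisions whose stored chain is larger than the uncompressed text,
  # i.e. the chainratio printed above is greater than 1.0.
  for e in entries:
      if e["chainratio"] > 1.0:
          print(e["rev"], e["deltatype"], e["chainsize"], e["uncompsize"])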
326 debugdeltachain with sparse read enabled
327
328 $ cat >> $HGRCPATH <<EOF
329 > [experimental]
330 > sparse-read = True
331 > EOF
332 $ hg debugdeltachain -m --all-info
332 $ hg debugdeltachain -m --all-info
333 rev p1 p2 chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio readsize largestblk rddensity srchunks
333 rev p1 p2 chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio readsize largestblk rddensity srchunks
334 0 -1 -1 1 1 -1 base 44 43 44 1.02326 44 0 0.00000 44 44 1.00000 1
334 0 -1 -1 1 1 -1 base 44 43 44 1.02326 44 0 0.00000 44 44 1.00000 1
335 1 0 -1 2 1 -1 base 0 0 0 0.00000 0 0 0.00000 0 0 1.00000 1
335 1 0 -1 2 1 -1 base 0 0 0 0.00000 0 0 0.00000 0 0 1.00000 1
336 2 1 -1 3 1 -1 base 44 43 44 1.02326 44 0 0.00000 44 44 1.00000 1
336 2 1 -1 3 1 -1 base 44 43 44 1.02326 44 0 0.00000 44 44 1.00000 1
337
337
338 $ hg debugdeltachain -m --sparse-info -T '{rev} {chainid} {chainlen} {readsize} {largestblock} {readdensity}\n'
338 $ hg debugdeltachain -m --sparse-info -T '{rev} {chainid} {chainlen} {readsize} {largestblock} {readdensity}\n'
339 0 1 1 44 44 1.0
339 0 1 1 44 44 1.0
340 1 2 1 0 0 1
340 1 2 1 0 0 1
341 2 3 1 44 44 1.0
341 2 3 1 44 44 1.0
342
342
343 $ hg debugdeltachain -m -Tjson --sparse-info
343 $ hg debugdeltachain -m -Tjson --sparse-info
344 [
344 [
345 {
345 {
346 "chainid": 1,
346 "chainid": 1,
347 "chainlen": 1,
347 "chainlen": 1,
348 "deltatype": "base",
348 "deltatype": "base",
349 "largestblock": 44,
349 "largestblock": 44,
350 "p1": -1,
350 "p1": -1,
351 "p2": -1,
351 "p2": -1,
352 "prevrev": -1,
352 "prevrev": -1,
353 "readdensity": 1.0,
353 "readdensity": 1.0,
354 "readsize": 44,
354 "readsize": 44,
355 "rev": 0,
355 "rev": 0,
356 "srchunks": 1
356 "srchunks": 1
357 },
357 },
358 {
358 {
359 "chainid": 2,
359 "chainid": 2,
360 "chainlen": 1,
360 "chainlen": 1,
361 "deltatype": "base",
361 "deltatype": "base",
362 "largestblock": 0,
362 "largestblock": 0,
363 "p1": 0,
363 "p1": 0,
364 "p2": -1,
364 "p2": -1,
365 "prevrev": -1,
365 "prevrev": -1,
366 "readdensity": 1,
366 "readdensity": 1,
367 "readsize": 0,
367 "readsize": 0,
368 "rev": 1,
368 "rev": 1,
369 "srchunks": 1
369 "srchunks": 1
370 },
370 },
371 {
371 {
372 "chainid": 3,
372 "chainid": 3,
373 "chainlen": 1,
373 "chainlen": 1,
374 "deltatype": "base",
374 "deltatype": "base",
375 "largestblock": 44,
375 "largestblock": 44,
376 "p1": 1,
376 "p1": 1,
377 "p2": -1,
377 "p2": -1,
378 "prevrev": -1,
378 "prevrev": -1,
379 "readdensity": 1.0,
379 "readdensity": 1.0,
380 "readsize": 44,
380 "readsize": 44,
381 "rev": 2,
381 "rev": 2,
382 "srchunks": 1
382 "srchunks": 1
383 }
383 }
384 ]
384 ]
385
385
386 $ hg debugdeltachain -m -Tjson --all-info
386 $ hg debugdeltachain -m -Tjson --all-info
387 [
387 [
388 {
388 {
389 "chainid": 1,
389 "chainid": 1,
390 "chainlen": 1,
390 "chainlen": 1,
391 "chainratio": 1.0232558139534884,
391 "chainratio": 1.0232558139534884,
392 "chainsize": 44,
392 "chainsize": 44,
393 "compsize": 44,
393 "compsize": 44,
394 "deltatype": "base",
394 "deltatype": "base",
395 "extradist": 0,
395 "extradist": 0,
396 "extraratio": 0.0,
396 "extraratio": 0.0,
397 "largestblock": 44,
397 "largestblock": 44,
398 "lindist": 44,
398 "lindist": 44,
399 "p1": -1,
399 "p1": -1,
400 "p2": -1,
400 "p2": -1,
401 "prevrev": -1,
401 "prevrev": -1,
402 "readdensity": 1.0,
402 "readdensity": 1.0,
403 "readsize": 44,
403 "readsize": 44,
404 "rev": 0,
404 "rev": 0,
405 "srchunks": 1,
405 "srchunks": 1,
406 "uncompsize": 43
406 "uncompsize": 43
407 },
407 },
408 {
408 {
409 "chainid": 2,
409 "chainid": 2,
410 "chainlen": 1,
410 "chainlen": 1,
411 "chainratio": 0,
411 "chainratio": 0,
412 "chainsize": 0,
412 "chainsize": 0,
413 "compsize": 0,
413 "compsize": 0,
414 "deltatype": "base",
414 "deltatype": "base",
415 "extradist": 0,
415 "extradist": 0,
416 "extraratio": 0,
416 "extraratio": 0,
417 "largestblock": 0,
417 "largestblock": 0,
418 "lindist": 0,
418 "lindist": 0,
419 "p1": 0,
419 "p1": 0,
420 "p2": -1,
420 "p2": -1,
421 "prevrev": -1,
421 "prevrev": -1,
422 "readdensity": 1,
422 "readdensity": 1,
423 "readsize": 0,
423 "readsize": 0,
424 "rev": 1,
424 "rev": 1,
425 "srchunks": 1,
425 "srchunks": 1,
426 "uncompsize": 0
426 "uncompsize": 0
427 },
427 },
428 {
428 {
429 "chainid": 3,
429 "chainid": 3,
430 "chainlen": 1,
430 "chainlen": 1,
431 "chainratio": 1.0232558139534884,
431 "chainratio": 1.0232558139534884,
432 "chainsize": 44,
432 "chainsize": 44,
433 "compsize": 44,
433 "compsize": 44,
434 "deltatype": "base",
434 "deltatype": "base",
435 "extradist": 0,
435 "extradist": 0,
436 "extraratio": 0.0,
436 "extraratio": 0.0,
437 "largestblock": 44,
437 "largestblock": 44,
438 "lindist": 44,
438 "lindist": 44,
439 "p1": 1,
439 "p1": 1,
440 "p2": -1,
440 "p2": -1,
441 "prevrev": -1,
441 "prevrev": -1,
442 "readdensity": 1.0,
442 "readdensity": 1.0,
443 "readsize": 44,
443 "readsize": 44,
444 "rev": 2,
444 "rev": 2,
445 "srchunks": 1,
445 "srchunks": 1,
446 "uncompsize": 43
446 "uncompsize": 43
447 }
447 }
448 ]
448 ]
449
449
450 $ printf "This test checks things.\n" >> a
450 $ printf "This test checks things.\n" >> a
451 $ hg ci -m a
451 $ hg ci -m a
452 $ hg branch other
452 $ hg branch other
453 marked working directory as branch other
453 marked working directory as branch other
454 (branches are permanent and global, did you want a bookmark?)
454 (branches are permanent and global, did you want a bookmark?)
455 $ for i in `$TESTDIR/seq.py 5`; do
455 $ for i in `$TESTDIR/seq.py 5`; do
456 > printf "shorter ${i}" >> a
456 > printf "shorter ${i}" >> a
457 > hg ci -m "a other:$i"
457 > hg ci -m "a other:$i"
458 > hg up -q default
458 > hg up -q default
459 > printf "for the branch default we want longer chains: ${i}" >> a
459 > printf "for the branch default we want longer chains: ${i}" >> a
460 > hg ci -m "a default:$i"
460 > hg ci -m "a default:$i"
461 > hg up -q other
461 > hg up -q other
462 > done
462 > done
463 $ hg debugdeltachain a -T '{rev} {srchunks}\n' --all-info\
463 $ hg debugdeltachain a -T '{rev} {srchunks}\n' --all-info\
464 > --config experimental.sparse-read.density-threshold=0.50 \
464 > --config experimental.sparse-read.density-threshold=0.50 \
465 > --config experimental.sparse-read.min-gap-size=0
465 > --config experimental.sparse-read.min-gap-size=0
466 0 1
466 0 1
467 1 1
467 1 1
468 2 1
468 2 1
469 3 1
469 3 1
470 4 1
470 4 1
471 5 1
471 5 1
472 6 1
472 6 1
473 7 1
473 7 1
474 8 1
474 8 1
475 9 1
475 9 1
476 10 2 (no-zstd !)
476 10 2 (no-zstd !)
477 10 1 (zstd !)
477 10 1 (zstd !)
478 11 1
478 11 1
479 $ hg --config extensions.strip= strip --no-backup -r 1
479 $ hg --config extensions.strip= strip --no-backup -r 1
480 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
480 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
481
481
482 Test max chain len: with maxchainlen=4 the chainlen column in the debugrevlog -d output below resets to 0 (a new full revision) once a chain would exceed four deltas
483 $ cat >> $HGRCPATH << EOF
484 > [format]
485 > maxchainlen=4
486 > EOF
487
487
488 $ printf "This test checks if maxchainlen config value is respected also it can serve as basic test for debugrevlog -d <file>.\n" >> a
488 $ printf "This test checks if maxchainlen config value is respected also it can serve as basic test for debugrevlog -d <file>.\n" >> a
489 $ hg ci -m a
489 $ hg ci -m a
490 $ printf "b\n" >> a
490 $ printf "b\n" >> a
491 $ hg ci -m a
491 $ hg ci -m a
492 $ printf "c\n" >> a
492 $ printf "c\n" >> a
493 $ hg ci -m a
493 $ hg ci -m a
494 $ printf "d\n" >> a
494 $ printf "d\n" >> a
495 $ hg ci -m a
495 $ hg ci -m a
496 $ printf "e\n" >> a
496 $ printf "e\n" >> a
497 $ hg ci -m a
497 $ hg ci -m a
498 $ printf "f\n" >> a
498 $ printf "f\n" >> a
499 $ hg ci -m a
499 $ hg ci -m a
500 $ printf 'g\n' >> a
500 $ printf 'g\n' >> a
501 $ hg ci -m a
501 $ hg ci -m a
502 $ printf 'h\n' >> a
502 $ printf 'h\n' >> a
503 $ hg ci -m a
503 $ hg ci -m a
504
504
505 $ hg debugrevlog -d a
505 $ hg debugrevlog -d a
506 # rev p1rev p2rev start end deltastart base p1 p2 rawsize totalsize compression heads chainlen
506 # rev p1rev p2rev start end deltastart base p1 p2 rawsize totalsize compression heads chainlen
507 0 -1 -1 0 ??? 0 0 0 0 ??? ???? ? 1 0 (glob)
507 0 -1 -1 0 ??? 0 0 0 0 ??? ???? ? 1 0 (glob)
508 1 0 -1 ??? ??? 0 0 0 0 ??? ???? ? 1 1 (glob)
508 1 0 -1 ??? ??? 0 0 0 0 ??? ???? ? 1 1 (glob)
509 2 1 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 2 (glob)
509 2 1 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 2 (glob)
510 3 2 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 3 (glob)
510 3 2 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 3 (glob)
511 4 3 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 4 (glob)
511 4 3 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 4 (glob)
512 5 4 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 0 (glob)
512 5 4 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 0 (glob)
513 6 5 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 1 (glob)
513 6 5 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 1 (glob)
514 7 6 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 2 (glob)
514 7 6 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 2 (glob)
515 8 7 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 3 (glob)
515 8 7 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 3 (glob)
516 #endif
516 #endif
517
517
518 Test debuglocks command:
519
520 $ hg debuglocks
521 lock: free
522 wlock: free
523
524 * Test setting the lock
525
526 waitlock <file> will wait for the file to be created. If it is not created
527 within a reasonable amount of time, it displays an error message and returns 1
528 $ waitlock() {
529 > start=`date +%s`
530 > timeout=5
531 > while [ \( ! -f $1 \) -a \( ! -L $1 \) ]; do
532 > now=`date +%s`
533 > if [ "`expr $now - $start`" -gt $timeout ]; then
534 > echo "timeout: $1 was not created in $timeout seconds"
535 > return 1
536 > fi
537 > sleep 0.1
538 > done
539 > }
540 $ dolock() {
541 > {
542 > waitlock .hg/unlock
543 > rm -f .hg/unlock
544 > echo y
545 > } | hg debuglocks "$@" > /dev/null
546 > }
547 $ dolock -s &
547 $ dolock -s &
548 $ waitlock .hg/store/lock
548 $ waitlock .hg/store/lock
549
549
550 $ hg debuglocks
550 $ hg debuglocks
551 lock: user *, process * (*s) (glob)
551 lock: user *, process * (*s) (glob)
552 wlock: free
552 wlock: free
553 [1]
553 [1]
554 $ touch .hg/unlock
554 $ touch .hg/unlock
555 $ wait
555 $ wait
556 $ [ -f .hg/store/lock ] || echo "There is no lock"
556 $ [ -f .hg/store/lock ] || echo "There is no lock"
557 There is no lock
557 There is no lock
558
558
559 * Test setting the wlock
559 * Test setting the wlock
560
560
561 $ dolock -S &
561 $ dolock -S &
562 $ waitlock .hg/wlock
562 $ waitlock .hg/wlock
563
563
564 $ hg debuglocks
564 $ hg debuglocks
565 lock: free
565 lock: free
566 wlock: user *, process * (*s) (glob)
566 wlock: user *, process * (*s) (glob)
567 [1]
567 [1]
568 $ touch .hg/unlock
568 $ touch .hg/unlock
569 $ wait
569 $ wait
570 $ [ -f .hg/wlock ] || echo "There is no wlock"
570 $ [ -f .hg/wlock ] || echo "There is no wlock"
571 There is no wlock
571 There is no wlock
572
572
573 * Test setting both locks
573 * Test setting both locks
574
574
575 $ dolock -Ss &
575 $ dolock -Ss &
576 $ waitlock .hg/wlock && waitlock .hg/store/lock
576 $ waitlock .hg/wlock && waitlock .hg/store/lock
577
577
578 $ hg debuglocks
578 $ hg debuglocks
579 lock: user *, process * (*s) (glob)
579 lock: user *, process * (*s) (glob)
580 wlock: user *, process * (*s) (glob)
580 wlock: user *, process * (*s) (glob)
581 [2]
581 [2]
582
582
583 * Test failing to set a lock
583 * Test failing to set a lock
584
584
585 $ hg debuglocks -s
585 $ hg debuglocks -s
586 abort: lock is already held
586 abort: lock is already held
587 [255]
587 [255]
588
588
589 $ hg debuglocks -S
589 $ hg debuglocks -S
590 abort: wlock is already held
590 abort: wlock is already held
591 [255]
591 [255]
592
592
593 $ touch .hg/unlock
593 $ touch .hg/unlock
594 $ wait
594 $ wait
595
595
596 $ hg debuglocks
596 $ hg debuglocks
597 lock: free
597 lock: free
598 wlock: free
598 wlock: free
599
599
600 * Test forcing the lock
600 * Test forcing the lock
601
601
602 $ dolock -s &
602 $ dolock -s &
603 $ waitlock .hg/store/lock
603 $ waitlock .hg/store/lock
604
604
605 $ hg debuglocks
605 $ hg debuglocks
606 lock: user *, process * (*s) (glob)
606 lock: user *, process * (*s) (glob)
607 wlock: free
607 wlock: free
608 [1]
608 [1]
609
609
610 $ hg debuglocks -L
610 $ hg debuglocks -L
611
611
612 $ hg debuglocks
612 $ hg debuglocks
613 lock: free
613 lock: free
614 wlock: free
614 wlock: free
615
615
616 $ touch .hg/unlock
616 $ touch .hg/unlock
617 $ wait
617 $ wait
618
618
619 * Test forcing the wlock
619 * Test forcing the wlock
620
620
621 $ dolock -S &
621 $ dolock -S &
622 $ waitlock .hg/wlock
622 $ waitlock .hg/wlock
623
623
624 $ hg debuglocks
624 $ hg debuglocks
625 lock: free
625 lock: free
626 wlock: user *, process * (*s) (glob)
626 wlock: user *, process * (*s) (glob)
627 [1]
627 [1]
628
628
629 $ hg debuglocks -W
629 $ hg debuglocks -W
630
630
631 $ hg debuglocks
631 $ hg debuglocks
632 lock: free
632 lock: free
633 wlock: free
633 wlock: free
634
634
635 $ touch .hg/unlock
635 $ touch .hg/unlock
636 $ wait
636 $ wait
637
637
638 Test WdirUnsupported exception
638 Test WdirUnsupported exception
639
639
640 $ hg debugdata -c ffffffffffffffffffffffffffffffffffffffff
640 $ hg debugdata -c ffffffffffffffffffffffffffffffffffffffff
641 abort: working directory revision cannot be specified
641 abort: working directory revision cannot be specified
642 [255]
642 [255]
643
643
644 Test cache warming command
644 Test cache warming command
645
645
646 $ rm -rf .hg/cache/
646 $ rm -rf .hg/cache/
647 $ hg debugupdatecaches --debug
647 $ hg debugupdatecaches --debug
648 updating the branch cache
648 updating the branch cache
649 $ ls -r .hg/cache/*
649 $ ls -r .hg/cache/*
650 .hg/cache/tags2-served
650 .hg/cache/tags2-served
651 .hg/cache/tags2
651 .hg/cache/tags2
652 .hg/cache/rbc-revs-v1
652 .hg/cache/rbc-revs-v1
653 .hg/cache/rbc-names-v1
653 .hg/cache/rbc-names-v1
654 .hg/cache/hgtagsfnodes1
654 .hg/cache/hgtagsfnodes1
655 .hg/cache/branch2-visible-hidden
655 .hg/cache/branch2-visible-hidden
656 .hg/cache/branch2-visible
656 .hg/cache/branch2-visible
657 .hg/cache/branch2-served.hidden
657 .hg/cache/branch2-served.hidden
658 .hg/cache/branch2-served
658 .hg/cache/branch2-served
659 .hg/cache/branch2-immutable
659 .hg/cache/branch2-immutable
660 .hg/cache/branch2-base
660 .hg/cache/branch2-base
661
661
662 Test debugcolor
662 Test debugcolor
663
663
664 #if no-windows
664 #if no-windows
665 $ hg debugcolor --style --color always | grep -E 'mode|style|log\.'
665 $ hg debugcolor --style --color always | grep -E 'mode|style|log\.'
666 color mode: 'ansi'
666 color mode: 'ansi'
667 available style:
667 available style:
668 \x1b[0;33mlog.changeset\x1b[0m: \x1b[0;33myellow\x1b[0m (esc)
668 \x1b[0;33mlog.changeset\x1b[0m: \x1b[0;33myellow\x1b[0m (esc)
669 #endif
669 #endif
670
670
671 $ hg debugcolor --style --color never
671 $ hg debugcolor --style --color never
672 color mode: None
672 color mode: None
673 available style:
673 available style:
674
674
675 $ cd ..
675 $ cd ..
676
676
677 Test internal debugstacktrace command
677 Test internal debugstacktrace command
678
678
679 $ cat > debugstacktrace.py << EOF
679 $ cat > debugstacktrace.py << EOF
680 > from mercurial import (
680 > from mercurial import (
681 > util,
681 > util,
682 > )
682 > )
683 > from mercurial.utils import (
683 > from mercurial.utils import (
684 > procutil,
684 > procutil,
685 > )
685 > )
686 > def f():
686 > def f():
687 > util.debugstacktrace(f=procutil.stdout)
687 > util.debugstacktrace(f=procutil.stdout)
688 > g()
688 > g()
689 > def g():
689 > def g():
690 > util.dst(b'hello from g\\n', skip=1)
690 > util.dst(b'hello from g\\n', skip=1)
691 > h()
691 > h()
692 > def h():
692 > def h():
693 > util.dst(b'hi ...\\nfrom h hidden in g', 1, depth=2)
693 > util.dst(b'hi ...\\nfrom h hidden in g', 1, depth=2)
694 > f()
694 > f()
695 > EOF
695 > EOF
696 $ "$PYTHON" debugstacktrace.py
696 $ "$PYTHON" debugstacktrace.py
697 stacktrace at:
697 stacktrace at:
698 *debugstacktrace.py:15 in * (glob)
698 *debugstacktrace.py:15 in * (glob)
699 *debugstacktrace.py:8 in f (glob)
699 *debugstacktrace.py:8 in f (glob)
700 hello from g at:
700 hello from g at:
701 *debugstacktrace.py:15 in * (glob)
701 *debugstacktrace.py:15 in * (glob)
702 *debugstacktrace.py:9 in f (glob)
702 *debugstacktrace.py:9 in f (glob)
703 hi ...
703 hi ...
704 from h hidden in g at:
704 from h hidden in g at:
705 *debugstacktrace.py:9 in f (glob)
705 *debugstacktrace.py:9 in f (glob)
706 *debugstacktrace.py:12 in g (glob)
706 *debugstacktrace.py:12 in g (glob)
707
707
708 Test debugcapabilities command:
708 Test debugcapabilities command:
709
709
710 $ hg debugcapabilities ./debugrevlog/
710 $ hg debugcapabilities ./debugrevlog/
711 Main capabilities:
711 Main capabilities:
712 branchmap
712 branchmap
713 $USUAL_BUNDLE2_CAPS$
713 $USUAL_BUNDLE2_CAPS$
714 getbundle
714 getbundle
715 known
715 known
716 lookup
716 lookup
717 pushkey
717 pushkey
718 unbundle
718 unbundle
719 Bundle2 capabilities:
719 Bundle2 capabilities:
720 HG20
720 HG20
721 bookmarks
721 bookmarks
722 changegroup
722 changegroup
723 01
723 01
724 02
724 02
725 03
725 03
726 checkheads
726 checkheads
727 related
727 related
728 digests
728 digests
729 md5
729 md5
730 sha1
730 sha1
731 sha512
731 sha512
732 error
732 error
733 abort
733 abort
734 unsupportedcontent
734 unsupportedcontent
735 pushraced
735 pushraced
736 pushkey
736 pushkey
737 hgtagsfnodes
737 hgtagsfnodes
738 listkeys
738 listkeys
739 phases
739 phases
740 heads
740 heads
741 pushkey
741 pushkey
742 remote-changegroup
742 remote-changegroup
743 http
743 http
744 https
744 https
745 stream
745 stream
746 v2
746 v2
747
747
748 Test debugpeer
748 Test debugpeer
749
749
750 $ hg debugpeer ssh://user@dummy/debugrevlog
750 $ hg debugpeer ssh://user@dummy/debugrevlog
751 url: ssh://user@dummy/debugrevlog
751 url: ssh://user@dummy/debugrevlog
752 local: no
752 local: no
753 pushable: yes
753 pushable: yes
754
754
755 #if rust
755 #if rust
756
756
757 $ hg --debug debugpeer ssh://user@dummy/debugrevlog
757 $ hg --debug debugpeer ssh://user@dummy/debugrevlog
758 running .* ".*[/\\]dummyssh" ['"]user@dummy['"] ['"]hg -R debugrevlog serve --stdio['"] (re)
758 running .* ".*[/\\]dummyssh" ['"]user@dummy['"] ['"]hg -R debugrevlog serve --stdio['"] (re)
759 devel-peer-request: hello+between
759 devel-peer-request: hello+between
760 devel-peer-request: pairs: 81 bytes
760 devel-peer-request: pairs: 81 bytes
761 sending hello command
761 sending hello command
762 sending between command
762 sending between command
763 remote: 473
763 remote: 473
764 remote: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlog-compression-zstd,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
764 remote: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlog-compression-zstd,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
765 remote: 1
765 remote: 1
766 devel-peer-request: protocaps
766 devel-peer-request: protocaps
767 devel-peer-request: caps: * bytes (glob)
767 devel-peer-request: caps: * bytes (glob)
768 sending protocaps command
768 sending protocaps command
769 url: ssh://user@dummy/debugrevlog
769 url: ssh://user@dummy/debugrevlog
770 local: no
770 local: no
771 pushable: yes
771 pushable: yes
772
772
773 #endif
773 #endif
774
774
775 #if no-rust zstd
775 #if no-rust zstd
776
776
777 $ hg --debug debugpeer ssh://user@dummy/debugrevlog
777 $ hg --debug debugpeer ssh://user@dummy/debugrevlog
778 running .* ".*[/\\]dummyssh" ['"]user@dummy['"] ['"]hg -R debugrevlog serve --stdio['"] (re)
778 running .* ".*[/\\]dummyssh" ['"]user@dummy['"] ['"]hg -R debugrevlog serve --stdio['"] (re)
779 devel-peer-request: hello+between
779 devel-peer-request: hello+between
780 devel-peer-request: pairs: 81 bytes
780 devel-peer-request: pairs: 81 bytes
781 sending hello command
781 sending hello command
782 sending between command
782 sending between command
783 remote: 473
783 remote: 473
784 remote: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlog-compression-zstd,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
784 remote: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlog-compression-zstd,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
785 remote: 1
785 remote: 1
786 devel-peer-request: protocaps
786 devel-peer-request: protocaps
787 devel-peer-request: caps: * bytes (glob)
787 devel-peer-request: caps: * bytes (glob)
788 sending protocaps command
788 sending protocaps command
789 url: ssh://user@dummy/debugrevlog
789 url: ssh://user@dummy/debugrevlog
790 local: no
790 local: no
791 pushable: yes
791 pushable: yes
792
792
793 #endif
793 #endif
794
794
795 #if no-rust no-zstd
795 #if no-rust no-zstd
796
796
797 $ hg --debug debugpeer ssh://user@dummy/debugrevlog
797 $ hg --debug debugpeer ssh://user@dummy/debugrevlog
798 running .* ".*[/\\]dummyssh" ['"]user@dummy['"] ['"]hg -R debugrevlog serve --stdio['"] (re)
798 running .* ".*[/\\]dummyssh" ['"]user@dummy['"] ['"]hg -R debugrevlog serve --stdio['"] (re)
799 devel-peer-request: hello+between
799 devel-peer-request: hello+between
800 devel-peer-request: pairs: 81 bytes
800 devel-peer-request: pairs: 81 bytes
801 sending hello command
801 sending hello command
802 sending between command
802 sending between command
803 remote: 449
803 remote: 449
804 remote: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
804 remote: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
805 remote: 1
805 remote: 1
806 devel-peer-request: protocaps
806 devel-peer-request: protocaps
807 devel-peer-request: caps: * bytes (glob)
807 devel-peer-request: caps: * bytes (glob)
808 sending protocaps command
808 sending protocaps command
809 url: ssh://user@dummy/debugrevlog
809 url: ssh://user@dummy/debugrevlog
810 local: no
810 local: no
811 pushable: yes
811 pushable: yes
812
812
813 #endif
813 #endif
814
814
815 Test debugshell
816
817 $ hg debugshell -c 'ui.write(b"%s\n" % ui.username())'
818 test
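debugshell evaluates the -c argument as ordinary Python with ui in scope, so simple repository introspection can also be done inline. A hedged example of the kind of snippet that could be passed to -c, assuming repo is exposed alongside ui:

  # count the revisions in the current repository
  ui.write(b"%d\n" % len(repo))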
@@ -1,121 +1,122 @@
1 Testing cloning with the EOL extension
2
3 $ cat >> $HGRCPATH <<EOF
4 > [extensions]
5 > eol =
6 >
7 > [eol]
8 > native = CRLF
9 > EOF
10
11 setup repository
11 setup repository
12
12
13 $ hg init repo
13 $ hg init repo
14 $ cd repo
14 $ cd repo
15 $ cat > .hgeol <<EOF
15 $ cat > .hgeol <<EOF
16 > [patterns]
16 > [patterns]
17 > **.txt = native
17 > **.txt = native
18 > EOF
18 > EOF
19 $ printf "first\r\nsecond\r\nthird\r\n" > a.txt
19 $ printf "first\r\nsecond\r\nthird\r\n" > a.txt
20 $ hg commit --addremove -m 'checkin'
20 $ hg commit --addremove -m 'checkin'
21 adding .hgeol
21 adding .hgeol
22 adding a.txt
22 adding a.txt
23
23
24 Test commit of removed .hgeol and how it immediately makes the automatic
25 changes explicit and committable.
26
26
27 $ cd ..
27 $ cd ..
28 $ hg clone repo repo-2
28 $ hg clone repo repo-2
29 updating to branch default
29 updating to branch default
30 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
30 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
31 $ cd repo-2
31 $ cd repo-2
32 $ cat a.txt
32 $ cat a.txt
33 first\r (esc)
33 first\r (esc)
34 second\r (esc)
34 second\r (esc)
35 third\r (esc)
35 third\r (esc)
36 $ hg cat a.txt
36 $ hg cat a.txt
37 first
37 first
38 second
38 second
39 third
39 third
40 $ hg remove .hgeol
40 $ hg remove .hgeol
41 $ touch a.txt * # ensure consistent st dirtiness checks, ignoring dirstate timing
42 $ hg st -v --debug
42 $ hg st -v --debug
43 M a.txt
43 M a.txt
44 R .hgeol
44 R .hgeol
45 $ hg commit -m 'remove eol'
45 $ hg commit -m 'remove eol'
46 $ hg exp
46 $ hg exp
47 # HG changeset patch
47 # HG changeset patch
48 # User test
48 # User test
49 # Date 0 0
49 # Date 0 0
50 # Thu Jan 01 00:00:00 1970 +0000
50 # Thu Jan 01 00:00:00 1970 +0000
51 # Node ID 3c20c2d90333b6ecdc8f7aa8f9b73223c7c7a608
51 # Node ID 3c20c2d90333b6ecdc8f7aa8f9b73223c7c7a608
52 # Parent 90f94e2cf4e24628afddd641688dfe4cd476d6e4
52 # Parent 90f94e2cf4e24628afddd641688dfe4cd476d6e4
53 remove eol
53 remove eol
54
54
55 diff -r 90f94e2cf4e2 -r 3c20c2d90333 .hgeol
55 diff -r 90f94e2cf4e2 -r 3c20c2d90333 .hgeol
56 --- a/.hgeol Thu Jan 01 00:00:00 1970 +0000
56 --- a/.hgeol Thu Jan 01 00:00:00 1970 +0000
57 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000
57 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000
58 @@ -1,2 +0,0 @@
58 @@ -1,2 +0,0 @@
59 -[patterns]
59 -[patterns]
60 -**.txt = native
60 -**.txt = native
61 diff -r 90f94e2cf4e2 -r 3c20c2d90333 a.txt
61 diff -r 90f94e2cf4e2 -r 3c20c2d90333 a.txt
62 --- a/a.txt Thu Jan 01 00:00:00 1970 +0000
62 --- a/a.txt Thu Jan 01 00:00:00 1970 +0000
63 +++ b/a.txt Thu Jan 01 00:00:00 1970 +0000
63 +++ b/a.txt Thu Jan 01 00:00:00 1970 +0000
64 @@ -1,3 +1,3 @@
64 @@ -1,3 +1,3 @@
65 -first
65 -first
66 -second
66 -second
67 -third
67 -third
68 +first\r (esc)
68 +first\r (esc)
69 +second\r (esc)
69 +second\r (esc)
70 +third\r (esc)
70 +third\r (esc)
71 $ hg push --quiet
71 $ hg push --quiet
72 $ cd ..
72 $ cd ..
73
73
74 Test clone of repo with .hgeol in working dir, but no .hgeol in default
75 checkout revision tip. The repo is correctly updated to be consistent and have
76 the exact content checked out without filtering, ignoring the current .hgeol in
77 the source repo:
78
79 $ cat repo/.hgeol
79 $ cat repo/.hgeol
80 [patterns]
80 [patterns]
81 **.txt = native
81 **.txt = native
82 $ hg clone repo repo-3 -v --debug
82 $ hg clone repo repo-3 -v --debug
83 linked 7 files
83 linked 8 files (no-rust !)
84 linked 10 files (rust !)
84 updating to branch default
85 updating to branch default
85 resolving manifests
86 resolving manifests
86 branchmerge: False, force: False, partial: False
87 branchmerge: False, force: False, partial: False
87 ancestor: 000000000000, local: 000000000000+, remote: 3c20c2d90333
88 ancestor: 000000000000, local: 000000000000+, remote: 3c20c2d90333
88 calling hook preupdate.eol: hgext.eol.preupdate
89 calling hook preupdate.eol: hgext.eol.preupdate
89 a.txt: remote created -> g
90 a.txt: remote created -> g
90 getting a.txt
91 getting a.txt
91 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
92 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
92 updating the branch cache
93 updating the branch cache
93 $ cd repo-3
94 $ cd repo-3
94
95
95 $ cat a.txt
96 $ cat a.txt
96 first\r (esc)
97 first\r (esc)
97 second\r (esc)
98 second\r (esc)
98 third\r (esc)
99 third\r (esc)
99
100
100 Test clone of revision with .hgeol
101 Test clone of revision with .hgeol
101
102
102 $ cd ..
103 $ cd ..
103 $ hg clone -r 0 repo repo-4
104 $ hg clone -r 0 repo repo-4
104 adding changesets
105 adding changesets
105 adding manifests
106 adding manifests
106 adding file changes
107 adding file changes
107 added 1 changesets with 2 changes to 2 files
108 added 1 changesets with 2 changes to 2 files
108 new changesets 90f94e2cf4e2
109 new changesets 90f94e2cf4e2
109 updating to branch default
110 updating to branch default
110 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
111 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
111 $ cd repo-4
112 $ cd repo-4
112 $ cat .hgeol
113 $ cat .hgeol
113 [patterns]
114 [patterns]
114 **.txt = native
115 **.txt = native
115
116
116 $ cat a.txt
117 $ cat a.txt
117 first\r (esc)
118 first\r (esc)
118 second\r (esc)
119 second\r (esc)
119 third\r (esc)
120 third\r (esc)
120
121
121 $ cd ..
122 $ cd ..
@@ -1,532 +1,538 @@
1 #require repofncache
2
3 An extension which sets the fncache chunksize to 1 byte to make sure the
4 chunking logic does not break
5
6 $ cat > chunksize.py <<EOF
6 $ cat > chunksize.py <<EOF
7 > from mercurial import store
7 > from mercurial import store
8 > store.fncache_chunksize = 1
8 > store.fncache_chunksize = 1
9 > EOF
9 > EOF
10
10
11 $ cat >> $HGRCPATH <<EOF
11 $ cat >> $HGRCPATH <<EOF
12 > [extensions]
12 > [extensions]
13 > chunksize = $TESTTMP/chunksize.py
13 > chunksize = $TESTTMP/chunksize.py
14 > EOF
14 > EOF
15
15
16 Init repo1:
16 Init repo1:
17
17
18 $ hg init repo1
18 $ hg init repo1
19 $ cd repo1
19 $ cd repo1
20 $ echo "some text" > a
20 $ echo "some text" > a
21 $ hg add
21 $ hg add
22 adding a
22 adding a
23 $ hg ci -m first
23 $ hg ci -m first
24 $ cat .hg/store/fncache | sort
24 $ cat .hg/store/fncache | sort
25 data/a.i
25 data/a.i
26
26
27 Testing a.i/b:
27 Testing a.i/b:
28
28
29 $ mkdir a.i
29 $ mkdir a.i
30 $ echo "some other text" > a.i/b
30 $ echo "some other text" > a.i/b
31 $ hg add
31 $ hg add
32 adding a.i/b
32 adding a.i/b
33 $ hg ci -m second
33 $ hg ci -m second
34 $ cat .hg/store/fncache | sort
34 $ cat .hg/store/fncache | sort
35 data/a.i
35 data/a.i
36 data/a.i.hg/b.i
36 data/a.i.hg/b.i
37
37
38 Testing a.i.hg/c:
38 Testing a.i.hg/c:
39
39
40 $ mkdir a.i.hg
40 $ mkdir a.i.hg
41 $ echo "yet another text" > a.i.hg/c
41 $ echo "yet another text" > a.i.hg/c
42 $ hg add
42 $ hg add
43 adding a.i.hg/c
43 adding a.i.hg/c
44 $ hg ci -m third
44 $ hg ci -m third
45 $ cat .hg/store/fncache | sort
45 $ cat .hg/store/fncache | sort
46 data/a.i
46 data/a.i
47 data/a.i.hg.hg/c.i
47 data/a.i.hg.hg/c.i
48 data/a.i.hg/b.i
48 data/a.i.hg/b.i
49
49
50 Testing verify:
50 Testing verify:
51
51
52 $ hg verify -q
52 $ hg verify -q
53
53
54 $ rm .hg/store/fncache
54 $ rm .hg/store/fncache
55
55
56 $ hg verify
56 $ hg verify
57 checking changesets
57 checking changesets
58 checking manifests
58 checking manifests
59 crosschecking files in changesets and manifests
59 crosschecking files in changesets and manifests
60 checking files
60 checking files
61 warning: revlog 'data/a.i' not in fncache!
61 warning: revlog 'data/a.i' not in fncache!
62 warning: revlog 'data/a.i.hg/c.i' not in fncache!
62 warning: revlog 'data/a.i.hg/c.i' not in fncache!
63 warning: revlog 'data/a.i/b.i' not in fncache!
63 warning: revlog 'data/a.i/b.i' not in fncache!
64 checking dirstate
64 checking dirstate
65 checked 3 changesets with 3 changes to 3 files
65 checked 3 changesets with 3 changes to 3 files
66 3 warnings encountered!
66 3 warnings encountered!
67 hint: run "hg debugrebuildfncache" to recover from corrupt fncache
67 hint: run "hg debugrebuildfncache" to recover from corrupt fncache
68
68
69 Follow the hint to make sure it works
69 Follow the hint to make sure it works
70
70
71 $ hg debugrebuildfncache
71 $ hg debugrebuildfncache
72 adding data/a.i
72 adding data/a.i
73 adding data/a.i.hg/c.i
73 adding data/a.i.hg/c.i
74 adding data/a.i/b.i
74 adding data/a.i/b.i
75 3 items added, 0 removed from fncache
75 3 items added, 0 removed from fncache
76
76
77 $ hg verify -q
77 $ hg verify -q
78
78
79 $ cd ..
79 $ cd ..
80
80
81 Non store repo:
81 Non store repo:
82
82
83 $ hg --config format.usestore=False init foo
83 $ hg --config format.usestore=False init foo
84 $ cd foo
84 $ cd foo
85 $ mkdir tst.d
85 $ mkdir tst.d
86 $ echo foo > tst.d/foo
86 $ echo foo > tst.d/foo
87 $ hg ci -Amfoo
87 $ hg ci -Amfoo
88 adding tst.d/foo
88 adding tst.d/foo
89 $ find .hg | sort
89 $ find .hg | sort
90 .hg
90 .hg
91 .hg/00changelog-6b8ab34b.nd (rust !)
92 .hg/00changelog.d
91 .hg/00changelog.i
93 .hg/00changelog.i
94 .hg/00changelog.n (rust !)
92 .hg/00manifest.i
95 .hg/00manifest.i
93 .hg/branch
96 .hg/branch
94 .hg/cache
97 .hg/cache
95 .hg/cache/branch2-served
98 .hg/cache/branch2-served
96 .hg/cache/rbc-names-v1
99 .hg/cache/rbc-names-v1
97 .hg/cache/rbc-revs-v1
100 .hg/cache/rbc-revs-v1
98 .hg/data
101 .hg/data
99 .hg/data/tst.d.hg
102 .hg/data/tst.d.hg
100 .hg/data/tst.d.hg/foo.i
103 .hg/data/tst.d.hg/foo.i
101 .hg/dirstate
104 .hg/dirstate
102 .hg/fsmonitor.state (fsmonitor !)
105 .hg/fsmonitor.state (fsmonitor !)
103 .hg/last-message.txt
106 .hg/last-message.txt
104 .hg/phaseroots
107 .hg/phaseroots
105 .hg/requires
108 .hg/requires
106 .hg/undo
109 .hg/undo
107 .hg/undo.backup.branch.bck
110 .hg/undo.backup.branch.bck
108 .hg/undo.backupfiles
111 .hg/undo.backupfiles
109 .hg/undo.desc
112 .hg/undo.desc
110 .hg/wcache
113 .hg/wcache
111 .hg/wcache/checkisexec (execbit !)
114 .hg/wcache/checkisexec (execbit !)
112 .hg/wcache/checklink (symlink !)
115 .hg/wcache/checklink (symlink !)
113 .hg/wcache/checklink-target (symlink !)
116 .hg/wcache/checklink-target (symlink !)
114 .hg/wcache/manifestfulltextcache (reporevlogstore !)
117 .hg/wcache/manifestfulltextcache (reporevlogstore !)
115 $ cd ..
118 $ cd ..
116
119
117 Non fncache repo:
120 Non fncache repo:
118
121
119 $ hg --config format.usefncache=False init bar
122 $ hg --config format.usefncache=False init bar
120 $ cd bar
123 $ cd bar
121 $ mkdir tst.d
124 $ mkdir tst.d
122 $ echo foo > tst.d/Foo
125 $ echo foo > tst.d/Foo
123 $ hg ci -Amfoo
126 $ hg ci -Amfoo
124 adding tst.d/Foo
127 adding tst.d/Foo
125 $ find .hg | sort
128 $ find .hg | sort
126 .hg
129 .hg
127 .hg/00changelog.i
130 .hg/00changelog.i
128 .hg/branch
131 .hg/branch
129 .hg/cache
132 .hg/cache
130 .hg/cache/branch2-served
133 .hg/cache/branch2-served
131 .hg/cache/rbc-names-v1
134 .hg/cache/rbc-names-v1
132 .hg/cache/rbc-revs-v1
135 .hg/cache/rbc-revs-v1
133 .hg/dirstate
136 .hg/dirstate
134 .hg/fsmonitor.state (fsmonitor !)
137 .hg/fsmonitor.state (fsmonitor !)
135 .hg/last-message.txt
138 .hg/last-message.txt
136 .hg/requires
139 .hg/requires
137 .hg/store
140 .hg/store
141 .hg/store/00changelog-b875dfc5.nd (rust !)
142 .hg/store/00changelog.d
138 .hg/store/00changelog.i
143 .hg/store/00changelog.i
144 .hg/store/00changelog.n (rust !)
139 .hg/store/00manifest.i
145 .hg/store/00manifest.i
140 .hg/store/data
146 .hg/store/data
141 .hg/store/data/tst.d.hg
147 .hg/store/data/tst.d.hg
142 .hg/store/data/tst.d.hg/_foo.i
148 .hg/store/data/tst.d.hg/_foo.i
143 .hg/store/phaseroots
149 .hg/store/phaseroots
144 .hg/store/requires
150 .hg/store/requires
145 .hg/store/undo
151 .hg/store/undo
146 .hg/store/undo.backupfiles
152 .hg/store/undo.backupfiles
147 .hg/undo.backup.branch.bck
153 .hg/undo.backup.branch.bck
148 .hg/undo.desc
154 .hg/undo.desc
149 .hg/wcache
155 .hg/wcache
150 .hg/wcache/checkisexec (execbit !)
156 .hg/wcache/checkisexec (execbit !)
151 .hg/wcache/checklink (symlink !)
157 .hg/wcache/checklink (symlink !)
152 .hg/wcache/checklink-target (symlink !)
158 .hg/wcache/checklink-target (symlink !)
153 .hg/wcache/manifestfulltextcache (reporevlogstore !)
159 .hg/wcache/manifestfulltextcache (reporevlogstore !)
154 $ cd ..
160 $ cd ..
155
161
162 Encoding of reserved / long paths in the store
163
158 $ hg init r2
164 $ hg init r2
159 $ cd r2
165 $ cd r2
160 $ cat <<EOF > .hg/hgrc
166 $ cat <<EOF > .hg/hgrc
161 > [ui]
167 > [ui]
162 > portablefilenames = ignore
168 > portablefilenames = ignore
163 > EOF
169 > EOF
164
170
165 $ hg import -q --bypass - <<EOF
171 $ hg import -q --bypass - <<EOF
166 > # HG changeset patch
172 > # HG changeset patch
167 > # User test
173 > # User test
168 > # Date 0 0
174 > # Date 0 0
169 > # Node ID 1c7a2f7cb77be1a0def34e4c7cabc562ad98fbd7
175 > # Node ID 1c7a2f7cb77be1a0def34e4c7cabc562ad98fbd7
170 > # Parent 0000000000000000000000000000000000000000
176 > # Parent 0000000000000000000000000000000000000000
171 > 1
177 > 1
172 >
178 >
173 > diff --git a/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-12345-ABCDEFGHIJKLMNOPRSTUVWXYZ-abcdefghjiklmnopqrstuvwxyz b/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-12345-ABCDEFGHIJKLMNOPRSTUVWXYZ-abcdefghjiklmnopqrstuvwxyz
179 > diff --git a/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-12345-ABCDEFGHIJKLMNOPRSTUVWXYZ-abcdefghjiklmnopqrstuvwxyz b/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-12345-ABCDEFGHIJKLMNOPRSTUVWXYZ-abcdefghjiklmnopqrstuvwxyz
174 > new file mode 100644
180 > new file mode 100644
175 > --- /dev/null
181 > --- /dev/null
176 > +++ b/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-12345-ABCDEFGHIJKLMNOPRSTUVWXYZ-abcdefghjiklmnopqrstuvwxyz
182 > +++ b/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-12345-ABCDEFGHIJKLMNOPRSTUVWXYZ-abcdefghjiklmnopqrstuvwxyz
177 > @@ -0,0 +1,1 @@
183 > @@ -0,0 +1,1 @@
178 > +foo
184 > +foo
179 > diff --git a/AUX/SECOND/X.PRN/FOURTH/FI:FTH/SIXTH/SEVENTH/EIGHTH/NINETH/TENTH/ELEVENTH/LOREMIPSUM.TXT b/AUX/SECOND/X.PRN/FOURTH/FI:FTH/SIXTH/SEVENTH/EIGHTH/NINETH/TENTH/ELEVENTH/LOREMIPSUM.TXT
185 > diff --git a/AUX/SECOND/X.PRN/FOURTH/FI:FTH/SIXTH/SEVENTH/EIGHTH/NINETH/TENTH/ELEVENTH/LOREMIPSUM.TXT b/AUX/SECOND/X.PRN/FOURTH/FI:FTH/SIXTH/SEVENTH/EIGHTH/NINETH/TENTH/ELEVENTH/LOREMIPSUM.TXT
180 > new file mode 100644
186 > new file mode 100644
181 > --- /dev/null
187 > --- /dev/null
182 > +++ b/AUX/SECOND/X.PRN/FOURTH/FI:FTH/SIXTH/SEVENTH/EIGHTH/NINETH/TENTH/ELEVENTH/LOREMIPSUM.TXT
188 > +++ b/AUX/SECOND/X.PRN/FOURTH/FI:FTH/SIXTH/SEVENTH/EIGHTH/NINETH/TENTH/ELEVENTH/LOREMIPSUM.TXT
183 > @@ -0,0 +1,1 @@
189 > @@ -0,0 +1,1 @@
184 > +foo
190 > +foo
185 > diff --git a/Project Planning/Resources/AnotherLongDirectoryName/Followedbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt b/Project Planning/Resources/AnotherLongDirectoryName/Followedbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt
191 > diff --git a/Project Planning/Resources/AnotherLongDirectoryName/Followedbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt b/Project Planning/Resources/AnotherLongDirectoryName/Followedbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt
186 > new file mode 100644
192 > new file mode 100644
187 > --- /dev/null
193 > --- /dev/null
188 > +++ b/Project Planning/Resources/AnotherLongDirectoryName/Followedbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt
194 > +++ b/Project Planning/Resources/AnotherLongDirectoryName/Followedbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt
189 > @@ -0,0 +1,1 @@
195 > @@ -0,0 +1,1 @@
190 > +foo
196 > +foo
191 > diff --git a/bla.aux/prn/PRN/lpt/com3/nul/coma/foo.NUL/normal.c b/bla.aux/prn/PRN/lpt/com3/nul/coma/foo.NUL/normal.c
197 > diff --git a/bla.aux/prn/PRN/lpt/com3/nul/coma/foo.NUL/normal.c b/bla.aux/prn/PRN/lpt/com3/nul/coma/foo.NUL/normal.c
192 > new file mode 100644
198 > new file mode 100644
193 > --- /dev/null
199 > --- /dev/null
194 > +++ b/bla.aux/prn/PRN/lpt/com3/nul/coma/foo.NUL/normal.c
200 > +++ b/bla.aux/prn/PRN/lpt/com3/nul/coma/foo.NUL/normal.c
195 > @@ -0,0 +1,1 @@
201 > @@ -0,0 +1,1 @@
196 > +foo
202 > +foo
197 > diff --git a/enterprise/openesbaddons/contrib-imola/corba-bc/netbeansplugin/wsdlExtension/src/main/java/META-INF/services/org.netbeans.modules.xml.wsdl.bindingsupport.spi.ExtensibilityElementTemplateProvider b/enterprise/openesbaddons/contrib-imola/corba-bc/netbeansplugin/wsdlExtension/src/main/java/META-INF/services/org.netbeans.modules.xml.wsdl.bindingsupport.spi.ExtensibilityElementTemplateProvider
203 > diff --git a/enterprise/openesbaddons/contrib-imola/corba-bc/netbeansplugin/wsdlExtension/src/main/java/META-INF/services/org.netbeans.modules.xml.wsdl.bindingsupport.spi.ExtensibilityElementTemplateProvider b/enterprise/openesbaddons/contrib-imola/corba-bc/netbeansplugin/wsdlExtension/src/main/java/META-INF/services/org.netbeans.modules.xml.wsdl.bindingsupport.spi.ExtensibilityElementTemplateProvider
198 > new file mode 100644
204 > new file mode 100644
199 > --- /dev/null
205 > --- /dev/null
200 > +++ b/enterprise/openesbaddons/contrib-imola/corba-bc/netbeansplugin/wsdlExtension/src/main/java/META-INF/services/org.netbeans.modules.xml.wsdl.bindingsupport.spi.ExtensibilityElementTemplateProvider
206 > +++ b/enterprise/openesbaddons/contrib-imola/corba-bc/netbeansplugin/wsdlExtension/src/main/java/META-INF/services/org.netbeans.modules.xml.wsdl.bindingsupport.spi.ExtensibilityElementTemplateProvider
201 > @@ -0,0 +1,1 @@
207 > @@ -0,0 +1,1 @@
202 > +foo
208 > +foo
203 > EOF
209 > EOF
204
210
205 $ find .hg/store -name *.i | sort
211 $ find .hg/store -name *.i | sort
206 .hg/store/00changelog.i
212 .hg/store/00changelog.i
207 .hg/store/00manifest.i
213 .hg/store/00manifest.i
208 .hg/store/data/bla.aux/pr~6e/_p_r_n/lpt/co~6d3/nu~6c/coma/foo._n_u_l/normal.c.i
214 .hg/store/data/bla.aux/pr~6e/_p_r_n/lpt/co~6d3/nu~6c/coma/foo._n_u_l/normal.c.i
209 .hg/store/dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxx168e07b38e65eff86ab579afaaa8e30bfbe0f35f.i
215 .hg/store/dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxx168e07b38e65eff86ab579afaaa8e30bfbe0f35f.i
210 .hg/store/dh/au~78/second/x.prn/fourth/fi~3afth/sixth/seventh/eighth/nineth/tenth/loremia20419e358ddff1bf8751e38288aff1d7c32ec05.i
216 .hg/store/dh/au~78/second/x.prn/fourth/fi~3afth/sixth/seventh/eighth/nineth/tenth/loremia20419e358ddff1bf8751e38288aff1d7c32ec05.i
211 .hg/store/dh/enterpri/openesba/contrib-/corba-bc/netbeans/wsdlexte/src/main/java/org.net7018f27961fdf338a598a40c4683429e7ffb9743.i
217 .hg/store/dh/enterpri/openesba/contrib-/corba-bc/netbeans/wsdlexte/src/main/java/org.net7018f27961fdf338a598a40c4683429e7ffb9743.i
212 .hg/store/dh/project_/resource/anotherl/followed/andanoth/andthenanextremelylongfilename0d8e1f4187c650e2f1fdca9fd90f786bc0976b6b.i
218 .hg/store/dh/project_/resource/anotherl/followed/andanoth/andthenanextremelylongfilename0d8e1f4187c650e2f1fdca9fd90f786bc0976b6b.i
213
219
214 $ cd ..
220 $ cd ..
215
221
222 Aborting lock does not prevent fncache writes
223
224 $ cat > exceptionext.py <<EOF
219 > import os
225 > import os
220 > from mercurial import commands, error, extensions
226 > from mercurial import commands, error, extensions
221 >
227 >
222 > def lockexception(orig, vfs, lockname, wait, releasefn, *args, **kwargs):
228 > def lockexception(orig, vfs, lockname, wait, releasefn, *args, **kwargs):
223 > def releasewrap():
229 > def releasewrap():
224 > l.held = False # ensure __del__ is a noop
230 > l.held = False # ensure __del__ is a noop
225 > raise error.Abort(b"forced lock failure")
231 > raise error.Abort(b"forced lock failure")
226 > l = orig(vfs, lockname, wait, releasewrap, *args, **kwargs)
232 > l = orig(vfs, lockname, wait, releasewrap, *args, **kwargs)
227 > return l
233 > return l
228 >
234 >
229 > def reposetup(ui, repo):
235 > def reposetup(ui, repo):
230 > extensions.wrapfunction(repo, '_lock', lockexception)
236 > extensions.wrapfunction(repo, '_lock', lockexception)
231 >
237 >
232 > cmdtable = {}
238 > cmdtable = {}
233 >
239 >
234 > # wrap "commit" command to prevent wlock from being '__del__()'-ed
240 > # wrap "commit" command to prevent wlock from being '__del__()'-ed
235 > # at the end of dispatching (for intentional "forced lock failure")
241 > # at the end of dispatching (for intentional "forced lock failure")
236 > def commitwrap(orig, ui, repo, *pats, **opts):
242 > def commitwrap(orig, ui, repo, *pats, **opts):
237 > repo = repo.unfiltered() # make sure the replaced repo._lock is used
243 > repo = repo.unfiltered() # make sure the replaced repo._lock is used
238 > wlock = repo.wlock()
244 > wlock = repo.wlock()
239 > try:
245 > try:
240 > return orig(ui, repo, *pats, **opts)
246 > return orig(ui, repo, *pats, **opts)
241 > finally:
247 > finally:
242 > # multiple 'release()' calls are needed to fully release wlock,
248 > # multiple 'release()' calls are needed to fully release wlock,
243 > # because the "forced" abort while releasing the store lock
249 > # because the "forced" abort while releasing the store lock
244 > # prevents wlock from being released by the same 'lockmod.release()'
250 > # prevents wlock from being released by the same 'lockmod.release()'
245 > for i in range(wlock.held):
251 > for i in range(wlock.held):
246 > wlock.release()
252 > wlock.release()
247 >
253 >
248 > def extsetup(ui):
254 > def extsetup(ui):
249 > extensions.wrapcommand(commands.table, b"commit", commitwrap)
255 > extensions.wrapcommand(commands.table, b"commit", commitwrap)
250 > EOF
256 > EOF
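The extension wires its wrapper in with extensions.wrapfunction; as a rough stand-alone
illustration of that wrapper pattern (plain Python only, an approximation rather than
Mercurial's actual implementation -- the class, method and log text below are made up),
the wrapper receives the original callable as its first argument and decides when to
delegate:

  def wrapfunction(obj, name, wrapper):
      # replace obj.<name> so the wrapper gets the original callable first
      orig = getattr(obj, name)
      def wrapped(*args, **kwargs):
          return wrapper(orig, *args, **kwargs)
      setattr(obj, name, wrapped)

  class Repo:
      def _lock(self, name):
          return 'locked %s' % name

  def noisy(orig, name):
      print('about to take', name)
      return orig(name)

  repo = Repo()
  wrapfunction(repo, '_lock', noisy)
  assert repo._lock('store') == 'locked store'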
251 $ extpath=`pwd`/exceptionext.py
257 $ extpath=`pwd`/exceptionext.py
252 $ hg init fncachetxn
258 $ hg init fncachetxn
253 $ cd fncachetxn
259 $ cd fncachetxn
254 $ printf "[extensions]\nexceptionext=$extpath\n" >> .hg/hgrc
260 $ printf "[extensions]\nexceptionext=$extpath\n" >> .hg/hgrc
255 $ touch y
261 $ touch y
256 $ hg ci -qAm y
262 $ hg ci -qAm y
257 abort: forced lock failure
263 abort: forced lock failure
258 [255]
264 [255]
259 $ cat .hg/store/fncache
265 $ cat .hg/store/fncache
260 data/y.i
266 data/y.i
261
267
262 Aborting transaction prevents fncache change
268 Aborting transaction prevents fncache change
263
269
264 $ cat > ../exceptionext.py <<EOF
270 $ cat > ../exceptionext.py <<EOF
265 > import os
271 > import os
266 > from mercurial import commands, error, extensions, localrepo
272 > from mercurial import commands, error, extensions, localrepo
267 >
273 >
268 > def wrapper(orig, self, *args, **kwargs):
274 > def wrapper(orig, self, *args, **kwargs):
269 > tr = orig(self, *args, **kwargs)
275 > tr = orig(self, *args, **kwargs)
270 > def fail(tr):
276 > def fail(tr):
271 > raise error.Abort(b"forced transaction failure")
277 > raise error.Abort(b"forced transaction failure")
272 > # zzz prefix to ensure it sorted after store.write
278 > # zzz prefix to ensure it sorted after store.write
273 > tr.addfinalize(b'zzz-forcefails', fail)
279 > tr.addfinalize(b'zzz-forcefails', fail)
274 > return tr
280 > return tr
275 >
281 >
276 > def uisetup(ui):
282 > def uisetup(ui):
277 > extensions.wrapfunction(
283 > extensions.wrapfunction(
278 > localrepo.localrepository, 'transaction', wrapper)
284 > localrepo.localrepository, 'transaction', wrapper)
279 >
285 >
280 > cmdtable = {}
286 > cmdtable = {}
281 >
287 >
282 > EOF
288 > EOF
283
289
284 Clean cached version
290 Clean cached version
285 $ rm -f "${extpath}c"
291 $ rm -f "${extpath}c"
286 $ rm -Rf "`dirname $extpath`/__pycache__"
292 $ rm -Rf "`dirname $extpath`/__pycache__"
287
293
288 $ touch z
294 $ touch z
289 $ hg ci -qAm z
295 $ hg ci -qAm z
290 transaction abort!
296 transaction abort!
291 rollback completed
297 rollback completed
292 abort: forced transaction failure
298 abort: forced transaction failure
293 [255]
299 [255]
294 $ cat .hg/store/fncache
300 $ cat .hg/store/fncache
295 data/y.i
301 data/y.i
296
302
297 Aborted transactions can be recovered later
303 Aborted transactions can be recovered later
298
304
299 $ cat > ../exceptionext.py <<EOF
305 $ cat > ../exceptionext.py <<EOF
300 > import os
306 > import os
301 > import signal
307 > import signal
302 > from mercurial import (
308 > from mercurial import (
303 > commands,
309 > commands,
304 > error,
310 > error,
305 > extensions,
311 > extensions,
306 > localrepo,
312 > localrepo,
307 > transaction,
313 > transaction,
308 > )
314 > )
309 >
315 >
310 > def trwrapper(orig, self, *args, **kwargs):
316 > def trwrapper(orig, self, *args, **kwargs):
311 > tr = orig(self, *args, **kwargs)
317 > tr = orig(self, *args, **kwargs)
312 > def fail(tr):
318 > def fail(tr):
313 > os.kill(os.getpid(), signal.SIGKILL)
319 > os.kill(os.getpid(), signal.SIGKILL)
314 > # zzz prefix to ensure it sorted after store.write
320 > # zzz prefix to ensure it sorted after store.write
315 > tr.addfinalize(b'zzz-forcefails', fail)
321 > tr.addfinalize(b'zzz-forcefails', fail)
316 > return tr
322 > return tr
317 >
323 >
318 > def uisetup(ui):
324 > def uisetup(ui):
319 > extensions.wrapfunction(localrepo.localrepository, 'transaction',
325 > extensions.wrapfunction(localrepo.localrepository, 'transaction',
320 > trwrapper)
326 > trwrapper)
321 >
327 >
322 > cmdtable = {}
328 > cmdtable = {}
323 >
329 >
324 > EOF
330 > EOF
325
331
326 Clean cached versions
332 Clean cached versions
327 $ rm -f "${extpath}c"
333 $ rm -f "${extpath}c"
328 $ rm -Rf "`dirname $extpath`/__pycache__"
334 $ rm -Rf "`dirname $extpath`/__pycache__"
329
335
330 $ hg up -q 1
336 $ hg up -q 1
331 $ touch z
337 $ touch z
332 # Cannot rely on the return code value as chg use a different one.
338 # Cannot rely on the return code value as chg use a different one.
333 # So we use a `|| echo` trick
339 # So we use a `|| echo` trick
334 # XXX-CHG fixing chg behavior would be nice here.
340 # XXX-CHG fixing chg behavior would be nice here.
335 $ hg ci -qAm z || echo "He's Dead, Jim." 2>/dev/null
341 $ hg ci -qAm z || echo "He's Dead, Jim." 2>/dev/null
336 *Killed* (glob) (?)
342 *Killed* (glob) (?)
337 He's Dead, Jim.
343 He's Dead, Jim.
338 $ cat .hg/store/fncache | sort
344 $ cat .hg/store/fncache | sort
339 data/y.i
345 data/y.i
340 data/z.i
346 data/z.i
341 $ hg recover --verify
347 $ hg recover --verify
342 rolling back interrupted transaction
348 rolling back interrupted transaction
343 checking changesets
349 checking changesets
344 checking manifests
350 checking manifests
345 crosschecking files in changesets and manifests
351 crosschecking files in changesets and manifests
346 checking files
352 checking files
347 checking dirstate
353 checking dirstate
348 checked 1 changesets with 1 changes to 1 files
354 checked 1 changesets with 1 changes to 1 files
349 $ cat .hg/store/fncache
355 $ cat .hg/store/fncache
350 data/y.i
356 data/y.i
351
357
352 $ cd ..
358 $ cd ..
353
359
354 debugrebuildfncache does nothing unless repo has fncache requirement
360 debugrebuildfncache does nothing unless repo has fncache requirement
355
361
356 $ hg --config format.usefncache=false init nofncache
362 $ hg --config format.usefncache=false init nofncache
357 $ cd nofncache
363 $ cd nofncache
358 $ hg debugrebuildfncache
364 $ hg debugrebuildfncache
359 (not rebuilding fncache because repository does not support fncache)
365 (not rebuilding fncache because repository does not support fncache)
360
366
361 $ cd ..
367 $ cd ..
362
368
363 debugrebuildfncache works on empty repository
369 debugrebuildfncache works on empty repository
364
370
365 $ hg init empty
371 $ hg init empty
366 $ cd empty
372 $ cd empty
367 $ hg debugrebuildfncache
373 $ hg debugrebuildfncache
368 fncache already up to date
374 fncache already up to date
369 $ cd ..
375 $ cd ..
370
376
371 debugrebuildfncache on an up to date repository no-ops
377 debugrebuildfncache on an up to date repository no-ops
372
378
373 $ hg init repo
379 $ hg init repo
374 $ cd repo
380 $ cd repo
375 $ echo initial > foo
381 $ echo initial > foo
376 $ echo initial > .bar
382 $ echo initial > .bar
377 $ hg commit -A -m initial
383 $ hg commit -A -m initial
378 adding .bar
384 adding .bar
379 adding foo
385 adding foo
380
386
381 $ cat .hg/store/fncache | sort
387 $ cat .hg/store/fncache | sort
382 data/.bar.i
388 data/.bar.i
383 data/foo.i
389 data/foo.i
384
390
385 $ hg debugrebuildfncache
391 $ hg debugrebuildfncache
386 fncache already up to date
392 fncache already up to date
387
393
388 debugrebuildfncache restores deleted fncache file
394 debugrebuildfncache restores deleted fncache file
389
395
390 $ rm -f .hg/store/fncache
396 $ rm -f .hg/store/fncache
391 $ hg debugrebuildfncache
397 $ hg debugrebuildfncache
392 adding data/.bar.i
398 adding data/.bar.i
393 adding data/foo.i
399 adding data/foo.i
394 2 items added, 0 removed from fncache
400 2 items added, 0 removed from fncache
395
401
396 $ cat .hg/store/fncache | sort
402 $ cat .hg/store/fncache | sort
397 data/.bar.i
403 data/.bar.i
398 data/foo.i
404 data/foo.i
399
405
400 Rebuild after rebuild should no-op
406 Rebuild after rebuild should no-op
401
407
402 $ hg debugrebuildfncache
408 $ hg debugrebuildfncache
403 fncache already up to date
409 fncache already up to date
404
410
405 A single missing file should get restored, and an extra file should be removed
411 A single missing file should get restored, and an extra file should be removed
406
412
407 $ cat > .hg/store/fncache << EOF
413 $ cat > .hg/store/fncache << EOF
408 > data/foo.i
414 > data/foo.i
409 > data/bad-entry.i
415 > data/bad-entry.i
410 > EOF
416 > EOF
411
417
412 $ hg debugrebuildfncache
418 $ hg debugrebuildfncache
413 removing data/bad-entry.i
419 removing data/bad-entry.i
414 adding data/.bar.i
420 adding data/.bar.i
415 1 items added, 1 removed from fncache
421 1 items added, 1 removed from fncache
416
422
417 $ cat .hg/store/fncache | sort
423 $ cat .hg/store/fncache | sort
418 data/.bar.i
424 data/.bar.i
419 data/foo.i
425 data/foo.i
420
426
421 debugrebuildfncache recovers from truncated line in fncache
427 debugrebuildfncache recovers from truncated line in fncache
422
428
423 $ printf a > .hg/store/fncache
429 $ printf a > .hg/store/fncache
424 $ hg debugrebuildfncache
430 $ hg debugrebuildfncache
425 fncache does not ends with a newline
431 fncache does not ends with a newline
426 adding data/.bar.i
432 adding data/.bar.i
427 adding data/foo.i
433 adding data/foo.i
428 2 items added, 0 removed from fncache
434 2 items added, 0 removed from fncache
429
435
430 $ cat .hg/store/fncache | sort
436 $ cat .hg/store/fncache | sort
431 data/.bar.i
437 data/.bar.i
432 data/foo.i
438 data/foo.i
433
439
434 $ cd ..
440 $ cd ..
435
441
436 Try a simple variation without dotencode to ensure fncache is ignorant of encoding
442 Try a simple variation without dotencode to ensure fncache is ignorant of encoding
437
443
438 $ hg --config format.dotencode=false init nodotencode
444 $ hg --config format.dotencode=false init nodotencode
439 $ cd nodotencode
445 $ cd nodotencode
440 $ echo initial > foo
446 $ echo initial > foo
441 $ echo initial > .bar
447 $ echo initial > .bar
442 $ hg commit -A -m initial
448 $ hg commit -A -m initial
443 adding .bar
449 adding .bar
444 adding foo
450 adding foo
445
451
446 $ cat .hg/store/fncache | sort
452 $ cat .hg/store/fncache | sort
447 data/.bar.i
453 data/.bar.i
448 data/foo.i
454 data/foo.i
449
455
450 $ rm .hg/store/fncache
456 $ rm .hg/store/fncache
451 $ hg debugrebuildfncache
457 $ hg debugrebuildfncache
452 adding data/.bar.i
458 adding data/.bar.i
453 adding data/foo.i
459 adding data/foo.i
454 2 items added, 0 removed from fncache
460 2 items added, 0 removed from fncache
455
461
456 $ cat .hg/store/fncache | sort
462 $ cat .hg/store/fncache | sort
457 data/.bar.i
463 data/.bar.i
458 data/foo.i
464 data/foo.i
459
465
460 $ cd ..
466 $ cd ..
461
467
462 In repositories that have accumulated a large number of files over time, the
468 In repositories that have accumulated a large number of files over time, the
463 fncache file is going to be large. If we can possibly avoid loading it, so much the better.
469 fncache file is going to be large. If we can possibly avoid loading it, so much the better.
464 The cache should not be loaded when committing changes to existing files, or when unbundling
470 The cache should not be loaded when committing changes to existing files, or when unbundling
465 changesets that only contain changes to existing files:
471 changesets that only contain changes to existing files:
466
472
467 $ cat > fncacheloadwarn.py << EOF
473 $ cat > fncacheloadwarn.py << EOF
468 > from mercurial import extensions, localrepo
474 > from mercurial import extensions, localrepo
469 >
475 >
470 > def extsetup(ui):
476 > def extsetup(ui):
471 > def wrapstore(orig, requirements, *args):
477 > def wrapstore(orig, requirements, *args):
472 > store = orig(requirements, *args)
478 > store = orig(requirements, *args)
473 > if b'store' in requirements and b'fncache' in requirements:
479 > if b'store' in requirements and b'fncache' in requirements:
474 > instrumentfncachestore(store, ui)
480 > instrumentfncachestore(store, ui)
475 > return store
481 > return store
476 > extensions.wrapfunction(localrepo, 'makestore', wrapstore)
482 > extensions.wrapfunction(localrepo, 'makestore', wrapstore)
477 >
483 >
478 > def instrumentfncachestore(fncachestore, ui):
484 > def instrumentfncachestore(fncachestore, ui):
479 > class instrumentedfncache(type(fncachestore.fncache)):
485 > class instrumentedfncache(type(fncachestore.fncache)):
480 > def _load(self):
486 > def _load(self):
481 > ui.warn(b'fncache load triggered!\n')
487 > ui.warn(b'fncache load triggered!\n')
482 > super(instrumentedfncache, self)._load()
488 > super(instrumentedfncache, self)._load()
483 > fncachestore.fncache.__class__ = instrumentedfncache
489 > fncachestore.fncache.__class__ = instrumentedfncache
484 > EOF
490 > EOF
485
491
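The notable trick in the extension above is reassigning the live object's __class__ to
an instrumented subclass; a minimal stand-alone sketch of the same pattern (plain Python,
independent of Mercurial, names made up):

  class Store:
      def _load(self):
          return 'loaded'

  store = Store()

  class InstrumentedStore(type(store)):
      def _load(self):
          print('fncache load triggered!')   # log, then delegate
          return super()._load()

  store.__class__ = InstrumentedStore
  assert store._load() == 'loaded'

Swapping __class__ lets an already-constructed object be instrumented without touching
the code path that created it.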
486 $ fncachextpath=`pwd`/fncacheloadwarn.py
492 $ fncachextpath=`pwd`/fncacheloadwarn.py
487 $ hg init nofncacheload
493 $ hg init nofncacheload
488 $ cd nofncacheload
494 $ cd nofncacheload
489 $ printf "[extensions]\nfncacheloadwarn=$fncachextpath\n" >> .hg/hgrc
495 $ printf "[extensions]\nfncacheloadwarn=$fncachextpath\n" >> .hg/hgrc
490
496
491 A new file should trigger a load, as we'd want to update the fncache set in that case:
497 A new file should trigger a load, as we'd want to update the fncache set in that case:
492
498
493 $ touch foo
499 $ touch foo
494 $ hg ci -qAm foo
500 $ hg ci -qAm foo
495 fncache load triggered!
501 fncache load triggered!
496
502
497 But modifying that file should not:
503 But modifying that file should not:
498
504
499 $ echo bar >> foo
505 $ echo bar >> foo
500 $ hg ci -qm foo
506 $ hg ci -qm foo
501
507
502 If a transaction has been aborted, the zero-size truncated index file will
508 If a transaction has been aborted, the zero-size truncated index file will
503 not prevent the fncache from being loaded; rather than actually abort
509 not prevent the fncache from being loaded; rather than actually abort
504 a transaction, we simulate the situation by creating a zero-size index file:
510 a transaction, we simulate the situation by creating a zero-size index file:
505
511
506 $ touch .hg/store/data/bar.i
512 $ touch .hg/store/data/bar.i
507 $ touch bar
513 $ touch bar
508 $ hg ci -qAm bar
514 $ hg ci -qAm bar
509 fncache load triggered!
515 fncache load triggered!
510
516
511 Unbundling should follow the same rules; existing files should not cause a load:
517 Unbundling should follow the same rules; existing files should not cause a load:
512
518
513 (loading during the clone is expected)
519 (loading during the clone is expected)
514 $ hg clone -q . tobundle
520 $ hg clone -q . tobundle
515 fncache load triggered!
521 fncache load triggered!
516 fncache load triggered!
522 fncache load triggered!
517 fncache load triggered!
523 fncache load triggered!
518
524
519 $ echo 'new line' > tobundle/bar
525 $ echo 'new line' > tobundle/bar
520 $ hg -R tobundle ci -qm bar
526 $ hg -R tobundle ci -qm bar
521 $ hg -R tobundle bundle -q barupdated.hg
527 $ hg -R tobundle bundle -q barupdated.hg
522 $ hg unbundle -q barupdated.hg
528 $ hg unbundle -q barupdated.hg
523
529
524 but adding new files should:
530 but adding new files should:
525
531
526 $ touch tobundle/newfile
532 $ touch tobundle/newfile
527 $ hg -R tobundle ci -qAm newfile
533 $ hg -R tobundle ci -qAm newfile
528 $ hg -R tobundle bundle -q newfile.hg
534 $ hg -R tobundle bundle -q newfile.hg
529 $ hg unbundle -q newfile.hg
535 $ hg unbundle -q newfile.hg
530 fncache load triggered!
536 fncache load triggered!
531
537
532 $ cd ..
538 $ cd ..
@@ -1,425 +1,469 b''
1 #require hardlink reporevlogstore
1 #require hardlink reporevlogstore
2
2
3 $ cat > nlinks.py <<EOF
3 $ cat > nlinks.py <<EOF
4 > import sys
4 > import sys
5 > from mercurial import pycompat, util
5 > from mercurial import pycompat, util
6 > for f in sorted(sys.stdin.readlines()):
6 > for f in sorted(sys.stdin.readlines()):
7 > f = f[:-1]
7 > f = f[:-1]
8 > print(util.nlinks(pycompat.fsencode(f)), f)
8 > print(util.nlinks(pycompat.fsencode(f)), f)
9 > EOF
9 > EOF
10
10
11 $ nlinksdir()
11 $ nlinksdir()
12 > {
12 > {
13 > find "$@" -type f | "$PYTHON" $TESTTMP/nlinks.py
13 > find "$@" -type f | "$PYTHON" $TESTTMP/nlinks.py
14 > }
14 > }
15
15
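For reference, the helper above counts hardlinks via Mercurial's util.nlinks; assuming a
POSIX filesystem, essentially the same count is available from the standard library's
os.stat() (a sketch, not part of the test):

  import os
  import sys

  # print the hardlink count for each path read from stdin, sorted
  for line in sorted(sys.stdin.readlines()):
      path = line.rstrip('\n')
      print(os.stat(path).st_nlink, path)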
16 Some implementations of cp can't create hardlinks (replaces 'cp -al' on Linux):
16 Some implementations of cp can't create hardlinks (replaces 'cp -al' on Linux):
17
17
18 $ cat > linkcp.py <<EOF
18 $ cat > linkcp.py <<EOF
19 > import sys
19 > import sys
20 > from mercurial import pycompat, util
20 > from mercurial import pycompat, util
21 > util.copyfiles(pycompat.fsencode(sys.argv[1]),
21 > util.copyfiles(pycompat.fsencode(sys.argv[1]),
22 > pycompat.fsencode(sys.argv[2]), hardlink=True)
22 > pycompat.fsencode(sys.argv[2]), hardlink=True)
23 > EOF
23 > EOF
24
24
25 $ linkcp()
25 $ linkcp()
26 > {
26 > {
27 > "$PYTHON" $TESTTMP/linkcp.py $1 $2
27 > "$PYTHON" $TESTTMP/linkcp.py $1 $2
28 > }
28 > }
29
29
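linkcp above delegates to util.copyfiles(..., hardlink=True); a simplified stand-alone
sketch of hardlink-copying a tree (POSIX only, no fallback to plain copying, helper name
made up):

  import os

  def linktree(src, dst):
      # walk src and recreate each file in dst as a hardlink to the original
      for root, dirs, files in os.walk(src):
          target = os.path.join(dst, os.path.relpath(root, src))
          os.makedirs(target, exist_ok=True)
          for name in files:
              os.link(os.path.join(root, name), os.path.join(target, name))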
30 Prepare repo r1:
30 Prepare repo r1:
31
31
32 $ hg init r1
32 $ hg init r1
33 $ cd r1
33 $ cd r1
34
34
35 $ echo c1 > f1
35 $ echo c1 > f1
36 $ hg add f1
36 $ hg add f1
37 $ hg ci -m0
37 $ hg ci -m0
38
38
39 $ mkdir d1
39 $ mkdir d1
40 $ cd d1
40 $ cd d1
41 $ echo c2 > f2
41 $ echo c2 > f2
42 $ hg add f2
42 $ hg add f2
43 $ hg ci -m1
43 $ hg ci -m1
44 $ cd ../..
44 $ cd ../..
45
45
46 $ nlinksdir r1/.hg/store
46 $ nlinksdir r1/.hg/store
47 1 r1/.hg/store/00changelog-b870a51b.nd (rust !)
48 1 r1/.hg/store/00changelog.d
47 1 r1/.hg/store/00changelog.i
49 1 r1/.hg/store/00changelog.i
50 1 r1/.hg/store/00changelog.n (rust !)
48 1 r1/.hg/store/00manifest.i
51 1 r1/.hg/store/00manifest.i
49 1 r1/.hg/store/data/d1/f2.i
52 1 r1/.hg/store/data/d1/f2.i
50 1 r1/.hg/store/data/f1.i
53 1 r1/.hg/store/data/f1.i
51 1 r1/.hg/store/fncache (repofncache !)
54 1 r1/.hg/store/fncache (repofncache !)
52 1 r1/.hg/store/phaseroots
55 1 r1/.hg/store/phaseroots
53 1 r1/.hg/store/requires
56 1 r1/.hg/store/requires
54 1 r1/.hg/store/undo
57 1 r1/.hg/store/undo
58 1 r1/.hg/store/undo.backup.00changelog.n.bck (rust !)
55 1 r1/.hg/store/undo.backup.fncache.bck (repofncache !)
59 1 r1/.hg/store/undo.backup.fncache.bck (repofncache !)
56 1 r1/.hg/store/undo.backupfiles
60 1 r1/.hg/store/undo.backupfiles
57
61
58
62
59 Create hardlinked clone r2:
63 Create hardlinked clone r2:
60
64
61 $ hg clone -U --debug r1 r2 --config progress.debug=true
65 $ hg clone -U --debug r1 r2 --config progress.debug=true
62 linking: 1/7 files (14.29%)
66 linking: 1/8 files (12.50%) (no-rust !)
63 linking: 2/7 files (28.57%)
67 linking: 2/8 files (25.00%) (no-rust !)
64 linking: 3/7 files (42.86%)
68 linking: 3/8 files (37.50%) (no-rust !)
65 linking: 4/7 files (57.14%)
69 linking: 4/8 files (50.00%) (no-rust !)
66 linking: 5/7 files (71.43%)
70 linking: 5/8 files (62.50%) (no-rust !)
67 linking: 6/7 files (85.71%)
71 linking: 6/8 files (75.00%) (no-rust !)
68 linking: 7/7 files (100.00%)
72 linking: 7/8 files (87.50%) (no-rust !)
69 linked 7 files
73 linking: 8/8 files (100.00%) (no-rust !)
74 linked 8 files (no-rust !)
75 linking: 1/10 files (10.00%) (rust !)
76 linking: 2/10 files (20.00%) (rust !)
77 linking: 3/10 files (30.00%) (rust !)
78 linking: 4/10 files (40.00%) (rust !)
79 linking: 5/10 files (50.00%) (rust !)
80 linking: 6/10 files (60.00%) (rust !)
81 linking: 7/10 files (70.00%) (rust !)
82 linking: 8/10 files (80.00%) (rust !)
83 linking: 9/10 files (90.00%) (rust !)
84 linking: 10/10 files (100.00%) (rust !)
85 linked 10 files (rust !)
70 updating the branch cache
86 updating the branch cache
71
87
72 Create non-hardlinked clone r3:
88 Create non-hardlinked clone r3:
73
89
74 $ hg clone --pull r1 r3
90 $ hg clone --pull r1 r3
75 requesting all changes
91 requesting all changes
76 adding changesets
92 adding changesets
77 adding manifests
93 adding manifests
78 adding file changes
94 adding file changes
79 added 2 changesets with 2 changes to 2 files
95 added 2 changesets with 2 changes to 2 files
80 new changesets 40d85e9847f2:7069c422939c
96 new changesets 40d85e9847f2:7069c422939c
81 updating to branch default
97 updating to branch default
82 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
98 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
83
99
84
100
85 Repos r1 and r2 should now contain hardlinked files:
101 Repos r1 and r2 should now contain hardlinked files:
86
102
87 $ nlinksdir r1/.hg/store
103 $ nlinksdir r1/.hg/store
104 1 r1/.hg/store/00changelog-b870a51b.nd (rust !)
105 2 r1/.hg/store/00changelog.d
88 2 r1/.hg/store/00changelog.i
106 2 r1/.hg/store/00changelog.i
107 1 r1/.hg/store/00changelog.n (rust !)
89 2 r1/.hg/store/00manifest.i
108 2 r1/.hg/store/00manifest.i
90 2 r1/.hg/store/data/d1/f2.i
109 2 r1/.hg/store/data/d1/f2.i
91 2 r1/.hg/store/data/f1.i
110 2 r1/.hg/store/data/f1.i
92 1 r1/.hg/store/fncache (repofncache !)
111 1 r1/.hg/store/fncache (repofncache !)
93 1 r1/.hg/store/phaseroots
112 1 r1/.hg/store/phaseroots
94 1 r1/.hg/store/requires
113 1 r1/.hg/store/requires
95 1 r1/.hg/store/undo
114 1 r1/.hg/store/undo
115 1 r1/.hg/store/undo.backup.00changelog.n.bck (rust !)
96 1 r1/.hg/store/undo.backup.fncache.bck (repofncache !)
116 1 r1/.hg/store/undo.backup.fncache.bck (repofncache !)
97 1 r1/.hg/store/undo.backupfiles
117 1 r1/.hg/store/undo.backupfiles
98
118
99 $ nlinksdir r2/.hg/store
119 $ nlinksdir r2/.hg/store
120 1 r2/.hg/store/00changelog-b870a51b.nd (rust !)
121 2 r2/.hg/store/00changelog.d
100 2 r2/.hg/store/00changelog.i
122 2 r2/.hg/store/00changelog.i
123 1 r2/.hg/store/00changelog.n (rust !)
101 2 r2/.hg/store/00manifest.i
124 2 r2/.hg/store/00manifest.i
102 2 r2/.hg/store/data/d1/f2.i
125 2 r2/.hg/store/data/d1/f2.i
103 2 r2/.hg/store/data/f1.i
126 2 r2/.hg/store/data/f1.i
104 1 r2/.hg/store/fncache (repofncache !)
127 1 r2/.hg/store/fncache (repofncache !)
105 1 r2/.hg/store/requires
128 1 r2/.hg/store/requires
106
129
107 Repo r3 should not be hardlinked:
130 Repo r3 should not be hardlinked:
108
131
109 $ nlinksdir r3/.hg/store
132 $ nlinksdir r3/.hg/store
133 1 r3/.hg/store/00changelog-88698448.nd (rust !)
134 1 r3/.hg/store/00changelog.d
110 1 r3/.hg/store/00changelog.i
135 1 r3/.hg/store/00changelog.i
136 1 r3/.hg/store/00changelog.n (rust !)
111 1 r3/.hg/store/00manifest.i
137 1 r3/.hg/store/00manifest.i
112 1 r3/.hg/store/data/d1/f2.i
138 1 r3/.hg/store/data/d1/f2.i
113 1 r3/.hg/store/data/f1.i
139 1 r3/.hg/store/data/f1.i
114 1 r3/.hg/store/fncache (repofncache !)
140 1 r3/.hg/store/fncache (repofncache !)
115 1 r3/.hg/store/phaseroots
141 1 r3/.hg/store/phaseroots
116 1 r3/.hg/store/requires
142 1 r3/.hg/store/requires
117 1 r3/.hg/store/undo
143 1 r3/.hg/store/undo
118 1 r3/.hg/store/undo.backupfiles
144 1 r3/.hg/store/undo.backupfiles
119
145
120
146
121 Create a non-inlined filelog in r3:
147 Create a non-inlined filelog in r3:
122
148
123 $ cd r3/d1
149 $ cd r3/d1
124 >>> f = open('data1', 'wb')
150 >>> f = open('data1', 'wb')
125 >>> for x in range(10000):
151 >>> for x in range(10000):
126 ... f.write(b"%d\n" % x) and None
152 ... f.write(b"%d\n" % x) and None
127 >>> f.close()
153 >>> f.close()
128 $ for j in 0 1 2 3 4 5 6 7 8 9; do
154 $ for j in 0 1 2 3 4 5 6 7 8 9; do
129 > cat data1 >> f2
155 > cat data1 >> f2
130 > hg commit -m$j
156 > hg commit -m$j
131 > done
157 > done
132 $ cd ../..
158 $ cd ../..
133
159
134 $ nlinksdir r3/.hg/store
160 $ nlinksdir r3/.hg/store
161 1 r3/.hg/store/00changelog-ea337809.nd (rust !)
162 1 r3/.hg/store/00changelog.d
135 1 r3/.hg/store/00changelog.i
163 1 r3/.hg/store/00changelog.i
164 1 r3/.hg/store/00changelog.n (rust !)
136 1 r3/.hg/store/00manifest.i
165 1 r3/.hg/store/00manifest.i
137 1 r3/.hg/store/data/d1/f2.d
166 1 r3/.hg/store/data/d1/f2.d
138 1 r3/.hg/store/data/d1/f2.i
167 1 r3/.hg/store/data/d1/f2.i
139 1 r3/.hg/store/data/f1.i
168 1 r3/.hg/store/data/f1.i
140 1 r3/.hg/store/fncache (repofncache !)
169 1 r3/.hg/store/fncache (repofncache !)
141 1 r3/.hg/store/phaseroots
170 1 r3/.hg/store/phaseroots
142 1 r3/.hg/store/requires
171 1 r3/.hg/store/requires
143 1 r3/.hg/store/undo
172 1 r3/.hg/store/undo
173 1 r3/.hg/store/undo.backup.00changelog.n.bck (rust !)
144 1 r3/.hg/store/undo.backupfiles
174 1 r3/.hg/store/undo.backupfiles
145
175
146 Push to repo r1 should break up most hardlinks in r2:
176 Push to repo r1 should break up most hardlinks in r2:
147
177
148 $ hg -R r2 verify -q
178 $ hg -R r2 verify -q
149
179
150 $ cd r3
180 $ cd r3
151 $ hg push
181 $ hg push
152 pushing to $TESTTMP/r1
182 pushing to $TESTTMP/r1
153 searching for changes
183 searching for changes
154 adding changesets
184 adding changesets
155 adding manifests
185 adding manifests
156 adding file changes
186 adding file changes
157 added 10 changesets with 10 changes to 1 files
187 added 10 changesets with 10 changes to 1 files
158
188
159 $ cd ..
189 $ cd ..
160
190
161 $ nlinksdir r2/.hg/store
191 $ nlinksdir r2/.hg/store
192 1 r2/.hg/store/00changelog-b870a51b.nd (rust !)
193 1 r2/.hg/store/00changelog.d
162 1 r2/.hg/store/00changelog.i
194 1 r2/.hg/store/00changelog.i
195 1 r2/.hg/store/00changelog.n (rust !)
163 1 r2/.hg/store/00manifest.i
196 1 r2/.hg/store/00manifest.i
164 1 r2/.hg/store/data/d1/f2.i
197 1 r2/.hg/store/data/d1/f2.i
165 2 r2/.hg/store/data/f1.i
198 2 r2/.hg/store/data/f1.i
166 [12] r2/\.hg/store/fncache (re) (repofncache !)
199 [12] r2/\.hg/store/fncache (re) (repofncache !)
167 1 r2/.hg/store/requires
200 1 r2/.hg/store/requires
168
201
169 #if hardlink-whitelisted repofncache
202 #if hardlink-whitelisted repofncache
170 $ nlinksdir r2/.hg/store/fncache
203 $ nlinksdir r2/.hg/store/fncache
171 1 r2/.hg/store/fncache
204 1 r2/.hg/store/fncache
172 #endif
205 #endif
173
206
174 $ hg -R r2 verify -q
207 $ hg -R r2 verify -q
175
208
176 $ cd r1
209 $ cd r1
177 $ hg up
210 $ hg up
178 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
211 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
179
212
180 Committing a change to f1 in r1 must break up hardlink f1.i in r2:
213 Committing a change to f1 in r1 must break up hardlink f1.i in r2:
181
214
182 $ echo c1c1 >> f1
215 $ echo c1c1 >> f1
183 $ hg ci -m00
216 $ hg ci -m00
184 $ cd ..
217 $ cd ..
185
218
186 $ nlinksdir r2/.hg/store
219 $ nlinksdir r2/.hg/store
220 1 r2/.hg/store/00changelog-b870a51b.nd (rust !)
221 1 r2/.hg/store/00changelog.d
187 1 r2/.hg/store/00changelog.i
222 1 r2/.hg/store/00changelog.i
223 1 r2/.hg/store/00changelog.n (rust !)
188 1 r2/.hg/store/00manifest.i
224 1 r2/.hg/store/00manifest.i
189 1 r2/.hg/store/data/d1/f2.i
225 1 r2/.hg/store/data/d1/f2.i
190 1 r2/.hg/store/data/f1.i
226 1 r2/.hg/store/data/f1.i
191 1 r2/.hg/store/fncache (repofncache !)
227 1 r2/.hg/store/fncache (repofncache !)
192 1 r2/.hg/store/requires
228 1 r2/.hg/store/requires
193
229
194 #if hardlink-whitelisted repofncache
230 #if hardlink-whitelisted repofncache
195 $ nlinksdir r2/.hg/store/fncache
231 $ nlinksdir r2/.hg/store/fncache
196 1 r2/.hg/store/fncache
232 1 r2/.hg/store/fncache
197 #endif
233 #endif
198
234
199 Create a file whose exec permissions we will change
235 Create a file whose exec permissions we will change
200 $ cd r3
236 $ cd r3
201 $ echo "echo hello world" > f3
237 $ echo "echo hello world" > f3
202 $ hg add f3
238 $ hg add f3
203 $ hg ci -mf3
239 $ hg ci -mf3
204 $ cd ..
240 $ cd ..
205
241
206 $ cd r3
242 $ cd r3
207 $ hg tip --template '{rev}:{node|short}\n'
243 $ hg tip --template '{rev}:{node|short}\n'
208 12:d3b77733a28a
244 12:d3b77733a28a
209 $ echo bla > f1
245 $ echo bla > f1
210 $ chmod +x f3
246 $ chmod +x f3
211 $ hg ci -m1
247 $ hg ci -m1
212 $ cd ..
248 $ cd ..
213
249
214 Create hardlinked copy r4 of r3 (on Linux, we would call 'cp -al'):
250 Create hardlinked copy r4 of r3 (on Linux, we would call 'cp -al'):
215
251
216 $ linkcp r3 r4
252 $ linkcp r3 r4
217
253
218 'checklink' is produced by hardlinking a symlink; whether the symlink should be
254 'checklink' is produced by hardlinking a symlink; whether the symlink should be
219 followed when hardlinking is undefined, and the behavior differs between Linux
255 followed when hardlinking is undefined, and the behavior differs between Linux
220 and BSD. Just remove it so the test passes on both platforms.
256 and BSD. Just remove it so the test passes on both platforms.
221
257
222 $ rm -f r4/.hg/wcache/checklink
258 $ rm -f r4/.hg/wcache/checklink
223
259
224 r4 has hardlinks in the working dir (not just inside .hg):
260 r4 has hardlinks in the working dir (not just inside .hg):
225
261
226 $ nlinksdir r4
262 $ nlinksdir r4
227 2 r4/.hg/00changelog.i
263 2 r4/.hg/00changelog.i
228 [24] r4/.hg/branch (re)
264 [24] r4/.hg/branch (re)
229 2 r4/.hg/cache/branch2-base
265 2 r4/.hg/cache/branch2-base
230 2 r4/.hg/cache/branch2-immutable
266 2 r4/.hg/cache/branch2-immutable
231 2 r4/.hg/cache/branch2-served
267 2 r4/.hg/cache/branch2-served
232 2 r4/.hg/cache/branch2-served.hidden
268 2 r4/.hg/cache/branch2-served.hidden
233 2 r4/.hg/cache/branch2-visible
269 2 r4/.hg/cache/branch2-visible
234 2 r4/.hg/cache/branch2-visible-hidden
270 2 r4/.hg/cache/branch2-visible-hidden
235 2 r4/.hg/cache/rbc-names-v1
271 2 r4/.hg/cache/rbc-names-v1
236 2 r4/.hg/cache/rbc-revs-v1
272 2 r4/.hg/cache/rbc-revs-v1
237 2 r4/.hg/cache/tags2
273 2 r4/.hg/cache/tags2
238 2 r4/.hg/cache/tags2-served
274 2 r4/.hg/cache/tags2-served
239 2 r4/.hg/dirstate
275 2 r4/.hg/dirstate
240 2 r4/.hg/fsmonitor.state (fsmonitor !)
276 2 r4/.hg/fsmonitor.state (fsmonitor !)
241 2 r4/.hg/hgrc
277 2 r4/.hg/hgrc
242 2 r4/.hg/last-message.txt
278 2 r4/.hg/last-message.txt
243 2 r4/.hg/requires
279 2 r4/.hg/requires
280 2 r4/.hg/store/00changelog-7f2eb713.nd (rust !)
281 2 r4/.hg/store/00changelog.d
244 2 r4/.hg/store/00changelog.i
282 2 r4/.hg/store/00changelog.i
283 2 r4/.hg/store/00changelog.n (rust !)
245 2 r4/.hg/store/00manifest.i
284 2 r4/.hg/store/00manifest.i
246 2 r4/.hg/store/data/d1/f2.d
285 2 r4/.hg/store/data/d1/f2.d
247 2 r4/.hg/store/data/d1/f2.i
286 2 r4/.hg/store/data/d1/f2.i
248 2 r4/.hg/store/data/f1.i
287 2 r4/.hg/store/data/f1.i
249 2 r4/.hg/store/data/f3.i
288 2 r4/.hg/store/data/f3.i
250 2 r4/.hg/store/fncache (repofncache !)
289 2 r4/.hg/store/fncache (repofncache !)
251 2 r4/.hg/store/phaseroots
290 2 r4/.hg/store/phaseroots
252 2 r4/.hg/store/requires
291 2 r4/.hg/store/requires
253 2 r4/.hg/store/undo
292 2 r4/.hg/store/undo
293 2 r4/.hg/store/undo.backup.00changelog.n.bck (rust !)
254 2 r4/.hg/store/undo.backupfiles
294 2 r4/.hg/store/undo.backupfiles
255 [24] r4/.hg/undo.backup.branch.bck (re)
295 [24] r4/.hg/undo.backup.branch.bck (re)
256 2 r4/\.hg/undo\.backup\.dirstate.bck (re)
296 2 r4/\.hg/undo\.backup\.dirstate.bck (re)
257 2 r4/.hg/undo.desc
297 2 r4/.hg/undo.desc
258 2 r4/.hg/wcache/checkisexec (execbit !)
298 2 r4/.hg/wcache/checkisexec (execbit !)
259 2 r4/.hg/wcache/checklink-target (symlink !)
299 2 r4/.hg/wcache/checklink-target (symlink !)
260 2 r4/.hg/wcache/checknoexec (execbit !)
300 2 r4/.hg/wcache/checknoexec (execbit !)
261 2 r4/.hg/wcache/manifestfulltextcache (reporevlogstore !)
301 2 r4/.hg/wcache/manifestfulltextcache (reporevlogstore !)
262 2 r4/d1/data1
302 2 r4/d1/data1
263 2 r4/d1/f2
303 2 r4/d1/f2
264 2 r4/f1
304 2 r4/f1
265 2 r4/f3
305 2 r4/f3
266
306
267 Update back to revision 12 in r4 should break the hardlinks of files f1 and f3:
307 Update back to revision 12 in r4 should break the hardlinks of files f1 and f3:
268 #if hardlink-whitelisted
308 #if hardlink-whitelisted
269 $ nlinksdir r4/.hg/undo.backup.dirstate.bck r4/.hg/dirstate
309 $ nlinksdir r4/.hg/undo.backup.dirstate.bck r4/.hg/dirstate
270 2 r4/.hg/dirstate
310 2 r4/.hg/dirstate
271 2 r4/.hg/undo.backup.dirstate.bck
311 2 r4/.hg/undo.backup.dirstate.bck
272 #endif
312 #endif
273
313
274
314
275 $ hg -R r4 up 12
315 $ hg -R r4 up 12
276 2 files updated, 0 files merged, 0 files removed, 0 files unresolved (execbit !)
316 2 files updated, 0 files merged, 0 files removed, 0 files unresolved (execbit !)
277 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (no-execbit !)
317 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (no-execbit !)
278
318
279 $ nlinksdir r4
319 $ nlinksdir r4
280 2 r4/.hg/00changelog.i
320 2 r4/.hg/00changelog.i
281 1 r4/.hg/branch
321 1 r4/.hg/branch
282 2 r4/.hg/cache/branch2-base
322 2 r4/.hg/cache/branch2-base
283 2 r4/.hg/cache/branch2-immutable
323 2 r4/.hg/cache/branch2-immutable
284 2 r4/.hg/cache/branch2-served
324 2 r4/.hg/cache/branch2-served
285 2 r4/.hg/cache/branch2-served.hidden
325 2 r4/.hg/cache/branch2-served.hidden
286 2 r4/.hg/cache/branch2-visible
326 2 r4/.hg/cache/branch2-visible
287 2 r4/.hg/cache/branch2-visible-hidden
327 2 r4/.hg/cache/branch2-visible-hidden
288 2 r4/.hg/cache/rbc-names-v1
328 2 r4/.hg/cache/rbc-names-v1
289 2 r4/.hg/cache/rbc-revs-v1
329 2 r4/.hg/cache/rbc-revs-v1
290 2 r4/.hg/cache/tags2
330 2 r4/.hg/cache/tags2
291 2 r4/.hg/cache/tags2-served
331 2 r4/.hg/cache/tags2-served
292 1 r4/.hg/dirstate
332 1 r4/.hg/dirstate
293 1 r4/.hg/fsmonitor.state (fsmonitor !)
333 1 r4/.hg/fsmonitor.state (fsmonitor !)
294 2 r4/.hg/hgrc
334 2 r4/.hg/hgrc
295 2 r4/.hg/last-message.txt
335 2 r4/.hg/last-message.txt
296 2 r4/.hg/requires
336 2 r4/.hg/requires
337 2 r4/.hg/store/00changelog-7f2eb713.nd (rust !)
338 2 r4/.hg/store/00changelog.d
297 2 r4/.hg/store/00changelog.i
339 2 r4/.hg/store/00changelog.i
340 2 r4/.hg/store/00changelog.n (rust !)
298 2 r4/.hg/store/00manifest.i
341 2 r4/.hg/store/00manifest.i
299 2 r4/.hg/store/data/d1/f2.d
342 2 r4/.hg/store/data/d1/f2.d
300 2 r4/.hg/store/data/d1/f2.i
343 2 r4/.hg/store/data/d1/f2.i
301 2 r4/.hg/store/data/f1.i
344 2 r4/.hg/store/data/f1.i
302 2 r4/.hg/store/data/f3.i
345 2 r4/.hg/store/data/f3.i
303 2 r4/.hg/store/fncache
346 2 r4/.hg/store/fncache
304 2 r4/.hg/store/phaseroots
347 2 r4/.hg/store/phaseroots
305 2 r4/.hg/store/requires
348 2 r4/.hg/store/requires
306 2 r4/.hg/store/undo
349 2 r4/.hg/store/undo
350 2 r4/.hg/store/undo.backup.00changelog.n.bck (rust !)
307 2 r4/.hg/store/undo.backupfiles
351 2 r4/.hg/store/undo.backupfiles
308 [23] r4/.hg/undo.backup.branch.bck (re)
352 [23] r4/.hg/undo.backup.branch.bck (re)
309 2 r4/\.hg/undo\.backup\.dirstate.bck (re)
353 2 r4/\.hg/undo\.backup\.dirstate.bck (re)
310 2 r4/.hg/undo.desc
354 2 r4/.hg/undo.desc
311 2 r4/.hg/wcache/checkisexec (execbit !)
355 2 r4/.hg/wcache/checkisexec (execbit !)
312 2 r4/.hg/wcache/checklink-target (symlink !)
356 2 r4/.hg/wcache/checklink-target (symlink !)
313 2 r4/.hg/wcache/checknoexec (execbit !)
357 2 r4/.hg/wcache/checknoexec (execbit !)
314 1 r4/.hg/wcache/manifestfulltextcache (reporevlogstore !)
358 1 r4/.hg/wcache/manifestfulltextcache (reporevlogstore !)
315 2 r4/d1/data1
359 2 r4/d1/data1
316 2 r4/d1/f2
360 2 r4/d1/f2
317 1 r4/f1
361 1 r4/f1
318 1 r4/f3 (execbit !)
362 1 r4/f3 (execbit !)
319 2 r4/f3 (no-execbit !)
363 2 r4/f3 (no-execbit !)
320
364
321 #if hardlink-whitelisted
365 #if hardlink-whitelisted
322 $ nlinksdir r4/.hg/undo.backup.dirstate.bck r4/.hg/dirstate
366 $ nlinksdir r4/.hg/undo.backup.dirstate.bck r4/.hg/dirstate
323 1 r4/.hg/dirstate
367 1 r4/.hg/dirstate
324 2 r4/.hg/undo.backup.dirstate.bck
368 2 r4/.hg/undo.backup.dirstate.bck
325 #endif
369 #endif
326
370
327 Test hardlinking outside hg:
371 Test hardlinking outside hg:
328
372
329 $ mkdir x
373 $ mkdir x
330 $ echo foo > x/a
374 $ echo foo > x/a
331
375
332 $ linkcp x y
376 $ linkcp x y
333 $ echo bar >> y/a
377 $ echo bar >> y/a
334
378
335 No diff if hardlink:
379 No diff if hardlink:
336
380
337 $ diff x/a y/a
381 $ diff x/a y/a
338
382
339 Test mq hardlinking:
383 Test mq hardlinking:
340
384
341 $ echo "[extensions]" >> $HGRCPATH
385 $ echo "[extensions]" >> $HGRCPATH
342 $ echo "mq=" >> $HGRCPATH
386 $ echo "mq=" >> $HGRCPATH
343
387
344 $ hg init a
388 $ hg init a
345 $ cd a
389 $ cd a
346
390
347 $ hg qimport -n foo - << EOF
391 $ hg qimport -n foo - << EOF
348 > # HG changeset patch
392 > # HG changeset patch
349 > # Date 1 0
393 > # Date 1 0
350 > diff -r 2588a8b53d66 a
394 > diff -r 2588a8b53d66 a
351 > --- /dev/null Thu Jan 01 00:00:00 1970 +0000
395 > --- /dev/null Thu Jan 01 00:00:00 1970 +0000
352 > +++ b/a Wed Jul 23 15:54:29 2008 +0200
396 > +++ b/a Wed Jul 23 15:54:29 2008 +0200
353 > @@ -0,0 +1,1 @@
397 > @@ -0,0 +1,1 @@
354 > +a
398 > +a
355 > EOF
399 > EOF
356 adding foo to series file
400 adding foo to series file
357
401
358 $ hg qpush
402 $ hg qpush
359 applying foo
403 applying foo
360 now at: foo
404 now at: foo
361
405
362 $ cd ..
406 $ cd ..
363 $ linkcp a b
407 $ linkcp a b
364 $ cd b
408 $ cd b
365
409
366 $ hg qimport -n bar - << EOF
410 $ hg qimport -n bar - << EOF
367 > # HG changeset patch
411 > # HG changeset patch
368 > # Date 2 0
412 > # Date 2 0
369 > diff -r 2588a8b53d66 a
413 > diff -r 2588a8b53d66 a
370 > --- /dev/null Thu Jan 01 00:00:00 1970 +0000
414 > --- /dev/null Thu Jan 01 00:00:00 1970 +0000
371 > +++ b/b Wed Jul 23 15:54:29 2008 +0200
415 > +++ b/b Wed Jul 23 15:54:29 2008 +0200
372 > @@ -0,0 +1,1 @@
416 > @@ -0,0 +1,1 @@
373 > +b
417 > +b
374 > EOF
418 > EOF
375 adding bar to series file
419 adding bar to series file
376
420
377 $ hg qpush
421 $ hg qpush
378 applying bar
422 applying bar
379 now at: bar
423 now at: bar
380
424
381 $ cat .hg/patches/status
425 $ cat .hg/patches/status
382 430ed4828a74fa4047bc816a25500f7472ab4bfe:foo
426 430ed4828a74fa4047bc816a25500f7472ab4bfe:foo
383 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c:bar
427 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c:bar
384
428
385 $ cat .hg/patches/series
429 $ cat .hg/patches/series
386 foo
430 foo
387 bar
431 bar
388
432
389 $ cat ../a/.hg/patches/status
433 $ cat ../a/.hg/patches/status
390 430ed4828a74fa4047bc816a25500f7472ab4bfe:foo
434 430ed4828a74fa4047bc816a25500f7472ab4bfe:foo
391
435
392 $ cat ../a/.hg/patches/series
436 $ cat ../a/.hg/patches/series
393 foo
437 foo
394
438
395 Test tags hardlinking:
439 Test tags hardlinking:
396
440
397 $ hg qdel -r qbase:qtip
441 $ hg qdel -r qbase:qtip
398 patch foo finalized without changeset message
442 patch foo finalized without changeset message
399 patch bar finalized without changeset message
443 patch bar finalized without changeset message
400
444
401 $ hg tag -l lfoo
445 $ hg tag -l lfoo
402 $ hg tag foo
446 $ hg tag foo
403
447
404 $ cd ..
448 $ cd ..
405 $ linkcp b c
449 $ linkcp b c
406 $ cd c
450 $ cd c
407
451
408 $ hg tag -l -r 0 lbar
452 $ hg tag -l -r 0 lbar
409 $ hg tag -r 0 bar
453 $ hg tag -r 0 bar
410
454
411 $ cat .hgtags
455 $ cat .hgtags
412 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c foo
456 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c foo
413 430ed4828a74fa4047bc816a25500f7472ab4bfe bar
457 430ed4828a74fa4047bc816a25500f7472ab4bfe bar
414
458
415 $ cat .hg/localtags
459 $ cat .hg/localtags
416 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c lfoo
460 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c lfoo
417 430ed4828a74fa4047bc816a25500f7472ab4bfe lbar
461 430ed4828a74fa4047bc816a25500f7472ab4bfe lbar
418
462
419 $ cat ../b/.hgtags
463 $ cat ../b/.hgtags
420 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c foo
464 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c foo
421
465
422 $ cat ../b/.hg/localtags
466 $ cat ../b/.hg/localtags
423 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c lfoo
467 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c lfoo
424
468
425 $ cd ..
469 $ cd ..
@@ -1,1463 +1,1467 b''
1 commit hooks can see env vars
1 commit hooks can see env vars
2 (and post-transaction ones are run unlocked)
2 (and post-transaction ones are run unlocked)
3
3
4
4
5 $ cat > $TESTTMP/txnabort.checkargs.py <<EOF
5 $ cat > $TESTTMP/txnabort.checkargs.py <<EOF
6 > from mercurial import pycompat
6 > from mercurial import pycompat
7 > def showargs(ui, repo, hooktype, **kwargs):
7 > def showargs(ui, repo, hooktype, **kwargs):
8 > kwargs = pycompat.byteskwargs(kwargs)
8 > kwargs = pycompat.byteskwargs(kwargs)
9 > ui.write(b'%s Python hook: %s\n' % (hooktype,
9 > ui.write(b'%s Python hook: %s\n' % (hooktype,
10 > b','.join(sorted(kwargs))))
10 > b','.join(sorted(kwargs))))
11 > EOF
11 > EOF
12
12
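An in-process hook like the one above is just a function taking ui, repo, hooktype plus
keyword arguments; a sketch of one that reports the committed node (the function name and
message text are made up, the byteskwargs conversion mirrors the helper above):

  from mercurial import pycompat

  def shownode(ui, repo, hooktype, **kwargs):
      kwargs = pycompat.byteskwargs(kwargs)
      ui.write(b'%s Python hook saw node %s\n'
               % (hooktype, kwargs.get(b'node', b'(none)')))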
13 $ hg init a
13 $ hg init a
14 $ cd a
14 $ cd a
15 $ cat > .hg/hgrc <<EOF
15 $ cat > .hg/hgrc <<EOF
16 > [hooks]
16 > [hooks]
17 > commit = sh -c "HG_LOCAL= HG_TAG= printenv.py --line commit"
17 > commit = sh -c "HG_LOCAL= HG_TAG= printenv.py --line commit"
18 > commit.b = sh -c "HG_LOCAL= HG_TAG= printenv.py --line commit.b"
18 > commit.b = sh -c "HG_LOCAL= HG_TAG= printenv.py --line commit.b"
19 > precommit = sh -c "HG_LOCAL= HG_NODE= HG_TAG= printenv.py --line precommit"
19 > precommit = sh -c "HG_LOCAL= HG_NODE= HG_TAG= printenv.py --line precommit"
20 > pretxncommit = sh -c "HG_LOCAL= HG_TAG= printenv.py --line pretxncommit"
20 > pretxncommit = sh -c "HG_LOCAL= HG_TAG= printenv.py --line pretxncommit"
21 > pretxncommit.tip = hg -q tip
21 > pretxncommit.tip = hg -q tip
22 > pre-identify = sh -c "printenv.py --line pre-identify 1"
22 > pre-identify = sh -c "printenv.py --line pre-identify 1"
23 > pre-cat = sh -c "printenv.py --line pre-cat"
23 > pre-cat = sh -c "printenv.py --line pre-cat"
24 > post-cat = sh -c "printenv.py --line post-cat"
24 > post-cat = sh -c "printenv.py --line post-cat"
25 > pretxnopen = sh -c "HG_LOCAL= HG_TAG= printenv.py --line pretxnopen"
25 > pretxnopen = sh -c "HG_LOCAL= HG_TAG= printenv.py --line pretxnopen"
26 > pretxnclose = sh -c "HG_LOCAL= HG_TAG= printenv.py --line pretxnclose"
26 > pretxnclose = sh -c "HG_LOCAL= HG_TAG= printenv.py --line pretxnclose"
27 > txnclose = sh -c "HG_LOCAL= HG_TAG= printenv.py --line txnclose"
27 > txnclose = sh -c "HG_LOCAL= HG_TAG= printenv.py --line txnclose"
28 > txnabort.0 = python:$TESTTMP/txnabort.checkargs.py:showargs
28 > txnabort.0 = python:$TESTTMP/txnabort.checkargs.py:showargs
29 > txnabort.1 = sh -c "HG_LOCAL= HG_TAG= printenv.py --line txnabort"
29 > txnabort.1 = sh -c "HG_LOCAL= HG_TAG= printenv.py --line txnabort"
30 > txnclose.checklock = sh -c "hg debuglock > /dev/null"
30 > txnclose.checklock = sh -c "hg debuglock > /dev/null"
31 > EOF
31 > EOF
32 $ echo a > a
32 $ echo a > a
33 $ hg add a
33 $ hg add a
34 $ hg commit -m a
34 $ hg commit -m a
35 precommit hook: HG_HOOKNAME=precommit
35 precommit hook: HG_HOOKNAME=precommit
36 HG_HOOKTYPE=precommit
36 HG_HOOKTYPE=precommit
37 HG_PARENT1=0000000000000000000000000000000000000000
37 HG_PARENT1=0000000000000000000000000000000000000000
38
38
39 pretxnopen hook: HG_HOOKNAME=pretxnopen
39 pretxnopen hook: HG_HOOKNAME=pretxnopen
40 HG_HOOKTYPE=pretxnopen
40 HG_HOOKTYPE=pretxnopen
41 HG_TXNID=TXN:$ID$
41 HG_TXNID=TXN:$ID$
42 HG_TXNNAME=commit
42 HG_TXNNAME=commit
43
43
44 pretxncommit hook: HG_HOOKNAME=pretxncommit
44 pretxncommit hook: HG_HOOKNAME=pretxncommit
45 HG_HOOKTYPE=pretxncommit
45 HG_HOOKTYPE=pretxncommit
46 HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
46 HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
47 HG_PARENT1=0000000000000000000000000000000000000000
47 HG_PARENT1=0000000000000000000000000000000000000000
48 HG_PENDING=$TESTTMP/a
48 HG_PENDING=$TESTTMP/a
49
49
50 0:cb9a9f314b8b
50 0:cb9a9f314b8b
51 pretxnclose hook: HG_HOOKNAME=pretxnclose
51 pretxnclose hook: HG_HOOKNAME=pretxnclose
52 HG_HOOKTYPE=pretxnclose
52 HG_HOOKTYPE=pretxnclose
53 HG_PENDING=$TESTTMP/a
53 HG_PENDING=$TESTTMP/a
54 HG_PHASES_MOVED=1
54 HG_PHASES_MOVED=1
55 HG_TXNID=TXN:$ID$
55 HG_TXNID=TXN:$ID$
56 HG_TXNNAME=commit
56 HG_TXNNAME=commit
57
57
58 txnclose hook: HG_HOOKNAME=txnclose
58 txnclose hook: HG_HOOKNAME=txnclose
59 HG_HOOKTYPE=txnclose
59 HG_HOOKTYPE=txnclose
60 HG_PHASES_MOVED=1
60 HG_PHASES_MOVED=1
61 HG_TXNID=TXN:$ID$
61 HG_TXNID=TXN:$ID$
62 HG_TXNNAME=commit
62 HG_TXNNAME=commit
63
63
64 commit hook: HG_HOOKNAME=commit
64 commit hook: HG_HOOKNAME=commit
65 HG_HOOKTYPE=commit
65 HG_HOOKTYPE=commit
66 HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
66 HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
67 HG_PARENT1=0000000000000000000000000000000000000000
67 HG_PARENT1=0000000000000000000000000000000000000000
68
68
69 commit.b hook: HG_HOOKNAME=commit.b
69 commit.b hook: HG_HOOKNAME=commit.b
70 HG_HOOKTYPE=commit
70 HG_HOOKTYPE=commit
71 HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
71 HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
72 HG_PARENT1=0000000000000000000000000000000000000000
72 HG_PARENT1=0000000000000000000000000000000000000000
73
73
74
74
75 $ hg clone . ../b
75 $ hg clone . ../b
76 updating to branch default
76 updating to branch default
77 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
77 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
78 $ cd ../b
78 $ cd ../b
79
79
80 changegroup hooks can see env vars
80 changegroup hooks can see env vars
81
81
82 $ cat > .hg/hgrc <<EOF
82 $ cat > .hg/hgrc <<EOF
83 > [hooks]
83 > [hooks]
84 > prechangegroup = sh -c "printenv.py --line prechangegroup"
84 > prechangegroup = sh -c "printenv.py --line prechangegroup"
85 > changegroup = sh -c "printenv.py --line changegroup"
85 > changegroup = sh -c "printenv.py --line changegroup"
86 > incoming = sh -c "printenv.py --line incoming"
86 > incoming = sh -c "printenv.py --line incoming"
87 > EOF
87 > EOF
88
88
89 pretxncommit and commit hooks can see both parents of a merge
89 pretxncommit and commit hooks can see both parents of a merge
90
90
91 $ cd ../a
91 $ cd ../a
92 $ echo b >> a
92 $ echo b >> a
93 $ hg commit -m a1 -d "1 0"
93 $ hg commit -m a1 -d "1 0"
94 precommit hook: HG_HOOKNAME=precommit
94 precommit hook: HG_HOOKNAME=precommit
95 HG_HOOKTYPE=precommit
95 HG_HOOKTYPE=precommit
96 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
96 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
97
97
98 pretxnopen hook: HG_HOOKNAME=pretxnopen
98 pretxnopen hook: HG_HOOKNAME=pretxnopen
99 HG_HOOKTYPE=pretxnopen
99 HG_HOOKTYPE=pretxnopen
100 HG_TXNID=TXN:$ID$
100 HG_TXNID=TXN:$ID$
101 HG_TXNNAME=commit
101 HG_TXNNAME=commit
102
102
103 pretxncommit hook: HG_HOOKNAME=pretxncommit
103 pretxncommit hook: HG_HOOKNAME=pretxncommit
104 HG_HOOKTYPE=pretxncommit
104 HG_HOOKTYPE=pretxncommit
105 HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd
105 HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd
106 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
106 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
107 HG_PENDING=$TESTTMP/a
107 HG_PENDING=$TESTTMP/a
108
108
109 1:ab228980c14d
109 1:ab228980c14d
110 pretxnclose hook: HG_HOOKNAME=pretxnclose
110 pretxnclose hook: HG_HOOKNAME=pretxnclose
111 HG_HOOKTYPE=pretxnclose
111 HG_HOOKTYPE=pretxnclose
112 HG_PENDING=$TESTTMP/a
112 HG_PENDING=$TESTTMP/a
113 HG_TXNID=TXN:$ID$
113 HG_TXNID=TXN:$ID$
114 HG_TXNNAME=commit
114 HG_TXNNAME=commit
115
115
116 txnclose hook: HG_HOOKNAME=txnclose
116 txnclose hook: HG_HOOKNAME=txnclose
117 HG_HOOKTYPE=txnclose
117 HG_HOOKTYPE=txnclose
118 HG_TXNID=TXN:$ID$
118 HG_TXNID=TXN:$ID$
119 HG_TXNNAME=commit
119 HG_TXNNAME=commit
120
120
121 commit hook: HG_HOOKNAME=commit
121 commit hook: HG_HOOKNAME=commit
122 HG_HOOKTYPE=commit
122 HG_HOOKTYPE=commit
123 HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd
123 HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd
124 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
124 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
125
125
126 commit.b hook: HG_HOOKNAME=commit.b
126 commit.b hook: HG_HOOKNAME=commit.b
127 HG_HOOKTYPE=commit
127 HG_HOOKTYPE=commit
128 HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd
128 HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd
129 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
129 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
130
130
131 $ hg update -C 0
131 $ hg update -C 0
132 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
132 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
133 $ echo b > b
133 $ echo b > b
134 $ hg add b
134 $ hg add b
135 $ hg commit -m b -d '1 0'
135 $ hg commit -m b -d '1 0'
136 precommit hook: HG_HOOKNAME=precommit
136 precommit hook: HG_HOOKNAME=precommit
137 HG_HOOKTYPE=precommit
137 HG_HOOKTYPE=precommit
138 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
138 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
139
139
140 pretxnopen hook: HG_HOOKNAME=pretxnopen
140 pretxnopen hook: HG_HOOKNAME=pretxnopen
141 HG_HOOKTYPE=pretxnopen
141 HG_HOOKTYPE=pretxnopen
142 HG_TXNID=TXN:$ID$
142 HG_TXNID=TXN:$ID$
143 HG_TXNNAME=commit
143 HG_TXNNAME=commit
144
144
145 pretxncommit hook: HG_HOOKNAME=pretxncommit
145 pretxncommit hook: HG_HOOKNAME=pretxncommit
146 HG_HOOKTYPE=pretxncommit
146 HG_HOOKTYPE=pretxncommit
147 HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2
147 HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2
148 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
148 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
149 HG_PENDING=$TESTTMP/a
149 HG_PENDING=$TESTTMP/a
150
150
151 2:ee9deb46ab31
151 2:ee9deb46ab31
152 pretxnclose hook: HG_HOOKNAME=pretxnclose
152 pretxnclose hook: HG_HOOKNAME=pretxnclose
153 HG_HOOKTYPE=pretxnclose
153 HG_HOOKTYPE=pretxnclose
154 HG_PENDING=$TESTTMP/a
154 HG_PENDING=$TESTTMP/a
155 HG_TXNID=TXN:$ID$
155 HG_TXNID=TXN:$ID$
156 HG_TXNNAME=commit
156 HG_TXNNAME=commit
157
157
158 created new head
158 created new head
159 txnclose hook: HG_HOOKNAME=txnclose
159 txnclose hook: HG_HOOKNAME=txnclose
160 HG_HOOKTYPE=txnclose
160 HG_HOOKTYPE=txnclose
161 HG_TXNID=TXN:$ID$
161 HG_TXNID=TXN:$ID$
162 HG_TXNNAME=commit
162 HG_TXNNAME=commit
163
163
164 commit hook: HG_HOOKNAME=commit
164 commit hook: HG_HOOKNAME=commit
165 HG_HOOKTYPE=commit
165 HG_HOOKTYPE=commit
166 HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2
166 HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2
167 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
167 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
168
168
169 commit.b hook: HG_HOOKNAME=commit.b
169 commit.b hook: HG_HOOKNAME=commit.b
170 HG_HOOKTYPE=commit
170 HG_HOOKTYPE=commit
171 HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2
171 HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2
172 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
172 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
173
173
174 $ hg merge 1
174 $ hg merge 1
175 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
175 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
176 (branch merge, don't forget to commit)
176 (branch merge, don't forget to commit)
177 $ hg commit -m merge -d '2 0'
177 $ hg commit -m merge -d '2 0'
178 precommit hook: HG_HOOKNAME=precommit
178 precommit hook: HG_HOOKNAME=precommit
179 HG_HOOKTYPE=precommit
179 HG_HOOKTYPE=precommit
180 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2
180 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2
181 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
181 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
182
182
183 pretxnopen hook: HG_HOOKNAME=pretxnopen
183 pretxnopen hook: HG_HOOKNAME=pretxnopen
184 HG_HOOKTYPE=pretxnopen
184 HG_HOOKTYPE=pretxnopen
185 HG_TXNID=TXN:$ID$
185 HG_TXNID=TXN:$ID$
186 HG_TXNNAME=commit
186 HG_TXNNAME=commit
187
187
188 pretxncommit hook: HG_HOOKNAME=pretxncommit
188 pretxncommit hook: HG_HOOKNAME=pretxncommit
189 HG_HOOKTYPE=pretxncommit
189 HG_HOOKTYPE=pretxncommit
190 HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2
190 HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2
191 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2
191 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2
192 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
192 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
193 HG_PENDING=$TESTTMP/a
193 HG_PENDING=$TESTTMP/a
194
194
195 3:07f3376c1e65
195 3:07f3376c1e65
196 pretxnclose hook: HG_HOOKNAME=pretxnclose
196 pretxnclose hook: HG_HOOKNAME=pretxnclose
197 HG_HOOKTYPE=pretxnclose
197 HG_HOOKTYPE=pretxnclose
198 HG_PENDING=$TESTTMP/a
198 HG_PENDING=$TESTTMP/a
199 HG_TXNID=TXN:$ID$
199 HG_TXNID=TXN:$ID$
200 HG_TXNNAME=commit
200 HG_TXNNAME=commit
201
201
202 txnclose hook: HG_HOOKNAME=txnclose
202 txnclose hook: HG_HOOKNAME=txnclose
203 HG_HOOKTYPE=txnclose
203 HG_HOOKTYPE=txnclose
204 HG_TXNID=TXN:$ID$
204 HG_TXNID=TXN:$ID$
205 HG_TXNNAME=commit
205 HG_TXNNAME=commit
206
206
207 commit hook: HG_HOOKNAME=commit
207 commit hook: HG_HOOKNAME=commit
208 HG_HOOKTYPE=commit
208 HG_HOOKTYPE=commit
209 HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2
209 HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2
210 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2
210 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2
211 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
211 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
212
212
213 commit.b hook: HG_HOOKNAME=commit.b
213 commit.b hook: HG_HOOKNAME=commit.b
214 HG_HOOKTYPE=commit
214 HG_HOOKTYPE=commit
215 HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2
215 HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2
216 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2
216 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2
217 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
217 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
218
218
219
219
220 test generic hooks
220 test generic hooks
221
221
222 $ hg id
222 $ hg id
223 pre-identify hook: HG_ARGS=id
223 pre-identify hook: HG_ARGS=id
224 HG_HOOKNAME=pre-identify
224 HG_HOOKNAME=pre-identify
225 HG_HOOKTYPE=pre-identify
225 HG_HOOKTYPE=pre-identify
226 HG_OPTS={'bookmarks': None, 'branch': None, 'id': None, 'insecure': None, 'num': None, 'remotecmd': '', 'rev': '', 'ssh': '', 'tags': None, 'template': ''}
226 HG_OPTS={'bookmarks': None, 'branch': None, 'id': None, 'insecure': None, 'num': None, 'remotecmd': '', 'rev': '', 'ssh': '', 'tags': None, 'template': ''}
227 HG_PATS=[]
227 HG_PATS=[]
228
228
229 abort: pre-identify hook exited with status 1
229 abort: pre-identify hook exited with status 1
230 [40]
230 [40]
231 $ hg cat b
231 $ hg cat b
232 pre-cat hook: HG_ARGS=cat b
232 pre-cat hook: HG_ARGS=cat b
233 HG_HOOKNAME=pre-cat
233 HG_HOOKNAME=pre-cat
234 HG_HOOKTYPE=pre-cat
234 HG_HOOKTYPE=pre-cat
235 HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': '', 'template': ''}
235 HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': '', 'template': ''}
236 HG_PATS=['b']
236 HG_PATS=['b']
237
237
238 b
238 b
239 post-cat hook: HG_ARGS=cat b
239 post-cat hook: HG_ARGS=cat b
240 HG_HOOKNAME=post-cat
240 HG_HOOKNAME=post-cat
241 HG_HOOKTYPE=post-cat
241 HG_HOOKTYPE=post-cat
242 HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': '', 'template': ''}
242 HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': '', 'template': ''}
243 HG_PATS=['b']
243 HG_PATS=['b']
244 HG_RESULT=0
244 HG_RESULT=0
245
245
246
246
247 $ cd ../b
247 $ cd ../b
248 $ hg pull ../a
248 $ hg pull ../a
249 pulling from ../a
249 pulling from ../a
250 searching for changes
250 searching for changes
251 prechangegroup hook: HG_HOOKNAME=prechangegroup
251 prechangegroup hook: HG_HOOKNAME=prechangegroup
252 HG_HOOKTYPE=prechangegroup
252 HG_HOOKTYPE=prechangegroup
253 HG_SOURCE=pull
253 HG_SOURCE=pull
254 HG_TXNID=TXN:$ID$
254 HG_TXNID=TXN:$ID$
255 HG_TXNNAME=pull
255 HG_TXNNAME=pull
256 file:/*/$TESTTMP/a (glob)
256 file:/*/$TESTTMP/a (glob)
257 HG_URL=file:$TESTTMP/a
257 HG_URL=file:$TESTTMP/a
258
258
259 adding changesets
259 adding changesets
260 adding manifests
260 adding manifests
261 adding file changes
261 adding file changes
262 added 3 changesets with 2 changes to 2 files
262 added 3 changesets with 2 changes to 2 files
263 new changesets ab228980c14d:07f3376c1e65
263 new changesets ab228980c14d:07f3376c1e65
264 changegroup hook: HG_HOOKNAME=changegroup
264 changegroup hook: HG_HOOKNAME=changegroup
265 HG_HOOKTYPE=changegroup
265 HG_HOOKTYPE=changegroup
266 HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd
266 HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd
267 HG_NODE_LAST=07f3376c1e655977439df2a814e3cc14b27abac2
267 HG_NODE_LAST=07f3376c1e655977439df2a814e3cc14b27abac2
268 HG_SOURCE=pull
268 HG_SOURCE=pull
269 HG_TXNID=TXN:$ID$
269 HG_TXNID=TXN:$ID$
270 HG_TXNNAME=pull
270 HG_TXNNAME=pull
271 file:/*/$TESTTMP/a (glob)
271 file:/*/$TESTTMP/a (glob)
272 HG_URL=file:$TESTTMP/a
272 HG_URL=file:$TESTTMP/a
273
273
274 incoming hook: HG_HOOKNAME=incoming
274 incoming hook: HG_HOOKNAME=incoming
275 HG_HOOKTYPE=incoming
275 HG_HOOKTYPE=incoming
276 HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd
276 HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd
277 HG_SOURCE=pull
277 HG_SOURCE=pull
278 HG_TXNID=TXN:$ID$
278 HG_TXNID=TXN:$ID$
279 HG_TXNNAME=pull
279 HG_TXNNAME=pull
280 file:/*/$TESTTMP/a (glob)
280 file:/*/$TESTTMP/a (glob)
281 HG_URL=file:$TESTTMP/a
281 HG_URL=file:$TESTTMP/a
282
282
283 incoming hook: HG_HOOKNAME=incoming
283 incoming hook: HG_HOOKNAME=incoming
284 HG_HOOKTYPE=incoming
284 HG_HOOKTYPE=incoming
285 HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2
285 HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2
286 HG_SOURCE=pull
286 HG_SOURCE=pull
287 HG_TXNID=TXN:$ID$
287 HG_TXNID=TXN:$ID$
288 HG_TXNNAME=pull
288 HG_TXNNAME=pull
289 file:/*/$TESTTMP/a (glob)
289 file:/*/$TESTTMP/a (glob)
290 HG_URL=file:$TESTTMP/a
290 HG_URL=file:$TESTTMP/a
291
291
292 incoming hook: HG_HOOKNAME=incoming
292 incoming hook: HG_HOOKNAME=incoming
293 HG_HOOKTYPE=incoming
293 HG_HOOKTYPE=incoming
294 HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2
294 HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2
295 HG_SOURCE=pull
295 HG_SOURCE=pull
296 HG_TXNID=TXN:$ID$
296 HG_TXNID=TXN:$ID$
297 HG_TXNNAME=pull
297 HG_TXNNAME=pull
298 file:/*/$TESTTMP/a (glob)
298 file:/*/$TESTTMP/a (glob)
299 HG_URL=file:$TESTTMP/a
299 HG_URL=file:$TESTTMP/a
300
300
301 (run 'hg update' to get a working copy)
301 (run 'hg update' to get a working copy)
302
302
303 tag hooks can see env vars
303 tag hooks can see env vars
304
304
305 $ cd ../a
305 $ cd ../a
306 $ cat >> .hg/hgrc <<EOF
306 $ cat >> .hg/hgrc <<EOF
307 > pretag = sh -c "printenv.py --line pretag"
307 > pretag = sh -c "printenv.py --line pretag"
308 > tag = sh -c "HG_PARENT1= HG_PARENT2= printenv.py --line tag"
308 > tag = sh -c "HG_PARENT1= HG_PARENT2= printenv.py --line tag"
309 > EOF
309 > EOF
310 $ hg tag -d '3 0' a
310 $ hg tag -d '3 0' a
311 pretag hook: HG_HOOKNAME=pretag
311 pretag hook: HG_HOOKNAME=pretag
312 HG_HOOKTYPE=pretag
312 HG_HOOKTYPE=pretag
313 HG_LOCAL=0
313 HG_LOCAL=0
314 HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2
314 HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2
315 HG_TAG=a
315 HG_TAG=a
316
316
317 precommit hook: HG_HOOKNAME=precommit
317 precommit hook: HG_HOOKNAME=precommit
318 HG_HOOKTYPE=precommit
318 HG_HOOKTYPE=precommit
319 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
319 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
320
320
321 pretxnopen hook: HG_HOOKNAME=pretxnopen
321 pretxnopen hook: HG_HOOKNAME=pretxnopen
322 HG_HOOKTYPE=pretxnopen
322 HG_HOOKTYPE=pretxnopen
323 HG_TXNID=TXN:$ID$
323 HG_TXNID=TXN:$ID$
324 HG_TXNNAME=commit
324 HG_TXNNAME=commit
325
325
326 pretxncommit hook: HG_HOOKNAME=pretxncommit
326 pretxncommit hook: HG_HOOKNAME=pretxncommit
327 HG_HOOKTYPE=pretxncommit
327 HG_HOOKTYPE=pretxncommit
328 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
328 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
329 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
329 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
330 HG_PENDING=$TESTTMP/a
330 HG_PENDING=$TESTTMP/a
331
331
332 4:539e4b31b6dc
332 4:539e4b31b6dc
333 pretxnclose hook: HG_HOOKNAME=pretxnclose
333 pretxnclose hook: HG_HOOKNAME=pretxnclose
334 HG_HOOKTYPE=pretxnclose
334 HG_HOOKTYPE=pretxnclose
335 HG_PENDING=$TESTTMP/a
335 HG_PENDING=$TESTTMP/a
336 HG_TXNID=TXN:$ID$
336 HG_TXNID=TXN:$ID$
337 HG_TXNNAME=commit
337 HG_TXNNAME=commit
338
338
339 tag hook: HG_HOOKNAME=tag
339 tag hook: HG_HOOKNAME=tag
340 HG_HOOKTYPE=tag
340 HG_HOOKTYPE=tag
341 HG_LOCAL=0
341 HG_LOCAL=0
342 HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2
342 HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2
343 HG_TAG=a
343 HG_TAG=a
344
344
345 txnclose hook: HG_HOOKNAME=txnclose
345 txnclose hook: HG_HOOKNAME=txnclose
346 HG_HOOKTYPE=txnclose
346 HG_HOOKTYPE=txnclose
347 HG_TXNID=TXN:$ID$
347 HG_TXNID=TXN:$ID$
348 HG_TXNNAME=commit
348 HG_TXNNAME=commit
349
349
350 commit hook: HG_HOOKNAME=commit
350 commit hook: HG_HOOKNAME=commit
351 HG_HOOKTYPE=commit
351 HG_HOOKTYPE=commit
352 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
352 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
353 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
353 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
354
354
355 commit.b hook: HG_HOOKNAME=commit.b
355 commit.b hook: HG_HOOKNAME=commit.b
356 HG_HOOKTYPE=commit
356 HG_HOOKTYPE=commit
357 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
357 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
358 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
358 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
359
359
360 $ hg tag -l la
360 $ hg tag -l la
361 pretag hook: HG_HOOKNAME=pretag
361 pretag hook: HG_HOOKNAME=pretag
362 HG_HOOKTYPE=pretag
362 HG_HOOKTYPE=pretag
363 HG_LOCAL=1
363 HG_LOCAL=1
364 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
364 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
365 HG_TAG=la
365 HG_TAG=la
366
366
367 tag hook: HG_HOOKNAME=tag
367 tag hook: HG_HOOKNAME=tag
368 HG_HOOKTYPE=tag
368 HG_HOOKTYPE=tag
369 HG_LOCAL=1
369 HG_LOCAL=1
370 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
370 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
371 HG_TAG=la
371 HG_TAG=la
372
372
373
373
374 pretag hook can forbid tagging
374 pretag hook can forbid tagging
375
375
376 $ cat >> .hg/hgrc <<EOF
376 $ cat >> .hg/hgrc <<EOF
377 > pretag.forbid = sh -c "printenv.py --line pretag.forbid 1"
377 > pretag.forbid = sh -c "printenv.py --line pretag.forbid 1"
378 > EOF
378 > EOF
379 $ hg tag -d '4 0' fa
379 $ hg tag -d '4 0' fa
380 pretag hook: HG_HOOKNAME=pretag
380 pretag hook: HG_HOOKNAME=pretag
381 HG_HOOKTYPE=pretag
381 HG_HOOKTYPE=pretag
382 HG_LOCAL=0
382 HG_LOCAL=0
383 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
383 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
384 HG_TAG=fa
384 HG_TAG=fa
385
385
386 pretag.forbid hook: HG_HOOKNAME=pretag.forbid
386 pretag.forbid hook: HG_HOOKNAME=pretag.forbid
387 HG_HOOKTYPE=pretag
387 HG_HOOKTYPE=pretag
388 HG_LOCAL=0
388 HG_LOCAL=0
389 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
389 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
390 HG_TAG=fa
390 HG_TAG=fa
391
391
392 abort: pretag.forbid hook exited with status 1
392 abort: pretag.forbid hook exited with status 1
393 [40]
393 [40]
394 $ hg tag -l fla
394 $ hg tag -l fla
395 pretag hook: HG_HOOKNAME=pretag
395 pretag hook: HG_HOOKNAME=pretag
396 HG_HOOKTYPE=pretag
396 HG_HOOKTYPE=pretag
397 HG_LOCAL=1
397 HG_LOCAL=1
398 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
398 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
399 HG_TAG=fla
399 HG_TAG=fla
400
400
401 pretag.forbid hook: HG_HOOKNAME=pretag.forbid
401 pretag.forbid hook: HG_HOOKNAME=pretag.forbid
402 HG_HOOKTYPE=pretag
402 HG_HOOKTYPE=pretag
403 HG_LOCAL=1
403 HG_LOCAL=1
404 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
404 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
405 HG_TAG=fla
405 HG_TAG=fla
406
406
407 abort: pretag.forbid hook exited with status 1
407 abort: pretag.forbid hook exited with status 1
408 [40]
408 [40]
409
409
410 pretxncommit hook can see the changeset, can roll back the txn, and the
410 pretxncommit hook can see the changeset, can roll back the txn, and the
411 changeset is no longer there afterwards
411 changeset is no longer there afterwards
412
412
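The shell hooks below veto the commit by exiting non-zero. For comparison, a minimal in-process sketch of the same idea could look like the following (hypothetical module name vetohooks.py on PYTHONPATH, wired up as "pretxncommit.veto = python:vetohooks.veto" in [hooks]; this is an illustration, not part of the test):

    def veto(ui, repo, node=None, **kwargs):
        # the pending changeset is already visible inside the transaction
        ctx = repo[node]
        ui.write(b'about to commit %s\n' % ctx.hex()[:12])
        # a truthy return value fails the hook, which aborts the commit and
        # rolls the transaction back, just as the shell forbid hooks below do
        return b'fail' in ctx.description()
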
413 $ cat >> .hg/hgrc <<EOF
413 $ cat >> .hg/hgrc <<EOF
414 > pretxncommit.forbid0 = sh -c "hg tip -q"
414 > pretxncommit.forbid0 = sh -c "hg tip -q"
415 > pretxncommit.forbid1 = sh -c "printenv.py --line pretxncommit.forbid 1"
415 > pretxncommit.forbid1 = sh -c "printenv.py --line pretxncommit.forbid 1"
416 > EOF
416 > EOF
417 $ echo z > z
417 $ echo z > z
418 $ hg add z
418 $ hg add z
419 $ hg -q tip
419 $ hg -q tip
420 4:539e4b31b6dc
420 4:539e4b31b6dc
421 $ hg commit -m 'fail' -d '4 0'
421 $ hg commit -m 'fail' -d '4 0'
422 precommit hook: HG_HOOKNAME=precommit
422 precommit hook: HG_HOOKNAME=precommit
423 HG_HOOKTYPE=precommit
423 HG_HOOKTYPE=precommit
424 HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
424 HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
425
425
426 pretxnopen hook: HG_HOOKNAME=pretxnopen
426 pretxnopen hook: HG_HOOKNAME=pretxnopen
427 HG_HOOKTYPE=pretxnopen
427 HG_HOOKTYPE=pretxnopen
428 HG_TXNID=TXN:$ID$
428 HG_TXNID=TXN:$ID$
429 HG_TXNNAME=commit
429 HG_TXNNAME=commit
430
430
431 pretxncommit hook: HG_HOOKNAME=pretxncommit
431 pretxncommit hook: HG_HOOKNAME=pretxncommit
432 HG_HOOKTYPE=pretxncommit
432 HG_HOOKTYPE=pretxncommit
433 HG_NODE=6f611f8018c10e827fee6bd2bc807f937e761567
433 HG_NODE=6f611f8018c10e827fee6bd2bc807f937e761567
434 HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
434 HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
435 HG_PENDING=$TESTTMP/a
435 HG_PENDING=$TESTTMP/a
436
436
437 5:6f611f8018c1
437 5:6f611f8018c1
438 5:6f611f8018c1
438 5:6f611f8018c1
439 pretxncommit.forbid hook: HG_HOOKNAME=pretxncommit.forbid1
439 pretxncommit.forbid hook: HG_HOOKNAME=pretxncommit.forbid1
440 HG_HOOKTYPE=pretxncommit
440 HG_HOOKTYPE=pretxncommit
441 HG_NODE=6f611f8018c10e827fee6bd2bc807f937e761567
441 HG_NODE=6f611f8018c10e827fee6bd2bc807f937e761567
442 HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
442 HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
443 HG_PENDING=$TESTTMP/a
443 HG_PENDING=$TESTTMP/a
444
444
445 transaction abort!
445 transaction abort!
446 txnabort Python hook: changes,txnid,txnname
446 txnabort Python hook: changes,txnid,txnname
447 txnabort hook: HG_HOOKNAME=txnabort.1
447 txnabort hook: HG_HOOKNAME=txnabort.1
448 HG_HOOKTYPE=txnabort
448 HG_HOOKTYPE=txnabort
449 HG_TXNID=TXN:$ID$
449 HG_TXNID=TXN:$ID$
450 HG_TXNNAME=commit
450 HG_TXNNAME=commit
451
451
452 rollback completed
452 rollback completed
453 abort: pretxncommit.forbid1 hook exited with status 1
453 abort: pretxncommit.forbid1 hook exited with status 1
454 [40]
454 [40]
455 $ hg -q tip
455 $ hg -q tip
456 4:539e4b31b6dc
456 4:539e4b31b6dc
457
457
458 (Check that no 'changelog.i.a' file was left behind)
458 (Check that no 'changelog.i.a' file was left behind)
459
459
460 $ ls -1 .hg/store/
460 $ ls -1 .hg/store/
461 00changelog-1335303a.nd (rust !)
462 00changelog.d
461 00changelog.i
463 00changelog.i
464 00changelog.n (rust !)
462 00manifest.i
465 00manifest.i
463 data
466 data
464 fncache (repofncache !)
467 fncache
465 phaseroots
468 phaseroots
466 requires
469 requires
467 undo
470 undo
468 undo.backup.fncache.bck (repofncache !)
471 undo.backup.00changelog.n.bck (rust !)
472 undo.backup.fncache.bck
469 undo.backupfiles
473 undo.backupfiles
470
474
471
475
472 precommit hook can prevent commit
476 precommit hook can prevent commit
473
477
474 $ cat >> .hg/hgrc <<EOF
478 $ cat >> .hg/hgrc <<EOF
475 > precommit.forbid = sh -c "printenv.py --line precommit.forbid 1"
479 > precommit.forbid = sh -c "printenv.py --line precommit.forbid 1"
476 > EOF
480 > EOF
477 $ hg commit -m 'fail' -d '4 0'
481 $ hg commit -m 'fail' -d '4 0'
478 precommit hook: HG_HOOKNAME=precommit
482 precommit hook: HG_HOOKNAME=precommit
479 HG_HOOKTYPE=precommit
483 HG_HOOKTYPE=precommit
480 HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
484 HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
481
485
482 precommit.forbid hook: HG_HOOKNAME=precommit.forbid
486 precommit.forbid hook: HG_HOOKNAME=precommit.forbid
483 HG_HOOKTYPE=precommit
487 HG_HOOKTYPE=precommit
484 HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
488 HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
485
489
486 abort: precommit.forbid hook exited with status 1
490 abort: precommit.forbid hook exited with status 1
487 [40]
491 [40]
488 $ hg -q tip
492 $ hg -q tip
489 4:539e4b31b6dc
493 4:539e4b31b6dc
490
494
491 preupdate hook can prevent update
495 preupdate hook can prevent update
492
496
493 $ cat >> .hg/hgrc <<EOF
497 $ cat >> .hg/hgrc <<EOF
494 > preupdate = sh -c "printenv.py --line preupdate"
498 > preupdate = sh -c "printenv.py --line preupdate"
495 > EOF
499 > EOF
496 $ hg update 1
500 $ hg update 1
497 preupdate hook: HG_HOOKNAME=preupdate
501 preupdate hook: HG_HOOKNAME=preupdate
498 HG_HOOKTYPE=preupdate
502 HG_HOOKTYPE=preupdate
499 HG_PARENT1=ab228980c14d
503 HG_PARENT1=ab228980c14d
500
504
501 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
505 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
502
506
503 update hook
507 update hook
504
508
505 $ cat >> .hg/hgrc <<EOF
509 $ cat >> .hg/hgrc <<EOF
506 > update = sh -c "printenv.py --line update"
510 > update = sh -c "printenv.py --line update"
507 > EOF
511 > EOF
508 $ hg update
512 $ hg update
509 preupdate hook: HG_HOOKNAME=preupdate
513 preupdate hook: HG_HOOKNAME=preupdate
510 HG_HOOKTYPE=preupdate
514 HG_HOOKTYPE=preupdate
511 HG_PARENT1=539e4b31b6dc
515 HG_PARENT1=539e4b31b6dc
512
516
513 update hook: HG_ERROR=0
517 update hook: HG_ERROR=0
514 HG_HOOKNAME=update
518 HG_HOOKNAME=update
515 HG_HOOKTYPE=update
519 HG_HOOKTYPE=update
516 HG_PARENT1=539e4b31b6dc
520 HG_PARENT1=539e4b31b6dc
517
521
518 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
522 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
519
523
520 pushkey hook
524 pushkey hook
521
525
522 $ cat >> .hg/hgrc <<EOF
526 $ cat >> .hg/hgrc <<EOF
523 > pushkey = sh -c "printenv.py --line pushkey"
527 > pushkey = sh -c "printenv.py --line pushkey"
524 > EOF
528 > EOF
525 $ cd ../b
529 $ cd ../b
526 $ hg bookmark -r null foo
530 $ hg bookmark -r null foo
527 $ hg push -B foo ../a
531 $ hg push -B foo ../a
528 pushing to ../a
532 pushing to ../a
529 searching for changes
533 searching for changes
530 no changes found
534 no changes found
531 pretxnopen hook: HG_HOOKNAME=pretxnopen
535 pretxnopen hook: HG_HOOKNAME=pretxnopen
532 HG_HOOKTYPE=pretxnopen
536 HG_HOOKTYPE=pretxnopen
533 HG_TXNID=TXN:$ID$
537 HG_TXNID=TXN:$ID$
534 HG_TXNNAME=push
538 HG_TXNNAME=push
535
539
536 pretxnclose hook: HG_BOOKMARK_MOVED=1
540 pretxnclose hook: HG_BOOKMARK_MOVED=1
537 HG_BUNDLE2=1
541 HG_BUNDLE2=1
538 HG_HOOKNAME=pretxnclose
542 HG_HOOKNAME=pretxnclose
539 HG_HOOKTYPE=pretxnclose
543 HG_HOOKTYPE=pretxnclose
540 HG_PENDING=$TESTTMP/a
544 HG_PENDING=$TESTTMP/a
541 HG_SOURCE=push
545 HG_SOURCE=push
542 HG_TXNID=TXN:$ID$
546 HG_TXNID=TXN:$ID$
543 HG_TXNNAME=push
547 HG_TXNNAME=push
544 HG_URL=file:$TESTTMP/a
548 HG_URL=file:$TESTTMP/a
545
549
546 pushkey hook: HG_BUNDLE2=1
550 pushkey hook: HG_BUNDLE2=1
547 HG_HOOKNAME=pushkey
551 HG_HOOKNAME=pushkey
548 HG_HOOKTYPE=pushkey
552 HG_HOOKTYPE=pushkey
549 HG_KEY=foo
553 HG_KEY=foo
550 HG_NAMESPACE=bookmarks
554 HG_NAMESPACE=bookmarks
551 HG_NEW=0000000000000000000000000000000000000000
555 HG_NEW=0000000000000000000000000000000000000000
552 HG_PUSHKEYCOMPAT=1
556 HG_PUSHKEYCOMPAT=1
553 HG_SOURCE=push
557 HG_SOURCE=push
554 HG_TXNID=TXN:$ID$
558 HG_TXNID=TXN:$ID$
555 HG_TXNNAME=push
559 HG_TXNNAME=push
556 HG_URL=file:$TESTTMP/a
560 HG_URL=file:$TESTTMP/a
557
561
558 txnclose hook: HG_BOOKMARK_MOVED=1
562 txnclose hook: HG_BOOKMARK_MOVED=1
559 HG_BUNDLE2=1
563 HG_BUNDLE2=1
560 HG_HOOKNAME=txnclose
564 HG_HOOKNAME=txnclose
561 HG_HOOKTYPE=txnclose
565 HG_HOOKTYPE=txnclose
562 HG_SOURCE=push
566 HG_SOURCE=push
563 HG_TXNID=TXN:$ID$
567 HG_TXNID=TXN:$ID$
564 HG_TXNNAME=push
568 HG_TXNNAME=push
565 HG_URL=file:$TESTTMP/a
569 HG_URL=file:$TESTTMP/a
566
570
567 exporting bookmark foo
571 exporting bookmark foo
568 [1]
572 [1]
569 $ cd ../a
573 $ cd ../a
570
574
571 listkeys hook
575 listkeys hook
572
576
573 $ cat >> .hg/hgrc <<EOF
577 $ cat >> .hg/hgrc <<EOF
574 > listkeys = sh -c "printenv.py --line listkeys"
578 > listkeys = sh -c "printenv.py --line listkeys"
575 > EOF
579 > EOF
576 $ hg bookmark -r null bar
580 $ hg bookmark -r null bar
577 pretxnopen hook: HG_HOOKNAME=pretxnopen
581 pretxnopen hook: HG_HOOKNAME=pretxnopen
578 HG_HOOKTYPE=pretxnopen
582 HG_HOOKTYPE=pretxnopen
579 HG_TXNID=TXN:$ID$
583 HG_TXNID=TXN:$ID$
580 HG_TXNNAME=bookmark
584 HG_TXNNAME=bookmark
581
585
582 pretxnclose hook: HG_BOOKMARK_MOVED=1
586 pretxnclose hook: HG_BOOKMARK_MOVED=1
583 HG_HOOKNAME=pretxnclose
587 HG_HOOKNAME=pretxnclose
584 HG_HOOKTYPE=pretxnclose
588 HG_HOOKTYPE=pretxnclose
585 HG_PENDING=$TESTTMP/a
589 HG_PENDING=$TESTTMP/a
586 HG_TXNID=TXN:$ID$
590 HG_TXNID=TXN:$ID$
587 HG_TXNNAME=bookmark
591 HG_TXNNAME=bookmark
588
592
589 txnclose hook: HG_BOOKMARK_MOVED=1
593 txnclose hook: HG_BOOKMARK_MOVED=1
590 HG_HOOKNAME=txnclose
594 HG_HOOKNAME=txnclose
591 HG_HOOKTYPE=txnclose
595 HG_HOOKTYPE=txnclose
592 HG_TXNID=TXN:$ID$
596 HG_TXNID=TXN:$ID$
593 HG_TXNNAME=bookmark
597 HG_TXNNAME=bookmark
594
598
595 $ cd ../b
599 $ cd ../b
596 $ hg pull -B bar ../a
600 $ hg pull -B bar ../a
597 pulling from ../a
601 pulling from ../a
598 listkeys hook: HG_HOOKNAME=listkeys
602 listkeys hook: HG_HOOKNAME=listkeys
599 HG_HOOKTYPE=listkeys
603 HG_HOOKTYPE=listkeys
600 HG_NAMESPACE=bookmarks
604 HG_NAMESPACE=bookmarks
601 HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'}
605 HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'}
602
606
603 no changes found
607 no changes found
604 adding remote bookmark bar
608 adding remote bookmark bar
605 $ cd ../a
609 $ cd ../a
606
610
607 test that prepushkey can prevent incoming keys
611 test that prepushkey can prevent incoming keys
608
612
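The shell prepushkey hook below rejects the key by exiting non-zero. A hypothetical in-process variant (assumed module keyhooks.py, registered as "prepushkey.check = python:keyhooks.check"; shown only as a sketch) would receive as keyword arguments the same values that appear here as HG_NAMESPACE, HG_KEY and HG_NEW:

    def check(ui, repo, namespace=None, key=None, old=None, new=None, **kwargs):
        # only allow bookmarks under an (arbitrary, assumed) feature/ prefix
        if namespace == b'bookmarks' and not key.startswith(b'feature/'):
            ui.warn(b'refusing bookmark %s\n' % key)
            return True   # a truthy result rejects the pushed key
        return False
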
609 $ cat >> .hg/hgrc <<EOF
613 $ cat >> .hg/hgrc <<EOF
610 > prepushkey = sh -c "printenv.py --line prepushkey.forbid 1"
614 > prepushkey = sh -c "printenv.py --line prepushkey.forbid 1"
611 > EOF
615 > EOF
612 $ cd ../b
616 $ cd ../b
613 $ hg bookmark -r null baz
617 $ hg bookmark -r null baz
614 $ hg push -B baz ../a
618 $ hg push -B baz ../a
615 pushing to ../a
619 pushing to ../a
616 searching for changes
620 searching for changes
617 listkeys hook: HG_HOOKNAME=listkeys
621 listkeys hook: HG_HOOKNAME=listkeys
618 HG_HOOKTYPE=listkeys
622 HG_HOOKTYPE=listkeys
619 HG_NAMESPACE=phases
623 HG_NAMESPACE=phases
620 HG_VALUES={'cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b': '1', 'publishing': 'True'}
624 HG_VALUES={'cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b': '1', 'publishing': 'True'}
621
625
622 listkeys hook: HG_HOOKNAME=listkeys
626 listkeys hook: HG_HOOKNAME=listkeys
623 HG_HOOKTYPE=listkeys
627 HG_HOOKTYPE=listkeys
624 HG_NAMESPACE=bookmarks
628 HG_NAMESPACE=bookmarks
625 HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'}
629 HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'}
626
630
627 no changes found
631 no changes found
628 pretxnopen hook: HG_HOOKNAME=pretxnopen
632 pretxnopen hook: HG_HOOKNAME=pretxnopen
629 HG_HOOKTYPE=pretxnopen
633 HG_HOOKTYPE=pretxnopen
630 HG_TXNID=TXN:$ID$
634 HG_TXNID=TXN:$ID$
631 HG_TXNNAME=push
635 HG_TXNNAME=push
632
636
633 prepushkey.forbid hook: HG_BUNDLE2=1
637 prepushkey.forbid hook: HG_BUNDLE2=1
634 HG_HOOKNAME=prepushkey
638 HG_HOOKNAME=prepushkey
635 HG_HOOKTYPE=prepushkey
639 HG_HOOKTYPE=prepushkey
636 HG_KEY=baz
640 HG_KEY=baz
637 HG_NAMESPACE=bookmarks
641 HG_NAMESPACE=bookmarks
638 HG_NEW=0000000000000000000000000000000000000000
642 HG_NEW=0000000000000000000000000000000000000000
639 HG_PUSHKEYCOMPAT=1
643 HG_PUSHKEYCOMPAT=1
640 HG_SOURCE=push
644 HG_SOURCE=push
641 HG_TXNID=TXN:$ID$
645 HG_TXNID=TXN:$ID$
642 HG_TXNNAME=push
646 HG_TXNNAME=push
643 HG_URL=file:$TESTTMP/a
647 HG_URL=file:$TESTTMP/a
644
648
645 txnabort Python hook: bundle2,changes,source,txnid,txnname,url
649 txnabort Python hook: bundle2,changes,source,txnid,txnname,url
646 txnabort hook: HG_BUNDLE2=1
650 txnabort hook: HG_BUNDLE2=1
647 HG_HOOKNAME=txnabort.1
651 HG_HOOKNAME=txnabort.1
648 HG_HOOKTYPE=txnabort
652 HG_HOOKTYPE=txnabort
649 HG_SOURCE=push
653 HG_SOURCE=push
650 HG_TXNID=TXN:$ID$
654 HG_TXNID=TXN:$ID$
651 HG_TXNNAME=push
655 HG_TXNNAME=push
652 HG_URL=file:$TESTTMP/a
656 HG_URL=file:$TESTTMP/a
653
657
654 abort: prepushkey hook exited with status 1
658 abort: prepushkey hook exited with status 1
655 [40]
659 [40]
656 $ cd ../a
660 $ cd ../a
657
661
658 test that prelistkeys can prevent listing keys
662 test that prelistkeys can prevent listing keys
659
663
660 $ cat >> .hg/hgrc <<EOF
664 $ cat >> .hg/hgrc <<EOF
661 > prelistkeys = sh -c "printenv.py --line prelistkeys.forbid 1"
665 > prelistkeys = sh -c "printenv.py --line prelistkeys.forbid 1"
662 > EOF
666 > EOF
663 $ hg bookmark -r null quux
667 $ hg bookmark -r null quux
664 pretxnopen hook: HG_HOOKNAME=pretxnopen
668 pretxnopen hook: HG_HOOKNAME=pretxnopen
665 HG_HOOKTYPE=pretxnopen
669 HG_HOOKTYPE=pretxnopen
666 HG_TXNID=TXN:$ID$
670 HG_TXNID=TXN:$ID$
667 HG_TXNNAME=bookmark
671 HG_TXNNAME=bookmark
668
672
669 pretxnclose hook: HG_BOOKMARK_MOVED=1
673 pretxnclose hook: HG_BOOKMARK_MOVED=1
670 HG_HOOKNAME=pretxnclose
674 HG_HOOKNAME=pretxnclose
671 HG_HOOKTYPE=pretxnclose
675 HG_HOOKTYPE=pretxnclose
672 HG_PENDING=$TESTTMP/a
676 HG_PENDING=$TESTTMP/a
673 HG_TXNID=TXN:$ID$
677 HG_TXNID=TXN:$ID$
674 HG_TXNNAME=bookmark
678 HG_TXNNAME=bookmark
675
679
676 txnclose hook: HG_BOOKMARK_MOVED=1
680 txnclose hook: HG_BOOKMARK_MOVED=1
677 HG_HOOKNAME=txnclose
681 HG_HOOKNAME=txnclose
678 HG_HOOKTYPE=txnclose
682 HG_HOOKTYPE=txnclose
679 HG_TXNID=TXN:$ID$
683 HG_TXNID=TXN:$ID$
680 HG_TXNNAME=bookmark
684 HG_TXNNAME=bookmark
681
685
682 $ cd ../b
686 $ cd ../b
683 $ hg pull -B quux ../a
687 $ hg pull -B quux ../a
684 pulling from ../a
688 pulling from ../a
685 prelistkeys.forbid hook: HG_HOOKNAME=prelistkeys
689 prelistkeys.forbid hook: HG_HOOKNAME=prelistkeys
686 HG_HOOKTYPE=prelistkeys
690 HG_HOOKTYPE=prelistkeys
687 HG_NAMESPACE=bookmarks
691 HG_NAMESPACE=bookmarks
688
692
689 abort: prelistkeys hook exited with status 1
693 abort: prelistkeys hook exited with status 1
690 [40]
694 [40]
691 $ cd ../a
695 $ cd ../a
692 $ rm .hg/hgrc
696 $ rm .hg/hgrc
693
697
694 prechangegroup hook can prevent incoming changes
698 prechangegroup hook can prevent incoming changes
695
699
696 $ cd ../b
700 $ cd ../b
697 $ hg -q tip
701 $ hg -q tip
698 3:07f3376c1e65
702 3:07f3376c1e65
699 $ cat > .hg/hgrc <<EOF
703 $ cat > .hg/hgrc <<EOF
700 > [hooks]
704 > [hooks]
701 > prechangegroup.forbid = sh -c "printenv.py --line prechangegroup.forbid 1"
705 > prechangegroup.forbid = sh -c "printenv.py --line prechangegroup.forbid 1"
702 > EOF
706 > EOF
703 $ hg pull ../a
707 $ hg pull ../a
704 pulling from ../a
708 pulling from ../a
705 searching for changes
709 searching for changes
706 prechangegroup.forbid hook: HG_HOOKNAME=prechangegroup.forbid
710 prechangegroup.forbid hook: HG_HOOKNAME=prechangegroup.forbid
707 HG_HOOKTYPE=prechangegroup
711 HG_HOOKTYPE=prechangegroup
708 HG_SOURCE=pull
712 HG_SOURCE=pull
709 HG_TXNID=TXN:$ID$
713 HG_TXNID=TXN:$ID$
710 HG_TXNNAME=pull
714 HG_TXNNAME=pull
711 file:/*/$TESTTMP/a (glob)
715 file:/*/$TESTTMP/a (glob)
712 HG_URL=file:$TESTTMP/a
716 HG_URL=file:$TESTTMP/a
713
717
714 abort: prechangegroup.forbid hook exited with status 1
718 abort: prechangegroup.forbid hook exited with status 1
715 [40]
719 [40]
716
720
717 pretxnchangegroup hook can see incoming changes, can roll back the txn,
721 pretxnchangegroup hook can see incoming changes, can roll back the txn,
718 and the incoming changes are no longer there afterwards
722 and the incoming changes are no longer there afterwards
719
723
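A rough in-process equivalent of the forbid hooks used below (hypothetical module grouphooks.py, registered as "pretxnchangegroup.audit = python:grouphooks.audit"; a sketch only) gets the first and last incoming nodes, mirroring HG_NODE and HG_NODE_LAST, and can walk the pending revisions before deciding whether to let the pull through:

    def audit(ui, repo, node=None, node_last=None, **kwargs):
        first = repo[node].rev()
        last = repo[node_last].rev()
        for rev in range(first, last + 1):
            ui.write(b'incoming changeset %s\n' % repo[rev].hex()[:12])
        return False   # a truthy return would abort and roll back the pull
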
720 $ cat > .hg/hgrc <<EOF
724 $ cat > .hg/hgrc <<EOF
721 > [hooks]
725 > [hooks]
722 > pretxnchangegroup.forbid0 = hg tip -q
726 > pretxnchangegroup.forbid0 = hg tip -q
723 > pretxnchangegroup.forbid1 = sh -c "printenv.py --line pretxnchangegroup.forbid 1"
727 > pretxnchangegroup.forbid1 = sh -c "printenv.py --line pretxnchangegroup.forbid 1"
724 > EOF
728 > EOF
725 $ hg pull ../a
729 $ hg pull ../a
726 pulling from ../a
730 pulling from ../a
727 searching for changes
731 searching for changes
728 adding changesets
732 adding changesets
729 adding manifests
733 adding manifests
730 adding file changes
734 adding file changes
731 4:539e4b31b6dc
735 4:539e4b31b6dc
732 pretxnchangegroup.forbid hook: HG_HOOKNAME=pretxnchangegroup.forbid1
736 pretxnchangegroup.forbid hook: HG_HOOKNAME=pretxnchangegroup.forbid1
733 HG_HOOKTYPE=pretxnchangegroup
737 HG_HOOKTYPE=pretxnchangegroup
734 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
738 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
735 HG_NODE_LAST=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
739 HG_NODE_LAST=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
736 HG_PENDING=$TESTTMP/b
740 HG_PENDING=$TESTTMP/b
737 HG_SOURCE=pull
741 HG_SOURCE=pull
738 HG_TXNID=TXN:$ID$
742 HG_TXNID=TXN:$ID$
739 HG_TXNNAME=pull
743 HG_TXNNAME=pull
740 file:/*/$TESTTMP/a (glob)
744 file:/*/$TESTTMP/a (glob)
741 HG_URL=file:$TESTTMP/a
745 HG_URL=file:$TESTTMP/a
742
746
743 transaction abort!
747 transaction abort!
744 rollback completed
748 rollback completed
745 abort: pretxnchangegroup.forbid1 hook exited with status 1
749 abort: pretxnchangegroup.forbid1 hook exited with status 1
746 [40]
750 [40]
747 $ hg -q tip
751 $ hg -q tip
748 3:07f3376c1e65
752 3:07f3376c1e65
749
753
750 outgoing hooks can see env vars
754 outgoing hooks can see env vars
751
755
752 $ rm .hg/hgrc
756 $ rm .hg/hgrc
753 $ cat > ../a/.hg/hgrc <<EOF
757 $ cat > ../a/.hg/hgrc <<EOF
754 > [hooks]
758 > [hooks]
755 > preoutgoing = sh -c "printenv.py --line preoutgoing"
759 > preoutgoing = sh -c "printenv.py --line preoutgoing"
756 > outgoing = sh -c "printenv.py --line outgoing"
760 > outgoing = sh -c "printenv.py --line outgoing"
757 > EOF
761 > EOF
758 $ hg pull ../a
762 $ hg pull ../a
759 pulling from ../a
763 pulling from ../a
760 searching for changes
764 searching for changes
761 preoutgoing hook: HG_HOOKNAME=preoutgoing
765 preoutgoing hook: HG_HOOKNAME=preoutgoing
762 HG_HOOKTYPE=preoutgoing
766 HG_HOOKTYPE=preoutgoing
763 HG_SOURCE=pull
767 HG_SOURCE=pull
764
768
765 outgoing hook: HG_HOOKNAME=outgoing
769 outgoing hook: HG_HOOKNAME=outgoing
766 HG_HOOKTYPE=outgoing
770 HG_HOOKTYPE=outgoing
767 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
771 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
768 HG_SOURCE=pull
772 HG_SOURCE=pull
769
773
770 adding changesets
774 adding changesets
771 adding manifests
775 adding manifests
772 adding file changes
776 adding file changes
773 adding remote bookmark quux
777 adding remote bookmark quux
774 added 1 changesets with 1 changes to 1 files
778 added 1 changesets with 1 changes to 1 files
775 new changesets 539e4b31b6dc
779 new changesets 539e4b31b6dc
776 (run 'hg update' to get a working copy)
780 (run 'hg update' to get a working copy)
777 $ hg rollback
781 $ hg rollback
778 repository tip rolled back to revision 3 (undo pull)
782 repository tip rolled back to revision 3 (undo pull)
779
783
780 preoutgoing hook can prevent outgoing changes
784 preoutgoing hook can prevent outgoing changes
781
785
782 $ cat >> ../a/.hg/hgrc <<EOF
786 $ cat >> ../a/.hg/hgrc <<EOF
783 > preoutgoing.forbid = sh -c "printenv.py --line preoutgoing.forbid 1"
787 > preoutgoing.forbid = sh -c "printenv.py --line preoutgoing.forbid 1"
784 > EOF
788 > EOF
785 $ hg pull ../a
789 $ hg pull ../a
786 pulling from ../a
790 pulling from ../a
787 searching for changes
791 searching for changes
788 preoutgoing hook: HG_HOOKNAME=preoutgoing
792 preoutgoing hook: HG_HOOKNAME=preoutgoing
789 HG_HOOKTYPE=preoutgoing
793 HG_HOOKTYPE=preoutgoing
790 HG_SOURCE=pull
794 HG_SOURCE=pull
791
795
792 preoutgoing.forbid hook: HG_HOOKNAME=preoutgoing.forbid
796 preoutgoing.forbid hook: HG_HOOKNAME=preoutgoing.forbid
793 HG_HOOKTYPE=preoutgoing
797 HG_HOOKTYPE=preoutgoing
794 HG_SOURCE=pull
798 HG_SOURCE=pull
795
799
796 abort: preoutgoing.forbid hook exited with status 1
800 abort: preoutgoing.forbid hook exited with status 1
797 [40]
801 [40]
798
802
799 outgoing hooks work for local clones
803 outgoing hooks work for local clones
800
804
801 $ cd ..
805 $ cd ..
802 $ cat > a/.hg/hgrc <<EOF
806 $ cat > a/.hg/hgrc <<EOF
803 > [hooks]
807 > [hooks]
804 > preoutgoing = sh -c "printenv.py --line preoutgoing"
808 > preoutgoing = sh -c "printenv.py --line preoutgoing"
805 > outgoing = sh -c "printenv.py --line outgoing"
809 > outgoing = sh -c "printenv.py --line outgoing"
806 > EOF
810 > EOF
807 $ hg clone a c
811 $ hg clone a c
808 preoutgoing hook: HG_HOOKNAME=preoutgoing
812 preoutgoing hook: HG_HOOKNAME=preoutgoing
809 HG_HOOKTYPE=preoutgoing
813 HG_HOOKTYPE=preoutgoing
810 HG_SOURCE=clone
814 HG_SOURCE=clone
811
815
812 outgoing hook: HG_HOOKNAME=outgoing
816 outgoing hook: HG_HOOKNAME=outgoing
813 HG_HOOKTYPE=outgoing
817 HG_HOOKTYPE=outgoing
814 HG_NODE=0000000000000000000000000000000000000000
818 HG_NODE=0000000000000000000000000000000000000000
815 HG_SOURCE=clone
819 HG_SOURCE=clone
816
820
817 updating to branch default
821 updating to branch default
818 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
822 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
819 $ rm -rf c
823 $ rm -rf c
820
824
821 preoutgoing hook can prevent outgoing changes for local clones
825 preoutgoing hook can prevent outgoing changes for local clones
822
826
823 $ cat >> a/.hg/hgrc <<EOF
827 $ cat >> a/.hg/hgrc <<EOF
824 > preoutgoing.forbid = sh -c "printenv.py --line preoutgoing.forbid 1"
828 > preoutgoing.forbid = sh -c "printenv.py --line preoutgoing.forbid 1"
825 > EOF
829 > EOF
826 $ hg clone a zzz
830 $ hg clone a zzz
827 preoutgoing hook: HG_HOOKNAME=preoutgoing
831 preoutgoing hook: HG_HOOKNAME=preoutgoing
828 HG_HOOKTYPE=preoutgoing
832 HG_HOOKTYPE=preoutgoing
829 HG_SOURCE=clone
833 HG_SOURCE=clone
830
834
831 preoutgoing.forbid hook: HG_HOOKNAME=preoutgoing.forbid
835 preoutgoing.forbid hook: HG_HOOKNAME=preoutgoing.forbid
832 HG_HOOKTYPE=preoutgoing
836 HG_HOOKTYPE=preoutgoing
833 HG_SOURCE=clone
837 HG_SOURCE=clone
834
838
835 abort: preoutgoing.forbid hook exited with status 1
839 abort: preoutgoing.forbid hook exited with status 1
836 [40]
840 [40]
837
841
838 $ cd "$TESTTMP/b"
842 $ cd "$TESTTMP/b"
839
843
840 $ cat > hooktests.py <<EOF
844 $ cat > hooktests.py <<EOF
841 > from mercurial import (
845 > from mercurial import (
842 > error,
846 > error,
843 > pycompat,
847 > pycompat,
844 > )
848 > )
845 >
849 >
846 > uncallable = 0
850 > uncallable = 0
847 >
851 >
848 > def printargs(ui, args):
852 > def printargs(ui, args):
849 > a = list(pycompat.byteskwargs(args).items())
853 > a = list(pycompat.byteskwargs(args).items())
850 > a.sort()
854 > a.sort()
851 > ui.write(b'hook args:\n')
855 > ui.write(b'hook args:\n')
852 > for k, v in a:
856 > for k, v in a:
853 > ui.write(b' %s %s\n' % (k, v))
857 > ui.write(b' %s %s\n' % (k, v))
854 >
858 >
855 > def passhook(ui, repo, **args):
859 > def passhook(ui, repo, **args):
856 > printargs(ui, args)
860 > printargs(ui, args)
857 >
861 >
858 > def failhook(ui, repo, **args):
862 > def failhook(ui, repo, **args):
859 > printargs(ui, args)
863 > printargs(ui, args)
860 > return True
864 > return True
861 >
865 >
862 > class LocalException(Exception):
866 > class LocalException(Exception):
863 > pass
867 > pass
864 >
868 >
865 > def raisehook(**args):
869 > def raisehook(**args):
866 > raise LocalException('exception from hook')
870 > raise LocalException('exception from hook')
867 >
871 >
868 > def aborthook(**args):
872 > def aborthook(**args):
869 > raise error.Abort(b'raise abort from hook')
873 > raise error.Abort(b'raise abort from hook')
870 >
874 >
871 > def brokenhook(**args):
875 > def brokenhook(**args):
872 > return 1 + {}
876 > return 1 + {}
873 >
877 >
874 > def verbosehook(ui, **args):
878 > def verbosehook(ui, **args):
875 > ui.note(b'verbose output from hook\n')
879 > ui.note(b'verbose output from hook\n')
876 >
880 >
877 > def printtags(ui, repo, **args):
881 > def printtags(ui, repo, **args):
878 > ui.write(b'[%s]\n' % b', '.join(sorted(repo.tags())))
882 > ui.write(b'[%s]\n' % b', '.join(sorted(repo.tags())))
879 >
883 >
880 > class container(object):
884 > class container(object):
881 > unreachable = 1
885 > unreachable = 1
882 > EOF
886 > EOF
883
887
884 $ cat > syntaxerror.py << NO_CHECK_EOF
888 $ cat > syntaxerror.py << NO_CHECK_EOF
885 > (foo
889 > (foo
886 > NO_CHECK_EOF
890 > NO_CHECK_EOF
887
891
888 test python hooks
892 test python hooks
889
893
890 #if windows
894 #if windows
891 $ PYTHONPATH="$TESTTMP/b;$PYTHONPATH"
895 $ PYTHONPATH="$TESTTMP/b;$PYTHONPATH"
892 #else
896 #else
893 $ PYTHONPATH="$TESTTMP/b:$PYTHONPATH"
897 $ PYTHONPATH="$TESTTMP/b:$PYTHONPATH"
894 #endif
898 #endif
895 $ export PYTHONPATH
899 $ export PYTHONPATH
896
900
897 $ echo '[hooks]' > ../a/.hg/hgrc
901 $ echo '[hooks]' > ../a/.hg/hgrc
898 $ echo 'preoutgoing.broken = python:hooktests.brokenhook' >> ../a/.hg/hgrc
902 $ echo 'preoutgoing.broken = python:hooktests.brokenhook' >> ../a/.hg/hgrc
899 $ hg pull ../a 2>&1 | grep 'raised an exception'
903 $ hg pull ../a 2>&1 | grep 'raised an exception'
900 error: preoutgoing.broken hook raised an exception: unsupported operand type(s) for +: 'int' and 'dict'
904 error: preoutgoing.broken hook raised an exception: unsupported operand type(s) for +: 'int' and 'dict'
901
905
902 $ echo '[hooks]' > ../a/.hg/hgrc
906 $ echo '[hooks]' > ../a/.hg/hgrc
903 $ echo 'preoutgoing.raise = python:hooktests.raisehook' >> ../a/.hg/hgrc
907 $ echo 'preoutgoing.raise = python:hooktests.raisehook' >> ../a/.hg/hgrc
904 $ hg pull ../a 2>&1 | grep 'raised an exception'
908 $ hg pull ../a 2>&1 | grep 'raised an exception'
905 error: preoutgoing.raise hook raised an exception: exception from hook
909 error: preoutgoing.raise hook raised an exception: exception from hook
906
910
907 $ echo '[hooks]' > ../a/.hg/hgrc
911 $ echo '[hooks]' > ../a/.hg/hgrc
908 $ echo 'preoutgoing.abort = python:hooktests.aborthook' >> ../a/.hg/hgrc
912 $ echo 'preoutgoing.abort = python:hooktests.aborthook' >> ../a/.hg/hgrc
909 $ hg pull ../a
913 $ hg pull ../a
910 pulling from ../a
914 pulling from ../a
911 searching for changes
915 searching for changes
912 error: preoutgoing.abort hook failed: raise abort from hook
916 error: preoutgoing.abort hook failed: raise abort from hook
913 abort: raise abort from hook
917 abort: raise abort from hook
914 [255]
918 [255]
915
919
916 $ echo '[hooks]' > ../a/.hg/hgrc
920 $ echo '[hooks]' > ../a/.hg/hgrc
917 $ echo 'preoutgoing.fail = python:hooktests.failhook' >> ../a/.hg/hgrc
921 $ echo 'preoutgoing.fail = python:hooktests.failhook' >> ../a/.hg/hgrc
918 $ hg pull ../a
922 $ hg pull ../a
919 pulling from ../a
923 pulling from ../a
920 searching for changes
924 searching for changes
921 hook args:
925 hook args:
922 hooktype preoutgoing
926 hooktype preoutgoing
923 source pull
927 source pull
924 abort: preoutgoing.fail hook failed
928 abort: preoutgoing.fail hook failed
925 [40]
929 [40]
926
930
927 $ echo '[hooks]' > ../a/.hg/hgrc
931 $ echo '[hooks]' > ../a/.hg/hgrc
928 $ echo 'preoutgoing.uncallable = python:hooktests.uncallable' >> ../a/.hg/hgrc
932 $ echo 'preoutgoing.uncallable = python:hooktests.uncallable' >> ../a/.hg/hgrc
929 $ hg pull ../a
933 $ hg pull ../a
930 pulling from ../a
934 pulling from ../a
931 searching for changes
935 searching for changes
932 abort: preoutgoing.uncallable hook is invalid: "hooktests.uncallable" is not callable
936 abort: preoutgoing.uncallable hook is invalid: "hooktests.uncallable" is not callable
933 [255]
937 [255]
934
938
935 $ echo '[hooks]' > ../a/.hg/hgrc
939 $ echo '[hooks]' > ../a/.hg/hgrc
936 $ echo 'preoutgoing.nohook = python:hooktests.nohook' >> ../a/.hg/hgrc
940 $ echo 'preoutgoing.nohook = python:hooktests.nohook' >> ../a/.hg/hgrc
937 $ hg pull ../a
941 $ hg pull ../a
938 pulling from ../a
942 pulling from ../a
939 searching for changes
943 searching for changes
940 abort: preoutgoing.nohook hook is invalid: "hooktests.nohook" is not defined
944 abort: preoutgoing.nohook hook is invalid: "hooktests.nohook" is not defined
941 [255]
945 [255]
942
946
943 $ echo '[hooks]' > ../a/.hg/hgrc
947 $ echo '[hooks]' > ../a/.hg/hgrc
944 $ echo 'preoutgoing.nomodule = python:nomodule' >> ../a/.hg/hgrc
948 $ echo 'preoutgoing.nomodule = python:nomodule' >> ../a/.hg/hgrc
945 $ hg pull ../a
949 $ hg pull ../a
946 pulling from ../a
950 pulling from ../a
947 searching for changes
951 searching for changes
948 abort: preoutgoing.nomodule hook is invalid: "nomodule" not in a module
952 abort: preoutgoing.nomodule hook is invalid: "nomodule" not in a module
949 [255]
953 [255]
950
954
951 $ echo '[hooks]' > ../a/.hg/hgrc
955 $ echo '[hooks]' > ../a/.hg/hgrc
952 $ echo 'preoutgoing.badmodule = python:nomodule.nowhere' >> ../a/.hg/hgrc
956 $ echo 'preoutgoing.badmodule = python:nomodule.nowhere' >> ../a/.hg/hgrc
953 $ hg pull ../a
957 $ hg pull ../a
954 pulling from ../a
958 pulling from ../a
955 searching for changes
959 searching for changes
956 abort: preoutgoing.badmodule hook is invalid: import of "nomodule" failed
960 abort: preoutgoing.badmodule hook is invalid: import of "nomodule" failed
957 (run with --traceback for stack trace)
961 (run with --traceback for stack trace)
958 [255]
962 [255]
959
963
960 $ echo '[hooks]' > ../a/.hg/hgrc
964 $ echo '[hooks]' > ../a/.hg/hgrc
961 $ echo 'preoutgoing.unreachable = python:hooktests.container.unreachable' >> ../a/.hg/hgrc
965 $ echo 'preoutgoing.unreachable = python:hooktests.container.unreachable' >> ../a/.hg/hgrc
962 $ hg pull ../a
966 $ hg pull ../a
963 pulling from ../a
967 pulling from ../a
964 searching for changes
968 searching for changes
965 abort: preoutgoing.unreachable hook is invalid: import of "hooktests.container" failed
969 abort: preoutgoing.unreachable hook is invalid: import of "hooktests.container" failed
966 (run with --traceback for stack trace)
970 (run with --traceback for stack trace)
967 [255]
971 [255]
968
972
969 $ echo '[hooks]' > ../a/.hg/hgrc
973 $ echo '[hooks]' > ../a/.hg/hgrc
970 $ echo 'preoutgoing.syntaxerror = python:syntaxerror.syntaxerror' >> ../a/.hg/hgrc
974 $ echo 'preoutgoing.syntaxerror = python:syntaxerror.syntaxerror' >> ../a/.hg/hgrc
971 $ hg pull ../a
975 $ hg pull ../a
972 pulling from ../a
976 pulling from ../a
973 searching for changes
977 searching for changes
974 abort: preoutgoing.syntaxerror hook is invalid: import of "syntaxerror" failed
978 abort: preoutgoing.syntaxerror hook is invalid: import of "syntaxerror" failed
975 (run with --traceback for stack trace)
979 (run with --traceback for stack trace)
976 [255]
980 [255]
977
981
978 $ hg pull ../a --traceback 2>&1 | grep -E 'pulling|searching|^exception|Traceback|SyntaxError|ImportError|ModuleNotFoundError|HookLoadError|abort'
982 $ hg pull ../a --traceback 2>&1 | grep -E 'pulling|searching|^exception|Traceback|SyntaxError|ImportError|ModuleNotFoundError|HookLoadError|abort'
979 pulling from ../a
983 pulling from ../a
980 searching for changes
984 searching for changes
981 exception from first failed import attempt:
985 exception from first failed import attempt:
982 Traceback (most recent call last):
986 Traceback (most recent call last):
983 SyntaxError: * (glob)
987 SyntaxError: * (glob)
984 exception from second failed import attempt:
988 exception from second failed import attempt:
985 Traceback (most recent call last):
989 Traceback (most recent call last):
986 SyntaxError: * (glob)
990 SyntaxError: * (glob)
987 Traceback (most recent call last):
991 Traceback (most recent call last):
988 ModuleNotFoundError: No module named 'hgext_syntaxerror'
992 ModuleNotFoundError: No module named 'hgext_syntaxerror'
989 Traceback (most recent call last):
993 Traceback (most recent call last):
990 SyntaxError: * (glob)
994 SyntaxError: * (glob)
991 Traceback (most recent call last):
995 Traceback (most recent call last):
992 ModuleNotFoundError: No module named 'hgext_syntaxerror'
996 ModuleNotFoundError: No module named 'hgext_syntaxerror'
993 Traceback (most recent call last):
997 Traceback (most recent call last):
994 raise error.HookLoadError(msg, hint=tracebackhint) (py37 !)
998 raise error.HookLoadError(msg, hint=tracebackhint) (py37 !)
995 mercurial.error.HookLoadError: preoutgoing.syntaxerror hook is invalid: import of "syntaxerror" failed
999 mercurial.error.HookLoadError: preoutgoing.syntaxerror hook is invalid: import of "syntaxerror" failed
996 abort: preoutgoing.syntaxerror hook is invalid: import of "syntaxerror" failed
1000 abort: preoutgoing.syntaxerror hook is invalid: import of "syntaxerror" failed
997
1001
998 $ echo '[hooks]' > ../a/.hg/hgrc
1002 $ echo '[hooks]' > ../a/.hg/hgrc
999 $ echo 'preoutgoing.pass = python:hooktests.passhook' >> ../a/.hg/hgrc
1003 $ echo 'preoutgoing.pass = python:hooktests.passhook' >> ../a/.hg/hgrc
1000 $ hg pull ../a
1004 $ hg pull ../a
1001 pulling from ../a
1005 pulling from ../a
1002 searching for changes
1006 searching for changes
1003 hook args:
1007 hook args:
1004 hooktype preoutgoing
1008 hooktype preoutgoing
1005 source pull
1009 source pull
1006 adding changesets
1010 adding changesets
1007 adding manifests
1011 adding manifests
1008 adding file changes
1012 adding file changes
1009 adding remote bookmark quux
1013 adding remote bookmark quux
1010 added 1 changesets with 1 changes to 1 files
1014 added 1 changesets with 1 changes to 1 files
1011 new changesets 539e4b31b6dc
1015 new changesets 539e4b31b6dc
1012 (run 'hg update' to get a working copy)
1016 (run 'hg update' to get a working copy)
1013
1017
1014 post- python hooks that fail to *run* don't cause an abort
1018 post- python hooks that fail to *run* don't cause an abort
1015 $ rm ../a/.hg/hgrc
1019 $ rm ../a/.hg/hgrc
1016 $ echo '[hooks]' > .hg/hgrc
1020 $ echo '[hooks]' > .hg/hgrc
1017 $ echo 'post-pull.broken = python:hooktests.brokenhook' >> .hg/hgrc
1021 $ echo 'post-pull.broken = python:hooktests.brokenhook' >> .hg/hgrc
1018 $ hg pull ../a
1022 $ hg pull ../a
1019 pulling from ../a
1023 pulling from ../a
1020 searching for changes
1024 searching for changes
1021 no changes found
1025 no changes found
1022 error: post-pull.broken hook raised an exception: unsupported operand type(s) for +: 'int' and 'dict'
1026 error: post-pull.broken hook raised an exception: unsupported operand type(s) for +: 'int' and 'dict'
1023 (run with --traceback for stack trace)
1027 (run with --traceback for stack trace)
1024
1028
1025 but post- python hooks that fail to *load* do
1029 but post- python hooks that fail to *load* do
1026 $ echo '[hooks]' > .hg/hgrc
1030 $ echo '[hooks]' > .hg/hgrc
1027 $ echo 'post-pull.nomodule = python:nomodule' >> .hg/hgrc
1031 $ echo 'post-pull.nomodule = python:nomodule' >> .hg/hgrc
1028 $ hg pull ../a
1032 $ hg pull ../a
1029 pulling from ../a
1033 pulling from ../a
1030 searching for changes
1034 searching for changes
1031 no changes found
1035 no changes found
1032 abort: post-pull.nomodule hook is invalid: "nomodule" not in a module
1036 abort: post-pull.nomodule hook is invalid: "nomodule" not in a module
1033 [255]
1037 [255]
1034
1038
1035 $ echo '[hooks]' > .hg/hgrc
1039 $ echo '[hooks]' > .hg/hgrc
1036 $ echo 'post-pull.badmodule = python:nomodule.nowhere' >> .hg/hgrc
1040 $ echo 'post-pull.badmodule = python:nomodule.nowhere' >> .hg/hgrc
1037 $ hg pull ../a
1041 $ hg pull ../a
1038 pulling from ../a
1042 pulling from ../a
1039 searching for changes
1043 searching for changes
1040 no changes found
1044 no changes found
1041 abort: post-pull.badmodule hook is invalid: import of "nomodule" failed
1045 abort: post-pull.badmodule hook is invalid: import of "nomodule" failed
1042 (run with --traceback for stack trace)
1046 (run with --traceback for stack trace)
1043 [255]
1047 [255]
1044
1048
1045 $ echo '[hooks]' > .hg/hgrc
1049 $ echo '[hooks]' > .hg/hgrc
1046 $ echo 'post-pull.nohook = python:hooktests.nohook' >> .hg/hgrc
1050 $ echo 'post-pull.nohook = python:hooktests.nohook' >> .hg/hgrc
1047 $ hg pull ../a
1051 $ hg pull ../a
1048 pulling from ../a
1052 pulling from ../a
1049 searching for changes
1053 searching for changes
1050 no changes found
1054 no changes found
1051 abort: post-pull.nohook hook is invalid: "hooktests.nohook" is not defined
1055 abort: post-pull.nohook hook is invalid: "hooktests.nohook" is not defined
1052 [255]
1056 [255]
1053
1057
1054 make sure --traceback works
1058 make sure --traceback works
1055
1059
1056 $ echo '[hooks]' > .hg/hgrc
1060 $ echo '[hooks]' > .hg/hgrc
1057 $ echo 'commit.abort = python:hooktests.aborthook' >> .hg/hgrc
1061 $ echo 'commit.abort = python:hooktests.aborthook' >> .hg/hgrc
1058
1062
1059 $ echo aa > a
1063 $ echo aa > a
1060 $ hg --traceback commit -d '0 0' -ma 2>&1 | grep '^Traceback'
1064 $ hg --traceback commit -d '0 0' -ma 2>&1 | grep '^Traceback'
1061 Traceback (most recent call last):
1065 Traceback (most recent call last):
1062
1066
1063 $ cd ..
1067 $ cd ..
1064 $ hg init c
1068 $ hg init c
1065 $ cd c
1069 $ cd c
1066
1070
1067 $ cat > hookext.py <<EOF
1071 $ cat > hookext.py <<EOF
1068 > def autohook(ui, **args):
1072 > def autohook(ui, **args):
1069 > ui.write(b'Automatically installed hook\n')
1073 > ui.write(b'Automatically installed hook\n')
1070 >
1074 >
1071 > def reposetup(ui, repo):
1075 > def reposetup(ui, repo):
1072 > repo.ui.setconfig(b"hooks", b"commit.auto", autohook)
1076 > repo.ui.setconfig(b"hooks", b"commit.auto", autohook)
1073 > EOF
1077 > EOF
1074 $ echo '[extensions]' >> .hg/hgrc
1078 $ echo '[extensions]' >> .hg/hgrc
1075 $ echo 'hookext = hookext.py' >> .hg/hgrc
1079 $ echo 'hookext = hookext.py' >> .hg/hgrc
1076
1080
1077 $ touch foo
1081 $ touch foo
1078 $ hg add foo
1082 $ hg add foo
1079 $ hg ci -d '0 0' -m 'add foo'
1083 $ hg ci -d '0 0' -m 'add foo'
1080 Automatically installed hook
1084 Automatically installed hook
1081 $ echo >> foo
1085 $ echo >> foo
1082 $ hg ci --debug -d '0 0' -m 'change foo'
1086 $ hg ci --debug -d '0 0' -m 'change foo'
1083 committing files:
1087 committing files:
1084 foo
1088 foo
1085 committing manifest
1089 committing manifest
1086 committing changelog
1090 committing changelog
1087 updating the branch cache
1091 updating the branch cache
1088 committed changeset 1:52998019f6252a2b893452765fcb0a47351a5708
1092 committed changeset 1:52998019f6252a2b893452765fcb0a47351a5708
1089 calling hook commit.auto: hgext_hookext.autohook
1093 calling hook commit.auto: hgext_hookext.autohook
1090 Automatically installed hook
1094 Automatically installed hook
1091
1095
1092 $ hg showconfig hooks
1096 $ hg showconfig hooks
1093 hooks.commit.auto=<function autohook at *> (glob)
1097 hooks.commit.auto=<function autohook at *> (glob)
1094
1098
1095 test python hook configured with python:[file]:[hook] syntax
1099 test python hook configured with python:[file]:[hook] syntax
1096
1100
1097 $ cd ..
1101 $ cd ..
1098 $ mkdir d
1102 $ mkdir d
1099 $ cd d
1103 $ cd d
1100 $ hg init repo
1104 $ hg init repo
1101 $ mkdir hooks
1105 $ mkdir hooks
1102
1106
1103 $ cd hooks
1107 $ cd hooks
1104 $ cat > testhooks.py <<EOF
1108 $ cat > testhooks.py <<EOF
1105 > def testhook(ui, **args):
1109 > def testhook(ui, **args):
1106 > ui.write(b'hook works\n')
1110 > ui.write(b'hook works\n')
1107 > EOF
1111 > EOF
1108 $ echo '[hooks]' > ../repo/.hg/hgrc
1112 $ echo '[hooks]' > ../repo/.hg/hgrc
1109 $ echo "pre-commit.test = python:`pwd`/testhooks.py:testhook" >> ../repo/.hg/hgrc
1113 $ echo "pre-commit.test = python:`pwd`/testhooks.py:testhook" >> ../repo/.hg/hgrc
1110
1114
1111 $ cd ../repo
1115 $ cd ../repo
1112 $ hg commit -d '0 0'
1116 $ hg commit -d '0 0'
1113 hook works
1117 hook works
1114 nothing changed
1118 nothing changed
1115 [1]
1119 [1]
1116
1120
1117 $ echo '[hooks]' > .hg/hgrc
1121 $ echo '[hooks]' > .hg/hgrc
1118 $ echo "update.ne = python:`pwd`/nonexistent.py:testhook" >> .hg/hgrc
1122 $ echo "update.ne = python:`pwd`/nonexistent.py:testhook" >> .hg/hgrc
1119 $ echo "pre-identify.npmd = python:`pwd`/:no_python_module_dir" >> .hg/hgrc
1123 $ echo "pre-identify.npmd = python:`pwd`/:no_python_module_dir" >> .hg/hgrc
1120
1124
1121 $ hg up null
1125 $ hg up null
1122 loading update.ne hook failed:
1126 loading update.ne hook failed:
1123 abort: $ENOENT$: '$TESTTMP/d/repo/nonexistent.py'
1127 abort: $ENOENT$: '$TESTTMP/d/repo/nonexistent.py'
1124 [255]
1128 [255]
1125
1129
1126 $ hg id
1130 $ hg id
1127 loading pre-identify.npmd hook failed:
1131 loading pre-identify.npmd hook failed:
1128 abort: No module named 'repo'
1132 abort: No module named 'repo'
1129 [255]
1133 [255]
1130
1134
1131 $ cd ../../b
1135 $ cd ../../b
1132
1136
1133 make sure --traceback works on hook import failure
1137 make sure --traceback works on hook import failure
1134
1138
1135 $ cat > importfail.py <<EOF
1139 $ cat > importfail.py <<EOF
1136 > import somebogusmodule
1140 > import somebogusmodule
1137 > # dereference something in the module to force demandimport to load it
1141 > # dereference something in the module to force demandimport to load it
1138 > somebogusmodule.whatever
1142 > somebogusmodule.whatever
1139 > EOF
1143 > EOF
1140
1144
1141 $ echo '[hooks]' > .hg/hgrc
1145 $ echo '[hooks]' > .hg/hgrc
1142 $ echo 'precommit.importfail = python:importfail.whatever' >> .hg/hgrc
1146 $ echo 'precommit.importfail = python:importfail.whatever' >> .hg/hgrc
1143
1147
1144 $ echo a >> a
1148 $ echo a >> a
1145 $ hg --traceback commit -ma 2>&1 | grep -E '^exception|ImportError|ModuleNotFoundError|Traceback|HookLoadError|abort'
1149 $ hg --traceback commit -ma 2>&1 | grep -E '^exception|ImportError|ModuleNotFoundError|Traceback|HookLoadError|abort'
1146 exception from first failed import attempt:
1150 exception from first failed import attempt:
1147 Traceback (most recent call last):
1151 Traceback (most recent call last):
1148 ModuleNotFoundError: No module named 'somebogusmodule'
1152 ModuleNotFoundError: No module named 'somebogusmodule'
1149 exception from second failed import attempt:
1153 exception from second failed import attempt:
1150 Traceback (most recent call last):
1154 Traceback (most recent call last):
1151 ModuleNotFoundError: No module named 'somebogusmodule'
1155 ModuleNotFoundError: No module named 'somebogusmodule'
1152 Traceback (most recent call last):
1156 Traceback (most recent call last):
1153 ModuleNotFoundError: No module named 'hgext_importfail'
1157 ModuleNotFoundError: No module named 'hgext_importfail'
1154 Traceback (most recent call last):
1158 Traceback (most recent call last):
1155 ModuleNotFoundError: No module named 'somebogusmodule'
1159 ModuleNotFoundError: No module named 'somebogusmodule'
1156 Traceback (most recent call last):
1160 Traceback (most recent call last):
1157 ModuleNotFoundError: No module named 'hgext_importfail'
1161 ModuleNotFoundError: No module named 'hgext_importfail'
1158 Traceback (most recent call last):
1162 Traceback (most recent call last):
1159 raise error.HookLoadError(msg, hint=tracebackhint) (py37 !)
1163 raise error.HookLoadError(msg, hint=tracebackhint) (py37 !)
1160 mercurial.error.HookLoadError: precommit.importfail hook is invalid: import of "importfail" failed
1164 mercurial.error.HookLoadError: precommit.importfail hook is invalid: import of "importfail" failed
1161 abort: precommit.importfail hook is invalid: import of "importfail" failed
1165 abort: precommit.importfail hook is invalid: import of "importfail" failed
1162
1166
1163 Issue1827: Hooks Update & Commit not completely post operation
1167 Issue1827: Hooks Update & Commit not completely post operation
1164
1168
1165 commit and update hooks should run after command completion. The largefiles
1169 commit and update hooks should run after command completion. The largefiles
1166 case demonstrates a recursive wlock, showing that the hook doesn't run until
1170 case demonstrates a recursive wlock, showing that the hook doesn't run until
1167 the final release (and dirstate flush).
1171 the final release (and dirstate flush).
1168
1172
1169 $ echo '[hooks]' > .hg/hgrc
1173 $ echo '[hooks]' > .hg/hgrc
1170 $ echo 'commit = hg id' >> .hg/hgrc
1174 $ echo 'commit = hg id' >> .hg/hgrc
1171 $ echo 'update = hg id' >> .hg/hgrc
1175 $ echo 'update = hg id' >> .hg/hgrc
1172 $ echo bb > a
1176 $ echo bb > a
1173 $ hg ci -ma
1177 $ hg ci -ma
1174 223eafe2750c tip
1178 223eafe2750c tip
1175 $ hg up 0 --config extensions.largefiles=
1179 $ hg up 0 --config extensions.largefiles=
1176 The fsmonitor extension is incompatible with the largefiles extension and has been disabled. (fsmonitor !)
1180 The fsmonitor extension is incompatible with the largefiles extension and has been disabled. (fsmonitor !)
1177 cb9a9f314b8b
1181 cb9a9f314b8b
1178 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1182 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1179
1183
1180 make sure --verbose (and --quiet/--debug etc.) are propagated to the local ui
1184 make sure --verbose (and --quiet/--debug etc.) are propagated to the local ui
1181 that is passed to pre/post hooks
1185 that is passed to pre/post hooks
1182
1186
1183 $ echo '[hooks]' > .hg/hgrc
1187 $ echo '[hooks]' > .hg/hgrc
1184 $ echo 'pre-identify = python:hooktests.verbosehook' >> .hg/hgrc
1188 $ echo 'pre-identify = python:hooktests.verbosehook' >> .hg/hgrc
1185 $ hg id
1189 $ hg id
1186 cb9a9f314b8b
1190 cb9a9f314b8b
1187 $ hg id --verbose
1191 $ hg id --verbose
1188 calling hook pre-identify: hooktests.verbosehook
1192 calling hook pre-identify: hooktests.verbosehook
1189 verbose output from hook
1193 verbose output from hook
1190 cb9a9f314b8b
1194 cb9a9f314b8b
1191
1195
1192 Ensure hooks can be prioritized
1196 Ensure hooks can be prioritized
1193
1197
1194 $ echo '[hooks]' > .hg/hgrc
1198 $ echo '[hooks]' > .hg/hgrc
1195 $ echo 'pre-identify.a = python:hooktests.verbosehook' >> .hg/hgrc
1199 $ echo 'pre-identify.a = python:hooktests.verbosehook' >> .hg/hgrc
1196 $ echo 'pre-identify.b = python:hooktests.verbosehook' >> .hg/hgrc
1200 $ echo 'pre-identify.b = python:hooktests.verbosehook' >> .hg/hgrc
1197 $ echo 'priority.pre-identify.b = 1' >> .hg/hgrc
1201 $ echo 'priority.pre-identify.b = 1' >> .hg/hgrc
1198 $ echo 'pre-identify.c = python:hooktests.verbosehook' >> .hg/hgrc
1202 $ echo 'pre-identify.c = python:hooktests.verbosehook' >> .hg/hgrc
1199 $ hg id --verbose
1203 $ hg id --verbose
1200 calling hook pre-identify.b: hooktests.verbosehook
1204 calling hook pre-identify.b: hooktests.verbosehook
1201 verbose output from hook
1205 verbose output from hook
1202 calling hook pre-identify.a: hooktests.verbosehook
1206 calling hook pre-identify.a: hooktests.verbosehook
1203 verbose output from hook
1207 verbose output from hook
1204 calling hook pre-identify.c: hooktests.verbosehook
1208 calling hook pre-identify.c: hooktests.verbosehook
1205 verbose output from hook
1209 verbose output from hook
1206 cb9a9f314b8b
1210 cb9a9f314b8b
1207
1211
1208 new tags must be visible in pretxncommit (issue3210)
1212 new tags must be visible in pretxncommit (issue3210)
1209
1213
1210 $ echo 'pretxncommit.printtags = python:hooktests.printtags' >> .hg/hgrc
1214 $ echo 'pretxncommit.printtags = python:hooktests.printtags' >> .hg/hgrc
1211 $ hg tag -f foo
1215 $ hg tag -f foo
1212 [a, foo, tip]
1216 [a, foo, tip]
1213
1217
1214 post-init hooks must not crash (issue4983)
1218 post-init hooks must not crash (issue4983)
1215 This also creates the `to` repo for the next test block.
1219 This also creates the `to` repo for the next test block.
1216
1220
1217 $ cd ..
1221 $ cd ..
1218 $ cat << EOF >> hgrc-with-post-init-hook
1222 $ cat << EOF >> hgrc-with-post-init-hook
1219 > [hooks]
1223 > [hooks]
1220 > post-init = sh -c "printenv.py --line post-init"
1224 > post-init = sh -c "printenv.py --line post-init"
1221 > EOF
1225 > EOF
1222 $ HGRCPATH=hgrc-with-post-init-hook hg init to
1226 $ HGRCPATH=hgrc-with-post-init-hook hg init to
1223 post-init hook: HG_ARGS=init to
1227 post-init hook: HG_ARGS=init to
1224 HG_HOOKNAME=post-init
1228 HG_HOOKNAME=post-init
1225 HG_HOOKTYPE=post-init
1229 HG_HOOKTYPE=post-init
1226 HG_OPTS={'insecure': None, 'remotecmd': '', 'ssh': ''}
1230 HG_OPTS={'insecure': None, 'remotecmd': '', 'ssh': ''}
1227 HG_PATS=['to']
1231 HG_PATS=['to']
1228 HG_RESULT=0
1232 HG_RESULT=0
1229
1233
1230
1234
1231 new commits must be visible in pretxnchangegroup (issue3428)
1235 new commits must be visible in pretxnchangegroup (issue3428)
1232
1236
1233 $ echo '[hooks]' >> to/.hg/hgrc
1237 $ echo '[hooks]' >> to/.hg/hgrc
1234 $ echo 'prechangegroup = hg --traceback tip' >> to/.hg/hgrc
1238 $ echo 'prechangegroup = hg --traceback tip' >> to/.hg/hgrc
1235 $ echo 'pretxnchangegroup = hg --traceback tip' >> to/.hg/hgrc
1239 $ echo 'pretxnchangegroup = hg --traceback tip' >> to/.hg/hgrc
1236 $ echo a >> to/a
1240 $ echo a >> to/a
1237 $ hg --cwd to ci -Ama
1241 $ hg --cwd to ci -Ama
1238 adding a
1242 adding a
1239 $ hg clone to from
1243 $ hg clone to from
1240 updating to branch default
1244 updating to branch default
1241 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1245 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1242 $ echo aa >> from/a
1246 $ echo aa >> from/a
1243 $ hg --cwd from ci -mb
1247 $ hg --cwd from ci -mb
1244 $ hg --cwd from push
1248 $ hg --cwd from push
1245 pushing to $TESTTMP/to
1249 pushing to $TESTTMP/to
1246 searching for changes
1250 searching for changes
1247 changeset: 0:cb9a9f314b8b
1251 changeset: 0:cb9a9f314b8b
1248 tag: tip
1252 tag: tip
1249 user: test
1253 user: test
1250 date: Thu Jan 01 00:00:00 1970 +0000
1254 date: Thu Jan 01 00:00:00 1970 +0000
1251 summary: a
1255 summary: a
1252
1256
1253 adding changesets
1257 adding changesets
1254 adding manifests
1258 adding manifests
1255 adding file changes
1259 adding file changes
1256 changeset: 1:9836a07b9b9d
1260 changeset: 1:9836a07b9b9d
1257 tag: tip
1261 tag: tip
1258 user: test
1262 user: test
1259 date: Thu Jan 01 00:00:00 1970 +0000
1263 date: Thu Jan 01 00:00:00 1970 +0000
1260 summary: b
1264 summary: b
1261
1265
1262 added 1 changesets with 1 changes to 1 files
1266 added 1 changesets with 1 changes to 1 files
1263
1267
1264 pretxnclose hook failure should abort the transaction
1268 pretxnclose hook failure should abort the transaction
1265
1269
1266 $ hg init txnfailure
1270 $ hg init txnfailure
1267 $ cd txnfailure
1271 $ cd txnfailure
1268 $ touch a && hg commit -Aqm a
1272 $ touch a && hg commit -Aqm a
1269 $ cat >> .hg/hgrc <<EOF
1273 $ cat >> .hg/hgrc <<EOF
1270 > [hooks]
1274 > [hooks]
1271 > pretxnclose.error = exit 1
1275 > pretxnclose.error = exit 1
1272 > EOF
1276 > EOF
1273 $ hg strip -r 0 --config extensions.strip=
1277 $ hg strip -r 0 --config extensions.strip=
1274 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
1278 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
1275 saved backup bundle to * (glob)
1279 saved backup bundle to * (glob)
1276 transaction abort!
1280 transaction abort!
1277 rollback completed
1281 rollback completed
1278 strip failed, backup bundle stored in * (glob)
1282 strip failed, backup bundle stored in * (glob)
1279 abort: pretxnclose.error hook exited with status 1
1283 abort: pretxnclose.error hook exited with status 1
1280 [40]
1284 [40]
1281 $ hg recover
1285 $ hg recover
1282 no interrupted transaction available
1286 no interrupted transaction available
1283 [1]
1287 [1]
1284 $ cd ..
1288 $ cd ..
1285
1289
1286 check whether HG_PENDING makes pending changes only in related
1290 check whether HG_PENDING makes pending changes only in related
1287 repositories visible to an external hook.
1291 repositories visible to an external hook.
1288
1292
1289 (emulate a transaction running concurrently by copied
1293 (emulate a transaction running concurrently by copied
1290 .hg/store/00changelog.i.a in subsequent test)
1294 .hg/store/00changelog.i.a in subsequent test)
1291
1295
1292 $ cat > $TESTTMP/savepending.sh <<EOF
1296 $ cat > $TESTTMP/savepending.sh <<EOF
1293 > cp .hg/store/00changelog.i.a .hg/store/00changelog.i.a.saved
1297 > cp .hg/store/00changelog.i.a .hg/store/00changelog.i.a.saved
1294 > exit 1 # to avoid adding new revision for subsequent tests
1298 > exit 1 # to avoid adding new revision for subsequent tests
1295 > EOF
1299 > EOF
1296 $ cd a
1300 $ cd a
1297 $ hg tip -q
1301 $ hg tip -q
1298 4:539e4b31b6dc
1302 4:539e4b31b6dc
1299 $ hg --config hooks.pretxnclose="sh $TESTTMP/savepending.sh" commit -m "invisible"
1303 $ hg --config hooks.pretxnclose="sh $TESTTMP/savepending.sh" commit -m "invisible"
1300 transaction abort!
1304 transaction abort!
1301 rollback completed
1305 rollback completed
1302 abort: pretxnclose hook exited with status 1
1306 abort: pretxnclose hook exited with status 1
1303 [40]
1307 [40]
1304 $ cp .hg/store/00changelog.i.a.saved .hg/store/00changelog.i.a
1308 $ cp .hg/store/00changelog.i.a.saved .hg/store/00changelog.i.a
1305
1309
1306 (check (in)visibility of new changeset while transaction running in
1310 (check (in)visibility of new changeset while transaction running in
1307 repo)
1311 repo)
1308
1312
1309 $ cat > $TESTTMP/checkpending.sh <<EOF
1313 $ cat > $TESTTMP/checkpending.sh <<EOF
1310 > echo '@a'
1314 > echo '@a'
1311 > hg -R "$TESTTMP/a" tip -q
1315 > hg -R "$TESTTMP/a" tip -q
1312 > echo '@a/nested'
1316 > echo '@a/nested'
1313 > hg -R "$TESTTMP/a/nested" tip -q
1317 > hg -R "$TESTTMP/a/nested" tip -q
1314 > exit 1 # to avoid adding new revision for subsequent tests
1318 > exit 1 # to avoid adding new revision for subsequent tests
1315 > EOF
1319 > EOF
1316 $ hg init nested
1320 $ hg init nested
1317 $ cd nested
1321 $ cd nested
1318 $ echo a > a
1322 $ echo a > a
1319 $ hg add a
1323 $ hg add a
1320 $ hg --config hooks.pretxnclose="sh $TESTTMP/checkpending.sh" commit -m '#0'
1324 $ hg --config hooks.pretxnclose="sh $TESTTMP/checkpending.sh" commit -m '#0'
1321 @a
1325 @a
1322 4:539e4b31b6dc
1326 4:539e4b31b6dc
1323 @a/nested
1327 @a/nested
1324 0:bf5e395ced2c
1328 0:bf5e395ced2c
1325 transaction abort!
1329 transaction abort!
1326 rollback completed
1330 rollback completed
1327 abort: pretxnclose hook exited with status 1
1331 abort: pretxnclose hook exited with status 1
1328 [40]
1332 [40]
1329
1333
1330 Hook from untrusted hgrc are reported as failure
1334 Hook from untrusted hgrc are reported as failure
1331 ================================================
1335 ================================================
1332
1336
1333 $ cat << EOF > $TESTTMP/untrusted.py
1337 $ cat << EOF > $TESTTMP/untrusted.py
1334 > from mercurial import scmutil, util
1338 > from mercurial import scmutil, util
1335 > def uisetup(ui):
1339 > def uisetup(ui):
1336 > class untrustedui(ui.__class__):
1340 > class untrustedui(ui.__class__):
1337 > def _trusted(self, fp, f):
1341 > def _trusted(self, fp, f):
1338 > if util.normpath(fp.name).endswith(b'untrusted/.hg/hgrc'):
1342 > if util.normpath(fp.name).endswith(b'untrusted/.hg/hgrc'):
1339 > return False
1343 > return False
1340 > return super(untrustedui, self)._trusted(fp, f)
1344 > return super(untrustedui, self)._trusted(fp, f)
1341 > ui.__class__ = untrustedui
1345 > ui.__class__ = untrustedui
1342 > EOF
1346 > EOF
1343 $ cat << EOF >> $HGRCPATH
1347 $ cat << EOF >> $HGRCPATH
1344 > [extensions]
1348 > [extensions]
1345 > untrusted=$TESTTMP/untrusted.py
1349 > untrusted=$TESTTMP/untrusted.py
1346 > EOF
1350 > EOF
1347 $ hg init untrusted
1351 $ hg init untrusted
1348 $ cd untrusted
1352 $ cd untrusted
1349
1353
1350 Non-blocking hook
1354 Non-blocking hook
1351 -----------------
1355 -----------------
1352
1356
1353 $ cat << EOF >> .hg/hgrc
1357 $ cat << EOF >> .hg/hgrc
1354 > [hooks]
1358 > [hooks]
1355 > txnclose.testing=echo txnclose hook called
1359 > txnclose.testing=echo txnclose hook called
1356 > EOF
1360 > EOF
1357 $ touch a && hg commit -Aqm a
1361 $ touch a && hg commit -Aqm a
1358 warning: untrusted hook txnclose.testing not executed
1362 warning: untrusted hook txnclose.testing not executed
1359 $ hg log
1363 $ hg log
1360 changeset: 0:3903775176ed
1364 changeset: 0:3903775176ed
1361 tag: tip
1365 tag: tip
1362 user: test
1366 user: test
1363 date: Thu Jan 01 00:00:00 1970 +0000
1367 date: Thu Jan 01 00:00:00 1970 +0000
1364 summary: a
1368 summary: a
1365
1369
1366
1370
1367 Non-blocking hook
1371 Non-blocking hook
1368 -----------------
1372 -----------------
1369
1373
1370 $ cat << EOF >> .hg/hgrc
1374 $ cat << EOF >> .hg/hgrc
1371 > [hooks]
1375 > [hooks]
1372 > pretxnclose.testing=echo pre-txnclose hook called
1376 > pretxnclose.testing=echo pre-txnclose hook called
1373 > EOF
1377 > EOF
1374 $ touch b && hg commit -Aqm a
1378 $ touch b && hg commit -Aqm a
1375 transaction abort!
1379 transaction abort!
1376 rollback completed
1380 rollback completed
1377 abort: untrusted hook pretxnclose.testing not executed
1381 abort: untrusted hook pretxnclose.testing not executed
1378 (see 'hg help config.trusted')
1382 (see 'hg help config.trusted')
1379 [40]
1383 [40]
1380 $ hg log
1384 $ hg log
1381 changeset: 0:3903775176ed
1385 changeset: 0:3903775176ed
1382 tag: tip
1386 tag: tip
1383 user: test
1387 user: test
1384 date: Thu Jan 01 00:00:00 1970 +0000
1388 date: Thu Jan 01 00:00:00 1970 +0000
1385 summary: a
1389 summary: a
1386
1390
1387
1391
1388 unsetup the test
1392 unsetup the test
1389 ----------------
1393 ----------------
1390
1394
1391 # touch the file to unconfuse chg with a diffrent mtime
1395 # touch the file to unconfuse chg with a diffrent mtime
1392 $ sleep 1
1396 $ sleep 1
1393 $ touch $TESTTMP/untrusted.py
1397 $ touch $TESTTMP/untrusted.py
1394 $ cat << EOF >> $HGRCPATH
1398 $ cat << EOF >> $HGRCPATH
1395 > [extensions]
1399 > [extensions]
1396 > untrusted=!
1400 > untrusted=!
1397 > EOF
1401 > EOF
1398
1402
1399 HGPLAIN setting in hooks
1403 HGPLAIN setting in hooks
1400 ========================
1404 ========================
1401
1405
1402 $ cat << EOF >> .hg/hgrc
1406 $ cat << EOF >> .hg/hgrc
1403 > [hooks]
1407 > [hooks]
1404 > pre-version.testing-default=sh -c "echo '### default ###' plain: \${HGPLAIN:-'<unset>'}"
1408 > pre-version.testing-default=sh -c "echo '### default ###' plain: \${HGPLAIN:-'<unset>'}"
1405 > pre-version.testing-yes=sh -c "echo '### yes #######' plain: \${HGPLAIN:-'<unset>'}"
1409 > pre-version.testing-yes=sh -c "echo '### yes #######' plain: \${HGPLAIN:-'<unset>'}"
1406 > pre-version.testing-yes:run-with-plain=yes
1410 > pre-version.testing-yes:run-with-plain=yes
1407 > pre-version.testing-no=sh -c "echo '### no ########' plain: \${HGPLAIN:-'<unset>'}"
1411 > pre-version.testing-no=sh -c "echo '### no ########' plain: \${HGPLAIN:-'<unset>'}"
1408 > pre-version.testing-no:run-with-plain=no
1412 > pre-version.testing-no:run-with-plain=no
1409 > pre-version.testing-auto=sh -c "echo '### auto ######' plain: \${HGPLAIN:-'<unset>'}"
1413 > pre-version.testing-auto=sh -c "echo '### auto ######' plain: \${HGPLAIN:-'<unset>'}"
1410 > pre-version.testing-auto:run-with-plain=auto
1414 > pre-version.testing-auto:run-with-plain=auto
1411 > EOF
1415 > EOF
1412
1416
1413 $ (unset HGPLAIN; hg version --quiet)
1417 $ (unset HGPLAIN; hg version --quiet)
1414 ### default ### plain: 1
1418 ### default ### plain: 1
1415 ### yes ####### plain: 1
1419 ### yes ####### plain: 1
1416 ### no ######## plain: <unset>
1420 ### no ######## plain: <unset>
1417 ### auto ###### plain: <unset>
1421 ### auto ###### plain: <unset>
1418 Mercurial Distributed SCM (*) (glob)
1422 Mercurial Distributed SCM (*) (glob)
1419
1423
1420 $ HGPLAIN=1 hg version --quiet
1424 $ HGPLAIN=1 hg version --quiet
1421 ### default ### plain: 1
1425 ### default ### plain: 1
1422 ### yes ####### plain: 1
1426 ### yes ####### plain: 1
1423 ### no ######## plain: <unset>
1427 ### no ######## plain: <unset>
1424 ### auto ###### plain: 1
1428 ### auto ###### plain: 1
1425 Mercurial Distributed SCM (*) (glob)
1429 Mercurial Distributed SCM (*) (glob)
1426
1430
1427 Test hook that change the underlying repo
1431 Test hook that change the underlying repo
1428 =========================================
1432 =========================================
1429
1433
1430 blackbox access the dirstate afterward and can see a changelog / dirstate
1434 blackbox access the dirstate afterward and can see a changelog / dirstate
1431 desync.
1435 desync.
1432
1436
1433
1437
1434 $ cd $TESTTMP
1438 $ cd $TESTTMP
1435 $ cat <<EOF >> $HGRCPATH
1439 $ cat <<EOF >> $HGRCPATH
1436 > [extensions]
1440 > [extensions]
1437 > blackbox=
1441 > blackbox=
1438 > [hooks]
1442 > [hooks]
1439 > post-merge = hg commit -m "auto merge"
1443 > post-merge = hg commit -m "auto merge"
1440 > EOF
1444 > EOF
1441
1445
1442 $ hg init t
1446 $ hg init t
1443 $ cd t
1447 $ cd t
1444 $ touch ".hgignore"
1448 $ touch ".hgignore"
1445 $ hg commit -Am "initial" -d'0 0'
1449 $ hg commit -Am "initial" -d'0 0'
1446 adding .hgignore
1450 adding .hgignore
1447
1451
1448 $ echo This is file a1 > a
1452 $ echo This is file a1 > a
1449 $ hg commit -Am "commit #1" -d'0 0'
1453 $ hg commit -Am "commit #1" -d'0 0'
1450 adding a
1454 adding a
1451
1455
1452 $ hg update 0
1456 $ hg update 0
1453 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
1457 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
1454 $ echo This is file b1 > b
1458 $ echo This is file b1 > b
1455 $ hg commit -Am "commit #2" -d'0 0'
1459 $ hg commit -Am "commit #2" -d'0 0'
1456 adding b
1460 adding b
1457 created new head
1461 created new head
1458
1462
1459 $ hg merge 1
1463 $ hg merge 1
1460 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1464 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1461 (branch merge, don't forget to commit)
1465 (branch merge, don't forget to commit)
1462
1466
1463 $ cd ..
1467 $ cd ..
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
General Comments 0
You need to be logged in to leave comments. Login now