changegroup: delete unused 'bundlecaps' argument (API)
Martin von Zweigbergk
r32150:282b288a default
@@ -1,1045 +1,1034 b''
1 # changegroup.py - Mercurial changegroup manipulation functions
1 # changegroup.py - Mercurial changegroup manipulation functions
2 #
2 #
3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import os
10 import os
11 import struct
11 import struct
12 import tempfile
12 import tempfile
13 import weakref
13 import weakref
14
14
15 from .i18n import _
15 from .i18n import _
16 from .node import (
16 from .node import (
17 hex,
17 hex,
18 nullrev,
18 nullrev,
19 short,
19 short,
20 )
20 )
21
21
22 from . import (
22 from . import (
23 branchmap,
23 branchmap,
24 dagutil,
24 dagutil,
25 discovery,
25 discovery,
26 error,
26 error,
27 mdiff,
27 mdiff,
28 phases,
28 phases,
29 pycompat,
29 pycompat,
30 util,
30 util,
31 )
31 )
32
32
33 _CHANGEGROUPV1_DELTA_HEADER = "20s20s20s20s"
33 _CHANGEGROUPV1_DELTA_HEADER = "20s20s20s20s"
34 _CHANGEGROUPV2_DELTA_HEADER = "20s20s20s20s20s"
34 _CHANGEGROUPV2_DELTA_HEADER = "20s20s20s20s20s"
35 _CHANGEGROUPV3_DELTA_HEADER = ">20s20s20s20s20sH"
35 _CHANGEGROUPV3_DELTA_HEADER = ">20s20s20s20s20sH"
36
36
37 def readexactly(stream, n):
37 def readexactly(stream, n):
38 '''read n bytes from stream.read and abort if less was available'''
38 '''read n bytes from stream.read and abort if less was available'''
39 s = stream.read(n)
39 s = stream.read(n)
40 if len(s) < n:
40 if len(s) < n:
41 raise error.Abort(_("stream ended unexpectedly"
41 raise error.Abort(_("stream ended unexpectedly"
42 " (got %d bytes, expected %d)")
42 " (got %d bytes, expected %d)")
43 % (len(s), n))
43 % (len(s), n))
44 return s
44 return s
45
45
46 def getchunk(stream):
46 def getchunk(stream):
47 """return the next chunk from stream as a string"""
47 """return the next chunk from stream as a string"""
48 d = readexactly(stream, 4)
48 d = readexactly(stream, 4)
49 l = struct.unpack(">l", d)[0]
49 l = struct.unpack(">l", d)[0]
50 if l <= 4:
50 if l <= 4:
51 if l:
51 if l:
52 raise error.Abort(_("invalid chunk length %d") % l)
52 raise error.Abort(_("invalid chunk length %d") % l)
53 return ""
53 return ""
54 return readexactly(stream, l - 4)
54 return readexactly(stream, l - 4)
55
55
56 def chunkheader(length):
56 def chunkheader(length):
57 """return a changegroup chunk header (string)"""
57 """return a changegroup chunk header (string)"""
58 return struct.pack(">l", length + 4)
58 return struct.pack(">l", length + 4)
59
59
60 def closechunk():
60 def closechunk():
61 """return a changegroup chunk header (string) for a zero-length chunk"""
61 """return a changegroup chunk header (string) for a zero-length chunk"""
62 return struct.pack(">l", 0)
62 return struct.pack(">l", 0)
63
63
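Editor's note: a quick way to see the framing these helpers implement. Each chunk is a 4-byte big-endian length (which counts the 4 header bytes themselves) followed by the payload, and a zero-length header closes a group. A minimal round-trip sketch, assuming only the helpers above and the standard library:

    import io

    payload = b'some delta data'
    framed = chunkheader(len(payload)) + payload + closechunk()
    stream = io.BytesIO(framed)
    assert getchunk(stream) == payload   # header carried len(payload) + 4
    assert getchunk(stream) == ""        # zero-length header: end of group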
64 def combineresults(results):
64 def combineresults(results):
65 """logic to combine 0 or more addchangegroup results into one"""
65 """logic to combine 0 or more addchangegroup results into one"""
66 changedheads = 0
66 changedheads = 0
67 result = 1
67 result = 1
68 for ret in results:
68 for ret in results:
69 # If any changegroup result is 0, return 0
69 # If any changegroup result is 0, return 0
70 if ret == 0:
70 if ret == 0:
71 result = 0
71 result = 0
72 break
72 break
73 if ret < -1:
73 if ret < -1:
74 changedheads += ret + 1
74 changedheads += ret + 1
75 elif ret > 1:
75 elif ret > 1:
76 changedheads += ret - 1
76 changedheads += ret - 1
77 if changedheads > 0:
77 if changedheads > 0:
78 result = 1 + changedheads
78 result = 1 + changedheads
79 elif changedheads < 0:
79 elif changedheads < 0:
80 result = -1 + changedheads
80 result = -1 + changedheads
81 return result
81 return result
82
82
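Editor's note: the encoding combined here is the one apply() returns (see cg1unpacker.apply below): 0 means nothing changed, 1 means the head count is unchanged, 1+n means n heads were added, and -1-n means n heads were removed. A few illustrative calls, not from the original code:

    assert combineresults([]) == 1        # nothing to combine, still a success
    assert combineresults([1, 1]) == 1    # head count unchanged in both
    assert combineresults([3, 2]) == 4    # +2 heads and +1 head -> 1 + 3
    assert combineresults([-2, 1]) == -2  # one head removed -> -1 - 1
    assert combineresults([0, 5]) == 0    # any zero result wins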
83 def writechunks(ui, chunks, filename, vfs=None):
83 def writechunks(ui, chunks, filename, vfs=None):
84 """Write chunks to a file and return its filename.
84 """Write chunks to a file and return its filename.
85
85
86 The stream is assumed to be a bundle file.
86 The stream is assumed to be a bundle file.
87 Existing files will not be overwritten.
87 Existing files will not be overwritten.
88 If no filename is specified, a temporary file is created.
88 If no filename is specified, a temporary file is created.
89 """
89 """
90 fh = None
90 fh = None
91 cleanup = None
91 cleanup = None
92 try:
92 try:
93 if filename:
93 if filename:
94 if vfs:
94 if vfs:
95 fh = vfs.open(filename, "wb")
95 fh = vfs.open(filename, "wb")
96 else:
96 else:
97 # Increase default buffer size because default is usually
97 # Increase default buffer size because default is usually
98 # small (4k is common on Linux).
98 # small (4k is common on Linux).
99 fh = open(filename, "wb", 131072)
99 fh = open(filename, "wb", 131072)
100 else:
100 else:
101 fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg")
101 fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg")
102 fh = os.fdopen(fd, pycompat.sysstr("wb"))
102 fh = os.fdopen(fd, pycompat.sysstr("wb"))
103 cleanup = filename
103 cleanup = filename
104 for c in chunks:
104 for c in chunks:
105 fh.write(c)
105 fh.write(c)
106 cleanup = None
106 cleanup = None
107 return filename
107 return filename
108 finally:
108 finally:
109 if fh is not None:
109 if fh is not None:
110 fh.close()
110 fh.close()
111 if cleanup is not None:
111 if cleanup is not None:
112 if filename and vfs:
112 if filename and vfs:
113 vfs.unlink(cleanup)
113 vfs.unlink(cleanup)
114 else:
114 else:
115 os.unlink(cleanup)
115 os.unlink(cleanup)
116
116
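Editor's note: a hedged usage sketch for writechunks(). The chunks iterable would typically come from a packer or from cg1unpacker.getchunks(); 'ui', 'cg' and 'repo' are assumed to exist in the caller.

    # Spill an incoming changegroup to a temporary bundle file
    # (filename=None -> a tempfile named hg-bundle-*.hg, per the docstring).
    tmpbundle = writechunks(ui, cg.getchunks(), None)
    # Or write to an explicit path through a vfs:
    # writechunks(ui, cg.getchunks(), 'mybundle.hg', vfs=repo.vfs)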
117 class cg1unpacker(object):
117 class cg1unpacker(object):
118 """Unpacker for cg1 changegroup streams.
118 """Unpacker for cg1 changegroup streams.
119
119
120 A changegroup unpacker handles the framing of the revision data in
120 A changegroup unpacker handles the framing of the revision data in
121 the wire format. Most consumers will want to use the apply()
121 the wire format. Most consumers will want to use the apply()
122 method to add the changes from the changegroup to a repository.
122 method to add the changes from the changegroup to a repository.
123
123
124 If you're forwarding a changegroup unmodified to another consumer,
124 If you're forwarding a changegroup unmodified to another consumer,
125 use getchunks(), which returns an iterator of changegroup
125 use getchunks(), which returns an iterator of changegroup
126 chunks. This is mostly useful for cases where you need to know the
126 chunks. This is mostly useful for cases where you need to know the
127 data stream has ended by observing the end of the changegroup.
127 data stream has ended by observing the end of the changegroup.
128
128
129 deltachunk() is useful only if you're applying delta data. Most
129 deltachunk() is useful only if you're applying delta data. Most
130 consumers should prefer apply() instead.
130 consumers should prefer apply() instead.
131
131
132 A few other public methods exist. Those are used only for
132 A few other public methods exist. Those are used only for
133 bundlerepo and some debug commands - their use is discouraged.
133 bundlerepo and some debug commands - their use is discouraged.
134 """
134 """
135 deltaheader = _CHANGEGROUPV1_DELTA_HEADER
135 deltaheader = _CHANGEGROUPV1_DELTA_HEADER
136 deltaheadersize = struct.calcsize(deltaheader)
136 deltaheadersize = struct.calcsize(deltaheader)
137 version = '01'
137 version = '01'
138 _grouplistcount = 1 # One list of files after the manifests
138 _grouplistcount = 1 # One list of files after the manifests
139
139
140 def __init__(self, fh, alg, extras=None):
140 def __init__(self, fh, alg, extras=None):
141 if alg is None:
141 if alg is None:
142 alg = 'UN'
142 alg = 'UN'
143 if alg not in util.compengines.supportedbundletypes:
143 if alg not in util.compengines.supportedbundletypes:
144 raise error.Abort(_('unknown stream compression type: %s')
144 raise error.Abort(_('unknown stream compression type: %s')
145 % alg)
145 % alg)
146 if alg == 'BZ':
146 if alg == 'BZ':
147 alg = '_truncatedBZ'
147 alg = '_truncatedBZ'
148
148
149 compengine = util.compengines.forbundletype(alg)
149 compengine = util.compengines.forbundletype(alg)
150 self._stream = compengine.decompressorreader(fh)
150 self._stream = compengine.decompressorreader(fh)
151 self._type = alg
151 self._type = alg
152 self.extras = extras or {}
152 self.extras = extras or {}
153 self.callback = None
153 self.callback = None
154
154
155 # These methods (compressed, read, seek, tell) all appear to only
155 # These methods (compressed, read, seek, tell) all appear to only
156 # be used by bundlerepo, but it's a little hard to tell.
156 # be used by bundlerepo, but it's a little hard to tell.
157 def compressed(self):
157 def compressed(self):
158 return self._type is not None and self._type != 'UN'
158 return self._type is not None and self._type != 'UN'
159 def read(self, l):
159 def read(self, l):
160 return self._stream.read(l)
160 return self._stream.read(l)
161 def seek(self, pos):
161 def seek(self, pos):
162 return self._stream.seek(pos)
162 return self._stream.seek(pos)
163 def tell(self):
163 def tell(self):
164 return self._stream.tell()
164 return self._stream.tell()
165 def close(self):
165 def close(self):
166 return self._stream.close()
166 return self._stream.close()
167
167
168 def _chunklength(self):
168 def _chunklength(self):
169 d = readexactly(self._stream, 4)
169 d = readexactly(self._stream, 4)
170 l = struct.unpack(">l", d)[0]
170 l = struct.unpack(">l", d)[0]
171 if l <= 4:
171 if l <= 4:
172 if l:
172 if l:
173 raise error.Abort(_("invalid chunk length %d") % l)
173 raise error.Abort(_("invalid chunk length %d") % l)
174 return 0
174 return 0
175 if self.callback:
175 if self.callback:
176 self.callback()
176 self.callback()
177 return l - 4
177 return l - 4
178
178
179 def changelogheader(self):
179 def changelogheader(self):
180 """v10 does not have a changelog header chunk"""
180 """v10 does not have a changelog header chunk"""
181 return {}
181 return {}
182
182
183 def manifestheader(self):
183 def manifestheader(self):
184 """v10 does not have a manifest header chunk"""
184 """v10 does not have a manifest header chunk"""
185 return {}
185 return {}
186
186
187 def filelogheader(self):
187 def filelogheader(self):
188 """return the header of the filelogs chunk, v10 only has the filename"""
188 """return the header of the filelogs chunk, v10 only has the filename"""
189 l = self._chunklength()
189 l = self._chunklength()
190 if not l:
190 if not l:
191 return {}
191 return {}
192 fname = readexactly(self._stream, l)
192 fname = readexactly(self._stream, l)
193 return {'filename': fname}
193 return {'filename': fname}
194
194
195 def _deltaheader(self, headertuple, prevnode):
195 def _deltaheader(self, headertuple, prevnode):
196 node, p1, p2, cs = headertuple
196 node, p1, p2, cs = headertuple
197 if prevnode is None:
197 if prevnode is None:
198 deltabase = p1
198 deltabase = p1
199 else:
199 else:
200 deltabase = prevnode
200 deltabase = prevnode
201 flags = 0
201 flags = 0
202 return node, p1, p2, deltabase, cs, flags
202 return node, p1, p2, deltabase, cs, flags
203
203
204 def deltachunk(self, prevnode):
204 def deltachunk(self, prevnode):
205 l = self._chunklength()
205 l = self._chunklength()
206 if not l:
206 if not l:
207 return {}
207 return {}
208 headerdata = readexactly(self._stream, self.deltaheadersize)
208 headerdata = readexactly(self._stream, self.deltaheadersize)
209 header = struct.unpack(self.deltaheader, headerdata)
209 header = struct.unpack(self.deltaheader, headerdata)
210 delta = readexactly(self._stream, l - self.deltaheadersize)
210 delta = readexactly(self._stream, l - self.deltaheadersize)
211 node, p1, p2, deltabase, cs, flags = self._deltaheader(header, prevnode)
211 node, p1, p2, deltabase, cs, flags = self._deltaheader(header, prevnode)
212 return {'node': node, 'p1': p1, 'p2': p2, 'cs': cs,
212 return {'node': node, 'p1': p1, 'p2': p2, 'cs': cs,
213 'deltabase': deltabase, 'delta': delta, 'flags': flags}
213 'deltabase': deltabase, 'delta': delta, 'flags': flags}
214
214
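Editor's note: for reference, a sketch of what deltachunk() unpacks for version 01, using the _CHANGEGROUPV1_DELTA_HEADER constant defined at the top of the file; the node values here are fabricated:

    import struct

    hdrsize = struct.calcsize(_CHANGEGROUPV1_DELTA_HEADER)   # 80 bytes
    fakeheader = b'N' * 20 + b'P' * 20 + b'Q' * 20 + b'C' * 20
    node, p1, p2, cs = struct.unpack(_CHANGEGROUPV1_DELTA_HEADER, fakeheader)
    # cg1 sends no delta base: _deltaheader() picks p1 for the first chunk of
    # a group and the previously transmitted node for the rest.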
215 def getchunks(self):
215 def getchunks(self):
216 """returns all the chunks contains in the bundle
216 """returns all the chunks contains in the bundle
217
217
218 Used when you need to forward the binary stream to a file or another
218 Used when you need to forward the binary stream to a file or another
219 network API. To do so, it parse the changegroup data, otherwise it will
219 network API. To do so, it parse the changegroup data, otherwise it will
220 block in case of sshrepo because it don't know the end of the stream.
220 block in case of sshrepo because it don't know the end of the stream.
221 """
221 """
222 # an empty chunkgroup is the end of the changegroup
222 # an empty chunkgroup is the end of the changegroup
223 # a changegroup has at least 2 chunkgroups (changelog and manifest).
223 # a changegroup has at least 2 chunkgroups (changelog and manifest).
224 # after that, changegroup versions 1 and 2 have a series of groups
224 # after that, changegroup versions 1 and 2 have a series of groups
225 # with one group per file. changegroup 3 has a series of directory
225 # with one group per file. changegroup 3 has a series of directory
226 # manifests before the files.
226 # manifests before the files.
227 count = 0
227 count = 0
228 emptycount = 0
228 emptycount = 0
229 while emptycount < self._grouplistcount:
229 while emptycount < self._grouplistcount:
230 empty = True
230 empty = True
231 count += 1
231 count += 1
232 while True:
232 while True:
233 chunk = getchunk(self)
233 chunk = getchunk(self)
234 if not chunk:
234 if not chunk:
235 if empty and count > 2:
235 if empty and count > 2:
236 emptycount += 1
236 emptycount += 1
237 break
237 break
238 empty = False
238 empty = False
239 yield chunkheader(len(chunk))
239 yield chunkheader(len(chunk))
240 pos = 0
240 pos = 0
241 while pos < len(chunk):
241 while pos < len(chunk):
242 next = pos + 2**20
242 next = pos + 2**20
243 yield chunk[pos:next]
243 yield chunk[pos:next]
244 pos = next
244 pos = next
245 yield closechunk()
245 yield closechunk()
246
246
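Editor's note: the loop above walks the layout described in its comments: a changelog group, a manifest group, then (for versions 01/02) one group per file, each group closed by an empty chunk and the whole file list closed by a final empty chunk. Forwarding the stream untouched, which is the use case the docstring mentions, is then just (sketch; 'cg' and 'outfh' are assumed):

    for chunk in cg.getchunks():
        outfh.write(chunk)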
247 def _unpackmanifests(self, repo, revmap, trp, prog, numchanges):
247 def _unpackmanifests(self, repo, revmap, trp, prog, numchanges):
248 # We know that we'll never have more manifests than we had
248 # We know that we'll never have more manifests than we had
249 # changesets.
249 # changesets.
250 self.callback = prog(_('manifests'), numchanges)
250 self.callback = prog(_('manifests'), numchanges)
251 # no need to check for empty manifest group here:
251 # no need to check for empty manifest group here:
252 # if the result of the merge of 1 and 2 is the same in 3 and 4,
252 # if the result of the merge of 1 and 2 is the same in 3 and 4,
253 # no new manifest will be created and the manifest group will
253 # no new manifest will be created and the manifest group will
254 # be empty during the pull
254 # be empty during the pull
255 self.manifestheader()
255 self.manifestheader()
256 repo.manifestlog._revlog.addgroup(self, revmap, trp)
256 repo.manifestlog._revlog.addgroup(self, revmap, trp)
257 repo.ui.progress(_('manifests'), None)
257 repo.ui.progress(_('manifests'), None)
258 self.callback = None
258 self.callback = None
259
259
260 def apply(self, repo, srctype, url, emptyok=False,
260 def apply(self, repo, srctype, url, emptyok=False,
261 targetphase=phases.draft, expectedtotal=None):
261 targetphase=phases.draft, expectedtotal=None):
262 """Add the changegroup returned by source.read() to this repo.
262 """Add the changegroup returned by source.read() to this repo.
263 srctype is a string like 'push', 'pull', or 'unbundle'. url is
263 srctype is a string like 'push', 'pull', or 'unbundle'. url is
264 the URL of the repo where this changegroup is coming from.
264 the URL of the repo where this changegroup is coming from.
265
265
266 Return an integer summarizing the change to this repo:
266 Return an integer summarizing the change to this repo:
267 - nothing changed or no source: 0
267 - nothing changed or no source: 0
268 - more heads than before: 1+added heads (2..n)
268 - more heads than before: 1+added heads (2..n)
269 - fewer heads than before: -1-removed heads (-2..-n)
269 - fewer heads than before: -1-removed heads (-2..-n)
270 - number of heads stays the same: 1
270 - number of heads stays the same: 1
271 """
271 """
272 repo = repo.unfiltered()
272 repo = repo.unfiltered()
273 def csmap(x):
273 def csmap(x):
274 repo.ui.debug("add changeset %s\n" % short(x))
274 repo.ui.debug("add changeset %s\n" % short(x))
275 return len(cl)
275 return len(cl)
276
276
277 def revmap(x):
277 def revmap(x):
278 return cl.rev(x)
278 return cl.rev(x)
279
279
280 changesets = files = revisions = 0
280 changesets = files = revisions = 0
281
281
282 try:
282 try:
283 with repo.transaction("\n".join([srctype,
283 with repo.transaction("\n".join([srctype,
284 util.hidepassword(url)])) as tr:
284 util.hidepassword(url)])) as tr:
285 # The transaction could have been created before and already
285 # The transaction could have been created before and already
286 # carries source information. In this case we use the top
286 # carries source information. In this case we use the top
287 # level data. We overwrite the argument because we need to use
287 # level data. We overwrite the argument because we need to use
288 # the top level value (if they exist) in this function.
288 # the top level value (if they exist) in this function.
289 srctype = tr.hookargs.setdefault('source', srctype)
289 srctype = tr.hookargs.setdefault('source', srctype)
290 url = tr.hookargs.setdefault('url', url)
290 url = tr.hookargs.setdefault('url', url)
291 repo.hook('prechangegroup', throw=True, **tr.hookargs)
291 repo.hook('prechangegroup', throw=True, **tr.hookargs)
292
292
293 # write changelog data to temp files so concurrent readers
293 # write changelog data to temp files so concurrent readers
294 # will not see an inconsistent view
294 # will not see an inconsistent view
295 cl = repo.changelog
295 cl = repo.changelog
296 cl.delayupdate(tr)
296 cl.delayupdate(tr)
297 oldheads = set(cl.heads())
297 oldheads = set(cl.heads())
298
298
299 trp = weakref.proxy(tr)
299 trp = weakref.proxy(tr)
300 # pull off the changeset group
300 # pull off the changeset group
301 repo.ui.status(_("adding changesets\n"))
301 repo.ui.status(_("adding changesets\n"))
302 clstart = len(cl)
302 clstart = len(cl)
303 class prog(object):
303 class prog(object):
304 def __init__(self, step, total):
304 def __init__(self, step, total):
305 self._step = step
305 self._step = step
306 self._total = total
306 self._total = total
307 self._count = 1
307 self._count = 1
308 def __call__(self):
308 def __call__(self):
309 repo.ui.progress(self._step, self._count,
309 repo.ui.progress(self._step, self._count,
310 unit=_('chunks'), total=self._total)
310 unit=_('chunks'), total=self._total)
311 self._count += 1
311 self._count += 1
312 self.callback = prog(_('changesets'), expectedtotal)
312 self.callback = prog(_('changesets'), expectedtotal)
313
313
314 efiles = set()
314 efiles = set()
315 def onchangelog(cl, node):
315 def onchangelog(cl, node):
316 efiles.update(cl.readfiles(node))
316 efiles.update(cl.readfiles(node))
317
317
318 self.changelogheader()
318 self.changelogheader()
319 srccontent = cl.addgroup(self, csmap, trp,
319 srccontent = cl.addgroup(self, csmap, trp,
320 addrevisioncb=onchangelog)
320 addrevisioncb=onchangelog)
321 efiles = len(efiles)
321 efiles = len(efiles)
322
322
323 if not (srccontent or emptyok):
323 if not (srccontent or emptyok):
324 raise error.Abort(_("received changelog group is empty"))
324 raise error.Abort(_("received changelog group is empty"))
325 clend = len(cl)
325 clend = len(cl)
326 changesets = clend - clstart
326 changesets = clend - clstart
327 repo.ui.progress(_('changesets'), None)
327 repo.ui.progress(_('changesets'), None)
328 self.callback = None
328 self.callback = None
329
329
330 # pull off the manifest group
330 # pull off the manifest group
331 repo.ui.status(_("adding manifests\n"))
331 repo.ui.status(_("adding manifests\n"))
332 self._unpackmanifests(repo, revmap, trp, prog, changesets)
332 self._unpackmanifests(repo, revmap, trp, prog, changesets)
333
333
334 needfiles = {}
334 needfiles = {}
335 if repo.ui.configbool('server', 'validate', default=False):
335 if repo.ui.configbool('server', 'validate', default=False):
336 cl = repo.changelog
336 cl = repo.changelog
337 ml = repo.manifestlog
337 ml = repo.manifestlog
338 # validate incoming csets have their manifests
338 # validate incoming csets have their manifests
339 for cset in xrange(clstart, clend):
339 for cset in xrange(clstart, clend):
340 mfnode = cl.changelogrevision(cset).manifest
340 mfnode = cl.changelogrevision(cset).manifest
341 mfest = ml[mfnode].readdelta()
341 mfest = ml[mfnode].readdelta()
342 # store file nodes we must see
342 # store file nodes we must see
343 for f, n in mfest.iteritems():
343 for f, n in mfest.iteritems():
344 needfiles.setdefault(f, set()).add(n)
344 needfiles.setdefault(f, set()).add(n)
345
345
346 # process the files
346 # process the files
347 repo.ui.status(_("adding file changes\n"))
347 repo.ui.status(_("adding file changes\n"))
348 newrevs, newfiles = _addchangegroupfiles(
348 newrevs, newfiles = _addchangegroupfiles(
349 repo, self, revmap, trp, efiles, needfiles)
349 repo, self, revmap, trp, efiles, needfiles)
350 revisions += newrevs
350 revisions += newrevs
351 files += newfiles
351 files += newfiles
352
352
353 dh = 0
353 dh = 0
354 if oldheads:
354 if oldheads:
355 heads = cl.heads()
355 heads = cl.heads()
356 dh = len(heads) - len(oldheads)
356 dh = len(heads) - len(oldheads)
357 for h in heads:
357 for h in heads:
358 if h not in oldheads and repo[h].closesbranch():
358 if h not in oldheads and repo[h].closesbranch():
359 dh -= 1
359 dh -= 1
360 htext = ""
360 htext = ""
361 if dh:
361 if dh:
362 htext = _(" (%+d heads)") % dh
362 htext = _(" (%+d heads)") % dh
363
363
364 repo.ui.status(_("added %d changesets"
364 repo.ui.status(_("added %d changesets"
365 " with %d changes to %d files%s\n")
365 " with %d changes to %d files%s\n")
366 % (changesets, revisions, files, htext))
366 % (changesets, revisions, files, htext))
367 repo.invalidatevolatilesets()
367 repo.invalidatevolatilesets()
368
368
369 if changesets > 0:
369 if changesets > 0:
370 if 'node' not in tr.hookargs:
370 if 'node' not in tr.hookargs:
371 tr.hookargs['node'] = hex(cl.node(clstart))
371 tr.hookargs['node'] = hex(cl.node(clstart))
372 tr.hookargs['node_last'] = hex(cl.node(clend - 1))
372 tr.hookargs['node_last'] = hex(cl.node(clend - 1))
373 hookargs = dict(tr.hookargs)
373 hookargs = dict(tr.hookargs)
374 else:
374 else:
375 hookargs = dict(tr.hookargs)
375 hookargs = dict(tr.hookargs)
376 hookargs['node'] = hex(cl.node(clstart))
376 hookargs['node'] = hex(cl.node(clstart))
377 hookargs['node_last'] = hex(cl.node(clend - 1))
377 hookargs['node_last'] = hex(cl.node(clend - 1))
378 repo.hook('pretxnchangegroup', throw=True, **hookargs)
378 repo.hook('pretxnchangegroup', throw=True, **hookargs)
379
379
380 added = [cl.node(r) for r in xrange(clstart, clend)]
380 added = [cl.node(r) for r in xrange(clstart, clend)]
381 publishing = repo.publishing()
381 publishing = repo.publishing()
382 if srctype in ('push', 'serve'):
382 if srctype in ('push', 'serve'):
383 # Old servers can not push the boundary themselves.
383 # Old servers can not push the boundary themselves.
384 # New servers won't push the boundary if changeset already
384 # New servers won't push the boundary if changeset already
385 # exists locally as secret
385 # exists locally as secret
386 #
386 #
387 # We should not use added here but the list of all changes in
387 # We should not use added here but the list of all changes in
388 # the bundle
388 # the bundle
389 if publishing:
389 if publishing:
390 phases.advanceboundary(repo, tr, phases.public,
390 phases.advanceboundary(repo, tr, phases.public,
391 srccontent)
391 srccontent)
392 else:
392 else:
393 # Those changesets have been pushed from the
393 # Those changesets have been pushed from the
394 # outside, their phases are going to be pushed
394 # outside, their phases are going to be pushed
395 # alongside. Therefore `targetphase` is
395 # alongside. Therefore `targetphase` is
396 # ignored.
396 # ignored.
397 phases.advanceboundary(repo, tr, phases.draft,
397 phases.advanceboundary(repo, tr, phases.draft,
398 srccontent)
398 srccontent)
399 phases.retractboundary(repo, tr, phases.draft, added)
399 phases.retractboundary(repo, tr, phases.draft, added)
400 elif srctype != 'strip':
400 elif srctype != 'strip':
401 # publishing only alters behavior during push
401 # publishing only alters behavior during push
402 #
402 #
403 # strip should not touch boundary at all
403 # strip should not touch boundary at all
404 phases.retractboundary(repo, tr, targetphase, added)
404 phases.retractboundary(repo, tr, targetphase, added)
405
405
406 if changesets > 0:
406 if changesets > 0:
407 if srctype != 'strip':
407 if srctype != 'strip':
408 # During strip, branchcache is invalid but
408 # During strip, branchcache is invalid but
409 # coming call to `destroyed` will repair it.
409 # coming call to `destroyed` will repair it.
410 # In other case we can safely update cache on
410 # In other case we can safely update cache on
411 # disk.
411 # disk.
412 repo.ui.debug('updating the branch cache\n')
412 repo.ui.debug('updating the branch cache\n')
413 branchmap.updatecache(repo.filtered('served'))
413 branchmap.updatecache(repo.filtered('served'))
414
414
415 def runhooks():
415 def runhooks():
416 # These hooks run when the lock releases, not when the
416 # These hooks run when the lock releases, not when the
417 # transaction closes. So it's possible for the changelog
417 # transaction closes. So it's possible for the changelog
418 # to have changed since we last saw it.
418 # to have changed since we last saw it.
419 if clstart >= len(repo):
419 if clstart >= len(repo):
420 return
420 return
421
421
422 repo.hook("changegroup", **hookargs)
422 repo.hook("changegroup", **hookargs)
423
423
424 for n in added:
424 for n in added:
425 args = hookargs.copy()
425 args = hookargs.copy()
426 args['node'] = hex(n)
426 args['node'] = hex(n)
427 del args['node_last']
427 del args['node_last']
428 repo.hook("incoming", **args)
428 repo.hook("incoming", **args)
429
429
430 newheads = [h for h in repo.heads()
430 newheads = [h for h in repo.heads()
431 if h not in oldheads]
431 if h not in oldheads]
432 repo.ui.log("incoming",
432 repo.ui.log("incoming",
433 "%s incoming changes - new heads: %s\n",
433 "%s incoming changes - new heads: %s\n",
434 len(added),
434 len(added),
435 ', '.join([hex(c[:6]) for c in newheads]))
435 ', '.join([hex(c[:6]) for c in newheads]))
436
436
437 tr.addpostclose('changegroup-runhooks-%020i' % clstart,
437 tr.addpostclose('changegroup-runhooks-%020i' % clstart,
438 lambda tr: repo._afterlock(runhooks))
438 lambda tr: repo._afterlock(runhooks))
439 finally:
439 finally:
440 repo.ui.flush()
440 repo.ui.flush()
441 # never return 0 here:
441 # never return 0 here:
442 if dh < 0:
442 if dh < 0:
443 return dh - 1
443 return dh - 1
444 else:
444 else:
445 return dh + 1
445 return dh + 1
446
446
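Editor's note: since apply() folds the head delta into a single integer (see its docstring), callers usually decode it along these lines; a hypothetical helper, not part of this module:

    def describechange(ret):
        if ret == 0:
            return 'nothing changed'
        if ret > 1:
            return '%d new heads' % (ret - 1)
        if ret < -1:
            return '%d heads removed' % (-1 - ret)
        return 'head count unchanged'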
447 class cg2unpacker(cg1unpacker):
447 class cg2unpacker(cg1unpacker):
448 """Unpacker for cg2 streams.
448 """Unpacker for cg2 streams.
449
449
450 cg2 streams add support for generaldelta, so the delta header
450 cg2 streams add support for generaldelta, so the delta header
451 format is slightly different. All other features about the data
451 format is slightly different. All other features about the data
452 remain the same.
452 remain the same.
453 """
453 """
454 deltaheader = _CHANGEGROUPV2_DELTA_HEADER
454 deltaheader = _CHANGEGROUPV2_DELTA_HEADER
455 deltaheadersize = struct.calcsize(deltaheader)
455 deltaheadersize = struct.calcsize(deltaheader)
456 version = '02'
456 version = '02'
457
457
458 def _deltaheader(self, headertuple, prevnode):
458 def _deltaheader(self, headertuple, prevnode):
459 node, p1, p2, deltabase, cs = headertuple
459 node, p1, p2, deltabase, cs = headertuple
460 flags = 0
460 flags = 0
461 return node, p1, p2, deltabase, cs, flags
461 return node, p1, p2, deltabase, cs, flags
462
462
463 class cg3unpacker(cg2unpacker):
463 class cg3unpacker(cg2unpacker):
464 """Unpacker for cg3 streams.
464 """Unpacker for cg3 streams.
465
465
466 cg3 streams add support for exchanging treemanifests and revlog
466 cg3 streams add support for exchanging treemanifests and revlog
467 flags. It adds the revlog flags to the delta header and an empty chunk
467 flags. It adds the revlog flags to the delta header and an empty chunk
468 separating manifests and files.
468 separating manifests and files.
469 """
469 """
470 deltaheader = _CHANGEGROUPV3_DELTA_HEADER
470 deltaheader = _CHANGEGROUPV3_DELTA_HEADER
471 deltaheadersize = struct.calcsize(deltaheader)
471 deltaheadersize = struct.calcsize(deltaheader)
472 version = '03'
472 version = '03'
473 _grouplistcount = 2 # One list of manifests and one list of files
473 _grouplistcount = 2 # One list of manifests and one list of files
474
474
475 def _deltaheader(self, headertuple, prevnode):
475 def _deltaheader(self, headertuple, prevnode):
476 node, p1, p2, deltabase, cs, flags = headertuple
476 node, p1, p2, deltabase, cs, flags = headertuple
477 return node, p1, p2, deltabase, cs, flags
477 return node, p1, p2, deltabase, cs, flags
478
478
479 def _unpackmanifests(self, repo, revmap, trp, prog, numchanges):
479 def _unpackmanifests(self, repo, revmap, trp, prog, numchanges):
480 super(cg3unpacker, self)._unpackmanifests(repo, revmap, trp, prog,
480 super(cg3unpacker, self)._unpackmanifests(repo, revmap, trp, prog,
481 numchanges)
481 numchanges)
482 for chunkdata in iter(self.filelogheader, {}):
482 for chunkdata in iter(self.filelogheader, {}):
483 # If we get here, there are directory manifests in the changegroup
483 # If we get here, there are directory manifests in the changegroup
484 d = chunkdata["filename"]
484 d = chunkdata["filename"]
485 repo.ui.debug("adding %s revisions\n" % d)
485 repo.ui.debug("adding %s revisions\n" % d)
486 dirlog = repo.manifestlog._revlog.dirlog(d)
486 dirlog = repo.manifestlog._revlog.dirlog(d)
487 if not dirlog.addgroup(self, revmap, trp):
487 if not dirlog.addgroup(self, revmap, trp):
488 raise error.Abort(_("received dir revlog group is empty"))
488 raise error.Abort(_("received dir revlog group is empty"))
489
489
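Editor's note: taken together, the three unpackers differ mainly in the delta header they expect. A small sketch comparing the formats defined at the top of the file:

    import struct

    # cg1: node p1 p2 cs                   (base is implicit: p1 or prev)
    # cg2: node p1 p2 deltabase cs         (generaldelta: base is explicit)
    # cg3: node p1 p2 deltabase cs flags   (adds a big-endian 16-bit flags field)
    sizes = [struct.calcsize(f) for f in (_CHANGEGROUPV1_DELTA_HEADER,
                                          _CHANGEGROUPV2_DELTA_HEADER,
                                          _CHANGEGROUPV3_DELTA_HEADER)]
    assert sizes == [80, 100, 102]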
490 class headerlessfixup(object):
490 class headerlessfixup(object):
491 def __init__(self, fh, h):
491 def __init__(self, fh, h):
492 self._h = h
492 self._h = h
493 self._fh = fh
493 self._fh = fh
494 def read(self, n):
494 def read(self, n):
495 if self._h:
495 if self._h:
496 d, self._h = self._h[:n], self._h[n:]
496 d, self._h = self._h[:n], self._h[n:]
497 if len(d) < n:
497 if len(d) < n:
498 d += readexactly(self._fh, n - len(d))
498 d += readexactly(self._fh, n - len(d))
499 return d
499 return d
500 return readexactly(self._fh, n)
500 return readexactly(self._fh, n)
501
501
502 class cg1packer(object):
502 class cg1packer(object):
503 deltaheader = _CHANGEGROUPV1_DELTA_HEADER
503 deltaheader = _CHANGEGROUPV1_DELTA_HEADER
504 version = '01'
504 version = '01'
505 def __init__(self, repo, bundlecaps=None):
505 def __init__(self, repo):
506 """Given a source repo, construct a bundler.
506 """Given a source repo, construct a bundler.
507
508 bundlecaps is optional and can be used to specify the set of
509 capabilities which can be used to build the bundle.
510 """
507 """
511 # Set of capabilities we can use to build the bundle.
512 if bundlecaps is None:
513 bundlecaps = set()
514 self._bundlecaps = bundlecaps
515 # experimental config: bundle.reorder
508 # experimental config: bundle.reorder
516 reorder = repo.ui.config('bundle', 'reorder', 'auto')
509 reorder = repo.ui.config('bundle', 'reorder', 'auto')
517 if reorder == 'auto':
510 if reorder == 'auto':
518 reorder = None
511 reorder = None
519 else:
512 else:
520 reorder = util.parsebool(reorder)
513 reorder = util.parsebool(reorder)
521 self._repo = repo
514 self._repo = repo
522 self._reorder = reorder
515 self._reorder = reorder
523 self._progress = repo.ui.progress
516 self._progress = repo.ui.progress
524 if self._repo.ui.verbose and not self._repo.ui.debugflag:
517 if self._repo.ui.verbose and not self._repo.ui.debugflag:
525 self._verbosenote = self._repo.ui.note
518 self._verbosenote = self._repo.ui.note
526 else:
519 else:
527 self._verbosenote = lambda s: None
520 self._verbosenote = lambda s: None
528
521
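Editor's note: this constructor is the heart of the commit: the unused bundlecaps parameter is dropped, hence the "(API)" marker in the commit message. For callers the visible difference is only the signature (sketch; 'repo' assumed):

    # before this commit: cg1packer(repo, bundlecaps=caps)  # caps were stored but never read
    # after this commit:
    packer = cg1packer(repo)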
529 def close(self):
522 def close(self):
530 return closechunk()
523 return closechunk()
531
524
532 def fileheader(self, fname):
525 def fileheader(self, fname):
533 return chunkheader(len(fname)) + fname
526 return chunkheader(len(fname)) + fname
534
527
535 # Extracted both for clarity and for overriding in extensions.
528 # Extracted both for clarity and for overriding in extensions.
536 def _sortgroup(self, revlog, nodelist, lookup):
529 def _sortgroup(self, revlog, nodelist, lookup):
537 """Sort nodes for change group and turn them into revnums."""
530 """Sort nodes for change group and turn them into revnums."""
538 # for generaldelta revlogs, we linearize the revs; this will both be
531 # for generaldelta revlogs, we linearize the revs; this will both be
539 # much quicker and generate a much smaller bundle
532 # much quicker and generate a much smaller bundle
540 if (revlog._generaldelta and self._reorder is None) or self._reorder:
533 if (revlog._generaldelta and self._reorder is None) or self._reorder:
541 dag = dagutil.revlogdag(revlog)
534 dag = dagutil.revlogdag(revlog)
542 return dag.linearize(set(revlog.rev(n) for n in nodelist))
535 return dag.linearize(set(revlog.rev(n) for n in nodelist))
543 else:
536 else:
544 return sorted([revlog.rev(n) for n in nodelist])
537 return sorted([revlog.rev(n) for n in nodelist])
545
538
546 def group(self, nodelist, revlog, lookup, units=None):
539 def group(self, nodelist, revlog, lookup, units=None):
547 """Calculate a delta group, yielding a sequence of changegroup chunks
540 """Calculate a delta group, yielding a sequence of changegroup chunks
548 (strings).
541 (strings).
549
542
550 Given a list of changeset revs, return a set of deltas and
543 Given a list of changeset revs, return a set of deltas and
551 metadata corresponding to nodes. The first delta is
544 metadata corresponding to nodes. The first delta is
552 first parent(nodelist[0]) -> nodelist[0]; the receiver is
545 first parent(nodelist[0]) -> nodelist[0]; the receiver is
553 guaranteed to have this parent as it has all history before
546 guaranteed to have this parent as it has all history before
554 these changesets. If the first parent is nullrev, the
547 these changesets. If the first parent is nullrev, the
555 changegroup starts with a full revision.
548 changegroup starts with a full revision.
556
549
557 If units is not None, progress detail will be generated; units specifies
550 If units is not None, progress detail will be generated; units specifies
558 the type of revlog that is touched (changelog, manifest, etc.).
551 the type of revlog that is touched (changelog, manifest, etc.).
559 """
552 """
560 # if we don't have any revisions touched by these changesets, bail
553 # if we don't have any revisions touched by these changesets, bail
561 if len(nodelist) == 0:
554 if len(nodelist) == 0:
562 yield self.close()
555 yield self.close()
563 return
556 return
564
557
565 revs = self._sortgroup(revlog, nodelist, lookup)
558 revs = self._sortgroup(revlog, nodelist, lookup)
566
559
567 # add the parent of the first rev
560 # add the parent of the first rev
568 p = revlog.parentrevs(revs[0])[0]
561 p = revlog.parentrevs(revs[0])[0]
569 revs.insert(0, p)
562 revs.insert(0, p)
570
563
571 # build deltas
564 # build deltas
572 total = len(revs) - 1
565 total = len(revs) - 1
573 msgbundling = _('bundling')
566 msgbundling = _('bundling')
574 for r in xrange(len(revs) - 1):
567 for r in xrange(len(revs) - 1):
575 if units is not None:
568 if units is not None:
576 self._progress(msgbundling, r + 1, unit=units, total=total)
569 self._progress(msgbundling, r + 1, unit=units, total=total)
577 prev, curr = revs[r], revs[r + 1]
570 prev, curr = revs[r], revs[r + 1]
578 linknode = lookup(revlog.node(curr))
571 linknode = lookup(revlog.node(curr))
579 for c in self.revchunk(revlog, curr, prev, linknode):
572 for c in self.revchunk(revlog, curr, prev, linknode):
580 yield c
573 yield c
581
574
582 if units is not None:
575 if units is not None:
583 self._progress(msgbundling, None)
576 self._progress(msgbundling, None)
584 yield self.close()
577 yield self.close()
585
578
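Editor's note: a hedged usage sketch of group(): stream the changelog deltas for a list of nodes, using an identity lookup as generate() does for the changelog. 'packer', 'repo', 'nodes' and 'outfh' are assumed.

    cl = repo.changelog
    for chunk in packer.group(nodes, cl, lambda node: node,
                              units=_('changesets')):
        outfh.write(chunk)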
586 # filter any nodes that claim to be part of the known set
579 # filter any nodes that claim to be part of the known set
587 def prune(self, revlog, missing, commonrevs):
580 def prune(self, revlog, missing, commonrevs):
588 rr, rl = revlog.rev, revlog.linkrev
581 rr, rl = revlog.rev, revlog.linkrev
589 return [n for n in missing if rl(rr(n)) not in commonrevs]
582 return [n for n in missing if rl(rr(n)) not in commonrevs]
590
583
591 def _packmanifests(self, dir, mfnodes, lookuplinknode):
584 def _packmanifests(self, dir, mfnodes, lookuplinknode):
592 """Pack flat manifests into a changegroup stream."""
585 """Pack flat manifests into a changegroup stream."""
593 assert not dir
586 assert not dir
594 for chunk in self.group(mfnodes, self._repo.manifestlog._revlog,
587 for chunk in self.group(mfnodes, self._repo.manifestlog._revlog,
595 lookuplinknode, units=_('manifests')):
588 lookuplinknode, units=_('manifests')):
596 yield chunk
589 yield chunk
597
590
598 def _manifestsdone(self):
591 def _manifestsdone(self):
599 return ''
592 return ''
600
593
601 def generate(self, commonrevs, clnodes, fastpathlinkrev, source):
594 def generate(self, commonrevs, clnodes, fastpathlinkrev, source):
602 '''yield a sequence of changegroup chunks (strings)'''
595 '''yield a sequence of changegroup chunks (strings)'''
603 repo = self._repo
596 repo = self._repo
604 cl = repo.changelog
597 cl = repo.changelog
605
598
606 clrevorder = {}
599 clrevorder = {}
607 mfs = {} # needed manifests
600 mfs = {} # needed manifests
608 fnodes = {} # needed file nodes
601 fnodes = {} # needed file nodes
609 changedfiles = set()
602 changedfiles = set()
610
603
611 # Callback for the changelog, used to collect changed files and manifest
604 # Callback for the changelog, used to collect changed files and manifest
612 # nodes.
605 # nodes.
613 # Returns the linkrev node (identity in the changelog case).
606 # Returns the linkrev node (identity in the changelog case).
614 def lookupcl(x):
607 def lookupcl(x):
615 c = cl.read(x)
608 c = cl.read(x)
616 clrevorder[x] = len(clrevorder)
609 clrevorder[x] = len(clrevorder)
617 n = c[0]
610 n = c[0]
618 # record the first changeset introducing this manifest version
611 # record the first changeset introducing this manifest version
619 mfs.setdefault(n, x)
612 mfs.setdefault(n, x)
620 # Record a complete list of potentially-changed files in
613 # Record a complete list of potentially-changed files in
621 # this manifest.
614 # this manifest.
622 changedfiles.update(c[3])
615 changedfiles.update(c[3])
623 return x
616 return x
624
617
625 self._verbosenote(_('uncompressed size of bundle content:\n'))
618 self._verbosenote(_('uncompressed size of bundle content:\n'))
626 size = 0
619 size = 0
627 for chunk in self.group(clnodes, cl, lookupcl, units=_('changesets')):
620 for chunk in self.group(clnodes, cl, lookupcl, units=_('changesets')):
628 size += len(chunk)
621 size += len(chunk)
629 yield chunk
622 yield chunk
630 self._verbosenote(_('%8.i (changelog)\n') % size)
623 self._verbosenote(_('%8.i (changelog)\n') % size)
631
624
632 # We need to make sure that the linkrev in the changegroup refers to
625 # We need to make sure that the linkrev in the changegroup refers to
633 # the first changeset that introduced the manifest or file revision.
626 # the first changeset that introduced the manifest or file revision.
634 # The fastpath is usually safer than the slowpath, because the filelogs
627 # The fastpath is usually safer than the slowpath, because the filelogs
635 # are walked in revlog order.
628 # are walked in revlog order.
636 #
629 #
637 # When taking the slowpath with reorder=None and the manifest revlog
630 # When taking the slowpath with reorder=None and the manifest revlog
638 # uses generaldelta, the manifest may be walked in the "wrong" order.
631 # uses generaldelta, the manifest may be walked in the "wrong" order.
639 # Without 'clrevorder', we would get an incorrect linkrev (see fix in
632 # Without 'clrevorder', we would get an incorrect linkrev (see fix in
640 # cc0ff93d0c0c).
633 # cc0ff93d0c0c).
641 #
634 #
642 # When taking the fastpath, we are only vulnerable to reordering
635 # When taking the fastpath, we are only vulnerable to reordering
643 # of the changelog itself. The changelog never uses generaldelta, so
636 # of the changelog itself. The changelog never uses generaldelta, so
644 # it is only reordered when reorder=True. To handle this case, we
637 # it is only reordered when reorder=True. To handle this case, we
645 # simply take the slowpath, which already has the 'clrevorder' logic.
638 # simply take the slowpath, which already has the 'clrevorder' logic.
646 # This was also fixed in cc0ff93d0c0c.
639 # This was also fixed in cc0ff93d0c0c.
647 fastpathlinkrev = fastpathlinkrev and not self._reorder
640 fastpathlinkrev = fastpathlinkrev and not self._reorder
648 # Treemanifests don't work correctly with fastpathlinkrev
641 # Treemanifests don't work correctly with fastpathlinkrev
649 # either, because we don't discover which directory nodes to
642 # either, because we don't discover which directory nodes to
650 # send along with files. This could probably be fixed.
643 # send along with files. This could probably be fixed.
651 fastpathlinkrev = fastpathlinkrev and (
644 fastpathlinkrev = fastpathlinkrev and (
652 'treemanifest' not in repo.requirements)
645 'treemanifest' not in repo.requirements)
653
646
654 for chunk in self.generatemanifests(commonrevs, clrevorder,
647 for chunk in self.generatemanifests(commonrevs, clrevorder,
655 fastpathlinkrev, mfs, fnodes):
648 fastpathlinkrev, mfs, fnodes):
656 yield chunk
649 yield chunk
657 mfs.clear()
650 mfs.clear()
658 clrevs = set(cl.rev(x) for x in clnodes)
651 clrevs = set(cl.rev(x) for x in clnodes)
659
652
660 if not fastpathlinkrev:
653 if not fastpathlinkrev:
661 def linknodes(unused, fname):
654 def linknodes(unused, fname):
662 return fnodes.get(fname, {})
655 return fnodes.get(fname, {})
663 else:
656 else:
664 cln = cl.node
657 cln = cl.node
665 def linknodes(filerevlog, fname):
658 def linknodes(filerevlog, fname):
666 llr = filerevlog.linkrev
659 llr = filerevlog.linkrev
667 fln = filerevlog.node
660 fln = filerevlog.node
668 revs = ((r, llr(r)) for r in filerevlog)
661 revs = ((r, llr(r)) for r in filerevlog)
669 return dict((fln(r), cln(lr)) for r, lr in revs if lr in clrevs)
662 return dict((fln(r), cln(lr)) for r, lr in revs if lr in clrevs)
670
663
671 for chunk in self.generatefiles(changedfiles, linknodes, commonrevs,
664 for chunk in self.generatefiles(changedfiles, linknodes, commonrevs,
672 source):
665 source):
673 yield chunk
666 yield chunk
674
667
675 yield self.close()
668 yield self.close()
676
669
677 if clnodes:
670 if clnodes:
678 repo.hook('outgoing', node=hex(clnodes[0]), source=source)
671 repo.hook('outgoing', node=hex(clnodes[0]), source=source)
679
672
680 def generatemanifests(self, commonrevs, clrevorder, fastpathlinkrev, mfs,
673 def generatemanifests(self, commonrevs, clrevorder, fastpathlinkrev, mfs,
681 fnodes):
674 fnodes):
682 repo = self._repo
675 repo = self._repo
683 mfl = repo.manifestlog
676 mfl = repo.manifestlog
684 dirlog = mfl._revlog.dirlog
677 dirlog = mfl._revlog.dirlog
685 tmfnodes = {'': mfs}
678 tmfnodes = {'': mfs}
686
679
687 # Callback for the manifest, used to collect linkrevs for filelog
680 # Callback for the manifest, used to collect linkrevs for filelog
688 # revisions.
681 # revisions.
689 # Returns the linkrev node (collected in lookupcl).
682 # Returns the linkrev node (collected in lookupcl).
690 def makelookupmflinknode(dir):
683 def makelookupmflinknode(dir):
691 if fastpathlinkrev:
684 if fastpathlinkrev:
692 assert not dir
685 assert not dir
693 return mfs.__getitem__
686 return mfs.__getitem__
694
687
695 def lookupmflinknode(x):
688 def lookupmflinknode(x):
696 """Callback for looking up the linknode for manifests.
689 """Callback for looking up the linknode for manifests.
697
690
698 Returns the linkrev node for the specified manifest.
691 Returns the linkrev node for the specified manifest.
699
692
700 SIDE EFFECT:
693 SIDE EFFECT:
701
694
702 1) fclnodes gets populated with the list of relevant
695 1) fclnodes gets populated with the list of relevant
703 file nodes if we're not using fastpathlinkrev
696 file nodes if we're not using fastpathlinkrev
704 2) When treemanifests are in use, collects treemanifest nodes
697 2) When treemanifests are in use, collects treemanifest nodes
705 to send
698 to send
706
699
707 Note that this means manifests must be completely sent to
700 Note that this means manifests must be completely sent to
708 the client before you can trust the list of files and
701 the client before you can trust the list of files and
709 treemanifests to send.
702 treemanifests to send.
710 """
703 """
711 clnode = tmfnodes[dir][x]
704 clnode = tmfnodes[dir][x]
712 mdata = mfl.get(dir, x).readfast(shallow=True)
705 mdata = mfl.get(dir, x).readfast(shallow=True)
713 for p, n, fl in mdata.iterentries():
706 for p, n, fl in mdata.iterentries():
714 if fl == 't': # subdirectory manifest
707 if fl == 't': # subdirectory manifest
715 subdir = dir + p + '/'
708 subdir = dir + p + '/'
716 tmfclnodes = tmfnodes.setdefault(subdir, {})
709 tmfclnodes = tmfnodes.setdefault(subdir, {})
717 tmfclnode = tmfclnodes.setdefault(n, clnode)
710 tmfclnode = tmfclnodes.setdefault(n, clnode)
718 if clrevorder[clnode] < clrevorder[tmfclnode]:
711 if clrevorder[clnode] < clrevorder[tmfclnode]:
719 tmfclnodes[n] = clnode
712 tmfclnodes[n] = clnode
720 else:
713 else:
721 f = dir + p
714 f = dir + p
722 fclnodes = fnodes.setdefault(f, {})
715 fclnodes = fnodes.setdefault(f, {})
723 fclnode = fclnodes.setdefault(n, clnode)
716 fclnode = fclnodes.setdefault(n, clnode)
724 if clrevorder[clnode] < clrevorder[fclnode]:
717 if clrevorder[clnode] < clrevorder[fclnode]:
725 fclnodes[n] = clnode
718 fclnodes[n] = clnode
726 return clnode
719 return clnode
727 return lookupmflinknode
720 return lookupmflinknode
728
721
729 size = 0
722 size = 0
730 while tmfnodes:
723 while tmfnodes:
731 dir = min(tmfnodes)
724 dir = min(tmfnodes)
732 nodes = tmfnodes[dir]
725 nodes = tmfnodes[dir]
733 prunednodes = self.prune(dirlog(dir), nodes, commonrevs)
726 prunednodes = self.prune(dirlog(dir), nodes, commonrevs)
734 if not dir or prunednodes:
727 if not dir or prunednodes:
735 for x in self._packmanifests(dir, prunednodes,
728 for x in self._packmanifests(dir, prunednodes,
736 makelookupmflinknode(dir)):
729 makelookupmflinknode(dir)):
737 size += len(x)
730 size += len(x)
738 yield x
731 yield x
739 del tmfnodes[dir]
732 del tmfnodes[dir]
740 self._verbosenote(_('%8.i (manifests)\n') % size)
733 self._verbosenote(_('%8.i (manifests)\n') % size)
741 yield self._manifestsdone()
734 yield self._manifestsdone()
742
735
743 # The 'source' parameter is useful for extensions
736 # The 'source' parameter is useful for extensions
744 def generatefiles(self, changedfiles, linknodes, commonrevs, source):
737 def generatefiles(self, changedfiles, linknodes, commonrevs, source):
745 repo = self._repo
738 repo = self._repo
746 progress = self._progress
739 progress = self._progress
747 msgbundling = _('bundling')
740 msgbundling = _('bundling')
748
741
749 total = len(changedfiles)
742 total = len(changedfiles)
750 # for progress output
743 # for progress output
751 msgfiles = _('files')
744 msgfiles = _('files')
752 for i, fname in enumerate(sorted(changedfiles)):
745 for i, fname in enumerate(sorted(changedfiles)):
753 filerevlog = repo.file(fname)
746 filerevlog = repo.file(fname)
754 if not filerevlog:
747 if not filerevlog:
755 raise error.Abort(_("empty or missing revlog for %s") % fname)
748 raise error.Abort(_("empty or missing revlog for %s") % fname)
756
749
757 linkrevnodes = linknodes(filerevlog, fname)
750 linkrevnodes = linknodes(filerevlog, fname)
758 # Lookup for filenodes, we collected the linkrev nodes above in the
751 # Lookup for filenodes, we collected the linkrev nodes above in the
759 # fastpath case and with lookupmf in the slowpath case.
752 # fastpath case and with lookupmf in the slowpath case.
760 def lookupfilelog(x):
753 def lookupfilelog(x):
761 return linkrevnodes[x]
754 return linkrevnodes[x]
762
755
763 filenodes = self.prune(filerevlog, linkrevnodes, commonrevs)
756 filenodes = self.prune(filerevlog, linkrevnodes, commonrevs)
764 if filenodes:
757 if filenodes:
765 progress(msgbundling, i + 1, item=fname, unit=msgfiles,
758 progress(msgbundling, i + 1, item=fname, unit=msgfiles,
766 total=total)
759 total=total)
767 h = self.fileheader(fname)
760 h = self.fileheader(fname)
768 size = len(h)
761 size = len(h)
769 yield h
762 yield h
770 for chunk in self.group(filenodes, filerevlog, lookupfilelog):
763 for chunk in self.group(filenodes, filerevlog, lookupfilelog):
771 size += len(chunk)
764 size += len(chunk)
772 yield chunk
765 yield chunk
773 self._verbosenote(_('%8.i %s\n') % (size, fname))
766 self._verbosenote(_('%8.i %s\n') % (size, fname))
774 progress(msgbundling, None)
767 progress(msgbundling, None)
775
768
776 def deltaparent(self, revlog, rev, p1, p2, prev):
769 def deltaparent(self, revlog, rev, p1, p2, prev):
777 return prev
770 return prev
778
771
779 def revchunk(self, revlog, rev, prev, linknode):
772 def revchunk(self, revlog, rev, prev, linknode):
780 node = revlog.node(rev)
773 node = revlog.node(rev)
781 p1, p2 = revlog.parentrevs(rev)
774 p1, p2 = revlog.parentrevs(rev)
782 base = self.deltaparent(revlog, rev, p1, p2, prev)
775 base = self.deltaparent(revlog, rev, p1, p2, prev)
783
776
784 prefix = ''
777 prefix = ''
785 if revlog.iscensored(base) or revlog.iscensored(rev):
778 if revlog.iscensored(base) or revlog.iscensored(rev):
786 try:
779 try:
787 delta = revlog.revision(node, raw=True)
780 delta = revlog.revision(node, raw=True)
788 except error.CensoredNodeError as e:
781 except error.CensoredNodeError as e:
789 delta = e.tombstone
782 delta = e.tombstone
790 if base == nullrev:
783 if base == nullrev:
791 prefix = mdiff.trivialdiffheader(len(delta))
784 prefix = mdiff.trivialdiffheader(len(delta))
792 else:
785 else:
793 baselen = revlog.rawsize(base)
786 baselen = revlog.rawsize(base)
794 prefix = mdiff.replacediffheader(baselen, len(delta))
787 prefix = mdiff.replacediffheader(baselen, len(delta))
795 elif base == nullrev:
788 elif base == nullrev:
796 delta = revlog.revision(node, raw=True)
789 delta = revlog.revision(node, raw=True)
797 prefix = mdiff.trivialdiffheader(len(delta))
790 prefix = mdiff.trivialdiffheader(len(delta))
798 else:
791 else:
799 delta = revlog.revdiff(base, rev)
792 delta = revlog.revdiff(base, rev)
800 p1n, p2n = revlog.parents(node)
793 p1n, p2n = revlog.parents(node)
801 basenode = revlog.node(base)
794 basenode = revlog.node(base)
802 flags = revlog.flags(rev)
795 flags = revlog.flags(rev)
803 meta = self.builddeltaheader(node, p1n, p2n, basenode, linknode, flags)
796 meta = self.builddeltaheader(node, p1n, p2n, basenode, linknode, flags)
804 meta += prefix
797 meta += prefix
805 l = len(meta) + len(delta)
798 l = len(meta) + len(delta)
806 yield chunkheader(l)
799 yield chunkheader(l)
807 yield meta
800 yield meta
808 yield delta
801 yield delta
809 def builddeltaheader(self, node, p1n, p2n, basenode, linknode, flags):
802 def builddeltaheader(self, node, p1n, p2n, basenode, linknode, flags):
810 # do nothing with basenode, it is implicitly the previous one in HG10
803 # do nothing with basenode, it is implicitly the previous one in HG10
811 # do nothing with flags, it is implicitly 0 for cg1 and cg2
804 # do nothing with flags, it is implicitly 0 for cg1 and cg2
812 return struct.pack(self.deltaheader, node, p1n, p2n, linknode)
805 return struct.pack(self.deltaheader, node, p1n, p2n, linknode)
813
806
814 class cg2packer(cg1packer):
807 class cg2packer(cg1packer):
815 version = '02'
808 version = '02'
816 deltaheader = _CHANGEGROUPV2_DELTA_HEADER
809 deltaheader = _CHANGEGROUPV2_DELTA_HEADER
817
810
818 def __init__(self, repo, bundlecaps=None):
811 def __init__(self, repo):
819 super(cg2packer, self).__init__(repo, bundlecaps)
812 super(cg2packer, self).__init__(repo)
820 if self._reorder is None:
813 if self._reorder is None:
821 # Since generaldelta is directly supported by cg2, reordering
814 # Since generaldelta is directly supported by cg2, reordering
822 # generally doesn't help, so we disable it by default (treating
815 # generally doesn't help, so we disable it by default (treating
823 # bundle.reorder=auto just like bundle.reorder=False).
816 # bundle.reorder=auto just like bundle.reorder=False).
824 self._reorder = False
817 self._reorder = False
825
818
826 def deltaparent(self, revlog, rev, p1, p2, prev):
819 def deltaparent(self, revlog, rev, p1, p2, prev):
827 dp = revlog.deltaparent(rev)
820 dp = revlog.deltaparent(rev)
828 if dp == nullrev and revlog.storedeltachains:
821 if dp == nullrev and revlog.storedeltachains:
829 # Avoid sending full revisions when delta parent is null. Pick prev
822 # Avoid sending full revisions when delta parent is null. Pick prev
830 # in that case. It's tempting to pick p1 in this case, as p1 will
823 # in that case. It's tempting to pick p1 in this case, as p1 will
831 # be smaller in the common case. However, computing a delta against
824 # be smaller in the common case. However, computing a delta against
832 # p1 may require resolving the raw text of p1, which could be
825 # p1 may require resolving the raw text of p1, which could be
833 # expensive. The revlog caches should have prev cached, meaning
826 # expensive. The revlog caches should have prev cached, meaning
834 # less CPU for changegroup generation. There is likely room to add
827 # less CPU for changegroup generation. There is likely room to add
835 # a flag and/or config option to control this behavior.
828 # a flag and/or config option to control this behavior.
836 return prev
829 return prev
837 elif dp == nullrev:
830 elif dp == nullrev:
838 # revlog is configured to use full snapshot for a reason,
831 # revlog is configured to use full snapshot for a reason,
839 # stick to full snapshot.
832 # stick to full snapshot.
840 return nullrev
833 return nullrev
841 elif dp not in (p1, p2, prev):
834 elif dp not in (p1, p2, prev):
842 # Pick prev when we can't be sure remote has the base revision.
835 # Pick prev when we can't be sure remote has the base revision.
843 return prev
836 return prev
844 else:
837 else:
845 return dp
838 return dp
846
839
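Editor's note: the branching above can be condensed into a small decision helper; a restatement for readability only, not part of the module:

    def choosebase(dp, p1, p2, prev, storedeltachains):
        if dp == nullrev:
            # stored as a full snapshot: delta against prev only if delta
            # chains are allowed at all, otherwise keep the full snapshot
            return prev if storedeltachains else nullrev
        if dp not in (p1, p2, prev):
            # the remote may not have the stored base revision
            return prev
        return dp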
847 def builddeltaheader(self, node, p1n, p2n, basenode, linknode, flags):
840 def builddeltaheader(self, node, p1n, p2n, basenode, linknode, flags):
848 # Do nothing with flags, it is implicitly 0 in cg1 and cg2
841 # Do nothing with flags, it is implicitly 0 in cg1 and cg2
849 return struct.pack(self.deltaheader, node, p1n, p2n, basenode, linknode)
842 return struct.pack(self.deltaheader, node, p1n, p2n, basenode, linknode)
850
843
851 class cg3packer(cg2packer):
844 class cg3packer(cg2packer):
852 version = '03'
845 version = '03'
853 deltaheader = _CHANGEGROUPV3_DELTA_HEADER
846 deltaheader = _CHANGEGROUPV3_DELTA_HEADER
854
847
855 def _packmanifests(self, dir, mfnodes, lookuplinknode):
848 def _packmanifests(self, dir, mfnodes, lookuplinknode):
856 if dir:
849 if dir:
857 yield self.fileheader(dir)
850 yield self.fileheader(dir)
858
851
859 dirlog = self._repo.manifestlog._revlog.dirlog(dir)
852 dirlog = self._repo.manifestlog._revlog.dirlog(dir)
860 for chunk in self.group(mfnodes, dirlog, lookuplinknode,
853 for chunk in self.group(mfnodes, dirlog, lookuplinknode,
861 units=_('manifests')):
854 units=_('manifests')):
862 yield chunk
855 yield chunk
863
856
864 def _manifestsdone(self):
857 def _manifestsdone(self):
865 return self.close()
858 return self.close()
866
859
867 def builddeltaheader(self, node, p1n, p2n, basenode, linknode, flags):
860 def builddeltaheader(self, node, p1n, p2n, basenode, linknode, flags):
868 return struct.pack(
861 return struct.pack(
869 self.deltaheader, node, p1n, p2n, basenode, linknode, flags)
862 self.deltaheader, node, p1n, p2n, basenode, linknode, flags)
870
863
871 _packermap = {'01': (cg1packer, cg1unpacker),
864 _packermap = {'01': (cg1packer, cg1unpacker),
872 # cg2 adds support for exchanging generaldelta
865 # cg2 adds support for exchanging generaldelta
873 '02': (cg2packer, cg2unpacker),
866 '02': (cg2packer, cg2unpacker),
874 # cg3 adds support for exchanging revlog flags and treemanifests
867 # cg3 adds support for exchanging revlog flags and treemanifests
875 '03': (cg3packer, cg3unpacker),
868 '03': (cg3packer, cg3unpacker),
876 }
869 }
877
870
878 def allsupportedversions(repo):
871 def allsupportedversions(repo):
879 versions = set(_packermap.keys())
872 versions = set(_packermap.keys())
880 if not (repo.ui.configbool('experimental', 'changegroup3') or
873 if not (repo.ui.configbool('experimental', 'changegroup3') or
881 repo.ui.configbool('experimental', 'treemanifest') or
874 repo.ui.configbool('experimental', 'treemanifest') or
882 'treemanifest' in repo.requirements):
875 'treemanifest' in repo.requirements):
883 versions.discard('03')
876 versions.discard('03')
884 return versions
877 return versions
885
878
886 # Changegroup versions that can be applied to the repo
879 # Changegroup versions that can be applied to the repo
887 def supportedincomingversions(repo):
880 def supportedincomingversions(repo):
888 return allsupportedversions(repo)
881 return allsupportedversions(repo)
889
882
890 # Changegroup versions that can be created from the repo
883 # Changegroup versions that can be created from the repo
891 def supportedoutgoingversions(repo):
884 def supportedoutgoingversions(repo):
892 versions = allsupportedversions(repo)
885 versions = allsupportedversions(repo)
893 if 'treemanifest' in repo.requirements:
886 if 'treemanifest' in repo.requirements:
894 # Versions 01 and 02 support only flat manifests and it's just too
887 # Versions 01 and 02 support only flat manifests and it's just too
895 # expensive to convert between the flat manifest and tree manifest on
888 # expensive to convert between the flat manifest and tree manifest on
896 # the fly. Since tree manifests are hashed differently, all of history
889 # the fly. Since tree manifests are hashed differently, all of history
897 # would have to be converted. Instead, we simply don't even pretend to
890 # would have to be converted. Instead, we simply don't even pretend to
898 # support versions 01 and 02.
891 # support versions 01 and 02.
899 versions.discard('01')
892 versions.discard('01')
900 versions.discard('02')
893 versions.discard('02')
901 return versions
894 return versions
902
895
903 def safeversion(repo):
896 def safeversion(repo):
904 # Finds the smallest version that it's safe to assume clients of the repo
897 # Finds the smallest version that it's safe to assume clients of the repo
905 # will support. For example, all hg versions that support generaldelta also
898 # will support. For example, all hg versions that support generaldelta also
906 # support changegroup 02.
899 # support changegroup 02.
907 versions = supportedoutgoingversions(repo)
900 versions = supportedoutgoingversions(repo)
908 if 'generaldelta' in repo.requirements:
901 if 'generaldelta' in repo.requirements:
909 versions.discard('01')
902 versions.discard('01')
910 assert versions
903 assert versions
911 return min(versions)
904 return min(versions)
912
905
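Taken together, allsupportedversions, supportedoutgoingversions and safeversion narrow the changegroup version set from config flags and repo requirements. A runnable sketch of that narrowing, with plain sets and booleans standing in for the repo and its config (the function and argument names here are made up for illustration):

def pick_safe_version(requirements, changegroup3=False, treemanifest=False):
    versions = {'01', '02', '03'}
    if not (changegroup3 or treemanifest or 'treemanifest' in requirements):
        versions.discard('03')       # cg3 only when explicitly enabled
    if 'treemanifest' in requirements:
        versions -= {'01', '02'}     # flat-manifest formats can't be produced
    if 'generaldelta' in requirements:
        versions.discard('01')       # cg1 predates generaldelta support
    assert versions
    return min(versions)

assert pick_safe_version({'generaldelta'}) == '02'
assert pick_safe_version({'treemanifest', 'generaldelta'}) == '03'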
913 def getbundler(version, repo, bundlecaps=None):
906 def getbundler(version, repo):
914 assert version in supportedoutgoingversions(repo)
907 assert version in supportedoutgoingversions(repo)
915 return _packermap[version][0](repo, bundlecaps)
908 return _packermap[version][0](repo)
916
909
917 def getunbundler(version, fh, alg, extras=None):
910 def getunbundler(version, fh, alg, extras=None):
918 return _packermap[version][1](fh, alg, extras=extras)
911 return _packermap[version][1](fh, alg, extras=extras)
919
912
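getbundler and getunbundler are the dispatch points this change touches: after dropping bundlecaps, a packer is built from the version and repo alone. A toy dispatch table showing the shape of that call (the packer entries are stand-in lambdas, not the real cg*packer classes):

_toy_packers = {'01': lambda repo: ('cg1', repo),
                '02': lambda repo: ('cg2', repo)}

def toy_getbundler(version, repo):
    assert version in _toy_packers
    return _toy_packers[version](repo)   # no bundlecaps argument any more

assert toy_getbundler('02', 'somerepo') == ('cg2', 'somerepo')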
920 def _changegroupinfo(repo, nodes, source):
913 def _changegroupinfo(repo, nodes, source):
921 if repo.ui.verbose or source == 'bundle':
914 if repo.ui.verbose or source == 'bundle':
922 repo.ui.status(_("%d changesets found\n") % len(nodes))
915 repo.ui.status(_("%d changesets found\n") % len(nodes))
923 if repo.ui.debugflag:
916 if repo.ui.debugflag:
924 repo.ui.debug("list of changesets:\n")
917 repo.ui.debug("list of changesets:\n")
925 for node in nodes:
918 for node in nodes:
926 repo.ui.debug("%s\n" % hex(node))
919 repo.ui.debug("%s\n" % hex(node))
927
920
928 def getsubsetraw(repo, outgoing, bundler, source, fastpath=False):
921 def getsubsetraw(repo, outgoing, bundler, source, fastpath=False):
929 repo = repo.unfiltered()
922 repo = repo.unfiltered()
930 commonrevs = outgoing.common
923 commonrevs = outgoing.common
931 csets = outgoing.missing
924 csets = outgoing.missing
932 heads = outgoing.missingheads
925 heads = outgoing.missingheads
933 # We go through the fast path if we get told to, or if all (unfiltered)
926 # We go through the fast path if we get told to, or if all (unfiltered)
934 # heads have been requested (since we then know that all linkrevs will
927 # heads have been requested (since we then know that all linkrevs will
935 # be pulled by the client).
928 # be pulled by the client).
936 heads.sort()
929 heads.sort()
937 fastpathlinkrev = fastpath or (
930 fastpathlinkrev = fastpath or (
938 repo.filtername is None and heads == sorted(repo.heads()))
931 repo.filtername is None and heads == sorted(repo.heads()))
939
932
940 repo.hook('preoutgoing', throw=True, source=source)
933 repo.hook('preoutgoing', throw=True, source=source)
941 _changegroupinfo(repo, csets, source)
934 _changegroupinfo(repo, csets, source)
942 return bundler.generate(commonrevs, csets, fastpathlinkrev, source)
935 return bundler.generate(commonrevs, csets, fastpathlinkrev, source)
943
936
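The fastpathlinkrev test above boils down to: trust linkrevs either when the caller forces it or when every unfiltered head was requested, since then every linkrev target is guaranteed to reach the client. A tiny standalone sketch of that condition (names are illustrative only):

def use_linkrev_fastpath(forced, filtername, requested_heads, repo_heads):
    return forced or (filtername is None
                      and sorted(requested_heads) == sorted(repo_heads))

assert use_linkrev_fastpath(False, None, ['h2', 'h1'], ['h1', 'h2'])
assert not use_linkrev_fastpath(False, 'visible', ['h1'], ['h1'])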
944 def getsubset(repo, outgoing, bundler, source, fastpath=False):
937 def getsubset(repo, outgoing, bundler, source, fastpath=False):
945 gengroup = getsubsetraw(repo, outgoing, bundler, source, fastpath)
938 gengroup = getsubsetraw(repo, outgoing, bundler, source, fastpath)
946 return getunbundler(bundler.version, util.chunkbuffer(gengroup), None,
939 return getunbundler(bundler.version, util.chunkbuffer(gengroup), None,
947 {'clcount': len(outgoing.missing)})
940 {'clcount': len(outgoing.missing)})
948
941
949 def changegroupsubset(repo, roots, heads, source, version='01'):
942 def changegroupsubset(repo, roots, heads, source, version='01'):
950 """Compute a changegroup consisting of all the nodes that are
943 """Compute a changegroup consisting of all the nodes that are
951 descendants of any of the roots and ancestors of any of the heads.
944 descendants of any of the roots and ancestors of any of the heads.
952 Return a chunkbuffer object whose read() method will return
945 Return a chunkbuffer object whose read() method will return
953 successive changegroup chunks.
946 successive changegroup chunks.
954
947
955 It is fairly complex as determining which filenodes and which
948 It is fairly complex as determining which filenodes and which
956 manifest nodes need to be included for the changeset to be complete
949 manifest nodes need to be included for the changeset to be complete
957 is non-trivial.
950 is non-trivial.
958
951
959 Another wrinkle is doing the reverse, figuring out which changeset in
952 Another wrinkle is doing the reverse, figuring out which changeset in
960 the changegroup a particular filenode or manifestnode belongs to.
953 the changegroup a particular filenode or manifestnode belongs to.
961 """
954 """
962 outgoing = discovery.outgoing(repo, missingroots=roots, missingheads=heads)
955 outgoing = discovery.outgoing(repo, missingroots=roots, missingheads=heads)
963 bundler = getbundler(version, repo)
956 bundler = getbundler(version, repo)
964 return getsubset(repo, outgoing, bundler, source)
957 return getsubset(repo, outgoing, bundler, source)
965
958
966 def getlocalchangegroupraw(repo, source, outgoing, bundlecaps=None,
967 version='01'):
959 def getlocalchangegroupraw(repo, source, outgoing, version='01'):
968 """Like getbundle, but taking a discovery.outgoing as an argument.
960 """Like getbundle, but taking a discovery.outgoing as an argument.
969
961
970 This is only implemented for local repos and reuses potentially
962 This is only implemented for local repos and reuses potentially
971 precomputed sets in outgoing. Returns a raw changegroup generator."""
963 precomputed sets in outgoing. Returns a raw changegroup generator."""
972 if not outgoing.missing:
964 if not outgoing.missing:
973 return None
965 return None
974 bundler = getbundler(version, repo, bundlecaps)
966 bundler = getbundler(version, repo)
975 return getsubsetraw(repo, outgoing, bundler, source)
967 return getsubsetraw(repo, outgoing, bundler, source)
976
968
977 def getlocalchangegroup(repo, source, outgoing, bundlecaps=None,
978 version='01'):
969 def getlocalchangegroup(repo, source, outgoing, version='01'):
979 """Like getbundle, but taking a discovery.outgoing as an argument.
970 """Like getbundle, but taking a discovery.outgoing as an argument.
980
971
981 This is only implemented for local repos and reuses potentially
972 This is only implemented for local repos and reuses potentially
982 precomputed sets in outgoing."""
973 precomputed sets in outgoing."""
983 if not outgoing.missing:
974 if not outgoing.missing:
984 return None
975 return None
985 bundler = getbundler(version, repo, bundlecaps)
976 bundler = getbundler(version, repo)
986 return getsubset(repo, outgoing, bundler, source)
977 return getsubset(repo, outgoing, bundler, source)
987
978
988 def getchangegroup(repo, source, outgoing, bundlecaps=None,
989 version='01'):
979 def getchangegroup(repo, source, outgoing, version='01'):
990 """Like changegroupsubset, but returns the set difference between the
980 """Like changegroupsubset, but returns the set difference between the
991 ancestors of heads and the ancestors of common.
981 ancestors of heads and the ancestors of common.
992
982
993 If heads is None, use the local heads. If common is None, use [nullid].
983 If heads is None, use the local heads. If common is None, use [nullid].
994
984
995 The nodes in common might not all be known locally due to the way the
985 The nodes in common might not all be known locally due to the way the
996 current discovery protocol works.
986 current discovery protocol works.
997 """
987 """
998 return getlocalchangegroup(repo, source, outgoing, bundlecaps=bundlecaps,
999 version=version)
988 return getlocalchangegroup(repo, source, outgoing, version=version)
1000
989
1001 def changegroup(repo, basenodes, source):
990 def changegroup(repo, basenodes, source):
1002 # to avoid a race we use changegroupsubset() (issue1320)
991 # to avoid a race we use changegroupsubset() (issue1320)
1003 return changegroupsubset(repo, basenodes, repo.heads(), source)
992 return changegroupsubset(repo, basenodes, repo.heads(), source)
1004
993
1005 def _addchangegroupfiles(repo, source, revmap, trp, expectedfiles, needfiles):
994 def _addchangegroupfiles(repo, source, revmap, trp, expectedfiles, needfiles):
1006 revisions = 0
995 revisions = 0
1007 files = 0
996 files = 0
1008 for chunkdata in iter(source.filelogheader, {}):
997 for chunkdata in iter(source.filelogheader, {}):
1009 files += 1
998 files += 1
1010 f = chunkdata["filename"]
999 f = chunkdata["filename"]
1011 repo.ui.debug("adding %s revisions\n" % f)
1000 repo.ui.debug("adding %s revisions\n" % f)
1012 repo.ui.progress(_('files'), files, unit=_('files'),
1001 repo.ui.progress(_('files'), files, unit=_('files'),
1013 total=expectedfiles)
1002 total=expectedfiles)
1014 fl = repo.file(f)
1003 fl = repo.file(f)
1015 o = len(fl)
1004 o = len(fl)
1016 try:
1005 try:
1017 if not fl.addgroup(source, revmap, trp):
1006 if not fl.addgroup(source, revmap, trp):
1018 raise error.Abort(_("received file revlog group is empty"))
1007 raise error.Abort(_("received file revlog group is empty"))
1019 except error.CensoredBaseError as e:
1008 except error.CensoredBaseError as e:
1020 raise error.Abort(_("received delta base is censored: %s") % e)
1009 raise error.Abort(_("received delta base is censored: %s") % e)
1021 revisions += len(fl) - o
1010 revisions += len(fl) - o
1022 if f in needfiles:
1011 if f in needfiles:
1023 needs = needfiles[f]
1012 needs = needfiles[f]
1024 for new in xrange(o, len(fl)):
1013 for new in xrange(o, len(fl)):
1025 n = fl.node(new)
1014 n = fl.node(new)
1026 if n in needs:
1015 if n in needs:
1027 needs.remove(n)
1016 needs.remove(n)
1028 else:
1017 else:
1029 raise error.Abort(
1018 raise error.Abort(
1030 _("received spurious file revlog entry"))
1019 _("received spurious file revlog entry"))
1031 if not needs:
1020 if not needs:
1032 del needfiles[f]
1021 del needfiles[f]
1033 repo.ui.progress(_('files'), None)
1022 repo.ui.progress(_('files'), None)
1034
1023
1035 for f, needs in needfiles.iteritems():
1024 for f, needs in needfiles.iteritems():
1036 fl = repo.file(f)
1025 fl = repo.file(f)
1037 for n in needs:
1026 for n in needs:
1038 try:
1027 try:
1039 fl.rev(n)
1028 fl.rev(n)
1040 except error.LookupError:
1029 except error.LookupError:
1041 raise error.Abort(
1030 raise error.Abort(
1042 _('missing file data for %s:%s - run hg verify') %
1031 _('missing file data for %s:%s - run hg verify') %
1043 (f, hex(n)))
1032 (f, hex(n)))
1044
1033
1045 return revisions, files
1034 return revisions, files
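The needfiles bookkeeping in _addchangegroupfiles checks off file nodes as they arrive and treats anything left over as missing data. A standalone sketch of just that accounting, with plain dicts and sets in place of revlogs (the helper and its arguments are invented for illustration):

def check_off(needfiles, received):
    # received maps filename -> list of nodes that arrived in the group
    for f, nodes in received.items():
        needs = needfiles.get(f)
        if needs is None:
            continue
        for n in nodes:
            if n in needs:
                needs.remove(n)
            else:
                raise ValueError('received spurious file revlog entry')
        if not needs:
            del needfiles[f]
    return needfiles   # anything remaining would trigger the 'run hg verify' abort

left = check_off({'a.txt': {'n1', 'n2'}}, {'a.txt': ['n1']})
assert left == {'a.txt': {'n2'}}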
@@ -1,5519 +1,5516 b''
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import difflib
10 import difflib
11 import errno
11 import errno
12 import os
12 import os
13 import re
13 import re
14
14
15 from .i18n import _
15 from .i18n import _
16 from .node import (
16 from .node import (
17 hex,
17 hex,
18 nullid,
18 nullid,
19 nullrev,
19 nullrev,
20 short,
20 short,
21 )
21 )
22 from . import (
22 from . import (
23 archival,
23 archival,
24 bookmarks,
24 bookmarks,
25 bundle2,
25 bundle2,
26 changegroup,
26 changegroup,
27 cmdutil,
27 cmdutil,
28 copies,
28 copies,
29 destutil,
29 destutil,
30 dirstateguard,
30 dirstateguard,
31 discovery,
31 discovery,
32 encoding,
32 encoding,
33 error,
33 error,
34 exchange,
34 exchange,
35 extensions,
35 extensions,
36 graphmod,
36 graphmod,
37 hbisect,
37 hbisect,
38 help,
38 help,
39 hg,
39 hg,
40 lock as lockmod,
40 lock as lockmod,
41 merge as mergemod,
41 merge as mergemod,
42 obsolete,
42 obsolete,
43 patch,
43 patch,
44 phases,
44 phases,
45 pycompat,
45 pycompat,
46 rcutil,
46 rcutil,
47 revsetlang,
47 revsetlang,
48 scmutil,
48 scmutil,
49 server,
49 server,
50 sshserver,
50 sshserver,
51 streamclone,
51 streamclone,
52 tags as tagsmod,
52 tags as tagsmod,
53 templatekw,
53 templatekw,
54 ui as uimod,
54 ui as uimod,
55 util,
55 util,
56 )
56 )
57
57
58 release = lockmod.release
58 release = lockmod.release
59
59
60 table = {}
60 table = {}
61
61
62 command = cmdutil.command(table)
62 command = cmdutil.command(table)
63
63
64 # label constants
64 # label constants
65 # until 3.5, bookmarks.current was the advertised name, not
65 # until 3.5, bookmarks.current was the advertised name, not
66 # bookmarks.active, so we must use both to avoid breaking old
66 # bookmarks.active, so we must use both to avoid breaking old
67 # custom styles
67 # custom styles
68 activebookmarklabel = 'bookmarks.active bookmarks.current'
68 activebookmarklabel = 'bookmarks.active bookmarks.current'
69
69
70 # common command options
70 # common command options
71
71
72 globalopts = [
72 globalopts = [
73 ('R', 'repository', '',
73 ('R', 'repository', '',
74 _('repository root directory or name of overlay bundle file'),
74 _('repository root directory or name of overlay bundle file'),
75 _('REPO')),
75 _('REPO')),
76 ('', 'cwd', '',
76 ('', 'cwd', '',
77 _('change working directory'), _('DIR')),
77 _('change working directory'), _('DIR')),
78 ('y', 'noninteractive', None,
78 ('y', 'noninteractive', None,
79 _('do not prompt, automatically pick the first choice for all prompts')),
79 _('do not prompt, automatically pick the first choice for all prompts')),
80 ('q', 'quiet', None, _('suppress output')),
80 ('q', 'quiet', None, _('suppress output')),
81 ('v', 'verbose', None, _('enable additional output')),
81 ('v', 'verbose', None, _('enable additional output')),
82 ('', 'color', '',
82 ('', 'color', '',
83 # i18n: 'always', 'auto', 'never', and 'debug' are keywords
83 # i18n: 'always', 'auto', 'never', and 'debug' are keywords
84 # and should not be translated
84 # and should not be translated
85 _("when to colorize (boolean, always, auto, never, or debug)"),
85 _("when to colorize (boolean, always, auto, never, or debug)"),
86 _('TYPE')),
86 _('TYPE')),
87 ('', 'config', [],
87 ('', 'config', [],
88 _('set/override config option (use \'section.name=value\')'),
88 _('set/override config option (use \'section.name=value\')'),
89 _('CONFIG')),
89 _('CONFIG')),
90 ('', 'debug', None, _('enable debugging output')),
90 ('', 'debug', None, _('enable debugging output')),
91 ('', 'debugger', None, _('start debugger')),
91 ('', 'debugger', None, _('start debugger')),
92 ('', 'encoding', encoding.encoding, _('set the charset encoding'),
92 ('', 'encoding', encoding.encoding, _('set the charset encoding'),
93 _('ENCODE')),
93 _('ENCODE')),
94 ('', 'encodingmode', encoding.encodingmode,
94 ('', 'encodingmode', encoding.encodingmode,
95 _('set the charset encoding mode'), _('MODE')),
95 _('set the charset encoding mode'), _('MODE')),
96 ('', 'traceback', None, _('always print a traceback on exception')),
96 ('', 'traceback', None, _('always print a traceback on exception')),
97 ('', 'time', None, _('time how long the command takes')),
97 ('', 'time', None, _('time how long the command takes')),
98 ('', 'profile', None, _('print command execution profile')),
98 ('', 'profile', None, _('print command execution profile')),
99 ('', 'version', None, _('output version information and exit')),
99 ('', 'version', None, _('output version information and exit')),
100 ('h', 'help', None, _('display help and exit')),
100 ('h', 'help', None, _('display help and exit')),
101 ('', 'hidden', False, _('consider hidden changesets')),
101 ('', 'hidden', False, _('consider hidden changesets')),
102 ('', 'pager', 'auto',
102 ('', 'pager', 'auto',
103 _("when to paginate (boolean, always, auto, or never)"), _('TYPE')),
103 _("when to paginate (boolean, always, auto, or never)"), _('TYPE')),
104 ]
104 ]
105
105
106 dryrunopts = [('n', 'dry-run', None,
106 dryrunopts = [('n', 'dry-run', None,
107 _('do not perform actions, just print output'))]
107 _('do not perform actions, just print output'))]
108
108
109 remoteopts = [
109 remoteopts = [
110 ('e', 'ssh', '',
110 ('e', 'ssh', '',
111 _('specify ssh command to use'), _('CMD')),
111 _('specify ssh command to use'), _('CMD')),
112 ('', 'remotecmd', '',
112 ('', 'remotecmd', '',
113 _('specify hg command to run on the remote side'), _('CMD')),
113 _('specify hg command to run on the remote side'), _('CMD')),
114 ('', 'insecure', None,
114 ('', 'insecure', None,
115 _('do not verify server certificate (ignoring web.cacerts config)')),
115 _('do not verify server certificate (ignoring web.cacerts config)')),
116 ]
116 ]
117
117
118 walkopts = [
118 walkopts = [
119 ('I', 'include', [],
119 ('I', 'include', [],
120 _('include names matching the given patterns'), _('PATTERN')),
120 _('include names matching the given patterns'), _('PATTERN')),
121 ('X', 'exclude', [],
121 ('X', 'exclude', [],
122 _('exclude names matching the given patterns'), _('PATTERN')),
122 _('exclude names matching the given patterns'), _('PATTERN')),
123 ]
123 ]
124
124
125 commitopts = [
125 commitopts = [
126 ('m', 'message', '',
126 ('m', 'message', '',
127 _('use text as commit message'), _('TEXT')),
127 _('use text as commit message'), _('TEXT')),
128 ('l', 'logfile', '',
128 ('l', 'logfile', '',
129 _('read commit message from file'), _('FILE')),
129 _('read commit message from file'), _('FILE')),
130 ]
130 ]
131
131
132 commitopts2 = [
132 commitopts2 = [
133 ('d', 'date', '',
133 ('d', 'date', '',
134 _('record the specified date as commit date'), _('DATE')),
134 _('record the specified date as commit date'), _('DATE')),
135 ('u', 'user', '',
135 ('u', 'user', '',
136 _('record the specified user as committer'), _('USER')),
136 _('record the specified user as committer'), _('USER')),
137 ]
137 ]
138
138
139 # hidden for now
139 # hidden for now
140 formatteropts = [
140 formatteropts = [
141 ('T', 'template', '',
141 ('T', 'template', '',
142 _('display with template (EXPERIMENTAL)'), _('TEMPLATE')),
142 _('display with template (EXPERIMENTAL)'), _('TEMPLATE')),
143 ]
143 ]
144
144
145 templateopts = [
145 templateopts = [
146 ('', 'style', '',
146 ('', 'style', '',
147 _('display using template map file (DEPRECATED)'), _('STYLE')),
147 _('display using template map file (DEPRECATED)'), _('STYLE')),
148 ('T', 'template', '',
148 ('T', 'template', '',
149 _('display with template'), _('TEMPLATE')),
149 _('display with template'), _('TEMPLATE')),
150 ]
150 ]
151
151
152 logopts = [
152 logopts = [
153 ('p', 'patch', None, _('show patch')),
153 ('p', 'patch', None, _('show patch')),
154 ('g', 'git', None, _('use git extended diff format')),
154 ('g', 'git', None, _('use git extended diff format')),
155 ('l', 'limit', '',
155 ('l', 'limit', '',
156 _('limit number of changes displayed'), _('NUM')),
156 _('limit number of changes displayed'), _('NUM')),
157 ('M', 'no-merges', None, _('do not show merges')),
157 ('M', 'no-merges', None, _('do not show merges')),
158 ('', 'stat', None, _('output diffstat-style summary of changes')),
158 ('', 'stat', None, _('output diffstat-style summary of changes')),
159 ('G', 'graph', None, _("show the revision DAG")),
159 ('G', 'graph', None, _("show the revision DAG")),
160 ] + templateopts
160 ] + templateopts
161
161
162 diffopts = [
162 diffopts = [
163 ('a', 'text', None, _('treat all files as text')),
163 ('a', 'text', None, _('treat all files as text')),
164 ('g', 'git', None, _('use git extended diff format')),
164 ('g', 'git', None, _('use git extended diff format')),
165 ('', 'binary', None, _('generate binary diffs in git mode (default)')),
165 ('', 'binary', None, _('generate binary diffs in git mode (default)')),
166 ('', 'nodates', None, _('omit dates from diff headers'))
166 ('', 'nodates', None, _('omit dates from diff headers'))
167 ]
167 ]
168
168
169 diffwsopts = [
169 diffwsopts = [
170 ('w', 'ignore-all-space', None,
170 ('w', 'ignore-all-space', None,
171 _('ignore white space when comparing lines')),
171 _('ignore white space when comparing lines')),
172 ('b', 'ignore-space-change', None,
172 ('b', 'ignore-space-change', None,
173 _('ignore changes in the amount of white space')),
173 _('ignore changes in the amount of white space')),
174 ('B', 'ignore-blank-lines', None,
174 ('B', 'ignore-blank-lines', None,
175 _('ignore changes whose lines are all blank')),
175 _('ignore changes whose lines are all blank')),
176 ]
176 ]
177
177
178 diffopts2 = [
178 diffopts2 = [
179 ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
179 ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
180 ('p', 'show-function', None, _('show which function each change is in')),
180 ('p', 'show-function', None, _('show which function each change is in')),
181 ('', 'reverse', None, _('produce a diff that undoes the changes')),
181 ('', 'reverse', None, _('produce a diff that undoes the changes')),
182 ] + diffwsopts + [
182 ] + diffwsopts + [
183 ('U', 'unified', '',
183 ('U', 'unified', '',
184 _('number of lines of context to show'), _('NUM')),
184 _('number of lines of context to show'), _('NUM')),
185 ('', 'stat', None, _('output diffstat-style summary of changes')),
185 ('', 'stat', None, _('output diffstat-style summary of changes')),
186 ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
186 ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
187 ]
187 ]
188
188
189 mergetoolopts = [
189 mergetoolopts = [
190 ('t', 'tool', '', _('specify merge tool')),
190 ('t', 'tool', '', _('specify merge tool')),
191 ]
191 ]
192
192
193 similarityopts = [
193 similarityopts = [
194 ('s', 'similarity', '',
194 ('s', 'similarity', '',
195 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
195 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
196 ]
196 ]
197
197
198 subrepoopts = [
198 subrepoopts = [
199 ('S', 'subrepos', None,
199 ('S', 'subrepos', None,
200 _('recurse into subrepositories'))
200 _('recurse into subrepositories'))
201 ]
201 ]
202
202
203 debugrevlogopts = [
203 debugrevlogopts = [
204 ('c', 'changelog', False, _('open changelog')),
204 ('c', 'changelog', False, _('open changelog')),
205 ('m', 'manifest', False, _('open manifest')),
205 ('m', 'manifest', False, _('open manifest')),
206 ('', 'dir', '', _('open directory manifest')),
206 ('', 'dir', '', _('open directory manifest')),
207 ]
207 ]
208
208
209 # Commands start here, listed alphabetically
209 # Commands start here, listed alphabetically
210
210
211 @command('^add',
211 @command('^add',
212 walkopts + subrepoopts + dryrunopts,
212 walkopts + subrepoopts + dryrunopts,
213 _('[OPTION]... [FILE]...'),
213 _('[OPTION]... [FILE]...'),
214 inferrepo=True)
214 inferrepo=True)
215 def add(ui, repo, *pats, **opts):
215 def add(ui, repo, *pats, **opts):
216 """add the specified files on the next commit
216 """add the specified files on the next commit
217
217
218 Schedule files to be version controlled and added to the
218 Schedule files to be version controlled and added to the
219 repository.
219 repository.
220
220
221 The files will be added to the repository at the next commit. To
221 The files will be added to the repository at the next commit. To
222 undo an add before that, see :hg:`forget`.
222 undo an add before that, see :hg:`forget`.
223
223
224 If no names are given, add all files to the repository (except
224 If no names are given, add all files to the repository (except
225 files matching ``.hgignore``).
225 files matching ``.hgignore``).
226
226
227 .. container:: verbose
227 .. container:: verbose
228
228
229 Examples:
229 Examples:
230
230
231 - New (unknown) files are added
231 - New (unknown) files are added
232 automatically by :hg:`add`::
232 automatically by :hg:`add`::
233
233
234 $ ls
234 $ ls
235 foo.c
235 foo.c
236 $ hg status
236 $ hg status
237 ? foo.c
237 ? foo.c
238 $ hg add
238 $ hg add
239 adding foo.c
239 adding foo.c
240 $ hg status
240 $ hg status
241 A foo.c
241 A foo.c
242
242
243 - Specific files to be added can be specified::
243 - Specific files to be added can be specified::
244
244
245 $ ls
245 $ ls
246 bar.c foo.c
246 bar.c foo.c
247 $ hg status
247 $ hg status
248 ? bar.c
248 ? bar.c
249 ? foo.c
249 ? foo.c
250 $ hg add bar.c
250 $ hg add bar.c
251 $ hg status
251 $ hg status
252 A bar.c
252 A bar.c
253 ? foo.c
253 ? foo.c
254
254
255 Returns 0 if all files are successfully added.
255 Returns 0 if all files are successfully added.
256 """
256 """
257
257
258 m = scmutil.match(repo[None], pats, pycompat.byteskwargs(opts))
258 m = scmutil.match(repo[None], pats, pycompat.byteskwargs(opts))
259 rejected = cmdutil.add(ui, repo, m, "", False, **opts)
259 rejected = cmdutil.add(ui, repo, m, "", False, **opts)
260 return rejected and 1 or 0
260 return rejected and 1 or 0
261
261
262 @command('addremove',
262 @command('addremove',
263 similarityopts + subrepoopts + walkopts + dryrunopts,
263 similarityopts + subrepoopts + walkopts + dryrunopts,
264 _('[OPTION]... [FILE]...'),
264 _('[OPTION]... [FILE]...'),
265 inferrepo=True)
265 inferrepo=True)
266 def addremove(ui, repo, *pats, **opts):
266 def addremove(ui, repo, *pats, **opts):
267 """add all new files, delete all missing files
267 """add all new files, delete all missing files
268
268
269 Add all new files and remove all missing files from the
269 Add all new files and remove all missing files from the
270 repository.
270 repository.
271
271
272 Unless names are given, new files are ignored if they match any of
272 Unless names are given, new files are ignored if they match any of
273 the patterns in ``.hgignore``. As with add, these changes take
273 the patterns in ``.hgignore``. As with add, these changes take
274 effect at the next commit.
274 effect at the next commit.
275
275
276 Use the -s/--similarity option to detect renamed files. This
276 Use the -s/--similarity option to detect renamed files. This
277 option takes a percentage between 0 (disabled) and 100 (files must
277 option takes a percentage between 0 (disabled) and 100 (files must
278 be identical) as its parameter. With a parameter greater than 0,
278 be identical) as its parameter. With a parameter greater than 0,
279 this compares every removed file with every added file and records
279 this compares every removed file with every added file and records
280 those similar enough as renames. Detecting renamed files this way
280 those similar enough as renames. Detecting renamed files this way
281 can be expensive. After using this option, :hg:`status -C` can be
281 can be expensive. After using this option, :hg:`status -C` can be
282 used to check which files were identified as moved or renamed. If
282 used to check which files were identified as moved or renamed. If
283 not specified, -s/--similarity defaults to 100 and only renames of
283 not specified, -s/--similarity defaults to 100 and only renames of
284 identical files are detected.
284 identical files are detected.
285
285
286 .. container:: verbose
286 .. container:: verbose
287
287
288 Examples:
288 Examples:
289
289
290 - A number of files (bar.c and foo.c) are new,
290 - A number of files (bar.c and foo.c) are new,
291 while foobar.c has been removed (without using :hg:`remove`)
291 while foobar.c has been removed (without using :hg:`remove`)
292 from the repository::
292 from the repository::
293
293
294 $ ls
294 $ ls
295 bar.c foo.c
295 bar.c foo.c
296 $ hg status
296 $ hg status
297 ! foobar.c
297 ! foobar.c
298 ? bar.c
298 ? bar.c
299 ? foo.c
299 ? foo.c
300 $ hg addremove
300 $ hg addremove
301 adding bar.c
301 adding bar.c
302 adding foo.c
302 adding foo.c
303 removing foobar.c
303 removing foobar.c
304 $ hg status
304 $ hg status
305 A bar.c
305 A bar.c
306 A foo.c
306 A foo.c
307 R foobar.c
307 R foobar.c
308
308
309 - A file foobar.c was moved to foo.c without using :hg:`rename`.
309 - A file foobar.c was moved to foo.c without using :hg:`rename`.
310 Afterwards, it was edited slightly::
310 Afterwards, it was edited slightly::
311
311
312 $ ls
312 $ ls
313 foo.c
313 foo.c
314 $ hg status
314 $ hg status
315 ! foobar.c
315 ! foobar.c
316 ? foo.c
316 ? foo.c
317 $ hg addremove --similarity 90
317 $ hg addremove --similarity 90
318 removing foobar.c
318 removing foobar.c
319 adding foo.c
319 adding foo.c
320 recording removal of foobar.c as rename to foo.c (94% similar)
320 recording removal of foobar.c as rename to foo.c (94% similar)
321 $ hg status -C
321 $ hg status -C
322 A foo.c
322 A foo.c
323 foobar.c
323 foobar.c
324 R foobar.c
324 R foobar.c
325
325
326 Returns 0 if all files are successfully added.
326 Returns 0 if all files are successfully added.
327 """
327 """
328 opts = pycompat.byteskwargs(opts)
328 opts = pycompat.byteskwargs(opts)
329 try:
329 try:
330 sim = float(opts.get('similarity') or 100)
330 sim = float(opts.get('similarity') or 100)
331 except ValueError:
331 except ValueError:
332 raise error.Abort(_('similarity must be a number'))
332 raise error.Abort(_('similarity must be a number'))
333 if sim < 0 or sim > 100:
333 if sim < 0 or sim > 100:
334 raise error.Abort(_('similarity must be between 0 and 100'))
334 raise error.Abort(_('similarity must be between 0 and 100'))
335 matcher = scmutil.match(repo[None], pats, opts)
335 matcher = scmutil.match(repo[None], pats, opts)
336 return scmutil.addremove(repo, matcher, "", opts, similarity=sim / 100.0)
336 return scmutil.addremove(repo, matcher, "", opts, similarity=sim / 100.0)
337
337
338 @command('^annotate|blame',
338 @command('^annotate|blame',
339 [('r', 'rev', '', _('annotate the specified revision'), _('REV')),
339 [('r', 'rev', '', _('annotate the specified revision'), _('REV')),
340 ('', 'follow', None,
340 ('', 'follow', None,
341 _('follow copies/renames and list the filename (DEPRECATED)')),
341 _('follow copies/renames and list the filename (DEPRECATED)')),
342 ('', 'no-follow', None, _("don't follow copies and renames")),
342 ('', 'no-follow', None, _("don't follow copies and renames")),
343 ('a', 'text', None, _('treat all files as text')),
343 ('a', 'text', None, _('treat all files as text')),
344 ('u', 'user', None, _('list the author (long with -v)')),
344 ('u', 'user', None, _('list the author (long with -v)')),
345 ('f', 'file', None, _('list the filename')),
345 ('f', 'file', None, _('list the filename')),
346 ('d', 'date', None, _('list the date (short with -q)')),
346 ('d', 'date', None, _('list the date (short with -q)')),
347 ('n', 'number', None, _('list the revision number (default)')),
347 ('n', 'number', None, _('list the revision number (default)')),
348 ('c', 'changeset', None, _('list the changeset')),
348 ('c', 'changeset', None, _('list the changeset')),
349 ('l', 'line-number', None, _('show line number at the first appearance'))
349 ('l', 'line-number', None, _('show line number at the first appearance'))
350 ] + diffwsopts + walkopts + formatteropts,
350 ] + diffwsopts + walkopts + formatteropts,
351 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'),
351 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'),
352 inferrepo=True)
352 inferrepo=True)
353 def annotate(ui, repo, *pats, **opts):
353 def annotate(ui, repo, *pats, **opts):
354 """show changeset information by line for each file
354 """show changeset information by line for each file
355
355
356 List changes in files, showing the revision id responsible for
356 List changes in files, showing the revision id responsible for
357 each line.
357 each line.
358
358
359 This command is useful for discovering when a change was made and
359 This command is useful for discovering when a change was made and
360 by whom.
360 by whom.
361
361
362 If you include --file, --user, or --date, the revision number is
362 If you include --file, --user, or --date, the revision number is
363 suppressed unless you also include --number.
363 suppressed unless you also include --number.
364
364
365 Without the -a/--text option, annotate will avoid processing files
365 Without the -a/--text option, annotate will avoid processing files
366 it detects as binary. With -a, annotate will annotate the file
366 it detects as binary. With -a, annotate will annotate the file
367 anyway, although the results will probably be neither useful
367 anyway, although the results will probably be neither useful
368 nor desirable.
368 nor desirable.
369
369
370 Returns 0 on success.
370 Returns 0 on success.
371 """
371 """
372 opts = pycompat.byteskwargs(opts)
372 opts = pycompat.byteskwargs(opts)
373 if not pats:
373 if not pats:
374 raise error.Abort(_('at least one filename or pattern is required'))
374 raise error.Abort(_('at least one filename or pattern is required'))
375
375
376 if opts.get('follow'):
376 if opts.get('follow'):
377 # --follow is deprecated and now just an alias for -f/--file
377 # --follow is deprecated and now just an alias for -f/--file
378 # to mimic the behavior of Mercurial before version 1.5
378 # to mimic the behavior of Mercurial before version 1.5
379 opts['file'] = True
379 opts['file'] = True
380
380
381 ctx = scmutil.revsingle(repo, opts.get('rev'))
381 ctx = scmutil.revsingle(repo, opts.get('rev'))
382
382
383 fm = ui.formatter('annotate', opts)
383 fm = ui.formatter('annotate', opts)
384 if ui.quiet:
384 if ui.quiet:
385 datefunc = util.shortdate
385 datefunc = util.shortdate
386 else:
386 else:
387 datefunc = util.datestr
387 datefunc = util.datestr
388 if ctx.rev() is None:
388 if ctx.rev() is None:
389 def hexfn(node):
389 def hexfn(node):
390 if node is None:
390 if node is None:
391 return None
391 return None
392 else:
392 else:
393 return fm.hexfunc(node)
393 return fm.hexfunc(node)
394 if opts.get('changeset'):
394 if opts.get('changeset'):
395 # omit "+" suffix which is appended to node hex
395 # omit "+" suffix which is appended to node hex
396 def formatrev(rev):
396 def formatrev(rev):
397 if rev is None:
397 if rev is None:
398 return '%d' % ctx.p1().rev()
398 return '%d' % ctx.p1().rev()
399 else:
399 else:
400 return '%d' % rev
400 return '%d' % rev
401 else:
401 else:
402 def formatrev(rev):
402 def formatrev(rev):
403 if rev is None:
403 if rev is None:
404 return '%d+' % ctx.p1().rev()
404 return '%d+' % ctx.p1().rev()
405 else:
405 else:
406 return '%d ' % rev
406 return '%d ' % rev
407 def formathex(hex):
407 def formathex(hex):
408 if hex is None:
408 if hex is None:
409 return '%s+' % fm.hexfunc(ctx.p1().node())
409 return '%s+' % fm.hexfunc(ctx.p1().node())
410 else:
410 else:
411 return '%s ' % hex
411 return '%s ' % hex
412 else:
412 else:
413 hexfn = fm.hexfunc
413 hexfn = fm.hexfunc
414 formatrev = formathex = str
414 formatrev = formathex = str
415
415
416 opmap = [('user', ' ', lambda x: x[0].user(), ui.shortuser),
416 opmap = [('user', ' ', lambda x: x[0].user(), ui.shortuser),
417 ('number', ' ', lambda x: x[0].rev(), formatrev),
417 ('number', ' ', lambda x: x[0].rev(), formatrev),
418 ('changeset', ' ', lambda x: hexfn(x[0].node()), formathex),
418 ('changeset', ' ', lambda x: hexfn(x[0].node()), formathex),
419 ('date', ' ', lambda x: x[0].date(), util.cachefunc(datefunc)),
419 ('date', ' ', lambda x: x[0].date(), util.cachefunc(datefunc)),
420 ('file', ' ', lambda x: x[0].path(), str),
420 ('file', ' ', lambda x: x[0].path(), str),
421 ('line_number', ':', lambda x: x[1], str),
421 ('line_number', ':', lambda x: x[1], str),
422 ]
422 ]
423 fieldnamemap = {'number': 'rev', 'changeset': 'node'}
423 fieldnamemap = {'number': 'rev', 'changeset': 'node'}
424
424
425 if (not opts.get('user') and not opts.get('changeset')
425 if (not opts.get('user') and not opts.get('changeset')
426 and not opts.get('date') and not opts.get('file')):
426 and not opts.get('date') and not opts.get('file')):
427 opts['number'] = True
427 opts['number'] = True
428
428
429 linenumber = opts.get('line_number') is not None
429 linenumber = opts.get('line_number') is not None
430 if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
430 if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
431 raise error.Abort(_('at least one of -n/-c is required for -l'))
431 raise error.Abort(_('at least one of -n/-c is required for -l'))
432
432
433 ui.pager('annotate')
433 ui.pager('annotate')
434
434
435 if fm.isplain():
435 if fm.isplain():
436 def makefunc(get, fmt):
436 def makefunc(get, fmt):
437 return lambda x: fmt(get(x))
437 return lambda x: fmt(get(x))
438 else:
438 else:
439 def makefunc(get, fmt):
439 def makefunc(get, fmt):
440 return get
440 return get
441 funcmap = [(makefunc(get, fmt), sep) for op, sep, get, fmt in opmap
441 funcmap = [(makefunc(get, fmt), sep) for op, sep, get, fmt in opmap
442 if opts.get(op)]
442 if opts.get(op)]
443 funcmap[0] = (funcmap[0][0], '') # no separator in front of first column
443 funcmap[0] = (funcmap[0][0], '') # no separator in front of first column
444 fields = ' '.join(fieldnamemap.get(op, op) for op, sep, get, fmt in opmap
444 fields = ' '.join(fieldnamemap.get(op, op) for op, sep, get, fmt in opmap
445 if opts.get(op))
445 if opts.get(op))
446
446
447 def bad(x, y):
447 def bad(x, y):
448 raise error.Abort("%s: %s" % (x, y))
448 raise error.Abort("%s: %s" % (x, y))
449
449
450 m = scmutil.match(ctx, pats, opts, badfn=bad)
450 m = scmutil.match(ctx, pats, opts, badfn=bad)
451
451
452 follow = not opts.get('no_follow')
452 follow = not opts.get('no_follow')
453 diffopts = patch.difffeatureopts(ui, opts, section='annotate',
453 diffopts = patch.difffeatureopts(ui, opts, section='annotate',
454 whitespace=True)
454 whitespace=True)
455 for abs in ctx.walk(m):
455 for abs in ctx.walk(m):
456 fctx = ctx[abs]
456 fctx = ctx[abs]
457 if not opts.get('text') and fctx.isbinary():
457 if not opts.get('text') and fctx.isbinary():
458 fm.plain(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
458 fm.plain(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
459 continue
459 continue
460
460
461 lines = fctx.annotate(follow=follow, linenumber=linenumber,
461 lines = fctx.annotate(follow=follow, linenumber=linenumber,
462 diffopts=diffopts)
462 diffopts=diffopts)
463 if not lines:
463 if not lines:
464 continue
464 continue
465 formats = []
465 formats = []
466 pieces = []
466 pieces = []
467
467
468 for f, sep in funcmap:
468 for f, sep in funcmap:
469 l = [f(n) for n, dummy in lines]
469 l = [f(n) for n, dummy in lines]
470 if fm.isplain():
470 if fm.isplain():
471 sizes = [encoding.colwidth(x) for x in l]
471 sizes = [encoding.colwidth(x) for x in l]
472 ml = max(sizes)
472 ml = max(sizes)
473 formats.append([sep + ' ' * (ml - w) + '%s' for w in sizes])
473 formats.append([sep + ' ' * (ml - w) + '%s' for w in sizes])
474 else:
474 else:
475 formats.append(['%s' for x in l])
475 formats.append(['%s' for x in l])
476 pieces.append(l)
476 pieces.append(l)
477
477
478 for f, p, l in zip(zip(*formats), zip(*pieces), lines):
478 for f, p, l in zip(zip(*formats), zip(*pieces), lines):
479 fm.startitem()
479 fm.startitem()
480 fm.write(fields, "".join(f), *p)
480 fm.write(fields, "".join(f), *p)
481 fm.write('line', ": %s", l[1])
481 fm.write('line', ": %s", l[1])
482
482
483 if not lines[-1][1].endswith('\n'):
483 if not lines[-1][1].endswith('\n'):
484 fm.plain('\n')
484 fm.plain('\n')
485
485
486 fm.end()
486 fm.end()
487
487
488 @command('archive',
488 @command('archive',
489 [('', 'no-decode', None, _('do not pass files through decoders')),
489 [('', 'no-decode', None, _('do not pass files through decoders')),
490 ('p', 'prefix', '', _('directory prefix for files in archive'),
490 ('p', 'prefix', '', _('directory prefix for files in archive'),
491 _('PREFIX')),
491 _('PREFIX')),
492 ('r', 'rev', '', _('revision to distribute'), _('REV')),
492 ('r', 'rev', '', _('revision to distribute'), _('REV')),
493 ('t', 'type', '', _('type of distribution to create'), _('TYPE')),
493 ('t', 'type', '', _('type of distribution to create'), _('TYPE')),
494 ] + subrepoopts + walkopts,
494 ] + subrepoopts + walkopts,
495 _('[OPTION]... DEST'))
495 _('[OPTION]... DEST'))
496 def archive(ui, repo, dest, **opts):
496 def archive(ui, repo, dest, **opts):
497 '''create an unversioned archive of a repository revision
497 '''create an unversioned archive of a repository revision
498
498
499 By default, the revision used is the parent of the working
499 By default, the revision used is the parent of the working
500 directory; use -r/--rev to specify a different revision.
500 directory; use -r/--rev to specify a different revision.
501
501
502 The archive type is automatically detected based on file
502 The archive type is automatically detected based on file
503 extension (to override, use -t/--type).
503 extension (to override, use -t/--type).
504
504
505 .. container:: verbose
505 .. container:: verbose
506
506
507 Examples:
507 Examples:
508
508
509 - create a zip file containing the 1.0 release::
509 - create a zip file containing the 1.0 release::
510
510
511 hg archive -r 1.0 project-1.0.zip
511 hg archive -r 1.0 project-1.0.zip
512
512
513 - create a tarball excluding .hg files::
513 - create a tarball excluding .hg files::
514
514
515 hg archive project.tar.gz -X ".hg*"
515 hg archive project.tar.gz -X ".hg*"
516
516
517 Valid types are:
517 Valid types are:
518
518
519 :``files``: a directory full of files (default)
519 :``files``: a directory full of files (default)
520 :``tar``: tar archive, uncompressed
520 :``tar``: tar archive, uncompressed
521 :``tbz2``: tar archive, compressed using bzip2
521 :``tbz2``: tar archive, compressed using bzip2
522 :``tgz``: tar archive, compressed using gzip
522 :``tgz``: tar archive, compressed using gzip
523 :``uzip``: zip archive, uncompressed
523 :``uzip``: zip archive, uncompressed
524 :``zip``: zip archive, compressed using deflate
524 :``zip``: zip archive, compressed using deflate
525
525
526 The exact name of the destination archive or directory is given
526 The exact name of the destination archive or directory is given
527 using a format string; see :hg:`help export` for details.
527 using a format string; see :hg:`help export` for details.
528
528
529 Each member added to an archive file has a directory prefix
529 Each member added to an archive file has a directory prefix
530 prepended. Use -p/--prefix to specify a format string for the
530 prepended. Use -p/--prefix to specify a format string for the
531 prefix. The default is the basename of the archive, with suffixes
531 prefix. The default is the basename of the archive, with suffixes
532 removed.
532 removed.
533
533
534 Returns 0 on success.
534 Returns 0 on success.
535 '''
535 '''
536
536
537 opts = pycompat.byteskwargs(opts)
537 opts = pycompat.byteskwargs(opts)
538 ctx = scmutil.revsingle(repo, opts.get('rev'))
538 ctx = scmutil.revsingle(repo, opts.get('rev'))
539 if not ctx:
539 if not ctx:
540 raise error.Abort(_('no working directory: please specify a revision'))
540 raise error.Abort(_('no working directory: please specify a revision'))
541 node = ctx.node()
541 node = ctx.node()
542 dest = cmdutil.makefilename(repo, dest, node)
542 dest = cmdutil.makefilename(repo, dest, node)
543 if os.path.realpath(dest) == repo.root:
543 if os.path.realpath(dest) == repo.root:
544 raise error.Abort(_('repository root cannot be destination'))
544 raise error.Abort(_('repository root cannot be destination'))
545
545
546 kind = opts.get('type') or archival.guesskind(dest) or 'files'
546 kind = opts.get('type') or archival.guesskind(dest) or 'files'
547 prefix = opts.get('prefix')
547 prefix = opts.get('prefix')
548
548
549 if dest == '-':
549 if dest == '-':
550 if kind == 'files':
550 if kind == 'files':
551 raise error.Abort(_('cannot archive plain files to stdout'))
551 raise error.Abort(_('cannot archive plain files to stdout'))
552 dest = cmdutil.makefileobj(repo, dest)
552 dest = cmdutil.makefileobj(repo, dest)
553 if not prefix:
553 if not prefix:
554 prefix = os.path.basename(repo.root) + '-%h'
554 prefix = os.path.basename(repo.root) + '-%h'
555
555
556 prefix = cmdutil.makefilename(repo, prefix, node)
556 prefix = cmdutil.makefilename(repo, prefix, node)
557 matchfn = scmutil.match(ctx, [], opts)
557 matchfn = scmutil.match(ctx, [], opts)
558 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
558 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
559 matchfn, prefix, subrepos=opts.get('subrepos'))
559 matchfn, prefix, subrepos=opts.get('subrepos'))
560
560
561 @command('backout',
561 @command('backout',
562 [('', 'merge', None, _('merge with old dirstate parent after backout')),
562 [('', 'merge', None, _('merge with old dirstate parent after backout')),
563 ('', 'commit', None,
563 ('', 'commit', None,
564 _('commit if no conflicts were encountered (DEPRECATED)')),
564 _('commit if no conflicts were encountered (DEPRECATED)')),
565 ('', 'no-commit', None, _('do not commit')),
565 ('', 'no-commit', None, _('do not commit')),
566 ('', 'parent', '',
566 ('', 'parent', '',
567 _('parent to choose when backing out merge (DEPRECATED)'), _('REV')),
567 _('parent to choose when backing out merge (DEPRECATED)'), _('REV')),
568 ('r', 'rev', '', _('revision to backout'), _('REV')),
568 ('r', 'rev', '', _('revision to backout'), _('REV')),
569 ('e', 'edit', False, _('invoke editor on commit messages')),
569 ('e', 'edit', False, _('invoke editor on commit messages')),
570 ] + mergetoolopts + walkopts + commitopts + commitopts2,
570 ] + mergetoolopts + walkopts + commitopts + commitopts2,
571 _('[OPTION]... [-r] REV'))
571 _('[OPTION]... [-r] REV'))
572 def backout(ui, repo, node=None, rev=None, **opts):
572 def backout(ui, repo, node=None, rev=None, **opts):
573 '''reverse effect of earlier changeset
573 '''reverse effect of earlier changeset
574
574
575 Prepare a new changeset with the effect of REV undone in the
575 Prepare a new changeset with the effect of REV undone in the
576 current working directory. If no conflicts were encountered,
576 current working directory. If no conflicts were encountered,
577 it will be committed immediately.
577 it will be committed immediately.
578
578
579 If REV is the parent of the working directory, then this new changeset
579 If REV is the parent of the working directory, then this new changeset
580 is committed automatically (unless --no-commit is specified).
580 is committed automatically (unless --no-commit is specified).
581
581
582 .. note::
582 .. note::
583
583
584 :hg:`backout` cannot be used to fix either an unwanted or
584 :hg:`backout` cannot be used to fix either an unwanted or
585 incorrect merge.
585 incorrect merge.
586
586
587 .. container:: verbose
587 .. container:: verbose
588
588
589 Examples:
589 Examples:
590
590
591 - Reverse the effect of the parent of the working directory.
591 - Reverse the effect of the parent of the working directory.
592 This backout will be committed immediately::
592 This backout will be committed immediately::
593
593
594 hg backout -r .
594 hg backout -r .
595
595
596 - Reverse the effect of previous bad revision 23::
596 - Reverse the effect of previous bad revision 23::
597
597
598 hg backout -r 23
598 hg backout -r 23
599
599
600 - Reverse the effect of previous bad revision 23 and
600 - Reverse the effect of previous bad revision 23 and
601 leave changes uncommitted::
601 leave changes uncommitted::
602
602
603 hg backout -r 23 --no-commit
603 hg backout -r 23 --no-commit
604 hg commit -m "Backout revision 23"
604 hg commit -m "Backout revision 23"
605
605
606 By default, the pending changeset will have one parent,
606 By default, the pending changeset will have one parent,
607 maintaining a linear history. With --merge, the pending
607 maintaining a linear history. With --merge, the pending
608 changeset will instead have two parents: the old parent of the
608 changeset will instead have two parents: the old parent of the
609 working directory and a new child of REV that simply undoes REV.
609 working directory and a new child of REV that simply undoes REV.
610
610
611 Before version 1.7, the behavior without --merge was equivalent
611 Before version 1.7, the behavior without --merge was equivalent
612 to specifying --merge followed by :hg:`update --clean .` to
612 to specifying --merge followed by :hg:`update --clean .` to
613 cancel the merge and leave the child of REV as a head to be
613 cancel the merge and leave the child of REV as a head to be
614 merged separately.
614 merged separately.
615
615
616 See :hg:`help dates` for a list of formats valid for -d/--date.
616 See :hg:`help dates` for a list of formats valid for -d/--date.
617
617
618 See :hg:`help revert` for a way to restore files to the state
618 See :hg:`help revert` for a way to restore files to the state
619 of another revision.
619 of another revision.
620
620
621 Returns 0 on success, 1 if nothing to backout or there are unresolved
621 Returns 0 on success, 1 if nothing to backout or there are unresolved
622 files.
622 files.
623 '''
623 '''
624 wlock = lock = None
624 wlock = lock = None
625 try:
625 try:
626 wlock = repo.wlock()
626 wlock = repo.wlock()
627 lock = repo.lock()
627 lock = repo.lock()
628 return _dobackout(ui, repo, node, rev, **opts)
628 return _dobackout(ui, repo, node, rev, **opts)
629 finally:
629 finally:
630 release(lock, wlock)
630 release(lock, wlock)
631
631
632 def _dobackout(ui, repo, node=None, rev=None, **opts):
632 def _dobackout(ui, repo, node=None, rev=None, **opts):
633 opts = pycompat.byteskwargs(opts)
633 opts = pycompat.byteskwargs(opts)
634 if opts.get('commit') and opts.get('no_commit'):
634 if opts.get('commit') and opts.get('no_commit'):
635 raise error.Abort(_("cannot use --commit with --no-commit"))
635 raise error.Abort(_("cannot use --commit with --no-commit"))
636 if opts.get('merge') and opts.get('no_commit'):
636 if opts.get('merge') and opts.get('no_commit'):
637 raise error.Abort(_("cannot use --merge with --no-commit"))
637 raise error.Abort(_("cannot use --merge with --no-commit"))
638
638
639 if rev and node:
639 if rev and node:
640 raise error.Abort(_("please specify just one revision"))
640 raise error.Abort(_("please specify just one revision"))
641
641
642 if not rev:
642 if not rev:
643 rev = node
643 rev = node
644
644
645 if not rev:
645 if not rev:
646 raise error.Abort(_("please specify a revision to backout"))
646 raise error.Abort(_("please specify a revision to backout"))
647
647
648 date = opts.get('date')
648 date = opts.get('date')
649 if date:
649 if date:
650 opts['date'] = util.parsedate(date)
650 opts['date'] = util.parsedate(date)
651
651
652 cmdutil.checkunfinished(repo)
652 cmdutil.checkunfinished(repo)
653 cmdutil.bailifchanged(repo)
653 cmdutil.bailifchanged(repo)
654 node = scmutil.revsingle(repo, rev).node()
654 node = scmutil.revsingle(repo, rev).node()
655
655
656 op1, op2 = repo.dirstate.parents()
656 op1, op2 = repo.dirstate.parents()
657 if not repo.changelog.isancestor(node, op1):
657 if not repo.changelog.isancestor(node, op1):
658 raise error.Abort(_('cannot backout change that is not an ancestor'))
658 raise error.Abort(_('cannot backout change that is not an ancestor'))
659
659
660 p1, p2 = repo.changelog.parents(node)
660 p1, p2 = repo.changelog.parents(node)
661 if p1 == nullid:
661 if p1 == nullid:
662 raise error.Abort(_('cannot backout a change with no parents'))
662 raise error.Abort(_('cannot backout a change with no parents'))
663 if p2 != nullid:
663 if p2 != nullid:
664 if not opts.get('parent'):
664 if not opts.get('parent'):
665 raise error.Abort(_('cannot backout a merge changeset'))
665 raise error.Abort(_('cannot backout a merge changeset'))
666 p = repo.lookup(opts['parent'])
666 p = repo.lookup(opts['parent'])
667 if p not in (p1, p2):
667 if p not in (p1, p2):
668 raise error.Abort(_('%s is not a parent of %s') %
668 raise error.Abort(_('%s is not a parent of %s') %
669 (short(p), short(node)))
669 (short(p), short(node)))
670 parent = p
670 parent = p
671 else:
671 else:
672 if opts.get('parent'):
672 if opts.get('parent'):
673 raise error.Abort(_('cannot use --parent on non-merge changeset'))
673 raise error.Abort(_('cannot use --parent on non-merge changeset'))
674 parent = p1
674 parent = p1
675
675
676 # the backout should appear on the same branch
676 # the backout should appear on the same branch
677 branch = repo.dirstate.branch()
677 branch = repo.dirstate.branch()
678 bheads = repo.branchheads(branch)
678 bheads = repo.branchheads(branch)
679 rctx = scmutil.revsingle(repo, hex(parent))
679 rctx = scmutil.revsingle(repo, hex(parent))
680 if not opts.get('merge') and op1 != node:
680 if not opts.get('merge') and op1 != node:
681 dsguard = dirstateguard.dirstateguard(repo, 'backout')
681 dsguard = dirstateguard.dirstateguard(repo, 'backout')
682 try:
682 try:
683 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
683 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
684 'backout')
684 'backout')
685 stats = mergemod.update(repo, parent, True, True, node, False)
685 stats = mergemod.update(repo, parent, True, True, node, False)
686 repo.setparents(op1, op2)
686 repo.setparents(op1, op2)
687 dsguard.close()
687 dsguard.close()
688 hg._showstats(repo, stats)
688 hg._showstats(repo, stats)
689 if stats[3]:
689 if stats[3]:
690 repo.ui.status(_("use 'hg resolve' to retry unresolved "
690 repo.ui.status(_("use 'hg resolve' to retry unresolved "
691 "file merges\n"))
691 "file merges\n"))
692 return 1
692 return 1
693 finally:
693 finally:
694 ui.setconfig('ui', 'forcemerge', '', '')
694 ui.setconfig('ui', 'forcemerge', '', '')
695 lockmod.release(dsguard)
695 lockmod.release(dsguard)
696 else:
696 else:
697 hg.clean(repo, node, show_stats=False)
697 hg.clean(repo, node, show_stats=False)
698 repo.dirstate.setbranch(branch)
698 repo.dirstate.setbranch(branch)
699 cmdutil.revert(ui, repo, rctx, repo.dirstate.parents())
699 cmdutil.revert(ui, repo, rctx, repo.dirstate.parents())
700
700
701 if opts.get('no_commit'):
701 if opts.get('no_commit'):
702 msg = _("changeset %s backed out, "
702 msg = _("changeset %s backed out, "
703 "don't forget to commit.\n")
703 "don't forget to commit.\n")
704 ui.status(msg % short(node))
704 ui.status(msg % short(node))
705 return 0
705 return 0
706
706
707 def commitfunc(ui, repo, message, match, opts):
707 def commitfunc(ui, repo, message, match, opts):
708 editform = 'backout'
708 editform = 'backout'
709 e = cmdutil.getcommiteditor(editform=editform, **opts)
709 e = cmdutil.getcommiteditor(editform=editform, **opts)
710 if not message:
710 if not message:
711 # we don't translate commit messages
711 # we don't translate commit messages
712 message = "Backed out changeset %s" % short(node)
712 message = "Backed out changeset %s" % short(node)
713 e = cmdutil.getcommiteditor(edit=True, editform=editform)
713 e = cmdutil.getcommiteditor(edit=True, editform=editform)
714 return repo.commit(message, opts.get('user'), opts.get('date'),
714 return repo.commit(message, opts.get('user'), opts.get('date'),
715 match, editor=e)
715 match, editor=e)
716 newnode = cmdutil.commit(ui, repo, commitfunc, [], opts)
716 newnode = cmdutil.commit(ui, repo, commitfunc, [], opts)
717 if not newnode:
717 if not newnode:
718 ui.status(_("nothing changed\n"))
718 ui.status(_("nothing changed\n"))
719 return 1
719 return 1
720 cmdutil.commitstatus(repo, newnode, branch, bheads)
720 cmdutil.commitstatus(repo, newnode, branch, bheads)
721
721
722 def nice(node):
722 def nice(node):
723 return '%d:%s' % (repo.changelog.rev(node), short(node))
723 return '%d:%s' % (repo.changelog.rev(node), short(node))
724 ui.status(_('changeset %s backs out changeset %s\n') %
724 ui.status(_('changeset %s backs out changeset %s\n') %
725 (nice(repo.changelog.tip()), nice(node)))
725 (nice(repo.changelog.tip()), nice(node)))
726 if opts.get('merge') and op1 != node:
726 if opts.get('merge') and op1 != node:
727 hg.clean(repo, op1, show_stats=False)
727 hg.clean(repo, op1, show_stats=False)
728 ui.status(_('merging with changeset %s\n')
728 ui.status(_('merging with changeset %s\n')
729 % nice(repo.changelog.tip()))
729 % nice(repo.changelog.tip()))
730 try:
730 try:
731 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
731 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
732 'backout')
732 'backout')
733 return hg.merge(repo, hex(repo.changelog.tip()))
733 return hg.merge(repo, hex(repo.changelog.tip()))
734 finally:
734 finally:
735 ui.setconfig('ui', 'forcemerge', '', '')
735 ui.setconfig('ui', 'forcemerge', '', '')
736 return 0
736 return 0
737
737
738 @command('bisect',
738 @command('bisect',
739 [('r', 'reset', False, _('reset bisect state')),
739 [('r', 'reset', False, _('reset bisect state')),
740 ('g', 'good', False, _('mark changeset good')),
740 ('g', 'good', False, _('mark changeset good')),
741 ('b', 'bad', False, _('mark changeset bad')),
741 ('b', 'bad', False, _('mark changeset bad')),
742 ('s', 'skip', False, _('skip testing changeset')),
742 ('s', 'skip', False, _('skip testing changeset')),
743 ('e', 'extend', False, _('extend the bisect range')),
743 ('e', 'extend', False, _('extend the bisect range')),
744 ('c', 'command', '', _('use command to check changeset state'), _('CMD')),
744 ('c', 'command', '', _('use command to check changeset state'), _('CMD')),
745 ('U', 'noupdate', False, _('do not update to target'))],
745 ('U', 'noupdate', False, _('do not update to target'))],
746 _("[-gbsr] [-U] [-c CMD] [REV]"))
746 _("[-gbsr] [-U] [-c CMD] [REV]"))
747 def bisect(ui, repo, rev=None, extra=None, command=None,
747 def bisect(ui, repo, rev=None, extra=None, command=None,
748 reset=None, good=None, bad=None, skip=None, extend=None,
748 reset=None, good=None, bad=None, skip=None, extend=None,
749 noupdate=None):
749 noupdate=None):
750 """subdivision search of changesets
750 """subdivision search of changesets
751
751
752 This command helps to find changesets which introduce problems. To
752 This command helps to find changesets which introduce problems. To
753 use, mark the earliest changeset you know exhibits the problem as
753 use, mark the earliest changeset you know exhibits the problem as
754 bad, then mark the latest changeset which is free from the problem
754 bad, then mark the latest changeset which is free from the problem
755 as good. Bisect will update your working directory to a revision
755 as good. Bisect will update your working directory to a revision
756 for testing (unless the -U/--noupdate option is specified). Once
756 for testing (unless the -U/--noupdate option is specified). Once
757 you have performed tests, mark the working directory as good or
757 you have performed tests, mark the working directory as good or
758 bad, and bisect will either update to another candidate changeset
758 bad, and bisect will either update to another candidate changeset
759 or announce that it has found the bad revision.
759 or announce that it has found the bad revision.
760
760
761 As a shortcut, you can also use the revision argument to mark a
761 As a shortcut, you can also use the revision argument to mark a
762 revision as good or bad without checking it out first.
762 revision as good or bad without checking it out first.
763
763
764 If you supply a command, it will be used for automatic bisection.
764 If you supply a command, it will be used for automatic bisection.
765 The environment variable HG_NODE will contain the ID of the
765 The environment variable HG_NODE will contain the ID of the
766 changeset being tested. The exit status of the command will be
766 changeset being tested. The exit status of the command will be
767 used to mark revisions as good or bad: status 0 means good, 125
767 used to mark revisions as good or bad: status 0 means good, 125
768 means to skip the revision, 127 (command not found) will abort the
768 means to skip the revision, 127 (command not found) will abort the
769 bisection, and any other non-zero exit status means the revision
769 bisection, and any other non-zero exit status means the revision
770 is bad.
770 is bad.
771
771
772 .. container:: verbose
772 .. container:: verbose
773
773
774 Some examples:
774 Some examples:
775
775
776 - start a bisection with known bad revision 34, and good revision 12::
776 - start a bisection with known bad revision 34, and good revision 12::
777
777
778 hg bisect --bad 34
778 hg bisect --bad 34
779 hg bisect --good 12
779 hg bisect --good 12
780
780
781 - advance the current bisection by marking current revision as good or
781 - advance the current bisection by marking current revision as good or
782 bad::
782 bad::
783
783
784 hg bisect --good
784 hg bisect --good
785 hg bisect --bad
785 hg bisect --bad
786
786
787 - mark the current revision, or a known revision, to be skipped (e.g. if
787 - mark the current revision, or a known revision, to be skipped (e.g. if
788 that revision is not usable because of another issue)::
788 that revision is not usable because of another issue)::
789
789
790 hg bisect --skip
790 hg bisect --skip
791 hg bisect --skip 23
791 hg bisect --skip 23
792
792
793 - skip all revisions that do not touch directories ``foo`` or ``bar``::
793 - skip all revisions that do not touch directories ``foo`` or ``bar``::
794
794
795 hg bisect --skip "!( file('path:foo') & file('path:bar') )"
795 hg bisect --skip "!( file('path:foo') & file('path:bar') )"
796
796
797 - forget the current bisection::
797 - forget the current bisection::
798
798
799 hg bisect --reset
799 hg bisect --reset
800
800
801 - use 'make && make tests' to automatically find the first broken
801 - use 'make && make tests' to automatically find the first broken
802 revision::
802 revision::
803
803
804 hg bisect --reset
804 hg bisect --reset
805 hg bisect --bad 34
805 hg bisect --bad 34
806 hg bisect --good 12
806 hg bisect --good 12
807 hg bisect --command "make && make tests"
807 hg bisect --command "make && make tests"
808
808
809 - see all changesets whose states are already known in the current
809 - see all changesets whose states are already known in the current
810 bisection::
810 bisection::
811
811
812 hg log -r "bisect(pruned)"
812 hg log -r "bisect(pruned)"
813
813
814 - see the changeset currently being bisected (especially useful
814 - see the changeset currently being bisected (especially useful
815 if running with -U/--noupdate)::
815 if running with -U/--noupdate)::
816
816
817 hg log -r "bisect(current)"
817 hg log -r "bisect(current)"
818
818
819 - see all changesets that took part in the current bisection::
819 - see all changesets that took part in the current bisection::
820
820
821 hg log -r "bisect(range)"
821 hg log -r "bisect(range)"
822
822
823 - you can even get a nice graph::
823 - you can even get a nice graph::
824
824
825 hg log --graph -r "bisect(range)"
825 hg log --graph -r "bisect(range)"
826
826
827 See :hg:`help revisions.bisect` for more about the `bisect()` predicate.
827 See :hg:`help revisions.bisect` for more about the `bisect()` predicate.
828
828
829 Returns 0 on success.
829 Returns 0 on success.
830 """
830 """
831 # backward compatibility
831 # backward compatibility
832 if rev in "good bad reset init".split():
832 if rev in "good bad reset init".split():
833 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
833 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
834 cmd, rev, extra = rev, extra, None
834 cmd, rev, extra = rev, extra, None
835 if cmd == "good":
835 if cmd == "good":
836 good = True
836 good = True
837 elif cmd == "bad":
837 elif cmd == "bad":
838 bad = True
838 bad = True
839 else:
839 else:
840 reset = True
840 reset = True
841 elif extra or good + bad + skip + reset + extend + bool(command) > 1:
841 elif extra or good + bad + skip + reset + extend + bool(command) > 1:
842 raise error.Abort(_('incompatible arguments'))
842 raise error.Abort(_('incompatible arguments'))
843
843
844 if reset:
844 if reset:
845 hbisect.resetstate(repo)
845 hbisect.resetstate(repo)
846 return
846 return
847
847
848 state = hbisect.load_state(repo)
848 state = hbisect.load_state(repo)
849
849
850 # update state
850 # update state
851 if good or bad or skip:
851 if good or bad or skip:
852 if rev:
852 if rev:
853 nodes = [repo.lookup(i) for i in scmutil.revrange(repo, [rev])]
853 nodes = [repo.lookup(i) for i in scmutil.revrange(repo, [rev])]
854 else:
854 else:
855 nodes = [repo.lookup('.')]
855 nodes = [repo.lookup('.')]
856 if good:
856 if good:
857 state['good'] += nodes
857 state['good'] += nodes
858 elif bad:
858 elif bad:
859 state['bad'] += nodes
859 state['bad'] += nodes
860 elif skip:
860 elif skip:
861 state['skip'] += nodes
861 state['skip'] += nodes
862 hbisect.save_state(repo, state)
862 hbisect.save_state(repo, state)
863 if not (state['good'] and state['bad']):
863 if not (state['good'] and state['bad']):
864 return
864 return
865
865
866 def mayupdate(repo, node, show_stats=True):
866 def mayupdate(repo, node, show_stats=True):
867 """common used update sequence"""
867 """common used update sequence"""
868 if noupdate:
868 if noupdate:
869 return
869 return
870 cmdutil.checkunfinished(repo)
870 cmdutil.checkunfinished(repo)
871 cmdutil.bailifchanged(repo)
871 cmdutil.bailifchanged(repo)
872 return hg.clean(repo, node, show_stats=show_stats)
872 return hg.clean(repo, node, show_stats=show_stats)
873
873
874 displayer = cmdutil.show_changeset(ui, repo, {})
874 displayer = cmdutil.show_changeset(ui, repo, {})
875
875
876 if command:
876 if command:
877 changesets = 1
877 changesets = 1
878 if noupdate:
878 if noupdate:
879 try:
879 try:
880 node = state['current'][0]
880 node = state['current'][0]
881 except LookupError:
881 except LookupError:
882 raise error.Abort(_('current bisect revision is unknown - '
882 raise error.Abort(_('current bisect revision is unknown - '
883 'start a new bisect to fix'))
883 'start a new bisect to fix'))
884 else:
884 else:
885 node, p2 = repo.dirstate.parents()
885 node, p2 = repo.dirstate.parents()
886 if p2 != nullid:
886 if p2 != nullid:
887 raise error.Abort(_('current bisect revision is a merge'))
887 raise error.Abort(_('current bisect revision is a merge'))
888 if rev:
888 if rev:
889 node = repo[scmutil.revsingle(repo, rev, node)].node()
889 node = repo[scmutil.revsingle(repo, rev, node)].node()
890 try:
890 try:
891 while changesets:
891 while changesets:
892 # update state
892 # update state
893 state['current'] = [node]
893 state['current'] = [node]
894 hbisect.save_state(repo, state)
894 hbisect.save_state(repo, state)
895 status = ui.system(command, environ={'HG_NODE': hex(node)},
895 status = ui.system(command, environ={'HG_NODE': hex(node)},
896 blockedtag='bisect_check')
896 blockedtag='bisect_check')
897 if status == 125:
897 if status == 125:
898 transition = "skip"
898 transition = "skip"
899 elif status == 0:
899 elif status == 0:
900 transition = "good"
900 transition = "good"
901 # status < 0 means process was killed
901 # status < 0 means process was killed
902 elif status == 127:
902 elif status == 127:
903 raise error.Abort(_("failed to execute %s") % command)
903 raise error.Abort(_("failed to execute %s") % command)
904 elif status < 0:
904 elif status < 0:
905 raise error.Abort(_("%s killed") % command)
905 raise error.Abort(_("%s killed") % command)
906 else:
906 else:
907 transition = "bad"
907 transition = "bad"
908 state[transition].append(node)
908 state[transition].append(node)
909 ctx = repo[node]
909 ctx = repo[node]
910 ui.status(_('changeset %d:%s: %s\n') % (ctx, ctx, transition))
910 ui.status(_('changeset %d:%s: %s\n') % (ctx, ctx, transition))
911 hbisect.checkstate(state)
911 hbisect.checkstate(state)
912 # bisect
912 # bisect
913 nodes, changesets, bgood = hbisect.bisect(repo.changelog, state)
913 nodes, changesets, bgood = hbisect.bisect(repo.changelog, state)
914 # update to next check
914 # update to next check
915 node = nodes[0]
915 node = nodes[0]
916 mayupdate(repo, node, show_stats=False)
916 mayupdate(repo, node, show_stats=False)
917 finally:
917 finally:
918 state['current'] = [node]
918 state['current'] = [node]
919 hbisect.save_state(repo, state)
919 hbisect.save_state(repo, state)
920 hbisect.printresult(ui, repo, state, displayer, nodes, bgood)
920 hbisect.printresult(ui, repo, state, displayer, nodes, bgood)
921 return
921 return
922
922
923 hbisect.checkstate(state)
923 hbisect.checkstate(state)
924
924
925 # actually bisect
925 # actually bisect
926 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
926 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
927 if extend:
927 if extend:
928 if not changesets:
928 if not changesets:
929 extendnode = hbisect.extendrange(repo, state, nodes, good)
929 extendnode = hbisect.extendrange(repo, state, nodes, good)
930 if extendnode is not None:
930 if extendnode is not None:
931 ui.write(_("Extending search to changeset %d:%s\n")
931 ui.write(_("Extending search to changeset %d:%s\n")
932 % (extendnode.rev(), extendnode))
932 % (extendnode.rev(), extendnode))
933 state['current'] = [extendnode.node()]
933 state['current'] = [extendnode.node()]
934 hbisect.save_state(repo, state)
934 hbisect.save_state(repo, state)
935 return mayupdate(repo, extendnode.node())
935 return mayupdate(repo, extendnode.node())
936 raise error.Abort(_("nothing to extend"))
936 raise error.Abort(_("nothing to extend"))
937
937
938 if changesets == 0:
938 if changesets == 0:
939 hbisect.printresult(ui, repo, state, displayer, nodes, good)
939 hbisect.printresult(ui, repo, state, displayer, nodes, good)
940 else:
940 else:
941 assert len(nodes) == 1 # only a single node can be tested next
941 assert len(nodes) == 1 # only a single node can be tested next
942 node = nodes[0]
942 node = nodes[0]
943 # compute the approximate number of remaining tests
943 # compute the approximate number of remaining tests
944 tests, size = 0, 2
944 tests, size = 0, 2
945 while size <= changesets:
945 while size <= changesets:
946 tests, size = tests + 1, size * 2
946 tests, size = tests + 1, size * 2
947 rev = repo.changelog.rev(node)
947 rev = repo.changelog.rev(node)
948 ui.write(_("Testing changeset %d:%s "
948 ui.write(_("Testing changeset %d:%s "
949 "(%d changesets remaining, ~%d tests)\n")
949 "(%d changesets remaining, ~%d tests)\n")
950 % (rev, short(node), changesets, tests))
950 % (rev, short(node), changesets, tests))
951 state['current'] = [node]
951 state['current'] = [node]
952 hbisect.save_state(repo, state)
952 hbisect.save_state(repo, state)
953 return mayupdate(repo, node)
953 return mayupdate(repo, node)
954
954
955 @command('bookmarks|bookmark',
955 @command('bookmarks|bookmark',
956 [('f', 'force', False, _('force')),
956 [('f', 'force', False, _('force')),
957 ('r', 'rev', '', _('revision for bookmark action'), _('REV')),
957 ('r', 'rev', '', _('revision for bookmark action'), _('REV')),
958 ('d', 'delete', False, _('delete a given bookmark')),
958 ('d', 'delete', False, _('delete a given bookmark')),
959 ('m', 'rename', '', _('rename a given bookmark'), _('OLD')),
959 ('m', 'rename', '', _('rename a given bookmark'), _('OLD')),
960 ('i', 'inactive', False, _('mark a bookmark inactive')),
960 ('i', 'inactive', False, _('mark a bookmark inactive')),
961 ] + formatteropts,
961 ] + formatteropts,
962 _('hg bookmarks [OPTIONS]... [NAME]...'))
962 _('hg bookmarks [OPTIONS]... [NAME]...'))
963 def bookmark(ui, repo, *names, **opts):
963 def bookmark(ui, repo, *names, **opts):
964 '''create a new bookmark or list existing bookmarks
964 '''create a new bookmark or list existing bookmarks
965
965
966 Bookmarks are labels on changesets to help track lines of development.
966 Bookmarks are labels on changesets to help track lines of development.
967 Bookmarks are unversioned and can be moved, renamed and deleted.
967 Bookmarks are unversioned and can be moved, renamed and deleted.
968 Deleting or moving a bookmark has no effect on the associated changesets.
968 Deleting or moving a bookmark has no effect on the associated changesets.
969
969
970 Creating or updating to a bookmark causes it to be marked as 'active'.
970 Creating or updating to a bookmark causes it to be marked as 'active'.
971 The active bookmark is indicated with a '*'.
971 The active bookmark is indicated with a '*'.
972 When a commit is made, the active bookmark will advance to the new commit.
972 When a commit is made, the active bookmark will advance to the new commit.
973 A plain :hg:`update` will also advance an active bookmark, if possible.
973 A plain :hg:`update` will also advance an active bookmark, if possible.
974 Updating away from a bookmark will cause it to be deactivated.
974 Updating away from a bookmark will cause it to be deactivated.
975
975
976 Bookmarks can be pushed and pulled between repositories (see
976 Bookmarks can be pushed and pulled between repositories (see
977 :hg:`help push` and :hg:`help pull`). If a shared bookmark has
977 :hg:`help push` and :hg:`help pull`). If a shared bookmark has
978 diverged, a new 'divergent bookmark' of the form 'name@path' will
978 diverged, a new 'divergent bookmark' of the form 'name@path' will
979 be created. Using :hg:`merge` will resolve the divergence.
979 be created. Using :hg:`merge` will resolve the divergence.
980
980
981 A bookmark named '@' has the special property that :hg:`clone` will
981 A bookmark named '@' has the special property that :hg:`clone` will
982 check it out by default if it exists.
982 check it out by default if it exists.
983
983
984 .. container:: verbose
984 .. container:: verbose
985
985
986 Examples:
986 Examples:
987
987
988 - create an active bookmark for a new line of development::
988 - create an active bookmark for a new line of development::
989
989
990 hg book new-feature
990 hg book new-feature
991
991
992 - create an inactive bookmark as a place marker::
992 - create an inactive bookmark as a place marker::
993
993
994 hg book -i reviewed
994 hg book -i reviewed
995
995
996 - create an inactive bookmark on another changeset::
996 - create an inactive bookmark on another changeset::
997
997
998 hg book -r .^ tested
998 hg book -r .^ tested
999
999
1000 - rename bookmark turkey to dinner::
1000 - rename bookmark turkey to dinner::
1001
1001
1002 hg book -m turkey dinner
1002 hg book -m turkey dinner
1003
1003
1004 - move the '@' bookmark from another branch::
1004 - move the '@' bookmark from another branch::
1005
1005
1006 hg book -f @
1006 hg book -f @
1007 '''
1007 '''
1008 opts = pycompat.byteskwargs(opts)
1008 opts = pycompat.byteskwargs(opts)
1009 force = opts.get('force')
1009 force = opts.get('force')
1010 rev = opts.get('rev')
1010 rev = opts.get('rev')
1011 delete = opts.get('delete')
1011 delete = opts.get('delete')
1012 rename = opts.get('rename')
1012 rename = opts.get('rename')
1013 inactive = opts.get('inactive')
1013 inactive = opts.get('inactive')
1014
1014
1015 def checkformat(mark):
1015 def checkformat(mark):
1016 mark = mark.strip()
1016 mark = mark.strip()
1017 if not mark:
1017 if not mark:
1018 raise error.Abort(_("bookmark names cannot consist entirely of "
1018 raise error.Abort(_("bookmark names cannot consist entirely of "
1019 "whitespace"))
1019 "whitespace"))
1020 scmutil.checknewlabel(repo, mark, 'bookmark')
1020 scmutil.checknewlabel(repo, mark, 'bookmark')
1021 return mark
1021 return mark
1022
1022
1023 def checkconflict(repo, mark, cur, force=False, target=None):
1023 def checkconflict(repo, mark, cur, force=False, target=None):
1024 if mark in marks and not force:
1024 if mark in marks and not force:
1025 if target:
1025 if target:
1026 if marks[mark] == target and target == cur:
1026 if marks[mark] == target and target == cur:
1027 # re-activating a bookmark
1027 # re-activating a bookmark
1028 return
1028 return
1029 anc = repo.changelog.ancestors([repo[target].rev()])
1029 anc = repo.changelog.ancestors([repo[target].rev()])
1030 bmctx = repo[marks[mark]]
1030 bmctx = repo[marks[mark]]
1031 divs = [repo[b].node() for b in marks
1031 divs = [repo[b].node() for b in marks
1032 if b.split('@', 1)[0] == mark.split('@', 1)[0]]
1032 if b.split('@', 1)[0] == mark.split('@', 1)[0]]
1033
1033
1034 # allow resolving a single divergent bookmark even if moving
1034 # allow resolving a single divergent bookmark even if moving
1035 # the bookmark across branches when a revision is specified
1035 # the bookmark across branches when a revision is specified
1036 # that contains a divergent bookmark
1036 # that contains a divergent bookmark
1037 if bmctx.rev() not in anc and target in divs:
1037 if bmctx.rev() not in anc and target in divs:
1038 bookmarks.deletedivergent(repo, [target], mark)
1038 bookmarks.deletedivergent(repo, [target], mark)
1039 return
1039 return
1040
1040
1041 deletefrom = [b for b in divs
1041 deletefrom = [b for b in divs
1042 if repo[b].rev() in anc or b == target]
1042 if repo[b].rev() in anc or b == target]
1043 bookmarks.deletedivergent(repo, deletefrom, mark)
1043 bookmarks.deletedivergent(repo, deletefrom, mark)
1044 if bookmarks.validdest(repo, bmctx, repo[target]):
1044 if bookmarks.validdest(repo, bmctx, repo[target]):
1045 ui.status(_("moving bookmark '%s' forward from %s\n") %
1045 ui.status(_("moving bookmark '%s' forward from %s\n") %
1046 (mark, short(bmctx.node())))
1046 (mark, short(bmctx.node())))
1047 return
1047 return
1048 raise error.Abort(_("bookmark '%s' already exists "
1048 raise error.Abort(_("bookmark '%s' already exists "
1049 "(use -f to force)") % mark)
1049 "(use -f to force)") % mark)
1050 if ((mark in repo.branchmap() or mark == repo.dirstate.branch())
1050 if ((mark in repo.branchmap() or mark == repo.dirstate.branch())
1051 and not force):
1051 and not force):
1052 raise error.Abort(
1052 raise error.Abort(
1053 _("a bookmark cannot have the name of an existing branch"))
1053 _("a bookmark cannot have the name of an existing branch"))
1054
1054
1055 if delete and rename:
1055 if delete and rename:
1056 raise error.Abort(_("--delete and --rename are incompatible"))
1056 raise error.Abort(_("--delete and --rename are incompatible"))
1057 if delete and rev:
1057 if delete and rev:
1058 raise error.Abort(_("--rev is incompatible with --delete"))
1058 raise error.Abort(_("--rev is incompatible with --delete"))
1059 if rename and rev:
1059 if rename and rev:
1060 raise error.Abort(_("--rev is incompatible with --rename"))
1060 raise error.Abort(_("--rev is incompatible with --rename"))
1061 if not names and (delete or rev):
1061 if not names and (delete or rev):
1062 raise error.Abort(_("bookmark name required"))
1062 raise error.Abort(_("bookmark name required"))
1063
1063
1064 if delete or rename or names or inactive:
1064 if delete or rename or names or inactive:
1065 wlock = lock = tr = None
1065 wlock = lock = tr = None
1066 try:
1066 try:
1067 wlock = repo.wlock()
1067 wlock = repo.wlock()
1068 lock = repo.lock()
1068 lock = repo.lock()
1069 cur = repo.changectx('.').node()
1069 cur = repo.changectx('.').node()
1070 marks = repo._bookmarks
1070 marks = repo._bookmarks
1071 if delete:
1071 if delete:
1072 tr = repo.transaction('bookmark')
1072 tr = repo.transaction('bookmark')
1073 for mark in names:
1073 for mark in names:
1074 if mark not in marks:
1074 if mark not in marks:
1075 raise error.Abort(_("bookmark '%s' does not exist") %
1075 raise error.Abort(_("bookmark '%s' does not exist") %
1076 mark)
1076 mark)
1077 if mark == repo._activebookmark:
1077 if mark == repo._activebookmark:
1078 bookmarks.deactivate(repo)
1078 bookmarks.deactivate(repo)
1079 del marks[mark]
1079 del marks[mark]
1080
1080
1081 elif rename:
1081 elif rename:
1082 tr = repo.transaction('bookmark')
1082 tr = repo.transaction('bookmark')
1083 if not names:
1083 if not names:
1084 raise error.Abort(_("new bookmark name required"))
1084 raise error.Abort(_("new bookmark name required"))
1085 elif len(names) > 1:
1085 elif len(names) > 1:
1086 raise error.Abort(_("only one new bookmark name allowed"))
1086 raise error.Abort(_("only one new bookmark name allowed"))
1087 mark = checkformat(names[0])
1087 mark = checkformat(names[0])
1088 if rename not in marks:
1088 if rename not in marks:
1089 raise error.Abort(_("bookmark '%s' does not exist")
1089 raise error.Abort(_("bookmark '%s' does not exist")
1090 % rename)
1090 % rename)
1091 checkconflict(repo, mark, cur, force)
1091 checkconflict(repo, mark, cur, force)
1092 marks[mark] = marks[rename]
1092 marks[mark] = marks[rename]
1093 if repo._activebookmark == rename and not inactive:
1093 if repo._activebookmark == rename and not inactive:
1094 bookmarks.activate(repo, mark)
1094 bookmarks.activate(repo, mark)
1095 del marks[rename]
1095 del marks[rename]
1096 elif names:
1096 elif names:
1097 tr = repo.transaction('bookmark')
1097 tr = repo.transaction('bookmark')
1098 newact = None
1098 newact = None
1099 for mark in names:
1099 for mark in names:
1100 mark = checkformat(mark)
1100 mark = checkformat(mark)
1101 if newact is None:
1101 if newact is None:
1102 newact = mark
1102 newact = mark
1103 if inactive and mark == repo._activebookmark:
1103 if inactive and mark == repo._activebookmark:
1104 bookmarks.deactivate(repo)
1104 bookmarks.deactivate(repo)
1105 return
1105 return
1106 tgt = cur
1106 tgt = cur
1107 if rev:
1107 if rev:
1108 tgt = scmutil.revsingle(repo, rev).node()
1108 tgt = scmutil.revsingle(repo, rev).node()
1109 checkconflict(repo, mark, cur, force, tgt)
1109 checkconflict(repo, mark, cur, force, tgt)
1110 marks[mark] = tgt
1110 marks[mark] = tgt
1111 if not inactive and cur == marks[newact] and not rev:
1111 if not inactive and cur == marks[newact] and not rev:
1112 bookmarks.activate(repo, newact)
1112 bookmarks.activate(repo, newact)
1113 elif cur != tgt and newact == repo._activebookmark:
1113 elif cur != tgt and newact == repo._activebookmark:
1114 bookmarks.deactivate(repo)
1114 bookmarks.deactivate(repo)
1115 elif inactive:
1115 elif inactive:
1116 if len(marks) == 0:
1116 if len(marks) == 0:
1117 ui.status(_("no bookmarks set\n"))
1117 ui.status(_("no bookmarks set\n"))
1118 elif not repo._activebookmark:
1118 elif not repo._activebookmark:
1119 ui.status(_("no active bookmark\n"))
1119 ui.status(_("no active bookmark\n"))
1120 else:
1120 else:
1121 bookmarks.deactivate(repo)
1121 bookmarks.deactivate(repo)
1122 if tr is not None:
1122 if tr is not None:
1123 marks.recordchange(tr)
1123 marks.recordchange(tr)
1124 tr.close()
1124 tr.close()
1125 finally:
1125 finally:
1126 lockmod.release(tr, lock, wlock)
1126 lockmod.release(tr, lock, wlock)
1127 else: # show bookmarks
1127 else: # show bookmarks
1128 fm = ui.formatter('bookmarks', opts)
1128 fm = ui.formatter('bookmarks', opts)
1129 hexfn = fm.hexfunc
1129 hexfn = fm.hexfunc
1130 marks = repo._bookmarks
1130 marks = repo._bookmarks
1131 if len(marks) == 0 and fm.isplain():
1131 if len(marks) == 0 and fm.isplain():
1132 ui.status(_("no bookmarks set\n"))
1132 ui.status(_("no bookmarks set\n"))
1133 for bmark, n in sorted(marks.iteritems()):
1133 for bmark, n in sorted(marks.iteritems()):
1134 active = repo._activebookmark
1134 active = repo._activebookmark
1135 if bmark == active:
1135 if bmark == active:
1136 prefix, label = '*', activebookmarklabel
1136 prefix, label = '*', activebookmarklabel
1137 else:
1137 else:
1138 prefix, label = ' ', ''
1138 prefix, label = ' ', ''
1139
1139
1140 fm.startitem()
1140 fm.startitem()
1141 if not ui.quiet:
1141 if not ui.quiet:
1142 fm.plain(' %s ' % prefix, label=label)
1142 fm.plain(' %s ' % prefix, label=label)
1143 fm.write('bookmark', '%s', bmark, label=label)
1143 fm.write('bookmark', '%s', bmark, label=label)
1144 pad = " " * (25 - encoding.colwidth(bmark))
1144 pad = " " * (25 - encoding.colwidth(bmark))
1145 fm.condwrite(not ui.quiet, 'rev node', pad + ' %d:%s',
1145 fm.condwrite(not ui.quiet, 'rev node', pad + ' %d:%s',
1146 repo.changelog.rev(n), hexfn(n), label=label)
1146 repo.changelog.rev(n), hexfn(n), label=label)
1147 fm.data(active=(bmark == active))
1147 fm.data(active=(bmark == active))
1148 fm.plain('\n')
1148 fm.plain('\n')
1149 fm.end()
1149 fm.end()
1150
1150
1151 @command('branch',
1151 @command('branch',
1152 [('f', 'force', None,
1152 [('f', 'force', None,
1153 _('set branch name even if it shadows an existing branch')),
1153 _('set branch name even if it shadows an existing branch')),
1154 ('C', 'clean', None, _('reset branch name to parent branch name'))],
1154 ('C', 'clean', None, _('reset branch name to parent branch name'))],
1155 _('[-fC] [NAME]'))
1155 _('[-fC] [NAME]'))
1156 def branch(ui, repo, label=None, **opts):
1156 def branch(ui, repo, label=None, **opts):
1157 """set or show the current branch name
1157 """set or show the current branch name
1158
1158
1159 .. note::
1159 .. note::
1160
1160
1161 Branch names are permanent and global. Use :hg:`bookmark` to create a
1161 Branch names are permanent and global. Use :hg:`bookmark` to create a
1162 light-weight bookmark instead. See :hg:`help glossary` for more
1162 light-weight bookmark instead. See :hg:`help glossary` for more
1163 information about named branches and bookmarks.
1163 information about named branches and bookmarks.
1164
1164
1165 With no argument, show the current branch name. With one argument,
1165 With no argument, show the current branch name. With one argument,
1166 set the working directory branch name (the branch will not exist
1166 set the working directory branch name (the branch will not exist
1167 in the repository until the next commit). Standard practice
1167 in the repository until the next commit). Standard practice
1168 recommends that primary development take place on the 'default'
1168 recommends that primary development take place on the 'default'
1169 branch.
1169 branch.
1170
1170
1171 Unless -f/--force is specified, branch will not let you set a
1171 Unless -f/--force is specified, branch will not let you set a
1172 branch name that already exists.
1172 branch name that already exists.
1173
1173
1174 Use -C/--clean to reset the working directory branch to that of
1174 Use -C/--clean to reset the working directory branch to that of
1175 the parent of the working directory, negating a previous branch
1175 the parent of the working directory, negating a previous branch
1176 change.
1176 change.
1177
1177
1178 Use the command :hg:`update` to switch to an existing branch. Use
1178 Use the command :hg:`update` to switch to an existing branch. Use
1179 :hg:`commit --close-branch` to mark this branch head as closed.
1179 :hg:`commit --close-branch` to mark this branch head as closed.
1180 When all heads of a branch are closed, the branch will be
1180 When all heads of a branch are closed, the branch will be
1181 considered closed.
1181 considered closed.
1182
1182
1183 Returns 0 on success.
1183 Returns 0 on success.
1184 """
1184 """
1185 opts = pycompat.byteskwargs(opts)
1185 opts = pycompat.byteskwargs(opts)
1186 if label:
1186 if label:
1187 label = label.strip()
1187 label = label.strip()
1188
1188
1189 if not opts.get('clean') and not label:
1189 if not opts.get('clean') and not label:
1190 ui.write("%s\n" % repo.dirstate.branch())
1190 ui.write("%s\n" % repo.dirstate.branch())
1191 return
1191 return
1192
1192
1193 with repo.wlock():
1193 with repo.wlock():
1194 if opts.get('clean'):
1194 if opts.get('clean'):
1195 label = repo[None].p1().branch()
1195 label = repo[None].p1().branch()
1196 repo.dirstate.setbranch(label)
1196 repo.dirstate.setbranch(label)
1197 ui.status(_('reset working directory to branch %s\n') % label)
1197 ui.status(_('reset working directory to branch %s\n') % label)
1198 elif label:
1198 elif label:
1199 if not opts.get('force') and label in repo.branchmap():
1199 if not opts.get('force') and label in repo.branchmap():
1200 if label not in [p.branch() for p in repo[None].parents()]:
1200 if label not in [p.branch() for p in repo[None].parents()]:
1201 raise error.Abort(_('a branch of the same name already'
1201 raise error.Abort(_('a branch of the same name already'
1202 ' exists'),
1202 ' exists'),
1203 # i18n: "it" refers to an existing branch
1203 # i18n: "it" refers to an existing branch
1204 hint=_("use 'hg update' to switch to it"))
1204 hint=_("use 'hg update' to switch to it"))
1205 scmutil.checknewlabel(repo, label, 'branch')
1205 scmutil.checknewlabel(repo, label, 'branch')
1206 repo.dirstate.setbranch(label)
1206 repo.dirstate.setbranch(label)
1207 ui.status(_('marked working directory as branch %s\n') % label)
1207 ui.status(_('marked working directory as branch %s\n') % label)
1208
1208
1209 # find any open named branches aside from default
1209 # find any open named branches aside from default
1210 others = [n for n, h, t, c in repo.branchmap().iterbranches()
1210 others = [n for n, h, t, c in repo.branchmap().iterbranches()
1211 if n != "default" and not c]
1211 if n != "default" and not c]
1212 if not others:
1212 if not others:
1213 ui.status(_('(branches are permanent and global, '
1213 ui.status(_('(branches are permanent and global, '
1214 'did you want a bookmark?)\n'))
1214 'did you want a bookmark?)\n'))
1215
1215
1216 @command('branches',
1216 @command('branches',
1217 [('a', 'active', False,
1217 [('a', 'active', False,
1218 _('show only branches that have unmerged heads (DEPRECATED)')),
1218 _('show only branches that have unmerged heads (DEPRECATED)')),
1219 ('c', 'closed', False, _('show normal and closed branches')),
1219 ('c', 'closed', False, _('show normal and closed branches')),
1220 ] + formatteropts,
1220 ] + formatteropts,
1221 _('[-c]'))
1221 _('[-c]'))
1222 def branches(ui, repo, active=False, closed=False, **opts):
1222 def branches(ui, repo, active=False, closed=False, **opts):
1223 """list repository named branches
1223 """list repository named branches
1224
1224
1225 List the repository's named branches, indicating which ones are
1225 List the repository's named branches, indicating which ones are
1226 inactive. If -c/--closed is specified, also list branches which have
1226 inactive. If -c/--closed is specified, also list branches which have
1227 been marked closed (see :hg:`commit --close-branch`).
1227 been marked closed (see :hg:`commit --close-branch`).
1228
1228
1229 Use the command :hg:`update` to switch to an existing branch.
1229 Use the command :hg:`update` to switch to an existing branch.
1230
1230
1231 Returns 0.
1231 Returns 0.
1232 """
1232 """
1233
1233
1234 opts = pycompat.byteskwargs(opts)
1234 opts = pycompat.byteskwargs(opts)
1235 ui.pager('branches')
1235 ui.pager('branches')
1236 fm = ui.formatter('branches', opts)
1236 fm = ui.formatter('branches', opts)
1237 hexfunc = fm.hexfunc
1237 hexfunc = fm.hexfunc
1238
1238
1239 allheads = set(repo.heads())
1239 allheads = set(repo.heads())
1240 branches = []
1240 branches = []
1241 for tag, heads, tip, isclosed in repo.branchmap().iterbranches():
1241 for tag, heads, tip, isclosed in repo.branchmap().iterbranches():
1242 isactive = not isclosed and bool(set(heads) & allheads)
1242 isactive = not isclosed and bool(set(heads) & allheads)
1243 branches.append((tag, repo[tip], isactive, not isclosed))
1243 branches.append((tag, repo[tip], isactive, not isclosed))
1244 branches.sort(key=lambda i: (i[2], i[1].rev(), i[0], i[3]),
1244 branches.sort(key=lambda i: (i[2], i[1].rev(), i[0], i[3]),
1245 reverse=True)
1245 reverse=True)
1246
1246
1247 for tag, ctx, isactive, isopen in branches:
1247 for tag, ctx, isactive, isopen in branches:
1248 if active and not isactive:
1248 if active and not isactive:
1249 continue
1249 continue
1250 if isactive:
1250 if isactive:
1251 label = 'branches.active'
1251 label = 'branches.active'
1252 notice = ''
1252 notice = ''
1253 elif not isopen:
1253 elif not isopen:
1254 if not closed:
1254 if not closed:
1255 continue
1255 continue
1256 label = 'branches.closed'
1256 label = 'branches.closed'
1257 notice = _(' (closed)')
1257 notice = _(' (closed)')
1258 else:
1258 else:
1259 label = 'branches.inactive'
1259 label = 'branches.inactive'
1260 notice = _(' (inactive)')
1260 notice = _(' (inactive)')
1261 current = (tag == repo.dirstate.branch())
1261 current = (tag == repo.dirstate.branch())
1262 if current:
1262 if current:
1263 label = 'branches.current'
1263 label = 'branches.current'
1264
1264
1265 fm.startitem()
1265 fm.startitem()
1266 fm.write('branch', '%s', tag, label=label)
1266 fm.write('branch', '%s', tag, label=label)
1267 rev = ctx.rev()
1267 rev = ctx.rev()
1268 padsize = max(31 - len(str(rev)) - encoding.colwidth(tag), 0)
1268 padsize = max(31 - len(str(rev)) - encoding.colwidth(tag), 0)
1269 fmt = ' ' * padsize + ' %d:%s'
1269 fmt = ' ' * padsize + ' %d:%s'
1270 fm.condwrite(not ui.quiet, 'rev node', fmt, rev, hexfunc(ctx.node()),
1270 fm.condwrite(not ui.quiet, 'rev node', fmt, rev, hexfunc(ctx.node()),
1271 label='log.changeset changeset.%s' % ctx.phasestr())
1271 label='log.changeset changeset.%s' % ctx.phasestr())
1272 fm.context(ctx=ctx)
1272 fm.context(ctx=ctx)
1273 fm.data(active=isactive, closed=not isopen, current=current)
1273 fm.data(active=isactive, closed=not isopen, current=current)
1274 if not ui.quiet:
1274 if not ui.quiet:
1275 fm.plain(notice)
1275 fm.plain(notice)
1276 fm.plain('\n')
1276 fm.plain('\n')
1277 fm.end()
1277 fm.end()
1278
1278
1279 @command('bundle',
1279 @command('bundle',
1280 [('f', 'force', None, _('run even when the destination is unrelated')),
1280 [('f', 'force', None, _('run even when the destination is unrelated')),
1281 ('r', 'rev', [], _('a changeset intended to be added to the destination'),
1281 ('r', 'rev', [], _('a changeset intended to be added to the destination'),
1282 _('REV')),
1282 _('REV')),
1283 ('b', 'branch', [], _('a specific branch you would like to bundle'),
1283 ('b', 'branch', [], _('a specific branch you would like to bundle'),
1284 _('BRANCH')),
1284 _('BRANCH')),
1285 ('', 'base', [],
1285 ('', 'base', [],
1286 _('a base changeset assumed to be available at the destination'),
1286 _('a base changeset assumed to be available at the destination'),
1287 _('REV')),
1287 _('REV')),
1288 ('a', 'all', None, _('bundle all changesets in the repository')),
1288 ('a', 'all', None, _('bundle all changesets in the repository')),
1289 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE')),
1289 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE')),
1290 ] + remoteopts,
1290 ] + remoteopts,
1291 _('[-f] [-t BUNDLESPEC] [-a] [-r REV]... [--base REV]... FILE [DEST]'))
1291 _('[-f] [-t BUNDLESPEC] [-a] [-r REV]... [--base REV]... FILE [DEST]'))
1292 def bundle(ui, repo, fname, dest=None, **opts):
1292 def bundle(ui, repo, fname, dest=None, **opts):
1293 """create a bundle file
1293 """create a bundle file
1294
1294
1295 Generate a bundle file containing data to be added to a repository.
1295 Generate a bundle file containing data to be added to a repository.
1296
1296
1297 To create a bundle containing all changesets, use -a/--all
1297 To create a bundle containing all changesets, use -a/--all
1298 (or --base null). Otherwise, hg assumes the destination will have
1298 (or --base null). Otherwise, hg assumes the destination will have
1299 all the nodes you specify with --base parameters. Otherwise, hg
1299 all the nodes you specify with --base parameters. Otherwise, hg
1300 will assume the repository has all the nodes in destination, or
1300 will assume the repository has all the nodes in destination, or
1301 default-push/default if no destination is specified.
1301 default-push/default if no destination is specified.
1302
1302
1303 You can change bundle format with the -t/--type option. See
1303 You can change bundle format with the -t/--type option. See
1304 :hg:`help bundlespec` for documentation on this format. By default,
1304 :hg:`help bundlespec` for documentation on this format. By default,
1305 the most appropriate format is used and compression defaults to
1305 the most appropriate format is used and compression defaults to
1306 bzip2.
1306 bzip2.
1307
1307
1308 The bundle file can then be transferred using conventional means
1308 The bundle file can then be transferred using conventional means
1309 and applied to another repository with the unbundle or pull
1309 and applied to another repository with the unbundle or pull
1310 command. This is useful when direct push and pull are not
1310 command. This is useful when direct push and pull are not
1311 available or when exporting an entire repository is undesirable.
1311 available or when exporting an entire repository is undesirable.
1312
1312
1313 Applying bundles preserves all changeset contents including
1313 Applying bundles preserves all changeset contents including
1314 permissions, copy/rename information, and revision history.
1314 permissions, copy/rename information, and revision history.
1315
1315
1316 Returns 0 on success, 1 if no changes found.
1316 Returns 0 on success, 1 if no changes found.
1317 """
1317 """
1318 opts = pycompat.byteskwargs(opts)
1318 opts = pycompat.byteskwargs(opts)
1319 revs = None
1319 revs = None
1320 if 'rev' in opts:
1320 if 'rev' in opts:
1321 revstrings = opts['rev']
1321 revstrings = opts['rev']
1322 revs = scmutil.revrange(repo, revstrings)
1322 revs = scmutil.revrange(repo, revstrings)
1323 if revstrings and not revs:
1323 if revstrings and not revs:
1324 raise error.Abort(_('no commits to bundle'))
1324 raise error.Abort(_('no commits to bundle'))
1325
1325
1326 bundletype = opts.get('type', 'bzip2').lower()
1326 bundletype = opts.get('type', 'bzip2').lower()
1327 try:
1327 try:
1328 bcompression, cgversion, params = exchange.parsebundlespec(
1328 bcompression, cgversion, params = exchange.parsebundlespec(
1329 repo, bundletype, strict=False)
1329 repo, bundletype, strict=False)
1330 except error.UnsupportedBundleSpecification as e:
1330 except error.UnsupportedBundleSpecification as e:
1331 raise error.Abort(str(e),
1331 raise error.Abort(str(e),
1332 hint=_("see 'hg help bundlespec' for supported "
1332 hint=_("see 'hg help bundlespec' for supported "
1333 "values for --type"))
1333 "values for --type"))
1334
1334
1335 # Packed bundles are a pseudo bundle format for now.
1335 # Packed bundles are a pseudo bundle format for now.
1336 if cgversion == 's1':
1336 if cgversion == 's1':
1337 raise error.Abort(_('packed bundles cannot be produced by "hg bundle"'),
1337 raise error.Abort(_('packed bundles cannot be produced by "hg bundle"'),
1338 hint=_("use 'hg debugcreatestreamclonebundle'"))
1338 hint=_("use 'hg debugcreatestreamclonebundle'"))
1339
1339
1340 if opts.get('all'):
1340 if opts.get('all'):
1341 if dest:
1341 if dest:
1342 raise error.Abort(_("--all is incompatible with specifying "
1342 raise error.Abort(_("--all is incompatible with specifying "
1343 "a destination"))
1343 "a destination"))
1344 if opts.get('base'):
1344 if opts.get('base'):
1345 ui.warn(_("ignoring --base because --all was specified\n"))
1345 ui.warn(_("ignoring --base because --all was specified\n"))
1346 base = ['null']
1346 base = ['null']
1347 else:
1347 else:
1348 base = scmutil.revrange(repo, opts.get('base'))
1348 base = scmutil.revrange(repo, opts.get('base'))
1349 # TODO: get desired bundlecaps from command line.
1350 bundlecaps = None
1351 if cgversion not in changegroup.supportedoutgoingversions(repo):
1349 if cgversion not in changegroup.supportedoutgoingversions(repo):
1352 raise error.Abort(_("repository does not support bundle version %s") %
1350 raise error.Abort(_("repository does not support bundle version %s") %
1353 cgversion)
1351 cgversion)
1354
1352
1355 if base:
1353 if base:
1356 if dest:
1354 if dest:
1357 raise error.Abort(_("--base is incompatible with specifying "
1355 raise error.Abort(_("--base is incompatible with specifying "
1358 "a destination"))
1356 "a destination"))
1359 common = [repo.lookup(rev) for rev in base]
1357 common = [repo.lookup(rev) for rev in base]
1360 heads = revs and map(repo.lookup, revs) or None
1358 heads = revs and map(repo.lookup, revs) or None
1361 outgoing = discovery.outgoing(repo, common, heads)
1359 outgoing = discovery.outgoing(repo, common, heads)
1362 cg = changegroup.getchangegroup(repo, 'bundle', outgoing,
1360 cg = changegroup.getchangegroup(repo, 'bundle', outgoing,
1363 bundlecaps=bundlecaps,
1364 version=cgversion)
1361 version=cgversion)
1365 outgoing = None
1362 outgoing = None
1366 else:
1363 else:
1367 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1364 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1368 dest, branches = hg.parseurl(dest, opts.get('branch'))
1365 dest, branches = hg.parseurl(dest, opts.get('branch'))
1369 other = hg.peer(repo, opts, dest)
1366 other = hg.peer(repo, opts, dest)
1370 revs, checkout = hg.addbranchrevs(repo, repo, branches, revs)
1367 revs, checkout = hg.addbranchrevs(repo, repo, branches, revs)
1371 heads = revs and map(repo.lookup, revs) or revs
1368 heads = revs and map(repo.lookup, revs) or revs
1372 outgoing = discovery.findcommonoutgoing(repo, other,
1369 outgoing = discovery.findcommonoutgoing(repo, other,
1373 onlyheads=heads,
1370 onlyheads=heads,
1374 force=opts.get('force'),
1371 force=opts.get('force'),
1375 portable=True)
1372 portable=True)
1376 cg = changegroup.getlocalchangegroup(repo, 'bundle', outgoing,
1373 cg = changegroup.getlocalchangegroup(repo, 'bundle', outgoing,
1377 bundlecaps, version=cgversion)
1374 version=cgversion)
1378 if not cg:
1375 if not cg:
1379 scmutil.nochangesfound(ui, repo, outgoing and outgoing.excluded)
1376 scmutil.nochangesfound(ui, repo, outgoing and outgoing.excluded)
1380 return 1
1377 return 1
1381
1378
1382 if cgversion == '01': #bundle1
1379 if cgversion == '01': #bundle1
1383 if bcompression is None:
1380 if bcompression is None:
1384 bcompression = 'UN'
1381 bcompression = 'UN'
1385 bversion = 'HG10' + bcompression
1382 bversion = 'HG10' + bcompression
1386 bcompression = None
1383 bcompression = None
1387 elif cgversion in ('02', '03'):
1384 elif cgversion in ('02', '03'):
1388 bversion = 'HG20'
1385 bversion = 'HG20'
1389 else:
1386 else:
1390 raise error.ProgrammingError(
1387 raise error.ProgrammingError(
1391 'bundle: unexpected changegroup version %s' % cgversion)
1388 'bundle: unexpected changegroup version %s' % cgversion)
1392
1389
1393 # TODO compression options should be derived from bundlespec parsing.
1390 # TODO compression options should be derived from bundlespec parsing.
1394 # This is a temporary hack to allow adjusting bundle compression
1391 # This is a temporary hack to allow adjusting bundle compression
1395 # level without a) formalizing the bundlespec changes to declare it
1392 # level without a) formalizing the bundlespec changes to declare it
1396 # b) introducing a command flag.
1393 # b) introducing a command flag.
1397 compopts = {}
1394 compopts = {}
1398 complevel = ui.configint('experimental', 'bundlecomplevel')
1395 complevel = ui.configint('experimental', 'bundlecomplevel')
1399 if complevel is not None:
1396 if complevel is not None:
1400 compopts['level'] = complevel
1397 compopts['level'] = complevel
1401
1398
1402 bundle2.writebundle(ui, cg, fname, bversion, compression=bcompression,
1399 bundle2.writebundle(ui, cg, fname, bversion, compression=bcompression,
1403 compopts=compopts)
1400 compopts=compopts)
1404
1401
1405 @command('cat',
1402 @command('cat',
1406 [('o', 'output', '',
1403 [('o', 'output', '',
1407 _('print output to file with formatted name'), _('FORMAT')),
1404 _('print output to file with formatted name'), _('FORMAT')),
1408 ('r', 'rev', '', _('print the given revision'), _('REV')),
1405 ('r', 'rev', '', _('print the given revision'), _('REV')),
1409 ('', 'decode', None, _('apply any matching decode filter')),
1406 ('', 'decode', None, _('apply any matching decode filter')),
1410 ] + walkopts,
1407 ] + walkopts,
1411 _('[OPTION]... FILE...'),
1408 _('[OPTION]... FILE...'),
1412 inferrepo=True)
1409 inferrepo=True)
1413 def cat(ui, repo, file1, *pats, **opts):
1410 def cat(ui, repo, file1, *pats, **opts):
1414 """output the current or given revision of files
1411 """output the current or given revision of files
1415
1412
1416 Print the specified files as they were at the given revision. If
1413 Print the specified files as they were at the given revision. If
1417 no revision is given, the parent of the working directory is used.
1414 no revision is given, the parent of the working directory is used.
1418
1415
1419 Output may be to a file, in which case the name of the file is
1416 Output may be to a file, in which case the name of the file is
1420 given using a format string. The formatting rules as follows:
1417 given using a format string. The formatting rules as follows:
1421
1418
1422 :``%%``: literal "%" character
1419 :``%%``: literal "%" character
1423 :``%s``: basename of file being printed
1420 :``%s``: basename of file being printed
1424 :``%d``: dirname of file being printed, or '.' if in repository root
1421 :``%d``: dirname of file being printed, or '.' if in repository root
1425 :``%p``: root-relative path name of file being printed
1422 :``%p``: root-relative path name of file being printed
1426 :``%H``: changeset hash (40 hexadecimal digits)
1423 :``%H``: changeset hash (40 hexadecimal digits)
1427 :``%R``: changeset revision number
1424 :``%R``: changeset revision number
1428 :``%h``: short-form changeset hash (12 hexadecimal digits)
1425 :``%h``: short-form changeset hash (12 hexadecimal digits)
1429 :``%r``: zero-padded changeset revision number
1426 :``%r``: zero-padded changeset revision number
1430 :``%b``: basename of the exporting repository
1427 :``%b``: basename of the exporting repository
1431
1428
1432 Returns 0 on success.
1429 Returns 0 on success.
1433 """
1430 """
1434 ctx = scmutil.revsingle(repo, opts.get('rev'))
1431 ctx = scmutil.revsingle(repo, opts.get('rev'))
1435 m = scmutil.match(ctx, (file1,) + pats, opts)
1432 m = scmutil.match(ctx, (file1,) + pats, opts)
1436
1433
1437 ui.pager('cat')
1434 ui.pager('cat')
1438 return cmdutil.cat(ui, repo, ctx, m, '', **opts)
1435 return cmdutil.cat(ui, repo, ctx, m, '', **opts)
1439
1436
1440 @command('^clone',
1437 @command('^clone',
1441 [('U', 'noupdate', None, _('the clone will include an empty working '
1438 [('U', 'noupdate', None, _('the clone will include an empty working '
1442 'directory (only a repository)')),
1439 'directory (only a repository)')),
1443 ('u', 'updaterev', '', _('revision, tag, or branch to check out'),
1440 ('u', 'updaterev', '', _('revision, tag, or branch to check out'),
1444 _('REV')),
1441 _('REV')),
1445 ('r', 'rev', [], _('include the specified changeset'), _('REV')),
1442 ('r', 'rev', [], _('include the specified changeset'), _('REV')),
1446 ('b', 'branch', [], _('clone only the specified branch'), _('BRANCH')),
1443 ('b', 'branch', [], _('clone only the specified branch'), _('BRANCH')),
1447 ('', 'pull', None, _('use pull protocol to copy metadata')),
1444 ('', 'pull', None, _('use pull protocol to copy metadata')),
1448 ('', 'uncompressed', None, _('use uncompressed transfer (fast over LAN)')),
1445 ('', 'uncompressed', None, _('use uncompressed transfer (fast over LAN)')),
1449 ] + remoteopts,
1446 ] + remoteopts,
1450 _('[OPTION]... SOURCE [DEST]'),
1447 _('[OPTION]... SOURCE [DEST]'),
1451 norepo=True)
1448 norepo=True)
1452 def clone(ui, source, dest=None, **opts):
1449 def clone(ui, source, dest=None, **opts):
1453 """make a copy of an existing repository
1450 """make a copy of an existing repository
1454
1451
1455 Create a copy of an existing repository in a new directory.
1452 Create a copy of an existing repository in a new directory.
1456
1453
1457 If no destination directory name is specified, it defaults to the
1454 If no destination directory name is specified, it defaults to the
1458 basename of the source.
1455 basename of the source.
1459
1456
1460 The location of the source is added to the new repository's
1457 The location of the source is added to the new repository's
1461 ``.hg/hgrc`` file, as the default to be used for future pulls.
1458 ``.hg/hgrc`` file, as the default to be used for future pulls.
1462
1459
1463 Only local paths and ``ssh://`` URLs are supported as
1460 Only local paths and ``ssh://`` URLs are supported as
1464 destinations. For ``ssh://`` destinations, no working directory or
1461 destinations. For ``ssh://`` destinations, no working directory or
1465 ``.hg/hgrc`` will be created on the remote side.
1462 ``.hg/hgrc`` will be created on the remote side.
1466
1463
1467 If the source repository has a bookmark called '@' set, that
1464 If the source repository has a bookmark called '@' set, that
1468 revision will be checked out in the new repository by default.
1465 revision will be checked out in the new repository by default.
1469
1466
1470 To check out a particular version, use -u/--update, or
1467 To check out a particular version, use -u/--update, or
1471 -U/--noupdate to create a clone with no working directory.
1468 -U/--noupdate to create a clone with no working directory.
1472
1469
1473 To pull only a subset of changesets, specify one or more revisions
1470 To pull only a subset of changesets, specify one or more revisions
1474 identifiers with -r/--rev or branches with -b/--branch. The
1471 identifiers with -r/--rev or branches with -b/--branch. The
1475 resulting clone will contain only the specified changesets and
1472 resulting clone will contain only the specified changesets and
1476 their ancestors. These options (or 'clone src#rev dest') imply
1473 their ancestors. These options (or 'clone src#rev dest') imply
1477 --pull, even for local source repositories.
1474 --pull, even for local source repositories.
1478
1475
1479 .. note::
1476 .. note::
1480
1477
1481 Specifying a tag will include the tagged changeset but not the
1478 Specifying a tag will include the tagged changeset but not the
1482 changeset containing the tag.
1479 changeset containing the tag.
1483
1480
1484 .. container:: verbose
1481 .. container:: verbose
1485
1482
1486 For efficiency, hardlinks are used for cloning whenever the
1483 For efficiency, hardlinks are used for cloning whenever the
1487 source and destination are on the same filesystem (note this
1484 source and destination are on the same filesystem (note this
1488 applies only to the repository data, not to the working
1485 applies only to the repository data, not to the working
1489 directory). Some filesystems, such as AFS, implement hardlinking
1486 directory). Some filesystems, such as AFS, implement hardlinking
1490 incorrectly, but do not report errors. In these cases, use the
1487 incorrectly, but do not report errors. In these cases, use the
1491 --pull option to avoid hardlinking.
1488 --pull option to avoid hardlinking.
1492
1489
1493 In some cases, you can clone repositories and the working
1490 In some cases, you can clone repositories and the working
1494 directory using full hardlinks with ::
1491 directory using full hardlinks with ::
1495
1492
1496 $ cp -al REPO REPOCLONE
1493 $ cp -al REPO REPOCLONE
1497
1494
1498 This is the fastest way to clone, but it is not always safe. The
1495 This is the fastest way to clone, but it is not always safe. The
1499 operation is not atomic (making sure REPO is not modified during
1496 operation is not atomic (making sure REPO is not modified during
1500 the operation is up to you) and you have to make sure your
1497 the operation is up to you) and you have to make sure your
1501 editor breaks hardlinks (Emacs and most Linux Kernel tools do
1498 editor breaks hardlinks (Emacs and most Linux Kernel tools do
1502 so). Also, this is not compatible with certain extensions that
1499 so). Also, this is not compatible with certain extensions that
1503 place their metadata under the .hg directory, such as mq.
1500 place their metadata under the .hg directory, such as mq.
1504
1501
1505 Mercurial will update the working directory to the first applicable
1502 Mercurial will update the working directory to the first applicable
1506 revision from this list:
1503 revision from this list:
1507
1504
1508 a) null if -U or the source repository has no changesets
1505 a) null if -U or the source repository has no changesets
1509 b) if -u . and the source repository is local, the first parent of
1506 b) if -u . and the source repository is local, the first parent of
1510 the source repository's working directory
1507 the source repository's working directory
1511 c) the changeset specified with -u (if a branch name, this means the
1508 c) the changeset specified with -u (if a branch name, this means the
1512 latest head of that branch)
1509 latest head of that branch)
1513 d) the changeset specified with -r
1510 d) the changeset specified with -r
1514 e) the tipmost head specified with -b
1511 e) the tipmost head specified with -b
1515 f) the tipmost head specified with the url#branch source syntax
1512 f) the tipmost head specified with the url#branch source syntax
1516 g) the revision marked with the '@' bookmark, if present
1513 g) the revision marked with the '@' bookmark, if present
1517 h) the tipmost head of the default branch
1514 h) the tipmost head of the default branch
1518 i) tip
1515 i) tip
1519
1516
1520 When cloning from servers that support it, Mercurial may fetch
1517 When cloning from servers that support it, Mercurial may fetch
1521 pre-generated data from a server-advertised URL. When this is done,
1518 pre-generated data from a server-advertised URL. When this is done,
1522 hooks operating on incoming changesets and changegroups may fire twice,
1519 hooks operating on incoming changesets and changegroups may fire twice,
1523 once for the bundle fetched from the URL and another for any additional
1520 once for the bundle fetched from the URL and another for any additional
1524 data not fetched from this URL. In addition, if an error occurs, the
1521 data not fetched from this URL. In addition, if an error occurs, the
1525 repository may be rolled back to a partial clone. This behavior may
1522 repository may be rolled back to a partial clone. This behavior may
1526 change in future releases. See :hg:`help -e clonebundles` for more.
1523 change in future releases. See :hg:`help -e clonebundles` for more.
1527
1524
1528 Examples:
1525 Examples:
1529
1526
1530 - clone a remote repository to a new directory named hg/::
1527 - clone a remote repository to a new directory named hg/::
1531
1528
1532 hg clone https://www.mercurial-scm.org/repo/hg/
1529 hg clone https://www.mercurial-scm.org/repo/hg/
1533
1530
1534 - create a lightweight local clone::
1531 - create a lightweight local clone::
1535
1532
1536 hg clone project/ project-feature/
1533 hg clone project/ project-feature/
1537
1534
1538 - clone from an absolute path on an ssh server (note double-slash)::
1535 - clone from an absolute path on an ssh server (note double-slash)::
1539
1536
1540 hg clone ssh://user@server//home/projects/alpha/
1537 hg clone ssh://user@server//home/projects/alpha/
1541
1538
1542 - do a high-speed clone over a LAN while checking out a
1539 - do a high-speed clone over a LAN while checking out a
1543 specified version::
1540 specified version::
1544
1541
1545 hg clone --uncompressed http://server/repo -u 1.5
1542 hg clone --uncompressed http://server/repo -u 1.5
1546
1543
1547 - create a repository without changesets after a particular revision::
1544 - create a repository without changesets after a particular revision::
1548
1545
1549 hg clone -r 04e544 experimental/ good/
1546 hg clone -r 04e544 experimental/ good/
1550
1547
1551 - clone (and track) a particular named branch::
1548 - clone (and track) a particular named branch::
1552
1549
1553 hg clone https://www.mercurial-scm.org/repo/hg/#stable
1550 hg clone https://www.mercurial-scm.org/repo/hg/#stable
1554
1551
1555 See :hg:`help urls` for details on specifying URLs.
1552 See :hg:`help urls` for details on specifying URLs.
1556
1553
1557 Returns 0 on success.
1554 Returns 0 on success.
1558 """
1555 """
1559 opts = pycompat.byteskwargs(opts)
1556 opts = pycompat.byteskwargs(opts)
1560 if opts.get('noupdate') and opts.get('updaterev'):
1557 if opts.get('noupdate') and opts.get('updaterev'):
1561 raise error.Abort(_("cannot specify both --noupdate and --updaterev"))
1558 raise error.Abort(_("cannot specify both --noupdate and --updaterev"))
1562
1559
1563 r = hg.clone(ui, opts, source, dest,
1560 r = hg.clone(ui, opts, source, dest,
1564 pull=opts.get('pull'),
1561 pull=opts.get('pull'),
1565 stream=opts.get('uncompressed'),
1562 stream=opts.get('uncompressed'),
1566 rev=opts.get('rev'),
1563 rev=opts.get('rev'),
1567 update=opts.get('updaterev') or not opts.get('noupdate'),
1564 update=opts.get('updaterev') or not opts.get('noupdate'),
1568 branch=opts.get('branch'),
1565 branch=opts.get('branch'),
1569 shareopts=opts.get('shareopts'))
1566 shareopts=opts.get('shareopts'))
1570
1567
1571 return r is None
1568 return r is None
1572
1569
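# Illustrative sketch (standalone, not from commands.py): the clone docstring
# above lists the fallback order (a)-(i) used to pick the revision that gets
# checked out. The sketch below models that "first applicable entry wins"
# rule with plain Python; the candidate names and values are hypothetical
# placeholders, not Mercurial API.

def pick_update_target(candidates):
    """Return the first candidate whose value is set, mimicking the a)-i) list."""
    for name, value in candidates:
        if value is not None:
            return name, value
    return 'tip', 'tip'

# Priority roughly follows the docstring: explicit -u, then -r, -b,
# url#branch, the '@' bookmark, the default branch head, and finally tip.
candidates = [
    ('--updaterev', None),
    ('--rev', None),
    ('--branch', None),
    ('url#branch', None),
    ('@ bookmark', '@'),
    ('default head', 'default'),
]
print(pick_update_target(candidates))   # -> ('@ bookmark', '@')
# End of sketch.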
1573 @command('^commit|ci',
1570 @command('^commit|ci',
1574 [('A', 'addremove', None,
1571 [('A', 'addremove', None,
1575 _('mark new/missing files as added/removed before committing')),
1572 _('mark new/missing files as added/removed before committing')),
1576 ('', 'close-branch', None,
1573 ('', 'close-branch', None,
1577 _('mark a branch head as closed')),
1574 _('mark a branch head as closed')),
1578 ('', 'amend', None, _('amend the parent of the working directory')),
1575 ('', 'amend', None, _('amend the parent of the working directory')),
1579 ('s', 'secret', None, _('use the secret phase for committing')),
1576 ('s', 'secret', None, _('use the secret phase for committing')),
1580 ('e', 'edit', None, _('invoke editor on commit messages')),
1577 ('e', 'edit', None, _('invoke editor on commit messages')),
1581 ('i', 'interactive', None, _('use interactive mode')),
1578 ('i', 'interactive', None, _('use interactive mode')),
1582 ] + walkopts + commitopts + commitopts2 + subrepoopts,
1579 ] + walkopts + commitopts + commitopts2 + subrepoopts,
1583 _('[OPTION]... [FILE]...'),
1580 _('[OPTION]... [FILE]...'),
1584 inferrepo=True)
1581 inferrepo=True)
1585 def commit(ui, repo, *pats, **opts):
1582 def commit(ui, repo, *pats, **opts):
1586 """commit the specified files or all outstanding changes
1583 """commit the specified files or all outstanding changes
1587
1584
1588 Commit changes to the given files into the repository. Unlike a
1585 Commit changes to the given files into the repository. Unlike a
1589 centralized SCM, this operation is a local operation. See
1586 centralized SCM, this operation is a local operation. See
1590 :hg:`push` for a way to actively distribute your changes.
1587 :hg:`push` for a way to actively distribute your changes.
1591
1588
1592 If a list of files is omitted, all changes reported by :hg:`status`
1589 If a list of files is omitted, all changes reported by :hg:`status`
1593 will be committed.
1590 will be committed.
1594
1591
1595 If you are committing the result of a merge, do not provide any
1592 If you are committing the result of a merge, do not provide any
1596 filenames or -I/-X filters.
1593 filenames or -I/-X filters.
1597
1594
1598 If no commit message is specified, Mercurial starts your
1595 If no commit message is specified, Mercurial starts your
1599 configured editor where you can enter a message. In case your
1596 configured editor where you can enter a message. In case your
1600 commit fails, you will find a backup of your message in
1597 commit fails, you will find a backup of your message in
1601 ``.hg/last-message.txt``.
1598 ``.hg/last-message.txt``.
1602
1599
1603 The --close-branch flag can be used to mark the current branch
1600 The --close-branch flag can be used to mark the current branch
1604 head closed. When all heads of a branch are closed, the branch
1601 head closed. When all heads of a branch are closed, the branch
1605 will be considered closed and no longer listed.
1602 will be considered closed and no longer listed.
1606
1603
1607 The --amend flag can be used to amend the parent of the
1604 The --amend flag can be used to amend the parent of the
1608 working directory with a new commit that contains the changes
1605 working directory with a new commit that contains the changes
1609 in the parent in addition to those currently reported by :hg:`status`,
1606 in the parent in addition to those currently reported by :hg:`status`,
1610 if there are any. The old commit is stored in a backup bundle in
1607 if there are any. The old commit is stored in a backup bundle in
1611 ``.hg/strip-backup`` (see :hg:`help bundle` and :hg:`help unbundle`
1608 ``.hg/strip-backup`` (see :hg:`help bundle` and :hg:`help unbundle`
1612 on how to restore it).
1609 on how to restore it).
1613
1610
1614 Message, user and date are taken from the amended commit unless
1611 Message, user and date are taken from the amended commit unless
1615 specified. When a message isn't specified on the command line,
1612 specified. When a message isn't specified on the command line,
1616 the editor will open with the message of the amended commit.
1613 the editor will open with the message of the amended commit.
1617
1614
1618 It is not possible to amend public changesets (see :hg:`help phases`)
1615 It is not possible to amend public changesets (see :hg:`help phases`)
1619 or changesets that have children.
1616 or changesets that have children.
1620
1617
1621 See :hg:`help dates` for a list of formats valid for -d/--date.
1618 See :hg:`help dates` for a list of formats valid for -d/--date.
1622
1619
1623 Returns 0 on success, 1 if nothing changed.
1620 Returns 0 on success, 1 if nothing changed.
1624
1621
1625 .. container:: verbose
1622 .. container:: verbose
1626
1623
1627 Examples:
1624 Examples:
1628
1625
1629 - commit all files ending in .py::
1626 - commit all files ending in .py::
1630
1627
1631 hg commit --include "set:**.py"
1628 hg commit --include "set:**.py"
1632
1629
1633 - commit all non-binary files::
1630 - commit all non-binary files::
1634
1631
1635 hg commit --exclude "set:binary()"
1632 hg commit --exclude "set:binary()"
1636
1633
1637 - amend the current commit and set the date to now::
1634 - amend the current commit and set the date to now::
1638
1635
1639 hg commit --amend --date now
1636 hg commit --amend --date now
1640 """
1637 """
1641 wlock = lock = None
1638 wlock = lock = None
1642 try:
1639 try:
1643 wlock = repo.wlock()
1640 wlock = repo.wlock()
1644 lock = repo.lock()
1641 lock = repo.lock()
1645 return _docommit(ui, repo, *pats, **opts)
1642 return _docommit(ui, repo, *pats, **opts)
1646 finally:
1643 finally:
1647 release(lock, wlock)
1644 release(lock, wlock)
1648
1645
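# Illustrative sketch (standalone, not from commands.py): commit() above takes
# the working-directory lock before the store lock and releases them in the
# reverse order via release(lock, wlock). The sketch below shows only that
# acquire/release discipline; threading.Lock is used as a stand-in and is not
# what repo.wlock()/repo.lock() actually return.

import threading

wlock = threading.Lock()
lock = threading.Lock()

wlock.acquire()          # working-directory lock first
try:
    lock.acquire()       # then the repository/store lock
    try:
        pass             # ... commit work happens here ...
    finally:
        lock.release()   # release in reverse order of acquisition
finally:
    wlock.release()
# End of sketch.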
1649 def _docommit(ui, repo, *pats, **opts):
1646 def _docommit(ui, repo, *pats, **opts):
1650 if opts.get(r'interactive'):
1647 if opts.get(r'interactive'):
1651 opts.pop(r'interactive')
1648 opts.pop(r'interactive')
1652 ret = cmdutil.dorecord(ui, repo, commit, None, False,
1649 ret = cmdutil.dorecord(ui, repo, commit, None, False,
1653 cmdutil.recordfilter, *pats,
1650 cmdutil.recordfilter, *pats,
1654 **opts)
1651 **opts)
1655 # ret can be 0 (no changes to record) or the value returned by
1652 # ret can be 0 (no changes to record) or the value returned by
1656 # commit(): 1 if nothing changed, or None on success.
1653 # commit(): 1 if nothing changed, or None on success.
1657 return 1 if ret == 0 else ret
1654 return 1 if ret == 0 else ret
1658
1655
1659 opts = pycompat.byteskwargs(opts)
1656 opts = pycompat.byteskwargs(opts)
1660 if opts.get('subrepos'):
1657 if opts.get('subrepos'):
1661 if opts.get('amend'):
1658 if opts.get('amend'):
1662 raise error.Abort(_('cannot amend with --subrepos'))
1659 raise error.Abort(_('cannot amend with --subrepos'))
1663 # Let --subrepos on the command line override config setting.
1660 # Let --subrepos on the command line override config setting.
1664 ui.setconfig('ui', 'commitsubrepos', True, 'commit')
1661 ui.setconfig('ui', 'commitsubrepos', True, 'commit')
1665
1662
1666 cmdutil.checkunfinished(repo, commit=True)
1663 cmdutil.checkunfinished(repo, commit=True)
1667
1664
1668 branch = repo[None].branch()
1665 branch = repo[None].branch()
1669 bheads = repo.branchheads(branch)
1666 bheads = repo.branchheads(branch)
1670
1667
1671 extra = {}
1668 extra = {}
1672 if opts.get('close_branch'):
1669 if opts.get('close_branch'):
1673 extra['close'] = 1
1670 extra['close'] = 1
1674
1671
1675 if not bheads:
1672 if not bheads:
1676 raise error.Abort(_('can only close branch heads'))
1673 raise error.Abort(_('can only close branch heads'))
1677 elif opts.get('amend'):
1674 elif opts.get('amend'):
1678 if repo[None].parents()[0].p1().branch() != branch and \
1675 if repo[None].parents()[0].p1().branch() != branch and \
1679 repo[None].parents()[0].p2().branch() != branch:
1676 repo[None].parents()[0].p2().branch() != branch:
1680 raise error.Abort(_('can only close branch heads'))
1677 raise error.Abort(_('can only close branch heads'))
1681
1678
1682 if opts.get('amend'):
1679 if opts.get('amend'):
1683 if ui.configbool('ui', 'commitsubrepos'):
1680 if ui.configbool('ui', 'commitsubrepos'):
1684 raise error.Abort(_('cannot amend with ui.commitsubrepos enabled'))
1681 raise error.Abort(_('cannot amend with ui.commitsubrepos enabled'))
1685
1682
1686 old = repo['.']
1683 old = repo['.']
1687 if not old.mutable():
1684 if not old.mutable():
1688 raise error.Abort(_('cannot amend public changesets'))
1685 raise error.Abort(_('cannot amend public changesets'))
1689 if len(repo[None].parents()) > 1:
1686 if len(repo[None].parents()) > 1:
1690 raise error.Abort(_('cannot amend while merging'))
1687 raise error.Abort(_('cannot amend while merging'))
1691 allowunstable = obsolete.isenabled(repo, obsolete.allowunstableopt)
1688 allowunstable = obsolete.isenabled(repo, obsolete.allowunstableopt)
1692 if not allowunstable and old.children():
1689 if not allowunstable and old.children():
1693 raise error.Abort(_('cannot amend changeset with children'))
1690 raise error.Abort(_('cannot amend changeset with children'))
1694
1691
1695 # Currently histedit gets confused if an amend happens while histedit
1692 # Currently histedit gets confused if an amend happens while histedit
1696 # is in progress. Since we have a checkunfinished command, we are
1693 # is in progress. Since we have a checkunfinished command, we are
1697 # temporarily honoring it.
1694 # temporarily honoring it.
1698 #
1695 #
1699 # Note: eventually this guard will be removed. Please do not expect
1696 # Note: eventually this guard will be removed. Please do not expect
1700 # this behavior to remain.
1697 # this behavior to remain.
1701 if not obsolete.isenabled(repo, obsolete.createmarkersopt):
1698 if not obsolete.isenabled(repo, obsolete.createmarkersopt):
1702 cmdutil.checkunfinished(repo)
1699 cmdutil.checkunfinished(repo)
1703
1700
1704 # commitfunc is used only for temporary amend commit by cmdutil.amend
1701 # commitfunc is used only for temporary amend commit by cmdutil.amend
1705 def commitfunc(ui, repo, message, match, opts):
1702 def commitfunc(ui, repo, message, match, opts):
1706 return repo.commit(message,
1703 return repo.commit(message,
1707 opts.get('user') or old.user(),
1704 opts.get('user') or old.user(),
1708 opts.get('date') or old.date(),
1705 opts.get('date') or old.date(),
1709 match,
1706 match,
1710 extra=extra)
1707 extra=extra)
1711
1708
1712 node = cmdutil.amend(ui, repo, commitfunc, old, extra, pats, opts)
1709 node = cmdutil.amend(ui, repo, commitfunc, old, extra, pats, opts)
1713 if node == old.node():
1710 if node == old.node():
1714 ui.status(_("nothing changed\n"))
1711 ui.status(_("nothing changed\n"))
1715 return 1
1712 return 1
1716 else:
1713 else:
1717 def commitfunc(ui, repo, message, match, opts):
1714 def commitfunc(ui, repo, message, match, opts):
1718 overrides = {}
1715 overrides = {}
1719 if opts.get('secret'):
1716 if opts.get('secret'):
1720 overrides[('phases', 'new-commit')] = 'secret'
1717 overrides[('phases', 'new-commit')] = 'secret'
1721
1718
1722 baseui = repo.baseui
1719 baseui = repo.baseui
1723 with baseui.configoverride(overrides, 'commit'):
1720 with baseui.configoverride(overrides, 'commit'):
1724 with ui.configoverride(overrides, 'commit'):
1721 with ui.configoverride(overrides, 'commit'):
1725 editform = cmdutil.mergeeditform(repo[None],
1722 editform = cmdutil.mergeeditform(repo[None],
1726 'commit.normal')
1723 'commit.normal')
1727 editor = cmdutil.getcommiteditor(
1724 editor = cmdutil.getcommiteditor(
1728 editform=editform, **pycompat.strkwargs(opts))
1725 editform=editform, **pycompat.strkwargs(opts))
1729 return repo.commit(message,
1726 return repo.commit(message,
1730 opts.get('user'),
1727 opts.get('user'),
1731 opts.get('date'),
1728 opts.get('date'),
1732 match,
1729 match,
1733 editor=editor,
1730 editor=editor,
1734 extra=extra)
1731 extra=extra)
1735
1732
1736 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
1733 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
1737
1734
1738 if not node:
1735 if not node:
1739 stat = cmdutil.postcommitstatus(repo, pats, opts)
1736 stat = cmdutil.postcommitstatus(repo, pats, opts)
1740 if stat[3]:
1737 if stat[3]:
1741 ui.status(_("nothing changed (%d missing files, see "
1738 ui.status(_("nothing changed (%d missing files, see "
1742 "'hg status')\n") % len(stat[3]))
1739 "'hg status')\n") % len(stat[3]))
1743 else:
1740 else:
1744 ui.status(_("nothing changed\n"))
1741 ui.status(_("nothing changed\n"))
1745 return 1
1742 return 1
1746
1743
1747 cmdutil.commitstatus(repo, node, branch, bheads, opts)
1744 cmdutil.commitstatus(repo, node, branch, bheads, opts)
1748
1745
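# Illustrative sketch (standalone, not from commands.py): _docommit() above
# reconciles two conventions: repo.commit() returns None on success (1 if
# nothing changed), while the interactive path (cmdutil.dorecord) returns 0
# when there was nothing to record, yet the command must report 1 whenever
# nothing changed. A toy model of that mapping; command_status is a made-up
# helper name.

def command_status(inner_result):
    """Map an inner result to the command's status (0/None ok, 1 no changes)."""
    if inner_result == 0:        # nothing to record in interactive mode
        return 1
    if inner_result is None:     # successful commit
        return 0
    return inner_result          # pass through explicit statuses such as 1

assert command_status(None) == 0
assert command_status(0) == 1
assert command_status(1) == 1
# End of sketch.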
1749 @command('config|showconfig|debugconfig',
1746 @command('config|showconfig|debugconfig',
1750 [('u', 'untrusted', None, _('show untrusted configuration options')),
1747 [('u', 'untrusted', None, _('show untrusted configuration options')),
1751 ('e', 'edit', None, _('edit user config')),
1748 ('e', 'edit', None, _('edit user config')),
1752 ('l', 'local', None, _('edit repository config')),
1749 ('l', 'local', None, _('edit repository config')),
1753 ('g', 'global', None, _('edit global config'))] + formatteropts,
1750 ('g', 'global', None, _('edit global config'))] + formatteropts,
1754 _('[-u] [NAME]...'),
1751 _('[-u] [NAME]...'),
1755 optionalrepo=True)
1752 optionalrepo=True)
1756 def config(ui, repo, *values, **opts):
1753 def config(ui, repo, *values, **opts):
1757 """show combined config settings from all hgrc files
1754 """show combined config settings from all hgrc files
1758
1755
1759 With no arguments, print names and values of all config items.
1756 With no arguments, print names and values of all config items.
1760
1757
1761 With one argument of the form section.name, print just the value
1758 With one argument of the form section.name, print just the value
1762 of that config item.
1759 of that config item.
1763
1760
1764 With multiple arguments, print names and values of all config
1761 With multiple arguments, print names and values of all config
1765 items with matching section names.
1762 items with matching section names.
1766
1763
1767 With --edit, start an editor on the user-level config file. With
1764 With --edit, start an editor on the user-level config file. With
1768 --global, edit the system-wide config file. With --local, edit the
1765 --global, edit the system-wide config file. With --local, edit the
1769 repository-level config file.
1766 repository-level config file.
1770
1767
1771 With --debug, the source (filename and line number) is printed
1768 With --debug, the source (filename and line number) is printed
1772 for each config item.
1769 for each config item.
1773
1770
1774 See :hg:`help config` for more information about config files.
1771 See :hg:`help config` for more information about config files.
1775
1772
1776 Returns 0 on success, 1 if NAME does not exist.
1773 Returns 0 on success, 1 if NAME does not exist.
1777
1774
1778 """
1775 """
1779
1776
1780 opts = pycompat.byteskwargs(opts)
1777 opts = pycompat.byteskwargs(opts)
1781 if opts.get('edit') or opts.get('local') or opts.get('global'):
1778 if opts.get('edit') or opts.get('local') or opts.get('global'):
1782 if opts.get('local') and opts.get('global'):
1779 if opts.get('local') and opts.get('global'):
1783 raise error.Abort(_("can't use --local and --global together"))
1780 raise error.Abort(_("can't use --local and --global together"))
1784
1781
1785 if opts.get('local'):
1782 if opts.get('local'):
1786 if not repo:
1783 if not repo:
1787 raise error.Abort(_("can't use --local outside a repository"))
1784 raise error.Abort(_("can't use --local outside a repository"))
1788 paths = [repo.vfs.join('hgrc')]
1785 paths = [repo.vfs.join('hgrc')]
1789 elif opts.get('global'):
1786 elif opts.get('global'):
1790 paths = rcutil.systemrcpath()
1787 paths = rcutil.systemrcpath()
1791 else:
1788 else:
1792 paths = rcutil.userrcpath()
1789 paths = rcutil.userrcpath()
1793
1790
1794 for f in paths:
1791 for f in paths:
1795 if os.path.exists(f):
1792 if os.path.exists(f):
1796 break
1793 break
1797 else:
1794 else:
1798 if opts.get('global'):
1795 if opts.get('global'):
1799 samplehgrc = uimod.samplehgrcs['global']
1796 samplehgrc = uimod.samplehgrcs['global']
1800 elif opts.get('local'):
1797 elif opts.get('local'):
1801 samplehgrc = uimod.samplehgrcs['local']
1798 samplehgrc = uimod.samplehgrcs['local']
1802 else:
1799 else:
1803 samplehgrc = uimod.samplehgrcs['user']
1800 samplehgrc = uimod.samplehgrcs['user']
1804
1801
1805 f = paths[0]
1802 f = paths[0]
1806 fp = open(f, "w")
1803 fp = open(f, "w")
1807 fp.write(samplehgrc)
1804 fp.write(samplehgrc)
1808 fp.close()
1805 fp.close()
1809
1806
1810 editor = ui.geteditor()
1807 editor = ui.geteditor()
1811 ui.system("%s \"%s\"" % (editor, f),
1808 ui.system("%s \"%s\"" % (editor, f),
1812 onerr=error.Abort, errprefix=_("edit failed"),
1809 onerr=error.Abort, errprefix=_("edit failed"),
1813 blockedtag='config_edit')
1810 blockedtag='config_edit')
1814 return
1811 return
1815 ui.pager('config')
1812 ui.pager('config')
1816 fm = ui.formatter('config', opts)
1813 fm = ui.formatter('config', opts)
1817 for t, f in rcutil.rccomponents():
1814 for t, f in rcutil.rccomponents():
1818 if t == 'path':
1815 if t == 'path':
1819 ui.debug('read config from: %s\n' % f)
1816 ui.debug('read config from: %s\n' % f)
1820 elif t == 'items':
1817 elif t == 'items':
1821 for section, name, value, source in f:
1818 for section, name, value, source in f:
1822 ui.debug('set config by: %s\n' % source)
1819 ui.debug('set config by: %s\n' % source)
1823 else:
1820 else:
1824 raise error.ProgrammingError('unknown rctype: %s' % t)
1821 raise error.ProgrammingError('unknown rctype: %s' % t)
1825 untrusted = bool(opts.get('untrusted'))
1822 untrusted = bool(opts.get('untrusted'))
1826 if values:
1823 if values:
1827 sections = [v for v in values if '.' not in v]
1824 sections = [v for v in values if '.' not in v]
1828 items = [v for v in values if '.' in v]
1825 items = [v for v in values if '.' in v]
1829 if len(items) > 1 or items and sections:
1826 if len(items) > 1 or items and sections:
1830 raise error.Abort(_('only one config item permitted'))
1827 raise error.Abort(_('only one config item permitted'))
1831 matched = False
1828 matched = False
1832 for section, name, value in ui.walkconfig(untrusted=untrusted):
1829 for section, name, value in ui.walkconfig(untrusted=untrusted):
1833 source = ui.configsource(section, name, untrusted)
1830 source = ui.configsource(section, name, untrusted)
1834 value = pycompat.bytestr(value)
1831 value = pycompat.bytestr(value)
1835 if fm.isplain():
1832 if fm.isplain():
1836 source = source or 'none'
1833 source = source or 'none'
1837 value = value.replace('\n', '\\n')
1834 value = value.replace('\n', '\\n')
1838 entryname = section + '.' + name
1835 entryname = section + '.' + name
1839 if values:
1836 if values:
1840 for v in values:
1837 for v in values:
1841 if v == section:
1838 if v == section:
1842 fm.startitem()
1839 fm.startitem()
1843 fm.condwrite(ui.debugflag, 'source', '%s: ', source)
1840 fm.condwrite(ui.debugflag, 'source', '%s: ', source)
1844 fm.write('name value', '%s=%s\n', entryname, value)
1841 fm.write('name value', '%s=%s\n', entryname, value)
1845 matched = True
1842 matched = True
1846 elif v == entryname:
1843 elif v == entryname:
1847 fm.startitem()
1844 fm.startitem()
1848 fm.condwrite(ui.debugflag, 'source', '%s: ', source)
1845 fm.condwrite(ui.debugflag, 'source', '%s: ', source)
1849 fm.write('value', '%s\n', value)
1846 fm.write('value', '%s\n', value)
1850 fm.data(name=entryname)
1847 fm.data(name=entryname)
1851 matched = True
1848 matched = True
1852 else:
1849 else:
1853 fm.startitem()
1850 fm.startitem()
1854 fm.condwrite(ui.debugflag, 'source', '%s: ', source)
1851 fm.condwrite(ui.debugflag, 'source', '%s: ', source)
1855 fm.write('name value', '%s=%s\n', entryname, value)
1852 fm.write('name value', '%s=%s\n', entryname, value)
1856 matched = True
1853 matched = True
1857 fm.end()
1854 fm.end()
1858 if matched:
1855 if matched:
1859 return 0
1856 return 0
1860 return 1
1857 return 1
1861
1858
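# Illustrative sketch (standalone, not from commands.py): the matching loop in
# config() above treats an argument without a dot as a whole section and an
# argument with a dot as an exact "section.name" item. The sketch below
# re-implements just that matching rule over a plain dict; the sample entries
# are made up.

def match_config(entries, values):
    """Yield (entryname, value) pairs matching the requested names."""
    for (section, name), value in sorted(entries.items()):
        entryname = section + '.' + name
        for v in values:
            if v == section or v == entryname:
                yield entryname, value

entries = {
    ('ui', 'username'): 'Jane <jane@example.com>',
    ('ui', 'editor'): 'vi',
    ('diff', 'git'): 'true',
}
print(list(match_config(entries, ['ui'])))        # whole section
print(list(match_config(entries, ['diff.git'])))  # exact section.name item
# End of sketch.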
1862 @command('copy|cp',
1859 @command('copy|cp',
1863 [('A', 'after', None, _('record a copy that has already occurred')),
1860 [('A', 'after', None, _('record a copy that has already occurred')),
1864 ('f', 'force', None, _('forcibly copy over an existing managed file')),
1861 ('f', 'force', None, _('forcibly copy over an existing managed file')),
1865 ] + walkopts + dryrunopts,
1862 ] + walkopts + dryrunopts,
1866 _('[OPTION]... [SOURCE]... DEST'))
1863 _('[OPTION]... [SOURCE]... DEST'))
1867 def copy(ui, repo, *pats, **opts):
1864 def copy(ui, repo, *pats, **opts):
1868 """mark files as copied for the next commit
1865 """mark files as copied for the next commit
1869
1866
1870 Mark dest as having copies of source files. If dest is a
1867 Mark dest as having copies of source files. If dest is a
1871 directory, copies are put in that directory. If dest is a file,
1868 directory, copies are put in that directory. If dest is a file,
1872 the source must be a single file.
1869 the source must be a single file.
1873
1870
1874 By default, this command copies the contents of files as they
1871 By default, this command copies the contents of files as they
1875 exist in the working directory. If invoked with -A/--after, the
1872 exist in the working directory. If invoked with -A/--after, the
1876 operation is recorded, but no copying is performed.
1873 operation is recorded, but no copying is performed.
1877
1874
1878 This command takes effect with the next commit. To undo a copy
1875 This command takes effect with the next commit. To undo a copy
1879 before that, see :hg:`revert`.
1876 before that, see :hg:`revert`.
1880
1877
1881 Returns 0 on success, 1 if errors are encountered.
1878 Returns 0 on success, 1 if errors are encountered.
1882 """
1879 """
1883 opts = pycompat.byteskwargs(opts)
1880 opts = pycompat.byteskwargs(opts)
1884 with repo.wlock(False):
1881 with repo.wlock(False):
1885 return cmdutil.copy(ui, repo, pats, opts)
1882 return cmdutil.copy(ui, repo, pats, opts)
1886
1883
1887 @command('^diff',
1884 @command('^diff',
1888 [('r', 'rev', [], _('revision'), _('REV')),
1885 [('r', 'rev', [], _('revision'), _('REV')),
1889 ('c', 'change', '', _('change made by revision'), _('REV'))
1886 ('c', 'change', '', _('change made by revision'), _('REV'))
1890 ] + diffopts + diffopts2 + walkopts + subrepoopts,
1887 ] + diffopts + diffopts2 + walkopts + subrepoopts,
1891 _('[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...'),
1888 _('[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...'),
1892 inferrepo=True)
1889 inferrepo=True)
1893 def diff(ui, repo, *pats, **opts):
1890 def diff(ui, repo, *pats, **opts):
1894 """diff repository (or selected files)
1891 """diff repository (or selected files)
1895
1892
1896 Show differences between revisions for the specified files.
1893 Show differences between revisions for the specified files.
1897
1894
1898 Differences between files are shown using the unified diff format.
1895 Differences between files are shown using the unified diff format.
1899
1896
1900 .. note::
1897 .. note::
1901
1898
1902 :hg:`diff` may generate unexpected results for merges, as it will
1899 :hg:`diff` may generate unexpected results for merges, as it will
1903 default to comparing against the working directory's first
1900 default to comparing against the working directory's first
1904 parent changeset if no revisions are specified.
1901 parent changeset if no revisions are specified.
1905
1902
1906 When two revision arguments are given, then changes are shown
1903 When two revision arguments are given, then changes are shown
1907 between those revisions. If only one revision is specified then
1904 between those revisions. If only one revision is specified then
1908 that revision is compared to the working directory, and, when no
1905 that revision is compared to the working directory, and, when no
1909 revisions are specified, the working directory files are compared
1906 revisions are specified, the working directory files are compared
1910 to its first parent.
1907 to its first parent.
1911
1908
1912 Alternatively you can specify -c/--change with a revision to see
1909 Alternatively you can specify -c/--change with a revision to see
1913 the changes in that changeset relative to its first parent.
1910 the changes in that changeset relative to its first parent.
1914
1911
1915 Without the -a/--text option, diff will avoid generating diffs of
1912 Without the -a/--text option, diff will avoid generating diffs of
1916 files it detects as binary. With -a, diff will generate a diff
1913 files it detects as binary. With -a, diff will generate a diff
1917 anyway, probably with undesirable results.
1914 anyway, probably with undesirable results.
1918
1915
1919 Use the -g/--git option to generate diffs in the git extended diff
1916 Use the -g/--git option to generate diffs in the git extended diff
1920 format. For more information, read :hg:`help diffs`.
1917 format. For more information, read :hg:`help diffs`.
1921
1918
1922 .. container:: verbose
1919 .. container:: verbose
1923
1920
1924 Examples:
1921 Examples:
1925
1922
1926 - compare a file in the current working directory to its parent::
1923 - compare a file in the current working directory to its parent::
1927
1924
1928 hg diff foo.c
1925 hg diff foo.c
1929
1926
1930 - compare two historical versions of a directory, with rename info::
1927 - compare two historical versions of a directory, with rename info::
1931
1928
1932 hg diff --git -r 1.0:1.2 lib/
1929 hg diff --git -r 1.0:1.2 lib/
1933
1930
1934 - get change stats relative to the last change on some date::
1931 - get change stats relative to the last change on some date::
1935
1932
1936 hg diff --stat -r "date('may 2')"
1933 hg diff --stat -r "date('may 2')"
1937
1934
1938 - diff all newly-added files that contain a keyword::
1935 - diff all newly-added files that contain a keyword::
1939
1936
1940 hg diff "set:added() and grep(GNU)"
1937 hg diff "set:added() and grep(GNU)"
1941
1938
1942 - compare a revision and its parents::
1939 - compare a revision and its parents::
1943
1940
1944 hg diff -c 9353 # compare against first parent
1941 hg diff -c 9353 # compare against first parent
1945 hg diff -r 9353^:9353 # same using revset syntax
1942 hg diff -r 9353^:9353 # same using revset syntax
1946 hg diff -r 9353^2:9353 # compare against the second parent
1943 hg diff -r 9353^2:9353 # compare against the second parent
1947
1944
1948 Returns 0 on success.
1945 Returns 0 on success.
1949 """
1946 """
1950
1947
1951 opts = pycompat.byteskwargs(opts)
1948 opts = pycompat.byteskwargs(opts)
1952 revs = opts.get('rev')
1949 revs = opts.get('rev')
1953 change = opts.get('change')
1950 change = opts.get('change')
1954 stat = opts.get('stat')
1951 stat = opts.get('stat')
1955 reverse = opts.get('reverse')
1952 reverse = opts.get('reverse')
1956
1953
1957 if revs and change:
1954 if revs and change:
1958 msg = _('cannot specify --rev and --change at the same time')
1955 msg = _('cannot specify --rev and --change at the same time')
1959 raise error.Abort(msg)
1956 raise error.Abort(msg)
1960 elif change:
1957 elif change:
1961 node2 = scmutil.revsingle(repo, change, None).node()
1958 node2 = scmutil.revsingle(repo, change, None).node()
1962 node1 = repo[node2].p1().node()
1959 node1 = repo[node2].p1().node()
1963 else:
1960 else:
1964 node1, node2 = scmutil.revpair(repo, revs)
1961 node1, node2 = scmutil.revpair(repo, revs)
1965
1962
1966 if reverse:
1963 if reverse:
1967 node1, node2 = node2, node1
1964 node1, node2 = node2, node1
1968
1965
1969 diffopts = patch.diffallopts(ui, opts)
1966 diffopts = patch.diffallopts(ui, opts)
1970 m = scmutil.match(repo[node2], pats, opts)
1967 m = scmutil.match(repo[node2], pats, opts)
1971 ui.pager('diff')
1968 ui.pager('diff')
1972 cmdutil.diffordiffstat(ui, repo, diffopts, node1, node2, m, stat=stat,
1969 cmdutil.diffordiffstat(ui, repo, diffopts, node1, node2, m, stat=stat,
1973 listsubrepos=opts.get('subrepos'),
1970 listsubrepos=opts.get('subrepos'),
1974 root=opts.get('root'))
1971 root=opts.get('root'))
1975
1972
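# Illustrative sketch (standalone, not from commands.py): diff() above resolves
# what to compare: --rev and --change are mutually exclusive, --change REV
# compares REV against its first parent, and otherwise the endpoints come from
# the (possibly empty) --rev list. A toy model of that decision using plain
# strings instead of changeset nodes; the labels '^' and 'wdir()' are only
# placeholders here.

def pick_endpoints(revs, change):
    """Return (old, new) labels for the comparison, mirroring diff()'s branches."""
    if revs and change:
        raise ValueError('cannot specify --rev and --change at the same time')
    if change:
        return change + '^', change          # changeset vs. its first parent
    if len(revs) == 2:
        return revs[0], revs[1]              # explicit pair of revisions
    if len(revs) == 1:
        return revs[0], 'wdir()'             # one revision vs. working directory
    return '.', 'wdir()'                     # default: wdir vs. its first parent

print(pick_endpoints([], '9353'))            # ('9353^', '9353')
print(pick_endpoints(['1.0', '1.2'], None))  # ('1.0', '1.2')
# End of sketch.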
1976 @command('^export',
1973 @command('^export',
1977 [('o', 'output', '',
1974 [('o', 'output', '',
1978 _('print output to file with formatted name'), _('FORMAT')),
1975 _('print output to file with formatted name'), _('FORMAT')),
1979 ('', 'switch-parent', None, _('diff against the second parent')),
1976 ('', 'switch-parent', None, _('diff against the second parent')),
1980 ('r', 'rev', [], _('revisions to export'), _('REV')),
1977 ('r', 'rev', [], _('revisions to export'), _('REV')),
1981 ] + diffopts,
1978 ] + diffopts,
1982 _('[OPTION]... [-o OUTFILESPEC] [-r] [REV]...'))
1979 _('[OPTION]... [-o OUTFILESPEC] [-r] [REV]...'))
1983 def export(ui, repo, *changesets, **opts):
1980 def export(ui, repo, *changesets, **opts):
1984 """dump the header and diffs for one or more changesets
1981 """dump the header and diffs for one or more changesets
1985
1982
1986 Print the changeset header and diffs for one or more revisions.
1983 Print the changeset header and diffs for one or more revisions.
1987 If no revision is given, the parent of the working directory is used.
1984 If no revision is given, the parent of the working directory is used.
1988
1985
1989 The information shown in the changeset header is: author, date,
1986 The information shown in the changeset header is: author, date,
1990 branch name (if non-default), changeset hash, parent(s) and commit
1987 branch name (if non-default), changeset hash, parent(s) and commit
1991 comment.
1988 comment.
1992
1989
1993 .. note::
1990 .. note::
1994
1991
1995 :hg:`export` may generate unexpected diff output for merge
1992 :hg:`export` may generate unexpected diff output for merge
1996 changesets, as it will compare the merge changeset against its
1993 changesets, as it will compare the merge changeset against its
1997 first parent only.
1994 first parent only.
1998
1995
1999 Output may be to a file, in which case the name of the file is
1996 Output may be to a file, in which case the name of the file is
2000 given using a format string. The formatting rules are as follows:
1997 given using a format string. The formatting rules are as follows:
2001
1998
2002 :``%%``: literal "%" character
1999 :``%%``: literal "%" character
2003 :``%H``: changeset hash (40 hexadecimal digits)
2000 :``%H``: changeset hash (40 hexadecimal digits)
2004 :``%N``: number of patches being generated
2001 :``%N``: number of patches being generated
2005 :``%R``: changeset revision number
2002 :``%R``: changeset revision number
2006 :``%b``: basename of the exporting repository
2003 :``%b``: basename of the exporting repository
2007 :``%h``: short-form changeset hash (12 hexadecimal digits)
2004 :``%h``: short-form changeset hash (12 hexadecimal digits)
2008 :``%m``: first line of the commit message (only alphanumeric characters)
2005 :``%m``: first line of the commit message (only alphanumeric characters)
2009 :``%n``: zero-padded sequence number, starting at 1
2006 :``%n``: zero-padded sequence number, starting at 1
2010 :``%r``: zero-padded changeset revision number
2007 :``%r``: zero-padded changeset revision number
2011
2008
2012 Without the -a/--text option, export will avoid generating diffs
2009 Without the -a/--text option, export will avoid generating diffs
2013 of files it detects as binary. With -a, export will generate a
2010 of files it detects as binary. With -a, export will generate a
2014 diff anyway, probably with undesirable results.
2011 diff anyway, probably with undesirable results.
2015
2012
2016 Use the -g/--git option to generate diffs in the git extended diff
2013 Use the -g/--git option to generate diffs in the git extended diff
2017 format. See :hg:`help diffs` for more information.
2014 format. See :hg:`help diffs` for more information.
2018
2015
2019 With the --switch-parent option, the diff will be against the
2016 With the --switch-parent option, the diff will be against the
2020 second parent. It can be useful to review a merge.
2017 second parent. It can be useful to review a merge.
2021
2018
2022 .. container:: verbose
2019 .. container:: verbose
2023
2020
2024 Examples:
2021 Examples:
2025
2022
2026 - use export and import to transplant a bugfix to the current
2023 - use export and import to transplant a bugfix to the current
2027 branch::
2024 branch::
2028
2025
2029 hg export -r 9353 | hg import -
2026 hg export -r 9353 | hg import -
2030
2027
2031 - export all the changesets between two revisions to a file with
2028 - export all the changesets between two revisions to a file with
2032 rename information::
2029 rename information::
2033
2030
2034 hg export --git -r 123:150 > changes.txt
2031 hg export --git -r 123:150 > changes.txt
2035
2032
2036 - split outgoing changes into a series of patches with
2033 - split outgoing changes into a series of patches with
2037 descriptive names::
2034 descriptive names::
2038
2035
2039 hg export -r "outgoing()" -o "%n-%m.patch"
2036 hg export -r "outgoing()" -o "%n-%m.patch"
2040
2037
2041 Returns 0 on success.
2038 Returns 0 on success.
2042 """
2039 """
2043 opts = pycompat.byteskwargs(opts)
2040 opts = pycompat.byteskwargs(opts)
2044 changesets += tuple(opts.get('rev', []))
2041 changesets += tuple(opts.get('rev', []))
2045 if not changesets:
2042 if not changesets:
2046 changesets = ['.']
2043 changesets = ['.']
2047 revs = scmutil.revrange(repo, changesets)
2044 revs = scmutil.revrange(repo, changesets)
2048 if not revs:
2045 if not revs:
2049 raise error.Abort(_("export requires at least one changeset"))
2046 raise error.Abort(_("export requires at least one changeset"))
2050 if len(revs) > 1:
2047 if len(revs) > 1:
2051 ui.note(_('exporting patches:\n'))
2048 ui.note(_('exporting patches:\n'))
2052 else:
2049 else:
2053 ui.note(_('exporting patch:\n'))
2050 ui.note(_('exporting patch:\n'))
2054 ui.pager('export')
2051 ui.pager('export')
2055 cmdutil.export(repo, revs, template=opts.get('output'),
2052 cmdutil.export(repo, revs, template=opts.get('output'),
2056 switch_parent=opts.get('switch_parent'),
2053 switch_parent=opts.get('switch_parent'),
2057 opts=patch.diffallopts(ui, opts))
2054 opts=patch.diffallopts(ui, opts))
2058
2055
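# Illustrative sketch (standalone, not from commands.py): the -o FORMAT rules
# documented in the export docstring above can be modelled with a single
# substitution pass. This is not the cmdutil.export() implementation; the
# changeset data is made up and the zero-padding width (width of the patch
# count) is an assumption.

import re

def expand_name(fmt, seqno, total, rev, node, desc, repobase):
    width = len(str(total))
    subs = {
        '%': '%',                                       # %% -> literal %
        'H': node,                                      # full 40-digit hash
        'h': node[:12],                                 # short hash
        'N': str(total),                                # number of patches
        'n': str(seqno).zfill(width),                   # zero-padded sequence no.
        'R': str(rev),                                  # revision number
        'r': str(rev).zfill(width),                     # zero-padded revision no.
        'b': repobase,                                  # basename of the repo
        'm': re.sub(r'[^\w]', '_', desc.splitlines()[0]),  # first message line
    }
    return re.sub('%(.)', lambda m: subs.get(m.group(1), m.group(0)), fmt)

print(expand_name('%n-%m.patch', 1, 12, 9353,
                  'a1b2c3d4e5' * 4, 'fix the bug', 'hg'))   # -> 01-fix_the_bug.patch
# End of sketch.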
2059 @command('files',
2056 @command('files',
2060 [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
2057 [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
2061 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
2058 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
2062 ] + walkopts + formatteropts + subrepoopts,
2059 ] + walkopts + formatteropts + subrepoopts,
2063 _('[OPTION]... [FILE]...'))
2060 _('[OPTION]... [FILE]...'))
2064 def files(ui, repo, *pats, **opts):
2061 def files(ui, repo, *pats, **opts):
2065 """list tracked files
2062 """list tracked files
2066
2063
2067 Print files under Mercurial control in the working directory or
2064 Print files under Mercurial control in the working directory or
2068 specified revision for given files (excluding removed files).
2065 specified revision for given files (excluding removed files).
2069 Files can be specified as filenames or filesets.
2066 Files can be specified as filenames or filesets.
2070
2067
2071 If no files are given to match, this command prints the names
2068 If no files are given to match, this command prints the names
2072 of all files under Mercurial control.
2069 of all files under Mercurial control.
2073
2070
2074 .. container:: verbose
2071 .. container:: verbose
2075
2072
2076 Examples:
2073 Examples:
2077
2074
2078 - list all files under the current directory::
2075 - list all files under the current directory::
2079
2076
2080 hg files .
2077 hg files .
2081
2078
2082 - show sizes and flags for the current revision::
2079 - show sizes and flags for the current revision::
2083
2080
2084 hg files -vr .
2081 hg files -vr .
2085
2082
2086 - list all files named README::
2083 - list all files named README::
2087
2084
2088 hg files -I "**/README"
2085 hg files -I "**/README"
2089
2086
2090 - list all binary files::
2087 - list all binary files::
2091
2088
2092 hg files "set:binary()"
2089 hg files "set:binary()"
2093
2090
2094 - find files containing a regular expression::
2091 - find files containing a regular expression::
2095
2092
2096 hg files "set:grep('bob')"
2093 hg files "set:grep('bob')"
2097
2094
2098 - search tracked file contents with xargs and grep::
2095 - search tracked file contents with xargs and grep::
2099
2096
2100 hg files -0 | xargs -0 grep foo
2097 hg files -0 | xargs -0 grep foo
2101
2098
2102 See :hg:`help patterns` and :hg:`help filesets` for more information
2099 See :hg:`help patterns` and :hg:`help filesets` for more information
2103 on specifying file patterns.
2100 on specifying file patterns.
2104
2101
2105 Returns 0 if a match is found, 1 otherwise.
2102 Returns 0 if a match is found, 1 otherwise.
2106
2103
2107 """
2104 """
2108
2105
2109 opts = pycompat.byteskwargs(opts)
2106 opts = pycompat.byteskwargs(opts)
2110 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
2107 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
2111
2108
2112 end = '\n'
2109 end = '\n'
2113 if opts.get('print0'):
2110 if opts.get('print0'):
2114 end = '\0'
2111 end = '\0'
2115 fmt = '%s' + end
2112 fmt = '%s' + end
2116
2113
2117 m = scmutil.match(ctx, pats, opts)
2114 m = scmutil.match(ctx, pats, opts)
2118 ui.pager('files')
2115 ui.pager('files')
2119 with ui.formatter('files', opts) as fm:
2116 with ui.formatter('files', opts) as fm:
2120 return cmdutil.files(ui, ctx, m, fm, fmt, opts.get('subrepos'))
2117 return cmdutil.files(ui, ctx, m, fm, fmt, opts.get('subrepos'))
2121
2118
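# Illustrative sketch (standalone, not from commands.py): files() above swaps
# the line terminator for NUL when --print0 is given so the output can be piped
# safely to `xargs -0`, even for names containing spaces or newlines. The same
# idea in isolation, with a made-up file list.

import sys

def print_files(names, print0=False):
    end = '\0' if print0 else '\n'
    for name in names:
        sys.stdout.write('%s%s' % (name, end))

print_files(['README', 'setup.py'])            # newline-terminated
print_files(['with space.txt'], print0=True)   # NUL-terminated, xargs -0 safe
# End of sketch.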
2122 @command('^forget', walkopts, _('[OPTION]... FILE...'), inferrepo=True)
2119 @command('^forget', walkopts, _('[OPTION]... FILE...'), inferrepo=True)
2123 def forget(ui, repo, *pats, **opts):
2120 def forget(ui, repo, *pats, **opts):
2124 """forget the specified files on the next commit
2121 """forget the specified files on the next commit
2125
2122
2126 Mark the specified files so they will no longer be tracked
2123 Mark the specified files so they will no longer be tracked
2127 after the next commit.
2124 after the next commit.
2128
2125
2129 This only removes files from the current branch, not from the
2126 This only removes files from the current branch, not from the
2130 entire project history, and it does not delete them from the
2127 entire project history, and it does not delete them from the
2131 working directory.
2128 working directory.
2132
2129
2133 To delete the file from the working directory, see :hg:`remove`.
2130 To delete the file from the working directory, see :hg:`remove`.
2134
2131
2135 To undo a forget before the next commit, see :hg:`add`.
2132 To undo a forget before the next commit, see :hg:`add`.
2136
2133
2137 .. container:: verbose
2134 .. container:: verbose
2138
2135
2139 Examples:
2136 Examples:
2140
2137
2141 - forget newly-added binary files::
2138 - forget newly-added binary files::
2142
2139
2143 hg forget "set:added() and binary()"
2140 hg forget "set:added() and binary()"
2144
2141
2145 - forget files that would be excluded by .hgignore::
2142 - forget files that would be excluded by .hgignore::
2146
2143
2147 hg forget "set:hgignore()"
2144 hg forget "set:hgignore()"
2148
2145
2149 Returns 0 on success.
2146 Returns 0 on success.
2150 """
2147 """
2151
2148
2152 opts = pycompat.byteskwargs(opts)
2149 opts = pycompat.byteskwargs(opts)
2153 if not pats:
2150 if not pats:
2154 raise error.Abort(_('no files specified'))
2151 raise error.Abort(_('no files specified'))
2155
2152
2156 m = scmutil.match(repo[None], pats, opts)
2153 m = scmutil.match(repo[None], pats, opts)
2157 rejected = cmdutil.forget(ui, repo, m, prefix="", explicitonly=False)[0]
2154 rejected = cmdutil.forget(ui, repo, m, prefix="", explicitonly=False)[0]
2158 return rejected and 1 or 0
2155 return rejected and 1 or 0
2159
2156
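# Illustrative sketch (standalone, not from commands.py): forget() above stops
# tracking files for the next commit but leaves them in the working directory.
# A toy model of that distinction: "tracked" stands in for repository state,
# "on_disk" for the working directory, and only the former changes. The exit
# convention (1 if any name was rejected, else 0) mirrors the command's return.

tracked = {'README', 'build.log', 'setup.py'}
on_disk = {'README', 'build.log', 'setup.py'}

def forget_paths(paths):
    rejected = [p for p in paths if p not in tracked]
    tracked.difference_update(paths)
    return 1 if rejected else 0

status = forget_paths(['build.log'])
assert 'build.log' not in tracked and 'build.log' in on_disk and status == 0
# End of sketch.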
2160 @command(
2157 @command(
2161 'graft',
2158 'graft',
2162 [('r', 'rev', [], _('revisions to graft'), _('REV')),
2159 [('r', 'rev', [], _('revisions to graft'), _('REV')),
2163 ('c', 'continue', False, _('resume interrupted graft')),
2160 ('c', 'continue', False, _('resume interrupted graft')),
2164 ('e', 'edit', False, _('invoke editor on commit messages')),
2161 ('e', 'edit', False, _('invoke editor on commit messages')),
2165 ('', 'log', None, _('append graft info to log message')),
2162 ('', 'log', None, _('append graft info to log message')),
2166 ('f', 'force', False, _('force graft')),
2163 ('f', 'force', False, _('force graft')),
2167 ('D', 'currentdate', False,
2164 ('D', 'currentdate', False,
2168 _('record the current date as commit date')),
2165 _('record the current date as commit date')),
2169 ('U', 'currentuser', False,
2166 ('U', 'currentuser', False,
2170 _('record the current user as committer'))]
2167 _('record the current user as committer'))]
2171 + commitopts2 + mergetoolopts + dryrunopts,
2168 + commitopts2 + mergetoolopts + dryrunopts,
2172 _('[OPTION]... [-r REV]... REV...'))
2169 _('[OPTION]... [-r REV]... REV...'))
2173 def graft(ui, repo, *revs, **opts):
2170 def graft(ui, repo, *revs, **opts):
2174 '''copy changes from other branches onto the current branch
2171 '''copy changes from other branches onto the current branch
2175
2172
2176 This command uses Mercurial's merge logic to copy individual
2173 This command uses Mercurial's merge logic to copy individual
2177 changes from other branches without merging branches in the
2174 changes from other branches without merging branches in the
2178 history graph. This is sometimes known as 'backporting' or
2175 history graph. This is sometimes known as 'backporting' or
2179 'cherry-picking'. By default, graft will copy user, date, and
2176 'cherry-picking'. By default, graft will copy user, date, and
2180 description from the source changesets.
2177 description from the source changesets.
2181
2178
2182 Changesets that are ancestors of the current revision, that have
2179 Changesets that are ancestors of the current revision, that have
2183 already been grafted, or that are merges will be skipped.
2180 already been grafted, or that are merges will be skipped.
2184
2181
2185 If --log is specified, log messages will have a comment appended
2182 If --log is specified, log messages will have a comment appended
2186 of the form::
2183 of the form::
2187
2184
2188 (grafted from CHANGESETHASH)
2185 (grafted from CHANGESETHASH)
2189
2186
2190 If --force is specified, revisions will be grafted even if they
2187 If --force is specified, revisions will be grafted even if they
2191 are already ancestors of or have been grafted to the destination.
2188 are already ancestors of or have been grafted to the destination.
2192 This is useful when the revisions have since been backed out.
2189 This is useful when the revisions have since been backed out.
2193
2190
2194 If a graft merge results in conflicts, the graft process is
2191 If a graft merge results in conflicts, the graft process is
2195 interrupted so that the current merge can be manually resolved.
2192 interrupted so that the current merge can be manually resolved.
2196 Once all conflicts are addressed, the graft process can be
2193 Once all conflicts are addressed, the graft process can be
2197 continued with the -c/--continue option.
2194 continued with the -c/--continue option.
2198
2195
2199 .. note::
2196 .. note::
2200
2197
2201 The -c/--continue option does not reapply earlier options, except
2198 The -c/--continue option does not reapply earlier options, except
2202 for --force.
2199 for --force.
2203
2200
2204 .. container:: verbose
2201 .. container:: verbose
2205
2202
2206 Examples:
2203 Examples:
2207
2204
2208 - copy a single change to the stable branch and edit its description::
2205 - copy a single change to the stable branch and edit its description::
2209
2206
2210 hg update stable
2207 hg update stable
2211 hg graft --edit 9393
2208 hg graft --edit 9393
2212
2209
2213 - graft a range of changesets with one exception, updating dates::
2210 - graft a range of changesets with one exception, updating dates::
2214
2211
2215 hg graft -D "2085::2093 and not 2091"
2212 hg graft -D "2085::2093 and not 2091"
2216
2213
2217 - continue a graft after resolving conflicts::
2214 - continue a graft after resolving conflicts::
2218
2215
2219 hg graft -c
2216 hg graft -c
2220
2217
2221 - show the source of a grafted changeset::
2218 - show the source of a grafted changeset::
2222
2219
2223 hg log --debug -r .
2220 hg log --debug -r .
2224
2221
2225 - show revisions sorted by date::
2222 - show revisions sorted by date::
2226
2223
2227 hg log -r "sort(all(), date)"
2224 hg log -r "sort(all(), date)"
2228
2225
2229 See :hg:`help revisions` for more about specifying revisions.
2226 See :hg:`help revisions` for more about specifying revisions.
2230
2227
2231 Returns 0 on successful completion.
2228 Returns 0 on successful completion.
2232 '''
2229 '''
2233 with repo.wlock():
2230 with repo.wlock():
2234 return _dograft(ui, repo, *revs, **opts)
2231 return _dograft(ui, repo, *revs, **opts)
2235
2232
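# Illustrative sketch (standalone, not from commands.py): as the graft
# docstring says, revisions that are merges or that are already ancestors of
# the destination are skipped; _dograft() below does this against the real
# changelog and revsets. A toy model over a hand-written parent map.

parents = {1: (0,), 2: (1,), 3: (1,), 4: (2, 3), 5: (4,)}   # rev 4 is a merge

def ancestors(parent_map, rev):
    """Collect rev and all of its ancestors in the toy parent map."""
    seen, stack = set(), [rev]
    while stack:
        r = stack.pop()
        if r not in seen:
            seen.add(r)
            stack.extend(parent_map.get(r, ()))
    return seen

def graftable(parent_map, dest, revs):
    """Keep only revisions that are neither merges nor ancestors of dest."""
    anc = ancestors(parent_map, dest)
    return [r for r in revs
            if r not in anc                       # already an ancestor: skip
            and len(parent_map.get(r, ())) < 2]   # merge revision: skip

print(graftable(parents, 3, [1, 2, 4, 5]))   # -> [2, 5]; 1 is an ancestor, 4 a merge
# End of sketch.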
2236 def _dograft(ui, repo, *revs, **opts):
2233 def _dograft(ui, repo, *revs, **opts):
2237 opts = pycompat.byteskwargs(opts)
2234 opts = pycompat.byteskwargs(opts)
2238 if revs and opts.get('rev'):
2235 if revs and opts.get('rev'):
2239 ui.warn(_('warning: inconsistent use of --rev might give unexpected '
2236 ui.warn(_('warning: inconsistent use of --rev might give unexpected '
2240 'revision ordering!\n'))
2237 'revision ordering!\n'))
2241
2238
2242 revs = list(revs)
2239 revs = list(revs)
2243 revs.extend(opts.get('rev'))
2240 revs.extend(opts.get('rev'))
2244
2241
2245 if not opts.get('user') and opts.get('currentuser'):
2242 if not opts.get('user') and opts.get('currentuser'):
2246 opts['user'] = ui.username()
2243 opts['user'] = ui.username()
2247 if not opts.get('date') and opts.get('currentdate'):
2244 if not opts.get('date') and opts.get('currentdate'):
2248 opts['date'] = "%d %d" % util.makedate()
2245 opts['date'] = "%d %d" % util.makedate()
2249
2246
2250 editor = cmdutil.getcommiteditor(editform='graft', **opts)
2247 editor = cmdutil.getcommiteditor(editform='graft', **opts)
2251
2248
2252 cont = False
2249 cont = False
2253 if opts.get('continue'):
2250 if opts.get('continue'):
2254 cont = True
2251 cont = True
2255 if revs:
2252 if revs:
2256 raise error.Abort(_("can't specify --continue and revisions"))
2253 raise error.Abort(_("can't specify --continue and revisions"))
2257 # read in unfinished revisions
2254 # read in unfinished revisions
2258 try:
2255 try:
2259 nodes = repo.vfs.read('graftstate').splitlines()
2256 nodes = repo.vfs.read('graftstate').splitlines()
2260 revs = [repo[node].rev() for node in nodes]
2257 revs = [repo[node].rev() for node in nodes]
2261 except IOError as inst:
2258 except IOError as inst:
2262 if inst.errno != errno.ENOENT:
2259 if inst.errno != errno.ENOENT:
2263 raise
2260 raise
2264 cmdutil.wrongtooltocontinue(repo, _('graft'))
2261 cmdutil.wrongtooltocontinue(repo, _('graft'))
2265 else:
2262 else:
2266 cmdutil.checkunfinished(repo)
2263 cmdutil.checkunfinished(repo)
2267 cmdutil.bailifchanged(repo)
2264 cmdutil.bailifchanged(repo)
2268 if not revs:
2265 if not revs:
2269 raise error.Abort(_('no revisions specified'))
2266 raise error.Abort(_('no revisions specified'))
2270 revs = scmutil.revrange(repo, revs)
2267 revs = scmutil.revrange(repo, revs)
2271
2268
2272 skipped = set()
2269 skipped = set()
2273 # check for merges
2270 # check for merges
2274 for rev in repo.revs('%ld and merge()', revs):
2271 for rev in repo.revs('%ld and merge()', revs):
2275 ui.warn(_('skipping ungraftable merge revision %s\n') % rev)
2272 ui.warn(_('skipping ungraftable merge revision %s\n') % rev)
2276 skipped.add(rev)
2273 skipped.add(rev)
2277 revs = [r for r in revs if r not in skipped]
2274 revs = [r for r in revs if r not in skipped]
2278 if not revs:
2275 if not revs:
2279 return -1
2276 return -1
2280
2277
2281 # Don't check in the --continue case, in effect retaining --force across
2278 # Don't check in the --continue case, in effect retaining --force across
2282 # --continues. That's because without --force, any revisions we decided to
2279 # --continues. That's because without --force, any revisions we decided to
2283 # skip would have been filtered out here, so they wouldn't have made their
2280 # skip would have been filtered out here, so they wouldn't have made their
2284 # way to the graftstate. With --force, any revisions we would have otherwise
2281 # way to the graftstate. With --force, any revisions we would have otherwise
2285 # skipped would not have been filtered out, and if they hadn't been applied
2282 # skipped would not have been filtered out, and if they hadn't been applied
2286 # already, they'd have been in the graftstate.
2283 # already, they'd have been in the graftstate.
2287 if not (cont or opts.get('force')):
2284 if not (cont or opts.get('force')):
2288 # check for ancestors of dest branch
2285 # check for ancestors of dest branch
2289 crev = repo['.'].rev()
2286 crev = repo['.'].rev()
2290 ancestors = repo.changelog.ancestors([crev], inclusive=True)
2287 ancestors = repo.changelog.ancestors([crev], inclusive=True)
2291 # XXX make this lazy in the future
2288 # XXX make this lazy in the future
2292 # don't mutate while iterating, create a copy
2289 # don't mutate while iterating, create a copy
2293 for rev in list(revs):
2290 for rev in list(revs):
2294 if rev in ancestors:
2291 if rev in ancestors:
2295 ui.warn(_('skipping ancestor revision %d:%s\n') %
2292 ui.warn(_('skipping ancestor revision %d:%s\n') %
2296 (rev, repo[rev]))
2293 (rev, repo[rev]))
2297 # XXX remove on list is slow
2294 # XXX remove on list is slow
2298 revs.remove(rev)
2295 revs.remove(rev)
2299 if not revs:
2296 if not revs:
2300 return -1
2297 return -1
2301
2298
2302 # analyze revs for earlier grafts
2299 # analyze revs for earlier grafts
2303 ids = {}
2300 ids = {}
2304 for ctx in repo.set("%ld", revs):
2301 for ctx in repo.set("%ld", revs):
2305 ids[ctx.hex()] = ctx.rev()
2302 ids[ctx.hex()] = ctx.rev()
2306 n = ctx.extra().get('source')
2303 n = ctx.extra().get('source')
2307 if n:
2304 if n:
2308 ids[n] = ctx.rev()
2305 ids[n] = ctx.rev()
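# 'ids' maps both the hex id of every requested revision and any
# 'source' extra it carries back to its local rev number; the scan
# below uses it to spot revisions that have already been grafted.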
2309
2306
2310 # check ancestors for earlier grafts
2307 # check ancestors for earlier grafts
2311 ui.debug('scanning for duplicate grafts\n')
2308 ui.debug('scanning for duplicate grafts\n')
2312
2309
2313 for rev in repo.changelog.findmissingrevs(revs, [crev]):
2310 for rev in repo.changelog.findmissingrevs(revs, [crev]):
2314 ctx = repo[rev]
2311 ctx = repo[rev]
2315 n = ctx.extra().get('source')
2312 n = ctx.extra().get('source')
2316 if n in ids:
2313 if n in ids:
2317 try:
2314 try:
2318 r = repo[n].rev()
2315 r = repo[n].rev()
2319 except error.RepoLookupError:
2316 except error.RepoLookupError:
2320 r = None
2317 r = None
2321 if r in revs:
2318 if r in revs:
2322 ui.warn(_('skipping revision %d:%s '
2319 ui.warn(_('skipping revision %d:%s '
2323 '(already grafted to %d:%s)\n')
2320 '(already grafted to %d:%s)\n')
2324 % (r, repo[r], rev, ctx))
2321 % (r, repo[r], rev, ctx))
2325 revs.remove(r)
2322 revs.remove(r)
2326 elif ids[n] in revs:
2323 elif ids[n] in revs:
2327 if r is None:
2324 if r is None:
2328 ui.warn(_('skipping already grafted revision %d:%s '
2325 ui.warn(_('skipping already grafted revision %d:%s '
2329 '(%d:%s also has unknown origin %s)\n')
2326 '(%d:%s also has unknown origin %s)\n')
2330 % (ids[n], repo[ids[n]], rev, ctx, n[:12]))
2327 % (ids[n], repo[ids[n]], rev, ctx, n[:12]))
2331 else:
2328 else:
2332 ui.warn(_('skipping already grafted revision %d:%s '
2329 ui.warn(_('skipping already grafted revision %d:%s '
2333 '(%d:%s also has origin %d:%s)\n')
2330 '(%d:%s also has origin %d:%s)\n')
2334 % (ids[n], repo[ids[n]], rev, ctx, r, n[:12]))
2331 % (ids[n], repo[ids[n]], rev, ctx, r, n[:12]))
2335 revs.remove(ids[n])
2332 revs.remove(ids[n])
2336 elif ctx.hex() in ids:
2333 elif ctx.hex() in ids:
2337 r = ids[ctx.hex()]
2334 r = ids[ctx.hex()]
2338 ui.warn(_('skipping already grafted revision %d:%s '
2335 ui.warn(_('skipping already grafted revision %d:%s '
2339 '(was grafted from %d:%s)\n') %
2336 '(was grafted from %d:%s)\n') %
2340 (r, repo[r], rev, ctx))
2337 (r, repo[r], rev, ctx))
2341 revs.remove(r)
2338 revs.remove(r)
2342 if not revs:
2339 if not revs:
2343 return -1
2340 return -1
2344
2341
2345 for pos, ctx in enumerate(repo.set("%ld", revs)):
2342 for pos, ctx in enumerate(repo.set("%ld", revs)):
2346 desc = '%d:%s "%s"' % (ctx.rev(), ctx,
2343 desc = '%d:%s "%s"' % (ctx.rev(), ctx,
2347 ctx.description().split('\n', 1)[0])
2344 ctx.description().split('\n', 1)[0])
2348 names = repo.nodetags(ctx.node()) + repo.nodebookmarks(ctx.node())
2345 names = repo.nodetags(ctx.node()) + repo.nodebookmarks(ctx.node())
2349 if names:
2346 if names:
2350 desc += ' (%s)' % ' '.join(names)
2347 desc += ' (%s)' % ' '.join(names)
2351 ui.status(_('grafting %s\n') % desc)
2348 ui.status(_('grafting %s\n') % desc)
2352 if opts.get('dry_run'):
2349 if opts.get('dry_run'):
2353 continue
2350 continue
2354
2351
2355 source = ctx.extra().get('source')
2352 source = ctx.extra().get('source')
2356 extra = {}
2353 extra = {}
2357 if source:
2354 if source:
2358 extra['source'] = source
2355 extra['source'] = source
2359 extra['intermediate-source'] = ctx.hex()
2356 extra['intermediate-source'] = ctx.hex()
2360 else:
2357 else:
2361 extra['source'] = ctx.hex()
2358 extra['source'] = ctx.hex()
2362 user = ctx.user()
2359 user = ctx.user()
2363 if opts.get('user'):
2360 if opts.get('user'):
2364 user = opts['user']
2361 user = opts['user']
2365 date = ctx.date()
2362 date = ctx.date()
2366 if opts.get('date'):
2363 if opts.get('date'):
2367 date = opts['date']
2364 date = opts['date']
2368 message = ctx.description()
2365 message = ctx.description()
2369 if opts.get('log'):
2366 if opts.get('log'):
2370 message += '\n(grafted from %s)' % ctx.hex()
2367 message += '\n(grafted from %s)' % ctx.hex()
2371
2368
2372 # we don't merge the first commit when continuing
2369 # we don't merge the first commit when continuing
2373 if not cont:
2370 if not cont:
2374 # perform the graft merge with p1(rev) as 'ancestor'
2371 # perform the graft merge with p1(rev) as 'ancestor'
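# using p1 of the grafted revision as the merge base makes the merge
# apply only that revision's delta onto the working copy, i.e. a
# cherry-pick rather than a full branch merge.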
2375 try:
2372 try:
2376 # ui.forcemerge is an internal variable, do not document
2373 # ui.forcemerge is an internal variable, do not document
2377 repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
2374 repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
2378 'graft')
2375 'graft')
2379 stats = mergemod.graft(repo, ctx, ctx.p1(),
2376 stats = mergemod.graft(repo, ctx, ctx.p1(),
2380 ['local', 'graft'])
2377 ['local', 'graft'])
2381 finally:
2378 finally:
2382 repo.ui.setconfig('ui', 'forcemerge', '', 'graft')
2379 repo.ui.setconfig('ui', 'forcemerge', '', 'graft')
2383 # report any conflicts
2380 # report any conflicts
2384 if stats and stats[3] > 0:
2381 if stats and stats[3] > 0:
2385 # write out state for --continue
2382 # write out state for --continue
2386 nodelines = [repo[rev].hex() + "\n" for rev in revs[pos:]]
2383 nodelines = [repo[rev].hex() + "\n" for rev in revs[pos:]]
2387 repo.vfs.write('graftstate', ''.join(nodelines))
2384 repo.vfs.write('graftstate', ''.join(nodelines))
2388 extra = ''
2385 extra = ''
2389 if opts.get('user'):
2386 if opts.get('user'):
2390 extra += ' --user %s' % util.shellquote(opts['user'])
2387 extra += ' --user %s' % util.shellquote(opts['user'])
2391 if opts.get('date'):
2388 if opts.get('date'):
2392 extra += ' --date %s' % util.shellquote(opts['date'])
2389 extra += ' --date %s' % util.shellquote(opts['date'])
2393 if opts.get('log'):
2390 if opts.get('log'):
2394 extra += ' --log'
2391 extra += ' --log'
2395 hint = _("use 'hg resolve' and 'hg graft --continue%s'") % extra
2392 hint = _("use 'hg resolve' and 'hg graft --continue%s'") % extra
2396 raise error.Abort(
2393 raise error.Abort(
2397 _("unresolved conflicts, can't continue"),
2394 _("unresolved conflicts, can't continue"),
2398 hint=hint)
2395 hint=hint)
2399 else:
2396 else:
2400 cont = False
2397 cont = False
2401
2398
2402 # commit
2399 # commit
2403 node = repo.commit(text=message, user=user,
2400 node = repo.commit(text=message, user=user,
2404 date=date, extra=extra, editor=editor)
2401 date=date, extra=extra, editor=editor)
2405 if node is None:
2402 if node is None:
2406 ui.warn(
2403 ui.warn(
2407 _('note: graft of %d:%s created no changes to commit\n') %
2404 _('note: graft of %d:%s created no changes to commit\n') %
2408 (ctx.rev(), ctx))
2405 (ctx.rev(), ctx))
2409
2406
2410 # remove state when we complete successfully
2407 # remove state when we complete successfully
2411 if not opts.get('dry_run'):
2408 if not opts.get('dry_run'):
2412 repo.vfs.unlinkpath('graftstate', ignoremissing=True)
2409 repo.vfs.unlinkpath('graftstate', ignoremissing=True)
2413
2410
2414 return 0
2411 return 0
2415
2412
2416 @command('grep',
2413 @command('grep',
2417 [('0', 'print0', None, _('end fields with NUL')),
2414 [('0', 'print0', None, _('end fields with NUL')),
2418 ('', 'all', None, _('print all revisions that match')),
2415 ('', 'all', None, _('print all revisions that match')),
2419 ('a', 'text', None, _('treat all files as text')),
2416 ('a', 'text', None, _('treat all files as text')),
2420 ('f', 'follow', None,
2417 ('f', 'follow', None,
2421 _('follow changeset history,'
2418 _('follow changeset history,'
2422 ' or file history across copies and renames')),
2419 ' or file history across copies and renames')),
2423 ('i', 'ignore-case', None, _('ignore case when matching')),
2420 ('i', 'ignore-case', None, _('ignore case when matching')),
2424 ('l', 'files-with-matches', None,
2421 ('l', 'files-with-matches', None,
2425 _('print only filenames and revisions that match')),
2422 _('print only filenames and revisions that match')),
2426 ('n', 'line-number', None, _('print matching line numbers')),
2423 ('n', 'line-number', None, _('print matching line numbers')),
2427 ('r', 'rev', [],
2424 ('r', 'rev', [],
2428 _('only search files changed within revision range'), _('REV')),
2425 _('only search files changed within revision range'), _('REV')),
2429 ('u', 'user', None, _('list the author (long with -v)')),
2426 ('u', 'user', None, _('list the author (long with -v)')),
2430 ('d', 'date', None, _('list the date (short with -q)')),
2427 ('d', 'date', None, _('list the date (short with -q)')),
2431 ] + formatteropts + walkopts,
2428 ] + formatteropts + walkopts,
2432 _('[OPTION]... PATTERN [FILE]...'),
2429 _('[OPTION]... PATTERN [FILE]...'),
2433 inferrepo=True)
2430 inferrepo=True)
2434 def grep(ui, repo, pattern, *pats, **opts):
2431 def grep(ui, repo, pattern, *pats, **opts):
2435 """search revision history for a pattern in specified files
2432 """search revision history for a pattern in specified files
2436
2433
2437 Search revision history for a regular expression in the specified
2434 Search revision history for a regular expression in the specified
2438 files or the entire project.
2435 files or the entire project.
2439
2436
2440 By default, grep prints the most recent revision number for each
2437 By default, grep prints the most recent revision number for each
2441 file in which it finds a match. To get it to print every revision
2438 file in which it finds a match. To get it to print every revision
2442 that contains a change in match status ("-" for a match that becomes
2439 that contains a change in match status ("-" for a match that becomes
2443 a non-match, or "+" for a non-match that becomes a match), use the
2440 a non-match, or "+" for a non-match that becomes a match), use the
2444 --all flag.
2441 --all flag.
2445
2442
2446 PATTERN can be any Python (roughly Perl-compatible) regular
2443 PATTERN can be any Python (roughly Perl-compatible) regular
2447 expression.
2444 expression.
2448
2445
2449 If no FILEs are specified (and -f/--follow isn't set), all files in
2446 If no FILEs are specified (and -f/--follow isn't set), all files in
2450 the repository are searched, including those that don't exist in the
2447 the repository are searched, including those that don't exist in the
2451 current branch or have been deleted in a prior changeset.
2448 current branch or have been deleted in a prior changeset.
2452
2449
2453 Returns 0 if a match is found, 1 otherwise.
2450 Returns 0 if a match is found, 1 otherwise.
2454 """
2451 """
2455 opts = pycompat.byteskwargs(opts)
2452 opts = pycompat.byteskwargs(opts)
2456 reflags = re.M
2453 reflags = re.M
2457 if opts.get('ignore_case'):
2454 if opts.get('ignore_case'):
2458 reflags |= re.I
2455 reflags |= re.I
2459 try:
2456 try:
2460 regexp = util.re.compile(pattern, reflags)
2457 regexp = util.re.compile(pattern, reflags)
2461 except re.error as inst:
2458 except re.error as inst:
2462 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
2459 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
2463 return 1
2460 return 1
2464 sep, eol = ':', '\n'
2461 sep, eol = ':', '\n'
2465 if opts.get('print0'):
2462 if opts.get('print0'):
2466 sep = eol = '\0'
2463 sep = eol = '\0'
2467
2464
2468 getfile = util.lrucachefunc(repo.file)
2465 getfile = util.lrucachefunc(repo.file)
2469
2466
2470 def matchlines(body):
2467 def matchlines(body):
2471 begin = 0
2468 begin = 0
2472 linenum = 0
2469 linenum = 0
2473 while begin < len(body):
2470 while begin < len(body):
2474 match = regexp.search(body, begin)
2471 match = regexp.search(body, begin)
2475 if not match:
2472 if not match:
2476 break
2473 break
2477 mstart, mend = match.span()
2474 mstart, mend = match.span()
2478 linenum += body.count('\n', begin, mstart) + 1
2475 linenum += body.count('\n', begin, mstart) + 1
2479 lstart = body.rfind('\n', begin, mstart) + 1 or begin
2476 lstart = body.rfind('\n', begin, mstart) + 1 or begin
2480 begin = body.find('\n', mend) + 1 or len(body) + 1
2477 begin = body.find('\n', mend) + 1 or len(body) + 1
2481 lend = begin - 1
2478 lend = begin - 1
2482 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
2479 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
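# matchlines() yields one tuple per matching line: a 1-based line
# number, the match's start and end columns within the line, and the
# line text itself; further hits on the same line are recovered later
# via linestate.findpos().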
2483
2480
2484 class linestate(object):
2481 class linestate(object):
2485 def __init__(self, line, linenum, colstart, colend):
2482 def __init__(self, line, linenum, colstart, colend):
2486 self.line = line
2483 self.line = line
2487 self.linenum = linenum
2484 self.linenum = linenum
2488 self.colstart = colstart
2485 self.colstart = colstart
2489 self.colend = colend
2486 self.colend = colend
2490
2487
2491 def __hash__(self):
2488 def __hash__(self):
2492 return hash((self.linenum, self.line))
2489 return hash((self.linenum, self.line))
2493
2490
2494 def __eq__(self, other):
2491 def __eq__(self, other):
2495 return self.line == other.line
2492 return self.line == other.line
2496
2493
2497 def findpos(self):
2494 def findpos(self):
2498 """Iterate all (start, end) indices of matches"""
2495 """Iterate all (start, end) indices of matches"""
2499 yield self.colstart, self.colend
2496 yield self.colstart, self.colend
2500 p = self.colend
2497 p = self.colend
2501 while p < len(self.line):
2498 while p < len(self.line):
2502 m = regexp.search(self.line, p)
2499 m = regexp.search(self.line, p)
2503 if not m:
2500 if not m:
2504 break
2501 break
2505 yield m.span()
2502 yield m.span()
2506 p = m.end()
2503 p = m.end()
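# findpos() re-runs the regexp over the stored line so that a single
# linestate can report every match on its line, not just the first one
# recorded by matchlines().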
2507
2504
2508 matches = {}
2505 matches = {}
2509 copies = {}
2506 copies = {}
2510 def grepbody(fn, rev, body):
2507 def grepbody(fn, rev, body):
2511 matches[rev].setdefault(fn, [])
2508 matches[rev].setdefault(fn, [])
2512 m = matches[rev][fn]
2509 m = matches[rev][fn]
2513 for lnum, cstart, cend, line in matchlines(body):
2510 for lnum, cstart, cend, line in matchlines(body):
2514 s = linestate(line, lnum, cstart, cend)
2511 s = linestate(line, lnum, cstart, cend)
2515 m.append(s)
2512 m.append(s)
2516
2513
2517 def difflinestates(a, b):
2514 def difflinestates(a, b):
2518 sm = difflib.SequenceMatcher(None, a, b)
2515 sm = difflib.SequenceMatcher(None, a, b)
2519 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2516 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2520 if tag == 'insert':
2517 if tag == 'insert':
2521 for i in xrange(blo, bhi):
2518 for i in xrange(blo, bhi):
2522 yield ('+', b[i])
2519 yield ('+', b[i])
2523 elif tag == 'delete':
2520 elif tag == 'delete':
2524 for i in xrange(alo, ahi):
2521 for i in xrange(alo, ahi):
2525 yield ('-', a[i])
2522 yield ('-', a[i])
2526 elif tag == 'replace':
2523 elif tag == 'replace':
2527 for i in xrange(alo, ahi):
2524 for i in xrange(alo, ahi):
2528 yield ('-', a[i])
2525 yield ('-', a[i])
2529 for i in xrange(blo, bhi):
2526 for i in xrange(blo, bhi):
2530 yield ('+', b[i])
2527 yield ('+', b[i])
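# difflinestates() diffs the parent's and the child's lists of matching
# lines and yields ('-', state) / ('+', state) pairs; this is what
# gives --all its "match removed" / "match added" markers.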
2531
2528
2532 def display(fm, fn, ctx, pstates, states):
2529 def display(fm, fn, ctx, pstates, states):
2533 rev = ctx.rev()
2530 rev = ctx.rev()
2534 if fm.isplain():
2531 if fm.isplain():
2535 formatuser = ui.shortuser
2532 formatuser = ui.shortuser
2536 else:
2533 else:
2537 formatuser = str
2534 formatuser = str
2538 if ui.quiet:
2535 if ui.quiet:
2539 datefmt = '%Y-%m-%d'
2536 datefmt = '%Y-%m-%d'
2540 else:
2537 else:
2541 datefmt = '%a %b %d %H:%M:%S %Y %1%2'
2538 datefmt = '%a %b %d %H:%M:%S %Y %1%2'
2542 found = False
2539 found = False
2543 @util.cachefunc
2540 @util.cachefunc
2544 def binary():
2541 def binary():
2545 flog = getfile(fn)
2542 flog = getfile(fn)
2546 return util.binary(flog.read(ctx.filenode(fn)))
2543 return util.binary(flog.read(ctx.filenode(fn)))
2547
2544
2548 fieldnamemap = {'filename': 'file', 'linenumber': 'line_number'}
2545 fieldnamemap = {'filename': 'file', 'linenumber': 'line_number'}
2549 if opts.get('all'):
2546 if opts.get('all'):
2550 iter = difflinestates(pstates, states)
2547 iter = difflinestates(pstates, states)
2551 else:
2548 else:
2552 iter = [('', l) for l in states]
2549 iter = [('', l) for l in states]
2553 for change, l in iter:
2550 for change, l in iter:
2554 fm.startitem()
2551 fm.startitem()
2555 fm.data(node=fm.hexfunc(ctx.node()))
2552 fm.data(node=fm.hexfunc(ctx.node()))
2556 cols = [
2553 cols = [
2557 ('filename', fn, True),
2554 ('filename', fn, True),
2558 ('rev', rev, True),
2555 ('rev', rev, True),
2559 ('linenumber', l.linenum, opts.get('line_number')),
2556 ('linenumber', l.linenum, opts.get('line_number')),
2560 ]
2557 ]
2561 if opts.get('all'):
2558 if opts.get('all'):
2562 cols.append(('change', change, True))
2559 cols.append(('change', change, True))
2563 cols.extend([
2560 cols.extend([
2564 ('user', formatuser(ctx.user()), opts.get('user')),
2561 ('user', formatuser(ctx.user()), opts.get('user')),
2565 ('date', fm.formatdate(ctx.date(), datefmt), opts.get('date')),
2562 ('date', fm.formatdate(ctx.date(), datefmt), opts.get('date')),
2566 ])
2563 ])
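# each cols entry is (field name, value, include?); only entries whose
# condition is true are written, and 'lastcol' below keeps the field
# separator from trailing after the final printed column.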
2567 lastcol = next(name for name, data, cond in reversed(cols) if cond)
2564 lastcol = next(name for name, data, cond in reversed(cols) if cond)
2568 for name, data, cond in cols:
2565 for name, data, cond in cols:
2569 field = fieldnamemap.get(name, name)
2566 field = fieldnamemap.get(name, name)
2570 fm.condwrite(cond, field, '%s', data, label='grep.%s' % name)
2567 fm.condwrite(cond, field, '%s', data, label='grep.%s' % name)
2571 if cond and name != lastcol:
2568 if cond and name != lastcol:
2572 fm.plain(sep, label='grep.sep')
2569 fm.plain(sep, label='grep.sep')
2573 if not opts.get('files_with_matches'):
2570 if not opts.get('files_with_matches'):
2574 fm.plain(sep, label='grep.sep')
2571 fm.plain(sep, label='grep.sep')
2575 if not opts.get('text') and binary():
2572 if not opts.get('text') and binary():
2576 fm.plain(_(" Binary file matches"))
2573 fm.plain(_(" Binary file matches"))
2577 else:
2574 else:
2578 displaymatches(fm.nested('texts'), l)
2575 displaymatches(fm.nested('texts'), l)
2579 fm.plain(eol)
2576 fm.plain(eol)
2580 found = True
2577 found = True
2581 if opts.get('files_with_matches'):
2578 if opts.get('files_with_matches'):
2582 break
2579 break
2583 return found
2580 return found
2584
2581
2585 def displaymatches(fm, l):
2582 def displaymatches(fm, l):
2586 p = 0
2583 p = 0
2587 for s, e in l.findpos():
2584 for s, e in l.findpos():
2588 if p < s:
2585 if p < s:
2589 fm.startitem()
2586 fm.startitem()
2590 fm.write('text', '%s', l.line[p:s])
2587 fm.write('text', '%s', l.line[p:s])
2591 fm.data(matched=False)
2588 fm.data(matched=False)
2592 fm.startitem()
2589 fm.startitem()
2593 fm.write('text', '%s', l.line[s:e], label='grep.match')
2590 fm.write('text', '%s', l.line[s:e], label='grep.match')
2594 fm.data(matched=True)
2591 fm.data(matched=True)
2595 p = e
2592 p = e
2596 if p < len(l.line):
2593 if p < len(l.line):
2597 fm.startitem()
2594 fm.startitem()
2598 fm.write('text', '%s', l.line[p:])
2595 fm.write('text', '%s', l.line[p:])
2599 fm.data(matched=False)
2596 fm.data(matched=False)
2600 fm.end()
2597 fm.end()
2601
2598
2602 skip = {}
2599 skip = {}
2603 revfiles = {}
2600 revfiles = {}
2604 matchfn = scmutil.match(repo[None], pats, opts)
2601 matchfn = scmutil.match(repo[None], pats, opts)
2605 found = False
2602 found = False
2606 follow = opts.get('follow')
2603 follow = opts.get('follow')
2607
2604
2608 def prep(ctx, fns):
2605 def prep(ctx, fns):
2609 rev = ctx.rev()
2606 rev = ctx.rev()
2610 pctx = ctx.p1()
2607 pctx = ctx.p1()
2611 parent = pctx.rev()
2608 parent = pctx.rev()
2612 matches.setdefault(rev, {})
2609 matches.setdefault(rev, {})
2613 matches.setdefault(parent, {})
2610 matches.setdefault(parent, {})
2614 files = revfiles.setdefault(rev, [])
2611 files = revfiles.setdefault(rev, [])
2615 for fn in fns:
2612 for fn in fns:
2616 flog = getfile(fn)
2613 flog = getfile(fn)
2617 try:
2614 try:
2618 fnode = ctx.filenode(fn)
2615 fnode = ctx.filenode(fn)
2619 except error.LookupError:
2616 except error.LookupError:
2620 continue
2617 continue
2621
2618
2622 copied = flog.renamed(fnode)
2619 copied = flog.renamed(fnode)
2623 copy = follow and copied and copied[0]
2620 copy = follow and copied and copied[0]
2624 if copy:
2621 if copy:
2625 copies.setdefault(rev, {})[fn] = copy
2622 copies.setdefault(rev, {})[fn] = copy
2626 if fn in skip:
2623 if fn in skip:
2627 if copy:
2624 if copy:
2628 skip[copy] = True
2625 skip[copy] = True
2629 continue
2626 continue
2630 files.append(fn)
2627 files.append(fn)
2631
2628
2632 if fn not in matches[rev]:
2629 if fn not in matches[rev]:
2633 grepbody(fn, rev, flog.read(fnode))
2630 grepbody(fn, rev, flog.read(fnode))
2634
2631
2635 pfn = copy or fn
2632 pfn = copy or fn
2636 if pfn not in matches[parent]:
2633 if pfn not in matches[parent]:
2637 try:
2634 try:
2638 fnode = pctx.filenode(pfn)
2635 fnode = pctx.filenode(pfn)
2639 grepbody(pfn, parent, flog.read(fnode))
2636 grepbody(pfn, parent, flog.read(fnode))
2640 except error.LookupError:
2637 except error.LookupError:
2641 pass
2638 pass
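# prep() is invoked by walkchangerevs() for every revision it visits:
# it records match states for the revision and for its first parent so
# that display() can later compare the two.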
2642
2639
2643 ui.pager('grep')
2640 ui.pager('grep')
2644 fm = ui.formatter('grep', opts)
2641 fm = ui.formatter('grep', opts)
2645 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
2642 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
2646 rev = ctx.rev()
2643 rev = ctx.rev()
2647 parent = ctx.p1().rev()
2644 parent = ctx.p1().rev()
2648 for fn in sorted(revfiles.get(rev, [])):
2645 for fn in sorted(revfiles.get(rev, [])):
2649 states = matches[rev][fn]
2646 states = matches[rev][fn]
2650 copy = copies.get(rev, {}).get(fn)
2647 copy = copies.get(rev, {}).get(fn)
2651 if fn in skip:
2648 if fn in skip:
2652 if copy:
2649 if copy:
2653 skip[copy] = True
2650 skip[copy] = True
2654 continue
2651 continue
2655 pstates = matches.get(parent, {}).get(copy or fn, [])
2652 pstates = matches.get(parent, {}).get(copy or fn, [])
2656 if pstates or states:
2653 if pstates or states:
2657 r = display(fm, fn, ctx, pstates, states)
2654 r = display(fm, fn, ctx, pstates, states)
2658 found = found or r
2655 found = found or r
2659 if r and not opts.get('all'):
2656 if r and not opts.get('all'):
2660 skip[fn] = True
2657 skip[fn] = True
2661 if copy:
2658 if copy:
2662 skip[copy] = True
2659 skip[copy] = True
2663 del matches[rev]
2660 del matches[rev]
2664 del revfiles[rev]
2661 del revfiles[rev]
2665 fm.end()
2662 fm.end()
2666
2663
2667 return not found
2664 return not found
2668
2665
2669 @command('heads',
2666 @command('heads',
2670 [('r', 'rev', '',
2667 [('r', 'rev', '',
2671 _('show only heads which are descendants of STARTREV'), _('STARTREV')),
2668 _('show only heads which are descendants of STARTREV'), _('STARTREV')),
2672 ('t', 'topo', False, _('show topological heads only')),
2669 ('t', 'topo', False, _('show topological heads only')),
2673 ('a', 'active', False, _('show active branchheads only (DEPRECATED)')),
2670 ('a', 'active', False, _('show active branchheads only (DEPRECATED)')),
2674 ('c', 'closed', False, _('show normal and closed branch heads')),
2671 ('c', 'closed', False, _('show normal and closed branch heads')),
2675 ] + templateopts,
2672 ] + templateopts,
2676 _('[-ct] [-r STARTREV] [REV]...'))
2673 _('[-ct] [-r STARTREV] [REV]...'))
2677 def heads(ui, repo, *branchrevs, **opts):
2674 def heads(ui, repo, *branchrevs, **opts):
2678 """show branch heads
2675 """show branch heads
2679
2676
2680 With no arguments, show all open branch heads in the repository.
2677 With no arguments, show all open branch heads in the repository.
2681 Branch heads are changesets that have no descendants on the
2678 Branch heads are changesets that have no descendants on the
2682 same branch. They are where development generally takes place and
2679 same branch. They are where development generally takes place and
2683 are the usual targets for update and merge operations.
2680 are the usual targets for update and merge operations.
2684
2681
2685 If one or more REVs are given, only open branch heads on the
2682 If one or more REVs are given, only open branch heads on the
2686 branches associated with the specified changesets are shown. This
2683 branches associated with the specified changesets are shown. This
2687 means that you can use :hg:`heads .` to see the heads on the
2684 means that you can use :hg:`heads .` to see the heads on the
2688 currently checked-out branch.
2685 currently checked-out branch.
2689
2686
2690 If -c/--closed is specified, also show branch heads marked closed
2687 If -c/--closed is specified, also show branch heads marked closed
2691 (see :hg:`commit --close-branch`).
2688 (see :hg:`commit --close-branch`).
2692
2689
2693 If STARTREV is specified, only those heads that are descendants of
2690 If STARTREV is specified, only those heads that are descendants of
2694 STARTREV will be displayed.
2691 STARTREV will be displayed.
2695
2692
2696 If -t/--topo is specified, named branch mechanics will be ignored and only
2693 If -t/--topo is specified, named branch mechanics will be ignored and only
2697 topological heads (changesets with no children) will be shown.
2694 topological heads (changesets with no children) will be shown.
2698
2695
2699 Returns 0 if matching heads are found, 1 if not.
2696 Returns 0 if matching heads are found, 1 if not.
2700 """
2697 """
2701
2698
2702 opts = pycompat.byteskwargs(opts)
2699 opts = pycompat.byteskwargs(opts)
2703 start = None
2700 start = None
2704 if 'rev' in opts:
2701 if 'rev' in opts:
2705 start = scmutil.revsingle(repo, opts['rev'], None).node()
2702 start = scmutil.revsingle(repo, opts['rev'], None).node()
2706
2703
2707 if opts.get('topo'):
2704 if opts.get('topo'):
2708 heads = [repo[h] for h in repo.heads(start)]
2705 heads = [repo[h] for h in repo.heads(start)]
2709 else:
2706 else:
2710 heads = []
2707 heads = []
2711 for branch in repo.branchmap():
2708 for branch in repo.branchmap():
2712 heads += repo.branchheads(branch, start, opts.get('closed'))
2709 heads += repo.branchheads(branch, start, opts.get('closed'))
2713 heads = [repo[h] for h in heads]
2710 heads = [repo[h] for h in heads]
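# without --topo the candidate heads come from the branch map (one set
# of branch heads per named branch, optionally including closed ones);
# with --topo plain DAG heads are used instead.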
2714
2711
2715 if branchrevs:
2712 if branchrevs:
2716 branches = set(repo[br].branch() for br in branchrevs)
2713 branches = set(repo[br].branch() for br in branchrevs)
2717 heads = [h for h in heads if h.branch() in branches]
2714 heads = [h for h in heads if h.branch() in branches]
2718
2715
2719 if opts.get('active') and branchrevs:
2716 if opts.get('active') and branchrevs:
2720 dagheads = repo.heads(start)
2717 dagheads = repo.heads(start)
2721 heads = [h for h in heads if h.node() in dagheads]
2718 heads = [h for h in heads if h.node() in dagheads]
2722
2719
2723 if branchrevs:
2720 if branchrevs:
2724 haveheads = set(h.branch() for h in heads)
2721 haveheads = set(h.branch() for h in heads)
2725 if branches - haveheads:
2722 if branches - haveheads:
2726 headless = ', '.join(b for b in branches - haveheads)
2723 headless = ', '.join(b for b in branches - haveheads)
2727 msg = _('no open branch heads found on branches %s')
2724 msg = _('no open branch heads found on branches %s')
2728 if opts.get('rev'):
2725 if opts.get('rev'):
2729 msg += _(' (started at %s)') % opts['rev']
2726 msg += _(' (started at %s)') % opts['rev']
2730 ui.warn((msg + '\n') % headless)
2727 ui.warn((msg + '\n') % headless)
2731
2728
2732 if not heads:
2729 if not heads:
2733 return 1
2730 return 1
2734
2731
2735 ui.pager('heads')
2732 ui.pager('heads')
2736 heads = sorted(heads, key=lambda x: -x.rev())
2733 heads = sorted(heads, key=lambda x: -x.rev())
2737 displayer = cmdutil.show_changeset(ui, repo, opts)
2734 displayer = cmdutil.show_changeset(ui, repo, opts)
2738 for ctx in heads:
2735 for ctx in heads:
2739 displayer.show(ctx)
2736 displayer.show(ctx)
2740 displayer.close()
2737 displayer.close()
2741
2738
2742 @command('help',
2739 @command('help',
2743 [('e', 'extension', None, _('show only help for extensions')),
2740 [('e', 'extension', None, _('show only help for extensions')),
2744 ('c', 'command', None, _('show only help for commands')),
2741 ('c', 'command', None, _('show only help for commands')),
2745 ('k', 'keyword', None, _('show topics matching keyword')),
2742 ('k', 'keyword', None, _('show topics matching keyword')),
2746 ('s', 'system', [], _('show help for specific platform(s)')),
2743 ('s', 'system', [], _('show help for specific platform(s)')),
2747 ],
2744 ],
2748 _('[-ecks] [TOPIC]'),
2745 _('[-ecks] [TOPIC]'),
2749 norepo=True)
2746 norepo=True)
2750 def help_(ui, name=None, **opts):
2747 def help_(ui, name=None, **opts):
2751 """show help for a given topic or a help overview
2748 """show help for a given topic or a help overview
2752
2749
2753 With no arguments, print a list of commands with short help messages.
2750 With no arguments, print a list of commands with short help messages.
2754
2751
2755 Given a topic, extension, or command name, print help for that
2752 Given a topic, extension, or command name, print help for that
2756 topic.
2753 topic.
2757
2754
2758 Returns 0 if successful.
2755 Returns 0 if successful.
2759 """
2756 """
2760
2757
2761 keep = opts.get(r'system') or []
2758 keep = opts.get(r'system') or []
2762 if len(keep) == 0:
2759 if len(keep) == 0:
2763 if pycompat.sysplatform.startswith('win'):
2760 if pycompat.sysplatform.startswith('win'):
2764 keep.append('windows')
2761 keep.append('windows')
2765 elif pycompat.sysplatform == 'OpenVMS':
2762 elif pycompat.sysplatform == 'OpenVMS':
2766 keep.append('vms')
2763 keep.append('vms')
2767 elif pycompat.sysplatform == 'plan9':
2764 elif pycompat.sysplatform == 'plan9':
2768 keep.append('plan9')
2765 keep.append('plan9')
2769 else:
2766 else:
2770 keep.append('unix')
2767 keep.append('unix')
2771 keep.append(pycompat.sysplatform.lower())
2768 keep.append(pycompat.sysplatform.lower())
2772 if ui.verbose:
2769 if ui.verbose:
2773 keep.append('verbose')
2770 keep.append('verbose')
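# 'keep' names the help sections to retain: one keyword for the current
# platform plus 'verbose' when requested; help.formattedhelp() is then
# expected to filter out sections tagged for other platforms.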
2774
2771
2775 formatted = help.formattedhelp(ui, name, keep=keep, **opts)
2772 formatted = help.formattedhelp(ui, name, keep=keep, **opts)
2776 ui.pager('help')
2773 ui.pager('help')
2777 ui.write(formatted)
2774 ui.write(formatted)
2778
2775
2779
2776
2780 @command('identify|id',
2777 @command('identify|id',
2781 [('r', 'rev', '',
2778 [('r', 'rev', '',
2782 _('identify the specified revision'), _('REV')),
2779 _('identify the specified revision'), _('REV')),
2783 ('n', 'num', None, _('show local revision number')),
2780 ('n', 'num', None, _('show local revision number')),
2784 ('i', 'id', None, _('show global revision id')),
2781 ('i', 'id', None, _('show global revision id')),
2785 ('b', 'branch', None, _('show branch')),
2782 ('b', 'branch', None, _('show branch')),
2786 ('t', 'tags', None, _('show tags')),
2783 ('t', 'tags', None, _('show tags')),
2787 ('B', 'bookmarks', None, _('show bookmarks')),
2784 ('B', 'bookmarks', None, _('show bookmarks')),
2788 ] + remoteopts,
2785 ] + remoteopts,
2789 _('[-nibtB] [-r REV] [SOURCE]'),
2786 _('[-nibtB] [-r REV] [SOURCE]'),
2790 optionalrepo=True)
2787 optionalrepo=True)
2791 def identify(ui, repo, source=None, rev=None,
2788 def identify(ui, repo, source=None, rev=None,
2792 num=None, id=None, branch=None, tags=None, bookmarks=None, **opts):
2789 num=None, id=None, branch=None, tags=None, bookmarks=None, **opts):
2793 """identify the working directory or specified revision
2790 """identify the working directory or specified revision
2794
2791
2795 Print a summary identifying the repository state at REV using one or
2792 Print a summary identifying the repository state at REV using one or
2796 two parent hash identifiers, followed by a "+" if the working
2793 two parent hash identifiers, followed by a "+" if the working
2797 directory has uncommitted changes, the branch name (if not default),
2794 directory has uncommitted changes, the branch name (if not default),
2798 a list of tags, and a list of bookmarks.
2795 a list of tags, and a list of bookmarks.
2799
2796
2800 When REV is not given, print a summary of the current state of the
2797 When REV is not given, print a summary of the current state of the
2801 repository.
2798 repository.
2802
2799
2803 Specifying a path to a repository root or Mercurial bundle will
2800 Specifying a path to a repository root or Mercurial bundle will
2804 cause lookup to operate on that repository/bundle.
2801 cause lookup to operate on that repository/bundle.
2805
2802
2806 .. container:: verbose
2803 .. container:: verbose
2807
2804
2808 Examples:
2805 Examples:
2809
2806
2810 - generate a build identifier for the working directory::
2807 - generate a build identifier for the working directory::
2811
2808
2812 hg id --id > build-id.dat
2809 hg id --id > build-id.dat
2813
2810
2814 - find the revision corresponding to a tag::
2811 - find the revision corresponding to a tag::
2815
2812
2816 hg id -n -r 1.3
2813 hg id -n -r 1.3
2817
2814
2818 - check the most recent revision of a remote repository::
2815 - check the most recent revision of a remote repository::
2819
2816
2820 hg id -r tip https://www.mercurial-scm.org/repo/hg/
2817 hg id -r tip https://www.mercurial-scm.org/repo/hg/
2821
2818
2822 See :hg:`log` for generating more information about specific revisions,
2819 See :hg:`log` for generating more information about specific revisions,
2823 including full hash identifiers.
2820 including full hash identifiers.
2824
2821
2825 Returns 0 if successful.
2822 Returns 0 if successful.
2826 """
2823 """
2827
2824
2828 opts = pycompat.byteskwargs(opts)
2825 opts = pycompat.byteskwargs(opts)
2829 if not repo and not source:
2826 if not repo and not source:
2830 raise error.Abort(_("there is no Mercurial repository here "
2827 raise error.Abort(_("there is no Mercurial repository here "
2831 "(.hg not found)"))
2828 "(.hg not found)"))
2832
2829
2833 if ui.debugflag:
2830 if ui.debugflag:
2834 hexfunc = hex
2831 hexfunc = hex
2835 else:
2832 else:
2836 hexfunc = short
2833 hexfunc = short
2837 default = not (num or id or branch or tags or bookmarks)
2834 default = not (num or id or branch or tags or bookmarks)
2838 output = []
2835 output = []
2839 revs = []
2836 revs = []
2840
2837
2841 if source:
2838 if source:
2842 source, branches = hg.parseurl(ui.expandpath(source))
2839 source, branches = hg.parseurl(ui.expandpath(source))
2843 peer = hg.peer(repo or ui, opts, source) # only pass ui when no repo
2840 peer = hg.peer(repo or ui, opts, source) # only pass ui when no repo
2844 repo = peer.local()
2841 repo = peer.local()
2845 revs, checkout = hg.addbranchrevs(repo, peer, branches, None)
2842 revs, checkout = hg.addbranchrevs(repo, peer, branches, None)
2846
2843
2847 if not repo:
2844 if not repo:
2848 if num or branch or tags:
2845 if num or branch or tags:
2849 raise error.Abort(
2846 raise error.Abort(
2850 _("can't query remote revision number, branch, or tags"))
2847 _("can't query remote revision number, branch, or tags"))
2851 if not rev and revs:
2848 if not rev and revs:
2852 rev = revs[0]
2849 rev = revs[0]
2853 if not rev:
2850 if not rev:
2854 rev = "tip"
2851 rev = "tip"
2855
2852
2856 remoterev = peer.lookup(rev)
2853 remoterev = peer.lookup(rev)
2857 if default or id:
2854 if default or id:
2858 output = [hexfunc(remoterev)]
2855 output = [hexfunc(remoterev)]
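# for a purely remote query only the looked-up node is printed (short
# or full hex depending on --debug); revision numbers, branch and tags
# cannot be queried from the remote, hence the abort above.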
2859
2856
2860 def getbms():
2857 def getbms():
2861 bms = []
2858 bms = []
2862
2859
2863 if 'bookmarks' in peer.listkeys('namespaces'):
2860 if 'bookmarks' in peer.listkeys('namespaces'):
2864 hexremoterev = hex(remoterev)
2861 hexremoterev = hex(remoterev)
2865 bms = [bm for bm, bmr in peer.listkeys('bookmarks').iteritems()
2862 bms = [bm for bm, bmr in peer.listkeys('bookmarks').iteritems()
2866 if bmr == hexremoterev]
2863 if bmr == hexremoterev]
2867
2864
2868 return sorted(bms)
2865 return sorted(bms)
2869
2866
2870 if bookmarks:
2867 if bookmarks:
2871 output.extend(getbms())
2868 output.extend(getbms())
2872 elif default and not ui.quiet:
2869 elif default and not ui.quiet:
2873 # multiple bookmarks for a single parent separated by '/'
2870 # multiple bookmarks for a single parent separated by '/'
2874 bm = '/'.join(getbms())
2871 bm = '/'.join(getbms())
2875 if bm:
2872 if bm:
2876 output.append(bm)
2873 output.append(bm)
2877 else:
2874 else:
2878 ctx = scmutil.revsingle(repo, rev, None)
2875 ctx = scmutil.revsingle(repo, rev, None)
2879
2876
2880 if ctx.rev() is None:
2877 if ctx.rev() is None:
2881 ctx = repo[None]
2878 ctx = repo[None]
2882 parents = ctx.parents()
2879 parents = ctx.parents()
2883 taglist = []
2880 taglist = []
2884 for p in parents:
2881 for p in parents:
2885 taglist.extend(p.tags())
2882 taglist.extend(p.tags())
2886
2883
2887 changed = ""
2884 changed = ""
2888 if default or id or num:
2885 if default or id or num:
2889 if (any(repo.status())
2886 if (any(repo.status())
2890 or any(ctx.sub(s).dirty() for s in ctx.substate)):
2887 or any(ctx.sub(s).dirty() for s in ctx.substate)):
2891 changed = '+'
2888 changed = '+'
2892 if default or id:
2889 if default or id:
2893 output = ["%s%s" %
2890 output = ["%s%s" %
2894 ('+'.join([hexfunc(p.node()) for p in parents]), changed)]
2891 ('+'.join([hexfunc(p.node()) for p in parents]), changed)]
2895 if num:
2892 if num:
2896 output.append("%s%s" %
2893 output.append("%s%s" %
2897 ('+'.join([str(p.rev()) for p in parents]), changed))
2894 ('+'.join([str(p.rev()) for p in parents]), changed))
2898 else:
2895 else:
2899 if default or id:
2896 if default or id:
2900 output = [hexfunc(ctx.node())]
2897 output = [hexfunc(ctx.node())]
2901 if num:
2898 if num:
2902 output.append(str(ctx.rev()))
2899 output.append(str(ctx.rev()))
2903 taglist = ctx.tags()
2900 taglist = ctx.tags()
2904
2901
2905 if default and not ui.quiet:
2902 if default and not ui.quiet:
2906 b = ctx.branch()
2903 b = ctx.branch()
2907 if b != 'default':
2904 if b != 'default':
2908 output.append("(%s)" % b)
2905 output.append("(%s)" % b)
2909
2906
2910 # multiple tags for a single parent separated by '/'
2907 # multiple tags for a single parent separated by '/'
2911 t = '/'.join(taglist)
2908 t = '/'.join(taglist)
2912 if t:
2909 if t:
2913 output.append(t)
2910 output.append(t)
2914
2911
2915 # multiple bookmarks for a single parent separated by '/'
2912 # multiple bookmarks for a single parent separated by '/'
2916 bm = '/'.join(ctx.bookmarks())
2913 bm = '/'.join(ctx.bookmarks())
2917 if bm:
2914 if bm:
2918 output.append(bm)
2915 output.append(bm)
2919 else:
2916 else:
2920 if branch:
2917 if branch:
2921 output.append(ctx.branch())
2918 output.append(ctx.branch())
2922
2919
2923 if tags:
2920 if tags:
2924 output.extend(taglist)
2921 output.extend(taglist)
2925
2922
2926 if bookmarks:
2923 if bookmarks:
2927 output.extend(ctx.bookmarks())
2924 output.extend(ctx.bookmarks())
2928
2925
2929 ui.write("%s\n" % ' '.join(output))
2926 ui.write("%s\n" % ' '.join(output))
2930
2927
2931 @command('import|patch',
2928 @command('import|patch',
2932 [('p', 'strip', 1,
2929 [('p', 'strip', 1,
2933 _('directory strip option for patch. This has the same '
2930 _('directory strip option for patch. This has the same '
2934 'meaning as the corresponding patch option'), _('NUM')),
2931 'meaning as the corresponding patch option'), _('NUM')),
2935 ('b', 'base', '', _('base path (DEPRECATED)'), _('PATH')),
2932 ('b', 'base', '', _('base path (DEPRECATED)'), _('PATH')),
2936 ('e', 'edit', False, _('invoke editor on commit messages')),
2933 ('e', 'edit', False, _('invoke editor on commit messages')),
2937 ('f', 'force', None,
2934 ('f', 'force', None,
2938 _('skip check for outstanding uncommitted changes (DEPRECATED)')),
2935 _('skip check for outstanding uncommitted changes (DEPRECATED)')),
2939 ('', 'no-commit', None,
2936 ('', 'no-commit', None,
2940 _("don't commit, just update the working directory")),
2937 _("don't commit, just update the working directory")),
2941 ('', 'bypass', None,
2938 ('', 'bypass', None,
2942 _("apply patch without touching the working directory")),
2939 _("apply patch without touching the working directory")),
2943 ('', 'partial', None,
2940 ('', 'partial', None,
2944 _('commit even if some hunks fail')),
2941 _('commit even if some hunks fail')),
2945 ('', 'exact', None,
2942 ('', 'exact', None,
2946 _('abort if patch would apply lossily')),
2943 _('abort if patch would apply lossily')),
2947 ('', 'prefix', '',
2944 ('', 'prefix', '',
2948 _('apply patch to subdirectory'), _('DIR')),
2945 _('apply patch to subdirectory'), _('DIR')),
2949 ('', 'import-branch', None,
2946 ('', 'import-branch', None,
2950 _('use any branch information in patch (implied by --exact)'))] +
2947 _('use any branch information in patch (implied by --exact)'))] +
2951 commitopts + commitopts2 + similarityopts,
2948 commitopts + commitopts2 + similarityopts,
2952 _('[OPTION]... PATCH...'))
2949 _('[OPTION]... PATCH...'))
2953 def import_(ui, repo, patch1=None, *patches, **opts):
2950 def import_(ui, repo, patch1=None, *patches, **opts):
2954 """import an ordered set of patches
2951 """import an ordered set of patches
2955
2952
2956 Import a list of patches and commit them individually (unless
2953 Import a list of patches and commit them individually (unless
2957 --no-commit is specified).
2954 --no-commit is specified).
2958
2955
2959 To read a patch from standard input (stdin), use "-" as the patch
2956 To read a patch from standard input (stdin), use "-" as the patch
2960 name. If a URL is specified, the patch will be downloaded from
2957 name. If a URL is specified, the patch will be downloaded from
2961 there.
2958 there.
2962
2959
2963 Import first applies changes to the working directory (unless
2960 Import first applies changes to the working directory (unless
2964 --bypass is specified); import will abort if there are outstanding
2961 --bypass is specified); import will abort if there are outstanding
2965 changes.
2962 changes.
2966
2963
2967 Use --bypass to apply and commit patches directly to the
2964 Use --bypass to apply and commit patches directly to the
2968 repository, without affecting the working directory. Without
2965 repository, without affecting the working directory. Without
2969 --exact, patches will be applied on top of the working directory
2966 --exact, patches will be applied on top of the working directory
2970 parent revision.
2967 parent revision.
2971
2968
2972 You can import a patch straight from a mail message. Even patches
2969 You can import a patch straight from a mail message. Even patches
2973 as attachments work (to use the body part, it must have type
2970 as attachments work (to use the body part, it must have type
2974 text/plain or text/x-patch). The From and Subject headers of the email
2971 text/plain or text/x-patch). The From and Subject headers of the email
2975 message are used as the default committer and commit message. All
2972 message are used as the default committer and commit message. All
2976 text/plain body parts before the first diff are added to the commit
2973 text/plain body parts before the first diff are added to the commit
2977 message.
2974 message.
2978
2975
2979 If the imported patch was generated by :hg:`export`, user and
2976 If the imported patch was generated by :hg:`export`, user and
2980 description from patch override values from message headers and
2977 description from patch override values from message headers and
2981 body. Values given on command line with -m/--message and -u/--user
2978 body. Values given on command line with -m/--message and -u/--user
2982 override these.
2979 override these.
2983
2980
2984 If --exact is specified, import will set the working directory to
2981 If --exact is specified, import will set the working directory to
2985 the parent of each patch before applying it, and will abort if the
2982 the parent of each patch before applying it, and will abort if the
2986 resulting changeset has a different ID than the one recorded in
2983 resulting changeset has a different ID than the one recorded in
2987 the patch. This will guard against various ways that portable
2984 the patch. This will guard against various ways that portable
2988 patch formats and mail systems might fail to transfer Mercurial
2985 patch formats and mail systems might fail to transfer Mercurial
2989 data or metadata. See :hg:`bundle` for lossless transmission.
2986 data or metadata. See :hg:`bundle` for lossless transmission.
2990
2987
2991 Use --partial to ensure a changeset will be created from the patch
2988 Use --partial to ensure a changeset will be created from the patch
2992 even if some hunks fail to apply. Hunks that fail to apply will be
2989 even if some hunks fail to apply. Hunks that fail to apply will be
2993 written to a <target-file>.rej file. Conflicts can then be resolved
2990 written to a <target-file>.rej file. Conflicts can then be resolved
2994 by hand before :hg:`commit --amend` is run to update the created
2991 by hand before :hg:`commit --amend` is run to update the created
2995 changeset. This flag exists to let people import patches that
2992 changeset. This flag exists to let people import patches that
2996 partially apply without losing the associated metadata (author,
2993 partially apply without losing the associated metadata (author,
2997 date, description, ...).
2994 date, description, ...).
2998
2995
2999 .. note::
2996 .. note::
3000
2997
3001 When no hunks apply cleanly, :hg:`import --partial` will create
2998 When no hunks apply cleanly, :hg:`import --partial` will create
3002 an empty changeset, importing only the patch metadata.
2999 an empty changeset, importing only the patch metadata.
3003
3000
3004 With -s/--similarity, hg will attempt to discover renames and
3001 With -s/--similarity, hg will attempt to discover renames and
3005 copies in the patch in the same way as :hg:`addremove`.
3002 copies in the patch in the same way as :hg:`addremove`.
3006
3003
3007 It is possible to use external patch programs to perform the patch
3004 It is possible to use external patch programs to perform the patch
3008 by setting the ``ui.patch`` configuration option. For the default
3005 by setting the ``ui.patch`` configuration option. For the default
3009 internal tool, the fuzz can also be configured via ``patch.fuzz``.
3006 internal tool, the fuzz can also be configured via ``patch.fuzz``.
3010 See :hg:`help config` for more information about configuration
3007 See :hg:`help config` for more information about configuration
3011 files and how to use these options.
3008 files and how to use these options.
3012
3009
3013 See :hg:`help dates` for a list of formats valid for -d/--date.
3010 See :hg:`help dates` for a list of formats valid for -d/--date.
3014
3011
3015 .. container:: verbose
3012 .. container:: verbose
3016
3013
3017 Examples:
3014 Examples:
3018
3015
3019 - import a traditional patch from a website and detect renames::
3016 - import a traditional patch from a website and detect renames::
3020
3017
3021 hg import -s 80 http://example.com/bugfix.patch
3018 hg import -s 80 http://example.com/bugfix.patch
3022
3019
3023 - import a changeset from an hgweb server::
3020 - import a changeset from an hgweb server::
3024
3021
3025 hg import https://www.mercurial-scm.org/repo/hg/rev/5ca8c111e9aa
3022 hg import https://www.mercurial-scm.org/repo/hg/rev/5ca8c111e9aa
3026
3023
3027 - import all the patches in a Unix-style mbox::
3024 - import all the patches in a Unix-style mbox::
3028
3025
3029 hg import incoming-patches.mbox
3026 hg import incoming-patches.mbox
3030
3027
3031 - import patches from stdin::
3028 - import patches from stdin::
3032
3029
3033 hg import -
3030 hg import -
3034
3031
3035 - attempt to exactly restore an exported changeset (not always
3032 - attempt to exactly restore an exported changeset (not always
3036 possible)::
3033 possible)::
3037
3034
3038 hg import --exact proposed-fix.patch
3035 hg import --exact proposed-fix.patch
3039
3036
3040 - use an external tool to apply a patch which is too fuzzy for
3037 - use an external tool to apply a patch which is too fuzzy for
3041 the default internal tool::
3038 the default internal tool::
3042
3039
3043 hg import --config ui.patch="patch --merge" fuzzy.patch
3040 hg import --config ui.patch="patch --merge" fuzzy.patch
3044
3041
3045 - change the default fuzzing from 2 to a less strict 7::
3042 - change the default fuzzing from 2 to a less strict 7::
3046
3043
3047 hg import --config patch.fuzz=7 fuzz.patch
3044 hg import --config patch.fuzz=7 fuzz.patch
3048
3045
3049 Returns 0 on success, 1 on partial success (see --partial).
3046 Returns 0 on success, 1 on partial success (see --partial).
3050 """
3047 """
3051
3048
3052 opts = pycompat.byteskwargs(opts)
3049 opts = pycompat.byteskwargs(opts)
3053 if not patch1:
3050 if not patch1:
3054 raise error.Abort(_('need at least one patch to import'))
3051 raise error.Abort(_('need at least one patch to import'))
3055
3052
3056 patches = (patch1,) + patches
3053 patches = (patch1,) + patches
3057
3054
3058 date = opts.get('date')
3055 date = opts.get('date')
3059 if date:
3056 if date:
3060 opts['date'] = util.parsedate(date)
3057 opts['date'] = util.parsedate(date)
3061
3058
3062 exact = opts.get('exact')
3059 exact = opts.get('exact')
3063 update = not opts.get('bypass')
3060 update = not opts.get('bypass')
3064 if not update and opts.get('no_commit'):
3061 if not update and opts.get('no_commit'):
3065 raise error.Abort(_('cannot use --no-commit with --bypass'))
3062 raise error.Abort(_('cannot use --no-commit with --bypass'))
3066 try:
3063 try:
3067 sim = float(opts.get('similarity') or 0)
3064 sim = float(opts.get('similarity') or 0)
3068 except ValueError:
3065 except ValueError:
3069 raise error.Abort(_('similarity must be a number'))
3066 raise error.Abort(_('similarity must be a number'))
3070 if sim < 0 or sim > 100:
3067 if sim < 0 or sim > 100:
3071 raise error.Abort(_('similarity must be between 0 and 100'))
3068 raise error.Abort(_('similarity must be between 0 and 100'))
3072 if sim and not update:
3069 if sim and not update:
3073 raise error.Abort(_('cannot use --similarity with --bypass'))
3070 raise error.Abort(_('cannot use --similarity with --bypass'))
3074 if exact:
3071 if exact:
3075 if opts.get('edit'):
3072 if opts.get('edit'):
3076 raise error.Abort(_('cannot use --exact with --edit'))
3073 raise error.Abort(_('cannot use --exact with --edit'))
3077 if opts.get('prefix'):
3074 if opts.get('prefix'):
3078 raise error.Abort(_('cannot use --exact with --prefix'))
3075 raise error.Abort(_('cannot use --exact with --prefix'))
3079
3076
3080 base = opts["base"]
3077 base = opts["base"]
3081 wlock = dsguard = lock = tr = None
3078 wlock = dsguard = lock = tr = None
3082 msgs = []
3079 msgs = []
3083 ret = 0
3080 ret = 0
3084
3081
3085
3082
3086 try:
3083 try:
3087 wlock = repo.wlock()
3084 wlock = repo.wlock()
3088
3085
3089 if update:
3086 if update:
3090 cmdutil.checkunfinished(repo)
3087 cmdutil.checkunfinished(repo)
3091 if (exact or not opts.get('force')):
3088 if (exact or not opts.get('force')):
3092 cmdutil.bailifchanged(repo)
3089 cmdutil.bailifchanged(repo)
3093
3090
3094 if not opts.get('no_commit'):
3091 if not opts.get('no_commit'):
3095 lock = repo.lock()
3092 lock = repo.lock()
3096 tr = repo.transaction('import')
3093 tr = repo.transaction('import')
3097 else:
3094 else:
3098 dsguard = dirstateguard.dirstateguard(repo, 'import')
3095 dsguard = dirstateguard.dirstateguard(repo, 'import')
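# when committing, a store lock plus a single transaction spans every
# patch, so a failure in the middle rolls the whole import back via
# tr.release() in the finally block; with --no-commit only the dirstate
# needs protecting, which is what the dirstateguard provides.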
3099 parents = repo[None].parents()
3096 parents = repo[None].parents()
3100 for patchurl in patches:
3097 for patchurl in patches:
3101 if patchurl == '-':
3098 if patchurl == '-':
3102 ui.status(_('applying patch from stdin\n'))
3099 ui.status(_('applying patch from stdin\n'))
3103 patchfile = ui.fin
3100 patchfile = ui.fin
3104 patchurl = 'stdin' # for error message
3101 patchurl = 'stdin' # for error message
3105 else:
3102 else:
3106 patchurl = os.path.join(base, patchurl)
3103 patchurl = os.path.join(base, patchurl)
3107 ui.status(_('applying %s\n') % patchurl)
3104 ui.status(_('applying %s\n') % patchurl)
3108 patchfile = hg.openpath(ui, patchurl)
3105 patchfile = hg.openpath(ui, patchurl)
3109
3106
3110 haspatch = False
3107 haspatch = False
3111 for hunk in patch.split(patchfile):
3108 for hunk in patch.split(patchfile):
3112 (msg, node, rej) = cmdutil.tryimportone(ui, repo, hunk,
3109 (msg, node, rej) = cmdutil.tryimportone(ui, repo, hunk,
3113 parents, opts,
3110 parents, opts,
3114 msgs, hg.clean)
3111 msgs, hg.clean)
3115 if msg:
3112 if msg:
3116 haspatch = True
3113 haspatch = True
3117 ui.note(msg + '\n')
3114 ui.note(msg + '\n')
3118 if update or exact:
3115 if update or exact:
3119 parents = repo[None].parents()
3116 parents = repo[None].parents()
3120 else:
3117 else:
3121 parents = [repo[node]]
3118 parents = [repo[node]]
3122 if rej:
3119 if rej:
3123 ui.write_err(_("patch applied partially\n"))
3120 ui.write_err(_("patch applied partially\n"))
3124 ui.write_err(_("(fix the .rej files and run "
3121 ui.write_err(_("(fix the .rej files and run "
3125 "`hg commit --amend`)\n"))
3122 "`hg commit --amend`)\n"))
3126 ret = 1
3123 ret = 1
3127 break
3124 break
3128
3125
3129 if not haspatch:
3126 if not haspatch:
3130 raise error.Abort(_('%s: no diffs found') % patchurl)
3127 raise error.Abort(_('%s: no diffs found') % patchurl)
3131
3128
3132 if tr:
3129 if tr:
3133 tr.close()
3130 tr.close()
3134 if msgs:
3131 if msgs:
3135 repo.savecommitmessage('\n* * *\n'.join(msgs))
3132 repo.savecommitmessage('\n* * *\n'.join(msgs))
3136 if dsguard:
3133 if dsguard:
3137 dsguard.close()
3134 dsguard.close()
3138 return ret
3135 return ret
3139 finally:
3136 finally:
3140 if tr:
3137 if tr:
3141 tr.release()
3138 tr.release()
3142 release(lock, dsguard, wlock)
3139 release(lock, dsguard, wlock)
3143
3140
3144 @command('incoming|in',
3141 @command('incoming|in',
3145 [('f', 'force', None,
3142 [('f', 'force', None,
3146 _('run even if remote repository is unrelated')),
3143 _('run even if remote repository is unrelated')),
3147 ('n', 'newest-first', None, _('show newest record first')),
3144 ('n', 'newest-first', None, _('show newest record first')),
3148 ('', 'bundle', '',
3145 ('', 'bundle', '',
3149 _('file to store the bundles into'), _('FILE')),
3146 _('file to store the bundles into'), _('FILE')),
3150 ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
3147 ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
3151 ('B', 'bookmarks', False, _("compare bookmarks")),
3148 ('B', 'bookmarks', False, _("compare bookmarks")),
3152 ('b', 'branch', [],
3149 ('b', 'branch', [],
3153 _('a specific branch you would like to pull'), _('BRANCH')),
3150 _('a specific branch you would like to pull'), _('BRANCH')),
3154 ] + logopts + remoteopts + subrepoopts,
3151 ] + logopts + remoteopts + subrepoopts,
3155 _('[-p] [-n] [-M] [-f] [-r REV]... [--bundle FILENAME] [SOURCE]'))
3152 _('[-p] [-n] [-M] [-f] [-r REV]... [--bundle FILENAME] [SOURCE]'))
3156 def incoming(ui, repo, source="default", **opts):
3153 def incoming(ui, repo, source="default", **opts):
3157 """show new changesets found in source
3154 """show new changesets found in source
3158
3155
3159 Show new changesets found in the specified path/URL or the default
3156 Show new changesets found in the specified path/URL or the default
3160 pull location. These are the changesets that would have been pulled
3157 pull location. These are the changesets that would have been pulled
3161 if you had requested a pull at the time you issued this command.
3158 if you had requested a pull at the time you issued this command.
3162
3159
3163 See pull for valid source format details.
3160 See pull for valid source format details.
3164
3161
3165 .. container:: verbose
3162 .. container:: verbose
3166
3163
3167 With -B/--bookmarks, the result of bookmark comparison between
3164 With -B/--bookmarks, the result of bookmark comparison between
3168 local and remote repositories is displayed. With -v/--verbose,
3165 local and remote repositories is displayed. With -v/--verbose,
3169 status is also displayed for each bookmark like below::
3166 status is also displayed for each bookmark like below::
3170
3167
3171 BM1 01234567890a added
3168 BM1 01234567890a added
3172 BM2 1234567890ab advanced
3169 BM2 1234567890ab advanced
3173 BM3 234567890abc diverged
3170 BM3 234567890abc diverged
3174 BM4 34567890abcd changed
3171 BM4 34567890abcd changed
3175
3172
3176 The action taken locally when pulling depends on the
3173 The action taken locally when pulling depends on the
3177 status of each bookmark:
3174 status of each bookmark:
3178
3175
3179 :``added``: pull will create it
3176 :``added``: pull will create it
3180 :``advanced``: pull will update it
3177 :``advanced``: pull will update it
3181 :``diverged``: pull will create a divergent bookmark
3178 :``diverged``: pull will create a divergent bookmark
3182 :``changed``: result depends on remote changesets
3179 :``changed``: result depends on remote changesets
3183
3180
3184 From the point of view of pulling behavior, bookmarks
3181 From the point of view of pulling behavior, bookmarks
3185 existing only in the remote repository are treated as ``added``,
3182 existing only in the remote repository are treated as ``added``,
3186 even if they have in fact been locally deleted.
3183 even if they have in fact been locally deleted.
3187
3184
3188 .. container:: verbose
3185 .. container:: verbose
3189
3186
3190 For a remote repository, using --bundle avoids downloading the
3187 For a remote repository, using --bundle avoids downloading the
3191 changesets twice if the incoming run is followed by a pull.
3188 changesets twice if the incoming run is followed by a pull.
3192
3189
3193 Examples:
3190 Examples:
3194
3191
3195 - show incoming changes with patches and full description::
3192 - show incoming changes with patches and full description::
3196
3193
3197 hg incoming -vp
3194 hg incoming -vp
3198
3195
3199 - show incoming changes excluding merges, store a bundle::
3196 - show incoming changes excluding merges, store a bundle::
3200
3197
3201 hg in -vpM --bundle incoming.hg
3198 hg in -vpM --bundle incoming.hg
3202 hg pull incoming.hg
3199 hg pull incoming.hg
3203
3200
3204 - briefly list changes inside a bundle::
3201 - briefly list changes inside a bundle::
3205
3202
3206 hg in changes.hg -T "{desc|firstline}\\n"
3203 hg in changes.hg -T "{desc|firstline}\\n"
3207
3204
3208 Returns 0 if there are incoming changes, 1 otherwise.
3205 Returns 0 if there are incoming changes, 1 otherwise.
3209 """
3206 """
3210 opts = pycompat.byteskwargs(opts)
3207 opts = pycompat.byteskwargs(opts)
3211 if opts.get('graph'):
3208 if opts.get('graph'):
3212 cmdutil.checkunsupportedgraphflags([], opts)
3209 cmdutil.checkunsupportedgraphflags([], opts)
3213 def display(other, chlist, displayer):
3210 def display(other, chlist, displayer):
3214 revdag = cmdutil.graphrevs(other, chlist, opts)
3211 revdag = cmdutil.graphrevs(other, chlist, opts)
3215 cmdutil.displaygraph(ui, repo, revdag, displayer,
3212 cmdutil.displaygraph(ui, repo, revdag, displayer,
3216 graphmod.asciiedges)
3213 graphmod.asciiedges)
3217
3214
3218 hg._incoming(display, lambda: 1, ui, repo, source, opts, buffered=True)
3215 hg._incoming(display, lambda: 1, ui, repo, source, opts, buffered=True)
3219 return 0
3216 return 0
3220
3217
3221 if opts.get('bundle') and opts.get('subrepos'):
3218 if opts.get('bundle') and opts.get('subrepos'):
3222 raise error.Abort(_('cannot combine --bundle and --subrepos'))
3219 raise error.Abort(_('cannot combine --bundle and --subrepos'))
3223
3220
3224 if opts.get('bookmarks'):
3221 if opts.get('bookmarks'):
3225 source, branches = hg.parseurl(ui.expandpath(source),
3222 source, branches = hg.parseurl(ui.expandpath(source),
3226 opts.get('branch'))
3223 opts.get('branch'))
3227 other = hg.peer(repo, opts, source)
3224 other = hg.peer(repo, opts, source)
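        # Note (editorial comment, not in the original source): bookmark data is
        # exchanged through the pushkey 'namespaces' listing; if the remote does
        # not advertise a 'bookmarks' namespace there is nothing to compare.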
3228 if 'bookmarks' not in other.listkeys('namespaces'):
3225 if 'bookmarks' not in other.listkeys('namespaces'):
3229 ui.warn(_("remote doesn't support bookmarks\n"))
3226 ui.warn(_("remote doesn't support bookmarks\n"))
3230 return 0
3227 return 0
3231 ui.pager('incoming')
3228 ui.pager('incoming')
3232 ui.status(_('comparing with %s\n') % util.hidepassword(source))
3229 ui.status(_('comparing with %s\n') % util.hidepassword(source))
3233 return bookmarks.incoming(ui, repo, other)
3230 return bookmarks.incoming(ui, repo, other)
3234
3231
3235 repo._subtoppath = ui.expandpath(source)
3232 repo._subtoppath = ui.expandpath(source)
3236 try:
3233 try:
3237 return hg.incoming(ui, repo, source, opts)
3234 return hg.incoming(ui, repo, source, opts)
3238 finally:
3235 finally:
3239 del repo._subtoppath
3236 del repo._subtoppath
3240
3237
3241
3238
3242 @command('^init', remoteopts, _('[-e CMD] [--remotecmd CMD] [DEST]'),
3239 @command('^init', remoteopts, _('[-e CMD] [--remotecmd CMD] [DEST]'),
3243 norepo=True)
3240 norepo=True)
3244 def init(ui, dest=".", **opts):
3241 def init(ui, dest=".", **opts):
3245 """create a new repository in the given directory
3242 """create a new repository in the given directory
3246
3243
3247 Initialize a new repository in the given directory. If the given
3244 Initialize a new repository in the given directory. If the given
3248 directory does not exist, it will be created.
3245 directory does not exist, it will be created.
3249
3246
3250 If no directory is given, the current directory is used.
3247 If no directory is given, the current directory is used.
3251
3248
3252 It is possible to specify an ``ssh://`` URL as the destination.
3249 It is possible to specify an ``ssh://`` URL as the destination.
3253 See :hg:`help urls` for more information.
3250 See :hg:`help urls` for more information.
3254
3251
3255 Returns 0 on success.
3252 Returns 0 on success.
3256 """
3253 """
3257 opts = pycompat.byteskwargs(opts)
3254 opts = pycompat.byteskwargs(opts)
3258 hg.peer(ui, opts, ui.expandpath(dest), create=True)
3255 hg.peer(ui, opts, ui.expandpath(dest), create=True)
3259
3256
3260 @command('locate',
3257 @command('locate',
3261 [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
3258 [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
3262 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
3259 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
3263 ('f', 'fullpath', None, _('print complete paths from the filesystem root')),
3260 ('f', 'fullpath', None, _('print complete paths from the filesystem root')),
3264 ] + walkopts,
3261 ] + walkopts,
3265 _('[OPTION]... [PATTERN]...'))
3262 _('[OPTION]... [PATTERN]...'))
3266 def locate(ui, repo, *pats, **opts):
3263 def locate(ui, repo, *pats, **opts):
3267 """locate files matching specific patterns (DEPRECATED)
3264 """locate files matching specific patterns (DEPRECATED)
3268
3265
3269 Print files under Mercurial control in the working directory whose
3266 Print files under Mercurial control in the working directory whose
3270 names match the given patterns.
3267 names match the given patterns.
3271
3268
3272 By default, this command searches all directories in the working
3269 By default, this command searches all directories in the working
3273 directory. To search just the current directory and its
3270 directory. To search just the current directory and its
3274 subdirectories, use "--include .".
3271 subdirectories, use "--include .".
3275
3272
3276 If no patterns are given to match, this command prints the names
3273 If no patterns are given to match, this command prints the names
3277 of all files under Mercurial control in the working directory.
3274 of all files under Mercurial control in the working directory.
3278
3275
3279 If you want to feed the output of this command into the "xargs"
3276 If you want to feed the output of this command into the "xargs"
3280 command, use the -0 option to both this command and "xargs". This
3277 command, use the -0 option to both this command and "xargs". This
3281 will avoid the problem of "xargs" treating single filenames that
3278 will avoid the problem of "xargs" treating single filenames that
3282 contain whitespace as multiple filenames.
3279 contain whitespace as multiple filenames.
3283
3280
3284 See :hg:`help files` for a more versatile command.
3281 See :hg:`help files` for a more versatile command.
3285
3282
3286 Returns 0 if a match is found, 1 otherwise.
3283 Returns 0 if a match is found, 1 otherwise.
3287 """
3284 """
3288 opts = pycompat.byteskwargs(opts)
3285 opts = pycompat.byteskwargs(opts)
3289 if opts.get('print0'):
3286 if opts.get('print0'):
3290 end = '\0'
3287 end = '\0'
3291 else:
3288 else:
3292 end = '\n'
3289 end = '\n'
3293 rev = scmutil.revsingle(repo, opts.get('rev'), None).node()
3290 rev = scmutil.revsingle(repo, opts.get('rev'), None).node()
3294
3291
3295 ret = 1
3292 ret = 1
3296 ctx = repo[rev]
3293 ctx = repo[rev]
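    # Note (editorial comment, not in the original source): the no-op badfn
    # suppresses the usual warnings for names that do not exist; locate signals
    # "no match" through its exit status (1) instead.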
3297 m = scmutil.match(ctx, pats, opts, default='relglob',
3294 m = scmutil.match(ctx, pats, opts, default='relglob',
3298 badfn=lambda x, y: False)
3295 badfn=lambda x, y: False)
3299
3296
3300 ui.pager('locate')
3297 ui.pager('locate')
3301 for abs in ctx.matches(m):
3298 for abs in ctx.matches(m):
3302 if opts.get('fullpath'):
3299 if opts.get('fullpath'):
3303 ui.write(repo.wjoin(abs), end)
3300 ui.write(repo.wjoin(abs), end)
3304 else:
3301 else:
3305 ui.write(((pats and m.rel(abs)) or abs), end)
3302 ui.write(((pats and m.rel(abs)) or abs), end)
3306 ret = 0
3303 ret = 0
3307
3304
3308 return ret
3305 return ret
3309
3306
3310 @command('^log|history',
3307 @command('^log|history',
3311 [('f', 'follow', None,
3308 [('f', 'follow', None,
3312 _('follow changeset history, or file history across copies and renames')),
3309 _('follow changeset history, or file history across copies and renames')),
3313 ('', 'follow-first', None,
3310 ('', 'follow-first', None,
3314 _('only follow the first parent of merge changesets (DEPRECATED)')),
3311 _('only follow the first parent of merge changesets (DEPRECATED)')),
3315 ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
3312 ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
3316 ('C', 'copies', None, _('show copied files')),
3313 ('C', 'copies', None, _('show copied files')),
3317 ('k', 'keyword', [],
3314 ('k', 'keyword', [],
3318 _('do case-insensitive search for a given text'), _('TEXT')),
3315 _('do case-insensitive search for a given text'), _('TEXT')),
3319 ('r', 'rev', [], _('show the specified revision or revset'), _('REV')),
3316 ('r', 'rev', [], _('show the specified revision or revset'), _('REV')),
3320 ('', 'removed', None, _('include revisions where files were removed')),
3317 ('', 'removed', None, _('include revisions where files were removed')),
3321 ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
3318 ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
3322 ('u', 'user', [], _('revisions committed by user'), _('USER')),
3319 ('u', 'user', [], _('revisions committed by user'), _('USER')),
3323 ('', 'only-branch', [],
3320 ('', 'only-branch', [],
3324 _('show only changesets within the given named branch (DEPRECATED)'),
3321 _('show only changesets within the given named branch (DEPRECATED)'),
3325 _('BRANCH')),
3322 _('BRANCH')),
3326 ('b', 'branch', [],
3323 ('b', 'branch', [],
3327 _('show changesets within the given named branch'), _('BRANCH')),
3324 _('show changesets within the given named branch'), _('BRANCH')),
3328 ('P', 'prune', [],
3325 ('P', 'prune', [],
3329 _('do not display revision or any of its ancestors'), _('REV')),
3326 _('do not display revision or any of its ancestors'), _('REV')),
3330 ] + logopts + walkopts,
3327 ] + logopts + walkopts,
3331 _('[OPTION]... [FILE]'),
3328 _('[OPTION]... [FILE]'),
3332 inferrepo=True)
3329 inferrepo=True)
3333 def log(ui, repo, *pats, **opts):
3330 def log(ui, repo, *pats, **opts):
3334 """show revision history of entire repository or files
3331 """show revision history of entire repository or files
3335
3332
3336 Print the revision history of the specified files or the entire
3333 Print the revision history of the specified files or the entire
3337 project.
3334 project.
3338
3335
3339 If no revision range is specified, the default is ``tip:0`` unless
3336 If no revision range is specified, the default is ``tip:0`` unless
3340 --follow is set, in which case the working directory parent is
3337 --follow is set, in which case the working directory parent is
3341 used as the starting revision.
3338 used as the starting revision.
3342
3339
3343 File history is shown without following rename or copy history of
3340 File history is shown without following rename or copy history of
3344 files. Use -f/--follow with a filename to follow history across
3341 files. Use -f/--follow with a filename to follow history across
3345 renames and copies. --follow without a filename will only show
3342 renames and copies. --follow without a filename will only show
3346 ancestors or descendants of the starting revision.
3343 ancestors or descendants of the starting revision.
3347
3344
3348 By default this command prints revision number and changeset id,
3345 By default this command prints revision number and changeset id,
3349 tags, non-trivial parents, user, date and time, and a summary for
3346 tags, non-trivial parents, user, date and time, and a summary for
3350 each commit. When the -v/--verbose switch is used, the list of
3347 each commit. When the -v/--verbose switch is used, the list of
3351 changed files and full commit message are shown.
3348 changed files and full commit message are shown.
3352
3349
3353 With --graph the revisions are shown as an ASCII art DAG with the most
3350 With --graph the revisions are shown as an ASCII art DAG with the most
3354 recent changeset at the top.
3351 recent changeset at the top.
3355 'o' is a changeset, '@' is a working directory parent, 'x' is obsolete,
3352 'o' is a changeset, '@' is a working directory parent, 'x' is obsolete,
3356 and '+' represents a fork where the changeset from the lines below is a
3353 and '+' represents a fork where the changeset from the lines below is a
3357 parent of the 'o' merge on the same line.
3354 parent of the 'o' merge on the same line.
3358 Paths in the DAG are represented with '|', '/' and so forth. ':' in place
3355 Paths in the DAG are represented with '|', '/' and so forth. ':' in place
3359 of a '|' indicates one or more revisions in a path are omitted.
3356 of a '|' indicates one or more revisions in a path are omitted.
3360
3357
3361 .. note::
3358 .. note::
3362
3359
3363 :hg:`log --patch` may generate unexpected diff output for merge
3360 :hg:`log --patch` may generate unexpected diff output for merge
3364 changesets, as it will only compare the merge changeset against
3361 changesets, as it will only compare the merge changeset against
3365 its first parent. Also, only files different from BOTH parents
3362 its first parent. Also, only files different from BOTH parents
3366 will appear in files:.
3363 will appear in files:.
3367
3364
3368 .. note::
3365 .. note::
3369
3366
3370 For performance reasons, :hg:`log FILE` may omit duplicate changes
3367 For performance reasons, :hg:`log FILE` may omit duplicate changes
3371 made on branches and will not show removals or mode changes. To
3368 made on branches and will not show removals or mode changes. To
3372 see all such changes, use the --removed switch.
3369 see all such changes, use the --removed switch.
3373
3370
3374 .. container:: verbose
3371 .. container:: verbose
3375
3372
3376 Some examples:
3373 Some examples:
3377
3374
3378 - changesets with full descriptions and file lists::
3375 - changesets with full descriptions and file lists::
3379
3376
3380 hg log -v
3377 hg log -v
3381
3378
3382 - changesets ancestral to the working directory::
3379 - changesets ancestral to the working directory::
3383
3380
3384 hg log -f
3381 hg log -f
3385
3382
3386 - last 10 commits on the current branch::
3383 - last 10 commits on the current branch::
3387
3384
3388 hg log -l 10 -b .
3385 hg log -l 10 -b .
3389
3386
3390 - changesets showing all modifications of a file, including removals::
3387 - changesets showing all modifications of a file, including removals::
3391
3388
3392 hg log --removed file.c
3389 hg log --removed file.c
3393
3390
3394 - all changesets that touch a directory, with diffs, excluding merges::
3391 - all changesets that touch a directory, with diffs, excluding merges::
3395
3392
3396 hg log -Mp lib/
3393 hg log -Mp lib/
3397
3394
3398 - all revision numbers that match a keyword::
3395 - all revision numbers that match a keyword::
3399
3396
3400 hg log -k bug --template "{rev}\\n"
3397 hg log -k bug --template "{rev}\\n"
3401
3398
3402 - the full hash identifier of the working directory parent::
3399 - the full hash identifier of the working directory parent::
3403
3400
3404 hg log -r . --template "{node}\\n"
3401 hg log -r . --template "{node}\\n"
3405
3402
3406 - list available log templates::
3403 - list available log templates::
3407
3404
3408 hg log -T list
3405 hg log -T list
3409
3406
3410 - check if a given changeset is included in a tagged release::
3407 - check if a given changeset is included in a tagged release::
3411
3408
3412 hg log -r "a21ccf and ancestor(1.9)"
3409 hg log -r "a21ccf and ancestor(1.9)"
3413
3410
3414 - find all changesets by some user in a date range::
3411 - find all changesets by some user in a date range::
3415
3412
3416 hg log -k alice -d "may 2008 to jul 2008"
3413 hg log -k alice -d "may 2008 to jul 2008"
3417
3414
3418 - summary of all changesets after the last tag::
3415 - summary of all changesets after the last tag::
3419
3416
3420 hg log -r "last(tagged())::" --template "{desc|firstline}\\n"
3417 hg log -r "last(tagged())::" --template "{desc|firstline}\\n"
3421
3418
3422 See :hg:`help dates` for a list of formats valid for -d/--date.
3419 See :hg:`help dates` for a list of formats valid for -d/--date.
3423
3420
3424 See :hg:`help revisions` for more about specifying and ordering
3421 See :hg:`help revisions` for more about specifying and ordering
3425 revisions.
3422 revisions.
3426
3423
3427 See :hg:`help templates` for more about pre-packaged styles and
3424 See :hg:`help templates` for more about pre-packaged styles and
3428 specifying custom templates.
3425 specifying custom templates.
3429
3426
3430 Returns 0 on success.
3427 Returns 0 on success.
3431
3428
3432 """
3429 """
3433 opts = pycompat.byteskwargs(opts)
3430 opts = pycompat.byteskwargs(opts)
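    # Note (editorial comment, not in the original source): --follow combined
    # with explicit --rev is handled by folding both into one revset, namely all
    # ancestors (::) of the requested revisions, emitted newest first.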
3434 if opts.get('follow') and opts.get('rev'):
3431 if opts.get('follow') and opts.get('rev'):
3435 opts['rev'] = [revsetlang.formatspec('reverse(::%lr)', opts.get('rev'))]
3432 opts['rev'] = [revsetlang.formatspec('reverse(::%lr)', opts.get('rev'))]
3436 del opts['follow']
3433 del opts['follow']
3437
3434
3438 if opts.get('graph'):
3435 if opts.get('graph'):
3439 return cmdutil.graphlog(ui, repo, pats, opts)
3436 return cmdutil.graphlog(ui, repo, pats, opts)
3440
3437
3441 revs, expr, filematcher = cmdutil.getlogrevs(repo, pats, opts)
3438 revs, expr, filematcher = cmdutil.getlogrevs(repo, pats, opts)
3442 limit = cmdutil.loglimit(opts)
3439 limit = cmdutil.loglimit(opts)
3443 count = 0
3440 count = 0
3444
3441
3445 getrenamed = None
3442 getrenamed = None
3446 if opts.get('copies'):
3443 if opts.get('copies'):
3447 endrev = None
3444 endrev = None
3448 if opts.get('rev'):
3445 if opts.get('rev'):
3449 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
3446 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
3450 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
3447 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
3451
3448
3452 ui.pager('log')
3449 ui.pager('log')
3453 displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
3450 displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
3454 for rev in revs:
3451 for rev in revs:
3455 if count == limit:
3452 if count == limit:
3456 break
3453 break
3457 ctx = repo[rev]
3454 ctx = repo[rev]
3458 copies = None
3455 copies = None
3459 if getrenamed is not None and rev:
3456 if getrenamed is not None and rev:
3460 copies = []
3457 copies = []
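            # Note (editorial comment, not in the original source): for --copies,
            # ask the rename-tracking helper where each file touched by this
            # revision came from, and hand (new name, old name) pairs to the displayer.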
3461 for fn in ctx.files():
3458 for fn in ctx.files():
3462 rename = getrenamed(fn, rev)
3459 rename = getrenamed(fn, rev)
3463 if rename:
3460 if rename:
3464 copies.append((fn, rename[0]))
3461 copies.append((fn, rename[0]))
3465 if filematcher:
3462 if filematcher:
3466 revmatchfn = filematcher(ctx.rev())
3463 revmatchfn = filematcher(ctx.rev())
3467 else:
3464 else:
3468 revmatchfn = None
3465 revmatchfn = None
3469 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
3466 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
3470 if displayer.flush(ctx):
3467 if displayer.flush(ctx):
3471 count += 1
3468 count += 1
3472
3469
3473 displayer.close()
3470 displayer.close()
3474
3471
3475 @command('manifest',
3472 @command('manifest',
3476 [('r', 'rev', '', _('revision to display'), _('REV')),
3473 [('r', 'rev', '', _('revision to display'), _('REV')),
3477 ('', 'all', False, _("list files from all revisions"))]
3474 ('', 'all', False, _("list files from all revisions"))]
3478 + formatteropts,
3475 + formatteropts,
3479 _('[-r REV]'))
3476 _('[-r REV]'))
3480 def manifest(ui, repo, node=None, rev=None, **opts):
3477 def manifest(ui, repo, node=None, rev=None, **opts):
3481 """output the current or given revision of the project manifest
3478 """output the current or given revision of the project manifest
3482
3479
3483 Print a list of version controlled files for the given revision.
3480 Print a list of version controlled files for the given revision.
3484 If no revision is given, the first parent of the working directory
3481 If no revision is given, the first parent of the working directory
3485 is used, or the null revision if no revision is checked out.
3482 is used, or the null revision if no revision is checked out.
3486
3483
3487 With -v, print file permissions, symlink and executable bits.
3484 With -v, print file permissions, symlink and executable bits.
3488 With --debug, print file revision hashes.
3485 With --debug, print file revision hashes.
3489
3486
3490 If option --all is specified, the list of all files from all revisions
3487 If option --all is specified, the list of all files from all revisions
3491 is printed. This includes deleted and renamed files.
3488 is printed. This includes deleted and renamed files.
3492
3489
3493 Returns 0 on success.
3490 Returns 0 on success.
3494 """
3491 """
3495 opts = pycompat.byteskwargs(opts)
3492 opts = pycompat.byteskwargs(opts)
3496 fm = ui.formatter('manifest', opts)
3493 fm = ui.formatter('manifest', opts)
3497
3494
3498 if opts.get('all'):
3495 if opts.get('all'):
3499 if rev or node:
3496 if rev or node:
3500 raise error.Abort(_("can't specify a revision with --all"))
3497 raise error.Abort(_("can't specify a revision with --all"))
3501
3498
3502 res = []
3499 res = []
3503 prefix = "data/"
3500 prefix = "data/"
3504 suffix = ".i"
3501 suffix = ".i"
3505 plen = len(prefix)
3502 plen = len(prefix)
3506 slen = len(suffix)
3503 slen = len(suffix)
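    # Note (editorial comment, not in the original source): --all scans the
    # store's per-file revlog indexes ("data/<path>.i") directly, so files from
    # every revision, including ones later deleted or renamed, are listed
    # without reading any manifest.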
3507 with repo.lock():
3504 with repo.lock():
3508 for fn, b, size in repo.store.datafiles():
3505 for fn, b, size in repo.store.datafiles():
3509 if size != 0 and fn[-slen:] == suffix and fn[:plen] == prefix:
3506 if size != 0 and fn[-slen:] == suffix and fn[:plen] == prefix:
3510 res.append(fn[plen:-slen])
3507 res.append(fn[plen:-slen])
3511 ui.pager('manifest')
3508 ui.pager('manifest')
3512 for f in res:
3509 for f in res:
3513 fm.startitem()
3510 fm.startitem()
3514 fm.write("path", '%s\n', f)
3511 fm.write("path", '%s\n', f)
3515 fm.end()
3512 fm.end()
3516 return
3513 return
3517
3514
3518 if rev and node:
3515 if rev and node:
3519 raise error.Abort(_("please specify just one revision"))
3516 raise error.Abort(_("please specify just one revision"))
3520
3517
3521 if not node:
3518 if not node:
3522 node = rev
3519 node = rev
3523
3520
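    # Note (editorial comment, not in the original source): map manifest flags
    # to the -v display: 'l' (symlink) shows as '@' with mode 644, 'x'
    # (executable) as '*' with mode 755.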
3524 char = {'l': '@', 'x': '*', '': ''}
3521 char = {'l': '@', 'x': '*', '': ''}
3525 mode = {'l': '644', 'x': '755', '': '644'}
3522 mode = {'l': '644', 'x': '755', '': '644'}
3526 ctx = scmutil.revsingle(repo, node)
3523 ctx = scmutil.revsingle(repo, node)
3527 mf = ctx.manifest()
3524 mf = ctx.manifest()
3528 ui.pager('manifest')
3525 ui.pager('manifest')
3529 for f in ctx:
3526 for f in ctx:
3530 fm.startitem()
3527 fm.startitem()
3531 fl = ctx[f].flags()
3528 fl = ctx[f].flags()
3532 fm.condwrite(ui.debugflag, 'hash', '%s ', hex(mf[f]))
3529 fm.condwrite(ui.debugflag, 'hash', '%s ', hex(mf[f]))
3533 fm.condwrite(ui.verbose, 'mode type', '%s %1s ', mode[fl], char[fl])
3530 fm.condwrite(ui.verbose, 'mode type', '%s %1s ', mode[fl], char[fl])
3534 fm.write('path', '%s\n', f)
3531 fm.write('path', '%s\n', f)
3535 fm.end()
3532 fm.end()
3536
3533
3537 @command('^merge',
3534 @command('^merge',
3538 [('f', 'force', None,
3535 [('f', 'force', None,
3539 _('force a merge including outstanding changes (DEPRECATED)')),
3536 _('force a merge including outstanding changes (DEPRECATED)')),
3540 ('r', 'rev', '', _('revision to merge'), _('REV')),
3537 ('r', 'rev', '', _('revision to merge'), _('REV')),
3541 ('P', 'preview', None,
3538 ('P', 'preview', None,
3542 _('review revisions to merge (no merge is performed)'))
3539 _('review revisions to merge (no merge is performed)'))
3543 ] + mergetoolopts,
3540 ] + mergetoolopts,
3544 _('[-P] [[-r] REV]'))
3541 _('[-P] [[-r] REV]'))
3545 def merge(ui, repo, node=None, **opts):
3542 def merge(ui, repo, node=None, **opts):
3546 """merge another revision into working directory
3543 """merge another revision into working directory
3547
3544
3548 The current working directory is updated with all changes made in
3545 The current working directory is updated with all changes made in
3549 the requested revision since the last common predecessor revision.
3546 the requested revision since the last common predecessor revision.
3550
3547
3551 Files that changed between either parent are marked as changed for
3548 Files that changed between either parent are marked as changed for
3552 the next commit and a commit must be performed before any further
3549 the next commit and a commit must be performed before any further
3553 updates to the repository are allowed. The next commit will have
3550 updates to the repository are allowed. The next commit will have
3554 two parents.
3551 two parents.
3555
3552
3556 ``--tool`` can be used to specify the merge tool used for file
3553 ``--tool`` can be used to specify the merge tool used for file
3557 merges. It overrides the HGMERGE environment variable and your
3554 merges. It overrides the HGMERGE environment variable and your
3558 configuration files. See :hg:`help merge-tools` for options.
3555 configuration files. See :hg:`help merge-tools` for options.
3559
3556
3560 If no revision is specified, the working directory's parent is a
3557 If no revision is specified, the working directory's parent is a
3561 head revision, and the current branch contains exactly one other
3558 head revision, and the current branch contains exactly one other
3562 head, the other head is merged with by default. Otherwise, an
3559 head, the other head is merged with by default. Otherwise, an
3563 explicit revision with which to merge must be provided.
3560 explicit revision with which to merge must be provided.
3564
3561
3565 See :hg:`help resolve` for information on handling file conflicts.
3562 See :hg:`help resolve` for information on handling file conflicts.
3566
3563
3567 To undo an uncommitted merge, use :hg:`update --clean .` which
3564 To undo an uncommitted merge, use :hg:`update --clean .` which
3568 will check out a clean copy of the original merge parent, losing
3565 will check out a clean copy of the original merge parent, losing
3569 all changes.
3566 all changes.
3570
3567
3571 Returns 0 on success, 1 if there are unresolved files.
3568 Returns 0 on success, 1 if there are unresolved files.
3572 """
3569 """
3573
3570
3574 opts = pycompat.byteskwargs(opts)
3571 opts = pycompat.byteskwargs(opts)
3575 if opts.get('rev') and node:
3572 if opts.get('rev') and node:
3576 raise error.Abort(_("please specify just one revision"))
3573 raise error.Abort(_("please specify just one revision"))
3577 if not node:
3574 if not node:
3578 node = opts.get('rev')
3575 node = opts.get('rev')
3579
3576
3580 if node:
3577 if node:
3581 node = scmutil.revsingle(repo, node).node()
3578 node = scmutil.revsingle(repo, node).node()
3582
3579
3583 if not node:
3580 if not node:
3584 node = repo[destutil.destmerge(repo)].node()
3581 node = repo[destutil.destmerge(repo)].node()
3585
3582
3586 if opts.get('preview'):
3583 if opts.get('preview'):
3587 # find nodes that are ancestors of p2 but not of p1
3584 # find nodes that are ancestors of p2 but not of p1
3588 p1 = repo.lookup('.')
3585 p1 = repo.lookup('.')
3589 p2 = repo.lookup(node)
3586 p2 = repo.lookup(node)
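        # Note (editorial comment, not in the original source): findmissing()
        # yields exactly what the merge would bring in: ancestors of the merge
        # target that are not already ancestors of the working parent.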
3590 nodes = repo.changelog.findmissing(common=[p1], heads=[p2])
3587 nodes = repo.changelog.findmissing(common=[p1], heads=[p2])
3591
3588
3592 displayer = cmdutil.show_changeset(ui, repo, opts)
3589 displayer = cmdutil.show_changeset(ui, repo, opts)
3593 for node in nodes:
3590 for node in nodes:
3594 displayer.show(repo[node])
3591 displayer.show(repo[node])
3595 displayer.close()
3592 displayer.close()
3596 return 0
3593 return 0
3597
3594
3598 try:
3595 try:
3599 # ui.forcemerge is an internal variable, do not document
3596 # ui.forcemerge is an internal variable, do not document
3600 repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''), 'merge')
3597 repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''), 'merge')
3601 force = opts.get('force')
3598 force = opts.get('force')
3602 labels = ['working copy', 'merge rev']
3599 labels = ['working copy', 'merge rev']
3603 return hg.merge(repo, node, force=force, mergeforce=force,
3600 return hg.merge(repo, node, force=force, mergeforce=force,
3604 labels=labels)
3601 labels=labels)
3605 finally:
3602 finally:
3606 ui.setconfig('ui', 'forcemerge', '', 'merge')
3603 ui.setconfig('ui', 'forcemerge', '', 'merge')
3607
3604
3608 @command('outgoing|out',
3605 @command('outgoing|out',
3609 [('f', 'force', None, _('run even when the destination is unrelated')),
3606 [('f', 'force', None, _('run even when the destination is unrelated')),
3610 ('r', 'rev', [],
3607 ('r', 'rev', [],
3611 _('a changeset intended to be included in the destination'), _('REV')),
3608 _('a changeset intended to be included in the destination'), _('REV')),
3612 ('n', 'newest-first', None, _('show newest record first')),
3609 ('n', 'newest-first', None, _('show newest record first')),
3613 ('B', 'bookmarks', False, _('compare bookmarks')),
3610 ('B', 'bookmarks', False, _('compare bookmarks')),
3614 ('b', 'branch', [], _('a specific branch you would like to push'),
3611 ('b', 'branch', [], _('a specific branch you would like to push'),
3615 _('BRANCH')),
3612 _('BRANCH')),
3616 ] + logopts + remoteopts + subrepoopts,
3613 ] + logopts + remoteopts + subrepoopts,
3617 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]'))
3614 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]'))
3618 def outgoing(ui, repo, dest=None, **opts):
3615 def outgoing(ui, repo, dest=None, **opts):
3619 """show changesets not found in the destination
3616 """show changesets not found in the destination
3620
3617
3621 Show changesets not found in the specified destination repository
3618 Show changesets not found in the specified destination repository
3622 or the default push location. These are the changesets that would
3619 or the default push location. These are the changesets that would
3623 be pushed if a push was requested.
3620 be pushed if a push was requested.
3624
3621
3625 See pull for details of valid destination formats.
3622 See pull for details of valid destination formats.
3626
3623
3627 .. container:: verbose
3624 .. container:: verbose
3628
3625
3629 With -B/--bookmarks, the result of bookmark comparison between
3626 With -B/--bookmarks, the result of bookmark comparison between
3630 local and remote repositories is displayed. With -v/--verbose,
3627 local and remote repositories is displayed. With -v/--verbose,
3631 status is also displayed for each bookmark like below::
3628 status is also displayed for each bookmark like below::
3632
3629
3633 BM1 01234567890a added
3630 BM1 01234567890a added
3634 BM2 deleted
3631 BM2 deleted
3635 BM3 234567890abc advanced
3632 BM3 234567890abc advanced
3636 BM4 34567890abcd diverged
3633 BM4 34567890abcd diverged
3637 BM5 4567890abcde changed
3634 BM5 4567890abcde changed
3638
3635
3639 The action taken when pushing depends on the
3636 The action taken when pushing depends on the
3640 status of each bookmark:
3637 status of each bookmark:
3641
3638
3642 :``added``: push with ``-B`` will create it
3639 :``added``: push with ``-B`` will create it
3643 :``deleted``: push with ``-B`` will delete it
3640 :``deleted``: push with ``-B`` will delete it
3644 :``advanced``: push will update it
3641 :``advanced``: push will update it
3645 :``diverged``: push with ``-B`` will update it
3642 :``diverged``: push with ``-B`` will update it
3646 :``changed``: push with ``-B`` will update it
3643 :``changed``: push with ``-B`` will update it
3647
3644
3648 From the point of view of pushing behavior, bookmarks
3645 From the point of view of pushing behavior, bookmarks
3649 existing only in the remote repository are treated as
3646 existing only in the remote repository are treated as
3650 ``deleted``, even if they were in fact added remotely.
3647 ``deleted``, even if they were in fact added remotely.
3651
3648
3652 Returns 0 if there are outgoing changes, 1 otherwise.
3649 Returns 0 if there are outgoing changes, 1 otherwise.
3653 """
3650 """
3654 opts = pycompat.byteskwargs(opts)
3651 opts = pycompat.byteskwargs(opts)
3655 if opts.get('graph'):
3652 if opts.get('graph'):
3656 cmdutil.checkunsupportedgraphflags([], opts)
3653 cmdutil.checkunsupportedgraphflags([], opts)
3657 o, other = hg._outgoing(ui, repo, dest, opts)
3654 o, other = hg._outgoing(ui, repo, dest, opts)
3658 if not o:
3655 if not o:
3659 cmdutil.outgoinghooks(ui, repo, other, opts, o)
3656 cmdutil.outgoinghooks(ui, repo, other, opts, o)
3660 return
3657 return
3661
3658
3662 revdag = cmdutil.graphrevs(repo, o, opts)
3659 revdag = cmdutil.graphrevs(repo, o, opts)
3663 ui.pager('outgoing')
3660 ui.pager('outgoing')
3664 displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
3661 displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
3665 cmdutil.displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges)
3662 cmdutil.displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges)
3666 cmdutil.outgoinghooks(ui, repo, other, opts, o)
3663 cmdutil.outgoinghooks(ui, repo, other, opts, o)
3667 return 0
3664 return 0
3668
3665
3669 if opts.get('bookmarks'):
3666 if opts.get('bookmarks'):
3670 dest = ui.expandpath(dest or 'default-push', dest or 'default')
3667 dest = ui.expandpath(dest or 'default-push', dest or 'default')
3671 dest, branches = hg.parseurl(dest, opts.get('branch'))
3668 dest, branches = hg.parseurl(dest, opts.get('branch'))
3672 other = hg.peer(repo, opts, dest)
3669 other = hg.peer(repo, opts, dest)
3673 if 'bookmarks' not in other.listkeys('namespaces'):
3670 if 'bookmarks' not in other.listkeys('namespaces'):
3674 ui.warn(_("remote doesn't support bookmarks\n"))
3671 ui.warn(_("remote doesn't support bookmarks\n"))
3675 return 0
3672 return 0
3676 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
3673 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
3677 ui.pager('outgoing')
3674 ui.pager('outgoing')
3678 return bookmarks.outgoing(ui, repo, other)
3675 return bookmarks.outgoing(ui, repo, other)
3679
3676
3680 repo._subtoppath = ui.expandpath(dest or 'default-push', dest or 'default')
3677 repo._subtoppath = ui.expandpath(dest or 'default-push', dest or 'default')
3681 try:
3678 try:
3682 return hg.outgoing(ui, repo, dest, opts)
3679 return hg.outgoing(ui, repo, dest, opts)
3683 finally:
3680 finally:
3684 del repo._subtoppath
3681 del repo._subtoppath
3685
3682
3686 @command('parents',
3683 @command('parents',
3687 [('r', 'rev', '', _('show parents of the specified revision'), _('REV')),
3684 [('r', 'rev', '', _('show parents of the specified revision'), _('REV')),
3688 ] + templateopts,
3685 ] + templateopts,
3689 _('[-r REV] [FILE]'),
3686 _('[-r REV] [FILE]'),
3690 inferrepo=True)
3687 inferrepo=True)
3691 def parents(ui, repo, file_=None, **opts):
3688 def parents(ui, repo, file_=None, **opts):
3692 """show the parents of the working directory or revision (DEPRECATED)
3689 """show the parents of the working directory or revision (DEPRECATED)
3693
3690
3694 Print the working directory's parent revisions. If a revision is
3691 Print the working directory's parent revisions. If a revision is
3695 given via -r/--rev, the parent of that revision will be printed.
3692 given via -r/--rev, the parent of that revision will be printed.
3696 If a file argument is given, the revision in which the file was
3693 If a file argument is given, the revision in which the file was
3697 last changed (before the working directory revision or the
3694 last changed (before the working directory revision or the
3698 argument to --rev if given) is printed.
3695 argument to --rev if given) is printed.
3699
3696
3700 This command is equivalent to::
3697 This command is equivalent to::
3701
3698
3702 hg log -r "p1()+p2()" or
3699 hg log -r "p1()+p2()" or
3703 hg log -r "p1(REV)+p2(REV)" or
3700 hg log -r "p1(REV)+p2(REV)" or
3704 hg log -r "max(::p1() and file(FILE))+max(::p2() and file(FILE))" or
3701 hg log -r "max(::p1() and file(FILE))+max(::p2() and file(FILE))" or
3705 hg log -r "max(::p1(REV) and file(FILE))+max(::p2(REV) and file(FILE))"
3702 hg log -r "max(::p1(REV) and file(FILE))+max(::p2(REV) and file(FILE))"
3706
3703
3707 See :hg:`summary` and :hg:`help revsets` for related information.
3704 See :hg:`summary` and :hg:`help revsets` for related information.
3708
3705
3709 Returns 0 on success.
3706 Returns 0 on success.
3710 """
3707 """
3711
3708
3712 opts = pycompat.byteskwargs(opts)
3709 opts = pycompat.byteskwargs(opts)
3713 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
3710 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
3714
3711
3715 if file_:
3712 if file_:
3716 m = scmutil.match(ctx, (file_,), opts)
3713 m = scmutil.match(ctx, (file_,), opts)
3717 if m.anypats() or len(m.files()) != 1:
3714 if m.anypats() or len(m.files()) != 1:
3718 raise error.Abort(_('can only specify an explicit filename'))
3715 raise error.Abort(_('can only specify an explicit filename'))
3719 file_ = m.files()[0]
3716 file_ = m.files()[0]
3720 filenodes = []
3717 filenodes = []
3721 for cp in ctx.parents():
3718 for cp in ctx.parents():
3722 if not cp:
3719 if not cp:
3723 continue
3720 continue
3724 try:
3721 try:
3725 filenodes.append(cp.filenode(file_))
3722 filenodes.append(cp.filenode(file_))
3726 except error.LookupError:
3723 except error.LookupError:
3727 pass
3724 pass
3728 if not filenodes:
3725 if not filenodes:
3729 raise error.Abort(_("'%s' not found in manifest!") % file_)
3726 raise error.Abort(_("'%s' not found in manifest!") % file_)
3730 p = []
3727 p = []
3731 for fn in filenodes:
3728 for fn in filenodes:
3732 fctx = repo.filectx(file_, fileid=fn)
3729 fctx = repo.filectx(file_, fileid=fn)
3733 p.append(fctx.node())
3730 p.append(fctx.node())
3734 else:
3731 else:
3735 p = [cp.node() for cp in ctx.parents()]
3732 p = [cp.node() for cp in ctx.parents()]
3736
3733
3737 displayer = cmdutil.show_changeset(ui, repo, opts)
3734 displayer = cmdutil.show_changeset(ui, repo, opts)
3738 for n in p:
3735 for n in p:
3739 if n != nullid:
3736 if n != nullid:
3740 displayer.show(repo[n])
3737 displayer.show(repo[n])
3741 displayer.close()
3738 displayer.close()
3742
3739
3743 @command('paths', formatteropts, _('[NAME]'), optionalrepo=True)
3740 @command('paths', formatteropts, _('[NAME]'), optionalrepo=True)
3744 def paths(ui, repo, search=None, **opts):
3741 def paths(ui, repo, search=None, **opts):
3745 """show aliases for remote repositories
3742 """show aliases for remote repositories
3746
3743
3747 Show definition of symbolic path name NAME. If no name is given,
3744 Show definition of symbolic path name NAME. If no name is given,
3748 show definition of all available names.
3745 show definition of all available names.
3749
3746
3750 Option -q/--quiet suppresses all output when searching for NAME
3747 Option -q/--quiet suppresses all output when searching for NAME
3751 and shows only the path names when listing all definitions.
3748 and shows only the path names when listing all definitions.
3752
3749
3753 Path names are defined in the [paths] section of your
3750 Path names are defined in the [paths] section of your
3754 configuration file and in ``/etc/mercurial/hgrc``. If run inside a
3751 configuration file and in ``/etc/mercurial/hgrc``. If run inside a
3755 repository, ``.hg/hgrc`` is used, too.
3752 repository, ``.hg/hgrc`` is used, too.
3756
3753
3757 The path names ``default`` and ``default-push`` have a special
3754 The path names ``default`` and ``default-push`` have a special
3758 meaning. When performing a push or pull operation, they are used
3755 meaning. When performing a push or pull operation, they are used
3759 as fallbacks if no location is specified on the command-line.
3756 as fallbacks if no location is specified on the command-line.
3760 When ``default-push`` is set, it will be used for push and
3757 When ``default-push`` is set, it will be used for push and
3761 ``default`` will be used for pull; otherwise ``default`` is used
3758 ``default`` will be used for pull; otherwise ``default`` is used
3762 as the fallback for both. When cloning a repository, the clone
3759 as the fallback for both. When cloning a repository, the clone
3763 source is written as ``default`` in ``.hg/hgrc``.
3760 source is written as ``default`` in ``.hg/hgrc``.
3764
3761
3765 .. note::
3762 .. note::
3766
3763
3767 ``default`` and ``default-push`` apply to all inbound (e.g.
3764 ``default`` and ``default-push`` apply to all inbound (e.g.
3768 :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email`
3765 :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email`
3769 and :hg:`bundle`) operations.
3766 and :hg:`bundle`) operations.
3770
3767
3771 See :hg:`help urls` for more information.
3768 See :hg:`help urls` for more information.
3772
3769
3773 Returns 0 on success.
3770 Returns 0 on success.
3774 """
3771 """
3775
3772
3776 opts = pycompat.byteskwargs(opts)
3773 opts = pycompat.byteskwargs(opts)
3777 ui.pager('paths')
3774 ui.pager('paths')
3778 if search:
3775 if search:
3779 pathitems = [(name, path) for name, path in ui.paths.iteritems()
3776 pathitems = [(name, path) for name, path in ui.paths.iteritems()
3780 if name == search]
3777 if name == search]
3781 else:
3778 else:
3782 pathitems = sorted(ui.paths.iteritems())
3779 pathitems = sorted(ui.paths.iteritems())
3783
3780
3784 fm = ui.formatter('paths', opts)
3781 fm = ui.formatter('paths', opts)
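    # Note (editorial comment, not in the original source): passwords embedded
    # in URLs are masked only for plain, human-readable output; templated
    # output (e.g. -Tjson) keeps the raw location string.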
3785 if fm.isplain():
3782 if fm.isplain():
3786 hidepassword = util.hidepassword
3783 hidepassword = util.hidepassword
3787 else:
3784 else:
3788 hidepassword = str
3785 hidepassword = str
3789 if ui.quiet:
3786 if ui.quiet:
3790 namefmt = '%s\n'
3787 namefmt = '%s\n'
3791 else:
3788 else:
3792 namefmt = '%s = '
3789 namefmt = '%s = '
3793 showsubopts = not search and not ui.quiet
3790 showsubopts = not search and not ui.quiet
3794
3791
3795 for name, path in pathitems:
3792 for name, path in pathitems:
3796 fm.startitem()
3793 fm.startitem()
3797 fm.condwrite(not search, 'name', namefmt, name)
3794 fm.condwrite(not search, 'name', namefmt, name)
3798 fm.condwrite(not ui.quiet, 'url', '%s\n', hidepassword(path.rawloc))
3795 fm.condwrite(not ui.quiet, 'url', '%s\n', hidepassword(path.rawloc))
3799 for subopt, value in sorted(path.suboptions.items()):
3796 for subopt, value in sorted(path.suboptions.items()):
3800 assert subopt not in ('name', 'url')
3797 assert subopt not in ('name', 'url')
3801 if showsubopts:
3798 if showsubopts:
3802 fm.plain('%s:%s = ' % (name, subopt))
3799 fm.plain('%s:%s = ' % (name, subopt))
3803 fm.condwrite(showsubopts, subopt, '%s\n', value)
3800 fm.condwrite(showsubopts, subopt, '%s\n', value)
3804
3801
3805 fm.end()
3802 fm.end()
3806
3803
3807 if search and not pathitems:
3804 if search and not pathitems:
3808 if not ui.quiet:
3805 if not ui.quiet:
3809 ui.warn(_("not found!\n"))
3806 ui.warn(_("not found!\n"))
3810 return 1
3807 return 1
3811 else:
3808 else:
3812 return 0
3809 return 0
3813
3810
3814 @command('phase',
3811 @command('phase',
3815 [('p', 'public', False, _('set changeset phase to public')),
3812 [('p', 'public', False, _('set changeset phase to public')),
3816 ('d', 'draft', False, _('set changeset phase to draft')),
3813 ('d', 'draft', False, _('set changeset phase to draft')),
3817 ('s', 'secret', False, _('set changeset phase to secret')),
3814 ('s', 'secret', False, _('set changeset phase to secret')),
3818 ('f', 'force', False, _('allow to move boundary backward')),
3815 ('f', 'force', False, _('allow to move boundary backward')),
3819 ('r', 'rev', [], _('target revision'), _('REV')),
3816 ('r', 'rev', [], _('target revision'), _('REV')),
3820 ],
3817 ],
3821 _('[-p|-d|-s] [-f] [-r] [REV...]'))
3818 _('[-p|-d|-s] [-f] [-r] [REV...]'))
3822 def phase(ui, repo, *revs, **opts):
3819 def phase(ui, repo, *revs, **opts):
3823 """set or show the current phase name
3820 """set or show the current phase name
3824
3821
3825 With no argument, show the phase name of the current revision(s).
3822 With no argument, show the phase name of the current revision(s).
3826
3823
3827 With one of -p/--public, -d/--draft or -s/--secret, change the
3824 With one of -p/--public, -d/--draft or -s/--secret, change the
3828 phase value of the specified revisions.
3825 phase value of the specified revisions.
3829
3826
3830 Unless -f/--force is specified, :hg:`phase` won't move changesets from a
3827 Unless -f/--force is specified, :hg:`phase` won't move changesets from a
3831 lower phase to a higher phase. Phases are ordered as follows::
3828 lower phase to a higher phase. Phases are ordered as follows::
3832
3829
3833 public < draft < secret
3830 public < draft < secret
3834
3831
3835 Returns 0 on success, 1 if some phases could not be changed.
3832 Returns 0 on success, 1 if some phases could not be changed.
3836
3833
3837 (For more information about the phases concept, see :hg:`help phases`.)
3834 (For more information about the phases concept, see :hg:`help phases`.)
3838 """
3835 """
3839 opts = pycompat.byteskwargs(opts)
3836 opts = pycompat.byteskwargs(opts)
3840 # search for a unique phase argument
3837 # search for a unique phase argument
3841 targetphase = None
3838 targetphase = None
3842 for idx, name in enumerate(phases.phasenames):
3839 for idx, name in enumerate(phases.phasenames):
3843 if opts[name]:
3840 if opts[name]:
3844 if targetphase is not None:
3841 if targetphase is not None:
3845 raise error.Abort(_('only one phase can be specified'))
3842 raise error.Abort(_('only one phase can be specified'))
3846 targetphase = idx
3843 targetphase = idx
3847
3844
3848 # look for specified revision
3845 # look for specified revision
3849 revs = list(revs)
3846 revs = list(revs)
3850 revs.extend(opts['rev'])
3847 revs.extend(opts['rev'])
3851 if not revs:
3848 if not revs:
3852 # display both parents as the second parent phase can influence
3849 # display both parents as the second parent phase can influence
3853 # the phase of a merge commit
3850 # the phase of a merge commit
3854 revs = [c.rev() for c in repo[None].parents()]
3851 revs = [c.rev() for c in repo[None].parents()]
3855
3852
3856 revs = scmutil.revrange(repo, revs)
3853 revs = scmutil.revrange(repo, revs)
3857
3854
3858 lock = None
3855 lock = None
3859 ret = 0
3856 ret = 0
3860 if targetphase is None:
3857 if targetphase is None:
3861 # display
3858 # display
3862 for r in revs:
3859 for r in revs:
3863 ctx = repo[r]
3860 ctx = repo[r]
3864 ui.write('%i: %s\n' % (ctx.rev(), ctx.phasestr()))
3861 ui.write('%i: %s\n' % (ctx.rev(), ctx.phasestr()))
3865 else:
3862 else:
3866 tr = None
3863 tr = None
3867 lock = repo.lock()
3864 lock = repo.lock()
3868 try:
3865 try:
3869 tr = repo.transaction("phase")
3866 tr = repo.transaction("phase")
3870 # set phase
3867 # set phase
3871 if not revs:
3868 if not revs:
3872 raise error.Abort(_('empty revision set'))
3869 raise error.Abort(_('empty revision set'))
3873 nodes = [repo[r].node() for r in revs]
3870 nodes = [repo[r].node() for r in revs]
3874 # moving revisions from public to draft may hide them
3871 # moving revisions from public to draft may hide them
3875 # We have to check the result on an unfiltered repository
3872 # We have to check the result on an unfiltered repository
3876 unfi = repo.unfiltered()
3873 unfi = repo.unfiltered()
3877 getphase = unfi._phasecache.phase
3874 getphase = unfi._phasecache.phase
3878 olddata = [getphase(unfi, r) for r in unfi]
3875 olddata = [getphase(unfi, r) for r in unfi]
3879 phases.advanceboundary(repo, tr, targetphase, nodes)
3876 phases.advanceboundary(repo, tr, targetphase, nodes)
3880 if opts['force']:
3877 if opts['force']:
3881 phases.retractboundary(repo, tr, targetphase, nodes)
3878 phases.retractboundary(repo, tr, targetphase, nodes)
3882 tr.close()
3879 tr.close()
3883 finally:
3880 finally:
3884 if tr is not None:
3881 if tr is not None:
3885 tr.release()
3882 tr.release()
3886 lock.release()
3883 lock.release()
3887 getphase = unfi._phasecache.phase
3884 getphase = unfi._phasecache.phase
3888 newdata = [getphase(unfi, r) for r in unfi]
3885 newdata = [getphase(unfi, r) for r in unfi]
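        # Note (editorial comment, not in the original source): comparing
        # per-revision phases from before and after the operation, 'changes'
        # counts revisions whose phase actually moved, while 'rejected' collects
        # requested nodes still stuck in a lower phase (raising a phase needs --force).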
3889 changes = sum(newdata[r] != olddata[r] for r in unfi)
3886 changes = sum(newdata[r] != olddata[r] for r in unfi)
3890 cl = unfi.changelog
3887 cl = unfi.changelog
3891 rejected = [n for n in nodes
3888 rejected = [n for n in nodes
3892 if newdata[cl.rev(n)] < targetphase]
3889 if newdata[cl.rev(n)] < targetphase]
3893 if rejected:
3890 if rejected:
3894 ui.warn(_('cannot move %i changesets to a higher '
3891 ui.warn(_('cannot move %i changesets to a higher '
3895 'phase, use --force\n') % len(rejected))
3892 'phase, use --force\n') % len(rejected))
3896 ret = 1
3893 ret = 1
3897 if changes:
3894 if changes:
3898 msg = _('phase changed for %i changesets\n') % changes
3895 msg = _('phase changed for %i changesets\n') % changes
3899 if ret:
3896 if ret:
3900 ui.status(msg)
3897 ui.status(msg)
3901 else:
3898 else:
3902 ui.note(msg)
3899 ui.note(msg)
3903 else:
3900 else:
3904 ui.warn(_('no phases changed\n'))
3901 ui.warn(_('no phases changed\n'))
3905 return ret
3902 return ret
3906
3903
3907 def postincoming(ui, repo, modheads, optupdate, checkout, brev):
3904 def postincoming(ui, repo, modheads, optupdate, checkout, brev):
3908 """Run after a changegroup has been added via pull/unbundle
3905 """Run after a changegroup has been added via pull/unbundle
3909
3906
3910 This takes the following arguments:
3907 This takes the following arguments:
3911
3908
3912 :modheads: change of heads by pull/unbundle
3909 :modheads: change of heads by pull/unbundle
3913 :optupdate: whether the working directory should be updated
3910 :optupdate: whether the working directory should be updated
3914 :checkout: update destination revision (or None to default destination)
3911 :checkout: update destination revision (or None to default destination)
3915 :brev: a name, which might be a bookmark to be activated after updating
3912 :brev: a name, which might be a bookmark to be activated after updating
3916 """
3913 """
3917 if modheads == 0:
3914 if modheads == 0:
3918 return
3915 return
3919 if optupdate:
3916 if optupdate:
3920 try:
3917 try:
3921 return hg.updatetotally(ui, repo, checkout, brev)
3918 return hg.updatetotally(ui, repo, checkout, brev)
3922 except error.UpdateAbort as inst:
3919 except error.UpdateAbort as inst:
3923 msg = _("not updating: %s") % str(inst)
3920 msg = _("not updating: %s") % str(inst)
3924 hint = inst.hint
3921 hint = inst.hint
3925 raise error.UpdateAbort(msg, hint=hint)
3922 raise error.UpdateAbort(msg, hint=hint)
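    # Note (editorial comment, not in the original source): no automatic update
    # was requested, so only hint at a sensible next step, depending on whether
    # the incoming changesets created additional heads.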
3926 if modheads > 1:
3923 if modheads > 1:
3927 currentbranchheads = len(repo.branchheads())
3924 currentbranchheads = len(repo.branchheads())
3928 if currentbranchheads == modheads:
3925 if currentbranchheads == modheads:
3929 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
3926 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
3930 elif currentbranchheads > 1:
3927 elif currentbranchheads > 1:
3931 ui.status(_("(run 'hg heads .' to see heads, 'hg merge' to "
3928 ui.status(_("(run 'hg heads .' to see heads, 'hg merge' to "
3932 "merge)\n"))
3929 "merge)\n"))
3933 else:
3930 else:
3934 ui.status(_("(run 'hg heads' to see heads)\n"))
3931 ui.status(_("(run 'hg heads' to see heads)\n"))
3935 else:
3932 else:
3936 ui.status(_("(run 'hg update' to get a working copy)\n"))
3933 ui.status(_("(run 'hg update' to get a working copy)\n"))
3937
3934
3938 @command('^pull',
3935 @command('^pull',
3939 [('u', 'update', None,
3936 [('u', 'update', None,
3940 _('update to new branch head if changesets were pulled')),
3937 _('update to new branch head if changesets were pulled')),
3941 ('f', 'force', None, _('run even when remote repository is unrelated')),
3938 ('f', 'force', None, _('run even when remote repository is unrelated')),
3942 ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
3939 ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
3943 ('B', 'bookmark', [], _("bookmark to pull"), _('BOOKMARK')),
3940 ('B', 'bookmark', [], _("bookmark to pull"), _('BOOKMARK')),
3944 ('b', 'branch', [], _('a specific branch you would like to pull'),
3941 ('b', 'branch', [], _('a specific branch you would like to pull'),
3945 _('BRANCH')),
3942 _('BRANCH')),
3946 ] + remoteopts,
3943 ] + remoteopts,
3947 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]'))
3944 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]'))
3948 def pull(ui, repo, source="default", **opts):
3945 def pull(ui, repo, source="default", **opts):
3949 """pull changes from the specified source
3946 """pull changes from the specified source
3950
3947
3951 Pull changes from a remote repository to a local one.
3948 Pull changes from a remote repository to a local one.
3952
3949
3953 This finds all changes from the repository at the specified path
3950 This finds all changes from the repository at the specified path
3954 or URL and adds them to a local repository (the current one unless
3951 or URL and adds them to a local repository (the current one unless
3955 -R is specified). By default, this does not update the copy of the
3952 -R is specified). By default, this does not update the copy of the
3956 project in the working directory.
3953 project in the working directory.
3957
3954
3958 Use :hg:`incoming` if you want to see what would have been added
3955 Use :hg:`incoming` if you want to see what would have been added
3959 by a pull at the time you issued this command. If you then decide
3956 by a pull at the time you issued this command. If you then decide
3960 to add those changes to the repository, you should use :hg:`pull
3957 to add those changes to the repository, you should use :hg:`pull
3961 -r X` where ``X`` is the last changeset listed by :hg:`incoming`.
3958 -r X` where ``X`` is the last changeset listed by :hg:`incoming`.
3962
3959
3963 If SOURCE is omitted, the 'default' path will be used.
3960 If SOURCE is omitted, the 'default' path will be used.
3964 See :hg:`help urls` for more information.
3961 See :hg:`help urls` for more information.
3965
3962
3966 Specifying a bookmark as ``.`` is equivalent to specifying the active
3963 Specifying a bookmark as ``.`` is equivalent to specifying the active
3967 bookmark's name.
3964 bookmark's name.
3968
3965
3969 Returns 0 on success, 1 if an update had unresolved files.
3966 Returns 0 on success, 1 if an update had unresolved files.
3970 """
3967 """
3971
3968
3972 opts = pycompat.byteskwargs(opts)
3969 opts = pycompat.byteskwargs(opts)
3973 if ui.configbool('commands', 'update.requiredest') and opts.get('update'):
3970 if ui.configbool('commands', 'update.requiredest') and opts.get('update'):
3974 msg = _('update destination required by configuration')
3971 msg = _('update destination required by configuration')
3975 hint = _('use hg pull followed by hg update DEST')
3972 hint = _('use hg pull followed by hg update DEST')
3976 raise error.Abort(msg, hint=hint)
3973 raise error.Abort(msg, hint=hint)
3977
3974
3978 source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
3975 source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
3979 ui.status(_('pulling from %s\n') % util.hidepassword(source))
3976 ui.status(_('pulling from %s\n') % util.hidepassword(source))
3980 other = hg.peer(repo, opts, source)
3977 other = hg.peer(repo, opts, source)
3981 try:
3978 try:
3982 revs, checkout = hg.addbranchrevs(repo, other, branches,
3979 revs, checkout = hg.addbranchrevs(repo, other, branches,
3983 opts.get('rev'))
3980 opts.get('rev'))
3984
3981
3985
3982
3986 pullopargs = {}
3983 pullopargs = {}
3987 if opts.get('bookmark'):
3984 if opts.get('bookmark'):
3988 if not revs:
3985 if not revs:
3989 revs = []
3986 revs = []
3990 # The list of bookmarks used here is not the one used to actually
3987 # The list of bookmarks used here is not the one used to actually
3991 # update the bookmark name. This can result in the revision pulled
3988 # update the bookmark name. This can result in the revision pulled
3992 # not ending up with the name of the bookmark because of a race
3989 # not ending up with the name of the bookmark because of a race
3993 # condition on the server. (See issue 4689 for details)
3990 # condition on the server. (See issue 4689 for details)
3994 remotebookmarks = other.listkeys('bookmarks')
3991 remotebookmarks = other.listkeys('bookmarks')
3995 pullopargs['remotebookmarks'] = remotebookmarks
3992 pullopargs['remotebookmarks'] = remotebookmarks
3996 for b in opts['bookmark']:
3993 for b in opts['bookmark']:
3997 b = repo._bookmarks.expandname(b)
3994 b = repo._bookmarks.expandname(b)
3998 if b not in remotebookmarks:
3995 if b not in remotebookmarks:
3999 raise error.Abort(_('remote bookmark %s not found!') % b)
3996 raise error.Abort(_('remote bookmark %s not found!') % b)
4000 revs.append(remotebookmarks[b])
3997 revs.append(remotebookmarks[b])
4001
3998
4002 if revs:
3999 if revs:
4003 try:
4000 try:
4004 # When 'rev' is a bookmark name, we cannot guarantee that it
4001 # When 'rev' is a bookmark name, we cannot guarantee that it
4005 # will be updated with that name because of a race condition
4002 # will be updated with that name because of a race condition
4006 # server side. (See issue 4689 for details)
4003 # server side. (See issue 4689 for details)
4007 oldrevs = revs
4004 oldrevs = revs
4008 revs = [] # actually, nodes
4005 revs = [] # actually, nodes
4009 for r in oldrevs:
4006 for r in oldrevs:
4010 node = other.lookup(r)
4007 node = other.lookup(r)
4011 revs.append(node)
4008 revs.append(node)
4012 if r == checkout:
4009 if r == checkout:
4013 checkout = node
4010 checkout = node
4014 except error.CapabilityError:
4011 except error.CapabilityError:
4015 err = _("other repository doesn't support revision lookup, "
4012 err = _("other repository doesn't support revision lookup, "
4016 "so a rev cannot be specified.")
4013 "so a rev cannot be specified.")
4017 raise error.Abort(err)
4014 raise error.Abort(err)
4018
4015
4019 pullopargs.update(opts.get('opargs', {}))
4016 pullopargs.update(opts.get('opargs', {}))
4020 modheads = exchange.pull(repo, other, heads=revs,
4017 modheads = exchange.pull(repo, other, heads=revs,
4021 force=opts.get('force'),
4018 force=opts.get('force'),
4022 bookmarks=opts.get('bookmark', ()),
4019 bookmarks=opts.get('bookmark', ()),
4023 opargs=pullopargs).cgresult
4020 opargs=pullopargs).cgresult
4024
4021
4025 # brev is a name, which might be a bookmark to be activated at
4022 # brev is a name, which might be a bookmark to be activated at
4026 # the end of the update. In other words, it is an explicit
4023 # the end of the update. In other words, it is an explicit
4027 # destination of the update
4024 # destination of the update
4028 brev = None
4025 brev = None
4029
4026
4030 if checkout:
4027 if checkout:
4031 checkout = str(repo.changelog.rev(checkout))
4028 checkout = str(repo.changelog.rev(checkout))
4032
4029
4033 # order below depends on implementation of
4030 # order below depends on implementation of
4034 # hg.addbranchrevs(). opts['bookmark'] is ignored,
4031 # hg.addbranchrevs(). opts['bookmark'] is ignored,
4035 # because 'checkout' is determined without it.
4032 # because 'checkout' is determined without it.
4036 if opts.get('rev'):
4033 if opts.get('rev'):
4037 brev = opts['rev'][0]
4034 brev = opts['rev'][0]
4038 elif opts.get('branch'):
4035 elif opts.get('branch'):
4039 brev = opts['branch'][0]
4036 brev = opts['branch'][0]
4040 else:
4037 else:
4041 brev = branches[0]
4038 brev = branches[0]
4042 repo._subtoppath = source
4039 repo._subtoppath = source
4043 try:
4040 try:
4044 ret = postincoming(ui, repo, modheads, opts.get('update'),
4041 ret = postincoming(ui, repo, modheads, opts.get('update'),
4045 checkout, brev)
4042 checkout, brev)
4046
4043
4047 finally:
4044 finally:
4048 del repo._subtoppath
4045 del repo._subtoppath
4049
4046
4050 finally:
4047 finally:
4051 other.close()
4048 other.close()
4052 return ret
4049 return ret
4053
4050
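# Example (editor's addition): a minimal, hedged sketch of pulling a single
# remote bookmark programmatically, mirroring the calls pull() makes above
# (hg.peer, listkeys('bookmarks'), lookup, exchange.pull). 'ui' and 'repo' are
# assumed to be existing ui/localrepository objects, the URL and bookmark name
# are illustrative only, and the helper name is hypothetical; hg, exchange,
# error and _ are the module-level imports already used in this file.
def pull_bookmark_sketch(ui, repo, url='https://example.com/repo', mark='stable'):
    other = hg.peer(repo, {}, url)                 # open a peer, as pull() does
    try:
        remotemarks = other.listkeys('bookmarks')  # {name: hex node}
        if mark not in remotemarks:
            raise error.Abort(_('remote bookmark %s not found!') % mark)
        node = other.lookup(remotemarks[mark])     # hex node -> binary node
        # request just that head and let exchange.pull move the bookmark too
        return exchange.pull(repo, other, heads=[node],
                             bookmarks=[mark]).cgresult
    finally:
        other.close()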
4054 @command('^push',
4051 @command('^push',
4055 [('f', 'force', None, _('force push')),
4052 [('f', 'force', None, _('force push')),
4056 ('r', 'rev', [],
4053 ('r', 'rev', [],
4057 _('a changeset intended to be included in the destination'),
4054 _('a changeset intended to be included in the destination'),
4058 _('REV')),
4055 _('REV')),
4059 ('B', 'bookmark', [], _("bookmark to push"), _('BOOKMARK')),
4056 ('B', 'bookmark', [], _("bookmark to push"), _('BOOKMARK')),
4060 ('b', 'branch', [],
4057 ('b', 'branch', [],
4061 _('a specific branch you would like to push'), _('BRANCH')),
4058 _('a specific branch you would like to push'), _('BRANCH')),
4062 ('', 'new-branch', False, _('allow pushing a new branch')),
4059 ('', 'new-branch', False, _('allow pushing a new branch')),
4063 ] + remoteopts,
4060 ] + remoteopts,
4064 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]'))
4061 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]'))
4065 def push(ui, repo, dest=None, **opts):
4062 def push(ui, repo, dest=None, **opts):
4066 """push changes to the specified destination
4063 """push changes to the specified destination
4067
4064
4068 Push changesets from the local repository to the specified
4065 Push changesets from the local repository to the specified
4069 destination.
4066 destination.
4070
4067
4071 This operation is symmetrical to pull: it is identical to a pull
4068 This operation is symmetrical to pull: it is identical to a pull
4072 in the destination repository from the current one.
4069 in the destination repository from the current one.
4073
4070
4074 By default, push will not allow creation of new heads at the
4071 By default, push will not allow creation of new heads at the
4075 destination, since multiple heads would make it unclear which head
4072 destination, since multiple heads would make it unclear which head
4076 to use. In this situation, it is recommended to pull and merge
4073 to use. In this situation, it is recommended to pull and merge
4077 before pushing.
4074 before pushing.
4078
4075
4079 Use --new-branch if you want to allow push to create a new named
4076 Use --new-branch if you want to allow push to create a new named
4080 branch that is not present at the destination. This allows you to
4077 branch that is not present at the destination. This allows you to
4081 only create a new branch without forcing other changes.
4078 only create a new branch without forcing other changes.
4082
4079
4083 .. note::
4080 .. note::
4084
4081
4085 Extra care should be taken with the -f/--force option,
4082 Extra care should be taken with the -f/--force option,
4086 which will push all new heads on all branches, an action which will
4083 which will push all new heads on all branches, an action which will
4087 almost always cause confusion for collaborators.
4084 almost always cause confusion for collaborators.
4088
4085
4089 If -r/--rev is used, the specified revision and all its ancestors
4086 If -r/--rev is used, the specified revision and all its ancestors
4090 will be pushed to the remote repository.
4087 will be pushed to the remote repository.
4091
4088
4092 If -B/--bookmark is used, the specified bookmarked revision, its
4089 If -B/--bookmark is used, the specified bookmarked revision, its
4093 ancestors, and the bookmark will be pushed to the remote
4090 ancestors, and the bookmark will be pushed to the remote
4094 repository. Specifying ``.`` is equivalent to specifying the active
4091 repository. Specifying ``.`` is equivalent to specifying the active
4095 bookmark's name.
4092 bookmark's name.
4096
4093
4097 Please see :hg:`help urls` for important details about ``ssh://``
4094 Please see :hg:`help urls` for important details about ``ssh://``
4098 URLs. If DESTINATION is omitted, a default path will be used.
4095 URLs. If DESTINATION is omitted, a default path will be used.
4099
4096
4100 Returns 0 if push was successful, 1 if nothing to push.
4097 Returns 0 if push was successful, 1 if nothing to push.
4101 """
4098 """
4102
4099
4103 opts = pycompat.byteskwargs(opts)
4100 opts = pycompat.byteskwargs(opts)
4104 if opts.get('bookmark'):
4101 if opts.get('bookmark'):
4105 ui.setconfig('bookmarks', 'pushing', opts['bookmark'], 'push')
4102 ui.setconfig('bookmarks', 'pushing', opts['bookmark'], 'push')
4106 for b in opts['bookmark']:
4103 for b in opts['bookmark']:
4107 # translate -B options to -r so changesets get pushed
4104 # translate -B options to -r so changesets get pushed
4108 b = repo._bookmarks.expandname(b)
4105 b = repo._bookmarks.expandname(b)
4109 if b in repo._bookmarks:
4106 if b in repo._bookmarks:
4110 opts.setdefault('rev', []).append(b)
4107 opts.setdefault('rev', []).append(b)
4111 else:
4108 else:
4112 # if we try to push a deleted bookmark, translate it to null
4109 # if we try to push a deleted bookmark, translate it to null
4113 # this lets simultaneous -r, -b options continue working
4110 # this lets simultaneous -r, -b options continue working
4114 opts.setdefault('rev', []).append("null")
4111 opts.setdefault('rev', []).append("null")
4115
4112
4116 path = ui.paths.getpath(dest, default=('default-push', 'default'))
4113 path = ui.paths.getpath(dest, default=('default-push', 'default'))
4117 if not path:
4114 if not path:
4118 raise error.Abort(_('default repository not configured!'),
4115 raise error.Abort(_('default repository not configured!'),
4119 hint=_("see 'hg help config.paths'"))
4116 hint=_("see 'hg help config.paths'"))
4120 dest = path.pushloc or path.loc
4117 dest = path.pushloc or path.loc
4121 branches = (path.branch, opts.get('branch') or [])
4118 branches = (path.branch, opts.get('branch') or [])
4122 ui.status(_('pushing to %s\n') % util.hidepassword(dest))
4119 ui.status(_('pushing to %s\n') % util.hidepassword(dest))
4123 revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
4120 revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
4124 other = hg.peer(repo, opts, dest)
4121 other = hg.peer(repo, opts, dest)
4125
4122
4126 if revs:
4123 if revs:
4127 revs = [repo.lookup(r) for r in scmutil.revrange(repo, revs)]
4124 revs = [repo.lookup(r) for r in scmutil.revrange(repo, revs)]
4128 if not revs:
4125 if not revs:
4129 raise error.Abort(_("specified revisions evaluate to an empty set"),
4126 raise error.Abort(_("specified revisions evaluate to an empty set"),
4130 hint=_("use different revision arguments"))
4127 hint=_("use different revision arguments"))
4131 elif path.pushrev:
4128 elif path.pushrev:
4132 # It doesn't make any sense to specify ancestor revisions. So limit
4129 # It doesn't make any sense to specify ancestor revisions. So limit
4133 # to DAG heads to make discovery simpler.
4130 # to DAG heads to make discovery simpler.
4134 expr = revsetlang.formatspec('heads(%r)', path.pushrev)
4131 expr = revsetlang.formatspec('heads(%r)', path.pushrev)
4135 revs = scmutil.revrange(repo, [expr])
4132 revs = scmutil.revrange(repo, [expr])
4136 revs = [repo[rev].node() for rev in revs]
4133 revs = [repo[rev].node() for rev in revs]
4137 if not revs:
4134 if not revs:
4138 raise error.Abort(_('default push revset for path evaluates to an '
4135 raise error.Abort(_('default push revset for path evaluates to an '
4139 'empty set'))
4136 'empty set'))
4140
4137
4141 repo._subtoppath = dest
4138 repo._subtoppath = dest
4142 try:
4139 try:
4143 # push subrepos depth-first for coherent ordering
4140 # push subrepos depth-first for coherent ordering
4144 c = repo['']
4141 c = repo['']
4145 subs = c.substate # only repos that are committed
4142 subs = c.substate # only repos that are committed
4146 for s in sorted(subs):
4143 for s in sorted(subs):
4147 result = c.sub(s).push(opts)
4144 result = c.sub(s).push(opts)
4148 if result == 0:
4145 if result == 0:
4149 return not result
4146 return not result
4150 finally:
4147 finally:
4151 del repo._subtoppath
4148 del repo._subtoppath
4152 pushop = exchange.push(repo, other, opts.get('force'), revs=revs,
4149 pushop = exchange.push(repo, other, opts.get('force'), revs=revs,
4153 newbranch=opts.get('new_branch'),
4150 newbranch=opts.get('new_branch'),
4154 bookmarks=opts.get('bookmark', ()),
4151 bookmarks=opts.get('bookmark', ()),
4155 opargs=opts.get('opargs'))
4152 opargs=opts.get('opargs'))
4156
4153
4157 result = not pushop.cgresult
4154 result = not pushop.cgresult
4158
4155
4159 if pushop.bkresult is not None:
4156 if pushop.bkresult is not None:
4160 if pushop.bkresult == 2:
4157 if pushop.bkresult == 2:
4161 result = 2
4158 result = 2
4162 elif not result and pushop.bkresult:
4159 elif not result and pushop.bkresult:
4163 result = 2
4160 result = 2
4164
4161
4165 return result
4162 return result
4166
4163
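# Example (editor's addition): a hedged sketch of pushing a set of revisions,
# mirroring the scmutil.revrange/repo.lookup/exchange.push sequence used by
# push() above. 'ui' and 'repo' are assumed to exist; the destination URL and
# the 'draft()' revset are illustrative only, and the helper name is
# hypothetical.
def push_revs_sketch(ui, repo, dest='https://example.com/repo', revspec='draft()'):
    revs = [repo.lookup(r) for r in scmutil.revrange(repo, [revspec])]
    if not revs:
        raise error.Abort(_("specified revisions evaluate to an empty set"))
    other = hg.peer(repo, {}, dest)
    pushop = exchange.push(repo, other, False, revs=revs)
    # same convention as push() above: 0 = changesets pushed, 1 = nothing to push
    return not pushop.cgresult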
4167 @command('recover', [])
4164 @command('recover', [])
4168 def recover(ui, repo):
4165 def recover(ui, repo):
4169 """roll back an interrupted transaction
4166 """roll back an interrupted transaction
4170
4167
4171 Recover from an interrupted commit or pull.
4168 Recover from an interrupted commit or pull.
4172
4169
4173 This command tries to fix the repository status after an
4170 This command tries to fix the repository status after an
4174 interrupted operation. It should only be necessary when Mercurial
4171 interrupted operation. It should only be necessary when Mercurial
4175 suggests it.
4172 suggests it.
4176
4173
4177 Returns 0 if successful, 1 if nothing to recover or the verification fails.
4174 Returns 0 if successful, 1 if nothing to recover or the verification fails.
4178 """
4175 """
4179 if repo.recover():
4176 if repo.recover():
4180 return hg.verify(repo)
4177 return hg.verify(repo)
4181 return 1
4178 return 1
4182
4179
4183 @command('^remove|rm',
4180 @command('^remove|rm',
4184 [('A', 'after', None, _('record delete for missing files')),
4181 [('A', 'after', None, _('record delete for missing files')),
4185 ('f', 'force', None,
4182 ('f', 'force', None,
4186 _('forget added files, delete modified files')),
4183 _('forget added files, delete modified files')),
4187 ] + subrepoopts + walkopts,
4184 ] + subrepoopts + walkopts,
4188 _('[OPTION]... FILE...'),
4185 _('[OPTION]... FILE...'),
4189 inferrepo=True)
4186 inferrepo=True)
4190 def remove(ui, repo, *pats, **opts):
4187 def remove(ui, repo, *pats, **opts):
4191 """remove the specified files on the next commit
4188 """remove the specified files on the next commit
4192
4189
4193 Schedule the indicated files for removal from the current branch.
4190 Schedule the indicated files for removal from the current branch.
4194
4191
4195 This command schedules the files to be removed at the next commit.
4192 This command schedules the files to be removed at the next commit.
4196 To undo a remove before that, see :hg:`revert`. To undo added
4193 To undo a remove before that, see :hg:`revert`. To undo added
4197 files, see :hg:`forget`.
4194 files, see :hg:`forget`.
4198
4195
4199 .. container:: verbose
4196 .. container:: verbose
4200
4197
4201 -A/--after can be used to remove only files that have already
4198 -A/--after can be used to remove only files that have already
4202 been deleted, -f/--force can be used to force deletion, and -Af
4199 been deleted, -f/--force can be used to force deletion, and -Af
4203 can be used to remove files from the next revision without
4200 can be used to remove files from the next revision without
4204 deleting them from the working directory.
4201 deleting them from the working directory.
4205
4202
4206 The following table details the behavior of remove for different
4203 The following table details the behavior of remove for different
4207 file states (columns) and option combinations (rows). The file
4204 file states (columns) and option combinations (rows). The file
4208 states are Added [A], Clean [C], Modified [M] and Missing [!]
4205 states are Added [A], Clean [C], Modified [M] and Missing [!]
4209 (as reported by :hg:`status`). The actions are Warn, Remove
4206 (as reported by :hg:`status`). The actions are Warn, Remove
4210 (from branch) and Delete (from disk):
4207 (from branch) and Delete (from disk):
4211
4208
4212 ========= == == == ==
4209 ========= == == == ==
4213 opt/state A C M !
4210 opt/state A C M !
4214 ========= == == == ==
4211 ========= == == == ==
4215 none W RD W R
4212 none W RD W R
4216 -f R RD RD R
4213 -f R RD RD R
4217 -A W W W R
4214 -A W W W R
4218 -Af R R R R
4215 -Af R R R R
4219 ========= == == == ==
4216 ========= == == == ==
4220
4217
4221 .. note::
4218 .. note::
4222
4219
4223 :hg:`remove` never deletes files in Added [A] state from the
4220 :hg:`remove` never deletes files in Added [A] state from the
4224 working directory, not even if ``--force`` is specified.
4221 working directory, not even if ``--force`` is specified.
4225
4222
4226 Returns 0 on success, 1 if any warnings are encountered.
4223 Returns 0 on success, 1 if any warnings are encountered.
4227 """
4224 """
4228
4225
4229 opts = pycompat.byteskwargs(opts)
4226 opts = pycompat.byteskwargs(opts)
4230 after, force = opts.get('after'), opts.get('force')
4227 after, force = opts.get('after'), opts.get('force')
4231 if not pats and not after:
4228 if not pats and not after:
4232 raise error.Abort(_('no files specified'))
4229 raise error.Abort(_('no files specified'))
4233
4230
4234 m = scmutil.match(repo[None], pats, opts)
4231 m = scmutil.match(repo[None], pats, opts)
4235 subrepos = opts.get('subrepos')
4232 subrepos = opts.get('subrepos')
4236 return cmdutil.remove(ui, repo, m, "", after, force, subrepos)
4233 return cmdutil.remove(ui, repo, m, "", after, force, subrepos)
4237
4234
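# Example (editor's addition): the option/state table from the remove
# docstring above, encoded as a small lookup purely for illustration. Keys are
# the option combination and the file state reported by 'hg status'; values
# are the actions Warn (W), Remove from branch (R) and Delete from disk (D).
_REMOVE_BEHAVIOR_SKETCH = {
    #         Added     Clean      Modified   Missing
    'none': {'A': 'W', 'C': 'RD', 'M': 'W',  '!': 'R'},
    '-f':   {'A': 'R', 'C': 'RD', 'M': 'RD', '!': 'R'},
    '-A':   {'A': 'W', 'C': 'W',  'M': 'W',  '!': 'R'},
    '-Af':  {'A': 'R', 'C': 'R',  'M': 'R',  '!': 'R'},
}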
4238 @command('rename|move|mv',
4235 @command('rename|move|mv',
4239 [('A', 'after', None, _('record a rename that has already occurred')),
4236 [('A', 'after', None, _('record a rename that has already occurred')),
4240 ('f', 'force', None, _('forcibly copy over an existing managed file')),
4237 ('f', 'force', None, _('forcibly copy over an existing managed file')),
4241 ] + walkopts + dryrunopts,
4238 ] + walkopts + dryrunopts,
4242 _('[OPTION]... SOURCE... DEST'))
4239 _('[OPTION]... SOURCE... DEST'))
4243 def rename(ui, repo, *pats, **opts):
4240 def rename(ui, repo, *pats, **opts):
4244 """rename files; equivalent of copy + remove
4241 """rename files; equivalent of copy + remove
4245
4242
4246 Mark dest as copies of sources; mark sources for deletion. If dest
4243 Mark dest as copies of sources; mark sources for deletion. If dest
4247 is a directory, copies are put in that directory. If dest is a
4244 is a directory, copies are put in that directory. If dest is a
4248 file, there can only be one source.
4245 file, there can only be one source.
4249
4246
4250 By default, this command copies the contents of files as they
4247 By default, this command copies the contents of files as they
4251 exist in the working directory. If invoked with -A/--after, the
4248 exist in the working directory. If invoked with -A/--after, the
4252 operation is recorded, but no copying is performed.
4249 operation is recorded, but no copying is performed.
4253
4250
4254 This command takes effect at the next commit. To undo a rename
4251 This command takes effect at the next commit. To undo a rename
4255 before that, see :hg:`revert`.
4252 before that, see :hg:`revert`.
4256
4253
4257 Returns 0 on success, 1 if errors are encountered.
4254 Returns 0 on success, 1 if errors are encountered.
4258 """
4255 """
4259 opts = pycompat.byteskwargs(opts)
4256 opts = pycompat.byteskwargs(opts)
4260 with repo.wlock(False):
4257 with repo.wlock(False):
4261 return cmdutil.copy(ui, repo, pats, opts, rename=True)
4258 return cmdutil.copy(ui, repo, pats, opts, rename=True)
4262
4259
4263 @command('resolve',
4260 @command('resolve',
4264 [('a', 'all', None, _('select all unresolved files')),
4261 [('a', 'all', None, _('select all unresolved files')),
4265 ('l', 'list', None, _('list state of files needing merge')),
4262 ('l', 'list', None, _('list state of files needing merge')),
4266 ('m', 'mark', None, _('mark files as resolved')),
4263 ('m', 'mark', None, _('mark files as resolved')),
4267 ('u', 'unmark', None, _('mark files as unresolved')),
4264 ('u', 'unmark', None, _('mark files as unresolved')),
4268 ('n', 'no-status', None, _('hide status prefix'))]
4265 ('n', 'no-status', None, _('hide status prefix'))]
4269 + mergetoolopts + walkopts + formatteropts,
4266 + mergetoolopts + walkopts + formatteropts,
4270 _('[OPTION]... [FILE]...'),
4267 _('[OPTION]... [FILE]...'),
4271 inferrepo=True)
4268 inferrepo=True)
4272 def resolve(ui, repo, *pats, **opts):
4269 def resolve(ui, repo, *pats, **opts):
4273 """redo merges or set/view the merge status of files
4270 """redo merges or set/view the merge status of files
4274
4271
4275 Merges with unresolved conflicts are often the result of
4272 Merges with unresolved conflicts are often the result of
4276 non-interactive merging using the ``internal:merge`` configuration
4273 non-interactive merging using the ``internal:merge`` configuration
4277 setting, or a command-line merge tool like ``diff3``. The resolve
4274 setting, or a command-line merge tool like ``diff3``. The resolve
4278 command is used to manage the files involved in a merge, after
4275 command is used to manage the files involved in a merge, after
4279 :hg:`merge` has been run, and before :hg:`commit` is run (i.e. the
4276 :hg:`merge` has been run, and before :hg:`commit` is run (i.e. the
4280 working directory must have two parents). See :hg:`help
4277 working directory must have two parents). See :hg:`help
4281 merge-tools` for information on configuring merge tools.
4278 merge-tools` for information on configuring merge tools.
4282
4279
4283 The resolve command can be used in the following ways:
4280 The resolve command can be used in the following ways:
4284
4281
4285 - :hg:`resolve [--tool TOOL] FILE...`: attempt to re-merge the specified
4282 - :hg:`resolve [--tool TOOL] FILE...`: attempt to re-merge the specified
4286 files, discarding any previous merge attempts. Re-merging is not
4283 files, discarding any previous merge attempts. Re-merging is not
4287 performed for files already marked as resolved. Use ``--all/-a``
4284 performed for files already marked as resolved. Use ``--all/-a``
4288 to select all unresolved files. ``--tool`` can be used to specify
4285 to select all unresolved files. ``--tool`` can be used to specify
4289 the merge tool used for the given files. It overrides the HGMERGE
4286 the merge tool used for the given files. It overrides the HGMERGE
4290 environment variable and your configuration files. Previous file
4287 environment variable and your configuration files. Previous file
4291 contents are saved with a ``.orig`` suffix.
4288 contents are saved with a ``.orig`` suffix.
4292
4289
4293 - :hg:`resolve -m [FILE]`: mark a file as having been resolved
4290 - :hg:`resolve -m [FILE]`: mark a file as having been resolved
4291 (e.g. after having manually fixed up the files). The default is
4288 (e.g. after having manually fixed up the files). The default is
4295 to mark all unresolved files.
4292 to mark all unresolved files.
4296
4293
4297 - :hg:`resolve -u [FILE]...`: mark a file as unresolved. The
4294 - :hg:`resolve -u [FILE]...`: mark a file as unresolved. The
4298 default is to mark all resolved files.
4295 default is to mark all resolved files.
4299
4296
4300 - :hg:`resolve -l`: list files which had or still have conflicts.
4297 - :hg:`resolve -l`: list files which had or still have conflicts.
4301 In the printed list, ``U`` = unresolved and ``R`` = resolved.
4298 In the printed list, ``U`` = unresolved and ``R`` = resolved.
4302 You can use ``set:unresolved()`` or ``set:resolved()`` to filter
4299 You can use ``set:unresolved()`` or ``set:resolved()`` to filter
4303 the list. See :hg:`help filesets` for details.
4300 the list. See :hg:`help filesets` for details.
4304
4301
4305 .. note::
4302 .. note::
4306
4303
4307 Mercurial will not let you commit files with unresolved merge
4304 Mercurial will not let you commit files with unresolved merge
4308 conflicts. You must use :hg:`resolve -m ...` before you can
4305 conflicts. You must use :hg:`resolve -m ...` before you can
4309 commit after a conflicting merge.
4306 commit after a conflicting merge.
4310
4307
4311 Returns 0 on success, 1 if any files fail a resolve attempt.
4308 Returns 0 on success, 1 if any files fail a resolve attempt.
4312 """
4309 """
4313
4310
4314 opts = pycompat.byteskwargs(opts)
4311 opts = pycompat.byteskwargs(opts)
4315 flaglist = 'all mark unmark list no_status'.split()
4312 flaglist = 'all mark unmark list no_status'.split()
4316 all, mark, unmark, show, nostatus = \
4313 all, mark, unmark, show, nostatus = \
4317 [opts.get(o) for o in flaglist]
4314 [opts.get(o) for o in flaglist]
4318
4315
4319 if (show and (mark or unmark)) or (mark and unmark):
4316 if (show and (mark or unmark)) or (mark and unmark):
4320 raise error.Abort(_("too many options specified"))
4317 raise error.Abort(_("too many options specified"))
4321 if pats and all:
4318 if pats and all:
4322 raise error.Abort(_("can't specify --all and patterns"))
4319 raise error.Abort(_("can't specify --all and patterns"))
4323 if not (all or pats or show or mark or unmark):
4320 if not (all or pats or show or mark or unmark):
4324 raise error.Abort(_('no files or directories specified'),
4321 raise error.Abort(_('no files or directories specified'),
4325 hint=('use --all to re-merge all unresolved files'))
4322 hint=('use --all to re-merge all unresolved files'))
4326
4323
4327 if show:
4324 if show:
4328 ui.pager('resolve')
4325 ui.pager('resolve')
4329 fm = ui.formatter('resolve', opts)
4326 fm = ui.formatter('resolve', opts)
4330 ms = mergemod.mergestate.read(repo)
4327 ms = mergemod.mergestate.read(repo)
4331 m = scmutil.match(repo[None], pats, opts)
4328 m = scmutil.match(repo[None], pats, opts)
4332 for f in ms:
4329 for f in ms:
4333 if not m(f):
4330 if not m(f):
4334 continue
4331 continue
4335 l = 'resolve.' + {'u': 'unresolved', 'r': 'resolved',
4332 l = 'resolve.' + {'u': 'unresolved', 'r': 'resolved',
4336 'd': 'driverresolved'}[ms[f]]
4333 'd': 'driverresolved'}[ms[f]]
4337 fm.startitem()
4334 fm.startitem()
4338 fm.condwrite(not nostatus, 'status', '%s ', ms[f].upper(), label=l)
4335 fm.condwrite(not nostatus, 'status', '%s ', ms[f].upper(), label=l)
4339 fm.write('path', '%s\n', f, label=l)
4336 fm.write('path', '%s\n', f, label=l)
4340 fm.end()
4337 fm.end()
4341 return 0
4338 return 0
4342
4339
4343 with repo.wlock():
4340 with repo.wlock():
4344 ms = mergemod.mergestate.read(repo)
4341 ms = mergemod.mergestate.read(repo)
4345
4342
4346 if not (ms.active() or repo.dirstate.p2() != nullid):
4343 if not (ms.active() or repo.dirstate.p2() != nullid):
4347 raise error.Abort(
4344 raise error.Abort(
4348 _('resolve command not applicable when not merging'))
4345 _('resolve command not applicable when not merging'))
4349
4346
4350 wctx = repo[None]
4347 wctx = repo[None]
4351
4348
4352 if ms.mergedriver and ms.mdstate() == 'u':
4349 if ms.mergedriver and ms.mdstate() == 'u':
4353 proceed = mergemod.driverpreprocess(repo, ms, wctx)
4350 proceed = mergemod.driverpreprocess(repo, ms, wctx)
4354 ms.commit()
4351 ms.commit()
4355 # allow mark and unmark to go through
4352 # allow mark and unmark to go through
4356 if not mark and not unmark and not proceed:
4353 if not mark and not unmark and not proceed:
4357 return 1
4354 return 1
4358
4355
4359 m = scmutil.match(wctx, pats, opts)
4356 m = scmutil.match(wctx, pats, opts)
4360 ret = 0
4357 ret = 0
4361 didwork = False
4358 didwork = False
4362 runconclude = False
4359 runconclude = False
4363
4360
4364 tocomplete = []
4361 tocomplete = []
4365 for f in ms:
4362 for f in ms:
4366 if not m(f):
4363 if not m(f):
4367 continue
4364 continue
4368
4365
4369 didwork = True
4366 didwork = True
4370
4367
4371 # don't let driver-resolved files be marked, and run the conclude
4368 # don't let driver-resolved files be marked, and run the conclude
4372 # step if asked to resolve
4369 # step if asked to resolve
4373 if ms[f] == "d":
4370 if ms[f] == "d":
4374 exact = m.exact(f)
4371 exact = m.exact(f)
4375 if mark:
4372 if mark:
4376 if exact:
4373 if exact:
4377 ui.warn(_('not marking %s as it is driver-resolved\n')
4374 ui.warn(_('not marking %s as it is driver-resolved\n')
4378 % f)
4375 % f)
4379 elif unmark:
4376 elif unmark:
4380 if exact:
4377 if exact:
4381 ui.warn(_('not unmarking %s as it is driver-resolved\n')
4378 ui.warn(_('not unmarking %s as it is driver-resolved\n')
4382 % f)
4379 % f)
4383 else:
4380 else:
4384 runconclude = True
4381 runconclude = True
4385 continue
4382 continue
4386
4383
4387 if mark:
4384 if mark:
4388 ms.mark(f, "r")
4385 ms.mark(f, "r")
4389 elif unmark:
4386 elif unmark:
4390 ms.mark(f, "u")
4387 ms.mark(f, "u")
4391 else:
4388 else:
4392 # backup pre-resolve (merge uses .orig for its own purposes)
4389 # backup pre-resolve (merge uses .orig for its own purposes)
4393 a = repo.wjoin(f)
4390 a = repo.wjoin(f)
4394 try:
4391 try:
4395 util.copyfile(a, a + ".resolve")
4392 util.copyfile(a, a + ".resolve")
4396 except (IOError, OSError) as inst:
4393 except (IOError, OSError) as inst:
4397 if inst.errno != errno.ENOENT:
4394 if inst.errno != errno.ENOENT:
4398 raise
4395 raise
4399
4396
4400 try:
4397 try:
4401 # preresolve file
4398 # preresolve file
4402 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
4399 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
4403 'resolve')
4400 'resolve')
4404 complete, r = ms.preresolve(f, wctx)
4401 complete, r = ms.preresolve(f, wctx)
4405 if not complete:
4402 if not complete:
4406 tocomplete.append(f)
4403 tocomplete.append(f)
4407 elif r:
4404 elif r:
4408 ret = 1
4405 ret = 1
4409 finally:
4406 finally:
4410 ui.setconfig('ui', 'forcemerge', '', 'resolve')
4407 ui.setconfig('ui', 'forcemerge', '', 'resolve')
4411 ms.commit()
4408 ms.commit()
4412
4409
4413 # replace filemerge's .orig file with our resolve file, but only
4410 # replace filemerge's .orig file with our resolve file, but only
4414 # for merges that are complete
4411 # for merges that are complete
4415 if complete:
4412 if complete:
4416 try:
4413 try:
4417 util.rename(a + ".resolve",
4414 util.rename(a + ".resolve",
4418 scmutil.origpath(ui, repo, a))
4415 scmutil.origpath(ui, repo, a))
4419 except OSError as inst:
4416 except OSError as inst:
4420 if inst.errno != errno.ENOENT:
4417 if inst.errno != errno.ENOENT:
4421 raise
4418 raise
4422
4419
4423 for f in tocomplete:
4420 for f in tocomplete:
4424 try:
4421 try:
4425 # resolve file
4422 # resolve file
4426 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
4423 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
4427 'resolve')
4424 'resolve')
4428 r = ms.resolve(f, wctx)
4425 r = ms.resolve(f, wctx)
4429 if r:
4426 if r:
4430 ret = 1
4427 ret = 1
4431 finally:
4428 finally:
4432 ui.setconfig('ui', 'forcemerge', '', 'resolve')
4429 ui.setconfig('ui', 'forcemerge', '', 'resolve')
4433 ms.commit()
4430 ms.commit()
4434
4431
4435 # replace filemerge's .orig file with our resolve file
4432 # replace filemerge's .orig file with our resolve file
4436 a = repo.wjoin(f)
4433 a = repo.wjoin(f)
4437 try:
4434 try:
4438 util.rename(a + ".resolve", scmutil.origpath(ui, repo, a))
4435 util.rename(a + ".resolve", scmutil.origpath(ui, repo, a))
4439 except OSError as inst:
4436 except OSError as inst:
4440 if inst.errno != errno.ENOENT:
4437 if inst.errno != errno.ENOENT:
4441 raise
4438 raise
4442
4439
4443 ms.commit()
4440 ms.commit()
4444 ms.recordactions()
4441 ms.recordactions()
4445
4442
4446 if not didwork and pats:
4443 if not didwork and pats:
4447 hint = None
4444 hint = None
4448 if not any([p for p in pats if p.find(':') >= 0]):
4445 if not any([p for p in pats if p.find(':') >= 0]):
4449 pats = ['path:%s' % p for p in pats]
4446 pats = ['path:%s' % p for p in pats]
4450 m = scmutil.match(wctx, pats, opts)
4447 m = scmutil.match(wctx, pats, opts)
4451 for f in ms:
4448 for f in ms:
4452 if not m(f):
4449 if not m(f):
4453 continue
4450 continue
4454 flags = ''.join(['-%s ' % o[0] for o in flaglist
4451 flags = ''.join(['-%s ' % o[0] for o in flaglist
4455 if opts.get(o)])
4452 if opts.get(o)])
4456 hint = _("(try: hg resolve %s%s)\n") % (
4453 hint = _("(try: hg resolve %s%s)\n") % (
4457 flags,
4454 flags,
4458 ' '.join(pats))
4455 ' '.join(pats))
4459 break
4456 break
4460 ui.warn(_("arguments do not match paths that need resolving\n"))
4457 ui.warn(_("arguments do not match paths that need resolving\n"))
4461 if hint:
4458 if hint:
4462 ui.warn(hint)
4459 ui.warn(hint)
4463 elif ms.mergedriver and ms.mdstate() != 's':
4460 elif ms.mergedriver and ms.mdstate() != 's':
4464 # run conclude step when either a driver-resolved file is requested
4461 # run conclude step when either a driver-resolved file is requested
4465 # or there are no driver-resolved files
4462 # or there are no driver-resolved files
4466 # we can't use 'ret' to determine whether any files are unresolved
4463 # we can't use 'ret' to determine whether any files are unresolved
4467 # because we might not have tried to resolve some
4464 # because we might not have tried to resolve some
4468 if ((runconclude or not list(ms.driverresolved()))
4465 if ((runconclude or not list(ms.driverresolved()))
4469 and not list(ms.unresolved())):
4466 and not list(ms.unresolved())):
4470 proceed = mergemod.driverconclude(repo, ms, wctx)
4467 proceed = mergemod.driverconclude(repo, ms, wctx)
4471 ms.commit()
4468 ms.commit()
4472 if not proceed:
4469 if not proceed:
4473 return 1
4470 return 1
4474
4471
4475 # Nudge users into finishing an unfinished operation
4472 # Nudge users into finishing an unfinished operation
4476 unresolvedf = list(ms.unresolved())
4473 unresolvedf = list(ms.unresolved())
4477 driverresolvedf = list(ms.driverresolved())
4474 driverresolvedf = list(ms.driverresolved())
4478 if not unresolvedf and not driverresolvedf:
4475 if not unresolvedf and not driverresolvedf:
4479 ui.status(_('(no more unresolved files)\n'))
4476 ui.status(_('(no more unresolved files)\n'))
4480 cmdutil.checkafterresolved(repo)
4477 cmdutil.checkafterresolved(repo)
4481 elif not unresolvedf:
4478 elif not unresolvedf:
4482 ui.status(_('(no more unresolved files -- '
4479 ui.status(_('(no more unresolved files -- '
4483 'run "hg resolve --all" to conclude)\n'))
4480 'run "hg resolve --all" to conclude)\n'))
4484
4481
4485 return ret
4482 return ret
4486
4483
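# Example (editor's addition): a hedged sketch of inspecting the merge state
# the way resolve() does above -- read the state and report every file that is
# still unresolved. 'ui' and 'repo' are assumed to be existing objects; the
# helper name is hypothetical, and mergemod and _ are the module-level imports
# already used in this file.
def list_unresolved_sketch(ui, repo):
    ms = mergemod.mergestate.read(repo)
    if not ms.active():
        ui.status(_('(no merge in progress)\n'))
        return 0
    for f in ms:
        # per the mapping used above: 'u' = unresolved, 'r' = resolved,
        # 'd' = driver-resolved
        if ms[f] == 'u':
            ui.write('U %s\n' % f)
    return 0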
4487 @command('revert',
4484 @command('revert',
4488 [('a', 'all', None, _('revert all changes when no arguments given')),
4485 [('a', 'all', None, _('revert all changes when no arguments given')),
4489 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
4486 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
4490 ('r', 'rev', '', _('revert to the specified revision'), _('REV')),
4487 ('r', 'rev', '', _('revert to the specified revision'), _('REV')),
4491 ('C', 'no-backup', None, _('do not save backup copies of files')),
4488 ('C', 'no-backup', None, _('do not save backup copies of files')),
4492 ('i', 'interactive', None,
4489 ('i', 'interactive', None,
4493 _('interactively select the changes (EXPERIMENTAL)')),
4490 _('interactively select the changes (EXPERIMENTAL)')),
4494 ] + walkopts + dryrunopts,
4491 ] + walkopts + dryrunopts,
4495 _('[OPTION]... [-r REV] [NAME]...'))
4492 _('[OPTION]... [-r REV] [NAME]...'))
4496 def revert(ui, repo, *pats, **opts):
4493 def revert(ui, repo, *pats, **opts):
4497 """restore files to their checkout state
4494 """restore files to their checkout state
4498
4495
4499 .. note::
4496 .. note::
4500
4497
4501 To check out earlier revisions, you should use :hg:`update REV`.
4498 To check out earlier revisions, you should use :hg:`update REV`.
4502 To cancel an uncommitted merge (and lose your changes),
4499 To cancel an uncommitted merge (and lose your changes),
4503 use :hg:`update --clean .`.
4500 use :hg:`update --clean .`.
4504
4501
4505 With no revision specified, revert the specified files or directories
4502 With no revision specified, revert the specified files or directories
4506 to the contents they had in the parent of the working directory.
4503 to the contents they had in the parent of the working directory.
4507 This restores the contents of files to an unmodified
4504 This restores the contents of files to an unmodified
4508 state and unschedules adds, removes, copies, and renames. If the
4505 state and unschedules adds, removes, copies, and renames. If the
4509 working directory has two parents, you must explicitly specify a
4506 working directory has two parents, you must explicitly specify a
4510 revision.
4507 revision.
4511
4508
4512 Using the -r/--rev or -d/--date options, revert the given files or
4509 Using the -r/--rev or -d/--date options, revert the given files or
4513 directories to their states as of a specific revision. Because
4510 directories to their states as of a specific revision. Because
4514 revert does not change the working directory parents, this will
4511 revert does not change the working directory parents, this will
4515 cause these files to appear modified. This can be helpful to "back
4512 cause these files to appear modified. This can be helpful to "back
4516 out" some or all of an earlier change. See :hg:`backout` for a
4513 out" some or all of an earlier change. See :hg:`backout` for a
4517 related method.
4514 related method.
4518
4515
4519 Modified files are saved with a .orig suffix before reverting.
4516 Modified files are saved with a .orig suffix before reverting.
4520 To disable these backups, use --no-backup. It is possible to store
4517 To disable these backups, use --no-backup. It is possible to store
4521 the backup files in a custom directory relative to the root of the
4518 the backup files in a custom directory relative to the root of the
4522 repository by setting the ``ui.origbackuppath`` configuration
4519 repository by setting the ``ui.origbackuppath`` configuration
4523 option.
4520 option.
4524
4521
4525 See :hg:`help dates` for a list of formats valid for -d/--date.
4522 See :hg:`help dates` for a list of formats valid for -d/--date.
4526
4523
4527 See :hg:`help backout` for a way to reverse the effect of an
4524 See :hg:`help backout` for a way to reverse the effect of an
4528 earlier changeset.
4525 earlier changeset.
4529
4526
4530 Returns 0 on success.
4527 Returns 0 on success.
4531 """
4528 """
4532
4529
4533 if opts.get("date"):
4530 if opts.get("date"):
4534 if opts.get("rev"):
4531 if opts.get("rev"):
4535 raise error.Abort(_("you can't specify a revision and a date"))
4532 raise error.Abort(_("you can't specify a revision and a date"))
4536 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
4533 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
4537
4534
4538 parent, p2 = repo.dirstate.parents()
4535 parent, p2 = repo.dirstate.parents()
4539 if not opts.get('rev') and p2 != nullid:
4536 if not opts.get('rev') and p2 != nullid:
4540 # revert after merge is a trap for new users (issue2915)
4537 # revert after merge is a trap for new users (issue2915)
4541 raise error.Abort(_('uncommitted merge with no revision specified'),
4538 raise error.Abort(_('uncommitted merge with no revision specified'),
4542 hint=_("use 'hg update' or see 'hg help revert'"))
4539 hint=_("use 'hg update' or see 'hg help revert'"))
4543
4540
4544 ctx = scmutil.revsingle(repo, opts.get('rev'))
4541 ctx = scmutil.revsingle(repo, opts.get('rev'))
4545
4542
4546 if (not (pats or opts.get('include') or opts.get('exclude') or
4543 if (not (pats or opts.get('include') or opts.get('exclude') or
4547 opts.get('all') or opts.get('interactive'))):
4544 opts.get('all') or opts.get('interactive'))):
4548 msg = _("no files or directories specified")
4545 msg = _("no files or directories specified")
4549 if p2 != nullid:
4546 if p2 != nullid:
4550 hint = _("uncommitted merge, use --all to discard all changes,"
4547 hint = _("uncommitted merge, use --all to discard all changes,"
4551 " or 'hg update -C .' to abort the merge")
4548 " or 'hg update -C .' to abort the merge")
4552 raise error.Abort(msg, hint=hint)
4549 raise error.Abort(msg, hint=hint)
4553 dirty = any(repo.status())
4550 dirty = any(repo.status())
4554 node = ctx.node()
4551 node = ctx.node()
4555 if node != parent:
4552 if node != parent:
4556 if dirty:
4553 if dirty:
4557 hint = _("uncommitted changes, use --all to discard all"
4554 hint = _("uncommitted changes, use --all to discard all"
4558 " changes, or 'hg update %s' to update") % ctx.rev()
4555 " changes, or 'hg update %s' to update") % ctx.rev()
4559 else:
4556 else:
4560 hint = _("use --all to revert all files,"
4557 hint = _("use --all to revert all files,"
4561 " or 'hg update %s' to update") % ctx.rev()
4558 " or 'hg update %s' to update") % ctx.rev()
4562 elif dirty:
4559 elif dirty:
4563 hint = _("uncommitted changes, use --all to discard all changes")
4560 hint = _("uncommitted changes, use --all to discard all changes")
4564 else:
4561 else:
4565 hint = _("use --all to revert all files")
4562 hint = _("use --all to revert all files")
4566 raise error.Abort(msg, hint=hint)
4563 raise error.Abort(msg, hint=hint)
4567
4564
4568 return cmdutil.revert(ui, repo, ctx, (parent, p2), *pats, **opts)
4565 return cmdutil.revert(ui, repo, ctx, (parent, p2), *pats, **opts)
4569
4566
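# Example (editor's addition): a hedged sketch of how revert() above turns the
# mutually exclusive --date/--rev options into a single target changectx. The
# helper name is hypothetical; cmdutil, scmutil, error and _ are the
# module-level imports already used in this file.
def revert_target_sketch(ui, repo, rev=None, date=None):
    if date:
        if rev:
            raise error.Abort(_("you can't specify a revision and a date"))
        rev = cmdutil.finddate(ui, repo, date)   # date -> revision spec
    # with neither option given, revsingle falls back to '.', the working
    # directory parent, which is what revert() compares against by default
    return scmutil.revsingle(repo, rev)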
4570 @command('rollback', dryrunopts +
4567 @command('rollback', dryrunopts +
4571 [('f', 'force', False, _('ignore safety measures'))])
4568 [('f', 'force', False, _('ignore safety measures'))])
4572 def rollback(ui, repo, **opts):
4569 def rollback(ui, repo, **opts):
4573 """roll back the last transaction (DANGEROUS) (DEPRECATED)
4570 """roll back the last transaction (DANGEROUS) (DEPRECATED)
4574
4571
4575 Please use :hg:`commit --amend` instead of rollback to correct
4572 Please use :hg:`commit --amend` instead of rollback to correct
4576 mistakes in the last commit.
4573 mistakes in the last commit.
4577
4574
4578 This command should be used with care. There is only one level of
4575 This command should be used with care. There is only one level of
4579 rollback, and there is no way to undo a rollback. It will also
4576 rollback, and there is no way to undo a rollback. It will also
4580 restore the dirstate at the time of the last transaction, losing
4577 restore the dirstate at the time of the last transaction, losing
4581 any dirstate changes since that time. This command does not alter
4578 any dirstate changes since that time. This command does not alter
4582 the working directory.
4579 the working directory.
4583
4580
4584 Transactions are used to encapsulate the effects of all commands
4581 Transactions are used to encapsulate the effects of all commands
4585 that create new changesets or propagate existing changesets into a
4582 that create new changesets or propagate existing changesets into a
4586 repository.
4583 repository.
4587
4584
4588 .. container:: verbose
4585 .. container:: verbose
4589
4586
4590 For example, the following commands are transactional, and their
4587 For example, the following commands are transactional, and their
4591 effects can be rolled back:
4588 effects can be rolled back:
4592
4589
4593 - commit
4590 - commit
4594 - import
4591 - import
4595 - pull
4592 - pull
4596 - push (with this repository as the destination)
4593 - push (with this repository as the destination)
4597 - unbundle
4594 - unbundle
4598
4595
4599 To avoid permanent data loss, rollback will refuse to roll back a
4596 To avoid permanent data loss, rollback will refuse to roll back a
4600 commit transaction if it isn't checked out. Use --force to
4597 commit transaction if it isn't checked out. Use --force to
4601 override this protection.
4598 override this protection.
4602
4599
4603 The rollback command can be entirely disabled by setting the
4600 The rollback command can be entirely disabled by setting the
4604 ``ui.rollback`` configuration setting to false. If you're here
4601 ``ui.rollback`` configuration setting to false. If you're here
4605 because you want to use rollback and it's disabled, you can
4602 because you want to use rollback and it's disabled, you can
4606 re-enable the command by setting ``ui.rollback`` to true.
4603 re-enable the command by setting ``ui.rollback`` to true.
4607
4604
4608 This command is not intended for use on public repositories. Once
4605 This command is not intended for use on public repositories. Once
4609 changes are visible for pull by other users, rolling a transaction
4606 changes are visible for pull by other users, rolling a transaction
4610 back locally is ineffective (someone else may already have pulled
4607 back locally is ineffective (someone else may already have pulled
4611 the changes). Furthermore, a race is possible with readers of the
4608 the changes). Furthermore, a race is possible with readers of the
4612 repository; for example an in-progress pull from the repository
4609 repository; for example an in-progress pull from the repository
4613 may fail if a rollback is performed.
4610 may fail if a rollback is performed.
4614
4611
4615 Returns 0 on success, 1 if no rollback data is available.
4612 Returns 0 on success, 1 if no rollback data is available.
4616 """
4613 """
4617 if not ui.configbool('ui', 'rollback', True):
4614 if not ui.configbool('ui', 'rollback', True):
4618 raise error.Abort(_('rollback is disabled because it is unsafe'),
4615 raise error.Abort(_('rollback is disabled because it is unsafe'),
4619 hint=('see `hg help -v rollback` for information'))
4616 hint=('see `hg help -v rollback` for information'))
4620 return repo.rollback(dryrun=opts.get(r'dry_run'),
4617 return repo.rollback(dryrun=opts.get(r'dry_run'),
4621 force=opts.get(r'force'))
4618 force=opts.get(r'force'))
4622
4619
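# Example (editor's addition): a hedged sketch of a guarded, dry-run rollback
# using the same ui.configbool() gate and repo.rollback() call as rollback()
# above. Purely illustrative; it only reports what would be undone.
def rollback_dryrun_sketch(ui, repo):
    if not ui.configbool('ui', 'rollback', True):
        raise error.Abort(_('rollback is disabled because it is unsafe'))
    return repo.rollback(dryrun=True, force=False)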
4623 @command('root', [])
4620 @command('root', [])
4624 def root(ui, repo):
4621 def root(ui, repo):
4625 """print the root (top) of the current working directory
4622 """print the root (top) of the current working directory
4626
4623
4627 Print the root directory of the current repository.
4624 Print the root directory of the current repository.
4628
4625
4629 Returns 0 on success.
4626 Returns 0 on success.
4630 """
4627 """
4631 ui.write(repo.root + "\n")
4628 ui.write(repo.root + "\n")
4632
4629
4633 @command('^serve',
4630 @command('^serve',
4634 [('A', 'accesslog', '', _('name of access log file to write to'),
4631 [('A', 'accesslog', '', _('name of access log file to write to'),
4635 _('FILE')),
4632 _('FILE')),
4636 ('d', 'daemon', None, _('run server in background')),
4633 ('d', 'daemon', None, _('run server in background')),
4637 ('', 'daemon-postexec', [], _('used internally by daemon mode')),
4634 ('', 'daemon-postexec', [], _('used internally by daemon mode')),
4638 ('E', 'errorlog', '', _('name of error log file to write to'), _('FILE')),
4635 ('E', 'errorlog', '', _('name of error log file to write to'), _('FILE')),
4639 # use string type, then we can check if something was passed
4636 # use string type, then we can check if something was passed
4640 ('p', 'port', '', _('port to listen on (default: 8000)'), _('PORT')),
4637 ('p', 'port', '', _('port to listen on (default: 8000)'), _('PORT')),
4641 ('a', 'address', '', _('address to listen on (default: all interfaces)'),
4638 ('a', 'address', '', _('address to listen on (default: all interfaces)'),
4642 _('ADDR')),
4639 _('ADDR')),
4643 ('', 'prefix', '', _('prefix path to serve from (default: server root)'),
4640 ('', 'prefix', '', _('prefix path to serve from (default: server root)'),
4644 _('PREFIX')),
4641 _('PREFIX')),
4645 ('n', 'name', '',
4642 ('n', 'name', '',
4646 _('name to show in web pages (default: working directory)'), _('NAME')),
4643 _('name to show in web pages (default: working directory)'), _('NAME')),
4647 ('', 'web-conf', '',
4644 ('', 'web-conf', '',
4648 _("name of the hgweb config file (see 'hg help hgweb')"), _('FILE')),
4645 _("name of the hgweb config file (see 'hg help hgweb')"), _('FILE')),
4649 ('', 'webdir-conf', '', _('name of the hgweb config file (DEPRECATED)'),
4646 ('', 'webdir-conf', '', _('name of the hgweb config file (DEPRECATED)'),
4650 _('FILE')),
4647 _('FILE')),
4651 ('', 'pid-file', '', _('name of file to write process ID to'), _('FILE')),
4648 ('', 'pid-file', '', _('name of file to write process ID to'), _('FILE')),
4652 ('', 'stdio', None, _('for remote clients (ADVANCED)')),
4649 ('', 'stdio', None, _('for remote clients (ADVANCED)')),
4653 ('', 'cmdserver', '', _('for remote clients (ADVANCED)'), _('MODE')),
4650 ('', 'cmdserver', '', _('for remote clients (ADVANCED)'), _('MODE')),
4654 ('t', 'templates', '', _('web templates to use'), _('TEMPLATE')),
4651 ('t', 'templates', '', _('web templates to use'), _('TEMPLATE')),
4655 ('', 'style', '', _('template style to use'), _('STYLE')),
4652 ('', 'style', '', _('template style to use'), _('STYLE')),
4656 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
4653 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
4657 ('', 'certificate', '', _('SSL certificate file'), _('FILE'))]
4654 ('', 'certificate', '', _('SSL certificate file'), _('FILE'))]
4658 + subrepoopts,
4655 + subrepoopts,
4659 _('[OPTION]...'),
4656 _('[OPTION]...'),
4660 optionalrepo=True)
4657 optionalrepo=True)
4661 def serve(ui, repo, **opts):
4658 def serve(ui, repo, **opts):
4662 """start stand-alone webserver
4659 """start stand-alone webserver
4663
4660
4664 Start a local HTTP repository browser and pull server. You can use
4661 Start a local HTTP repository browser and pull server. You can use
4665 this for ad-hoc sharing and browsing of repositories. It is
4662 this for ad-hoc sharing and browsing of repositories. It is
4666 recommended to use a real web server to serve a repository for
4663 recommended to use a real web server to serve a repository for
4667 longer periods of time.
4664 longer periods of time.
4668
4665
4669 Please note that the server does not implement access control.
4666 Please note that the server does not implement access control.
4670 This means that, by default, anybody can read from the server and
4667 This means that, by default, anybody can read from the server and
4671 nobody can write to it. Set the ``web.allow_push``
4668 nobody can write to it. Set the ``web.allow_push``
4672 option to ``*`` to allow everybody to push to the server. You
4669 option to ``*`` to allow everybody to push to the server. You
4673 should use a real web server if you need to authenticate users.
4670 should use a real web server if you need to authenticate users.
4674
4671
4675 By default, the server logs accesses to stdout and errors to
4672 By default, the server logs accesses to stdout and errors to
4676 stderr. Use the -A/--accesslog and -E/--errorlog options to log to
4673 stderr. Use the -A/--accesslog and -E/--errorlog options to log to
4677 files.
4674 files.
4678
4675
4679 To have the server choose a free port number to listen on, specify
4676 To have the server choose a free port number to listen on, specify
4680 a port number of 0; in this case, the server will print the port
4677 a port number of 0; in this case, the server will print the port
4681 number it uses.
4678 number it uses.
4682
4679
4683 Returns 0 on success.
4680 Returns 0 on success.
4684 """
4681 """
4685
4682
4686 opts = pycompat.byteskwargs(opts)
4683 opts = pycompat.byteskwargs(opts)
4687 if opts["stdio"] and opts["cmdserver"]:
4684 if opts["stdio"] and opts["cmdserver"]:
4688 raise error.Abort(_("cannot use --stdio with --cmdserver"))
4685 raise error.Abort(_("cannot use --stdio with --cmdserver"))
4689
4686
4690 if opts["stdio"]:
4687 if opts["stdio"]:
4691 if repo is None:
4688 if repo is None:
4692 raise error.RepoError(_("there is no Mercurial repository here"
4689 raise error.RepoError(_("there is no Mercurial repository here"
4693 " (.hg not found)"))
4690 " (.hg not found)"))
4694 s = sshserver.sshserver(ui, repo)
4691 s = sshserver.sshserver(ui, repo)
4695 s.serve_forever()
4692 s.serve_forever()
4696
4693
4697 service = server.createservice(ui, repo, opts)
4694 service = server.createservice(ui, repo, opts)
4698 return server.runservice(opts, initfn=service.init, runfn=service.run)
4695 return server.runservice(opts, initfn=service.init, runfn=service.run)
4699
4696
4700 @command('^status|st',
4697 @command('^status|st',
4701 [('A', 'all', None, _('show status of all files')),
4698 [('A', 'all', None, _('show status of all files')),
4702 ('m', 'modified', None, _('show only modified files')),
4699 ('m', 'modified', None, _('show only modified files')),
4703 ('a', 'added', None, _('show only added files')),
4700 ('a', 'added', None, _('show only added files')),
4704 ('r', 'removed', None, _('show only removed files')),
4701 ('r', 'removed', None, _('show only removed files')),
4705 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
4702 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
4706 ('c', 'clean', None, _('show only files without changes')),
4703 ('c', 'clean', None, _('show only files without changes')),
4707 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
4704 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
4708 ('i', 'ignored', None, _('show only ignored files')),
4705 ('i', 'ignored', None, _('show only ignored files')),
4709 ('n', 'no-status', None, _('hide status prefix')),
4706 ('n', 'no-status', None, _('hide status prefix')),
4710 ('C', 'copies', None, _('show source of copied files')),
4707 ('C', 'copies', None, _('show source of copied files')),
4711 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
4708 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
4712 ('', 'rev', [], _('show difference from revision'), _('REV')),
4709 ('', 'rev', [], _('show difference from revision'), _('REV')),
4713 ('', 'change', '', _('list the changed files of a revision'), _('REV')),
4710 ('', 'change', '', _('list the changed files of a revision'), _('REV')),
4714 ] + walkopts + subrepoopts + formatteropts,
4711 ] + walkopts + subrepoopts + formatteropts,
4715 _('[OPTION]... [FILE]...'),
4712 _('[OPTION]... [FILE]...'),
4716 inferrepo=True)
4713 inferrepo=True)
4717 def status(ui, repo, *pats, **opts):
4714 def status(ui, repo, *pats, **opts):
4718 """show changed files in the working directory
4715 """show changed files in the working directory
4719
4716
4720 Show status of files in the repository. If names are given, only
4717 Show status of files in the repository. If names are given, only
4721 files that match are shown. Files that are clean or ignored or
4718 files that match are shown. Files that are clean or ignored or
4722 the source of a copy/move operation are not listed unless
4719 the source of a copy/move operation are not listed unless
4723 -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
4720 -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
4724 Unless options described with "show only ..." are given, the
4721 Unless options described with "show only ..." are given, the
4725 options -mardu are used.
4722 options -mardu are used.
4726
4723
4727 Option -q/--quiet hides untracked (unknown and ignored) files
4724 Option -q/--quiet hides untracked (unknown and ignored) files
4728 unless explicitly requested with -u/--unknown or -i/--ignored.
4725 unless explicitly requested with -u/--unknown or -i/--ignored.
4729
4726
4730 .. note::
4727 .. note::
4731
4728
4732 :hg:`status` may appear to disagree with diff if permissions have
4729 :hg:`status` may appear to disagree with diff if permissions have
4733 changed or a merge has occurred. The standard diff format does
4730 changed or a merge has occurred. The standard diff format does
4734 not report permission changes and diff only reports changes
4731 not report permission changes and diff only reports changes
4735 relative to one merge parent.
4732 relative to one merge parent.
4736
4733
4737 If one revision is given, it is used as the base revision.
4734 If one revision is given, it is used as the base revision.
4738 If two revisions are given, the differences between them are
4735 If two revisions are given, the differences between them are
4739 shown. The --change option can also be used as a shortcut to list
4736 shown. The --change option can also be used as a shortcut to list
4740 the changed files of a revision from its first parent.
4737 the changed files of a revision from its first parent.
4741
4738
4742 The codes used to show the status of files are::
4739 The codes used to show the status of files are::
4743
4740
4744 M = modified
4741 M = modified
4745 A = added
4742 A = added
4746 R = removed
4743 R = removed
4747 C = clean
4744 C = clean
4748 ! = missing (deleted by non-hg command, but still tracked)
4745 ! = missing (deleted by non-hg command, but still tracked)
4749 ? = not tracked
4746 ? = not tracked
4750 I = ignored
4747 I = ignored
4751 = origin of the previous file (with --copies)
4748 = origin of the previous file (with --copies)
4752
4749
4753 .. container:: verbose
4750 .. container:: verbose
4754
4751
4755 Examples:
4752 Examples:
4756
4753
4757 - show changes in the working directory relative to a
4754 - show changes in the working directory relative to a
4758 changeset::
4755 changeset::
4759
4756
4760 hg status --rev 9353
4757 hg status --rev 9353
4761
4758
4762 - show changes in the working directory relative to the
4759 - show changes in the working directory relative to the
4763 current directory (see :hg:`help patterns` for more information)::
4760 current directory (see :hg:`help patterns` for more information)::
4764
4761
4765 hg status re:
4762 hg status re:
4766
4763
4767 - show all changes including copies in an existing changeset::
4764 - show all changes including copies in an existing changeset::
4768
4765
4769 hg status --copies --change 9353
4766 hg status --copies --change 9353
4770
4767
4771 - get a NUL separated list of added files, suitable for xargs::
4768 - get a NUL separated list of added files, suitable for xargs::
4772
4769
4773 hg status -an0
4770 hg status -an0
4774
4771
4775 Returns 0 on success.
4772 Returns 0 on success.
4776 """
4773 """
4777
4774
4778 opts = pycompat.byteskwargs(opts)
4775 opts = pycompat.byteskwargs(opts)
4779 revs = opts.get('rev')
4776 revs = opts.get('rev')
4780 change = opts.get('change')
4777 change = opts.get('change')
4781
4778
4782 if revs and change:
4779 if revs and change:
4783 msg = _('cannot specify --rev and --change at the same time')
4780 msg = _('cannot specify --rev and --change at the same time')
4784 raise error.Abort(msg)
4781 raise error.Abort(msg)
4785 elif change:
4782 elif change:
4786 node2 = scmutil.revsingle(repo, change, None).node()
4783 node2 = scmutil.revsingle(repo, change, None).node()
4787 node1 = repo[node2].p1().node()
4784 node1 = repo[node2].p1().node()
4788 else:
4785 else:
4789 node1, node2 = scmutil.revpair(repo, revs)
4786 node1, node2 = scmutil.revpair(repo, revs)
4790
4787
4791 if pats or ui.configbool('commands', 'status.relative'):
4788 if pats or ui.configbool('commands', 'status.relative'):
4792 cwd = repo.getcwd()
4789 cwd = repo.getcwd()
4793 else:
4790 else:
4794 cwd = ''
4791 cwd = ''
4795
4792
4796 if opts.get('print0'):
4793 if opts.get('print0'):
4797 end = '\0'
4794 end = '\0'
4798 else:
4795 else:
4799 end = '\n'
4796 end = '\n'
4800 copy = {}
4797 copy = {}
4801 states = 'modified added removed deleted unknown ignored clean'.split()
4798 states = 'modified added removed deleted unknown ignored clean'.split()
4802 show = [k for k in states if opts.get(k)]
4799 show = [k for k in states if opts.get(k)]
4803 if opts.get('all'):
4800 if opts.get('all'):
4804 show += ui.quiet and (states[:4] + ['clean']) or states
4801 show += ui.quiet and (states[:4] + ['clean']) or states
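# Note (illustrative): with --all, quiet mode shows the first four states
# (modified, added, removed, deleted) plus clean, while normal mode shows all
# seven states; the `and ... or ...` pair is the pre-ternary conditional idiom.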
4805 if not show:
4802 if not show:
4806 if ui.quiet:
4803 if ui.quiet:
4807 show = states[:4]
4804 show = states[:4]
4808 else:
4805 else:
4809 show = states[:5]
4806 show = states[:5]
4810
4807
4811 m = scmutil.match(repo[node2], pats, opts)
4808 m = scmutil.match(repo[node2], pats, opts)
4812 stat = repo.status(node1, node2, m,
4809 stat = repo.status(node1, node2, m,
4813 'ignored' in show, 'clean' in show, 'unknown' in show,
4810 'ignored' in show, 'clean' in show, 'unknown' in show,
4814 opts.get('subrepos'))
4811 opts.get('subrepos'))
4815 changestates = zip(states, pycompat.iterbytestr('MAR!?IC'), stat)
4812 changestates = zip(states, pycompat.iterbytestr('MAR!?IC'), stat)
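# changestates pairs each state name with its status letter and file list,
# e.g. ('modified', 'M', files), ('added', 'A', files), ..., ('clean', 'C',
# files), matching the codes documented in the help text above.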
4816
4813
4817 if (opts.get('all') or opts.get('copies')
4814 if (opts.get('all') or opts.get('copies')
4818 or ui.configbool('ui', 'statuscopies')) and not opts.get('no_status'):
4815 or ui.configbool('ui', 'statuscopies')) and not opts.get('no_status'):
4819 copy = copies.pathcopies(repo[node1], repo[node2], m)
4816 copy = copies.pathcopies(repo[node1], repo[node2], m)
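# copy maps destination path -> source path for files copied or renamed
# between node1 and node2; it feeds the "origin of the previous file" lines
# printed with --copies below.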
4820
4817
4821 ui.pager('status')
4818 ui.pager('status')
4822 fm = ui.formatter('status', opts)
4819 fm = ui.formatter('status', opts)
4823 fmt = '%s' + end
4820 fmt = '%s' + end
4824 showchar = not opts.get('no_status')
4821 showchar = not opts.get('no_status')
4825
4822
4826 for state, char, files in changestates:
4823 for state, char, files in changestates:
4827 if state in show:
4824 if state in show:
4828 label = 'status.' + state
4825 label = 'status.' + state
4829 for f in files:
4826 for f in files:
4830 fm.startitem()
4827 fm.startitem()
4831 fm.condwrite(showchar, 'status', '%s ', char, label=label)
4828 fm.condwrite(showchar, 'status', '%s ', char, label=label)
4832 fm.write('path', fmt, repo.pathto(f, cwd), label=label)
4829 fm.write('path', fmt, repo.pathto(f, cwd), label=label)
4833 if f in copy:
4830 if f in copy:
4834 fm.write("copy", ' %s' + end, repo.pathto(copy[f], cwd),
4831 fm.write("copy", ' %s' + end, repo.pathto(copy[f], cwd),
4835 label='status.copied')
4832 label='status.copied')
4836 fm.end()
4833 fm.end()
4837
4834
4838 @command('^summary|sum',
4835 @command('^summary|sum',
4839 [('', 'remote', None, _('check for push and pull'))], '[--remote]')
4836 [('', 'remote', None, _('check for push and pull'))], '[--remote]')
4840 def summary(ui, repo, **opts):
4837 def summary(ui, repo, **opts):
4841 """summarize working directory state
4838 """summarize working directory state
4842
4839
4843 This generates a brief summary of the working directory state,
4840 This generates a brief summary of the working directory state,
4844 including parents, branch, commit status, phase and available updates.
4841 including parents, branch, commit status, phase and available updates.
4845
4842
4846 With the --remote option, this will check the default paths for
4843 With the --remote option, this will check the default paths for
4847 incoming and outgoing changes. This can be time-consuming.
4844 incoming and outgoing changes. This can be time-consuming.
4848
4845
4849 Returns 0 on success.
4846 Returns 0 on success.
4850 """
4847 """
4851
4848
4852 opts = pycompat.byteskwargs(opts)
4849 opts = pycompat.byteskwargs(opts)
4853 ui.pager('summary')
4850 ui.pager('summary')
4854 ctx = repo[None]
4851 ctx = repo[None]
4855 parents = ctx.parents()
4852 parents = ctx.parents()
4856 pnode = parents[0].node()
4853 pnode = parents[0].node()
4857 marks = []
4854 marks = []
4858
4855
4859 ms = None
4856 ms = None
4860 try:
4857 try:
4861 ms = mergemod.mergestate.read(repo)
4858 ms = mergemod.mergestate.read(repo)
4862 except error.UnsupportedMergeRecords as e:
4859 except error.UnsupportedMergeRecords as e:
4863 s = ' '.join(e.recordtypes)
4860 s = ' '.join(e.recordtypes)
4864 ui.warn(
4861 ui.warn(
4865 _('warning: merge state has unsupported record types: %s\n') % s)
4862 _('warning: merge state has unsupported record types: %s\n') % s)
4866 unresolved = 0
4863 unresolved = 0
4867 else:
4864 else:
4868 unresolved = [f for f in ms if ms[f] == 'u']
4865 unresolved = [f for f in ms if ms[f] == 'u']
4869
4866
4870 for p in parents:
4867 for p in parents:
4871 # label with log.changeset (instead of log.parent) since this
4868 # label with log.changeset (instead of log.parent) since this
4872 # shows a working directory parent *changeset*:
4869 # shows a working directory parent *changeset*:
4873 # i18n: column positioning for "hg summary"
4870 # i18n: column positioning for "hg summary"
4874 ui.write(_('parent: %d:%s ') % (p.rev(), p),
4871 ui.write(_('parent: %d:%s ') % (p.rev(), p),
4875 label=cmdutil._changesetlabels(p))
4872 label=cmdutil._changesetlabels(p))
4876 ui.write(' '.join(p.tags()), label='log.tag')
4873 ui.write(' '.join(p.tags()), label='log.tag')
4877 if p.bookmarks():
4874 if p.bookmarks():
4878 marks.extend(p.bookmarks())
4875 marks.extend(p.bookmarks())
4879 if p.rev() == -1:
4876 if p.rev() == -1:
4880 if not len(repo):
4877 if not len(repo):
4881 ui.write(_(' (empty repository)'))
4878 ui.write(_(' (empty repository)'))
4882 else:
4879 else:
4883 ui.write(_(' (no revision checked out)'))
4880 ui.write(_(' (no revision checked out)'))
4884 if p.obsolete():
4881 if p.obsolete():
4885 ui.write(_(' (obsolete)'))
4882 ui.write(_(' (obsolete)'))
4886 if p.troubled():
4883 if p.troubled():
4887 ui.write(' ('
4884 ui.write(' ('
4888 + ', '.join(ui.label(trouble, 'trouble.%s' % trouble)
4885 + ', '.join(ui.label(trouble, 'trouble.%s' % trouble)
4889 for trouble in p.troubles())
4886 for trouble in p.troubles())
4890 + ')')
4887 + ')')
4891 ui.write('\n')
4888 ui.write('\n')
4892 if p.description():
4889 if p.description():
4893 ui.status(' ' + p.description().splitlines()[0].strip() + '\n',
4890 ui.status(' ' + p.description().splitlines()[0].strip() + '\n',
4894 label='log.summary')
4891 label='log.summary')
4895
4892
4896 branch = ctx.branch()
4893 branch = ctx.branch()
4897 bheads = repo.branchheads(branch)
4894 bheads = repo.branchheads(branch)
4898 # i18n: column positioning for "hg summary"
4895 # i18n: column positioning for "hg summary"
4899 m = _('branch: %s\n') % branch
4896 m = _('branch: %s\n') % branch
4900 if branch != 'default':
4897 if branch != 'default':
4901 ui.write(m, label='log.branch')
4898 ui.write(m, label='log.branch')
4902 else:
4899 else:
4903 ui.status(m, label='log.branch')
4900 ui.status(m, label='log.branch')
4904
4901
4905 if marks:
4902 if marks:
4906 active = repo._activebookmark
4903 active = repo._activebookmark
4907 # i18n: column positioning for "hg summary"
4904 # i18n: column positioning for "hg summary"
4908 ui.write(_('bookmarks:'), label='log.bookmark')
4905 ui.write(_('bookmarks:'), label='log.bookmark')
4909 if active is not None:
4906 if active is not None:
4910 if active in marks:
4907 if active in marks:
4911 ui.write(' *' + active, label=activebookmarklabel)
4908 ui.write(' *' + active, label=activebookmarklabel)
4912 marks.remove(active)
4909 marks.remove(active)
4913 else:
4910 else:
4914 ui.write(' [%s]' % active, label=activebookmarklabel)
4911 ui.write(' [%s]' % active, label=activebookmarklabel)
4915 for m in marks:
4912 for m in marks:
4916 ui.write(' ' + m, label='log.bookmark')
4913 ui.write(' ' + m, label='log.bookmark')
4917 ui.write('\n', label='log.bookmark')
4914 ui.write('\n', label='log.bookmark')
4918
4915
4919 status = repo.status(unknown=True)
4916 status = repo.status(unknown=True)
4920
4917
4921 c = repo.dirstate.copies()
4918 c = repo.dirstate.copies()
4922 copied, renamed = [], []
4919 copied, renamed = [], []
4923 for d, s in c.iteritems():
4920 for d, s in c.iteritems():
4924 if s in status.removed:
4921 if s in status.removed:
4925 status.removed.remove(s)
4922 status.removed.remove(s)
4926 renamed.append(d)
4923 renamed.append(d)
4927 else:
4924 else:
4928 copied.append(d)
4925 copied.append(d)
4929 if d in status.added:
4926 if d in status.added:
4930 status.added.remove(d)
4927 status.added.remove(d)
4931
4928
4932 subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
4929 subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
4933
4930
4934 labels = [(ui.label(_('%d modified'), 'status.modified'), status.modified),
4931 labels = [(ui.label(_('%d modified'), 'status.modified'), status.modified),
4935 (ui.label(_('%d added'), 'status.added'), status.added),
4932 (ui.label(_('%d added'), 'status.added'), status.added),
4936 (ui.label(_('%d removed'), 'status.removed'), status.removed),
4933 (ui.label(_('%d removed'), 'status.removed'), status.removed),
4937 (ui.label(_('%d renamed'), 'status.copied'), renamed),
4934 (ui.label(_('%d renamed'), 'status.copied'), renamed),
4938 (ui.label(_('%d copied'), 'status.copied'), copied),
4935 (ui.label(_('%d copied'), 'status.copied'), copied),
4939 (ui.label(_('%d deleted'), 'status.deleted'), status.deleted),
4936 (ui.label(_('%d deleted'), 'status.deleted'), status.deleted),
4940 (ui.label(_('%d unknown'), 'status.unknown'), status.unknown),
4937 (ui.label(_('%d unknown'), 'status.unknown'), status.unknown),
4941 (ui.label(_('%d unresolved'), 'resolve.unresolved'), unresolved),
4938 (ui.label(_('%d unresolved'), 'resolve.unresolved'), unresolved),
4942 (ui.label(_('%d subrepos'), 'status.modified'), subs)]
4939 (ui.label(_('%d subrepos'), 'status.modified'), subs)]
4943 t = []
4940 t = []
4944 for l, s in labels:
4941 for l, s in labels:
4945 if s:
4942 if s:
4946 t.append(l % len(s))
4943 t.append(l % len(s))
4947
4944
4948 t = ', '.join(t)
4945 t = ', '.join(t)
4949 cleanworkdir = False
4946 cleanworkdir = False
4950
4947
4951 if repo.vfs.exists('graftstate'):
4948 if repo.vfs.exists('graftstate'):
4952 t += _(' (graft in progress)')
4949 t += _(' (graft in progress)')
4953 if repo.vfs.exists('updatestate'):
4950 if repo.vfs.exists('updatestate'):
4954 t += _(' (interrupted update)')
4951 t += _(' (interrupted update)')
4955 elif len(parents) > 1:
4952 elif len(parents) > 1:
4956 t += _(' (merge)')
4953 t += _(' (merge)')
4957 elif branch != parents[0].branch():
4954 elif branch != parents[0].branch():
4958 t += _(' (new branch)')
4955 t += _(' (new branch)')
4959 elif (parents[0].closesbranch() and
4956 elif (parents[0].closesbranch() and
4960 pnode in repo.branchheads(branch, closed=True)):
4957 pnode in repo.branchheads(branch, closed=True)):
4961 t += _(' (head closed)')
4958 t += _(' (head closed)')
4962 elif not (status.modified or status.added or status.removed or renamed or
4959 elif not (status.modified or status.added or status.removed or renamed or
4963 copied or subs):
4960 copied or subs):
4964 t += _(' (clean)')
4961 t += _(' (clean)')
4965 cleanworkdir = True
4962 cleanworkdir = True
4966 elif pnode not in bheads:
4963 elif pnode not in bheads:
4967 t += _(' (new branch head)')
4964 t += _(' (new branch head)')
4968
4965
4969 if parents:
4966 if parents:
4970 pendingphase = max(p.phase() for p in parents)
4967 pendingphase = max(p.phase() for p in parents)
4971 else:
4968 else:
4972 pendingphase = phases.public
4969 pendingphase = phases.public
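# pendingphase is the least-public phase among the working directory parents
# (public < draft < secret); it is only reported below when it exceeds the
# configured phase for new commits.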
4973
4970
4974 if pendingphase > phases.newcommitphase(ui):
4971 if pendingphase > phases.newcommitphase(ui):
4975 t += ' (%s)' % phases.phasenames[pendingphase]
4972 t += ' (%s)' % phases.phasenames[pendingphase]
4976
4973
4977 if cleanworkdir:
4974 if cleanworkdir:
4978 # i18n: column positioning for "hg summary"
4975 # i18n: column positioning for "hg summary"
4979 ui.status(_('commit: %s\n') % t.strip())
4976 ui.status(_('commit: %s\n') % t.strip())
4980 else:
4977 else:
4981 # i18n: column positioning for "hg summary"
4978 # i18n: column positioning for "hg summary"
4982 ui.write(_('commit: %s\n') % t.strip())
4979 ui.write(_('commit: %s\n') % t.strip())
4983
4980
4984 # all ancestors of branch heads - all ancestors of parent = new csets
4981 # all ancestors of branch heads - all ancestors of parent = new csets
4985 new = len(repo.changelog.findmissing([pctx.node() for pctx in parents],
4982 new = len(repo.changelog.findmissing([pctx.node() for pctx in parents],
4986 bheads))
4983 bheads))
4987
4984
4988 if new == 0:
4985 if new == 0:
4989 # i18n: column positioning for "hg summary"
4986 # i18n: column positioning for "hg summary"
4990 ui.status(_('update: (current)\n'))
4987 ui.status(_('update: (current)\n'))
4991 elif pnode not in bheads:
4988 elif pnode not in bheads:
4992 # i18n: column positioning for "hg summary"
4989 # i18n: column positioning for "hg summary"
4993 ui.write(_('update: %d new changesets (update)\n') % new)
4990 ui.write(_('update: %d new changesets (update)\n') % new)
4994 else:
4991 else:
4995 # i18n: column positioning for "hg summary"
4992 # i18n: column positioning for "hg summary"
4996 ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
4993 ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
4997 (new, len(bheads)))
4994 (new, len(bheads)))
4998
4995
4999 t = []
4996 t = []
5000 draft = len(repo.revs('draft()'))
4997 draft = len(repo.revs('draft()'))
5001 if draft:
4998 if draft:
5002 t.append(_('%d draft') % draft)
4999 t.append(_('%d draft') % draft)
5003 secret = len(repo.revs('secret()'))
5000 secret = len(repo.revs('secret()'))
5004 if secret:
5001 if secret:
5005 t.append(_('%d secret') % secret)
5002 t.append(_('%d secret') % secret)
5006
5003
5007 if draft or secret:
5004 if draft or secret:
5008 ui.status(_('phases: %s\n') % ', '.join(t))
5005 ui.status(_('phases: %s\n') % ', '.join(t))
5009
5006
5010 if obsolete.isenabled(repo, obsolete.createmarkersopt):
5007 if obsolete.isenabled(repo, obsolete.createmarkersopt):
5011 for trouble in ("unstable", "divergent", "bumped"):
5008 for trouble in ("unstable", "divergent", "bumped"):
5012 numtrouble = len(repo.revs(trouble + "()"))
5009 numtrouble = len(repo.revs(trouble + "()"))
5013 # We write all the possibilities to ease translation
5010 # We write all the possibilities to ease translation
5014 troublemsg = {
5011 troublemsg = {
5015 "unstable": _("unstable: %d changesets"),
5012 "unstable": _("unstable: %d changesets"),
5016 "divergent": _("divergent: %d changesets"),
5013 "divergent": _("divergent: %d changesets"),
5017 "bumped": _("bumped: %d changesets"),
5014 "bumped": _("bumped: %d changesets"),
5018 }
5015 }
5019 if numtrouble > 0:
5016 if numtrouble > 0:
5020 ui.status(troublemsg[trouble] % numtrouble + "\n")
5017 ui.status(troublemsg[trouble] % numtrouble + "\n")
5021
5018
5022 cmdutil.summaryhooks(ui, repo)
5019 cmdutil.summaryhooks(ui, repo)
5023
5020
5024 if opts.get('remote'):
5021 if opts.get('remote'):
5025 needsincoming, needsoutgoing = True, True
5022 needsincoming, needsoutgoing = True, True
5026 else:
5023 else:
5027 needsincoming, needsoutgoing = False, False
5024 needsincoming, needsoutgoing = False, False
5028 for i, o in cmdutil.summaryremotehooks(ui, repo, opts, None):
5025 for i, o in cmdutil.summaryremotehooks(ui, repo, opts, None):
5029 if i:
5026 if i:
5030 needsincoming = True
5027 needsincoming = True
5031 if o:
5028 if o:
5032 needsoutgoing = True
5029 needsoutgoing = True
5033 if not needsincoming and not needsoutgoing:
5030 if not needsincoming and not needsoutgoing:
5034 return
5031 return
5035
5032
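# getincoming probes the 'default' path for incoming changesets and returns a
# (source, sbranch, peer, commoninc, incoming) tuple; the peer and discovery
# results are left as None when the remote is unreachable and --remote was not
# explicitly requested.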
5036 def getincoming():
5033 def getincoming():
5037 source, branches = hg.parseurl(ui.expandpath('default'))
5034 source, branches = hg.parseurl(ui.expandpath('default'))
5038 sbranch = branches[0]
5035 sbranch = branches[0]
5039 try:
5036 try:
5040 other = hg.peer(repo, {}, source)
5037 other = hg.peer(repo, {}, source)
5041 except error.RepoError:
5038 except error.RepoError:
5042 if opts.get('remote'):
5039 if opts.get('remote'):
5043 raise
5040 raise
5044 return source, sbranch, None, None, None
5041 return source, sbranch, None, None, None
5045 revs, checkout = hg.addbranchrevs(repo, other, branches, None)
5042 revs, checkout = hg.addbranchrevs(repo, other, branches, None)
5046 if revs:
5043 if revs:
5047 revs = [other.lookup(rev) for rev in revs]
5044 revs = [other.lookup(rev) for rev in revs]
5048 ui.debug('comparing with %s\n' % util.hidepassword(source))
5045 ui.debug('comparing with %s\n' % util.hidepassword(source))
5049 repo.ui.pushbuffer()
5046 repo.ui.pushbuffer()
5050 commoninc = discovery.findcommonincoming(repo, other, heads=revs)
5047 commoninc = discovery.findcommonincoming(repo, other, heads=revs)
5051 repo.ui.popbuffer()
5048 repo.ui.popbuffer()
5052 return source, sbranch, other, commoninc, commoninc[1]
5049 return source, sbranch, other, commoninc, commoninc[1]
5053
5050
5054 if needsincoming:
5051 if needsincoming:
5055 source, sbranch, sother, commoninc, incoming = getincoming()
5052 source, sbranch, sother, commoninc, incoming = getincoming()
5056 else:
5053 else:
5057 source = sbranch = sother = commoninc = incoming = None
5054 source = sbranch = sother = commoninc = incoming = None
5058
5055
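# getoutgoing mirrors getincoming for the push side: it probes 'default-push'
# (falling back to 'default') and reuses the incoming peer and common-heads
# result when source and destination turn out to be the same repository.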
5059 def getoutgoing():
5056 def getoutgoing():
5060 dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
5057 dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
5061 dbranch = branches[0]
5058 dbranch = branches[0]
5062 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
5059 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
5063 if source != dest:
5060 if source != dest:
5064 try:
5061 try:
5065 dother = hg.peer(repo, {}, dest)
5062 dother = hg.peer(repo, {}, dest)
5066 except error.RepoError:
5063 except error.RepoError:
5067 if opts.get('remote'):
5064 if opts.get('remote'):
5068 raise
5065 raise
5069 return dest, dbranch, None, None
5066 return dest, dbranch, None, None
5070 ui.debug('comparing with %s\n' % util.hidepassword(dest))
5067 ui.debug('comparing with %s\n' % util.hidepassword(dest))
5071 elif sother is None:
5068 elif sother is None:
5072 # there is no explicit destination peer, but source one is invalid
5069 # there is no explicit destination peer, but source one is invalid
5073 return dest, dbranch, None, None
5070 return dest, dbranch, None, None
5074 else:
5071 else:
5075 dother = sother
5072 dother = sother
5076 if (source != dest or (sbranch is not None and sbranch != dbranch)):
5073 if (source != dest or (sbranch is not None and sbranch != dbranch)):
5077 common = None
5074 common = None
5078 else:
5075 else:
5079 common = commoninc
5076 common = commoninc
5080 if revs:
5077 if revs:
5081 revs = [repo.lookup(rev) for rev in revs]
5078 revs = [repo.lookup(rev) for rev in revs]
5082 repo.ui.pushbuffer()
5079 repo.ui.pushbuffer()
5083 outgoing = discovery.findcommonoutgoing(repo, dother, onlyheads=revs,
5080 outgoing = discovery.findcommonoutgoing(repo, dother, onlyheads=revs,
5084 commoninc=common)
5081 commoninc=common)
5085 repo.ui.popbuffer()
5082 repo.ui.popbuffer()
5086 return dest, dbranch, dother, outgoing
5083 return dest, dbranch, dother, outgoing
5087
5084
5088 if needsoutgoing:
5085 if needsoutgoing:
5089 dest, dbranch, dother, outgoing = getoutgoing()
5086 dest, dbranch, dother, outgoing = getoutgoing()
5090 else:
5087 else:
5091 dest = dbranch = dother = outgoing = None
5088 dest = dbranch = dother = outgoing = None
5092
5089
5093 if opts.get('remote'):
5090 if opts.get('remote'):
5094 t = []
5091 t = []
5095 if incoming:
5092 if incoming:
5096 t.append(_('1 or more incoming'))
5093 t.append(_('1 or more incoming'))
5097 o = outgoing.missing
5094 o = outgoing.missing
5098 if o:
5095 if o:
5099 t.append(_('%d outgoing') % len(o))
5096 t.append(_('%d outgoing') % len(o))
5100 other = dother or sother
5097 other = dother or sother
5101 if 'bookmarks' in other.listkeys('namespaces'):
5098 if 'bookmarks' in other.listkeys('namespaces'):
5102 counts = bookmarks.summary(repo, other)
5099 counts = bookmarks.summary(repo, other)
5103 if counts[0] > 0:
5100 if counts[0] > 0:
5104 t.append(_('%d incoming bookmarks') % counts[0])
5101 t.append(_('%d incoming bookmarks') % counts[0])
5105 if counts[1] > 0:
5102 if counts[1] > 0:
5106 t.append(_('%d outgoing bookmarks') % counts[1])
5103 t.append(_('%d outgoing bookmarks') % counts[1])
5107
5104
5108 if t:
5105 if t:
5109 # i18n: column positioning for "hg summary"
5106 # i18n: column positioning for "hg summary"
5110 ui.write(_('remote: %s\n') % (', '.join(t)))
5107 ui.write(_('remote: %s\n') % (', '.join(t)))
5111 else:
5108 else:
5112 # i18n: column positioning for "hg summary"
5109 # i18n: column positioning for "hg summary"
5113 ui.status(_('remote: (synced)\n'))
5110 ui.status(_('remote: (synced)\n'))
5114
5111
5115 cmdutil.summaryremotehooks(ui, repo, opts,
5112 cmdutil.summaryremotehooks(ui, repo, opts,
5116 ((source, sbranch, sother, commoninc),
5113 ((source, sbranch, sother, commoninc),
5117 (dest, dbranch, dother, outgoing)))
5114 (dest, dbranch, dother, outgoing)))
5118
5115
5119 @command('tag',
5116 @command('tag',
5120 [('f', 'force', None, _('force tag')),
5117 [('f', 'force', None, _('force tag')),
5121 ('l', 'local', None, _('make the tag local')),
5118 ('l', 'local', None, _('make the tag local')),
5122 ('r', 'rev', '', _('revision to tag'), _('REV')),
5119 ('r', 'rev', '', _('revision to tag'), _('REV')),
5123 ('', 'remove', None, _('remove a tag')),
5120 ('', 'remove', None, _('remove a tag')),
5124 # -l/--local is already there, commitopts cannot be used
5121 # -l/--local is already there, commitopts cannot be used
5125 ('e', 'edit', None, _('invoke editor on commit messages')),
5122 ('e', 'edit', None, _('invoke editor on commit messages')),
5126 ('m', 'message', '', _('use text as commit message'), _('TEXT')),
5123 ('m', 'message', '', _('use text as commit message'), _('TEXT')),
5127 ] + commitopts2,
5124 ] + commitopts2,
5128 _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...'))
5125 _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...'))
5129 def tag(ui, repo, name1, *names, **opts):
5126 def tag(ui, repo, name1, *names, **opts):
5130 """add one or more tags for the current or given revision
5127 """add one or more tags for the current or given revision
5131
5128
5132 Name a particular revision using <name>.
5129 Name a particular revision using <name>.
5133
5130
5134 Tags are used to name particular revisions of the repository and are
5131 Tags are used to name particular revisions of the repository and are
5135 very useful to compare different revisions, to go back to significant
5132 very useful to compare different revisions, to go back to significant
5136 earlier versions or to mark branch points as releases, etc. Changing
5133 earlier versions or to mark branch points as releases, etc. Changing
5137 an existing tag is normally disallowed; use -f/--force to override.
5134 an existing tag is normally disallowed; use -f/--force to override.
5138
5135
5139 If no revision is given, the parent of the working directory is
5136 If no revision is given, the parent of the working directory is
5140 used.
5137 used.
5141
5138
5142 To facilitate version control, distribution, and merging of tags,
5139 To facilitate version control, distribution, and merging of tags,
5143 they are stored as a file named ".hgtags" which is managed similarly
5140 they are stored as a file named ".hgtags" which is managed similarly
5144 to other project files and can be hand-edited if necessary. This
5141 to other project files and can be hand-edited if necessary. This
5145 also means that tagging creates a new commit. The file
5142 also means that tagging creates a new commit. The file
5146 ".hg/localtags" is used for local tags (not shared among
5143 ".hg/localtags" is used for local tags (not shared among
5147 repositories).
5144 repositories).
5148
5145
5149 Tag commits are usually made at the head of a branch. If the parent
5146 Tag commits are usually made at the head of a branch. If the parent
5150 of the working directory is not a branch head, :hg:`tag` aborts; use
5147 of the working directory is not a branch head, :hg:`tag` aborts; use
5151 -f/--force to force the tag commit to be based on a non-head
5148 -f/--force to force the tag commit to be based on a non-head
5152 changeset.
5149 changeset.
5153
5150
5154 See :hg:`help dates` for a list of formats valid for -d/--date.
5151 See :hg:`help dates` for a list of formats valid for -d/--date.
5155
5152
5156 Since tag names have priority over branch names during revision
5153 Since tag names have priority over branch names during revision
5157 lookup, using an existing branch name as a tag name is discouraged.
5154 lookup, using an existing branch name as a tag name is discouraged.
5158
5155
5159 Returns 0 on success.
5156 Returns 0 on success.
5160 """
5157 """
5161 opts = pycompat.byteskwargs(opts)
5158 opts = pycompat.byteskwargs(opts)
5162 wlock = lock = None
5159 wlock = lock = None
5163 try:
5160 try:
5164 wlock = repo.wlock()
5161 wlock = repo.wlock()
5165 lock = repo.lock()
5162 lock = repo.lock()
5166 rev_ = "."
5163 rev_ = "."
5167 names = [t.strip() for t in (name1,) + names]
5164 names = [t.strip() for t in (name1,) + names]
5168 if len(names) != len(set(names)):
5165 if len(names) != len(set(names)):
5169 raise error.Abort(_('tag names must be unique'))
5166 raise error.Abort(_('tag names must be unique'))
5170 for n in names:
5167 for n in names:
5171 scmutil.checknewlabel(repo, n, 'tag')
5168 scmutil.checknewlabel(repo, n, 'tag')
5172 if not n:
5169 if not n:
5173 raise error.Abort(_('tag names cannot consist entirely of '
5170 raise error.Abort(_('tag names cannot consist entirely of '
5174 'whitespace'))
5171 'whitespace'))
5175 if opts.get('rev') and opts.get('remove'):
5172 if opts.get('rev') and opts.get('remove'):
5176 raise error.Abort(_("--rev and --remove are incompatible"))
5173 raise error.Abort(_("--rev and --remove are incompatible"))
5177 if opts.get('rev'):
5174 if opts.get('rev'):
5178 rev_ = opts['rev']
5175 rev_ = opts['rev']
5179 message = opts.get('message')
5176 message = opts.get('message')
5180 if opts.get('remove'):
5177 if opts.get('remove'):
5181 if opts.get('local'):
5178 if opts.get('local'):
5182 expectedtype = 'local'
5179 expectedtype = 'local'
5183 else:
5180 else:
5184 expectedtype = 'global'
5181 expectedtype = 'global'
5185
5182
5186 for n in names:
5183 for n in names:
5187 if not repo.tagtype(n):
5184 if not repo.tagtype(n):
5188 raise error.Abort(_("tag '%s' does not exist") % n)
5185 raise error.Abort(_("tag '%s' does not exist") % n)
5189 if repo.tagtype(n) != expectedtype:
5186 if repo.tagtype(n) != expectedtype:
5190 if expectedtype == 'global':
5187 if expectedtype == 'global':
5191 raise error.Abort(_("tag '%s' is not a global tag") % n)
5188 raise error.Abort(_("tag '%s' is not a global tag") % n)
5192 else:
5189 else:
5193 raise error.Abort(_("tag '%s' is not a local tag") % n)
5190 raise error.Abort(_("tag '%s' is not a local tag") % n)
5194 rev_ = 'null'
5191 rev_ = 'null'
5195 if not message:
5192 if not message:
5196 # we don't translate commit messages
5193 # we don't translate commit messages
5197 message = 'Removed tag %s' % ', '.join(names)
5194 message = 'Removed tag %s' % ', '.join(names)
5198 elif not opts.get('force'):
5195 elif not opts.get('force'):
5199 for n in names:
5196 for n in names:
5200 if n in repo.tags():
5197 if n in repo.tags():
5201 raise error.Abort(_("tag '%s' already exists "
5198 raise error.Abort(_("tag '%s' already exists "
5202 "(use -f to force)") % n)
5199 "(use -f to force)") % n)
5203 if not opts.get('local'):
5200 if not opts.get('local'):
5204 p1, p2 = repo.dirstate.parents()
5201 p1, p2 = repo.dirstate.parents()
5205 if p2 != nullid:
5202 if p2 != nullid:
5206 raise error.Abort(_('uncommitted merge'))
5203 raise error.Abort(_('uncommitted merge'))
5207 bheads = repo.branchheads()
5204 bheads = repo.branchheads()
5208 if not opts.get('force') and bheads and p1 not in bheads:
5205 if not opts.get('force') and bheads and p1 not in bheads:
5209 raise error.Abort(_('working directory is not at a branch head '
5206 raise error.Abort(_('working directory is not at a branch head '
5210 '(use -f to force)'))
5207 '(use -f to force)'))
5211 r = scmutil.revsingle(repo, rev_).node()
5208 r = scmutil.revsingle(repo, rev_).node()
5212
5209
5213 if not message:
5210 if not message:
5214 # we don't translate commit messages
5211 # we don't translate commit messages
5215 message = ('Added tag %s for changeset %s' %
5212 message = ('Added tag %s for changeset %s' %
5216 (', '.join(names), short(r)))
5213 (', '.join(names), short(r)))
5217
5214
5218 date = opts.get('date')
5215 date = opts.get('date')
5219 if date:
5216 if date:
5220 date = util.parsedate(date)
5217 date = util.parsedate(date)
5221
5218
5222 if opts.get('remove'):
5219 if opts.get('remove'):
5223 editform = 'tag.remove'
5220 editform = 'tag.remove'
5224 else:
5221 else:
5225 editform = 'tag.add'
5222 editform = 'tag.add'
5226 editor = cmdutil.getcommiteditor(editform=editform, **opts)
5223 editor = cmdutil.getcommiteditor(editform=editform, **opts)
5227
5224
5228 # don't allow tagging the null rev
5225 # don't allow tagging the null rev
5229 if (not opts.get('remove') and
5226 if (not opts.get('remove') and
5230 scmutil.revsingle(repo, rev_).rev() == nullrev):
5227 scmutil.revsingle(repo, rev_).rev() == nullrev):
5231 raise error.Abort(_("cannot tag null revision"))
5228 raise error.Abort(_("cannot tag null revision"))
5232
5229
5233 tagsmod.tag(repo, names, r, message, opts.get('local'),
5230 tagsmod.tag(repo, names, r, message, opts.get('local'),
5234 opts.get('user'), date, editor=editor)
5231 opts.get('user'), date, editor=editor)
5235 finally:
5232 finally:
5236 release(lock, wlock)
5233 release(lock, wlock)
5237
5234
5238 @command('tags', formatteropts, '')
5235 @command('tags', formatteropts, '')
5239 def tags(ui, repo, **opts):
5236 def tags(ui, repo, **opts):
5240 """list repository tags
5237 """list repository tags
5241
5238
5242 This lists both regular and local tags. When the -v/--verbose
5239 This lists both regular and local tags. When the -v/--verbose
5243 switch is used, a third column "local" is printed for local tags.
5240 switch is used, a third column "local" is printed for local tags.
5244 When the -q/--quiet switch is used, only the tag name is printed.
5241 When the -q/--quiet switch is used, only the tag name is printed.
5245
5242
5246 Returns 0 on success.
5243 Returns 0 on success.
5247 """
5244 """
5248
5245
5249 opts = pycompat.byteskwargs(opts)
5246 opts = pycompat.byteskwargs(opts)
5250 ui.pager('tags')
5247 ui.pager('tags')
5251 fm = ui.formatter('tags', opts)
5248 fm = ui.formatter('tags', opts)
5252 hexfunc = fm.hexfunc
5249 hexfunc = fm.hexfunc
5253 tagtype = ""
5250 tagtype = ""
5254
5251
5255 for t, n in reversed(repo.tagslist()):
5252 for t, n in reversed(repo.tagslist()):
5256 hn = hexfunc(n)
5253 hn = hexfunc(n)
5257 label = 'tags.normal'
5254 label = 'tags.normal'
5258 tagtype = ''
5255 tagtype = ''
5259 if repo.tagtype(t) == 'local':
5256 if repo.tagtype(t) == 'local':
5260 label = 'tags.local'
5257 label = 'tags.local'
5261 tagtype = 'local'
5258 tagtype = 'local'
5262
5259
5263 fm.startitem()
5260 fm.startitem()
5264 fm.write('tag', '%s', t, label=label)
5261 fm.write('tag', '%s', t, label=label)
5265 fmt = " " * (30 - encoding.colwidth(t)) + ' %5d:%s'
5262 fmt = " " * (30 - encoding.colwidth(t)) + ' %5d:%s'
5266 fm.condwrite(not ui.quiet, 'rev node', fmt,
5263 fm.condwrite(not ui.quiet, 'rev node', fmt,
5267 repo.changelog.rev(n), hn, label=label)
5264 repo.changelog.rev(n), hn, label=label)
5268 fm.condwrite(ui.verbose and tagtype, 'type', ' %s',
5265 fm.condwrite(ui.verbose and tagtype, 'type', ' %s',
5269 tagtype, label=label)
5266 tagtype, label=label)
5270 fm.plain('\n')
5267 fm.plain('\n')
5271 fm.end()
5268 fm.end()
5272
5269
5273 @command('tip',
5270 @command('tip',
5274 [('p', 'patch', None, _('show patch')),
5271 [('p', 'patch', None, _('show patch')),
5275 ('g', 'git', None, _('use git extended diff format')),
5272 ('g', 'git', None, _('use git extended diff format')),
5276 ] + templateopts,
5273 ] + templateopts,
5277 _('[-p] [-g]'))
5274 _('[-p] [-g]'))
5278 def tip(ui, repo, **opts):
5275 def tip(ui, repo, **opts):
5279 """show the tip revision (DEPRECATED)
5276 """show the tip revision (DEPRECATED)
5280
5277
5281 The tip revision (usually just called the tip) is the changeset
5278 The tip revision (usually just called the tip) is the changeset
5282 most recently added to the repository (and therefore the most
5279 most recently added to the repository (and therefore the most
5283 recently changed head).
5280 recently changed head).
5284
5281
5285 If you have just made a commit, that commit will be the tip. If
5282 If you have just made a commit, that commit will be the tip. If
5286 you have just pulled changes from another repository, the tip of
5283 you have just pulled changes from another repository, the tip of
5287 that repository becomes the current tip. The "tip" tag is special
5284 that repository becomes the current tip. The "tip" tag is special
5288 and cannot be renamed or assigned to a different changeset.
5285 and cannot be renamed or assigned to a different changeset.
5289
5286
5290 This command is deprecated; please use :hg:`heads` instead.
5287 This command is deprecated; please use :hg:`heads` instead.
5291
5288
5292 Returns 0 on success.
5289 Returns 0 on success.
5293 """
5290 """
5294 opts = pycompat.byteskwargs(opts)
5291 opts = pycompat.byteskwargs(opts)
5295 displayer = cmdutil.show_changeset(ui, repo, opts)
5292 displayer = cmdutil.show_changeset(ui, repo, opts)
5296 displayer.show(repo['tip'])
5293 displayer.show(repo['tip'])
5297 displayer.close()
5294 displayer.close()
5298
5295
5299 @command('unbundle',
5296 @command('unbundle',
5300 [('u', 'update', None,
5297 [('u', 'update', None,
5301 _('update to new branch head if changesets were unbundled'))],
5298 _('update to new branch head if changesets were unbundled'))],
5302 _('[-u] FILE...'))
5299 _('[-u] FILE...'))
5303 def unbundle(ui, repo, fname1, *fnames, **opts):
5300 def unbundle(ui, repo, fname1, *fnames, **opts):
5304 """apply one or more bundle files
5301 """apply one or more bundle files
5305
5302
5306 Apply one or more bundle files generated by :hg:`bundle`.
5303 Apply one or more bundle files generated by :hg:`bundle`.
5307
5304
5308 Returns 0 on success, 1 if an update has unresolved files.
5305 Returns 0 on success, 1 if an update has unresolved files.
5309 """
5306 """
5310 fnames = (fname1,) + fnames
5307 fnames = (fname1,) + fnames
5311
5308
5312 with repo.lock():
5309 with repo.lock():
5313 for fname in fnames:
5310 for fname in fnames:
5314 f = hg.openpath(ui, fname)
5311 f = hg.openpath(ui, fname)
5315 gen = exchange.readbundle(ui, f, fname)
5312 gen = exchange.readbundle(ui, f, fname)
5316 if isinstance(gen, bundle2.unbundle20):
5313 if isinstance(gen, bundle2.unbundle20):
5317 tr = repo.transaction('unbundle')
5314 tr = repo.transaction('unbundle')
5318 try:
5315 try:
5319 op = bundle2.applybundle(repo, gen, tr, source='unbundle',
5316 op = bundle2.applybundle(repo, gen, tr, source='unbundle',
5320 url='bundle:' + fname)
5317 url='bundle:' + fname)
5321 tr.close()
5318 tr.close()
5322 except error.BundleUnknownFeatureError as exc:
5319 except error.BundleUnknownFeatureError as exc:
5323 raise error.Abort(_('%s: unknown bundle feature, %s')
5320 raise error.Abort(_('%s: unknown bundle feature, %s')
5324 % (fname, exc),
5321 % (fname, exc),
5325 hint=_("see https://mercurial-scm.org/"
5322 hint=_("see https://mercurial-scm.org/"
5326 "wiki/BundleFeature for more "
5323 "wiki/BundleFeature for more "
5327 "information"))
5324 "information"))
5328 finally:
5325 finally:
5329 if tr:
5326 if tr:
5330 tr.release()
5327 tr.release()
5331 changes = [r.get('return', 0)
5328 changes = [r.get('return', 0)
5332 for r in op.records['changegroup']]
5329 for r in op.records['changegroup']]
5333 modheads = changegroup.combineresults(changes)
5330 modheads = changegroup.combineresults(changes)
5334 elif isinstance(gen, streamclone.streamcloneapplier):
5331 elif isinstance(gen, streamclone.streamcloneapplier):
5335 raise error.Abort(
5332 raise error.Abort(
5336 _('packed bundles cannot be applied with '
5333 _('packed bundles cannot be applied with '
5337 '"hg unbundle"'),
5334 '"hg unbundle"'),
5338 hint=_('use "hg debugapplystreamclonebundle"'))
5335 hint=_('use "hg debugapplystreamclonebundle"'))
5339 else:
5336 else:
5340 modheads = gen.apply(repo, 'unbundle', 'bundle:' + fname)
5337 modheads = gen.apply(repo, 'unbundle', 'bundle:' + fname)
5341
5338
5342 return postincoming(ui, repo, modheads, opts.get(r'update'), None, None)
5339 return postincoming(ui, repo, modheads, opts.get(r'update'), None, None)
5343
5340
5344 @command('^update|up|checkout|co',
5341 @command('^update|up|checkout|co',
5345 [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
5342 [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
5346 ('c', 'check', None, _('require clean working directory')),
5343 ('c', 'check', None, _('require clean working directory')),
5347 ('m', 'merge', None, _('merge uncommitted changes')),
5344 ('m', 'merge', None, _('merge uncommitted changes')),
5348 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
5345 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
5349 ('r', 'rev', '', _('revision'), _('REV'))
5346 ('r', 'rev', '', _('revision'), _('REV'))
5350 ] + mergetoolopts,
5347 ] + mergetoolopts,
5351 _('[-C|-c|-m] [-d DATE] [[-r] REV]'))
5348 _('[-C|-c|-m] [-d DATE] [[-r] REV]'))
5352 def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False,
5349 def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False,
5353 merge=None, tool=None):
5350 merge=None, tool=None):
5354 """update working directory (or switch revisions)
5351 """update working directory (or switch revisions)
5355
5352
5356 Update the repository's working directory to the specified
5353 Update the repository's working directory to the specified
5357 changeset. If no changeset is specified, update to the tip of the
5354 changeset. If no changeset is specified, update to the tip of the
5358 current named branch and move the active bookmark (see :hg:`help
5355 current named branch and move the active bookmark (see :hg:`help
5359 bookmarks`).
5356 bookmarks`).
5360
5357
5361 Update sets the working directory's parent revision to the specified
5358 Update sets the working directory's parent revision to the specified
5362 changeset (see :hg:`help parents`).
5359 changeset (see :hg:`help parents`).
5363
5360
5364 If the changeset is not a descendant or ancestor of the working
5361 If the changeset is not a descendant or ancestor of the working
5365 directory's parent and there are uncommitted changes, the update is
5362 directory's parent and there are uncommitted changes, the update is
5366 aborted. With the -c/--check option, the working directory is checked
5363 aborted. With the -c/--check option, the working directory is checked
5367 for uncommitted changes; if none are found, the working directory is
5364 for uncommitted changes; if none are found, the working directory is
5368 updated to the specified changeset.
5365 updated to the specified changeset.
5369
5366
5370 .. container:: verbose
5367 .. container:: verbose
5371
5368
5372 The -C/--clean, -c/--check, and -m/--merge options control what
5369 The -C/--clean, -c/--check, and -m/--merge options control what
5373 happens if the working directory contains uncommitted changes.
5370 happens if the working directory contains uncommitted changes.
5374 At most one of them can be specified.
5371 At most one of them can be specified.
5375
5372
5376 1. If no option is specified, and if
5373 1. If no option is specified, and if
5377 the requested changeset is an ancestor or descendant of
5374 the requested changeset is an ancestor or descendant of
5378 the working directory's parent, the uncommitted changes
5375 the working directory's parent, the uncommitted changes
5379 are merged into the requested changeset and the merged
5376 are merged into the requested changeset and the merged
5380 result is left uncommitted. If the requested changeset is
5377 result is left uncommitted. If the requested changeset is
5381 not an ancestor or descendant (that is, it is on another
5378 not an ancestor or descendant (that is, it is on another
5382 branch), the update is aborted and the uncommitted changes
5379 branch), the update is aborted and the uncommitted changes
5383 are preserved.
5380 are preserved.
5384
5381
5385 2. With the -m/--merge option, the update is allowed even if the
5382 2. With the -m/--merge option, the update is allowed even if the
5386 requested changeset is not an ancestor or descendant of
5383 requested changeset is not an ancestor or descendant of
5387 the working directory's parent.
5384 the working directory's parent.
5388
5385
5389 3. With the -c/--check option, the update is aborted and the
5386 3. With the -c/--check option, the update is aborted and the
5390 uncommitted changes are preserved.
5387 uncommitted changes are preserved.
5391
5388
5392 4. With the -C/--clean option, uncommitted changes are discarded and
5389 4. With the -C/--clean option, uncommitted changes are discarded and
5393 the working directory is updated to the requested changeset.
5390 the working directory is updated to the requested changeset.
5394
5391
5395 To cancel an uncommitted merge (and lose your changes), use
5392 To cancel an uncommitted merge (and lose your changes), use
5396 :hg:`update --clean .`.
5393 :hg:`update --clean .`.
5397
5394
5398 Use null as the changeset to remove the working directory (like
5395 Use null as the changeset to remove the working directory (like
5399 :hg:`clone -U`).
5396 :hg:`clone -U`).
5400
5397
5401 If you want to revert just one file to an older revision, use
5398 If you want to revert just one file to an older revision, use
5402 :hg:`revert [-r REV] NAME`.
5399 :hg:`revert [-r REV] NAME`.
5403
5400
5404 See :hg:`help dates` for a list of formats valid for -d/--date.
5401 See :hg:`help dates` for a list of formats valid for -d/--date.
5405
5402
5406 Returns 0 on success, 1 if there are unresolved files.
5403 Returns 0 on success, 1 if there are unresolved files.
5407 """
5404 """
5408 if rev and node:
5405 if rev and node:
5409 raise error.Abort(_("please specify just one revision"))
5406 raise error.Abort(_("please specify just one revision"))
5410
5407
5411 if ui.configbool('commands', 'update.requiredest'):
5408 if ui.configbool('commands', 'update.requiredest'):
5412 if not node and not rev and not date:
5409 if not node and not rev and not date:
5413 raise error.Abort(_('you must specify a destination'),
5410 raise error.Abort(_('you must specify a destination'),
5414 hint=_('for example: hg update ".::"'))
5411 hint=_('for example: hg update ".::"'))
5415
5412
5416 if rev is None or rev == '':
5413 if rev is None or rev == '':
5417 rev = node
5414 rev = node
5418
5415
5419 if date and rev is not None:
5416 if date and rev is not None:
5420 raise error.Abort(_("you can't specify a revision and a date"))
5417 raise error.Abort(_("you can't specify a revision and a date"))
5421
5418
5422 if len([x for x in (clean, check, merge) if x]) > 1:
5419 if len([x for x in (clean, check, merge) if x]) > 1:
5423 raise error.Abort(_("can only specify one of -C/--clean, -c/--check, "
5420 raise error.Abort(_("can only specify one of -C/--clean, -c/--check, "
5424 "or -m/merge"))
5421 "or -m/merge"))
5425
5422
5426 updatecheck = None
5423 updatecheck = None
5427 if check:
5424 if check:
5428 updatecheck = 'abort'
5425 updatecheck = 'abort'
5429 elif merge:
5426 elif merge:
5430 updatecheck = 'none'
5427 updatecheck = 'none'
5431
5428
5432 with repo.wlock():
5429 with repo.wlock():
5433 cmdutil.clearunfinished(repo)
5430 cmdutil.clearunfinished(repo)
5434
5431
5435 if date:
5432 if date:
5436 rev = cmdutil.finddate(ui, repo, date)
5433 rev = cmdutil.finddate(ui, repo, date)
5437
5434
5438 # if we defined a bookmark, we have to remember the original name
5435 # if we defined a bookmark, we have to remember the original name
5439 brev = rev
5436 brev = rev
5440 rev = scmutil.revsingle(repo, rev, rev).rev()
5437 rev = scmutil.revsingle(repo, rev, rev).rev()
5441
5438
5442 repo.ui.setconfig('ui', 'forcemerge', tool, 'update')
5439 repo.ui.setconfig('ui', 'forcemerge', tool, 'update')
5443
5440
5444 return hg.updatetotally(ui, repo, rev, brev, clean=clean,
5441 return hg.updatetotally(ui, repo, rev, brev, clean=clean,
5445 updatecheck=updatecheck)
5442 updatecheck=updatecheck)
5446
5443
5447 @command('verify', [])
5444 @command('verify', [])
5448 def verify(ui, repo):
5445 def verify(ui, repo):
5449 """verify the integrity of the repository
5446 """verify the integrity of the repository
5450
5447
5451 Verify the integrity of the current repository.
5448 Verify the integrity of the current repository.
5452
5449
5453 This will perform an extensive check of the repository's
5450 This will perform an extensive check of the repository's
5454 integrity, validating the hashes and checksums of each entry in
5451 integrity, validating the hashes and checksums of each entry in
5455 the changelog, manifest, and tracked files, as well as the
5452 the changelog, manifest, and tracked files, as well as the
5456 integrity of their crosslinks and indices.
5453 integrity of their crosslinks and indices.
5457
5454
5458 Please see https://mercurial-scm.org/wiki/RepositoryCorruption
5455 Please see https://mercurial-scm.org/wiki/RepositoryCorruption
5459 for more information about recovery from corruption of the
5456 for more information about recovery from corruption of the
5460 repository.
5457 repository.
5461
5458
5462 Returns 0 on success, 1 if errors are encountered.
5459 Returns 0 on success, 1 if errors are encountered.
5463 """
5460 """
5464 return hg.verify(repo)
5461 return hg.verify(repo)
5465
5462
5466 @command('version', [] + formatteropts, norepo=True)
5463 @command('version', [] + formatteropts, norepo=True)
5467 def version_(ui, **opts):
5464 def version_(ui, **opts):
5468 """output version and copyright information"""
5465 """output version and copyright information"""
5469 opts = pycompat.byteskwargs(opts)
5466 opts = pycompat.byteskwargs(opts)
5470 if ui.verbose:
5467 if ui.verbose:
5471 ui.pager('version')
5468 ui.pager('version')
5472 fm = ui.formatter("version", opts)
5469 fm = ui.formatter("version", opts)
5473 fm.startitem()
5470 fm.startitem()
5474 fm.write("ver", _("Mercurial Distributed SCM (version %s)\n"),
5471 fm.write("ver", _("Mercurial Distributed SCM (version %s)\n"),
5475 util.version())
5472 util.version())
5476 license = _(
5473 license = _(
5477 "(see https://mercurial-scm.org for more information)\n"
5474 "(see https://mercurial-scm.org for more information)\n"
5478 "\nCopyright (C) 2005-2017 Matt Mackall and others\n"
5475 "\nCopyright (C) 2005-2017 Matt Mackall and others\n"
5479 "This is free software; see the source for copying conditions. "
5476 "This is free software; see the source for copying conditions. "
5480 "There is NO\nwarranty; "
5477 "There is NO\nwarranty; "
5481 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
5478 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
5482 )
5479 )
5483 if not ui.quiet:
5480 if not ui.quiet:
5484 fm.plain(license)
5481 fm.plain(license)
5485
5482
5486 if ui.verbose:
5483 if ui.verbose:
5487 fm.plain(_("\nEnabled extensions:\n\n"))
5484 fm.plain(_("\nEnabled extensions:\n\n"))
5488 # format names and versions into columns
5485 # format names and versions into columns
5489 names = []
5486 names = []
5490 vers = []
5487 vers = []
5491 isinternals = []
5488 isinternals = []
5492 for name, module in extensions.extensions():
5489 for name, module in extensions.extensions():
5493 names.append(name)
5490 names.append(name)
5494 vers.append(extensions.moduleversion(module) or None)
5491 vers.append(extensions.moduleversion(module) or None)
5495 isinternals.append(extensions.ismoduleinternal(module))
5492 isinternals.append(extensions.ismoduleinternal(module))
5496 fn = fm.nested("extensions")
5493 fn = fm.nested("extensions")
5497 if names:
5494 if names:
5498 namefmt = " %%-%ds " % max(len(n) for n in names)
5495 namefmt = " %%-%ds " % max(len(n) for n in names)
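# e.g. with a longest extension name of 12 characters the pattern becomes
# " %-12s ", left-aligning every name in a fixed-width column.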
5499 places = [_("external"), _("internal")]
5496 places = [_("external"), _("internal")]
5500 for n, v, p in zip(names, vers, isinternals):
5497 for n, v, p in zip(names, vers, isinternals):
5501 fn.startitem()
5498 fn.startitem()
5502 fn.condwrite(ui.verbose, "name", namefmt, n)
5499 fn.condwrite(ui.verbose, "name", namefmt, n)
5503 if ui.verbose:
5500 if ui.verbose:
5504 fn.plain("%s " % places[p])
5501 fn.plain("%s " % places[p])
5505 fn.data(bundled=p)
5502 fn.data(bundled=p)
5506 fn.condwrite(ui.verbose and v, "ver", "%s", v)
5503 fn.condwrite(ui.verbose and v, "ver", "%s", v)
5507 if ui.verbose:
5504 if ui.verbose:
5508 fn.plain("\n")
5505 fn.plain("\n")
5509 fn.end()
5506 fn.end()
5510 fm.end()
5507 fm.end()
5511
5508
5512 def loadcmdtable(ui, name, cmdtable):
5509 def loadcmdtable(ui, name, cmdtable):
5513 """Load command functions from specified cmdtable
5510 """Load command functions from specified cmdtable
5514 """
5511 """
5515 overrides = [cmd for cmd in cmdtable if cmd in table]
5512 overrides = [cmd for cmd in cmdtable if cmd in table]
5516 if overrides:
5513 if overrides:
5517 ui.warn(_("extension '%s' overrides commands: %s\n")
5514 ui.warn(_("extension '%s' overrides commands: %s\n")
5518 % (name, " ".join(overrides)))
5515 % (name, " ".join(overrides)))
5519 table.update(cmdtable)
5516 table.update(cmdtable)
@@ -1,2027 +1,2023 b''
1 # exchange.py - utility to exchange data between repos.
1 # exchange.py - utility to exchange data between repos.
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import hashlib
11 import hashlib
12
12
13 from .i18n import _
13 from .i18n import _
14 from .node import (
14 from .node import (
15 hex,
15 hex,
16 nullid,
16 nullid,
17 )
17 )
18 from . import (
18 from . import (
19 base85,
19 base85,
20 bookmarks as bookmod,
20 bookmarks as bookmod,
21 bundle2,
21 bundle2,
22 changegroup,
22 changegroup,
23 discovery,
23 discovery,
24 error,
24 error,
25 lock as lockmod,
25 lock as lockmod,
26 obsolete,
26 obsolete,
27 phases,
27 phases,
28 pushkey,
28 pushkey,
29 scmutil,
29 scmutil,
30 sslutil,
30 sslutil,
31 streamclone,
31 streamclone,
32 tags,
32 tags,
33 url as urlmod,
33 url as urlmod,
34 util,
34 util,
35 )
35 )
36
36
37 urlerr = util.urlerr
37 urlerr = util.urlerr
38 urlreq = util.urlreq
38 urlreq = util.urlreq
39
39
40 # Maps bundle version human names to changegroup versions.
40 # Maps bundle version human names to changegroup versions.
41 _bundlespeccgversions = {'v1': '01',
41 _bundlespeccgversions = {'v1': '01',
42 'v2': '02',
42 'v2': '02',
43 'packed1': 's1',
43 'packed1': 's1',
44 'bundle2': '02', #legacy
44 'bundle2': '02', #legacy
45 }
45 }
46
46
47 # Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
47 # Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
48 _bundlespecv1compengines = set(['gzip', 'bzip2', 'none'])
48 _bundlespecv1compengines = set(['gzip', 'bzip2', 'none'])
49
49
50 def parsebundlespec(repo, spec, strict=True, externalnames=False):
50 def parsebundlespec(repo, spec, strict=True, externalnames=False):
51 """Parse a bundle string specification into parts.
51 """Parse a bundle string specification into parts.
52
52
53 Bundle specifications denote a well-defined bundle/exchange format.
53 Bundle specifications denote a well-defined bundle/exchange format.
54 The content of a given specification should not change over time in
54 The content of a given specification should not change over time in
55 order to ensure that bundles produced by a newer version of Mercurial are
55 order to ensure that bundles produced by a newer version of Mercurial are
56 readable from an older version.
56 readable from an older version.
57
57
58 The string currently has the form:
58 The string currently has the form:
59
59
60 <compression>-<type>[;<parameter0>[;<parameter1>]]
60 <compression>-<type>[;<parameter0>[;<parameter1>]]
61
61
62 Where <compression> is one of the supported compression formats
62 Where <compression> is one of the supported compression formats
63 and <type> is (currently) a version string. A ";" can follow the type and
63 and <type> is (currently) a version string. A ";" can follow the type and
64 all text afterwards is interpreted as URI encoded, ";" delimited key=value
64 all text afterwards is interpreted as URI encoded, ";" delimited key=value
65 pairs.
65 pairs.
66
66
67 If ``strict`` is True (the default) <compression> is required. Otherwise,
67 If ``strict`` is True (the default) <compression> is required. Otherwise,
68 it is optional.
68 it is optional.
69
69
70 If ``externalnames`` is False (the default), the human-centric names will
70 If ``externalnames`` is False (the default), the human-centric names will
71 be converted to their internal representation.
71 be converted to their internal representation.
72
72
73 Returns a 3-tuple of (compression, version, parameters). Compression will
73 Returns a 3-tuple of (compression, version, parameters). Compression will
74 be ``None`` if not in strict mode and a compression isn't defined.
74 be ``None`` if not in strict mode and a compression isn't defined.
75
75
76 An ``InvalidBundleSpecification`` is raised when the specification is
76 An ``InvalidBundleSpecification`` is raised when the specification is
77 not syntactically well formed.
77 not syntactically well formed.
78
78
79 An ``UnsupportedBundleSpecification`` is raised when the compression or
79 An ``UnsupportedBundleSpecification`` is raised when the compression or
80 bundle type/version is not recognized.
80 bundle type/version is not recognized.
81
81
82 Note: this function will likely eventually return a more complex data
82 Note: this function will likely eventually return a more complex data
83 structure, including bundle2 part information.
83 structure, including bundle2 part information.
84 """
84 """
85 def parseparams(s):
85 def parseparams(s):
86 if ';' not in s:
86 if ';' not in s:
87 return s, {}
87 return s, {}
88
88
89 params = {}
89 params = {}
90 version, paramstr = s.split(';', 1)
90 version, paramstr = s.split(';', 1)
91
91
92 for p in paramstr.split(';'):
92 for p in paramstr.split(';'):
93 if '=' not in p:
93 if '=' not in p:
94 raise error.InvalidBundleSpecification(
94 raise error.InvalidBundleSpecification(
95 _('invalid bundle specification: '
95 _('invalid bundle specification: '
96 'missing "=" in parameter: %s') % p)
96 'missing "=" in parameter: %s') % p)
97
97
98 key, value = p.split('=', 1)
98 key, value = p.split('=', 1)
99 key = urlreq.unquote(key)
99 key = urlreq.unquote(key)
100 value = urlreq.unquote(value)
100 value = urlreq.unquote(value)
101 params[key] = value
101 params[key] = value
102
102
103 return version, params
103 return version, params
104
104
105
105
106 if strict and '-' not in spec:
106 if strict and '-' not in spec:
107 raise error.InvalidBundleSpecification(
107 raise error.InvalidBundleSpecification(
108 _('invalid bundle specification; '
108 _('invalid bundle specification; '
109 'must be prefixed with compression: %s') % spec)
109 'must be prefixed with compression: %s') % spec)
110
110
111 if '-' in spec:
111 if '-' in spec:
112 compression, version = spec.split('-', 1)
112 compression, version = spec.split('-', 1)
113
113
114 if compression not in util.compengines.supportedbundlenames:
114 if compression not in util.compengines.supportedbundlenames:
115 raise error.UnsupportedBundleSpecification(
115 raise error.UnsupportedBundleSpecification(
116 _('%s compression is not supported') % compression)
116 _('%s compression is not supported') % compression)
117
117
118 version, params = parseparams(version)
118 version, params = parseparams(version)
119
119
120 if version not in _bundlespeccgversions:
120 if version not in _bundlespeccgversions:
121 raise error.UnsupportedBundleSpecification(
121 raise error.UnsupportedBundleSpecification(
122 _('%s is not a recognized bundle version') % version)
122 _('%s is not a recognized bundle version') % version)
123 else:
123 else:
124 # Value could be just the compression or just the version, in which
124 # Value could be just the compression or just the version, in which
125 # case some defaults are assumed (but only when not in strict mode).
125 # case some defaults are assumed (but only when not in strict mode).
126 assert not strict
126 assert not strict
127
127
128 spec, params = parseparams(spec)
128 spec, params = parseparams(spec)
129
129
130 if spec in util.compengines.supportedbundlenames:
130 if spec in util.compengines.supportedbundlenames:
131 compression = spec
131 compression = spec
132 version = 'v1'
132 version = 'v1'
133 # Generaldelta repos require v2.
133 # Generaldelta repos require v2.
134 if 'generaldelta' in repo.requirements:
134 if 'generaldelta' in repo.requirements:
135 version = 'v2'
135 version = 'v2'
136 # Modern compression engines require v2.
136 # Modern compression engines require v2.
137 if compression not in _bundlespecv1compengines:
137 if compression not in _bundlespecv1compengines:
138 version = 'v2'
138 version = 'v2'
139 elif spec in _bundlespeccgversions:
139 elif spec in _bundlespeccgversions:
140 if spec == 'packed1':
140 if spec == 'packed1':
141 compression = 'none'
141 compression = 'none'
142 else:
142 else:
143 compression = 'bzip2'
143 compression = 'bzip2'
144 version = spec
144 version = spec
145 else:
145 else:
146 raise error.UnsupportedBundleSpecification(
146 raise error.UnsupportedBundleSpecification(
147 _('%s is not a recognized bundle specification') % spec)
147 _('%s is not a recognized bundle specification') % spec)
148
148
149 # Bundle version 1 only supports a known set of compression engines.
149 # Bundle version 1 only supports a known set of compression engines.
150 if version == 'v1' and compression not in _bundlespecv1compengines:
150 if version == 'v1' and compression not in _bundlespecv1compengines:
151 raise error.UnsupportedBundleSpecification(
151 raise error.UnsupportedBundleSpecification(
152 _('compression engine %s is not supported on v1 bundles') %
152 _('compression engine %s is not supported on v1 bundles') %
153 compression)
153 compression)
154
154
155 # The specification for packed1 can optionally declare the data formats
155 # The specification for packed1 can optionally declare the data formats
156 # required to apply it. If we see this metadata, compare against what the
156 # required to apply it. If we see this metadata, compare against what the
157 # repo supports and error if the bundle isn't compatible.
157 # repo supports and error if the bundle isn't compatible.
158 if version == 'packed1' and 'requirements' in params:
158 if version == 'packed1' and 'requirements' in params:
159 requirements = set(params['requirements'].split(','))
159 requirements = set(params['requirements'].split(','))
160 missingreqs = requirements - repo.supportedformats
160 missingreqs = requirements - repo.supportedformats
161 if missingreqs:
161 if missingreqs:
162 raise error.UnsupportedBundleSpecification(
162 raise error.UnsupportedBundleSpecification(
163 _('missing support for repository features: %s') %
163 _('missing support for repository features: %s') %
164 ', '.join(sorted(missingreqs)))
164 ', '.join(sorted(missingreqs)))
165
165
166 if not externalnames:
166 if not externalnames:
167 engine = util.compengines.forbundlename(compression)
167 engine = util.compengines.forbundlename(compression)
168 compression = engine.bundletype()[1]
168 compression = engine.bundletype()[1]
169 version = _bundlespeccgversions[version]
169 version = _bundlespeccgversions[version]
170 return compression, version, params
170 return compression, version, params
171
171
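# Illustrative sketch (not part of the original file): how a caller might use
# parsebundlespec(). The spec strings below are assumptions chosen only for
# illustration; the internal names ultimately come from util.compengines and
# _bundlespeccgversions.
def _demoparsebundlespec(repo):
    # 'gzip-v2' -> roughly ('GZ', '02', {}) after internal-name conversion
    comp, cgversion, params = parsebundlespec(repo, 'gzip-v2')
    # with strict=False a bare compression name gets a default version
    comp, cgversion, params = parsebundlespec(repo, 'bzip2', strict=False)
    # externalnames=True keeps the human-facing names ('none', 'v2', ...)
    return parsebundlespec(repo, 'none-v2', externalnames=True)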
172 def readbundle(ui, fh, fname, vfs=None):
172 def readbundle(ui, fh, fname, vfs=None):
173 header = changegroup.readexactly(fh, 4)
173 header = changegroup.readexactly(fh, 4)
174
174
175 alg = None
175 alg = None
176 if not fname:
176 if not fname:
177 fname = "stream"
177 fname = "stream"
178 if not header.startswith('HG') and header.startswith('\0'):
178 if not header.startswith('HG') and header.startswith('\0'):
179 fh = changegroup.headerlessfixup(fh, header)
179 fh = changegroup.headerlessfixup(fh, header)
180 header = "HG10"
180 header = "HG10"
181 alg = 'UN'
181 alg = 'UN'
182 elif vfs:
182 elif vfs:
183 fname = vfs.join(fname)
183 fname = vfs.join(fname)
184
184
185 magic, version = header[0:2], header[2:4]
185 magic, version = header[0:2], header[2:4]
186
186
187 if magic != 'HG':
187 if magic != 'HG':
188 raise error.Abort(_('%s: not a Mercurial bundle') % fname)
188 raise error.Abort(_('%s: not a Mercurial bundle') % fname)
189 if version == '10':
189 if version == '10':
190 if alg is None:
190 if alg is None:
191 alg = changegroup.readexactly(fh, 2)
191 alg = changegroup.readexactly(fh, 2)
192 return changegroup.cg1unpacker(fh, alg)
192 return changegroup.cg1unpacker(fh, alg)
193 elif version.startswith('2'):
193 elif version.startswith('2'):
194 return bundle2.getunbundler(ui, fh, magicstring=magic + version)
194 return bundle2.getunbundler(ui, fh, magicstring=magic + version)
195 elif version == 'S1':
195 elif version == 'S1':
196 return streamclone.streamcloneapplier(fh)
196 return streamclone.streamcloneapplier(fh)
197 else:
197 else:
198 raise error.Abort(_('%s: unknown bundle version %s') % (fname, version))
198 raise error.Abort(_('%s: unknown bundle version %s') % (fname, version))
199
199
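# Illustrative sketch (assumption, not part of the original file):
# readbundle() dispatches on the 4-byte magic, returning a
# changegroup.cg1unpacker for 'HG10' bundles, a bundle2 unbundler when the
# version starts with '2', and a stream clone applier for 'HGS1'. A
# hypothetical caller:
def _demoreadbundle(ui, vfs, fname):
    fh = vfs(fname)
    # the returned object is applied later, e.g. through bundle2.processbundle
    # or the unpacker's apply(), depending on its type
    return readbundle(ui, fh, fname, vfs=vfs)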
200 def getbundlespec(ui, fh):
200 def getbundlespec(ui, fh):
201 """Infer the bundlespec from a bundle file handle.
201 """Infer the bundlespec from a bundle file handle.
202
202
203 The input file handle is seeked and the original seek position is not
203 The input file handle is seeked and the original seek position is not
204 restored.
204 restored.
205 """
205 """
206 def speccompression(alg):
206 def speccompression(alg):
207 try:
207 try:
208 return util.compengines.forbundletype(alg).bundletype()[0]
208 return util.compengines.forbundletype(alg).bundletype()[0]
209 except KeyError:
209 except KeyError:
210 return None
210 return None
211
211
212 b = readbundle(ui, fh, None)
212 b = readbundle(ui, fh, None)
213 if isinstance(b, changegroup.cg1unpacker):
213 if isinstance(b, changegroup.cg1unpacker):
214 alg = b._type
214 alg = b._type
215 if alg == '_truncatedBZ':
215 if alg == '_truncatedBZ':
216 alg = 'BZ'
216 alg = 'BZ'
217 comp = speccompression(alg)
217 comp = speccompression(alg)
218 if not comp:
218 if not comp:
219 raise error.Abort(_('unknown compression algorithm: %s') % alg)
219 raise error.Abort(_('unknown compression algorithm: %s') % alg)
220 return '%s-v1' % comp
220 return '%s-v1' % comp
221 elif isinstance(b, bundle2.unbundle20):
221 elif isinstance(b, bundle2.unbundle20):
222 if 'Compression' in b.params:
222 if 'Compression' in b.params:
223 comp = speccompression(b.params['Compression'])
223 comp = speccompression(b.params['Compression'])
224 if not comp:
224 if not comp:
225 raise error.Abort(_('unknown compression algorithm: %s') % comp)
225 raise error.Abort(_('unknown compression algorithm: %s') % comp)
226 else:
226 else:
227 comp = 'none'
227 comp = 'none'
228
228
229 version = None
229 version = None
230 for part in b.iterparts():
230 for part in b.iterparts():
231 if part.type == 'changegroup':
231 if part.type == 'changegroup':
232 version = part.params['version']
232 version = part.params['version']
233 if version in ('01', '02'):
233 if version in ('01', '02'):
234 version = 'v2'
234 version = 'v2'
235 else:
235 else:
236 raise error.Abort(_('changegroup version %s does not have '
236 raise error.Abort(_('changegroup version %s does not have '
237 'a known bundlespec') % version,
237 'a known bundlespec') % version,
238 hint=_('try upgrading your Mercurial '
238 hint=_('try upgrading your Mercurial '
239 'client'))
239 'client'))
240
240
241 if not version:
241 if not version:
242 raise error.Abort(_('could not identify changegroup version in '
242 raise error.Abort(_('could not identify changegroup version in '
243 'bundle'))
243 'bundle'))
244
244
245 return '%s-%s' % (comp, version)
245 return '%s-%s' % (comp, version)
246 elif isinstance(b, streamclone.streamcloneapplier):
246 elif isinstance(b, streamclone.streamcloneapplier):
247 requirements = streamclone.readbundle1header(fh)[2]
247 requirements = streamclone.readbundle1header(fh)[2]
248 params = 'requirements=%s' % ','.join(sorted(requirements))
248 params = 'requirements=%s' % ','.join(sorted(requirements))
249 return 'none-packed1;%s' % urlreq.quote(params)
249 return 'none-packed1;%s' % urlreq.quote(params)
250 else:
250 else:
251 raise error.Abort(_('unknown bundle type: %s') % b)
251 raise error.Abort(_('unknown bundle type: %s') % b)
252
252
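# Illustrative sketch (assumption): inferring the spec string of an on-disk
# bundle, for example to report it back to the user. The path is hypothetical.
def _demogetbundlespec(ui, path):
    with open(path, 'rb') as fh:
        # e.g. 'bzip2-v1' for an old-style bundle, 'none-packed1;...' for a
        # stream clone bundle
        return getbundlespec(ui, fh)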
253 def buildobsmarkerspart(bundler, markers):
253 def buildobsmarkerspart(bundler, markers):
254 """add an obsmarker part to the bundler with <markers>
254 """add an obsmarker part to the bundler with <markers>
255
255
256 No part is created if markers is empty.
256 No part is created if markers is empty.
257 Raises ValueError if the bundler doesn't support any known obsmarker format.
257 Raises ValueError if the bundler doesn't support any known obsmarker format.
258 """
258 """
259 if markers:
259 if markers:
260 remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
260 remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
261 version = obsolete.commonversion(remoteversions)
261 version = obsolete.commonversion(remoteversions)
262 if version is None:
262 if version is None:
263 raise ValueError('bundler does not support common obsmarker format')
263 raise ValueError('bundler does not support common obsmarker format')
264 stream = obsolete.encodemarkers(markers, True, version=version)
264 stream = obsolete.encodemarkers(markers, True, version=version)
265 return bundler.newpart('obsmarkers', data=stream)
265 return bundler.newpart('obsmarkers', data=stream)
266 return None
266 return None
267
267
268 def _computeoutgoing(repo, heads, common):
268 def _computeoutgoing(repo, heads, common):
269 """Computes which revs are outgoing given a set of common
269 """Computes which revs are outgoing given a set of common
270 and a set of heads.
270 and a set of heads.
271
271
272 This is a separate function so extensions can have access to
272 This is a separate function so extensions can have access to
273 the logic.
273 the logic.
274
274
275 Returns a discovery.outgoing object.
275 Returns a discovery.outgoing object.
276 """
276 """
277 cl = repo.changelog
277 cl = repo.changelog
278 if common:
278 if common:
279 hasnode = cl.hasnode
279 hasnode = cl.hasnode
280 common = [n for n in common if hasnode(n)]
280 common = [n for n in common if hasnode(n)]
281 else:
281 else:
282 common = [nullid]
282 common = [nullid]
283 if not heads:
283 if not heads:
284 heads = cl.heads()
284 heads = cl.heads()
285 return discovery.outgoing(repo, common, heads)
285 return discovery.outgoing(repo, common, heads)
286
286
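# Illustrative sketch (assumption): extensions can reuse _computeoutgoing() to
# find out what a peer is missing. 'heads' and 'common' would normally come
# from discovery against that peer; falsy values mean "all heads" and "only
# the null revision in common" respectively.
def _demooutgoing(repo, heads=None, common=None):
    outgoing = _computeoutgoing(repo, heads or [], common or [])
    # outgoing.missing lists the changesets the peer lacks,
    # outgoing.missingheads their heads
    return outgoing.missing, outgoing.missingheads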
287 def _forcebundle1(op):
287 def _forcebundle1(op):
288 """return true if a pull/push must use bundle1
288 """return true if a pull/push must use bundle1
289
289
290 This function is used to allow testing of the older bundle version"""
290 This function is used to allow testing of the older bundle version"""
291 ui = op.repo.ui
291 ui = op.repo.ui
292 forcebundle1 = False
292 forcebundle1 = False
293 # The goal of this config is to allow developers to choose the bundle
293 # The goal of this config is to allow developers to choose the bundle
294 # version used during exchange. This is especially handy during tests.
294 # version used during exchange. This is especially handy during tests.
295 # Value is a list of bundle versions to pick from; the highest version
295 # Value is a list of bundle versions to pick from; the highest version
296 # should be used.
296 # should be used.
297 #
297 #
298 # developer config: devel.legacy.exchange
298 # developer config: devel.legacy.exchange
299 exchange = ui.configlist('devel', 'legacy.exchange')
299 exchange = ui.configlist('devel', 'legacy.exchange')
300 forcebundle1 = 'bundle2' not in exchange and 'bundle1' in exchange
300 forcebundle1 = 'bundle2' not in exchange and 'bundle1' in exchange
301 return forcebundle1 or not op.remote.capable('bundle2')
301 return forcebundle1 or not op.remote.capable('bundle2')
302
302
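# Illustrative example (assumption, not part of the original file): in tests,
# the developer config below forces the legacy exchange path, so
# _forcebundle1() returns True even against a bundle2-capable remote:
#
#   [devel]
#   legacy.exchange = bundle1
#
# Listing 'bundle2' in the value (alone or alongside 'bundle1') keeps bundle2
# enabled, since its presence takes precedence in the check above.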
303 class pushoperation(object):
303 class pushoperation(object):
304 """A object that represent a single push operation
304 """A object that represent a single push operation
305
305
306 Its purpose is to carry push related state and very common operations.
306 Its purpose is to carry push related state and very common operations.
307
307
308 A new pushoperation should be created at the beginning of each push and
308 A new pushoperation should be created at the beginning of each push and
309 discarded afterward.
309 discarded afterward.
310 """
310 """
311
311
312 def __init__(self, repo, remote, force=False, revs=None, newbranch=False,
312 def __init__(self, repo, remote, force=False, revs=None, newbranch=False,
313 bookmarks=()):
313 bookmarks=()):
314 # repo we push from
314 # repo we push from
315 self.repo = repo
315 self.repo = repo
316 self.ui = repo.ui
316 self.ui = repo.ui
317 # repo we push to
317 # repo we push to
318 self.remote = remote
318 self.remote = remote
319 # force option provided
319 # force option provided
320 self.force = force
320 self.force = force
321 # revs to be pushed (None is "all")
321 # revs to be pushed (None is "all")
322 self.revs = revs
322 self.revs = revs
323 # bookmark explicitly pushed
323 # bookmark explicitly pushed
324 self.bookmarks = bookmarks
324 self.bookmarks = bookmarks
325 # allow push of new branch
325 # allow push of new branch
326 self.newbranch = newbranch
326 self.newbranch = newbranch
327 # did a local lock get acquired?
327 # did a local lock get acquired?
328 self.locallocked = None
328 self.locallocked = None
329 # steps already performed
329 # steps already performed
330 # (used to check which steps have already been performed through bundle2)
330 # (used to check which steps have already been performed through bundle2)
331 self.stepsdone = set()
331 self.stepsdone = set()
332 # Integer version of the changegroup push result
332 # Integer version of the changegroup push result
333 # - None means nothing to push
333 # - None means nothing to push
334 # - 0 means HTTP error
334 # - 0 means HTTP error
335 # - 1 means we pushed and remote head count is unchanged *or*
335 # - 1 means we pushed and remote head count is unchanged *or*
336 # we have outgoing changesets but refused to push
336 # we have outgoing changesets but refused to push
337 # - other values as described by addchangegroup()
337 # - other values as described by addchangegroup()
338 self.cgresult = None
338 self.cgresult = None
339 # Boolean value for the bookmark push
339 # Boolean value for the bookmark push
340 self.bkresult = None
340 self.bkresult = None
341 # discovery.outgoing object (contains common and outgoing data)
341 # discovery.outgoing object (contains common and outgoing data)
342 self.outgoing = None
342 self.outgoing = None
343 # all remote heads before the push
343 # all remote heads before the push
344 self.remoteheads = None
344 self.remoteheads = None
345 # testable as a boolean indicating if any nodes are missing locally.
345 # testable as a boolean indicating if any nodes are missing locally.
346 self.incoming = None
346 self.incoming = None
347 # phase changes that must be pushed alongside the changesets
347 # phase changes that must be pushed alongside the changesets
348 self.outdatedphases = None
348 self.outdatedphases = None
349 # phase changes that must be pushed if the changeset push fails
349 # phase changes that must be pushed if the changeset push fails
350 self.fallbackoutdatedphases = None
350 self.fallbackoutdatedphases = None
351 # outgoing obsmarkers
351 # outgoing obsmarkers
352 self.outobsmarkers = set()
352 self.outobsmarkers = set()
353 # outgoing bookmarks
353 # outgoing bookmarks
354 self.outbookmarks = []
354 self.outbookmarks = []
355 # transaction manager
355 # transaction manager
356 self.trmanager = None
356 self.trmanager = None
357 # map { pushkey partid -> callback handling failure}
357 # map { pushkey partid -> callback handling failure}
358 # used to handle exception from mandatory pushkey part failure
358 # used to handle exception from mandatory pushkey part failure
359 self.pkfailcb = {}
359 self.pkfailcb = {}
360
360
361 @util.propertycache
361 @util.propertycache
362 def futureheads(self):
362 def futureheads(self):
363 """future remote heads if the changeset push succeeds"""
363 """future remote heads if the changeset push succeeds"""
364 return self.outgoing.missingheads
364 return self.outgoing.missingheads
365
365
366 @util.propertycache
366 @util.propertycache
367 def fallbackheads(self):
367 def fallbackheads(self):
368 """future remote heads if the changeset push fails"""
368 """future remote heads if the changeset push fails"""
369 if self.revs is None:
369 if self.revs is None:
370 # not target to push, all common are relevant
370 # not target to push, all common are relevant
371 return self.outgoing.commonheads
371 return self.outgoing.commonheads
372 unfi = self.repo.unfiltered()
372 unfi = self.repo.unfiltered()
373 # I want cheads = heads(::missingheads and ::commonheads)
373 # I want cheads = heads(::missingheads and ::commonheads)
374 # (missingheads is revs with secret changesets filtered out)
374 # (missingheads is revs with secret changesets filtered out)
375 #
375 #
376 # This can be expressed as:
376 # This can be expressed as:
377 # cheads = ( (missingheads and ::commonheads)
377 # cheads = ( (missingheads and ::commonheads)
378 # + (commonheads and ::missingheads))"
378 # + (commonheads and ::missingheads))"
379 # )
379 # )
380 #
380 #
381 # while trying to push we already computed the following:
381 # while trying to push we already computed the following:
382 # common = (::commonheads)
382 # common = (::commonheads)
383 # missing = ((commonheads::missingheads) - commonheads)
383 # missing = ((commonheads::missingheads) - commonheads)
384 #
384 #
385 # We can pick:
385 # We can pick:
386 # * missingheads part of common (::commonheads)
386 # * missingheads part of common (::commonheads)
387 common = self.outgoing.common
387 common = self.outgoing.common
388 nm = self.repo.changelog.nodemap
388 nm = self.repo.changelog.nodemap
389 cheads = [node for node in self.revs if nm[node] in common]
389 cheads = [node for node in self.revs if nm[node] in common]
390 # and
390 # and
391 # * commonheads parents on missing
391 # * commonheads parents on missing
392 revset = unfi.set('%ln and parents(roots(%ln))',
392 revset = unfi.set('%ln and parents(roots(%ln))',
393 self.outgoing.commonheads,
393 self.outgoing.commonheads,
394 self.outgoing.missing)
394 self.outgoing.missing)
395 cheads.extend(c.node() for c in revset)
395 cheads.extend(c.node() for c in revset)
396 return cheads
396 return cheads
397
397
398 @property
398 @property
399 def commonheads(self):
399 def commonheads(self):
400 """set of all common heads after changeset bundle push"""
400 """set of all common heads after changeset bundle push"""
401 if self.cgresult:
401 if self.cgresult:
402 return self.futureheads
402 return self.futureheads
403 else:
403 else:
404 return self.fallbackheads
404 return self.fallbackheads
405
405
406 # mapping of messages used when pushing bookmarks
406 # mapping of messages used when pushing bookmarks
407 bookmsgmap = {'update': (_("updating bookmark %s\n"),
407 bookmsgmap = {'update': (_("updating bookmark %s\n"),
408 _('updating bookmark %s failed!\n')),
408 _('updating bookmark %s failed!\n')),
409 'export': (_("exporting bookmark %s\n"),
409 'export': (_("exporting bookmark %s\n"),
410 _('exporting bookmark %s failed!\n')),
410 _('exporting bookmark %s failed!\n')),
411 'delete': (_("deleting remote bookmark %s\n"),
411 'delete': (_("deleting remote bookmark %s\n"),
412 _('deleting remote bookmark %s failed!\n')),
412 _('deleting remote bookmark %s failed!\n')),
413 }
413 }
414
414
415
415
416 def push(repo, remote, force=False, revs=None, newbranch=False, bookmarks=(),
416 def push(repo, remote, force=False, revs=None, newbranch=False, bookmarks=(),
417 opargs=None):
417 opargs=None):
418 '''Push outgoing changesets (limited by revs) from a local
418 '''Push outgoing changesets (limited by revs) from a local
419 repository to remote. Return an integer:
419 repository to remote. Return an integer:
420 - None means nothing to push
420 - None means nothing to push
421 - 0 means HTTP error
421 - 0 means HTTP error
422 - 1 means we pushed and remote head count is unchanged *or*
422 - 1 means we pushed and remote head count is unchanged *or*
423 we have outgoing changesets but refused to push
423 we have outgoing changesets but refused to push
424 - other values as described by addchangegroup()
424 - other values as described by addchangegroup()
425 '''
425 '''
426 if opargs is None:
426 if opargs is None:
427 opargs = {}
427 opargs = {}
428 pushop = pushoperation(repo, remote, force, revs, newbranch, bookmarks,
428 pushop = pushoperation(repo, remote, force, revs, newbranch, bookmarks,
429 **opargs)
429 **opargs)
430 if pushop.remote.local():
430 if pushop.remote.local():
431 missing = (set(pushop.repo.requirements)
431 missing = (set(pushop.repo.requirements)
432 - pushop.remote.local().supported)
432 - pushop.remote.local().supported)
433 if missing:
433 if missing:
434 msg = _("required features are not"
434 msg = _("required features are not"
435 " supported in the destination:"
435 " supported in the destination:"
436 " %s") % (', '.join(sorted(missing)))
436 " %s") % (', '.join(sorted(missing)))
437 raise error.Abort(msg)
437 raise error.Abort(msg)
438
438
439 # there are two ways to push to remote repo:
439 # there are two ways to push to remote repo:
440 #
440 #
441 # addchangegroup assumes local user can lock remote
441 # addchangegroup assumes local user can lock remote
442 # repo (local filesystem, old ssh servers).
442 # repo (local filesystem, old ssh servers).
443 #
443 #
444 # unbundle assumes local user cannot lock remote repo (new ssh
444 # unbundle assumes local user cannot lock remote repo (new ssh
445 # servers, http servers).
445 # servers, http servers).
446
446
447 if not pushop.remote.canpush():
447 if not pushop.remote.canpush():
448 raise error.Abort(_("destination does not support push"))
448 raise error.Abort(_("destination does not support push"))
449 # get local lock as we might write phase data
449 # get local lock as we might write phase data
450 localwlock = locallock = None
450 localwlock = locallock = None
451 try:
451 try:
452 # bundle2 push may receive a reply bundle touching bookmarks or other
452 # bundle2 push may receive a reply bundle touching bookmarks or other
453 # things requiring the wlock. Take it now to ensure proper ordering.
453 # things requiring the wlock. Take it now to ensure proper ordering.
454 maypushback = pushop.ui.configbool('experimental', 'bundle2.pushback')
454 maypushback = pushop.ui.configbool('experimental', 'bundle2.pushback')
455 if (not _forcebundle1(pushop)) and maypushback:
455 if (not _forcebundle1(pushop)) and maypushback:
456 localwlock = pushop.repo.wlock()
456 localwlock = pushop.repo.wlock()
457 locallock = pushop.repo.lock()
457 locallock = pushop.repo.lock()
458 pushop.locallocked = True
458 pushop.locallocked = True
459 except IOError as err:
459 except IOError as err:
460 pushop.locallocked = False
460 pushop.locallocked = False
461 if err.errno != errno.EACCES:
461 if err.errno != errno.EACCES:
462 raise
462 raise
463 # source repo cannot be locked.
463 # source repo cannot be locked.
464 # We do not abort the push, but just disable the local phase
464 # We do not abort the push, but just disable the local phase
465 # synchronisation.
465 # synchronisation.
466 msg = 'cannot lock source repository: %s\n' % err
466 msg = 'cannot lock source repository: %s\n' % err
467 pushop.ui.debug(msg)
467 pushop.ui.debug(msg)
468 try:
468 try:
469 if pushop.locallocked:
469 if pushop.locallocked:
470 pushop.trmanager = transactionmanager(pushop.repo,
470 pushop.trmanager = transactionmanager(pushop.repo,
471 'push-response',
471 'push-response',
472 pushop.remote.url())
472 pushop.remote.url())
473 pushop.repo.checkpush(pushop)
473 pushop.repo.checkpush(pushop)
474 lock = None
474 lock = None
475 unbundle = pushop.remote.capable('unbundle')
475 unbundle = pushop.remote.capable('unbundle')
476 if not unbundle:
476 if not unbundle:
477 lock = pushop.remote.lock()
477 lock = pushop.remote.lock()
478 try:
478 try:
479 _pushdiscovery(pushop)
479 _pushdiscovery(pushop)
480 if not _forcebundle1(pushop):
480 if not _forcebundle1(pushop):
481 _pushbundle2(pushop)
481 _pushbundle2(pushop)
482 _pushchangeset(pushop)
482 _pushchangeset(pushop)
483 _pushsyncphase(pushop)
483 _pushsyncphase(pushop)
484 _pushobsolete(pushop)
484 _pushobsolete(pushop)
485 _pushbookmark(pushop)
485 _pushbookmark(pushop)
486 finally:
486 finally:
487 if lock is not None:
487 if lock is not None:
488 lock.release()
488 lock.release()
489 if pushop.trmanager:
489 if pushop.trmanager:
490 pushop.trmanager.close()
490 pushop.trmanager.close()
491 finally:
491 finally:
492 if pushop.trmanager:
492 if pushop.trmanager:
493 pushop.trmanager.release()
493 pushop.trmanager.release()
494 if locallock is not None:
494 if locallock is not None:
495 locallock.release()
495 locallock.release()
496 if localwlock is not None:
496 if localwlock is not None:
497 localwlock.release()
497 localwlock.release()
498
498
499 return pushop
499 return pushop
500
500
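# Illustrative sketch (assumption, not part of the original file): a minimal
# caller of push(). 'remote' is a peer object obtained elsewhere; the bookmark
# name is invented for the example.
def _demopush(repo, remote):
    pushop = push(repo, remote, revs=None, bookmarks=['@'])
    # cgresult: None = nothing to push, 0 = HTTP error, 1 = pushed or refused,
    # other values as described by addchangegroup()
    return pushop.cgresult, pushop.bkresult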
501 # list of steps to perform discovery before push
501 # list of steps to perform discovery before push
502 pushdiscoveryorder = []
502 pushdiscoveryorder = []
503
503
504 # Mapping between step name and function
504 # Mapping between step name and function
505 #
505 #
506 # This exists to help extensions wrap steps if necessary
506 # This exists to help extensions wrap steps if necessary
507 pushdiscoverymapping = {}
507 pushdiscoverymapping = {}
508
508
509 def pushdiscovery(stepname):
509 def pushdiscovery(stepname):
510 """decorator for function performing discovery before push
510 """decorator for function performing discovery before push
511
511
512 The function is added to the step -> function mapping and appended to the
512 The function is added to the step -> function mapping and appended to the
513 list of steps. Beware that decorated functions will be added in order (this
513 list of steps. Beware that decorated functions will be added in order (this
514 may matter).
514 may matter).
515
515
516 You can only use this decorator for a new step; if you want to wrap a step
516 You can only use this decorator for a new step; if you want to wrap a step
517 from an extension, change the pushdiscoverymapping dictionary directly."""
517 from an extension, change the pushdiscoverymapping dictionary directly."""
518 def dec(func):
518 def dec(func):
519 assert stepname not in pushdiscoverymapping
519 assert stepname not in pushdiscoverymapping
520 pushdiscoverymapping[stepname] = func
520 pushdiscoverymapping[stepname] = func
521 pushdiscoveryorder.append(stepname)
521 pushdiscoveryorder.append(stepname)
522 return func
522 return func
523 return dec
523 return dec
524
524
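# Illustrative sketch (assumption): how an extension could register an extra
# discovery step with the decorator above. The step name and debug message are
# invented; a real step would typically stash extra state on 'pushop' for a
# later part generator to use.
@pushdiscovery('example')
def _pushdiscoveryexample(pushop):
    pushop.ui.debug('running example discovery step\n')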
525 def _pushdiscovery(pushop):
525 def _pushdiscovery(pushop):
526 """Run all discovery steps"""
526 """Run all discovery steps"""
527 for stepname in pushdiscoveryorder:
527 for stepname in pushdiscoveryorder:
528 step = pushdiscoverymapping[stepname]
528 step = pushdiscoverymapping[stepname]
529 step(pushop)
529 step(pushop)
530
530
531 @pushdiscovery('changeset')
531 @pushdiscovery('changeset')
532 def _pushdiscoverychangeset(pushop):
532 def _pushdiscoverychangeset(pushop):
533 """discover the changeset that need to be pushed"""
533 """discover the changeset that need to be pushed"""
534 fci = discovery.findcommonincoming
534 fci = discovery.findcommonincoming
535 commoninc = fci(pushop.repo, pushop.remote, force=pushop.force)
535 commoninc = fci(pushop.repo, pushop.remote, force=pushop.force)
536 common, inc, remoteheads = commoninc
536 common, inc, remoteheads = commoninc
537 fco = discovery.findcommonoutgoing
537 fco = discovery.findcommonoutgoing
538 outgoing = fco(pushop.repo, pushop.remote, onlyheads=pushop.revs,
538 outgoing = fco(pushop.repo, pushop.remote, onlyheads=pushop.revs,
539 commoninc=commoninc, force=pushop.force)
539 commoninc=commoninc, force=pushop.force)
540 pushop.outgoing = outgoing
540 pushop.outgoing = outgoing
541 pushop.remoteheads = remoteheads
541 pushop.remoteheads = remoteheads
542 pushop.incoming = inc
542 pushop.incoming = inc
543
543
544 @pushdiscovery('phase')
544 @pushdiscovery('phase')
545 def _pushdiscoveryphase(pushop):
545 def _pushdiscoveryphase(pushop):
546 """discover the phase that needs to be pushed
546 """discover the phase that needs to be pushed
547
547
548 (computed for both the success and failure cases of the changeset push)"""
548 (computed for both the success and failure cases of the changeset push)"""
549 outgoing = pushop.outgoing
549 outgoing = pushop.outgoing
550 unfi = pushop.repo.unfiltered()
550 unfi = pushop.repo.unfiltered()
551 remotephases = pushop.remote.listkeys('phases')
551 remotephases = pushop.remote.listkeys('phases')
552 publishing = remotephases.get('publishing', False)
552 publishing = remotephases.get('publishing', False)
553 if (pushop.ui.configbool('ui', '_usedassubrepo', False)
553 if (pushop.ui.configbool('ui', '_usedassubrepo', False)
554 and remotephases # server supports phases
554 and remotephases # server supports phases
555 and not pushop.outgoing.missing # no changesets to be pushed
555 and not pushop.outgoing.missing # no changesets to be pushed
556 and publishing):
556 and publishing):
557 # When:
557 # When:
558 # - this is a subrepo push
558 # - this is a subrepo push
559 # - and the remote supports phases
559 # - and the remote supports phases
560 # - and no changesets are to be pushed
560 # - and no changesets are to be pushed
561 # - and remote is publishing
561 # - and remote is publishing
562 # we may be hitting the case of issue 3871!
562 # we may be hitting the case of issue 3871!
563 # We drop the phase synchronisation normally done as a courtesy,
563 # We drop the phase synchronisation normally done as a courtesy,
564 # which would publish changesets that are possibly still draft
564 # which would publish changesets that are possibly still draft
565 # locally on the remote.
565 # locally on the remote.
566 remotephases = {'publishing': 'True'}
566 remotephases = {'publishing': 'True'}
567 ana = phases.analyzeremotephases(pushop.repo,
567 ana = phases.analyzeremotephases(pushop.repo,
568 pushop.fallbackheads,
568 pushop.fallbackheads,
569 remotephases)
569 remotephases)
570 pheads, droots = ana
570 pheads, droots = ana
571 extracond = ''
571 extracond = ''
572 if not publishing:
572 if not publishing:
573 extracond = ' and public()'
573 extracond = ' and public()'
574 revset = 'heads((%%ln::%%ln) %s)' % extracond
574 revset = 'heads((%%ln::%%ln) %s)' % extracond
575 # Get the list of all revs that are draft on the remote but public here.
575 # Get the list of all revs that are draft on the remote but public here.
576 # XXX Beware that the revset breaks if droots is not strictly made of
576 # XXX Beware that the revset breaks if droots is not strictly made of
577 # XXX roots; we may want to ensure it is, but that is costly
577 # XXX roots; we may want to ensure it is, but that is costly
578 fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
578 fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
579 if not outgoing.missing:
579 if not outgoing.missing:
580 future = fallback
580 future = fallback
581 else:
581 else:
582 # add the changesets we are going to push as draft
582 # add the changesets we are going to push as draft
583 #
583 #
584 # should not be necessary for a publishing server, but because of an
584 # should not be necessary for a publishing server, but because of an
585 # issue fixed in xxxxx we have to do it anyway.
585 # issue fixed in xxxxx we have to do it anyway.
586 fdroots = list(unfi.set('roots(%ln + %ln::)',
586 fdroots = list(unfi.set('roots(%ln + %ln::)',
587 outgoing.missing, droots))
587 outgoing.missing, droots))
588 fdroots = [f.node() for f in fdroots]
588 fdroots = [f.node() for f in fdroots]
589 future = list(unfi.set(revset, fdroots, pushop.futureheads))
589 future = list(unfi.set(revset, fdroots, pushop.futureheads))
590 pushop.outdatedphases = future
590 pushop.outdatedphases = future
591 pushop.fallbackoutdatedphases = fallback
591 pushop.fallbackoutdatedphases = fallback
592
592
593 @pushdiscovery('obsmarker')
593 @pushdiscovery('obsmarker')
594 def _pushdiscoveryobsmarkers(pushop):
594 def _pushdiscoveryobsmarkers(pushop):
595 if (obsolete.isenabled(pushop.repo, obsolete.exchangeopt)
595 if (obsolete.isenabled(pushop.repo, obsolete.exchangeopt)
596 and pushop.repo.obsstore
596 and pushop.repo.obsstore
597 and 'obsolete' in pushop.remote.listkeys('namespaces')):
597 and 'obsolete' in pushop.remote.listkeys('namespaces')):
598 repo = pushop.repo
598 repo = pushop.repo
599 # very naive computation, which can be quite expensive on big repos.
599 # very naive computation, which can be quite expensive on big repos.
600 # However, evolution is currently slow on them anyway.
600 # However, evolution is currently slow on them anyway.
601 nodes = (c.node() for c in repo.set('::%ln', pushop.futureheads))
601 nodes = (c.node() for c in repo.set('::%ln', pushop.futureheads))
602 pushop.outobsmarkers = pushop.repo.obsstore.relevantmarkers(nodes)
602 pushop.outobsmarkers = pushop.repo.obsstore.relevantmarkers(nodes)
603
603
604 @pushdiscovery('bookmarks')
604 @pushdiscovery('bookmarks')
605 def _pushdiscoverybookmarks(pushop):
605 def _pushdiscoverybookmarks(pushop):
606 ui = pushop.ui
606 ui = pushop.ui
607 repo = pushop.repo.unfiltered()
607 repo = pushop.repo.unfiltered()
608 remote = pushop.remote
608 remote = pushop.remote
609 ui.debug("checking for updated bookmarks\n")
609 ui.debug("checking for updated bookmarks\n")
610 ancestors = ()
610 ancestors = ()
611 if pushop.revs:
611 if pushop.revs:
612 revnums = map(repo.changelog.rev, pushop.revs)
612 revnums = map(repo.changelog.rev, pushop.revs)
613 ancestors = repo.changelog.ancestors(revnums, inclusive=True)
613 ancestors = repo.changelog.ancestors(revnums, inclusive=True)
614 remotebookmark = remote.listkeys('bookmarks')
614 remotebookmark = remote.listkeys('bookmarks')
615
615
616 explicit = set([repo._bookmarks.expandname(bookmark)
616 explicit = set([repo._bookmarks.expandname(bookmark)
617 for bookmark in pushop.bookmarks])
617 for bookmark in pushop.bookmarks])
618
618
619 remotebookmark = bookmod.unhexlifybookmarks(remotebookmark)
619 remotebookmark = bookmod.unhexlifybookmarks(remotebookmark)
620 comp = bookmod.comparebookmarks(repo, repo._bookmarks, remotebookmark)
620 comp = bookmod.comparebookmarks(repo, repo._bookmarks, remotebookmark)
621
621
622 def safehex(x):
622 def safehex(x):
623 if x is None:
623 if x is None:
624 return x
624 return x
625 return hex(x)
625 return hex(x)
626
626
627 def hexifycompbookmarks(bookmarks):
627 def hexifycompbookmarks(bookmarks):
628 for b, scid, dcid in bookmarks:
628 for b, scid, dcid in bookmarks:
629 yield b, safehex(scid), safehex(dcid)
629 yield b, safehex(scid), safehex(dcid)
630
630
631 comp = [hexifycompbookmarks(marks) for marks in comp]
631 comp = [hexifycompbookmarks(marks) for marks in comp]
632 addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp
632 addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp
633
633
634 for b, scid, dcid in advsrc:
634 for b, scid, dcid in advsrc:
635 if b in explicit:
635 if b in explicit:
636 explicit.remove(b)
636 explicit.remove(b)
637 if not ancestors or repo[scid].rev() in ancestors:
637 if not ancestors or repo[scid].rev() in ancestors:
638 pushop.outbookmarks.append((b, dcid, scid))
638 pushop.outbookmarks.append((b, dcid, scid))
639 # search for added bookmarks
639 # search for added bookmarks
640 for b, scid, dcid in addsrc:
640 for b, scid, dcid in addsrc:
641 if b in explicit:
641 if b in explicit:
642 explicit.remove(b)
642 explicit.remove(b)
643 pushop.outbookmarks.append((b, '', scid))
643 pushop.outbookmarks.append((b, '', scid))
644 # search for overwritten bookmark
644 # search for overwritten bookmark
645 for b, scid, dcid in list(advdst) + list(diverge) + list(differ):
645 for b, scid, dcid in list(advdst) + list(diverge) + list(differ):
646 if b in explicit:
646 if b in explicit:
647 explicit.remove(b)
647 explicit.remove(b)
648 pushop.outbookmarks.append((b, dcid, scid))
648 pushop.outbookmarks.append((b, dcid, scid))
649 # search for bookmark to delete
649 # search for bookmark to delete
650 for b, scid, dcid in adddst:
650 for b, scid, dcid in adddst:
651 if b in explicit:
651 if b in explicit:
652 explicit.remove(b)
652 explicit.remove(b)
653 # treat as "deleted locally"
653 # treat as "deleted locally"
654 pushop.outbookmarks.append((b, dcid, ''))
654 pushop.outbookmarks.append((b, dcid, ''))
655 # identical bookmarks shouldn't get reported
655 # identical bookmarks shouldn't get reported
656 for b, scid, dcid in same:
656 for b, scid, dcid in same:
657 if b in explicit:
657 if b in explicit:
658 explicit.remove(b)
658 explicit.remove(b)
659
659
660 if explicit:
660 if explicit:
661 explicit = sorted(explicit)
661 explicit = sorted(explicit)
662 # we should probably list all of them
662 # we should probably list all of them
663 ui.warn(_('bookmark %s does not exist on the local '
663 ui.warn(_('bookmark %s does not exist on the local '
664 'or remote repository!\n') % explicit[0])
664 'or remote repository!\n') % explicit[0])
665 pushop.bkresult = 2
665 pushop.bkresult = 2
666
666
667 pushop.outbookmarks.sort()
667 pushop.outbookmarks.sort()
668
668
669 def _pushcheckoutgoing(pushop):
669 def _pushcheckoutgoing(pushop):
670 outgoing = pushop.outgoing
670 outgoing = pushop.outgoing
671 unfi = pushop.repo.unfiltered()
671 unfi = pushop.repo.unfiltered()
672 if not outgoing.missing:
672 if not outgoing.missing:
673 # nothing to push
673 # nothing to push
674 scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
674 scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
675 return False
675 return False
676 # something to push
676 # something to push
677 if not pushop.force:
677 if not pushop.force:
678 # if repo.obsstore == False --> nothing is obsolete,
678 # if repo.obsstore == False --> nothing is obsolete,
679 # so we skip the iteration to save time
679 # so we skip the iteration to save time
680 if unfi.obsstore:
680 if unfi.obsstore:
681 # these messages are defined here to stay within the 80-char limit
681 # these messages are defined here to stay within the 80-char limit
682 mso = _("push includes obsolete changeset: %s!")
682 mso = _("push includes obsolete changeset: %s!")
683 mst = {"unstable": _("push includes unstable changeset: %s!"),
683 mst = {"unstable": _("push includes unstable changeset: %s!"),
684 "bumped": _("push includes bumped changeset: %s!"),
684 "bumped": _("push includes bumped changeset: %s!"),
685 "divergent": _("push includes divergent changeset: %s!")}
685 "divergent": _("push includes divergent changeset: %s!")}
686 # If we are about to push and there is at least one
686 # If we are about to push and there is at least one
687 # obsolete or unstable changeset in missing, then at
687 # obsolete or unstable changeset in missing, then at
688 # least one of the missing heads will be obsolete or
688 # least one of the missing heads will be obsolete or
689 # unstable. So checking only the heads is ok.
689 # unstable. So checking only the heads is ok.
690 for node in outgoing.missingheads:
690 for node in outgoing.missingheads:
691 ctx = unfi[node]
691 ctx = unfi[node]
692 if ctx.obsolete():
692 if ctx.obsolete():
693 raise error.Abort(mso % ctx)
693 raise error.Abort(mso % ctx)
694 elif ctx.troubled():
694 elif ctx.troubled():
695 raise error.Abort(mst[ctx.troubles()[0]] % ctx)
695 raise error.Abort(mst[ctx.troubles()[0]] % ctx)
696
696
697 discovery.checkheads(pushop)
697 discovery.checkheads(pushop)
698 return True
698 return True
699
699
700 # List of names of steps to perform for an outgoing bundle2, order matters.
700 # List of names of steps to perform for an outgoing bundle2, order matters.
701 b2partsgenorder = []
701 b2partsgenorder = []
702
702
703 # Mapping between step name and function
703 # Mapping between step name and function
704 #
704 #
705 # This exists to help extensions wrap steps if necessary
705 # This exists to help extensions wrap steps if necessary
706 b2partsgenmapping = {}
706 b2partsgenmapping = {}
707
707
708 def b2partsgenerator(stepname, idx=None):
708 def b2partsgenerator(stepname, idx=None):
709 """decorator for function generating bundle2 part
709 """decorator for function generating bundle2 part
710
710
711 The function is added to the step -> function mapping and appended to the
711 The function is added to the step -> function mapping and appended to the
712 list of steps. Beware that decorated functions will be added in order
712 list of steps. Beware that decorated functions will be added in order
713 (this may matter).
713 (this may matter).
714
714
715 You can only use this decorator for new steps; if you want to wrap a step
715 You can only use this decorator for new steps; if you want to wrap a step
716 from an extension, change the b2partsgenmapping dictionary directly."""
716 from an extension, change the b2partsgenmapping dictionary directly."""
717 def dec(func):
717 def dec(func):
718 assert stepname not in b2partsgenmapping
718 assert stepname not in b2partsgenmapping
719 b2partsgenmapping[stepname] = func
719 b2partsgenmapping[stepname] = func
720 if idx is None:
720 if idx is None:
721 b2partsgenorder.append(stepname)
721 b2partsgenorder.append(stepname)
722 else:
722 else:
723 b2partsgenorder.insert(idx, stepname)
723 b2partsgenorder.insert(idx, stepname)
724 return func
724 return func
725 return dec
725 return dec
726
726
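# Illustrative sketch (assumption): registering an extra bundle2 part
# generator with the decorator above. The step and part names are invented;
# a real generator would usually check a server capability first.
@b2partsgenerator('example')
def _pushb2example(pushop, bundler):
    if 'example' in pushop.stepsdone:
        return
    pushop.stepsdone.add('example')
    # advisory parts (mandatory=False) are ignored by receivers that do not
    # understand them
    bundler.newpart('x-example', data='hello', mandatory=False)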
727 def _pushb2ctxcheckheads(pushop, bundler):
727 def _pushb2ctxcheckheads(pushop, bundler):
728 """Generate race condition checking parts
728 """Generate race condition checking parts
729
729
730 Exists as an independent function to aid extensions
730 Exists as an independent function to aid extensions
731 """
731 """
732 if not pushop.force:
732 if not pushop.force:
733 bundler.newpart('check:heads', data=iter(pushop.remoteheads))
733 bundler.newpart('check:heads', data=iter(pushop.remoteheads))
734
734
735 @b2partsgenerator('changeset')
735 @b2partsgenerator('changeset')
736 def _pushb2ctx(pushop, bundler):
736 def _pushb2ctx(pushop, bundler):
737 """handle changegroup push through bundle2
737 """handle changegroup push through bundle2
738
738
739 addchangegroup result is stored in the ``pushop.cgresult`` attribute.
739 addchangegroup result is stored in the ``pushop.cgresult`` attribute.
740 """
740 """
741 if 'changesets' in pushop.stepsdone:
741 if 'changesets' in pushop.stepsdone:
742 return
742 return
743 pushop.stepsdone.add('changesets')
743 pushop.stepsdone.add('changesets')
744 # Send known heads to the server for race detection.
744 # Send known heads to the server for race detection.
745 if not _pushcheckoutgoing(pushop):
745 if not _pushcheckoutgoing(pushop):
746 return
746 return
747 pushop.repo.prepushoutgoinghooks(pushop)
747 pushop.repo.prepushoutgoinghooks(pushop)
748
748
749 _pushb2ctxcheckheads(pushop, bundler)
749 _pushb2ctxcheckheads(pushop, bundler)
750
750
751 b2caps = bundle2.bundle2caps(pushop.remote)
751 b2caps = bundle2.bundle2caps(pushop.remote)
752 version = '01'
752 version = '01'
753 cgversions = b2caps.get('changegroup')
753 cgversions = b2caps.get('changegroup')
754 if cgversions: # 3.1 and 3.2 ship with an empty value
754 if cgversions: # 3.1 and 3.2 ship with an empty value
755 cgversions = [v for v in cgversions
755 cgversions = [v for v in cgversions
756 if v in changegroup.supportedoutgoingversions(
756 if v in changegroup.supportedoutgoingversions(
757 pushop.repo)]
757 pushop.repo)]
758 if not cgversions:
758 if not cgversions:
759 raise ValueError(_('no common changegroup version'))
759 raise ValueError(_('no common changegroup version'))
760 version = max(cgversions)
760 version = max(cgversions)
761 cg = changegroup.getlocalchangegroupraw(pushop.repo, 'push',
761 cg = changegroup.getlocalchangegroupraw(pushop.repo, 'push',
762 pushop.outgoing,
762 pushop.outgoing,
763 version=version)
763 version=version)
764 cgpart = bundler.newpart('changegroup', data=cg)
764 cgpart = bundler.newpart('changegroup', data=cg)
765 if cgversions:
765 if cgversions:
766 cgpart.addparam('version', version)
766 cgpart.addparam('version', version)
767 if 'treemanifest' in pushop.repo.requirements:
767 if 'treemanifest' in pushop.repo.requirements:
768 cgpart.addparam('treemanifest', '1')
768 cgpart.addparam('treemanifest', '1')
769 def handlereply(op):
769 def handlereply(op):
770 """extract addchangegroup returns from server reply"""
770 """extract addchangegroup returns from server reply"""
771 cgreplies = op.records.getreplies(cgpart.id)
771 cgreplies = op.records.getreplies(cgpart.id)
772 assert len(cgreplies['changegroup']) == 1
772 assert len(cgreplies['changegroup']) == 1
773 pushop.cgresult = cgreplies['changegroup'][0]['return']
773 pushop.cgresult = cgreplies['changegroup'][0]['return']
774 return handlereply
774 return handlereply
775
775
776 @b2partsgenerator('phase')
776 @b2partsgenerator('phase')
777 def _pushb2phases(pushop, bundler):
777 def _pushb2phases(pushop, bundler):
778 """handle phase push through bundle2"""
778 """handle phase push through bundle2"""
779 if 'phases' in pushop.stepsdone:
779 if 'phases' in pushop.stepsdone:
780 return
780 return
781 b2caps = bundle2.bundle2caps(pushop.remote)
781 b2caps = bundle2.bundle2caps(pushop.remote)
782 if not 'pushkey' in b2caps:
782 if not 'pushkey' in b2caps:
783 return
783 return
784 pushop.stepsdone.add('phases')
784 pushop.stepsdone.add('phases')
785 part2node = []
785 part2node = []
786
786
787 def handlefailure(pushop, exc):
787 def handlefailure(pushop, exc):
788 targetid = int(exc.partid)
788 targetid = int(exc.partid)
789 for partid, node in part2node:
789 for partid, node in part2node:
790 if partid == targetid:
790 if partid == targetid:
791 raise error.Abort(_('updating %s to public failed') % node)
791 raise error.Abort(_('updating %s to public failed') % node)
792
792
793 enc = pushkey.encode
793 enc = pushkey.encode
794 for newremotehead in pushop.outdatedphases:
794 for newremotehead in pushop.outdatedphases:
795 part = bundler.newpart('pushkey')
795 part = bundler.newpart('pushkey')
796 part.addparam('namespace', enc('phases'))
796 part.addparam('namespace', enc('phases'))
797 part.addparam('key', enc(newremotehead.hex()))
797 part.addparam('key', enc(newremotehead.hex()))
798 part.addparam('old', enc(str(phases.draft)))
798 part.addparam('old', enc(str(phases.draft)))
799 part.addparam('new', enc(str(phases.public)))
799 part.addparam('new', enc(str(phases.public)))
800 part2node.append((part.id, newremotehead))
800 part2node.append((part.id, newremotehead))
801 pushop.pkfailcb[part.id] = handlefailure
801 pushop.pkfailcb[part.id] = handlefailure
802
802
803 def handlereply(op):
803 def handlereply(op):
804 for partid, node in part2node:
804 for partid, node in part2node:
805 partrep = op.records.getreplies(partid)
805 partrep = op.records.getreplies(partid)
806 results = partrep['pushkey']
806 results = partrep['pushkey']
807 assert len(results) <= 1
807 assert len(results) <= 1
808 msg = None
808 msg = None
809 if not results:
809 if not results:
810 msg = _('server ignored update of %s to public!\n') % node
810 msg = _('server ignored update of %s to public!\n') % node
811 elif not int(results[0]['return']):
811 elif not int(results[0]['return']):
812 msg = _('updating %s to public failed!\n') % node
812 msg = _('updating %s to public failed!\n') % node
813 if msg is not None:
813 if msg is not None:
814 pushop.ui.warn(msg)
814 pushop.ui.warn(msg)
815 return handlereply
815 return handlereply
816
816
817 @b2partsgenerator('obsmarkers')
817 @b2partsgenerator('obsmarkers')
818 def _pushb2obsmarkers(pushop, bundler):
818 def _pushb2obsmarkers(pushop, bundler):
819 if 'obsmarkers' in pushop.stepsdone:
819 if 'obsmarkers' in pushop.stepsdone:
820 return
820 return
821 remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
821 remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
822 if obsolete.commonversion(remoteversions) is None:
822 if obsolete.commonversion(remoteversions) is None:
823 return
823 return
824 pushop.stepsdone.add('obsmarkers')
824 pushop.stepsdone.add('obsmarkers')
825 if pushop.outobsmarkers:
825 if pushop.outobsmarkers:
826 markers = sorted(pushop.outobsmarkers)
826 markers = sorted(pushop.outobsmarkers)
827 buildobsmarkerspart(bundler, markers)
827 buildobsmarkerspart(bundler, markers)
828
828
829 @b2partsgenerator('bookmarks')
829 @b2partsgenerator('bookmarks')
830 def _pushb2bookmarks(pushop, bundler):
830 def _pushb2bookmarks(pushop, bundler):
831 """handle bookmark push through bundle2"""
831 """handle bookmark push through bundle2"""
832 if 'bookmarks' in pushop.stepsdone:
832 if 'bookmarks' in pushop.stepsdone:
833 return
833 return
834 b2caps = bundle2.bundle2caps(pushop.remote)
834 b2caps = bundle2.bundle2caps(pushop.remote)
835 if 'pushkey' not in b2caps:
835 if 'pushkey' not in b2caps:
836 return
836 return
837 pushop.stepsdone.add('bookmarks')
837 pushop.stepsdone.add('bookmarks')
838 part2book = []
838 part2book = []
839 enc = pushkey.encode
839 enc = pushkey.encode
840
840
841 def handlefailure(pushop, exc):
841 def handlefailure(pushop, exc):
842 targetid = int(exc.partid)
842 targetid = int(exc.partid)
843 for partid, book, action in part2book:
843 for partid, book, action in part2book:
844 if partid == targetid:
844 if partid == targetid:
845 raise error.Abort(bookmsgmap[action][1].rstrip() % book)
845 raise error.Abort(bookmsgmap[action][1].rstrip() % book)
846 # we should not be called for parts we did not generate
846 # we should not be called for parts we did not generate
847 assert False
847 assert False
848
848
849 for book, old, new in pushop.outbookmarks:
849 for book, old, new in pushop.outbookmarks:
850 part = bundler.newpart('pushkey')
850 part = bundler.newpart('pushkey')
851 part.addparam('namespace', enc('bookmarks'))
851 part.addparam('namespace', enc('bookmarks'))
852 part.addparam('key', enc(book))
852 part.addparam('key', enc(book))
853 part.addparam('old', enc(old))
853 part.addparam('old', enc(old))
854 part.addparam('new', enc(new))
854 part.addparam('new', enc(new))
855 action = 'update'
855 action = 'update'
856 if not old:
856 if not old:
857 action = 'export'
857 action = 'export'
858 elif not new:
858 elif not new:
859 action = 'delete'
859 action = 'delete'
860 part2book.append((part.id, book, action))
860 part2book.append((part.id, book, action))
861 pushop.pkfailcb[part.id] = handlefailure
861 pushop.pkfailcb[part.id] = handlefailure
862
862
863 def handlereply(op):
863 def handlereply(op):
864 ui = pushop.ui
864 ui = pushop.ui
865 for partid, book, action in part2book:
865 for partid, book, action in part2book:
866 partrep = op.records.getreplies(partid)
866 partrep = op.records.getreplies(partid)
867 results = partrep['pushkey']
867 results = partrep['pushkey']
868 assert len(results) <= 1
868 assert len(results) <= 1
869 if not results:
869 if not results:
870 pushop.ui.warn(_('server ignored bookmark %s update\n') % book)
870 pushop.ui.warn(_('server ignored bookmark %s update\n') % book)
871 else:
871 else:
872 ret = int(results[0]['return'])
872 ret = int(results[0]['return'])
873 if ret:
873 if ret:
874 ui.status(bookmsgmap[action][0] % book)
874 ui.status(bookmsgmap[action][0] % book)
875 else:
875 else:
876 ui.warn(bookmsgmap[action][1] % book)
876 ui.warn(bookmsgmap[action][1] % book)
877 if pushop.bkresult is not None:
877 if pushop.bkresult is not None:
878 pushop.bkresult = 1
878 pushop.bkresult = 1
879 return handlereply
879 return handlereply
880
880
881
881
882 def _pushbundle2(pushop):
882 def _pushbundle2(pushop):
883 """push data to the remote using bundle2
883 """push data to the remote using bundle2
884
884
885 The only currently supported type of data is changegroup but this will
885 The only currently supported type of data is changegroup but this will
886 evolve in the future."""
886 evolve in the future."""
887 bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
887 bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
888 pushback = (pushop.trmanager
888 pushback = (pushop.trmanager
889 and pushop.ui.configbool('experimental', 'bundle2.pushback'))
889 and pushop.ui.configbool('experimental', 'bundle2.pushback'))
890
890
891 # create reply capability
891 # create reply capability
892 capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo,
892 capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo,
893 allowpushback=pushback))
893 allowpushback=pushback))
894 bundler.newpart('replycaps', data=capsblob)
894 bundler.newpart('replycaps', data=capsblob)
895 replyhandlers = []
895 replyhandlers = []
896 for partgenname in b2partsgenorder:
896 for partgenname in b2partsgenorder:
897 partgen = b2partsgenmapping[partgenname]
897 partgen = b2partsgenmapping[partgenname]
898 ret = partgen(pushop, bundler)
898 ret = partgen(pushop, bundler)
899 if callable(ret):
899 if callable(ret):
900 replyhandlers.append(ret)
900 replyhandlers.append(ret)
901 # do not push if nothing to push
901 # do not push if nothing to push
902 if bundler.nbparts <= 1:
902 if bundler.nbparts <= 1:
903 return
903 return
904 stream = util.chunkbuffer(bundler.getchunks())
904 stream = util.chunkbuffer(bundler.getchunks())
905 try:
905 try:
906 try:
906 try:
907 reply = pushop.remote.unbundle(
907 reply = pushop.remote.unbundle(
908 stream, ['force'], pushop.remote.url())
908 stream, ['force'], pushop.remote.url())
909 except error.BundleValueError as exc:
909 except error.BundleValueError as exc:
910 raise error.Abort(_('missing support for %s') % exc)
910 raise error.Abort(_('missing support for %s') % exc)
911 try:
911 try:
912 trgetter = None
912 trgetter = None
913 if pushback:
913 if pushback:
914 trgetter = pushop.trmanager.transaction
914 trgetter = pushop.trmanager.transaction
915 op = bundle2.processbundle(pushop.repo, reply, trgetter)
915 op = bundle2.processbundle(pushop.repo, reply, trgetter)
916 except error.BundleValueError as exc:
916 except error.BundleValueError as exc:
917 raise error.Abort(_('missing support for %s') % exc)
917 raise error.Abort(_('missing support for %s') % exc)
918 except bundle2.AbortFromPart as exc:
918 except bundle2.AbortFromPart as exc:
919 pushop.ui.status(_('remote: %s\n') % exc)
919 pushop.ui.status(_('remote: %s\n') % exc)
920 if exc.hint is not None:
920 if exc.hint is not None:
921 pushop.ui.status(_('remote: %s\n') % ('(%s)' % exc.hint))
921 pushop.ui.status(_('remote: %s\n') % ('(%s)' % exc.hint))
922 raise error.Abort(_('push failed on remote'))
922 raise error.Abort(_('push failed on remote'))
923 except error.PushkeyFailed as exc:
923 except error.PushkeyFailed as exc:
924 partid = int(exc.partid)
924 partid = int(exc.partid)
925 if partid not in pushop.pkfailcb:
925 if partid not in pushop.pkfailcb:
926 raise
926 raise
927 pushop.pkfailcb[partid](pushop, exc)
927 pushop.pkfailcb[partid](pushop, exc)
928 for rephand in replyhandlers:
928 for rephand in replyhandlers:
929 rephand(op)
929 rephand(op)
930
930
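A rough sketch of the part-generation loop in _pushbundle2 above, with toy stand-ins for b2partsgenorder and b2partsgenmapping: each registered generator may append parts to the bundler and may return a callable that is later fed the processed server reply. The step names and data below are illustrative, not the real registry.

partsgenorder = ['changegroup', 'bookmarks']    # hypothetical step names

def gen_changegroup(pushop, bundler):
    bundler.append('changegroup part')
    def handlereply(op):
        print('changegroup reply handled:', op)
    return handlereply

def gen_bookmarks(pushop, bundler):
    bundler.append('pushkey part')
    return None    # nothing to do with the reply

partsgenmapping = {'changegroup': gen_changegroup,
                   'bookmarks': gen_bookmarks}

bundler, replyhandlers = [], []
for name in partsgenorder:
    ret = partsgenmapping[name](None, bundler)
    if callable(ret):
        replyhandlers.append(ret)

# "do not push if nothing to push": the real code bails out when the
# bundle holds only the replycaps part
if len(bundler) > 1:
    for rephand in replyhandlers:
        rephand({'status': 'ok'})    # stand-in for the processed reply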
931 def _pushchangeset(pushop):
931 def _pushchangeset(pushop):
932 """Make the actual push of changeset bundle to remote repo"""
932 """Make the actual push of changeset bundle to remote repo"""
933 if 'changesets' in pushop.stepsdone:
933 if 'changesets' in pushop.stepsdone:
934 return
934 return
935 pushop.stepsdone.add('changesets')
935 pushop.stepsdone.add('changesets')
936 if not _pushcheckoutgoing(pushop):
936 if not _pushcheckoutgoing(pushop):
937 return
937 return
938 pushop.repo.prepushoutgoinghooks(pushop)
938 pushop.repo.prepushoutgoinghooks(pushop)
939 outgoing = pushop.outgoing
939 outgoing = pushop.outgoing
940 unbundle = pushop.remote.capable('unbundle')
940 unbundle = pushop.remote.capable('unbundle')
941 # TODO: get bundlecaps from remote
942 bundlecaps = None
943 # create a changegroup from local
941 # create a changegroup from local
944 if pushop.revs is None and not (outgoing.excluded
942 if pushop.revs is None and not (outgoing.excluded
945 or pushop.repo.changelog.filteredrevs):
943 or pushop.repo.changelog.filteredrevs):
946 # push everything,
944 # push everything,
947 # use the fast path, no race possible on push
945 # use the fast path, no race possible on push
948 bundler = changegroup.cg1packer(pushop.repo, bundlecaps)
946 bundler = changegroup.cg1packer(pushop.repo)
949 cg = changegroup.getsubset(pushop.repo,
947 cg = changegroup.getsubset(pushop.repo,
950 outgoing,
948 outgoing,
951 bundler,
949 bundler,
952 'push',
950 'push',
953 fastpath=True)
951 fastpath=True)
954 else:
952 else:
955 cg = changegroup.getlocalchangegroup(pushop.repo, 'push', outgoing,
953 cg = changegroup.getlocalchangegroup(pushop.repo, 'push', outgoing)
956 bundlecaps)
957
954
958 # apply changegroup to remote
955 # apply changegroup to remote
959 if unbundle:
956 if unbundle:
960 # local repo finds heads on server, finds out what
957 # local repo finds heads on server, finds out what
961 # revs it must push. once revs transferred, if server
958 # revs it must push. once revs transferred, if server
962 # finds it has different heads (someone else won
959 # finds it has different heads (someone else won
963 # commit/push race), server aborts.
960 # commit/push race), server aborts.
964 if pushop.force:
961 if pushop.force:
965 remoteheads = ['force']
962 remoteheads = ['force']
966 else:
963 else:
967 remoteheads = pushop.remoteheads
964 remoteheads = pushop.remoteheads
968 # ssh: return remote's addchangegroup()
965 # ssh: return remote's addchangegroup()
969 # http: return remote's addchangegroup() or 0 for error
966 # http: return remote's addchangegroup() or 0 for error
970 pushop.cgresult = pushop.remote.unbundle(cg, remoteheads,
967 pushop.cgresult = pushop.remote.unbundle(cg, remoteheads,
971 pushop.repo.url())
968 pushop.repo.url())
972 else:
969 else:
973 # we return an integer indicating remote head count
970 # we return an integer indicating remote head count
974 # change
971 # change
975 pushop.cgresult = pushop.remote.addchangegroup(cg, 'push',
972 pushop.cgresult = pushop.remote.addchangegroup(cg, 'push',
976 pushop.repo.url())
973 pushop.repo.url())
977
974
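The cg1 fast path above is only taken when the whole repository is being pushed and nothing is excluded or filtered locally. A pure-logic sketch of that test (the function name and sample values are illustrative only):

def canusefastpath(revs, excluded, filteredrevs):
    # push everything and nothing hidden: no race possible on push
    return revs is None and not (excluded or filteredrevs)

print(canusefastpath(None, [], set()))      # True  -> getsubset(..., fastpath=True)
print(canusefastpath(['tip'], [], set()))   # False -> getlocalchangegroup()
print(canusefastpath(None, [], {5, 6}))     # False -> filtered revs present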
978 def _pushsyncphase(pushop):
975 def _pushsyncphase(pushop):
979 """synchronise phase information locally and remotely"""
976 """synchronise phase information locally and remotely"""
980 cheads = pushop.commonheads
977 cheads = pushop.commonheads
981 # even when we don't push, exchanging phase data is useful
978 # even when we don't push, exchanging phase data is useful
982 remotephases = pushop.remote.listkeys('phases')
979 remotephases = pushop.remote.listkeys('phases')
983 if (pushop.ui.configbool('ui', '_usedassubrepo', False)
980 if (pushop.ui.configbool('ui', '_usedassubrepo', False)
984 and remotephases # server supports phases
981 and remotephases # server supports phases
985 and pushop.cgresult is None # nothing was pushed
982 and pushop.cgresult is None # nothing was pushed
986 and remotephases.get('publishing', False)):
983 and remotephases.get('publishing', False)):
987 # When:
984 # When:
988 # - this is a subrepo push
985 # - this is a subrepo push
989 # - and remote supports phases
986 # - and remote supports phases
990 # - and no changeset was pushed
987 # - and no changeset was pushed
991 # - and remote is publishing
988 # - and remote is publishing
992 # We may be in issue 3871 case!
989 # We may be in issue 3871 case!
993 # We drop the possible phase synchronisation done by
990 # We drop the possible phase synchronisation done by
994 # courtesy to publish changesets possibly locally draft
991 # courtesy to publish changesets possibly locally draft
995 # on the remote.
992 # on the remote.
996 remotephases = {'publishing': 'True'}
993 remotephases = {'publishing': 'True'}
997 if not remotephases: # old server or public only reply from non-publishing
994 if not remotephases: # old server or public only reply from non-publishing
998 _localphasemove(pushop, cheads)
995 _localphasemove(pushop, cheads)
999 # don't push any phase data as there is nothing to push
996 # don't push any phase data as there is nothing to push
1000 else:
997 else:
1001 ana = phases.analyzeremotephases(pushop.repo, cheads,
998 ana = phases.analyzeremotephases(pushop.repo, cheads,
1002 remotephases)
999 remotephases)
1003 pheads, droots = ana
1000 pheads, droots = ana
1004 ### Apply remote phase on local
1001 ### Apply remote phase on local
1005 if remotephases.get('publishing', False):
1002 if remotephases.get('publishing', False):
1006 _localphasemove(pushop, cheads)
1003 _localphasemove(pushop, cheads)
1007 else: # publish = False
1004 else: # publish = False
1008 _localphasemove(pushop, pheads)
1005 _localphasemove(pushop, pheads)
1009 _localphasemove(pushop, cheads, phases.draft)
1006 _localphasemove(pushop, cheads, phases.draft)
1010 ### Apply local phase on remote
1007 ### Apply local phase on remote
1011
1008
1012 if pushop.cgresult:
1009 if pushop.cgresult:
1013 if 'phases' in pushop.stepsdone:
1010 if 'phases' in pushop.stepsdone:
1014 # phases already pushed though bundle2
1011 # phases already pushed though bundle2
1015 return
1012 return
1016 outdated = pushop.outdatedphases
1013 outdated = pushop.outdatedphases
1017 else:
1014 else:
1018 outdated = pushop.fallbackoutdatedphases
1015 outdated = pushop.fallbackoutdatedphases
1019
1016
1020 pushop.stepsdone.add('phases')
1017 pushop.stepsdone.add('phases')
1021
1018
1022 # filter heads already turned public by the push
1019 # filter heads already turned public by the push
1023 outdated = [c for c in outdated if c.node() not in pheads]
1020 outdated = [c for c in outdated if c.node() not in pheads]
1024 # fallback to independent pushkey command
1021 # fallback to independent pushkey command
1025 for newremotehead in outdated:
1022 for newremotehead in outdated:
1026 r = pushop.remote.pushkey('phases',
1023 r = pushop.remote.pushkey('phases',
1027 newremotehead.hex(),
1024 newremotehead.hex(),
1028 str(phases.draft),
1025 str(phases.draft),
1029 str(phases.public))
1026 str(phases.public))
1030 if not r:
1027 if not r:
1031 pushop.ui.warn(_('updating %s to public failed!\n')
1028 pushop.ui.warn(_('updating %s to public failed!\n')
1032 % newremotehead)
1029 % newremotehead)
1033
1030
1034 def _localphasemove(pushop, nodes, phase=phases.public):
1031 def _localphasemove(pushop, nodes, phase=phases.public):
1035 """move <nodes> to <phase> in the local source repo"""
1032 """move <nodes> to <phase> in the local source repo"""
1036 if pushop.trmanager:
1033 if pushop.trmanager:
1037 phases.advanceboundary(pushop.repo,
1034 phases.advanceboundary(pushop.repo,
1038 pushop.trmanager.transaction(),
1035 pushop.trmanager.transaction(),
1039 phase,
1036 phase,
1040 nodes)
1037 nodes)
1041 else:
1038 else:
1042 # repo is not locked, do not change any phases!
1039 # repo is not locked, do not change any phases!
1043 # Informs the user that phases should have been moved when
1040 # Informs the user that phases should have been moved when
1044 # applicable.
1041 # applicable.
1045 actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
1042 actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
1046 phasestr = phases.phasenames[phase]
1043 phasestr = phases.phasenames[phase]
1047 if actualmoves:
1044 if actualmoves:
1048 pushop.ui.status(_('cannot lock source repo, skipping '
1045 pushop.ui.status(_('cannot lock source repo, skipping '
1049 'local %s phase update\n') % phasestr)
1046 'local %s phase update\n') % phasestr)
1050
1047
1051 def _pushobsolete(pushop):
1048 def _pushobsolete(pushop):
1052 """utility function to push obsolete markers to a remote"""
1049 """utility function to push obsolete markers to a remote"""
1053 if 'obsmarkers' in pushop.stepsdone:
1050 if 'obsmarkers' in pushop.stepsdone:
1054 return
1051 return
1055 repo = pushop.repo
1052 repo = pushop.repo
1056 remote = pushop.remote
1053 remote = pushop.remote
1057 pushop.stepsdone.add('obsmarkers')
1054 pushop.stepsdone.add('obsmarkers')
1058 if pushop.outobsmarkers:
1055 if pushop.outobsmarkers:
1059 pushop.ui.debug('try to push obsolete markers to remote\n')
1056 pushop.ui.debug('try to push obsolete markers to remote\n')
1060 rslts = []
1057 rslts = []
1061 remotedata = obsolete._pushkeyescape(sorted(pushop.outobsmarkers))
1058 remotedata = obsolete._pushkeyescape(sorted(pushop.outobsmarkers))
1062 for key in sorted(remotedata, reverse=True):
1059 for key in sorted(remotedata, reverse=True):
1063 # reverse sort to ensure we end with dump0
1060 # reverse sort to ensure we end with dump0
1064 data = remotedata[key]
1061 data = remotedata[key]
1065 rslts.append(remote.pushkey('obsolete', key, '', data))
1062 rslts.append(remote.pushkey('obsolete', key, '', data))
1066 if [r for r in rslts if not r]:
1063 if [r for r in rslts if not r]:
1067 msg = _('failed to push some obsolete markers!\n')
1064 msg = _('failed to push some obsolete markers!\n')
1068 repo.ui.warn(msg)
1065 repo.ui.warn(msg)
1069
1066
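The reverse sort above ensures the transfer ends with 'dump0', as the inline comment notes; a small demonstration with made-up key names:

remotedata = {'dump0': 'first chunk', 'dump1': 'second chunk',
              'dump2': 'third chunk'}
order = sorted(remotedata, reverse=True)
print(order)    # ['dump2', 'dump1', 'dump0'] -- ends with dump0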
1070 def _pushbookmark(pushop):
1067 def _pushbookmark(pushop):
1071 """Update bookmark position on remote"""
1068 """Update bookmark position on remote"""
1072 if pushop.cgresult == 0 or 'bookmarks' in pushop.stepsdone:
1069 if pushop.cgresult == 0 or 'bookmarks' in pushop.stepsdone:
1073 return
1070 return
1074 pushop.stepsdone.add('bookmarks')
1071 pushop.stepsdone.add('bookmarks')
1075 ui = pushop.ui
1072 ui = pushop.ui
1076 remote = pushop.remote
1073 remote = pushop.remote
1077
1074
1078 for b, old, new in pushop.outbookmarks:
1075 for b, old, new in pushop.outbookmarks:
1079 action = 'update'
1076 action = 'update'
1080 if not old:
1077 if not old:
1081 action = 'export'
1078 action = 'export'
1082 elif not new:
1079 elif not new:
1083 action = 'delete'
1080 action = 'delete'
1084 if remote.pushkey('bookmarks', b, old, new):
1081 if remote.pushkey('bookmarks', b, old, new):
1085 ui.status(bookmsgmap[action][0] % b)
1082 ui.status(bookmsgmap[action][0] % b)
1086 else:
1083 else:
1087 ui.warn(bookmsgmap[action][1] % b)
1084 ui.warn(bookmsgmap[action][1] % b)
1088 # discovery may have set the value from an invalid entry
1085 # discovery may have set the value from an invalid entry
1089 if pushop.bkresult is not None:
1086 if pushop.bkresult is not None:
1090 pushop.bkresult = 1
1087 pushop.bkresult = 1
1091
1088
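Both the bundle2 path earlier and this legacy path pick the bookmark action the same way: no previous value means 'export', no new value means 'delete', otherwise 'update'. A tiny standalone version of that decision (the hash strings are placeholders):

def bookmarkaction(old, new):
    if not old:
        return 'export'
    if not new:
        return 'delete'
    return 'update'

assert bookmarkaction('', 'cafebabe') == 'export'
assert bookmarkaction('cafebabe', '') == 'delete'
assert bookmarkaction('cafebabe', 'deadbeef') == 'update'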
1092 class pulloperation(object):
1089 class pulloperation(object):
1093 """A object that represent a single pull operation
1090 """A object that represent a single pull operation
1094
1091
1095 It purpose is to carry pull related state and very common operation.
1092 It purpose is to carry pull related state and very common operation.
1096
1093
1097 A new should be created at the beginning of each pull and discarded
1094 A new should be created at the beginning of each pull and discarded
1098 afterward.
1095 afterward.
1099 """
1096 """
1100
1097
1101 def __init__(self, repo, remote, heads=None, force=False, bookmarks=(),
1098 def __init__(self, repo, remote, heads=None, force=False, bookmarks=(),
1102 remotebookmarks=None, streamclonerequested=None):
1099 remotebookmarks=None, streamclonerequested=None):
1103 # repo we pull into
1100 # repo we pull into
1104 self.repo = repo
1101 self.repo = repo
1105 # repo we pull from
1102 # repo we pull from
1106 self.remote = remote
1103 self.remote = remote
1107 # revision we try to pull (None is "all")
1104 # revision we try to pull (None is "all")
1108 self.heads = heads
1105 self.heads = heads
1110 # bookmarks pulled explicitly
1107 # bookmarks pulled explicitly
1110 self.explicitbookmarks = [repo._bookmarks.expandname(bookmark)
1107 self.explicitbookmarks = [repo._bookmarks.expandname(bookmark)
1111 for bookmark in bookmarks]
1108 for bookmark in bookmarks]
1112 # do we force pull?
1109 # do we force pull?
1113 self.force = force
1110 self.force = force
1114 # whether a streaming clone was requested
1111 # whether a streaming clone was requested
1115 self.streamclonerequested = streamclonerequested
1112 self.streamclonerequested = streamclonerequested
1116 # transaction manager
1113 # transaction manager
1117 self.trmanager = None
1114 self.trmanager = None
1118 # set of common changesets between local and remote before pull
1115 # set of common changesets between local and remote before pull
1119 self.common = None
1116 self.common = None
1120 # set of pulled heads
1117 # set of pulled heads
1121 self.rheads = None
1118 self.rheads = None
1122 # list of missing changesets to fetch remotely
1119 # list of missing changesets to fetch remotely
1123 self.fetch = None
1120 self.fetch = None
1124 # remote bookmarks data
1121 # remote bookmarks data
1125 self.remotebookmarks = remotebookmarks
1122 self.remotebookmarks = remotebookmarks
1126 # result of changegroup pulling (used as return code by pull)
1123 # result of changegroup pulling (used as return code by pull)
1127 self.cgresult = None
1124 self.cgresult = None
1128 # list of steps already done
1125 # list of steps already done
1129 self.stepsdone = set()
1126 self.stepsdone = set()
1130 # Whether we attempted a clone from pre-generated bundles.
1127 # Whether we attempted a clone from pre-generated bundles.
1131 self.clonebundleattempted = False
1128 self.clonebundleattempted = False
1132
1129
1133 @util.propertycache
1130 @util.propertycache
1134 def pulledsubset(self):
1131 def pulledsubset(self):
1135 """heads of the set of changeset target by the pull"""
1132 """heads of the set of changeset target by the pull"""
1136 # compute target subset
1133 # compute target subset
1137 if self.heads is None:
1134 if self.heads is None:
1138 # We pulled everything possible
1135 # We pulled everything possible
1139 # sync on everything common
1136 # sync on everything common
1140 c = set(self.common)
1137 c = set(self.common)
1141 ret = list(self.common)
1138 ret = list(self.common)
1142 for n in self.rheads:
1139 for n in self.rheads:
1143 if n not in c:
1140 if n not in c:
1144 ret.append(n)
1141 ret.append(n)
1145 return ret
1142 return ret
1146 else:
1143 else:
1147 # We pulled a specific subset
1144 # We pulled a specific subset
1148 # sync on this subset
1145 # sync on this subset
1149 return self.heads
1146 return self.heads
1150
1147
1151 @util.propertycache
1148 @util.propertycache
1152 def canusebundle2(self):
1149 def canusebundle2(self):
1153 return not _forcebundle1(self)
1150 return not _forcebundle1(self)
1154
1151
1155 @util.propertycache
1152 @util.propertycache
1156 def remotebundle2caps(self):
1153 def remotebundle2caps(self):
1157 return bundle2.bundle2caps(self.remote)
1154 return bundle2.bundle2caps(self.remote)
1158
1155
1159 def gettransaction(self):
1156 def gettransaction(self):
1160 # deprecated; talk to trmanager directly
1157 # deprecated; talk to trmanager directly
1161 return self.trmanager.transaction()
1158 return self.trmanager.transaction()
1162
1159
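The pulledsubset property above reduces to a small set computation; a standalone sketch with plain strings standing in for node ids:

def pulledsubset(heads, common, rheads):
    # no explicit heads: sync on everything common plus the new remote heads
    if heads is None:
        c = set(common)
        return list(common) + [n for n in rheads if n not in c]
    # explicit heads: sync on exactly that subset
    return heads

print(pulledsubset(None, ['a', 'b'], ['b', 'c']))   # ['a', 'b', 'c']
print(pulledsubset(['x'], ['a', 'b'], ['b', 'c']))  # ['x']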
1163 class transactionmanager(object):
1160 class transactionmanager(object):
1164 """An object to manage the life cycle of a transaction
1161 """An object to manage the life cycle of a transaction
1165
1162
1166 It creates the transaction on demand and calls the appropriate hooks when
1163 It creates the transaction on demand and calls the appropriate hooks when
1167 closing the transaction."""
1164 closing the transaction."""
1168 def __init__(self, repo, source, url):
1165 def __init__(self, repo, source, url):
1169 self.repo = repo
1166 self.repo = repo
1170 self.source = source
1167 self.source = source
1171 self.url = url
1168 self.url = url
1172 self._tr = None
1169 self._tr = None
1173
1170
1174 def transaction(self):
1171 def transaction(self):
1175 """Return an open transaction object, constructing if necessary"""
1172 """Return an open transaction object, constructing if necessary"""
1176 if not self._tr:
1173 if not self._tr:
1177 trname = '%s\n%s' % (self.source, util.hidepassword(self.url))
1174 trname = '%s\n%s' % (self.source, util.hidepassword(self.url))
1178 self._tr = self.repo.transaction(trname)
1175 self._tr = self.repo.transaction(trname)
1179 self._tr.hookargs['source'] = self.source
1176 self._tr.hookargs['source'] = self.source
1180 self._tr.hookargs['url'] = self.url
1177 self._tr.hookargs['url'] = self.url
1181 return self._tr
1178 return self._tr
1182
1179
1183 def close(self):
1180 def close(self):
1184 """close transaction if created"""
1181 """close transaction if created"""
1185 if self._tr is not None:
1182 if self._tr is not None:
1186 self._tr.close()
1183 self._tr.close()
1187
1184
1188 def release(self):
1185 def release(self):
1189 """release transaction if created"""
1186 """release transaction if created"""
1190 if self._tr is not None:
1187 if self._tr is not None:
1191 self._tr.release()
1188 self._tr.release()
1192
1189
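transactionmanager creates its transaction lazily on first use; below is a toy equivalent showing the create-on-demand / close / release life cycle. The dict standing in for a real transaction, and the printed messages, are invented for illustration.

class lazytxn(object):
    def __init__(self):
        self._tr = None

    def transaction(self):
        if self._tr is None:
            print('opening transaction')
            self._tr = {'open': True}
        return self._tr

    def close(self):
        if self._tr is not None:
            print('committing transaction')

    def release(self):
        if self._tr is not None:
            print('rolling back if not committed')

tm = lazytxn()
try:
    tm.transaction()     # opens on first call
    tm.transaction()     # reused on later calls
    tm.close()
finally:
    tm.release()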
1193 def pull(repo, remote, heads=None, force=False, bookmarks=(), opargs=None,
1190 def pull(repo, remote, heads=None, force=False, bookmarks=(), opargs=None,
1194 streamclonerequested=None):
1191 streamclonerequested=None):
1195 """Fetch repository data from a remote.
1192 """Fetch repository data from a remote.
1196
1193
1197 This is the main function used to retrieve data from a remote repository.
1194 This is the main function used to retrieve data from a remote repository.
1198
1195
1199 ``repo`` is the local repository to clone into.
1196 ``repo`` is the local repository to clone into.
1200 ``remote`` is a peer instance.
1197 ``remote`` is a peer instance.
1201 ``heads`` is an iterable of revisions we want to pull. ``None`` (the
1198 ``heads`` is an iterable of revisions we want to pull. ``None`` (the
1202 default) means to pull everything from the remote.
1199 default) means to pull everything from the remote.
1203 ``bookmarks`` is an iterable of bookmarks requesting to be pulled. By
1200 ``bookmarks`` is an iterable of bookmarks requesting to be pulled. By
1204 default, all remote bookmarks are pulled.
1201 default, all remote bookmarks are pulled.
1205 ``opargs`` are additional keyword arguments to pass to ``pulloperation``
1202 ``opargs`` are additional keyword arguments to pass to ``pulloperation``
1206 initialization.
1203 initialization.
1207 ``streamclonerequested`` is a boolean indicating whether a "streaming
1204 ``streamclonerequested`` is a boolean indicating whether a "streaming
1208 clone" is requested. A "streaming clone" is essentially a raw file copy
1205 clone" is requested. A "streaming clone" is essentially a raw file copy
1209 of revlogs from the server. This only works when the local repository is
1206 of revlogs from the server. This only works when the local repository is
1210 empty. The default value of ``None`` means to respect the server
1207 empty. The default value of ``None`` means to respect the server
1211 configuration for preferring stream clones.
1208 configuration for preferring stream clones.
1212
1209
1213 Returns the ``pulloperation`` created for this pull.
1210 Returns the ``pulloperation`` created for this pull.
1214 """
1211 """
1215 if opargs is None:
1212 if opargs is None:
1216 opargs = {}
1213 opargs = {}
1217 pullop = pulloperation(repo, remote, heads, force, bookmarks=bookmarks,
1214 pullop = pulloperation(repo, remote, heads, force, bookmarks=bookmarks,
1218 streamclonerequested=streamclonerequested, **opargs)
1215 streamclonerequested=streamclonerequested, **opargs)
1219 if pullop.remote.local():
1216 if pullop.remote.local():
1220 missing = set(pullop.remote.requirements) - pullop.repo.supported
1217 missing = set(pullop.remote.requirements) - pullop.repo.supported
1221 if missing:
1218 if missing:
1222 msg = _("required features are not"
1219 msg = _("required features are not"
1223 " supported in the destination:"
1220 " supported in the destination:"
1224 " %s") % (', '.join(sorted(missing)))
1221 " %s") % (', '.join(sorted(missing)))
1225 raise error.Abort(msg)
1222 raise error.Abort(msg)
1226
1223
1227 wlock = lock = None
1224 wlock = lock = None
1228 try:
1225 try:
1229 wlock = pullop.repo.wlock()
1226 wlock = pullop.repo.wlock()
1230 lock = pullop.repo.lock()
1227 lock = pullop.repo.lock()
1231 pullop.trmanager = transactionmanager(repo, 'pull', remote.url())
1228 pullop.trmanager = transactionmanager(repo, 'pull', remote.url())
1232 streamclone.maybeperformlegacystreamclone(pullop)
1229 streamclone.maybeperformlegacystreamclone(pullop)
1233 # This should ideally be in _pullbundle2(). However, it needs to run
1230 # This should ideally be in _pullbundle2(). However, it needs to run
1234 # before discovery to avoid extra work.
1231 # before discovery to avoid extra work.
1235 _maybeapplyclonebundle(pullop)
1232 _maybeapplyclonebundle(pullop)
1236 _pulldiscovery(pullop)
1233 _pulldiscovery(pullop)
1237 if pullop.canusebundle2:
1234 if pullop.canusebundle2:
1238 _pullbundle2(pullop)
1235 _pullbundle2(pullop)
1239 _pullchangeset(pullop)
1236 _pullchangeset(pullop)
1240 _pullphase(pullop)
1237 _pullphase(pullop)
1241 _pullbookmarks(pullop)
1238 _pullbookmarks(pullop)
1242 _pullobsolete(pullop)
1239 _pullobsolete(pullop)
1243 pullop.trmanager.close()
1240 pullop.trmanager.close()
1244 finally:
1241 finally:
1245 lockmod.release(pullop.trmanager, lock, wlock)
1242 lockmod.release(pullop.trmanager, lock, wlock)
1246
1243
1247 return pullop
1244 return pullop
1248
1245
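pull() takes the wlock, then the store lock, and hands everything to lockmod.release() in reverse order in the finally block. A self-contained imitation of that acquire/release discipline, using fake lock objects rather than real repository locks:

class fakelock(object):
    def __init__(self, name):
        self.name = name
        print('acquired', name)

    def release(self):
        print('released', self.name)

def release(*locks):
    # like lockmod.release(): release whatever was acquired, skip None
    for l in locks:
        if l is not None:
            l.release()

wlock = lock = None
try:
    wlock = fakelock('wlock')
    lock = fakelock('lock')
    print('discovery and pull steps run here')
finally:
    release(lock, wlock)   # reverse of acquisition order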
1249 # list of steps to perform discovery before pull
1246 # list of steps to perform discovery before pull
1250 pulldiscoveryorder = []
1247 pulldiscoveryorder = []
1251
1248
1252 # Mapping between step name and function
1249 # Mapping between step name and function
1253 #
1250 #
1254 # This exists to help extensions wrap steps if necessary
1251 # This exists to help extensions wrap steps if necessary
1255 pulldiscoverymapping = {}
1252 pulldiscoverymapping = {}
1256
1253
1257 def pulldiscovery(stepname):
1254 def pulldiscovery(stepname):
1258 """decorator for function performing discovery before pull
1255 """decorator for function performing discovery before pull
1259
1256
1260 The function is added to the step -> function mapping and appended to the
1257 The function is added to the step -> function mapping and appended to the
1261 list of steps. Beware that decorated functions will be added in order (this
1258 list of steps. Beware that decorated functions will be added in order (this
1262 may matter).
1259 may matter).
1263
1260
1264 You can only use this decorator for a new step; if you want to wrap a step
1261 You can only use this decorator for a new step; if you want to wrap a step
1265 from an extension, change the pulldiscovery dictionary directly."""
1262 from an extension, change the pulldiscovery dictionary directly."""
1266 def dec(func):
1263 def dec(func):
1267 assert stepname not in pulldiscoverymapping
1264 assert stepname not in pulldiscoverymapping
1268 pulldiscoverymapping[stepname] = func
1265 pulldiscoverymapping[stepname] = func
1269 pulldiscoveryorder.append(stepname)
1266 pulldiscoveryorder.append(stepname)
1270 return func
1267 return func
1271 return dec
1268 return dec
1272
1269
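The decorator above is just an ordered registry. The snippet below re-creates it in isolation and registers a made-up step to show how _pulldiscovery() then runs the steps in registration order:

pulldiscoveryorder = []
pulldiscoverymapping = {}

def pulldiscovery(stepname):
    def dec(func):
        assert stepname not in pulldiscoverymapping
        pulldiscoverymapping[stepname] = func
        pulldiscoveryorder.append(stepname)
        return func
    return dec

@pulldiscovery('example-step')          # hypothetical step name
def _pulldiscoveryexample(pullop):
    print('example discovery step ran')

for stepname in pulldiscoveryorder:     # what _pulldiscovery() does
    pulldiscoverymapping[stepname](None)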
1273 def _pulldiscovery(pullop):
1270 def _pulldiscovery(pullop):
1274 """Run all discovery steps"""
1271 """Run all discovery steps"""
1275 for stepname in pulldiscoveryorder:
1272 for stepname in pulldiscoveryorder:
1276 step = pulldiscoverymapping[stepname]
1273 step = pulldiscoverymapping[stepname]
1277 step(pullop)
1274 step(pullop)
1278
1275
1279 @pulldiscovery('b1:bookmarks')
1276 @pulldiscovery('b1:bookmarks')
1280 def _pullbookmarkbundle1(pullop):
1277 def _pullbookmarkbundle1(pullop):
1281 """fetch bookmark data in bundle1 case
1278 """fetch bookmark data in bundle1 case
1282
1279
1283 If not using bundle2, we have to fetch bookmarks before changeset
1280 If not using bundle2, we have to fetch bookmarks before changeset
1284 discovery to reduce the chance and impact of race conditions."""
1281 discovery to reduce the chance and impact of race conditions."""
1285 if pullop.remotebookmarks is not None:
1282 if pullop.remotebookmarks is not None:
1286 return
1283 return
1287 if pullop.canusebundle2 and 'listkeys' in pullop.remotebundle2caps:
1284 if pullop.canusebundle2 and 'listkeys' in pullop.remotebundle2caps:
1288 # all known bundle2 servers now support listkeys, but lets be nice with
1285 # all known bundle2 servers now support listkeys, but lets be nice with
1289 # new implementation.
1286 # new implementation.
1290 return
1287 return
1291 pullop.remotebookmarks = pullop.remote.listkeys('bookmarks')
1288 pullop.remotebookmarks = pullop.remote.listkeys('bookmarks')
1292
1289
1293
1290
1294 @pulldiscovery('changegroup')
1291 @pulldiscovery('changegroup')
1295 def _pulldiscoverychangegroup(pullop):
1292 def _pulldiscoverychangegroup(pullop):
1296 """discovery phase for the pull
1293 """discovery phase for the pull
1297
1294
1298 Currently handles changeset discovery only; will change to handle all
1295 Currently handles changeset discovery only; will change to handle all
1299 discovery at some point."""
1296 discovery at some point."""
1300 tmp = discovery.findcommonincoming(pullop.repo,
1297 tmp = discovery.findcommonincoming(pullop.repo,
1301 pullop.remote,
1298 pullop.remote,
1302 heads=pullop.heads,
1299 heads=pullop.heads,
1303 force=pullop.force)
1300 force=pullop.force)
1304 common, fetch, rheads = tmp
1301 common, fetch, rheads = tmp
1305 nm = pullop.repo.unfiltered().changelog.nodemap
1302 nm = pullop.repo.unfiltered().changelog.nodemap
1306 if fetch and rheads:
1303 if fetch and rheads:
1307 # If a remote head is filtered locally, let's drop it from the unknown
1304 # If a remote head is filtered locally, let's drop it from the unknown
1308 # remote heads and put it back in common.
1305 # remote heads and put it back in common.
1309 #
1306 #
1310 # This is a hackish solution to catch most of the "common but locally
1307 # This is a hackish solution to catch most of the "common but locally
1311 # hidden" situations. We do not perform discovery on the unfiltered
1308 # hidden" situations. We do not perform discovery on the unfiltered
1312 # repository because it ends up doing a pathological number of round
1309 # repository because it ends up doing a pathological number of round
1313 # trips for a huge number of changesets we do not care about.
1310 # trips for a huge number of changesets we do not care about.
1314 #
1311 #
1315 # If a set of such "common but filtered" changesets exists on the server
1312 # If a set of such "common but filtered" changesets exists on the server
1316 # but does not include a remote head, we will not be able to detect it,
1313 # but does not include a remote head, we will not be able to detect it,
1317 scommon = set(common)
1314 scommon = set(common)
1318 filteredrheads = []
1315 filteredrheads = []
1319 for n in rheads:
1316 for n in rheads:
1320 if n in nm:
1317 if n in nm:
1321 if n not in scommon:
1318 if n not in scommon:
1322 common.append(n)
1319 common.append(n)
1323 else:
1320 else:
1324 filteredrheads.append(n)
1321 filteredrheads.append(n)
1325 if not filteredrheads:
1322 if not filteredrheads:
1326 fetch = []
1323 fetch = []
1327 rheads = filteredrheads
1324 rheads = filteredrheads
1328 pullop.common = common
1325 pullop.common = common
1329 pullop.fetch = fetch
1326 pullop.fetch = fetch
1330 pullop.rheads = rheads
1327 pullop.rheads = rheads
1331
1328
1332 def _pullbundle2(pullop):
1329 def _pullbundle2(pullop):
1333 """pull data using bundle2
1330 """pull data using bundle2
1334
1331
1335 For now, the only supported data are changegroup."""
1332 For now, the only supported data are changegroup."""
1336 kwargs = {'bundlecaps': caps20to10(pullop.repo)}
1333 kwargs = {'bundlecaps': caps20to10(pullop.repo)}
1337
1334
1338 streaming, streamreqs = streamclone.canperformstreamclone(pullop)
1335 streaming, streamreqs = streamclone.canperformstreamclone(pullop)
1339
1336
1340 # pulling changegroup
1337 # pulling changegroup
1341 pullop.stepsdone.add('changegroup')
1338 pullop.stepsdone.add('changegroup')
1342
1339
1343 kwargs['common'] = pullop.common
1340 kwargs['common'] = pullop.common
1344 kwargs['heads'] = pullop.heads or pullop.rheads
1341 kwargs['heads'] = pullop.heads or pullop.rheads
1345 kwargs['cg'] = pullop.fetch
1342 kwargs['cg'] = pullop.fetch
1346 if 'listkeys' in pullop.remotebundle2caps:
1343 if 'listkeys' in pullop.remotebundle2caps:
1347 kwargs['listkeys'] = ['phases']
1344 kwargs['listkeys'] = ['phases']
1348 if pullop.remotebookmarks is None:
1345 if pullop.remotebookmarks is None:
1349 # make sure to always include bookmark data when migrating
1346 # make sure to always include bookmark data when migrating
1350 # `hg incoming --bundle` to using this function.
1347 # `hg incoming --bundle` to using this function.
1351 kwargs['listkeys'].append('bookmarks')
1348 kwargs['listkeys'].append('bookmarks')
1352
1349
1353 # If this is a full pull / clone and the server supports the clone bundles
1350 # If this is a full pull / clone and the server supports the clone bundles
1354 # feature, tell the server whether we attempted a clone bundle. The
1351 # feature, tell the server whether we attempted a clone bundle. The
1355 # presence of this flag indicates the client supports clone bundles. This
1352 # presence of this flag indicates the client supports clone bundles. This
1356 # will enable the server to treat clients that support clone bundles
1353 # will enable the server to treat clients that support clone bundles
1357 # differently from those that don't.
1354 # differently from those that don't.
1358 if (pullop.remote.capable('clonebundles')
1355 if (pullop.remote.capable('clonebundles')
1359 and pullop.heads is None and list(pullop.common) == [nullid]):
1356 and pullop.heads is None and list(pullop.common) == [nullid]):
1360 kwargs['cbattempted'] = pullop.clonebundleattempted
1357 kwargs['cbattempted'] = pullop.clonebundleattempted
1361
1358
1362 if streaming:
1359 if streaming:
1363 pullop.repo.ui.status(_('streaming all changes\n'))
1360 pullop.repo.ui.status(_('streaming all changes\n'))
1364 elif not pullop.fetch:
1361 elif not pullop.fetch:
1365 pullop.repo.ui.status(_("no changes found\n"))
1362 pullop.repo.ui.status(_("no changes found\n"))
1366 pullop.cgresult = 0
1363 pullop.cgresult = 0
1367 else:
1364 else:
1368 if pullop.heads is None and list(pullop.common) == [nullid]:
1365 if pullop.heads is None and list(pullop.common) == [nullid]:
1369 pullop.repo.ui.status(_("requesting all changes\n"))
1366 pullop.repo.ui.status(_("requesting all changes\n"))
1370 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1367 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1371 remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
1368 remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
1372 if obsolete.commonversion(remoteversions) is not None:
1369 if obsolete.commonversion(remoteversions) is not None:
1373 kwargs['obsmarkers'] = True
1370 kwargs['obsmarkers'] = True
1374 pullop.stepsdone.add('obsmarkers')
1371 pullop.stepsdone.add('obsmarkers')
1375 _pullbundle2extraprepare(pullop, kwargs)
1372 _pullbundle2extraprepare(pullop, kwargs)
1376 bundle = pullop.remote.getbundle('pull', **kwargs)
1373 bundle = pullop.remote.getbundle('pull', **kwargs)
1377 try:
1374 try:
1378 op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
1375 op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
1379 except bundle2.AbortFromPart as exc:
1376 except bundle2.AbortFromPart as exc:
1380 pullop.repo.ui.status(_('remote: abort: %s\n') % exc)
1377 pullop.repo.ui.status(_('remote: abort: %s\n') % exc)
1381 raise error.Abort(_('pull failed on remote'), hint=exc.hint)
1378 raise error.Abort(_('pull failed on remote'), hint=exc.hint)
1382 except error.BundleValueError as exc:
1379 except error.BundleValueError as exc:
1383 raise error.Abort(_('missing support for %s') % exc)
1380 raise error.Abort(_('missing support for %s') % exc)
1384
1381
1385 if pullop.fetch:
1382 if pullop.fetch:
1386 results = [cg['return'] for cg in op.records['changegroup']]
1383 results = [cg['return'] for cg in op.records['changegroup']]
1387 pullop.cgresult = changegroup.combineresults(results)
1384 pullop.cgresult = changegroup.combineresults(results)
1388
1385
1389 # processing phases change
1386 # processing phases change
1390 for namespace, value in op.records['listkeys']:
1387 for namespace, value in op.records['listkeys']:
1391 if namespace == 'phases':
1388 if namespace == 'phases':
1392 _pullapplyphases(pullop, value)
1389 _pullapplyphases(pullop, value)
1393
1390
1394 # processing bookmark update
1391 # processing bookmark update
1395 for namespace, value in op.records['listkeys']:
1392 for namespace, value in op.records['listkeys']:
1396 if namespace == 'bookmarks':
1393 if namespace == 'bookmarks':
1397 pullop.remotebookmarks = value
1394 pullop.remotebookmarks = value
1398
1395
1399 # bookmark data were either already there or pulled in the bundle
1396 # bookmark data were either already there or pulled in the bundle
1400 if pullop.remotebookmarks is not None:
1397 if pullop.remotebookmarks is not None:
1401 _pullbookmarks(pullop)
1398 _pullbookmarks(pullop)
1402
1399
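Stripped of the Mercurial objects, the argument assembly at the top of _pullbundle2 looks roughly like the sketch below; the function name and all sample values are invented placeholders:

def buildgetbundlekwargs(common, heads, rheads, fetch,
                         remotecaps, remotebookmarks):
    kwargs = {'common': common,
              'heads': heads or rheads,
              'cg': bool(fetch)}          # only ask for a changegroup if needed
    if 'listkeys' in remotecaps:
        kwargs['listkeys'] = ['phases']
        if remotebookmarks is None:
            # piggyback bookmark data on the same round trip
            kwargs['listkeys'].append('bookmarks')
    return kwargs

print(buildgetbundlekwargs(['n0'], None, ['n1'], ['n1'],
                           {'listkeys', 'pushkey'}, None))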
1403 def _pullbundle2extraprepare(pullop, kwargs):
1400 def _pullbundle2extraprepare(pullop, kwargs):
1404 """hook function so that extensions can extend the getbundle call"""
1401 """hook function so that extensions can extend the getbundle call"""
1405 pass
1402 pass
1406
1403
1407 def _pullchangeset(pullop):
1404 def _pullchangeset(pullop):
1408 """pull changeset from unbundle into the local repo"""
1405 """pull changeset from unbundle into the local repo"""
1409 # We delay opening the transaction as late as possible so we
1406 # We delay opening the transaction as late as possible so we
1410 # don't open a transaction for nothing and don't break a future
1407 # don't open a transaction for nothing and don't break a future
1411 # useful rollback call
1408 # useful rollback call
1412 if 'changegroup' in pullop.stepsdone:
1409 if 'changegroup' in pullop.stepsdone:
1413 return
1410 return
1414 pullop.stepsdone.add('changegroup')
1411 pullop.stepsdone.add('changegroup')
1415 if not pullop.fetch:
1412 if not pullop.fetch:
1416 pullop.repo.ui.status(_("no changes found\n"))
1413 pullop.repo.ui.status(_("no changes found\n"))
1417 pullop.cgresult = 0
1414 pullop.cgresult = 0
1418 return
1415 return
1419 pullop.gettransaction()
1416 pullop.gettransaction()
1420 if pullop.heads is None and list(pullop.common) == [nullid]:
1417 if pullop.heads is None and list(pullop.common) == [nullid]:
1421 pullop.repo.ui.status(_("requesting all changes\n"))
1418 pullop.repo.ui.status(_("requesting all changes\n"))
1422 elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
1419 elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
1423 # issue1320, avoid a race if remote changed after discovery
1420 # issue1320, avoid a race if remote changed after discovery
1424 pullop.heads = pullop.rheads
1421 pullop.heads = pullop.rheads
1425
1422
1426 if pullop.remote.capable('getbundle'):
1423 if pullop.remote.capable('getbundle'):
1427 # TODO: get bundlecaps from remote
1424 # TODO: get bundlecaps from remote
1428 cg = pullop.remote.getbundle('pull', common=pullop.common,
1425 cg = pullop.remote.getbundle('pull', common=pullop.common,
1429 heads=pullop.heads or pullop.rheads)
1426 heads=pullop.heads or pullop.rheads)
1430 elif pullop.heads is None:
1427 elif pullop.heads is None:
1431 cg = pullop.remote.changegroup(pullop.fetch, 'pull')
1428 cg = pullop.remote.changegroup(pullop.fetch, 'pull')
1432 elif not pullop.remote.capable('changegroupsubset'):
1429 elif not pullop.remote.capable('changegroupsubset'):
1433 raise error.Abort(_("partial pull cannot be done because "
1430 raise error.Abort(_("partial pull cannot be done because "
1434 "other repository doesn't support "
1431 "other repository doesn't support "
1435 "changegroupsubset."))
1432 "changegroupsubset."))
1436 else:
1433 else:
1437 cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
1434 cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
1438 pullop.cgresult = cg.apply(pullop.repo, 'pull', pullop.remote.url())
1435 pullop.cgresult = cg.apply(pullop.repo, 'pull', pullop.remote.url())
1439
1436
1440 def _pullphase(pullop):
1437 def _pullphase(pullop):
1441 # Get remote phases data from remote
1438 # Get remote phases data from remote
1442 if 'phases' in pullop.stepsdone:
1439 if 'phases' in pullop.stepsdone:
1443 return
1440 return
1444 remotephases = pullop.remote.listkeys('phases')
1441 remotephases = pullop.remote.listkeys('phases')
1445 _pullapplyphases(pullop, remotephases)
1442 _pullapplyphases(pullop, remotephases)
1446
1443
1447 def _pullapplyphases(pullop, remotephases):
1444 def _pullapplyphases(pullop, remotephases):
1448 """apply phase movement from observed remote state"""
1445 """apply phase movement from observed remote state"""
1449 if 'phases' in pullop.stepsdone:
1446 if 'phases' in pullop.stepsdone:
1450 return
1447 return
1451 pullop.stepsdone.add('phases')
1448 pullop.stepsdone.add('phases')
1452 publishing = bool(remotephases.get('publishing', False))
1449 publishing = bool(remotephases.get('publishing', False))
1453 if remotephases and not publishing:
1450 if remotephases and not publishing:
1454 # remote is new and non-publishing
1451 # remote is new and non-publishing
1455 pheads, _dr = phases.analyzeremotephases(pullop.repo,
1452 pheads, _dr = phases.analyzeremotephases(pullop.repo,
1456 pullop.pulledsubset,
1453 pullop.pulledsubset,
1457 remotephases)
1454 remotephases)
1458 dheads = pullop.pulledsubset
1455 dheads = pullop.pulledsubset
1459 else:
1456 else:
1460 # Remote is old or publishing all common changesets
1457 # Remote is old or publishing all common changesets
1461 # should be seen as public
1458 # should be seen as public
1462 pheads = pullop.pulledsubset
1459 pheads = pullop.pulledsubset
1463 dheads = []
1460 dheads = []
1464 unfi = pullop.repo.unfiltered()
1461 unfi = pullop.repo.unfiltered()
1465 phase = unfi._phasecache.phase
1462 phase = unfi._phasecache.phase
1466 rev = unfi.changelog.nodemap.get
1463 rev = unfi.changelog.nodemap.get
1467 public = phases.public
1464 public = phases.public
1468 draft = phases.draft
1465 draft = phases.draft
1469
1466
1470 # exclude changesets already public locally and update the others
1467 # exclude changesets already public locally and update the others
1471 pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
1468 pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
1472 if pheads:
1469 if pheads:
1473 tr = pullop.gettransaction()
1470 tr = pullop.gettransaction()
1474 phases.advanceboundary(pullop.repo, tr, public, pheads)
1471 phases.advanceboundary(pullop.repo, tr, public, pheads)
1475
1472
1476 # exclude changesets already draft locally and update the others
1473 # exclude changesets already draft locally and update the others
1477 dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
1474 dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
1478 if dheads:
1475 if dheads:
1479 tr = pullop.gettransaction()
1476 tr = pullop.gettransaction()
1480 phases.advanceboundary(pullop.repo, tr, draft, dheads)
1477 phases.advanceboundary(pullop.repo, tr, draft, dheads)
1481
1478
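The two filtering steps above only move changesets whose local phase is still "higher" (more private) than the target phase. A toy version with integer phases, matching Mercurial's ordering of public=0, draft=1, secret=2 (the node names are placeholders):

PUBLIC, DRAFT, SECRET = 0, 1, 2
localphase = {'n1': PUBLIC, 'n2': DRAFT, 'n3': SECRET}

def needsadvance(nodes, targetphase):
    # keep only nodes that are not already at (or below) the target phase
    return [n for n in nodes if localphase[n] > targetphase]

print(needsadvance(['n1', 'n2', 'n3'], PUBLIC))  # ['n2', 'n3']
print(needsadvance(['n1', 'n2', 'n3'], DRAFT))   # ['n3']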
1482 def _pullbookmarks(pullop):
1479 def _pullbookmarks(pullop):
1483 """process the remote bookmark information to update the local one"""
1480 """process the remote bookmark information to update the local one"""
1484 if 'bookmarks' in pullop.stepsdone:
1481 if 'bookmarks' in pullop.stepsdone:
1485 return
1482 return
1486 pullop.stepsdone.add('bookmarks')
1483 pullop.stepsdone.add('bookmarks')
1487 repo = pullop.repo
1484 repo = pullop.repo
1488 remotebookmarks = pullop.remotebookmarks
1485 remotebookmarks = pullop.remotebookmarks
1489 remotebookmarks = bookmod.unhexlifybookmarks(remotebookmarks)
1486 remotebookmarks = bookmod.unhexlifybookmarks(remotebookmarks)
1490 bookmod.updatefromremote(repo.ui, repo, remotebookmarks,
1487 bookmod.updatefromremote(repo.ui, repo, remotebookmarks,
1491 pullop.remote.url(),
1488 pullop.remote.url(),
1492 pullop.gettransaction,
1489 pullop.gettransaction,
1493 explicit=pullop.explicitbookmarks)
1490 explicit=pullop.explicitbookmarks)
1494
1491
1495 def _pullobsolete(pullop):
1492 def _pullobsolete(pullop):
1496 """utility function to pull obsolete markers from a remote
1493 """utility function to pull obsolete markers from a remote
1497
1494
1498 The `gettransaction` is a function that returns the pull transaction, creating
1495 The `gettransaction` is a function that returns the pull transaction, creating
1499 one if necessary. We return the transaction to inform the calling code that
1496 one if necessary. We return the transaction to inform the calling code that
1500 a new transaction has been created (when applicable).
1497 a new transaction has been created (when applicable).
1501
1498
1502 Exists mostly to allow overriding for experimentation purposes"""
1499 Exists mostly to allow overriding for experimentation purposes"""
1503 if 'obsmarkers' in pullop.stepsdone:
1500 if 'obsmarkers' in pullop.stepsdone:
1504 return
1501 return
1505 pullop.stepsdone.add('obsmarkers')
1502 pullop.stepsdone.add('obsmarkers')
1506 tr = None
1503 tr = None
1507 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1504 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1508 pullop.repo.ui.debug('fetching remote obsolete markers\n')
1505 pullop.repo.ui.debug('fetching remote obsolete markers\n')
1509 remoteobs = pullop.remote.listkeys('obsolete')
1506 remoteobs = pullop.remote.listkeys('obsolete')
1510 if 'dump0' in remoteobs:
1507 if 'dump0' in remoteobs:
1511 tr = pullop.gettransaction()
1508 tr = pullop.gettransaction()
1512 markers = []
1509 markers = []
1513 for key in sorted(remoteobs, reverse=True):
1510 for key in sorted(remoteobs, reverse=True):
1514 if key.startswith('dump'):
1511 if key.startswith('dump'):
1515 data = base85.b85decode(remoteobs[key])
1512 data = base85.b85decode(remoteobs[key])
1516 version, newmarks = obsolete._readmarkers(data)
1513 version, newmarks = obsolete._readmarkers(data)
1517 markers += newmarks
1514 markers += newmarks
1518 if markers:
1515 if markers:
1519 pullop.repo.obsstore.add(tr, markers)
1516 pullop.repo.obsstore.add(tr, markers)
1520 pullop.repo.invalidatevolatilesets()
1517 pullop.repo.invalidatevolatilesets()
1521 return tr
1518 return tr
1522
1519
1523 def caps20to10(repo):
1520 def caps20to10(repo):
1524 """return a set with appropriate options to use bundle20 during getbundle"""
1521 """return a set with appropriate options to use bundle20 during getbundle"""
1525 caps = set(['HG20'])
1522 caps = set(['HG20'])
1526 capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo))
1523 capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo))
1527 caps.add('bundle2=' + urlreq.quote(capsblob))
1524 caps.add('bundle2=' + urlreq.quote(capsblob))
1528 return caps
1525 return caps
1529
1526
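The capability set built by caps20to10 ends up looking roughly like the following; urllib's quote stands in for urlreq.quote and the caps blob is a made-up placeholder for what bundle2.encodecaps() would produce:

from urllib.parse import quote

capsblob = 'HG20\nchangegroup=01,02'        # invented example blob
caps = {'HG20', 'bundle2=' + quote(capsblob)}
print(sorted(caps))
# ['HG20', 'bundle2=HG20%0Achangegroup%3D01%2C02']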
1530 # List of names of steps to perform for a bundle2 for getbundle, order matters.
1527 # List of names of steps to perform for a bundle2 for getbundle, order matters.
1531 getbundle2partsorder = []
1528 getbundle2partsorder = []
1532
1529
1533 # Mapping between step name and function
1530 # Mapping between step name and function
1534 #
1531 #
1535 # This exists to help extensions wrap steps if necessary
1532 # This exists to help extensions wrap steps if necessary
1536 getbundle2partsmapping = {}
1533 getbundle2partsmapping = {}
1537
1534
1538 def getbundle2partsgenerator(stepname, idx=None):
1535 def getbundle2partsgenerator(stepname, idx=None):
1539 """decorator for function generating bundle2 part for getbundle
1536 """decorator for function generating bundle2 part for getbundle
1540
1537
1541 The function is added to the step -> function mapping and appended to the
1538 The function is added to the step -> function mapping and appended to the
1542 list of steps. Beware that decorated functions will be added in order
1539 list of steps. Beware that decorated functions will be added in order
1543 (this may matter).
1540 (this may matter).
1544
1541
1545 You can only use this decorator for new steps; if you want to wrap a step
1542 You can only use this decorator for new steps; if you want to wrap a step
1546 from an extension, change the getbundle2partsmapping dictionary directly."""
1543 from an extension, change the getbundle2partsmapping dictionary directly."""
1547 def dec(func):
1544 def dec(func):
1548 assert stepname not in getbundle2partsmapping
1545 assert stepname not in getbundle2partsmapping
1549 getbundle2partsmapping[stepname] = func
1546 getbundle2partsmapping[stepname] = func
1550 if idx is None:
1547 if idx is None:
1551 getbundle2partsorder.append(stepname)
1548 getbundle2partsorder.append(stepname)
1552 else:
1549 else:
1553 getbundle2partsorder.insert(idx, stepname)
1550 getbundle2partsorder.insert(idx, stepname)
1554 return func
1551 return func
1555 return dec
1552 return dec
1556
1553
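Like pulldiscovery(), this decorator maintains an ordered registry, but the optional idx lets a part generator be forced to an earlier position. A standalone re-creation with hypothetical step names:

getbundle2partsorder = []
getbundle2partsmapping = {}

def getbundle2partsgenerator(stepname, idx=None):
    def dec(func):
        assert stepname not in getbundle2partsmapping
        getbundle2partsmapping[stepname] = func
        if idx is None:
            getbundle2partsorder.append(stepname)
        else:
            getbundle2partsorder.insert(idx, stepname)
        return func
    return dec

@getbundle2partsgenerator('later')
def _later(bundler, repo, source, **kwargs):
    pass

@getbundle2partsgenerator('first', idx=0)   # jump the queue
def _first(bundler, repo, source, **kwargs):
    pass

print(getbundle2partsorder)   # ['first', 'later']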
1557 def bundle2requested(bundlecaps):
1554 def bundle2requested(bundlecaps):
1558 if bundlecaps is not None:
1555 if bundlecaps is not None:
1559 return any(cap.startswith('HG2') for cap in bundlecaps)
1556 return any(cap.startswith('HG2') for cap in bundlecaps)
1560 return False
1557 return False
1561
1558
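bundle2requested() is the whole format switch for getbundlechunks(): any advertised capability starting with 'HG2' selects the bundle20 path. A quick demonstration using a copy of the function shown above (the capability strings are examples):

def bundle2requested(bundlecaps):
    if bundlecaps is not None:
        return any(cap.startswith('HG2') for cap in bundlecaps)
    return False

print(bundle2requested(None))                      # False -> bundle10
print(bundle2requested({'some-other-cap'}))        # False -> bundle10
print(bundle2requested({'HG20', 'bundle2=...'}))   # True  -> bundle20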
1562 def getbundlechunks(repo, source, heads=None, common=None, bundlecaps=None,
1559 def getbundlechunks(repo, source, heads=None, common=None, bundlecaps=None,
1563 **kwargs):
1560 **kwargs):
1564 """Return chunks constituting a bundle's raw data.
1561 """Return chunks constituting a bundle's raw data.
1565
1562
1566 Could be a bundle HG10 or a bundle HG20 depending on bundlecaps
1563 Could be a bundle HG10 or a bundle HG20 depending on bundlecaps
1567 passed.
1564 passed.
1568
1565
1569 Returns an iterator over raw chunks (of varying sizes).
1566 Returns an iterator over raw chunks (of varying sizes).
1570 """
1567 """
1571 usebundle2 = bundle2requested(bundlecaps)
1568 usebundle2 = bundle2requested(bundlecaps)
1572 # bundle10 case
1569 # bundle10 case
1573 if not usebundle2:
1570 if not usebundle2:
1574 if bundlecaps and not kwargs.get('cg', True):
1571 if bundlecaps and not kwargs.get('cg', True):
1575 raise ValueError(_('request for bundle10 must include changegroup'))
1572 raise ValueError(_('request for bundle10 must include changegroup'))
1576
1573
1577 if kwargs:
1574 if kwargs:
1578 raise ValueError(_('unsupported getbundle arguments: %s')
1575 raise ValueError(_('unsupported getbundle arguments: %s')
1579 % ', '.join(sorted(kwargs.keys())))
1576 % ', '.join(sorted(kwargs.keys())))
1580 outgoing = _computeoutgoing(repo, heads, common)
1577 outgoing = _computeoutgoing(repo, heads, common)
1581 bundler = changegroup.getbundler('01', repo, bundlecaps)
1578 bundler = changegroup.getbundler('01', repo)
1582 return changegroup.getsubsetraw(repo, outgoing, bundler, source)
1579 return changegroup.getsubsetraw(repo, outgoing, bundler, source)
1583
1580
1584 # bundle20 case
1581 # bundle20 case
1585 b2caps = {}
1582 b2caps = {}
1586 for bcaps in bundlecaps:
1583 for bcaps in bundlecaps:
1587 if bcaps.startswith('bundle2='):
1584 if bcaps.startswith('bundle2='):
1588 blob = urlreq.unquote(bcaps[len('bundle2='):])
1585 blob = urlreq.unquote(bcaps[len('bundle2='):])
1589 b2caps.update(bundle2.decodecaps(blob))
1586 b2caps.update(bundle2.decodecaps(blob))
1590 bundler = bundle2.bundle20(repo.ui, b2caps)
1587 bundler = bundle2.bundle20(repo.ui, b2caps)
1591
1588
1592 kwargs['heads'] = heads
1589 kwargs['heads'] = heads
1593 kwargs['common'] = common
1590 kwargs['common'] = common
1594
1591
1595 for name in getbundle2partsorder:
1592 for name in getbundle2partsorder:
1596 func = getbundle2partsmapping[name]
1593 func = getbundle2partsmapping[name]
1597 func(bundler, repo, source, bundlecaps=bundlecaps, b2caps=b2caps,
1594 func(bundler, repo, source, bundlecaps=bundlecaps, b2caps=b2caps,
1598 **kwargs)
1595 **kwargs)
1599
1596
1600 return bundler.getchunks()
1597 return bundler.getchunks()
1601
1598
1602 @getbundle2partsgenerator('changegroup')
1599 @getbundle2partsgenerator('changegroup')
1603 def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
1600 def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
1604 b2caps=None, heads=None, common=None, **kwargs):
1601 b2caps=None, heads=None, common=None, **kwargs):
1605 """add a changegroup part to the requested bundle"""
1602 """add a changegroup part to the requested bundle"""
1606 cg = None
1603 cg = None
1607 if kwargs.get('cg', True):
1604 if kwargs.get('cg', True):
1608 # build changegroup bundle here.
1605 # build changegroup bundle here.
1609 version = '01'
1606 version = '01'
1610 cgversions = b2caps.get('changegroup')
1607 cgversions = b2caps.get('changegroup')
1611 if cgversions: # 3.1 and 3.2 ship with an empty value
1608 if cgversions: # 3.1 and 3.2 ship with an empty value
1612 cgversions = [v for v in cgversions
1609 cgversions = [v for v in cgversions
1613 if v in changegroup.supportedoutgoingversions(repo)]
1610 if v in changegroup.supportedoutgoingversions(repo)]
1614 if not cgversions:
1611 if not cgversions:
1615 raise ValueError(_('no common changegroup version'))
1612 raise ValueError(_('no common changegroup version'))
1616 version = max(cgversions)
1613 version = max(cgversions)
1617 outgoing = _computeoutgoing(repo, heads, common)
1614 outgoing = _computeoutgoing(repo, heads, common)
1618 cg = changegroup.getlocalchangegroupraw(repo, source, outgoing,
1615 cg = changegroup.getlocalchangegroupraw(repo, source, outgoing,
1619 bundlecaps=bundlecaps,
1620 version=version)
1616 version=version)
1621
1617
1622 if cg:
1618 if cg:
1623 part = bundler.newpart('changegroup', data=cg)
1619 part = bundler.newpart('changegroup', data=cg)
1624 if cgversions:
1620 if cgversions:
1625 part.addparam('version', version)
1621 part.addparam('version', version)
1626 part.addparam('nbchanges', str(len(outgoing.missing)), mandatory=False)
1622 part.addparam('nbchanges', str(len(outgoing.missing)), mandatory=False)
1627 if 'treemanifest' in repo.requirements:
1623 if 'treemanifest' in repo.requirements:
1628 part.addparam('treemanifest', '1')
1624 part.addparam('treemanifest', '1')
1629
1625
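The version negotiation in the changegroup part generator boils down to intersecting the client's advertised versions with what the server can emit and taking the highest; a pure-Python sketch in which the version sets are illustrative:

serversupported = {'01', '02', '03'}      # hypothetical server-side versions
clientversions = ['01', '02']             # as read from b2caps.get('changegroup')

common = [v for v in clientversions if v in serversupported]
if not common:
    raise ValueError('no common changegroup version')
version = max(common)                     # lexicographic max works for '01'..'03'
print(version)                            # '02'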
1630 @getbundle2partsgenerator('listkeys')
1626 @getbundle2partsgenerator('listkeys')
1631 def _getbundlelistkeysparts(bundler, repo, source, bundlecaps=None,
1627 def _getbundlelistkeysparts(bundler, repo, source, bundlecaps=None,
1632 b2caps=None, **kwargs):
1628 b2caps=None, **kwargs):
1633 """add parts containing listkeys namespaces to the requested bundle"""
1629 """add parts containing listkeys namespaces to the requested bundle"""
1634 listkeys = kwargs.get('listkeys', ())
1630 listkeys = kwargs.get('listkeys', ())
1635 for namespace in listkeys:
1631 for namespace in listkeys:
1636 part = bundler.newpart('listkeys')
1632 part = bundler.newpart('listkeys')
1637 part.addparam('namespace', namespace)
1633 part.addparam('namespace', namespace)
1638 keys = repo.listkeys(namespace).items()
1634 keys = repo.listkeys(namespace).items()
1639 part.data = pushkey.encodekeys(keys)
1635 part.data = pushkey.encodekeys(keys)
1640
1636
1641 @getbundle2partsgenerator('obsmarkers')
1637 @getbundle2partsgenerator('obsmarkers')
1642 def _getbundleobsmarkerpart(bundler, repo, source, bundlecaps=None,
1638 def _getbundleobsmarkerpart(bundler, repo, source, bundlecaps=None,
1643 b2caps=None, heads=None, **kwargs):
1639 b2caps=None, heads=None, **kwargs):
1644 """add an obsolescence markers part to the requested bundle"""
1640 """add an obsolescence markers part to the requested bundle"""
1645 if kwargs.get('obsmarkers', False):
1641 if kwargs.get('obsmarkers', False):
1646 if heads is None:
1642 if heads is None:
1647 heads = repo.heads()
1643 heads = repo.heads()
1648 subset = [c.node() for c in repo.set('::%ln', heads)]
1644 subset = [c.node() for c in repo.set('::%ln', heads)]
1649 markers = repo.obsstore.relevantmarkers(subset)
1645 markers = repo.obsstore.relevantmarkers(subset)
1650 markers = sorted(markers)
1646 markers = sorted(markers)
1651 buildobsmarkerspart(bundler, markers)
1647 buildobsmarkerspart(bundler, markers)
1652
1648
1653 @getbundle2partsgenerator('hgtagsfnodes')
1649 @getbundle2partsgenerator('hgtagsfnodes')
1654 def _getbundletagsfnodes(bundler, repo, source, bundlecaps=None,
1650 def _getbundletagsfnodes(bundler, repo, source, bundlecaps=None,
1655 b2caps=None, heads=None, common=None,
1651 b2caps=None, heads=None, common=None,
1656 **kwargs):
1652 **kwargs):
1657 """Transfer the .hgtags filenodes mapping.
1653 """Transfer the .hgtags filenodes mapping.
1658
1654
1659 Only values for heads in this bundle will be transferred.
1655 Only values for heads in this bundle will be transferred.
1660
1656
1661 The part data consists of pairs of 20 byte changeset node and .hgtags
1657 The part data consists of pairs of 20 byte changeset node and .hgtags
1662 filenodes raw values.
1658 filenodes raw values.
1663 """
1659 """
1664 # Don't send unless:
1660 # Don't send unless:
1665 # - changesets are being exchanged,
1661 # - changesets are being exchanged,
1666 # - the client supports it.
1662 # - the client supports it.
1667 if not (kwargs.get('cg', True) and 'hgtagsfnodes' in b2caps):
1663 if not (kwargs.get('cg', True) and 'hgtagsfnodes' in b2caps):
1668 return
1664 return
1669
1665
1670 outgoing = _computeoutgoing(repo, heads, common)
1666 outgoing = _computeoutgoing(repo, heads, common)
1671
1667
1672 if not outgoing.missingheads:
1668 if not outgoing.missingheads:
1673 return
1669 return
1674
1670
1675 cache = tags.hgtagsfnodescache(repo.unfiltered())
1671 cache = tags.hgtagsfnodescache(repo.unfiltered())
1676 chunks = []
1672 chunks = []
1677
1673
1678 # .hgtags fnodes are only relevant for head changesets. While we could
1674 # .hgtags fnodes are only relevant for head changesets. While we could
1679 # transfer values for all known nodes, there will likely be little to
1675 # transfer values for all known nodes, there will likely be little to
1680 # no benefit.
1676 # no benefit.
1681 #
1677 #
1682 # We don't bother using a generator to produce output data because
1678 # We don't bother using a generator to produce output data because
1683 # a) we only have 40 bytes per head and even esoteric numbers of heads
1679 # a) we only have 40 bytes per head and even esoteric numbers of heads
1684 # consume little memory (1M heads is 40MB) b) we don't want to send the
1680 # consume little memory (1M heads is 40MB) b) we don't want to send the
1685 # part if we don't have entries and knowing if we have entries requires
1681 # part if we don't have entries and knowing if we have entries requires
1686 # cache lookups.
1682 # cache lookups.
1687 for node in outgoing.missingheads:
1683 for node in outgoing.missingheads:
1688 # Don't compute missing, as this may slow down serving.
1684 # Don't compute missing, as this may slow down serving.
1689 fnode = cache.getfnode(node, computemissing=False)
1685 fnode = cache.getfnode(node, computemissing=False)
1690 if fnode is not None:
1686 if fnode is not None:
1691 chunks.extend([node, fnode])
1687 chunks.extend([node, fnode])
1692
1688
1693 if chunks:
1689 if chunks:
1694 bundler.newpart('hgtagsfnodes', data=''.join(chunks))
1690 bundler.newpart('hgtagsfnodes', data=''.join(chunks))
1695
1691
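A minimal decoding sketch (illustrative, not Mercurial code): given the 40-byte record layout described in the docstring above, a receiver could split an 'hgtagsfnodes' payload back into (changeset node, .hgtags filenode) pairs like this.

def iterfnoderecords(data):
    # data: raw payload of an 'hgtagsfnodes' part; each record is 40 bytes,
    # a 20-byte changeset node followed by its 20-byte .hgtags filenode
    assert len(data) % 40 == 0
    for offset in range(0, len(data), 40):
        yield data[offset:offset + 20], data[offset + 20:offset + 40]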
1696 def _getbookmarks(repo, **kwargs):
1692 def _getbookmarks(repo, **kwargs):
1697 """Returns bookmark to node mapping.
1693 """Returns bookmark to node mapping.
1698
1694
1699 This function is primarily used to generate `bookmarks` bundle2 part.
1695 This function is primarily used to generate `bookmarks` bundle2 part.
1700 It is a separate function in order to make it easy to wrap it
1696 It is a separate function in order to make it easy to wrap it
1701 in extensions. Passing `kwargs` to the function makes it easy to
1697 in extensions. Passing `kwargs` to the function makes it easy to
1702 add new parameters in extensions.
1698 add new parameters in extensions.
1703 """
1699 """
1704
1700
1705 return dict(bookmod.listbinbookmarks(repo))
1701 return dict(bookmod.listbinbookmarks(repo))
1706
1702
1707 def check_heads(repo, their_heads, context):
1703 def check_heads(repo, their_heads, context):
1708 """check if the heads of a repo have been modified
1704 """check if the heads of a repo have been modified
1709
1705
1710 Used by peer for unbundling.
1706 Used by peer for unbundling.
1711 """
1707 """
1712 heads = repo.heads()
1708 heads = repo.heads()
1713 heads_hash = hashlib.sha1(''.join(sorted(heads))).digest()
1709 heads_hash = hashlib.sha1(''.join(sorted(heads))).digest()
1714 if not (their_heads == ['force'] or their_heads == heads or
1710 if not (their_heads == ['force'] or their_heads == heads or
1715 their_heads == ['hashed', heads_hash]):
1711 their_heads == ['hashed', heads_hash]):
1716 # someone else committed/pushed/unbundled while we
1712 # someone else committed/pushed/unbundled while we
1717 # were transferring data
1713 # were transferring data
1718 raise error.PushRaced('repository changed while %s - '
1714 raise error.PushRaced('repository changed while %s - '
1719 'please try again' % context)
1715 'please try again' % context)
1720
1716
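For illustration only: assuming the same sha1-over-sorted-heads scheme that check_heads() uses above, a client could build the ['hashed', <digest>] value it sends like this.

import hashlib

def hashedheads(heads):
    # heads: the 20-byte binary head nodes the client last saw on the server
    return ['hashed', hashlib.sha1(''.join(sorted(heads))).digest()]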
1721 def unbundle(repo, cg, heads, source, url):
1717 def unbundle(repo, cg, heads, source, url):
1722 """Apply a bundle to a repo.
1718 """Apply a bundle to a repo.
1723
1719
1724 this function makes sure the repo is locked during the application and has
1720 this function makes sure the repo is locked during the application and has
1725 a mechanism to check that no push race occurred between the creation of the
1721 a mechanism to check that no push race occurred between the creation of the
1726 bundle and its application.
1722 bundle and its application.
1727
1723
1728 If the push was raced, a PushRaced exception is raised."""
1724 If the push was raced, a PushRaced exception is raised."""
1729 r = 0
1725 r = 0
1730 # need a transaction when processing a bundle2 stream
1726 # need a transaction when processing a bundle2 stream
1731 # [wlock, lock, tr] - needs to be an array so nested functions can modify it
1727 # [wlock, lock, tr] - needs to be an array so nested functions can modify it
1732 lockandtr = [None, None, None]
1728 lockandtr = [None, None, None]
1733 recordout = None
1729 recordout = None
1734 # quick fix for output mismatch with bundle2 in 3.4
1730 # quick fix for output mismatch with bundle2 in 3.4
1735 captureoutput = repo.ui.configbool('experimental', 'bundle2-output-capture',
1731 captureoutput = repo.ui.configbool('experimental', 'bundle2-output-capture',
1736 False)
1732 False)
1737 if url.startswith('remote:http:') or url.startswith('remote:https:'):
1733 if url.startswith('remote:http:') or url.startswith('remote:https:'):
1738 captureoutput = True
1734 captureoutput = True
1739 try:
1735 try:
1740 # note: outside bundle1, 'heads' is expected to be empty and this
1736 # note: outside bundle1, 'heads' is expected to be empty and this
1741 # 'check_heads' call will be a no-op
1737 # 'check_heads' call will be a no-op
1742 check_heads(repo, heads, 'uploading changes')
1738 check_heads(repo, heads, 'uploading changes')
1743 # push can proceed
1739 # push can proceed
1744 if not util.safehasattr(cg, 'params'):
1740 if not util.safehasattr(cg, 'params'):
1745 # legacy case: bundle1 (changegroup 01)
1741 # legacy case: bundle1 (changegroup 01)
1746 lockandtr[1] = repo.lock()
1742 lockandtr[1] = repo.lock()
1747 r = cg.apply(repo, source, url)
1743 r = cg.apply(repo, source, url)
1748 else:
1744 else:
1749 r = None
1745 r = None
1750 try:
1746 try:
1751 def gettransaction():
1747 def gettransaction():
1752 if not lockandtr[2]:
1748 if not lockandtr[2]:
1753 lockandtr[0] = repo.wlock()
1749 lockandtr[0] = repo.wlock()
1754 lockandtr[1] = repo.lock()
1750 lockandtr[1] = repo.lock()
1755 lockandtr[2] = repo.transaction(source)
1751 lockandtr[2] = repo.transaction(source)
1756 lockandtr[2].hookargs['source'] = source
1752 lockandtr[2].hookargs['source'] = source
1757 lockandtr[2].hookargs['url'] = url
1753 lockandtr[2].hookargs['url'] = url
1758 lockandtr[2].hookargs['bundle2'] = '1'
1754 lockandtr[2].hookargs['bundle2'] = '1'
1759 return lockandtr[2]
1755 return lockandtr[2]
1760
1756
1761 # Do greedy locking by default until we're satisfied with lazy
1757 # Do greedy locking by default until we're satisfied with lazy
1762 # locking.
1758 # locking.
1763 if not repo.ui.configbool('experimental', 'bundle2lazylocking'):
1759 if not repo.ui.configbool('experimental', 'bundle2lazylocking'):
1764 gettransaction()
1760 gettransaction()
1765
1761
1766 op = bundle2.bundleoperation(repo, gettransaction,
1762 op = bundle2.bundleoperation(repo, gettransaction,
1767 captureoutput=captureoutput)
1763 captureoutput=captureoutput)
1768 try:
1764 try:
1769 op = bundle2.processbundle(repo, cg, op=op)
1765 op = bundle2.processbundle(repo, cg, op=op)
1770 finally:
1766 finally:
1771 r = op.reply
1767 r = op.reply
1772 if captureoutput and r is not None:
1768 if captureoutput and r is not None:
1773 repo.ui.pushbuffer(error=True, subproc=True)
1769 repo.ui.pushbuffer(error=True, subproc=True)
1774 def recordout(output):
1770 def recordout(output):
1775 r.newpart('output', data=output, mandatory=False)
1771 r.newpart('output', data=output, mandatory=False)
1776 if lockandtr[2] is not None:
1772 if lockandtr[2] is not None:
1777 lockandtr[2].close()
1773 lockandtr[2].close()
1778 except BaseException as exc:
1774 except BaseException as exc:
1779 exc.duringunbundle2 = True
1775 exc.duringunbundle2 = True
1780 if captureoutput and r is not None:
1776 if captureoutput and r is not None:
1781 parts = exc._bundle2salvagedoutput = r.salvageoutput()
1777 parts = exc._bundle2salvagedoutput = r.salvageoutput()
1782 def recordout(output):
1778 def recordout(output):
1783 part = bundle2.bundlepart('output', data=output,
1779 part = bundle2.bundlepart('output', data=output,
1784 mandatory=False)
1780 mandatory=False)
1785 parts.append(part)
1781 parts.append(part)
1786 raise
1782 raise
1787 finally:
1783 finally:
1788 lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0])
1784 lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0])
1789 if recordout is not None:
1785 if recordout is not None:
1790 recordout(repo.ui.popbuffer())
1786 recordout(repo.ui.popbuffer())
1791 return r
1787 return r
1792
1788
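Both experimental knobs read in this function are plain config settings; a hypothetical hgrc sketch (the option names come from the configbool calls above, and both default to off in the code shown):

[experimental]
# capture and relay server-side output while processing bundle2 parts
bundle2-output-capture = True
# take wlock/lock/transaction lazily, only when a part actually needs them
bundle2lazylocking = True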
1793 def _maybeapplyclonebundle(pullop):
1789 def _maybeapplyclonebundle(pullop):
1794 """Apply a clone bundle from a remote, if possible."""
1790 """Apply a clone bundle from a remote, if possible."""
1795
1791
1796 repo = pullop.repo
1792 repo = pullop.repo
1797 remote = pullop.remote
1793 remote = pullop.remote
1798
1794
1799 if not repo.ui.configbool('ui', 'clonebundles', True):
1795 if not repo.ui.configbool('ui', 'clonebundles', True):
1800 return
1796 return
1801
1797
1802 # Only run if local repo is empty.
1798 # Only run if local repo is empty.
1803 if len(repo):
1799 if len(repo):
1804 return
1800 return
1805
1801
1806 if pullop.heads:
1802 if pullop.heads:
1807 return
1803 return
1808
1804
1809 if not remote.capable('clonebundles'):
1805 if not remote.capable('clonebundles'):
1810 return
1806 return
1811
1807
1812 res = remote._call('clonebundles')
1808 res = remote._call('clonebundles')
1813
1809
1814 # If we call the wire protocol command, that's good enough to record the
1810 # If we call the wire protocol command, that's good enough to record the
1815 # attempt.
1811 # attempt.
1816 pullop.clonebundleattempted = True
1812 pullop.clonebundleattempted = True
1817
1813
1818 entries = parseclonebundlesmanifest(repo, res)
1814 entries = parseclonebundlesmanifest(repo, res)
1819 if not entries:
1815 if not entries:
1820 repo.ui.note(_('no clone bundles available on remote; '
1816 repo.ui.note(_('no clone bundles available on remote; '
1821 'falling back to regular clone\n'))
1817 'falling back to regular clone\n'))
1822 return
1818 return
1823
1819
1824 entries = filterclonebundleentries(repo, entries)
1820 entries = filterclonebundleentries(repo, entries)
1825 if not entries:
1821 if not entries:
1826 # There is a thundering herd concern here. However, if a server
1822 # There is a thundering herd concern here. However, if a server
1827 # operator doesn't advertise bundles appropriate for its clients,
1823 # operator doesn't advertise bundles appropriate for its clients,
1828 # they deserve what's coming. Furthermore, from a client's
1824 # they deserve what's coming. Furthermore, from a client's
1829 # perspective, no automatic fallback would mean not being able to
1825 # perspective, no automatic fallback would mean not being able to
1830 # clone!
1826 # clone!
1831 repo.ui.warn(_('no compatible clone bundles available on server; '
1827 repo.ui.warn(_('no compatible clone bundles available on server; '
1832 'falling back to regular clone\n'))
1828 'falling back to regular clone\n'))
1833 repo.ui.warn(_('(you may want to report this to the server '
1829 repo.ui.warn(_('(you may want to report this to the server '
1834 'operator)\n'))
1830 'operator)\n'))
1835 return
1831 return
1836
1832
1837 entries = sortclonebundleentries(repo.ui, entries)
1833 entries = sortclonebundleentries(repo.ui, entries)
1838
1834
1839 url = entries[0]['URL']
1835 url = entries[0]['URL']
1840 repo.ui.status(_('applying clone bundle from %s\n') % url)
1836 repo.ui.status(_('applying clone bundle from %s\n') % url)
1841 if trypullbundlefromurl(repo.ui, repo, url):
1837 if trypullbundlefromurl(repo.ui, repo, url):
1842 repo.ui.status(_('finished applying clone bundle\n'))
1838 repo.ui.status(_('finished applying clone bundle\n'))
1843 # Bundle failed.
1839 # Bundle failed.
1844 #
1840 #
1845 # We abort by default to avoid the thundering herd of
1841 # We abort by default to avoid the thundering herd of
1846 # clients flooding a server that was expecting expensive
1842 # clients flooding a server that was expecting expensive
1847 # clone load to be offloaded.
1843 # clone load to be offloaded.
1848 elif repo.ui.configbool('ui', 'clonebundlefallback', False):
1844 elif repo.ui.configbool('ui', 'clonebundlefallback', False):
1849 repo.ui.warn(_('falling back to normal clone\n'))
1845 repo.ui.warn(_('falling back to normal clone\n'))
1850 else:
1846 else:
1851 raise error.Abort(_('error applying bundle'),
1847 raise error.Abort(_('error applying bundle'),
1852 hint=_('if this error persists, consider contacting '
1848 hint=_('if this error persists, consider contacting '
1853 'the server operator or disable clone '
1849 'the server operator or disable clone '
1854 'bundles via '
1850 'bundles via '
1855 '"--config ui.clonebundles=false"'))
1851 '"--config ui.clonebundles=false"'))
1856
1852
1857 def parseclonebundlesmanifest(repo, s):
1853 def parseclonebundlesmanifest(repo, s):
1858 """Parses the raw text of a clone bundles manifest.
1854 """Parses the raw text of a clone bundles manifest.
1859
1855
1860 Returns a list of dicts. The dicts have a ``URL`` key corresponding
1856 Returns a list of dicts. The dicts have a ``URL`` key corresponding
1861 to the URL; the remaining keys are the attributes of the entry.
1857 to the URL; the remaining keys are the attributes of the entry.
1862 """
1858 """
1863 m = []
1859 m = []
1864 for line in s.splitlines():
1860 for line in s.splitlines():
1865 fields = line.split()
1861 fields = line.split()
1866 if not fields:
1862 if not fields:
1867 continue
1863 continue
1868 attrs = {'URL': fields[0]}
1864 attrs = {'URL': fields[0]}
1869 for rawattr in fields[1:]:
1865 for rawattr in fields[1:]:
1870 key, value = rawattr.split('=', 1)
1866 key, value = rawattr.split('=', 1)
1871 key = urlreq.unquote(key)
1867 key = urlreq.unquote(key)
1872 value = urlreq.unquote(value)
1868 value = urlreq.unquote(value)
1873 attrs[key] = value
1869 attrs[key] = value
1874
1870
1875 # Parse BUNDLESPEC into components. This makes client-side
1871 # Parse BUNDLESPEC into components. This makes client-side
1876 # preferences easier to specify since you can prefer a single
1872 # preferences easier to specify since you can prefer a single
1877 # component of the BUNDLESPEC.
1873 # component of the BUNDLESPEC.
1878 if key == 'BUNDLESPEC':
1874 if key == 'BUNDLESPEC':
1879 try:
1875 try:
1880 comp, version, params = parsebundlespec(repo, value,
1876 comp, version, params = parsebundlespec(repo, value,
1881 externalnames=True)
1877 externalnames=True)
1882 attrs['COMPRESSION'] = comp
1878 attrs['COMPRESSION'] = comp
1883 attrs['VERSION'] = version
1879 attrs['VERSION'] = version
1884 except error.InvalidBundleSpecification:
1880 except error.InvalidBundleSpecification:
1885 pass
1881 pass
1886 except error.UnsupportedBundleSpecification:
1882 except error.UnsupportedBundleSpecification:
1887 pass
1883 pass
1888
1884
1889 m.append(attrs)
1885 m.append(attrs)
1890
1886
1891 return m
1887 return m
1892
1888
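For illustration (the URL and attribute values are made up): one manifest line and roughly the dict the loop above produces for it. COMPRESSION and VERSION are derived from BUNDLESPEC when it parses cleanly.

# Hypothetical manifest line:
#   https://hg.example.com/bundles/full.hg.gz BUNDLESPEC=gzip-v2 REQUIRESNI=true
entry = {
    'URL': 'https://hg.example.com/bundles/full.hg.gz',
    'BUNDLESPEC': 'gzip-v2',
    'COMPRESSION': 'gzip',   # derived from BUNDLESPEC
    'VERSION': 'v2',         # derived from BUNDLESPEC
    'REQUIRESNI': 'true',
}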
1893 def filterclonebundleentries(repo, entries):
1889 def filterclonebundleentries(repo, entries):
1894 """Remove incompatible clone bundle manifest entries.
1890 """Remove incompatible clone bundle manifest entries.
1895
1891
1896 Accepts a list of entries parsed with ``parseclonebundlesmanifest``
1892 Accepts a list of entries parsed with ``parseclonebundlesmanifest``
1897 and returns a new list consisting of only the entries that this client
1893 and returns a new list consisting of only the entries that this client
1898 should be able to apply.
1894 should be able to apply.
1899
1895
1900 There is no guarantee we'll be able to apply all returned entries because
1896 There is no guarantee we'll be able to apply all returned entries because
1901 the metadata we use to filter on may be missing or wrong.
1897 the metadata we use to filter on may be missing or wrong.
1902 """
1898 """
1903 newentries = []
1899 newentries = []
1904 for entry in entries:
1900 for entry in entries:
1905 spec = entry.get('BUNDLESPEC')
1901 spec = entry.get('BUNDLESPEC')
1906 if spec:
1902 if spec:
1907 try:
1903 try:
1908 parsebundlespec(repo, spec, strict=True)
1904 parsebundlespec(repo, spec, strict=True)
1909 except error.InvalidBundleSpecification as e:
1905 except error.InvalidBundleSpecification as e:
1910 repo.ui.debug(str(e) + '\n')
1906 repo.ui.debug(str(e) + '\n')
1911 continue
1907 continue
1912 except error.UnsupportedBundleSpecification as e:
1908 except error.UnsupportedBundleSpecification as e:
1913 repo.ui.debug('filtering %s because unsupported bundle '
1909 repo.ui.debug('filtering %s because unsupported bundle '
1914 'spec: %s\n' % (entry['URL'], str(e)))
1910 'spec: %s\n' % (entry['URL'], str(e)))
1915 continue
1911 continue
1916
1912
1917 if 'REQUIRESNI' in entry and not sslutil.hassni:
1913 if 'REQUIRESNI' in entry and not sslutil.hassni:
1918 repo.ui.debug('filtering %s because SNI not supported\n' %
1914 repo.ui.debug('filtering %s because SNI not supported\n' %
1919 entry['URL'])
1915 entry['URL'])
1920 continue
1916 continue
1921
1917
1922 newentries.append(entry)
1918 newentries.append(entry)
1923
1919
1924 return newentries
1920 return newentries
1925
1921
1926 class clonebundleentry(object):
1922 class clonebundleentry(object):
1927 """Represents an item in a clone bundles manifest.
1923 """Represents an item in a clone bundles manifest.
1928
1924
1929 This rich class is needed to support sorting since sorted() in Python 3
1925 This rich class is needed to support sorting since sorted() in Python 3
1930 doesn't support ``cmp`` and our comparison is complex enough that ``key=``
1926 doesn't support ``cmp`` and our comparison is complex enough that ``key=``
1931 won't work.
1927 won't work.
1932 """
1928 """
1933
1929
1934 def __init__(self, value, prefers):
1930 def __init__(self, value, prefers):
1935 self.value = value
1931 self.value = value
1936 self.prefers = prefers
1932 self.prefers = prefers
1937
1933
1938 def _cmp(self, other):
1934 def _cmp(self, other):
1939 for prefkey, prefvalue in self.prefers:
1935 for prefkey, prefvalue in self.prefers:
1940 avalue = self.value.get(prefkey)
1936 avalue = self.value.get(prefkey)
1941 bvalue = other.value.get(prefkey)
1937 bvalue = other.value.get(prefkey)
1942
1938
1943 # Special case for b missing attribute and a matches exactly.
1939 # Special case for b missing attribute and a matches exactly.
1944 if avalue is not None and bvalue is None and avalue == prefvalue:
1940 if avalue is not None and bvalue is None and avalue == prefvalue:
1945 return -1
1941 return -1
1946
1942
1947 # Special case for a missing attribute and b matches exactly.
1943 # Special case for a missing attribute and b matches exactly.
1948 if bvalue is not None and avalue is None and bvalue == prefvalue:
1944 if bvalue is not None and avalue is None and bvalue == prefvalue:
1949 return 1
1945 return 1
1950
1946
1951 # We can't compare unless attribute present on both.
1947 # We can't compare unless attribute present on both.
1952 if avalue is None or bvalue is None:
1948 if avalue is None or bvalue is None:
1953 continue
1949 continue
1954
1950
1955 # Same values should fall back to next attribute.
1951 # Same values should fall back to next attribute.
1956 if avalue == bvalue:
1952 if avalue == bvalue:
1957 continue
1953 continue
1958
1954
1959 # Exact matches come first.
1955 # Exact matches come first.
1960 if avalue == prefvalue:
1956 if avalue == prefvalue:
1961 return -1
1957 return -1
1962 if bvalue == prefvalue:
1958 if bvalue == prefvalue:
1963 return 1
1959 return 1
1964
1960
1965 # Fall back to next attribute.
1961 # Fall back to next attribute.
1966 continue
1962 continue
1967
1963
1968 # If we got here we couldn't sort by attributes and prefers. Fall
1964 # If we got here we couldn't sort by attributes and prefers. Fall
1969 # back to index order.
1965 # back to index order.
1970 return 0
1966 return 0
1971
1967
1972 def __lt__(self, other):
1968 def __lt__(self, other):
1973 return self._cmp(other) < 0
1969 return self._cmp(other) < 0
1974
1970
1975 def __gt__(self, other):
1971 def __gt__(self, other):
1976 return self._cmp(other) > 0
1972 return self._cmp(other) > 0
1977
1973
1978 def __eq__(self, other):
1974 def __eq__(self, other):
1979 return self._cmp(other) == 0
1975 return self._cmp(other) == 0
1980
1976
1981 def __le__(self, other):
1977 def __le__(self, other):
1982 return self._cmp(other) <= 0
1978 return self._cmp(other) <= 0
1983
1979
1984 def __ge__(self, other):
1980 def __ge__(self, other):
1985 return self._cmp(other) >= 0
1981 return self._cmp(other) >= 0
1986
1982
1987 def __ne__(self, other):
1983 def __ne__(self, other):
1988 return self._cmp(other) != 0
1984 return self._cmp(other) != 0
1989
1985
1990 def sortclonebundleentries(ui, entries):
1986 def sortclonebundleentries(ui, entries):
1991 prefers = ui.configlist('ui', 'clonebundleprefers', default=[])
1987 prefers = ui.configlist('ui', 'clonebundleprefers', default=[])
1992 if not prefers:
1988 if not prefers:
1993 return list(entries)
1989 return list(entries)
1994
1990
1995 prefers = [p.split('=', 1) for p in prefers]
1991 prefers = [p.split('=', 1) for p in prefers]
1996
1992
1997 items = sorted(clonebundleentry(v, prefers) for v in entries)
1993 items = sorted(clonebundleentry(v, prefers) for v in entries)
1998 return [i.value for i in items]
1994 return [i.value for i in items]
1999
1995
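A hypothetical preference configuration: each comma-separated entry is split on '=' and matched against manifest attributes in order, so with this setting entries whose VERSION is exactly 'v2' sort first, ties fall through to COMPRESSION, and remaining ties keep their manifest order.

[ui]
clonebundleprefers = VERSION=v2, COMPRESSION=zstd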
2000 def trypullbundlefromurl(ui, repo, url):
1996 def trypullbundlefromurl(ui, repo, url):
2001 """Attempt to apply a bundle from a URL."""
1997 """Attempt to apply a bundle from a URL."""
2002 lock = repo.lock()
1998 lock = repo.lock()
2003 try:
1999 try:
2004 tr = repo.transaction('bundleurl')
2000 tr = repo.transaction('bundleurl')
2005 try:
2001 try:
2006 try:
2002 try:
2007 fh = urlmod.open(ui, url)
2003 fh = urlmod.open(ui, url)
2008 cg = readbundle(ui, fh, 'stream')
2004 cg = readbundle(ui, fh, 'stream')
2009
2005
2010 if isinstance(cg, bundle2.unbundle20):
2006 if isinstance(cg, bundle2.unbundle20):
2011 bundle2.processbundle(repo, cg, lambda: tr)
2007 bundle2.processbundle(repo, cg, lambda: tr)
2012 elif isinstance(cg, streamclone.streamcloneapplier):
2008 elif isinstance(cg, streamclone.streamcloneapplier):
2013 cg.apply(repo)
2009 cg.apply(repo)
2014 else:
2010 else:
2015 cg.apply(repo, 'clonebundles', url)
2011 cg.apply(repo, 'clonebundles', url)
2016 tr.close()
2012 tr.close()
2017 return True
2013 return True
2018 except urlerr.httperror as e:
2014 except urlerr.httperror as e:
2019 ui.warn(_('HTTP error fetching bundle: %s\n') % str(e))
2015 ui.warn(_('HTTP error fetching bundle: %s\n') % str(e))
2020 except urlerr.urlerror as e:
2016 except urlerr.urlerror as e:
2021 ui.warn(_('error fetching bundle: %s\n') % e.reason[1])
2017 ui.warn(_('error fetching bundle: %s\n') % e.reason[1])
2022
2018
2023 return False
2019 return False
2024 finally:
2020 finally:
2025 tr.release()
2021 tr.release()
2026 finally:
2022 finally:
2027 lock.release()
2023 lock.release()
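As the code above shows, a failed clone bundle aborts the clone by default, and clone bundles can also be switched off on the client entirely; both behaviours hang off plain ui settings. A minimal hgrc sketch:

[ui]
# keep cloning from the server when applying an advertised clone bundle fails
clonebundlefallback = true
# or opt out of clone bundles on the client altogether
clonebundles = false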
@@ -1,1232 +1,1232 b''
1 This test is dedicated to test the bundle2 container format
1 This test is dedicated to test the bundle2 container format
2
2
3 It tests multiple existing parts to exercise different features of the container. You
3 It tests multiple existing parts to exercise different features of the container. You
4 probably do not need to touch this test unless you change the binary encoding
4 probably do not need to touch this test unless you change the binary encoding
5 of the bundle2 format itself.
5 of the bundle2 format itself.
6
6
7 Create an extension to test bundle2 API
7 Create an extension to test bundle2 API
8
8
9 $ cat > bundle2.py << EOF
9 $ cat > bundle2.py << EOF
10 > """A small extension to test bundle2 implementation
10 > """A small extension to test bundle2 implementation
11 >
11 >
12 > This extension allows detailed testing of the various bundle2 APIs and
12 > This extension allows detailed testing of the various bundle2 APIs and
13 > behaviors.
13 > behaviors.
14 > """
14 > """
15 >
15 >
16 > import sys, os, gc
16 > import sys, os, gc
17 > from mercurial import cmdutil
17 > from mercurial import cmdutil
18 > from mercurial import util
18 > from mercurial import util
19 > from mercurial import bundle2
19 > from mercurial import bundle2
20 > from mercurial import scmutil
20 > from mercurial import scmutil
21 > from mercurial import discovery
21 > from mercurial import discovery
22 > from mercurial import changegroup
22 > from mercurial import changegroup
23 > from mercurial import error
23 > from mercurial import error
24 > from mercurial import obsolete
24 > from mercurial import obsolete
25 >
25 >
26 >
26 >
27 > try:
27 > try:
28 > import msvcrt
28 > import msvcrt
29 > msvcrt.setmode(sys.stdin.fileno(), os.O_BINARY)
29 > msvcrt.setmode(sys.stdin.fileno(), os.O_BINARY)
30 > msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
30 > msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
31 > msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
31 > msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
32 > except ImportError:
32 > except ImportError:
33 > pass
33 > pass
34 >
34 >
35 > cmdtable = {}
35 > cmdtable = {}
36 > command = cmdutil.command(cmdtable)
36 > command = cmdutil.command(cmdtable)
37 >
37 >
38 > ELEPHANTSSONG = """Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
38 > ELEPHANTSSONG = """Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
39 > Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
39 > Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
40 > Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko."""
40 > Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko."""
41 > assert len(ELEPHANTSSONG) == 178 # future tests say 178 bytes, trust it.
41 > assert len(ELEPHANTSSONG) == 178 # future tests say 178 bytes, trust it.
42 >
42 >
43 > @bundle2.parthandler('test:song')
43 > @bundle2.parthandler('test:song')
44 > def songhandler(op, part):
44 > def songhandler(op, part):
45 > """handle a "test:song" bundle2 part, printing the lyrics to stdout"""
45 > """handle a "test:song" bundle2 part, printing the lyrics to stdout"""
46 > op.ui.write('The choir starts singing:\n')
46 > op.ui.write('The choir starts singing:\n')
47 > verses = 0
47 > verses = 0
48 > for line in part.read().split('\n'):
48 > for line in part.read().split('\n'):
49 > op.ui.write(' %s\n' % line)
49 > op.ui.write(' %s\n' % line)
50 > verses += 1
50 > verses += 1
51 > op.records.add('song', {'verses': verses})
51 > op.records.add('song', {'verses': verses})
52 >
52 >
53 > @bundle2.parthandler('test:ping')
53 > @bundle2.parthandler('test:ping')
54 > def pinghandler(op, part):
54 > def pinghandler(op, part):
55 > op.ui.write('received ping request (id %i)\n' % part.id)
55 > op.ui.write('received ping request (id %i)\n' % part.id)
56 > if op.reply is not None and 'ping-pong' in op.reply.capabilities:
56 > if op.reply is not None and 'ping-pong' in op.reply.capabilities:
57 > op.ui.write_err('replying to ping request (id %i)\n' % part.id)
57 > op.ui.write_err('replying to ping request (id %i)\n' % part.id)
58 > op.reply.newpart('test:pong', [('in-reply-to', str(part.id))],
58 > op.reply.newpart('test:pong', [('in-reply-to', str(part.id))],
59 > mandatory=False)
59 > mandatory=False)
60 >
60 >
61 > @bundle2.parthandler('test:debugreply')
61 > @bundle2.parthandler('test:debugreply')
62 > def debugreply(op, part):
62 > def debugreply(op, part):
63 > """print data about the capacity of the bundle reply"""
63 > """print data about the capacity of the bundle reply"""
64 > if op.reply is None:
64 > if op.reply is None:
65 > op.ui.write('debugreply: no reply\n')
65 > op.ui.write('debugreply: no reply\n')
66 > else:
66 > else:
67 > op.ui.write('debugreply: capabilities:\n')
67 > op.ui.write('debugreply: capabilities:\n')
68 > for cap in sorted(op.reply.capabilities):
68 > for cap in sorted(op.reply.capabilities):
69 > op.ui.write('debugreply: %r\n' % cap)
69 > op.ui.write('debugreply: %r\n' % cap)
70 > for val in op.reply.capabilities[cap]:
70 > for val in op.reply.capabilities[cap]:
71 > op.ui.write('debugreply: %r\n' % val)
71 > op.ui.write('debugreply: %r\n' % val)
72 >
72 >
73 > @command('bundle2',
73 > @command('bundle2',
74 > [('', 'param', [], 'stream level parameter'),
74 > [('', 'param', [], 'stream level parameter'),
75 > ('', 'unknown', False, 'include an unknown mandatory part in the bundle'),
75 > ('', 'unknown', False, 'include an unknown mandatory part in the bundle'),
76 > ('', 'unknownparams', False, 'include an unknown part parameters in the bundle'),
76 > ('', 'unknownparams', False, 'include an unknown part parameters in the bundle'),
77 > ('', 'parts', False, 'include some arbitrary parts to the bundle'),
77 > ('', 'parts', False, 'include some arbitrary parts to the bundle'),
78 > ('', 'reply', False, 'produce a reply bundle'),
78 > ('', 'reply', False, 'produce a reply bundle'),
79 > ('', 'pushrace', False, 'includes a check:heads part with unknown nodes'),
79 > ('', 'pushrace', False, 'includes a check:heads part with unknown nodes'),
80 > ('', 'genraise', False, 'includes a part that raises an exception during generation'),
80 > ('', 'genraise', False, 'includes a part that raises an exception during generation'),
81 > ('', 'timeout', False, 'emulate a timeout during bundle generation'),
81 > ('', 'timeout', False, 'emulate a timeout during bundle generation'),
82 > ('r', 'rev', [], 'includes those changesets in the bundle'),
82 > ('r', 'rev', [], 'includes those changesets in the bundle'),
83 > ('', 'compress', '', 'compress the stream'),],
83 > ('', 'compress', '', 'compress the stream'),],
84 > '[OUTPUTFILE]')
84 > '[OUTPUTFILE]')
85 > def cmdbundle2(ui, repo, path=None, **opts):
85 > def cmdbundle2(ui, repo, path=None, **opts):
86 > """write a bundle2 container on standard output"""
86 > """write a bundle2 container on standard output"""
87 > bundler = bundle2.bundle20(ui)
87 > bundler = bundle2.bundle20(ui)
88 > for p in opts['param']:
88 > for p in opts['param']:
89 > p = p.split('=', 1)
89 > p = p.split('=', 1)
90 > try:
90 > try:
91 > bundler.addparam(*p)
91 > bundler.addparam(*p)
92 > except ValueError, exc:
92 > except ValueError, exc:
93 > raise error.Abort('%s' % exc)
93 > raise error.Abort('%s' % exc)
94 >
94 >
95 > if opts['compress']:
95 > if opts['compress']:
96 > bundler.setcompression(opts['compress'])
96 > bundler.setcompression(opts['compress'])
97 >
97 >
98 > if opts['reply']:
98 > if opts['reply']:
99 > capsstring = 'ping-pong\nelephants=babar,celeste\ncity%3D%21=celeste%2Cville'
99 > capsstring = 'ping-pong\nelephants=babar,celeste\ncity%3D%21=celeste%2Cville'
100 > bundler.newpart('replycaps', data=capsstring)
100 > bundler.newpart('replycaps', data=capsstring)
101 >
101 >
102 > if opts['pushrace']:
102 > if opts['pushrace']:
103 > # also serves to test the assignment of data outside of init
103 > # also serves to test the assignment of data outside of init
104 > part = bundler.newpart('check:heads')
104 > part = bundler.newpart('check:heads')
105 > part.data = '01234567890123456789'
105 > part.data = '01234567890123456789'
106 >
106 >
107 > revs = opts['rev']
107 > revs = opts['rev']
108 > if 'rev' in opts:
108 > if 'rev' in opts:
109 > revs = scmutil.revrange(repo, opts['rev'])
109 > revs = scmutil.revrange(repo, opts['rev'])
110 > if revs:
110 > if revs:
111 > # very crude version of a changegroup part creation
111 > # very crude version of a changegroup part creation
112 > bundled = repo.revs('%ld::%ld', revs, revs)
112 > bundled = repo.revs('%ld::%ld', revs, revs)
113 > headmissing = [c.node() for c in repo.set('heads(%ld)', revs)]
113 > headmissing = [c.node() for c in repo.set('heads(%ld)', revs)]
114 > headcommon = [c.node() for c in repo.set('parents(%ld) - %ld', revs, revs)]
114 > headcommon = [c.node() for c in repo.set('parents(%ld) - %ld', revs, revs)]
115 > outgoing = discovery.outgoing(repo, headcommon, headmissing)
115 > outgoing = discovery.outgoing(repo, headcommon, headmissing)
116 > cg = changegroup.getlocalchangegroup(repo, 'test:bundle2', outgoing, None)
116 > cg = changegroup.getlocalchangegroup(repo, 'test:bundle2', outgoing)
117 > bundler.newpart('changegroup', data=cg.getchunks(),
117 > bundler.newpart('changegroup', data=cg.getchunks(),
118 > mandatory=False)
118 > mandatory=False)
119 >
119 >
120 > if opts['parts']:
120 > if opts['parts']:
121 > bundler.newpart('test:empty', mandatory=False)
121 > bundler.newpart('test:empty', mandatory=False)
122 > # add a second one to make sure we handle multiple parts
122 > # add a second one to make sure we handle multiple parts
123 > bundler.newpart('test:empty', mandatory=False)
123 > bundler.newpart('test:empty', mandatory=False)
124 > bundler.newpart('test:song', data=ELEPHANTSSONG, mandatory=False)
124 > bundler.newpart('test:song', data=ELEPHANTSSONG, mandatory=False)
125 > bundler.newpart('test:debugreply', mandatory=False)
125 > bundler.newpart('test:debugreply', mandatory=False)
126 > mathpart = bundler.newpart('test:math')
126 > mathpart = bundler.newpart('test:math')
127 > mathpart.addparam('pi', '3.14')
127 > mathpart.addparam('pi', '3.14')
128 > mathpart.addparam('e', '2.72')
128 > mathpart.addparam('e', '2.72')
129 > mathpart.addparam('cooking', 'raw', mandatory=False)
129 > mathpart.addparam('cooking', 'raw', mandatory=False)
130 > mathpart.data = '42'
130 > mathpart.data = '42'
131 > mathpart.mandatory = False
131 > mathpart.mandatory = False
132 > # advisory known part with unknown mandatory param
132 > # advisory known part with unknown mandatory param
133 > bundler.newpart('test:song', [('randomparam','')], mandatory=False)
133 > bundler.newpart('test:song', [('randomparam','')], mandatory=False)
134 > if opts['unknown']:
134 > if opts['unknown']:
135 > bundler.newpart('test:unknown', data='some random content')
135 > bundler.newpart('test:unknown', data='some random content')
136 > if opts['unknownparams']:
136 > if opts['unknownparams']:
137 > bundler.newpart('test:song', [('randomparams', '')])
137 > bundler.newpart('test:song', [('randomparams', '')])
138 > if opts['parts']:
138 > if opts['parts']:
139 > bundler.newpart('test:ping', mandatory=False)
139 > bundler.newpart('test:ping', mandatory=False)
140 > if opts['genraise']:
140 > if opts['genraise']:
141 > def genraise():
141 > def genraise():
142 > yield 'first line\n'
142 > yield 'first line\n'
143 > raise RuntimeError('Someone set up us the bomb!')
143 > raise RuntimeError('Someone set up us the bomb!')
144 > bundler.newpart('output', data=genraise(), mandatory=False)
144 > bundler.newpart('output', data=genraise(), mandatory=False)
145 >
145 >
146 > if path is None:
146 > if path is None:
147 > file = sys.stdout
147 > file = sys.stdout
148 > else:
148 > else:
149 > file = open(path, 'wb')
149 > file = open(path, 'wb')
150 >
150 >
151 > if opts['timeout']:
151 > if opts['timeout']:
152 > bundler.newpart('test:song', data=ELEPHANTSSONG, mandatory=False)
152 > bundler.newpart('test:song', data=ELEPHANTSSONG, mandatory=False)
153 > for idx, junk in enumerate(bundler.getchunks()):
153 > for idx, junk in enumerate(bundler.getchunks()):
154 > ui.write('%d chunk\n' % idx)
154 > ui.write('%d chunk\n' % idx)
155 > if idx > 4:
155 > if idx > 4:
156 > # This throws a GeneratorExit inside the generator, which
156 > # This throws a GeneratorExit inside the generator, which
157 > # can cause problems if the exception-recovery code is
157 > # can cause problems if the exception-recovery code is
158 > # too zealous. It's important for this test that the break
158 > # too zealous. It's important for this test that the break
159 > # occur while we're in the middle of a part.
159 > # occur while we're in the middle of a part.
160 > break
160 > break
161 > gc.collect()
161 > gc.collect()
162 > ui.write('fake timeout complete.\n')
162 > ui.write('fake timeout complete.\n')
163 > return
163 > return
164 > try:
164 > try:
165 > for chunk in bundler.getchunks():
165 > for chunk in bundler.getchunks():
166 > file.write(chunk)
166 > file.write(chunk)
167 > except RuntimeError, exc:
167 > except RuntimeError, exc:
168 > raise error.Abort(exc)
168 > raise error.Abort(exc)
169 > finally:
169 > finally:
170 > file.flush()
170 > file.flush()
171 >
171 >
172 > @command('unbundle2', [], '')
172 > @command('unbundle2', [], '')
173 > def cmdunbundle2(ui, repo, replypath=None):
173 > def cmdunbundle2(ui, repo, replypath=None):
174 > """process a bundle2 stream from stdin on the current repo"""
174 > """process a bundle2 stream from stdin on the current repo"""
175 > try:
175 > try:
176 > tr = None
176 > tr = None
177 > lock = repo.lock()
177 > lock = repo.lock()
178 > tr = repo.transaction('processbundle')
178 > tr = repo.transaction('processbundle')
179 > try:
179 > try:
180 > unbundler = bundle2.getunbundler(ui, sys.stdin)
180 > unbundler = bundle2.getunbundler(ui, sys.stdin)
181 > op = bundle2.processbundle(repo, unbundler, lambda: tr)
181 > op = bundle2.processbundle(repo, unbundler, lambda: tr)
182 > tr.close()
182 > tr.close()
183 > except error.BundleValueError, exc:
183 > except error.BundleValueError, exc:
184 > raise error.Abort('missing support for %s' % exc)
184 > raise error.Abort('missing support for %s' % exc)
185 > except error.PushRaced, exc:
185 > except error.PushRaced, exc:
186 > raise error.Abort('push race: %s' % exc)
186 > raise error.Abort('push race: %s' % exc)
187 > finally:
187 > finally:
188 > if tr is not None:
188 > if tr is not None:
189 > tr.release()
189 > tr.release()
190 > lock.release()
190 > lock.release()
191 > remains = sys.stdin.read()
191 > remains = sys.stdin.read()
192 > ui.write('%i unread bytes\n' % len(remains))
192 > ui.write('%i unread bytes\n' % len(remains))
193 > if op.records['song']:
193 > if op.records['song']:
194 > totalverses = sum(r['verses'] for r in op.records['song'])
194 > totalverses = sum(r['verses'] for r in op.records['song'])
195 > ui.write('%i total verses sung\n' % totalverses)
195 > ui.write('%i total verses sung\n' % totalverses)
196 > for rec in op.records['changegroup']:
196 > for rec in op.records['changegroup']:
197 > ui.write('addchangegroup return: %i\n' % rec['return'])
197 > ui.write('addchangegroup return: %i\n' % rec['return'])
198 > if op.reply is not None and replypath is not None:
198 > if op.reply is not None and replypath is not None:
199 > with open(replypath, 'wb') as file:
199 > with open(replypath, 'wb') as file:
200 > for chunk in op.reply.getchunks():
200 > for chunk in op.reply.getchunks():
201 > file.write(chunk)
201 > file.write(chunk)
202 >
202 >
203 > @command('statbundle2', [], '')
203 > @command('statbundle2', [], '')
204 > def cmdstatbundle2(ui, repo):
204 > def cmdstatbundle2(ui, repo):
205 > """print statistics on the bundle2 container read from stdin"""
205 > """print statistics on the bundle2 container read from stdin"""
206 > unbundler = bundle2.getunbundler(ui, sys.stdin)
206 > unbundler = bundle2.getunbundler(ui, sys.stdin)
207 > try:
207 > try:
208 > params = unbundler.params
208 > params = unbundler.params
209 > except error.BundleValueError, exc:
209 > except error.BundleValueError, exc:
210 > raise error.Abort('unknown parameters: %s' % exc)
210 > raise error.Abort('unknown parameters: %s' % exc)
211 > ui.write('options count: %i\n' % len(params))
211 > ui.write('options count: %i\n' % len(params))
212 > for key in sorted(params):
212 > for key in sorted(params):
213 > ui.write('- %s\n' % key)
213 > ui.write('- %s\n' % key)
214 > value = params[key]
214 > value = params[key]
215 > if value is not None:
215 > if value is not None:
216 > ui.write(' %s\n' % value)
216 > ui.write(' %s\n' % value)
217 > count = 0
217 > count = 0
218 > for p in unbundler.iterparts():
218 > for p in unbundler.iterparts():
219 > count += 1
219 > count += 1
220 > ui.write(' :%s:\n' % p.type)
220 > ui.write(' :%s:\n' % p.type)
221 > ui.write(' mandatory: %i\n' % len(p.mandatoryparams))
221 > ui.write(' mandatory: %i\n' % len(p.mandatoryparams))
222 > ui.write(' advisory: %i\n' % len(p.advisoryparams))
222 > ui.write(' advisory: %i\n' % len(p.advisoryparams))
223 > ui.write(' payload: %i bytes\n' % len(p.read()))
223 > ui.write(' payload: %i bytes\n' % len(p.read()))
224 > ui.write('parts count: %i\n' % count)
224 > ui.write('parts count: %i\n' % count)
225 > EOF
225 > EOF
226 $ cat >> $HGRCPATH << EOF
226 $ cat >> $HGRCPATH << EOF
227 > [extensions]
227 > [extensions]
228 > bundle2=$TESTTMP/bundle2.py
228 > bundle2=$TESTTMP/bundle2.py
229 > [experimental]
229 > [experimental]
230 > evolution=createmarkers
230 > evolution=createmarkers
231 > [ui]
231 > [ui]
232 > ssh=python "$TESTDIR/dummyssh"
232 > ssh=python "$TESTDIR/dummyssh"
233 > logtemplate={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline}
233 > logtemplate={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline}
234 > [web]
234 > [web]
235 > push_ssl = false
235 > push_ssl = false
236 > allow_push = *
236 > allow_push = *
237 > [phases]
237 > [phases]
238 > publish=False
238 > publish=False
239 > EOF
239 > EOF
240
240
241 The extension requires a repo (currently unused)
241 The extension requires a repo (currently unused)
242
242
243 $ hg init main
243 $ hg init main
244 $ cd main
244 $ cd main
245 $ touch a
245 $ touch a
246 $ hg add a
246 $ hg add a
247 $ hg commit -m 'a'
247 $ hg commit -m 'a'
248
248
249
249
250 Empty bundle
250 Empty bundle
251 =================
251 =================
252
252
253 - no option
253 - no option
254 - no parts
254 - no parts
255
255
256 Test bundling
256 Test bundling
257
257
258 $ hg bundle2 | f --hexdump
258 $ hg bundle2 | f --hexdump
259
259
260 0000: 48 47 32 30 00 00 00 00 00 00 00 00 |HG20........|
260 0000: 48 47 32 30 00 00 00 00 00 00 00 00 |HG20........|
261
261
262 Test timeouts during bundling
262 Test timeouts during bundling
263 $ hg bundle2 --timeout --debug --config devel.bundle2.debug=yes
263 $ hg bundle2 --timeout --debug --config devel.bundle2.debug=yes
264 bundle2-output-bundle: "HG20", 1 parts total
264 bundle2-output-bundle: "HG20", 1 parts total
265 bundle2-output: start emission of HG20 stream
265 bundle2-output: start emission of HG20 stream
266 0 chunk
266 0 chunk
267 bundle2-output: bundle parameter:
267 bundle2-output: bundle parameter:
268 1 chunk
268 1 chunk
269 bundle2-output: start of parts
269 bundle2-output: start of parts
270 bundle2-output: bundle part: "test:song"
270 bundle2-output: bundle part: "test:song"
271 bundle2-output-part: "test:song" (advisory) 178 bytes payload
271 bundle2-output-part: "test:song" (advisory) 178 bytes payload
272 bundle2-output: part 0: "test:song"
272 bundle2-output: part 0: "test:song"
273 bundle2-output: header chunk size: 16
273 bundle2-output: header chunk size: 16
274 2 chunk
274 2 chunk
275 3 chunk
275 3 chunk
276 bundle2-output: payload chunk size: 178
276 bundle2-output: payload chunk size: 178
277 4 chunk
277 4 chunk
278 5 chunk
278 5 chunk
279 bundle2-generatorexit
279 bundle2-generatorexit
280 fake timeout complete.
280 fake timeout complete.
281
281
282 Test unbundling
282 Test unbundling
283
283
284 $ hg bundle2 | hg statbundle2
284 $ hg bundle2 | hg statbundle2
285 options count: 0
285 options count: 0
286 parts count: 0
286 parts count: 0
287
287
288 Test old style bundles are detected and refused
288 Test old style bundles are detected and refused
289
289
290 $ hg bundle --all --type v1 ../bundle.hg
290 $ hg bundle --all --type v1 ../bundle.hg
291 1 changesets found
291 1 changesets found
292 $ hg statbundle2 < ../bundle.hg
292 $ hg statbundle2 < ../bundle.hg
293 abort: unknown bundle version 10
293 abort: unknown bundle version 10
294 [255]
294 [255]
295
295
296 Test parameters
296 Test parameters
297 =================
297 =================
298
298
299 - some options
299 - some options
300 - no parts
300 - no parts
301
301
302 advisory parameters, no value
302 advisory parameters, no value
303 -------------------------------
303 -------------------------------
304
304
305 Simplest possible parameters form
305 Simplest possible parameters form
306
306
307 Test generation simple option
307 Test generation simple option
308
308
309 $ hg bundle2 --param 'caution' | f --hexdump
309 $ hg bundle2 --param 'caution' | f --hexdump
310
310
311 0000: 48 47 32 30 00 00 00 07 63 61 75 74 69 6f 6e 00 |HG20....caution.|
311 0000: 48 47 32 30 00 00 00 07 63 61 75 74 69 6f 6e 00 |HG20....caution.|
312 0010: 00 00 00 |...|
312 0010: 00 00 00 |...|
313
313
314 Test unbundling
314 Test unbundling
315
315
316 $ hg bundle2 --param 'caution' | hg statbundle2
316 $ hg bundle2 --param 'caution' | hg statbundle2
317 options count: 1
317 options count: 1
318 - caution
318 - caution
319 parts count: 0
319 parts count: 0
320
320
321 Test generation multiple option
321 Test generation multiple option
322
322
323 $ hg bundle2 --param 'caution' --param 'meal' | f --hexdump
323 $ hg bundle2 --param 'caution' --param 'meal' | f --hexdump
324
324
325 0000: 48 47 32 30 00 00 00 0c 63 61 75 74 69 6f 6e 20 |HG20....caution |
325 0000: 48 47 32 30 00 00 00 0c 63 61 75 74 69 6f 6e 20 |HG20....caution |
326 0010: 6d 65 61 6c 00 00 00 00 |meal....|
326 0010: 6d 65 61 6c 00 00 00 00 |meal....|
327
327
328 Test unbundling
328 Test unbundling
329
329
330 $ hg bundle2 --param 'caution' --param 'meal' | hg statbundle2
330 $ hg bundle2 --param 'caution' --param 'meal' | hg statbundle2
331 options count: 2
331 options count: 2
332 - caution
332 - caution
333 - meal
333 - meal
334 parts count: 0
334 parts count: 0
335
335
336 advisory parameters, with value
336 advisory parameters, with value
337 -------------------------------
337 -------------------------------
338
338
339 Test generation
339 Test generation
340
340
341 $ hg bundle2 --param 'caution' --param 'meal=vegan' --param 'elephants' | f --hexdump
341 $ hg bundle2 --param 'caution' --param 'meal=vegan' --param 'elephants' | f --hexdump
342
342
343 0000: 48 47 32 30 00 00 00 1c 63 61 75 74 69 6f 6e 20 |HG20....caution |
343 0000: 48 47 32 30 00 00 00 1c 63 61 75 74 69 6f 6e 20 |HG20....caution |
344 0010: 6d 65 61 6c 3d 76 65 67 61 6e 20 65 6c 65 70 68 |meal=vegan eleph|
344 0010: 6d 65 61 6c 3d 76 65 67 61 6e 20 65 6c 65 70 68 |meal=vegan eleph|
345 0020: 61 6e 74 73 00 00 00 00 |ants....|
345 0020: 61 6e 74 73 00 00 00 00 |ants....|
346
346
347 Test unbundling
347 Test unbundling
348
348
349 $ hg bundle2 --param 'caution' --param 'meal=vegan' --param 'elephants' | hg statbundle2
349 $ hg bundle2 --param 'caution' --param 'meal=vegan' --param 'elephants' | hg statbundle2
350 options count: 3
350 options count: 3
351 - caution
351 - caution
352 - elephants
352 - elephants
353 - meal
353 - meal
354 vegan
354 vegan
355 parts count: 0
355 parts count: 0
356
356
357 parameter with special char in value
357 parameter with special char in value
358 ---------------------------------------------------
358 ---------------------------------------------------
359
359
360 Test generation
360 Test generation
361
361
362 $ hg bundle2 --param 'e|! 7/=babar%#==tutu' --param simple | f --hexdump
362 $ hg bundle2 --param 'e|! 7/=babar%#==tutu' --param simple | f --hexdump
363
363
364 0000: 48 47 32 30 00 00 00 29 65 25 37 43 25 32 31 25 |HG20...)e%7C%21%|
364 0000: 48 47 32 30 00 00 00 29 65 25 37 43 25 32 31 25 |HG20...)e%7C%21%|
365 0010: 32 30 37 2f 3d 62 61 62 61 72 25 32 35 25 32 33 |207/=babar%25%23|
365 0010: 32 30 37 2f 3d 62 61 62 61 72 25 32 35 25 32 33 |207/=babar%25%23|
366 0020: 25 33 44 25 33 44 74 75 74 75 20 73 69 6d 70 6c |%3D%3Dtutu simpl|
366 0020: 25 33 44 25 33 44 74 75 74 75 20 73 69 6d 70 6c |%3D%3Dtutu simpl|
367 0030: 65 00 00 00 00 |e....|
367 0030: 65 00 00 00 00 |e....|
368
368
369 Test unbundling
369 Test unbundling
370
370
371 $ hg bundle2 --param 'e|! 7/=babar%#==tutu' --param simple | hg statbundle2
371 $ hg bundle2 --param 'e|! 7/=babar%#==tutu' --param simple | hg statbundle2
372 options count: 2
372 options count: 2
373 - e|! 7/
373 - e|! 7/
374 babar%#==tutu
374 babar%#==tutu
375 - simple
375 - simple
376 parts count: 0
376 parts count: 0
377
377
378 Test unknown mandatory option
378 Test unknown mandatory option
379 ---------------------------------------------------
379 ---------------------------------------------------
380
380
381 $ hg bundle2 --param 'Gravity' | hg statbundle2
381 $ hg bundle2 --param 'Gravity' | hg statbundle2
382 abort: unknown parameters: Stream Parameter - Gravity
382 abort: unknown parameters: Stream Parameter - Gravity
383 [255]
383 [255]
384
384
385 Test debug output
385 Test debug output
386 ---------------------------------------------------
386 ---------------------------------------------------
387
387
388 bundling debug
388 bundling debug
389
389
390 $ hg bundle2 --debug --param 'e|! 7/=babar%#==tutu' --param simple ../out.hg2 --config progress.debug=true --config devel.bundle2.debug=true
390 $ hg bundle2 --debug --param 'e|! 7/=babar%#==tutu' --param simple ../out.hg2 --config progress.debug=true --config devel.bundle2.debug=true
391 bundle2-output-bundle: "HG20", (2 params) 0 parts total
391 bundle2-output-bundle: "HG20", (2 params) 0 parts total
392 bundle2-output: start emission of HG20 stream
392 bundle2-output: start emission of HG20 stream
393 bundle2-output: bundle parameter: e%7C%21%207/=babar%25%23%3D%3Dtutu simple
393 bundle2-output: bundle parameter: e%7C%21%207/=babar%25%23%3D%3Dtutu simple
394 bundle2-output: start of parts
394 bundle2-output: start of parts
395 bundle2-output: end of bundle
395 bundle2-output: end of bundle
396
396
397 file content is ok
397 file content is ok
398
398
399 $ f --hexdump ../out.hg2
399 $ f --hexdump ../out.hg2
400 ../out.hg2:
400 ../out.hg2:
401 0000: 48 47 32 30 00 00 00 29 65 25 37 43 25 32 31 25 |HG20...)e%7C%21%|
401 0000: 48 47 32 30 00 00 00 29 65 25 37 43 25 32 31 25 |HG20...)e%7C%21%|
402 0010: 32 30 37 2f 3d 62 61 62 61 72 25 32 35 25 32 33 |207/=babar%25%23|
402 0010: 32 30 37 2f 3d 62 61 62 61 72 25 32 35 25 32 33 |207/=babar%25%23|
403 0020: 25 33 44 25 33 44 74 75 74 75 20 73 69 6d 70 6c |%3D%3Dtutu simpl|
403 0020: 25 33 44 25 33 44 74 75 74 75 20 73 69 6d 70 6c |%3D%3Dtutu simpl|
404 0030: 65 00 00 00 00 |e....|
404 0030: 65 00 00 00 00 |e....|
405
405
406 unbundling debug
406 unbundling debug
407
407
408 $ hg statbundle2 --debug --config progress.debug=true --config devel.bundle2.debug=true < ../out.hg2
408 $ hg statbundle2 --debug --config progress.debug=true --config devel.bundle2.debug=true < ../out.hg2
409 bundle2-input: start processing of HG20 stream
409 bundle2-input: start processing of HG20 stream
410 bundle2-input: reading bundle2 stream parameters
410 bundle2-input: reading bundle2 stream parameters
411 bundle2-input: ignoring unknown parameter 'e|! 7/'
411 bundle2-input: ignoring unknown parameter 'e|! 7/'
412 bundle2-input: ignoring unknown parameter 'simple'
412 bundle2-input: ignoring unknown parameter 'simple'
413 options count: 2
413 options count: 2
414 - e|! 7/
414 - e|! 7/
415 babar%#==tutu
415 babar%#==tutu
416 - simple
416 - simple
417 bundle2-input: start extraction of bundle2 parts
417 bundle2-input: start extraction of bundle2 parts
418 bundle2-input: part header size: 0
418 bundle2-input: part header size: 0
419 bundle2-input: end of bundle2 stream
419 bundle2-input: end of bundle2 stream
420 parts count: 0
420 parts count: 0
421
421
422
422
423 Test buggy input
423 Test buggy input
424 ---------------------------------------------------
424 ---------------------------------------------------
425
425
426 empty parameter name
426 empty parameter name
427
427
428 $ hg bundle2 --param '' --quiet
428 $ hg bundle2 --param '' --quiet
429 abort: empty parameter name
429 abort: empty parameter name
430 [255]
430 [255]
431
431
432 bad parameter name
432 bad parameter name
433
433
434 $ hg bundle2 --param 42babar
434 $ hg bundle2 --param 42babar
435 abort: non letter first character: '42babar'
435 abort: non letter first character: '42babar'
436 [255]
436 [255]
437
437
438
438
439 Test part
439 Test part
440 =================
440 =================
441
441
442 $ hg bundle2 --parts ../parts.hg2 --debug --config progress.debug=true --config devel.bundle2.debug=true
442 $ hg bundle2 --parts ../parts.hg2 --debug --config progress.debug=true --config devel.bundle2.debug=true
443 bundle2-output-bundle: "HG20", 7 parts total
443 bundle2-output-bundle: "HG20", 7 parts total
444 bundle2-output: start emission of HG20 stream
444 bundle2-output: start emission of HG20 stream
445 bundle2-output: bundle parameter:
445 bundle2-output: bundle parameter:
446 bundle2-output: start of parts
446 bundle2-output: start of parts
447 bundle2-output: bundle part: "test:empty"
bundle2-output: bundle part: "test:empty"
bundle2-output-part: "test:empty" (advisory) empty payload
bundle2-output: part 0: "test:empty"
bundle2-output: header chunk size: 17
bundle2-output: closing payload chunk
bundle2-output: bundle part: "test:empty"
bundle2-output-part: "test:empty" (advisory) empty payload
bundle2-output: part 1: "test:empty"
bundle2-output: header chunk size: 17
bundle2-output: closing payload chunk
bundle2-output: bundle part: "test:song"
bundle2-output-part: "test:song" (advisory) 178 bytes payload
bundle2-output: part 2: "test:song"
bundle2-output: header chunk size: 16
bundle2-output: payload chunk size: 178
bundle2-output: closing payload chunk
bundle2-output: bundle part: "test:debugreply"
bundle2-output-part: "test:debugreply" (advisory) empty payload
bundle2-output: part 3: "test:debugreply"
bundle2-output: header chunk size: 22
bundle2-output: closing payload chunk
bundle2-output: bundle part: "test:math"
bundle2-output-part: "test:math" (advisory) (params: 2 mandatory 2 advisory) 2 bytes payload
bundle2-output: part 4: "test:math"
bundle2-output: header chunk size: 43
bundle2-output: payload chunk size: 2
bundle2-output: closing payload chunk
bundle2-output: bundle part: "test:song"
bundle2-output-part: "test:song" (advisory) (params: 1 mandatory) empty payload
bundle2-output: part 5: "test:song"
bundle2-output: header chunk size: 29
bundle2-output: closing payload chunk
bundle2-output: bundle part: "test:ping"
bundle2-output-part: "test:ping" (advisory) empty payload
bundle2-output: part 6: "test:ping"
bundle2-output: header chunk size: 16
bundle2-output: closing payload chunk
bundle2-output: end of bundle

$ f --hexdump ../parts.hg2
../parts.hg2:
0000: 48 47 32 30 00 00 00 00 00 00 00 11 0a 74 65 73 |HG20.........tes|
0010: 74 3a 65 6d 70 74 79 00 00 00 00 00 00 00 00 00 |t:empty.........|
0020: 00 00 00 00 11 0a 74 65 73 74 3a 65 6d 70 74 79 |......test:empty|
0030: 00 00 00 01 00 00 00 00 00 00 00 00 00 10 09 74 |...............t|
0040: 65 73 74 3a 73 6f 6e 67 00 00 00 02 00 00 00 00 |est:song........|
0050: 00 b2 50 61 74 61 6c 69 20 44 69 72 61 70 61 74 |..Patali Dirapat|
0060: 61 2c 20 43 72 6f 6d 64 61 20 43 72 6f 6d 64 61 |a, Cromda Cromda|
0070: 20 52 69 70 61 6c 6f 2c 20 50 61 74 61 20 50 61 | Ripalo, Pata Pa|
0080: 74 61 2c 20 4b 6f 20 4b 6f 20 4b 6f 0a 42 6f 6b |ta, Ko Ko Ko.Bok|
0090: 6f 72 6f 20 44 69 70 6f 75 6c 69 74 6f 2c 20 52 |oro Dipoulito, R|
00a0: 6f 6e 64 69 20 52 6f 6e 64 69 20 50 65 70 69 6e |ondi Rondi Pepin|
00b0: 6f 2c 20 50 61 74 61 20 50 61 74 61 2c 20 4b 6f |o, Pata Pata, Ko|
00c0: 20 4b 6f 20 4b 6f 0a 45 6d 61 6e 61 20 4b 61 72 | Ko Ko.Emana Kar|
00d0: 61 73 73 6f 6c 69 2c 20 4c 6f 75 63 72 61 20 4c |assoli, Loucra L|
00e0: 6f 75 63 72 61 20 50 6f 6e 70 6f 6e 74 6f 2c 20 |oucra Ponponto, |
00f0: 50 61 74 61 20 50 61 74 61 2c 20 4b 6f 20 4b 6f |Pata Pata, Ko Ko|
0100: 20 4b 6f 2e 00 00 00 00 00 00 00 16 0f 74 65 73 | Ko..........tes|
0110: 74 3a 64 65 62 75 67 72 65 70 6c 79 00 00 00 03 |t:debugreply....|
0120: 00 00 00 00 00 00 00 00 00 2b 09 74 65 73 74 3a |.........+.test:|
0130: 6d 61 74 68 00 00 00 04 02 01 02 04 01 04 07 03 |math............|
0140: 70 69 33 2e 31 34 65 32 2e 37 32 63 6f 6f 6b 69 |pi3.14e2.72cooki|
0150: 6e 67 72 61 77 00 00 00 02 34 32 00 00 00 00 00 |ngraw....42.....|
0160: 00 00 1d 09 74 65 73 74 3a 73 6f 6e 67 00 00 00 |....test:song...|
0170: 05 01 00 0b 00 72 61 6e 64 6f 6d 70 61 72 61 6d |.....randomparam|
0180: 00 00 00 00 00 00 00 10 09 74 65 73 74 3a 70 69 |.........test:pi|
0190: 6e 67 00 00 00 06 00 00 00 00 00 00 00 00 00 00 |ng..............|


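(Aside, not part of the original test: the hexdump above makes the HG20 framing visible that the debug trace describes -- a four-byte magic, a 32-bit stream-parameter length, then for each part a 32-bit header size, the header itself, and a run of length-prefixed payload chunks closed by an empty chunk; a header size of zero ends the bundle. A minimal Python 3 sketch of a reader for such an uncompressed, parameter-less stream -- listparts() is a made-up helper here, not a Mercurial API:

  import struct

  def listparts(path):
      # Walk the HG20 framing shown in the hexdump above.  Sketch only:
      # assumes no stream parameters and no compression.
      parts = []
      with open(path, 'rb') as f:
          assert f.read(4) == b'HG20'                  # stream magic
          paramssize = struct.unpack('>i', f.read(4))[0]
          f.read(paramssize)                           # skip stream parameters
          while True:
              headersize = struct.unpack('>i', f.read(4))[0]
              if headersize == 0:                      # end-of-bundle marker
                  break
              header = f.read(headersize)
              namelen = header[0]                      # 1 byte: part type length
              parts.append(header[1:1 + namelen].decode('ascii'))
              while True:                              # drain payload chunks
                  chunksize = struct.unpack('>i', f.read(4))[0]
                  if chunksize == 0:                   # empty chunk closes payload
                      break
                  f.read(chunksize)
      return parts

Run on ../parts.hg2 it should report the same seven part types that hg statbundle2 lists next.)
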
$ hg statbundle2 < ../parts.hg2
options count: 0
:test:empty:
mandatory: 0
advisory: 0
payload: 0 bytes
:test:empty:
mandatory: 0
advisory: 0
payload: 0 bytes
:test:song:
mandatory: 0
advisory: 0
payload: 178 bytes
:test:debugreply:
mandatory: 0
advisory: 0
payload: 0 bytes
:test:math:
mandatory: 2
advisory: 1
payload: 2 bytes
:test:song:
mandatory: 1
advisory: 0
payload: 0 bytes
:test:ping:
mandatory: 0
advisory: 0
payload: 0 bytes
parts count: 7

$ hg statbundle2 --debug --config progress.debug=true --config devel.bundle2.debug=true < ../parts.hg2
bundle2-input: start processing of HG20 stream
bundle2-input: reading bundle2 stream parameters
options count: 0
bundle2-input: start extraction of bundle2 parts
bundle2-input: part header size: 17
bundle2-input: part type: "test:empty"
bundle2-input: part id: "0"
bundle2-input: part parameters: 0
:test:empty:
mandatory: 0
advisory: 0
bundle2-input: payload chunk size: 0
payload: 0 bytes
bundle2-input: part header size: 17
bundle2-input: part type: "test:empty"
bundle2-input: part id: "1"
bundle2-input: part parameters: 0
:test:empty:
mandatory: 0
advisory: 0
bundle2-input: payload chunk size: 0
payload: 0 bytes
bundle2-input: part header size: 16
bundle2-input: part type: "test:song"
bundle2-input: part id: "2"
bundle2-input: part parameters: 0
:test:song:
mandatory: 0
advisory: 0
bundle2-input: payload chunk size: 178
bundle2-input: payload chunk size: 0
bundle2-input-part: total payload size 178
payload: 178 bytes
bundle2-input: part header size: 22
bundle2-input: part type: "test:debugreply"
bundle2-input: part id: "3"
bundle2-input: part parameters: 0
:test:debugreply:
mandatory: 0
advisory: 0
bundle2-input: payload chunk size: 0
payload: 0 bytes
bundle2-input: part header size: 43
bundle2-input: part type: "test:math"
bundle2-input: part id: "4"
bundle2-input: part parameters: 3
:test:math:
mandatory: 2
advisory: 1
bundle2-input: payload chunk size: 2
bundle2-input: payload chunk size: 0
bundle2-input-part: total payload size 2
payload: 2 bytes
bundle2-input: part header size: 29
bundle2-input: part type: "test:song"
bundle2-input: part id: "5"
bundle2-input: part parameters: 1
:test:song:
mandatory: 1
advisory: 0
bundle2-input: payload chunk size: 0
payload: 0 bytes
bundle2-input: part header size: 16
bundle2-input: part type: "test:ping"
bundle2-input: part id: "6"
bundle2-input: part parameters: 0
:test:ping:
mandatory: 0
advisory: 0
bundle2-input: payload chunk size: 0
payload: 0 bytes
bundle2-input: part header size: 0
bundle2-input: end of bundle2 stream
parts count: 7

Test actual unbundling of test part
=======================================

Process the bundle

$ hg unbundle2 --debug --config progress.debug=true --config devel.bundle2.debug=true < ../parts.hg2
bundle2-input: start processing of HG20 stream
bundle2-input: reading bundle2 stream parameters
bundle2-input-bundle: with-transaction
bundle2-input: start extraction of bundle2 parts
bundle2-input: part header size: 17
bundle2-input: part type: "test:empty"
bundle2-input: part id: "0"
bundle2-input: part parameters: 0
bundle2-input: ignoring unsupported advisory part test:empty
bundle2-input-part: "test:empty" (advisory) unsupported-type
bundle2-input: payload chunk size: 0
bundle2-input: part header size: 17
bundle2-input: part type: "test:empty"
bundle2-input: part id: "1"
bundle2-input: part parameters: 0
bundle2-input: ignoring unsupported advisory part test:empty
bundle2-input-part: "test:empty" (advisory) unsupported-type
bundle2-input: payload chunk size: 0
bundle2-input: part header size: 16
bundle2-input: part type: "test:song"
bundle2-input: part id: "2"
bundle2-input: part parameters: 0
bundle2-input: found a handler for part 'test:song'
bundle2-input-part: "test:song" (advisory) supported
The choir starts singing:
bundle2-input: payload chunk size: 178
bundle2-input: payload chunk size: 0
bundle2-input-part: total payload size 178
Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
bundle2-input: part header size: 22
bundle2-input: part type: "test:debugreply"
bundle2-input: part id: "3"
bundle2-input: part parameters: 0
bundle2-input: found a handler for part 'test:debugreply'
bundle2-input-part: "test:debugreply" (advisory) supported
debugreply: no reply
bundle2-input: payload chunk size: 0
bundle2-input: part header size: 43
bundle2-input: part type: "test:math"
bundle2-input: part id: "4"
bundle2-input: part parameters: 3
bundle2-input: ignoring unsupported advisory part test:math
bundle2-input-part: "test:math" (advisory) (params: 2 mandatory 2 advisory) unsupported-type
bundle2-input: payload chunk size: 2
bundle2-input: payload chunk size: 0
bundle2-input-part: total payload size 2
bundle2-input: part header size: 29
bundle2-input: part type: "test:song"
bundle2-input: part id: "5"
bundle2-input: part parameters: 1
bundle2-input: found a handler for part 'test:song'
bundle2-input: ignoring unsupported advisory part test:song - randomparam
bundle2-input-part: "test:song" (advisory) (params: 1 mandatory) unsupported-params (['randomparam'])
bundle2-input: payload chunk size: 0
bundle2-input: part header size: 16
bundle2-input: part type: "test:ping"
bundle2-input: part id: "6"
bundle2-input: part parameters: 0
bundle2-input: found a handler for part 'test:ping'
bundle2-input-part: "test:ping" (advisory) supported
received ping request (id 6)
bundle2-input: payload chunk size: 0
bundle2-input: part header size: 0
bundle2-input: end of bundle2 stream
bundle2-input-bundle: 6 parts total
0 unread bytes
3 total verses sung

Unbundle with an unknown mandatory part
(should abort)

$ hg bundle2 --parts --unknown ../unknown.hg2

$ hg unbundle2 < ../unknown.hg2
The choir starts singing:
Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
debugreply: no reply
0 unread bytes
abort: missing support for test:unknown
[255]

Unbundle with unknown mandatory part parameters
(should abort)

$ hg bundle2 --unknownparams ../unknown.hg2

$ hg unbundle2 < ../unknown.hg2
0 unread bytes
abort: missing support for test:song - randomparams
[255]

unbundle with a reply

$ hg bundle2 --parts --reply ../parts-reply.hg2
$ hg unbundle2 ../reply.hg2 < ../parts-reply.hg2
0 unread bytes
3 total verses sung

The reply is a bundle

$ f --hexdump ../reply.hg2
../reply.hg2:
0000: 48 47 32 30 00 00 00 00 00 00 00 1b 06 6f 75 74 |HG20.........out|
0010: 70 75 74 00 00 00 00 00 01 0b 01 69 6e 2d 72 65 |put........in-re|
0020: 70 6c 79 2d 74 6f 33 00 00 00 d9 54 68 65 20 63 |ply-to3....The c|
0030: 68 6f 69 72 20 73 74 61 72 74 73 20 73 69 6e 67 |hoir starts sing|
0040: 69 6e 67 3a 0a 20 20 20 20 50 61 74 61 6c 69 20 |ing:.    Patali |
0050: 44 69 72 61 70 61 74 61 2c 20 43 72 6f 6d 64 61 |Dirapata, Cromda|
0060: 20 43 72 6f 6d 64 61 20 52 69 70 61 6c 6f 2c 20 | Cromda Ripalo, |
0070: 50 61 74 61 20 50 61 74 61 2c 20 4b 6f 20 4b 6f |Pata Pata, Ko Ko|
0080: 20 4b 6f 0a 20 20 20 20 42 6f 6b 6f 72 6f 20 44 | Ko.    Bokoro D|
0090: 69 70 6f 75 6c 69 74 6f 2c 20 52 6f 6e 64 69 20 |ipoulito, Rondi |
00a0: 52 6f 6e 64 69 20 50 65 70 69 6e 6f 2c 20 50 61 |Rondi Pepino, Pa|
00b0: 74 61 20 50 61 74 61 2c 20 4b 6f 20 4b 6f 20 4b |ta Pata, Ko Ko K|
00c0: 6f 0a 20 20 20 20 45 6d 61 6e 61 20 4b 61 72 61 |o.    Emana Kara|
00d0: 73 73 6f 6c 69 2c 20 4c 6f 75 63 72 61 20 4c 6f |ssoli, Loucra Lo|
00e0: 75 63 72 61 20 50 6f 6e 70 6f 6e 74 6f 2c 20 50 |ucra Ponponto, P|
00f0: 61 74 61 20 50 61 74 61 2c 20 4b 6f 20 4b 6f 20 |ata Pata, Ko Ko |
0100: 4b 6f 2e 0a 00 00 00 00 00 00 00 1b 06 6f 75 74 |Ko...........out|
0110: 70 75 74 00 00 00 01 00 01 0b 01 69 6e 2d 72 65 |put........in-re|
0120: 70 6c 79 2d 74 6f 34 00 00 00 c9 64 65 62 75 67 |ply-to4....debug|
0130: 72 65 70 6c 79 3a 20 63 61 70 61 62 69 6c 69 74 |reply: capabilit|
0140: 69 65 73 3a 0a 64 65 62 75 67 72 65 70 6c 79 3a |ies:.debugreply:|
0150: 20 20 20 20 20 27 63 69 74 79 3d 21 27 0a 64 65 |     'city=!'.de|
0160: 62 75 67 72 65 70 6c 79 3a 20 20 20 20 20 20 20 |bugreply:       |
0170: 20 20 27 63 65 6c 65 73 74 65 2c 76 69 6c 6c 65 |  'celeste,ville|
0180: 27 0a 64 65 62 75 67 72 65 70 6c 79 3a 20 20 20 |'.debugreply:   |
0190: 20 20 27 65 6c 65 70 68 61 6e 74 73 27 0a 64 65 |  'elephants'.de|
01a0: 62 75 67 72 65 70 6c 79 3a 20 20 20 20 20 20 20 |bugreply:       |
01b0: 20 20 27 62 61 62 61 72 27 0a 64 65 62 75 67 72 |  'babar'.debugr|
01c0: 65 70 6c 79 3a 20 20 20 20 20 20 20 20 20 27 63 |eply:         'c|
01d0: 65 6c 65 73 74 65 27 0a 64 65 62 75 67 72 65 70 |eleste'.debugrep|
01e0: 6c 79 3a 20 20 20 20 20 27 70 69 6e 67 2d 70 6f |ly:     'ping-po|
01f0: 6e 67 27 0a 00 00 00 00 00 00 00 1e 09 74 65 73 |ng'..........tes|
0200: 74 3a 70 6f 6e 67 00 00 00 02 01 00 0b 01 69 6e |t:pong........in|
0210: 2d 72 65 70 6c 79 2d 74 6f 37 00 00 00 00 00 00 |-reply-to7......|
0220: 00 1b 06 6f 75 74 70 75 74 00 00 00 03 00 01 0b |...output.......|
0230: 01 69 6e 2d 72 65 70 6c 79 2d 74 6f 37 00 00 00 |.in-reply-to7...|
0240: 3d 72 65 63 65 69 76 65 64 20 70 69 6e 67 20 72 |=received ping r|
0250: 65 71 75 65 73 74 20 28 69 64 20 37 29 0a 72 65 |equest (id 7).re|
0260: 70 6c 79 69 6e 67 20 74 6f 20 70 69 6e 67 20 72 |plying to ping r|
0270: 65 71 75 65 73 74 20 28 69 64 20 37 29 0a 00 00 |equest (id 7)...|
0280: 00 00 00 00 00 00 |......|

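(Aside, not from the original test: decoding the first bytes of the dump above by hand matches what statbundle2 reports just below. After the "HG20" magic and a zero-length stream-parameter block, the first part header is 0x1b = 27 bytes: a type-name length of 6 followed by "output", the 32-bit part id 0, 0 mandatory and 1 advisory parameter, the size pair 0x0b/0x01, and the parameter "in-reply-to" = "3". The next four bytes, 00 00 00 d9, announce a 217-byte payload chunk -- the same 217-byte payload listed for the first :output: part below.)
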
The reply is valid

$ hg statbundle2 < ../reply.hg2
options count: 0
:output:
mandatory: 0
advisory: 1
payload: 217 bytes
:output:
mandatory: 0
advisory: 1
payload: 201 bytes
:test:pong:
mandatory: 1
advisory: 0
payload: 0 bytes
:output:
mandatory: 0
advisory: 1
payload: 61 bytes
parts count: 4

Unbundle the reply to get the output:

$ hg unbundle2 < ../reply.hg2
remote: The choir starts singing:
remote: Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
remote: Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
remote: Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
remote: debugreply: capabilities:
remote: debugreply: 'city=!'
remote: debugreply: 'celeste,ville'
remote: debugreply: 'elephants'
remote: debugreply: 'babar'
remote: debugreply: 'celeste'
remote: debugreply: 'ping-pong'
remote: received ping request (id 7)
remote: replying to ping request (id 7)
0 unread bytes

Test push race detection

$ hg bundle2 --pushrace ../part-race.hg2

$ hg unbundle2 < ../part-race.hg2
0 unread bytes
abort: push race: repository changed while pushing - please try again
[255]

Support for changegroup
===================================

$ hg unbundle $TESTDIR/bundles/rebase.hg
adding changesets
adding manifests
adding file changes
added 8 changesets with 7 changes to 7 files (+3 heads)
(run 'hg heads' to see heads, 'hg merge' to merge)

$ hg log -G
o 8:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> H
|
| o 7:eea13746799a draft Nicolas Dumazet <nicdumz.commits@gmail.com> G
|/|
o | 6:24b6387c8c8c draft Nicolas Dumazet <nicdumz.commits@gmail.com> F
| |
| o 5:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
|/
| o 4:32af7686d403 draft Nicolas Dumazet <nicdumz.commits@gmail.com> D
| |
| o 3:5fddd98957c8 draft Nicolas Dumazet <nicdumz.commits@gmail.com> C
| |
| o 2:42ccdea3bb16 draft Nicolas Dumazet <nicdumz.commits@gmail.com> B
|/
o 1:cd010b8cd998 draft Nicolas Dumazet <nicdumz.commits@gmail.com> A

@ 0:3903775176ed draft test a


$ hg bundle2 --debug --config progress.debug=true --config devel.bundle2.debug=true --rev '8+7+5+4' ../rev.hg2
4 changesets found
list of changesets:
32af7686d403cf45b5d95f2d70cebea587ac806a
9520eea781bcca16c1e15acc0ba14335a0e8e5ba
eea13746799a9e0bfd88f29d3c2e9dc9389f524f
02de42196ebee42ef284b6780a87cdc96e8eaab6
bundle2-output-bundle: "HG20", 1 parts total
bundle2-output: start emission of HG20 stream
bundle2-output: bundle parameter:
bundle2-output: start of parts
bundle2-output: bundle part: "changegroup"
bundle2-output-part: "changegroup" (advisory) streamed payload
bundle2-output: part 0: "changegroup"
bundle2-output: header chunk size: 18
bundling: 1/4 changesets (25.00%)
bundling: 2/4 changesets (50.00%)
bundling: 3/4 changesets (75.00%)
bundling: 4/4 changesets (100.00%)
bundling: 1/4 manifests (25.00%)
bundling: 2/4 manifests (50.00%)
bundling: 3/4 manifests (75.00%)
bundling: 4/4 manifests (100.00%)
bundling: D 1/3 files (33.33%)
bundling: E 2/3 files (66.67%)
bundling: H 3/3 files (100.00%)
bundle2-output: payload chunk size: 1555
bundle2-output: closing payload chunk
bundle2-output: end of bundle

$ f --hexdump ../rev.hg2
../rev.hg2:
0000: 48 47 32 30 00 00 00 00 00 00 00 12 0b 63 68 61 |HG20.........cha|
0010: 6e 67 65 67 72 6f 75 70 00 00 00 00 00 00 00 00 |ngegroup........|
0020: 06 13 00 00 00 a4 32 af 76 86 d4 03 cf 45 b5 d9 |......2.v....E..|
0030: 5f 2d 70 ce be a5 87 ac 80 6a 5f dd d9 89 57 c8 |_-p......j_...W.|
0040: a5 4a 4d 43 6d fe 1d a9 d8 7f 21 a1 b9 7b 00 00 |.JMCm.....!..{..|
0050: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
0060: 00 00 32 af 76 86 d4 03 cf 45 b5 d9 5f 2d 70 ce |..2.v....E.._-p.|
0070: be a5 87 ac 80 6a 00 00 00 00 00 00 00 29 00 00 |.....j.......)..|
0080: 00 29 36 65 31 66 34 63 34 37 65 63 62 35 33 33 |.)6e1f4c47ecb533|
0090: 66 66 64 30 63 38 65 35 32 63 64 63 38 38 61 66 |ffd0c8e52cdc88af|
00a0: 62 36 63 64 33 39 65 32 30 63 0a 00 00 00 66 00 |b6cd39e20c....f.|
00b0: 00 00 68 00 00 00 02 44 0a 00 00 00 69 00 00 00 |..h....D....i...|
00c0: 6a 00 00 00 01 44 00 00 00 a4 95 20 ee a7 81 bc |j....D..... ....|
00d0: ca 16 c1 e1 5a cc 0b a1 43 35 a0 e8 e5 ba cd 01 |....Z...C5......|
00e0: 0b 8c d9 98 f3 98 1a 5a 81 15 f9 4f 8d a4 ab 50 |.......Z...O...P|
00f0: 60 89 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |`...............|
0100: 00 00 00 00 00 00 95 20 ee a7 81 bc ca 16 c1 e1 |....... ........|
0110: 5a cc 0b a1 43 35 a0 e8 e5 ba 00 00 00 00 00 00 |Z...C5..........|
0120: 00 29 00 00 00 29 34 64 65 63 65 39 63 38 32 36 |.)...)4dece9c826|
0130: 66 36 39 34 39 30 35 30 37 62 39 38 63 36 33 38 |f69490507b98c638|
0140: 33 61 33 30 30 39 62 32 39 35 38 33 37 64 0a 00 |3a3009b295837d..|
0150: 00 00 66 00 00 00 68 00 00 00 02 45 0a 00 00 00 |..f...h....E....|
0160: 69 00 00 00 6a 00 00 00 01 45 00 00 00 a2 ee a1 |i...j....E......|
0170: 37 46 79 9a 9e 0b fd 88 f2 9d 3c 2e 9d c9 38 9f |7Fy.......<...8.|
0180: 52 4f 24 b6 38 7c 8c 8c ae 37 17 88 80 f3 fa 95 |RO$.8|...7......|
0190: de d3 cb 1c f7 85 95 20 ee a7 81 bc ca 16 c1 e1 |....... ........|
01a0: 5a cc 0b a1 43 35 a0 e8 e5 ba ee a1 37 46 79 9a |Z...C5......7Fy.|
01b0: 9e 0b fd 88 f2 9d 3c 2e 9d c9 38 9f 52 4f 00 00 |......<...8.RO..|
01c0: 00 00 00 00 00 29 00 00 00 29 33 36 35 62 39 33 |.....)...)365b93|
01d0: 64 35 37 66 64 66 34 38 31 34 65 32 62 35 39 31 |d57fdf4814e2b591|
01e0: 31 64 36 62 61 63 66 66 32 62 31 32 30 31 34 34 |1d6bacff2b120144|
01f0: 34 31 0a 00 00 00 66 00 00 00 68 00 00 00 00 00 |41....f...h.....|
0200: 00 00 69 00 00 00 6a 00 00 00 01 47 00 00 00 a4 |..i...j....G....|
0210: 02 de 42 19 6e be e4 2e f2 84 b6 78 0a 87 cd c9 |..B.n......x....|
0220: 6e 8e aa b6 24 b6 38 7c 8c 8c ae 37 17 88 80 f3 |n...$.8|...7....|
0230: fa 95 de d3 cb 1c f7 85 00 00 00 00 00 00 00 00 |................|
0240: 00 00 00 00 00 00 00 00 00 00 00 00 02 de 42 19 |..............B.|
0250: 6e be e4 2e f2 84 b6 78 0a 87 cd c9 6e 8e aa b6 |n......x....n...|
0260: 00 00 00 00 00 00 00 29 00 00 00 29 38 62 65 65 |.......)...)8bee|
0270: 34 38 65 64 63 37 33 31 38 35 34 31 66 63 30 30 |48edc7318541fc00|
0280: 31 33 65 65 34 31 62 30 38 39 32 37 36 61 38 63 |13ee41b089276a8c|
0290: 32 34 62 66 0a 00 00 00 66 00 00 00 66 00 00 00 |24bf....f...f...|
02a0: 02 48 0a 00 00 00 67 00 00 00 68 00 00 00 01 48 |.H....g...h....H|
02b0: 00 00 00 00 00 00 00 8b 6e 1f 4c 47 ec b5 33 ff |........n.LG..3.|
02c0: d0 c8 e5 2c dc 88 af b6 cd 39 e2 0c 66 a5 a0 18 |...,.....9..f...|
02d0: 17 fd f5 23 9c 27 38 02 b5 b7 61 8d 05 1c 89 e4 |...#.'8...a.....|
02e0: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
02f0: 00 00 00 00 32 af 76 86 d4 03 cf 45 b5 d9 5f 2d |....2.v....E.._-|
0300: 70 ce be a5 87 ac 80 6a 00 00 00 81 00 00 00 81 |p......j........|
0310: 00 00 00 2b 44 00 63 33 66 31 63 61 32 39 32 34 |...+D.c3f1ca2924|
0320: 63 31 36 61 31 39 62 30 36 35 36 61 38 34 39 30 |c16a19b0656a8490|
0330: 30 65 35 30 34 65 35 62 30 61 65 63 32 64 0a 00 |0e504e5b0aec2d..|
0340: 00 00 8b 4d ec e9 c8 26 f6 94 90 50 7b 98 c6 38 |...M...&...P{..8|
0350: 3a 30 09 b2 95 83 7d 00 7d 8c 9d 88 84 13 25 f5 |:0....}.}.....%.|
0360: c6 b0 63 71 b3 5b 4e 8a 2b 1a 83 00 00 00 00 00 |..cq.[N.+.......|
0370: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 95 |................|
0380: 20 ee a7 81 bc ca 16 c1 e1 5a cc 0b a1 43 35 a0 | ........Z...C5.|
0390: e8 e5 ba 00 00 00 2b 00 00 00 ac 00 00 00 2b 45 |......+.......+E|
03a0: 00 39 63 36 66 64 30 33 35 30 61 36 63 30 64 30 |.9c6fd0350a6c0d0|
03b0: 63 34 39 64 34 61 39 63 35 30 31 37 63 66 30 37 |c49d4a9c5017cf07|
03c0: 30 34 33 66 35 34 65 35 38 0a 00 00 00 8b 36 5b |043f54e58.....6[|
03d0: 93 d5 7f df 48 14 e2 b5 91 1d 6b ac ff 2b 12 01 |....H.....k..+..|
03e0: 44 41 28 a5 84 c6 5e f1 21 f8 9e b6 6a b7 d0 bc |DA(...^.!...j...|
03f0: 15 3d 80 99 e7 ce 4d ec e9 c8 26 f6 94 90 50 7b |.=....M...&...P{|
0400: 98 c6 38 3a 30 09 b2 95 83 7d ee a1 37 46 79 9a |..8:0....}..7Fy.|
0410: 9e 0b fd 88 f2 9d 3c 2e 9d c9 38 9f 52 4f 00 00 |......<...8.RO..|
0420: 00 56 00 00 00 56 00 00 00 2b 46 00 32 32 62 66 |.V...V...+F.22bf|
0430: 63 66 64 36 32 61 32 31 61 33 32 38 37 65 64 62 |cfd62a21a3287edb|
0440: 64 34 64 36 35 36 32 31 38 64 30 66 35 32 35 65 |d4d656218d0f525e|
0450: 64 37 36 61 0a 00 00 00 97 8b ee 48 ed c7 31 85 |d76a.......H..1.|
0460: 41 fc 00 13 ee 41 b0 89 27 6a 8c 24 bf 28 a5 84 |A....A..'j.$.(..|
0470: c6 5e f1 21 f8 9e b6 6a b7 d0 bc 15 3d 80 99 e7 |.^.!...j....=...|
0480: ce 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
0490: 00 00 00 00 00 02 de 42 19 6e be e4 2e f2 84 b6 |.......B.n......|
04a0: 78 0a 87 cd c9 6e 8e aa b6 00 00 00 2b 00 00 00 |x....n......+...|
04b0: 56 00 00 00 00 00 00 00 81 00 00 00 81 00 00 00 |V...............|
04c0: 2b 48 00 38 35 30 30 31 38 39 65 37 34 61 39 65 |+H.8500189e74a9e|
04d0: 30 34 37 35 65 38 32 32 30 39 33 62 63 37 64 62 |0475e822093bc7db|
04e0: 30 64 36 33 31 61 65 62 30 62 34 0a 00 00 00 00 |0d631aeb0b4.....|
04f0: 00 00 00 05 44 00 00 00 62 c3 f1 ca 29 24 c1 6a |....D...b...)$.j|
0500: 19 b0 65 6a 84 90 0e 50 4e 5b 0a ec 2d 00 00 00 |..ej...PN[..-...|
0510: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
0520: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
0530: 00 00 00 00 00 32 af 76 86 d4 03 cf 45 b5 d9 5f |.....2.v....E.._|
0540: 2d 70 ce be a5 87 ac 80 6a 00 00 00 00 00 00 00 |-p......j.......|
0550: 00 00 00 00 02 44 0a 00 00 00 00 00 00 00 05 45 |.....D.........E|
0560: 00 00 00 62 9c 6f d0 35 0a 6c 0d 0c 49 d4 a9 c5 |...b.o.5.l..I...|
0570: 01 7c f0 70 43 f5 4e 58 00 00 00 00 00 00 00 00 |.|.pC.NX........|
0580: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
0590: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
05a0: 95 20 ee a7 81 bc ca 16 c1 e1 5a cc 0b a1 43 35 |. ........Z...C5|
05b0: a0 e8 e5 ba 00 00 00 00 00 00 00 00 00 00 00 02 |................|
05c0: 45 0a 00 00 00 00 00 00 00 05 48 00 00 00 62 85 |E.........H...b.|
05d0: 00 18 9e 74 a9 e0 47 5e 82 20 93 bc 7d b0 d6 31 |...t..G^. ..}..1|
05e0: ae b0 b4 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
05f0: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
0600: 00 00 00 00 00 00 00 00 00 00 00 02 de 42 19 6e |.............B.n|
0610: be e4 2e f2 84 b6 78 0a 87 cd c9 6e 8e aa b6 00 |......x....n....|
0620: 00 00 00 00 00 00 00 00 00 00 02 48 0a 00 00 00 |...........H....|
0630: 00 00 00 00 00 00 00 00 00 00 00 00 00 |.............|

$ hg debugbundle ../rev.hg2
Stream params: {}
changegroup -- 'sortdict()'
32af7686d403cf45b5d95f2d70cebea587ac806a
9520eea781bcca16c1e15acc0ba14335a0e8e5ba
eea13746799a9e0bfd88f29d3c2e9dc9389f524f
02de42196ebee42ef284b6780a87cdc96e8eaab6
$ hg unbundle ../rev.hg2
adding changesets
adding manifests
adding file changes
added 0 changesets with 0 changes to 3 files
(run 'hg update' to get a working copy)

with reply

$ hg bundle2 --rev '8+7+5+4' --reply ../rev-rr.hg2
$ hg unbundle2 ../rev-reply.hg2 < ../rev-rr.hg2
0 unread bytes
addchangegroup return: 1

$ f --hexdump ../rev-reply.hg2
../rev-reply.hg2:
0000: 48 47 32 30 00 00 00 00 00 00 00 2f 11 72 65 70 |HG20......./.rep|
0010: 6c 79 3a 63 68 61 6e 67 65 67 72 6f 75 70 00 00 |ly:changegroup..|
0020: 00 00 00 02 0b 01 06 01 69 6e 2d 72 65 70 6c 79 |........in-reply|
0030: 2d 74 6f 31 72 65 74 75 72 6e 31 00 00 00 00 00 |-to1return1.....|
0040: 00 00 1b 06 6f 75 74 70 75 74 00 00 00 01 00 01 |....output......|
0050: 0b 01 69 6e 2d 72 65 70 6c 79 2d 74 6f 31 00 00 |..in-reply-to1..|
0060: 00 64 61 64 64 69 6e 67 20 63 68 61 6e 67 65 73 |.dadding changes|
0070: 65 74 73 0a 61 64 64 69 6e 67 20 6d 61 6e 69 66 |ets.adding manif|
0080: 65 73 74 73 0a 61 64 64 69 6e 67 20 66 69 6c 65 |ests.adding file|
0090: 20 63 68 61 6e 67 65 73 0a 61 64 64 65 64 20 30 | changes.added 0|
00a0: 20 63 68 61 6e 67 65 73 65 74 73 20 77 69 74 68 | changesets with|
00b0: 20 30 20 63 68 61 6e 67 65 73 20 74 6f 20 33 20 | 0 changes to 3 |
00c0: 66 69 6c 65 73 0a 00 00 00 00 00 00 00 00 |files.........|

Check handling of exception during generation.
----------------------------------------------

$ hg bundle2 --genraise > ../genfailed.hg2
abort: Someone set up us the bomb!
[255]

Should still be a valid bundle

$ f --hexdump ../genfailed.hg2
../genfailed.hg2:
0000: 48 47 32 30 00 00 00 00 00 00 00 0d 06 6f 75 74 |HG20.........out|
0010: 70 75 74 00 00 00 00 00 00 ff ff ff ff 00 00 00 |put.............|
0020: 48 0b 65 72 72 6f 72 3a 61 62 6f 72 74 00 00 00 |H.error:abort...|
0030: 00 01 00 07 2d 6d 65 73 73 61 67 65 75 6e 65 78 |....-messageunex|
0040: 70 65 63 74 65 64 20 65 72 72 6f 72 3a 20 53 6f |pected error: So|
0050: 6d 65 6f 6e 65 20 73 65 74 20 75 70 20 75 73 20 |meone set up us |
0060: 74 68 65 20 62 6f 6d 62 21 00 00 00 00 00 00 00 |the bomb!.......|
0070: 00 |.|

And its handling on the other side raises a clean exception

$ cat ../genfailed.hg2 | hg unbundle2
0 unread bytes
abort: unexpected error: Someone set up us the bomb!
[255]

Test compression
================

Simple case where it just works: GZ
----------------------------------

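(Aside, not from the original test: the Compression=GZ stream parameter visible in the dump below means everything after the parameter block is zlib-compressed -- the 78 9c bytes right after "Compression=GZ" are a zlib header. A rough sketch of undoing it by hand, assuming exactly that layout:

  import struct, zlib

  with open('../rev.hg2.bz', 'rb') as f:
      assert f.read(4) == b'HG20'                 # stream magic
      paramssize = struct.unpack('>i', f.read(4))[0]
      params = f.read(paramssize)                 # b'Compression=GZ' here
      raw = zlib.decompress(f.read())             # the uncompressed part stream

The decompressed bytes should then start with the same changegroup part header seen in the uncompressed ../rev.hg2 dump above.)
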
$ hg bundle2 --compress GZ --rev '8+7+5+4' ../rev.hg2.bz
$ f --hexdump ../rev.hg2.bz
../rev.hg2.bz:
0000: 48 47 32 30 00 00 00 0e 43 6f 6d 70 72 65 73 73 |HG20....Compress|
0010: 69 6f 6e 3d 47 5a 78 9c 95 94 7d 68 95 55 1c c7 |ion=GZx...}h.U..|
0020: 9f 3b 31 e8 ce fa c3 65 be a0 a4 b4 52 b9 29 e7 |.;1....e....R.).|
0030: f5 79 ce 89 fa 63 ed 5e 77 8b 9c c3 3f 2a 1c 68 |.y...c.^w...?*.h|
0040: cf 79 9b dd 6a ae b0 28 74 b8 e5 96 5b bb 86 61 |.y..j..(t...[..a|
0050: a3 15 6e 3a 71 c8 6a e8 a5 da 95 64 28 22 ce 69 |..n:q.j....d(".i|
0060: cd 06 59 34 28 2b 51 2a 58 c3 17 56 2a 9a 9d 67 |..Y4(+Q*X..V*..g|
0070: dc c6 35 9e c4 1d f8 9e 87 f3 9c f3 3b bf 0f bf |..5.........;...|
0080: 97 e3 38 ce f4 42 b9 d6 af ae d2 55 af ae 7b ad |..8..B.....U..{.|
0090: c6 c9 8d bb 8a ec b4 07 ed 7f fd ed d3 53 be 4e |.............S.N|
00a0: f4 0e af 59 52 73 ea 50 d7 96 9e ba d4 9a 1f 87 |...YRs.P........|
00b0: 9b 9f 1d e8 7a 6a 79 e9 cb 7f cf eb fe 7e d3 82 |....zjy......~..|
00c0: ce 2f 36 38 21 23 cc 36 b7 b5 38 90 ab a1 21 92 |./68!#.6..8...!.|
00d0: 78 5a 0a 8a b1 31 0a 48 a6 29 92 4a 32 e6 1b e1 |xZ...1.H.).J2...|
00e0: 4a 85 b9 46 40 46 ed 61 63 b5 d6 aa 20 1e ac 5e |J..F@F.ac... ..^|
00f0: b0 0a ae 8a c4 03 c6 d6 f9 a3 7b eb fb 4e de 7f |..........{..N..|
0100: e4 97 55 5f 15 76 96 d2 5d bf 9d 3f 38 18 29 4c |..U_.v..]..?8.)L|
0110: 0f b7 5d 6e 9b b3 aa 7e c6 d5 15 5b f7 7c 52 f1 |..]n...~...[.|R.|
0120: 7c 73 18 63 98 6d 3e 23 51 5a 6a 2e 19 72 8d cb ||s.c.m>#QZj..r..|
0130: 09 07 14 78 82 33 e9 62 86 7d 0c 00 17 88 53 86 |...x.3.b.}....S.|
0140: 3d 75 0b 63 e2 16 c6 84 9d 76 8f 76 7a cb de fc |=u.c.....v.vz...|
0150: a8 a3 f0 46 d3 a5 f6 c7 96 b6 9f 60 3b 57 ae 28 |...F.......`;W.(|
0160: ce b2 8d e9 f4 3e 6f 66 53 dd e5 6b ad 67 be f9 |.....>ofS..k.g..|
0170: 72 ee 5f 8d 61 3c 61 b6 f9 8c d8 a5 82 63 45 3d |r._.a<a......cE=|
0180: a3 0c 61 90 68 24 28 87 50 b9 c2 97 c6 20 01 11 |..a.h$(.P.... ..|
0190: 80 84 10 98 cf e8 e4 13 96 05 51 2c 38 f3 c4 ec |..........Q,8...|
01a0: ea 43 e7 96 5e 6a c8 be 11 dd 32 78 a2 fa dd 8f |.C..^j....2x....|
01b0: b3 61 84 61 51 0c b3 cd 27 64 42 6b c2 b4 92 1e |.a.aQ...'dBk....|
01c0: 86 8c 12 68 24 00 10 db 7f 50 00 c6 91 e7 fa 4c |...h$....P.....L|
01d0: 22 22 cc bf 84 81 0a 92 c1 aa 2a c7 1b 49 e6 ee |""........*..I..|
01e0: 6b a9 7e e0 e9 b2 91 5e 7c 73 68 e0 fc 23 3f 34 |k.~....^|sh..#?4|
01f0: ed cf 0e f2 b3 d3 4c d7 ae 59 33 6f 8c 3d b8 63 |......L..Y3o.=.c|
0200: 21 2b e8 3d e0 6f 9d 3a b7 f9 dc 24 2a b2 3e a7 |!+.=.o.:...$*.>.|
0210: 58 dc 91 d8 40 e9 23 8e 88 84 ae 0f b9 00 2e b5 |X...@.#.........|
0220: 74 36 f3 40 53 40 34 15 c0 d7 12 8d e7 bb 65 f9 |t6.@S@4.......e.|
0230: c8 ef 03 0f ff f9 fe b6 8a 0d 6d fd ec 51 70 f7 |..........m..Qp.|
0240: a7 ad 9b 6b 9d da 74 7b 53 43 d1 43 63 fd 19 f9 |...k..t{SC.Cc...|
0250: ca 67 95 e5 ef c4 e6 6c 9e 44 e1 c5 ac 7a 82 6f |.g.....l.D...z.o|
0260: c2 e1 d2 b5 2d 81 29 f0 5d 09 6c 6f 10 ae 88 cf |....-.).].lo....|
0270: 25 05 d0 93 06 78 80 60 43 2d 10 1b 47 71 2b b7 |%....x.`C-..Gq+.|
0280: 7f bb e9 a7 e4 7d 67 7b df 9b f7 62 cf cd d8 f4 |.....}g{...b....|
0290: 48 bc 64 51 57 43 ff ea 8b 0b ae 74 64 53 07 86 |H.dQWC.....tdS..|
02a0: fa 66 3c 5e f7 e1 af a7 c2 90 ff a7 be 9e c9 29 |.f<^...........)|
1106 02b0: b6 cc 41 48 18 69 94 8b 7c 04 7d 8c 98 a7 95 50 |..AH.i..|.}....P|
1106 02b0: b6 cc 41 48 18 69 94 8b 7c 04 7d 8c 98 a7 95 50 |..AH.i..|.}....P|
1107 02c0: 44 d9 d0 20 c8 14 30 14 51 ad 6c 16 03 94 0f 5a |D.. ..0.Q.l....Z|
1107 02c0: 44 d9 d0 20 c8 14 30 14 51 ad 6c 16 03 94 0f 5a |D.. ..0.Q.l....Z|
1108 02d0: 46 93 7f 1c 87 8d 25 d7 9d a2 d1 92 4c f3 c2 54 |F.....%.....L..T|
1108 02d0: 46 93 7f 1c 87 8d 25 d7 9d a2 d1 92 4c f3 c2 54 |F.....%.....L..T|
1109 02e0: ba f8 70 18 ca 24 0a 29 96 43 71 f2 93 95 74 18 |..p..$.).Cq...t.|
1109 02e0: ba f8 70 18 ca 24 0a 29 96 43 71 f2 93 95 74 18 |..p..$.).Cq...t.|
1110 02f0: b5 65 c4 b8 f6 6c 5c 34 20 1e d5 0c 21 c0 b1 90 |.e...l\4 ...!...|
1110 02f0: b5 65 c4 b8 f6 6c 5c 34 20 1e d5 0c 21 c0 b1 90 |.e...l\4 ...!...|
1111 0300: 9e 12 40 b9 18 fa 5a 00 41 a2 39 d3 a9 c1 73 21 |..@...Z.A.9...s!|
1111 0300: 9e 12 40 b9 18 fa 5a 00 41 a2 39 d3 a9 c1 73 21 |..@...Z.A.9...s!|
1112 0310: 8e 5e 3c b9 b8 f8 48 6a 76 46 a7 1a b6 dd 5b 51 |.^<...HjvF....[Q|
1112 0310: 8e 5e 3c b9 b8 f8 48 6a 76 46 a7 1a b6 dd 5b 51 |.^<...HjvF....[Q|
1113 0320: 5e 19 1d 59 12 c6 32 89 02 9a c0 8f 4f b8 0a ba |^..Y..2.....O...|
1113 0320: 5e 19 1d 59 12 c6 32 89 02 9a c0 8f 4f b8 0a ba |^..Y..2.....O...|
1114 0330: 5e ec 58 37 44 a3 2f dd 33 ed c9 d3 dd c7 22 1b |^.X7D./.3.....".|
1114 0330: 5e ec 58 37 44 a3 2f dd 33 ed c9 d3 dd c7 22 1b |^.X7D./.3.....".|
1115 0340: 2f d4 94 8e 95 3f 77 a7 ae 6e f3 32 8d bb 4a 4c |/....?w..n.2..JL|
1115 0340: 2f d4 94 8e 95 3f 77 a7 ae 6e f3 32 8d bb 4a 4c |/....?w..n.2..JL|
1116 0350: b8 0a 5a 43 34 3a b3 3a d6 77 ff 5c b6 fa ad f9 |..ZC4:.:.w.\....|
1116 0350: b8 0a 5a 43 34 3a b3 3a d6 77 ff 5c b6 fa ad f9 |..ZC4:.:.w.\....|
1117 0360: db fb 6a 33 df c1 7d 99 cf ef d4 d5 6d da 77 7c |..j3..}.....m.w||
1117 0360: db fb 6a 33 df c1 7d 99 cf ef d4 d5 6d da 77 7c |..j3..}.....m.w||
1118 0370: 3b 19 fd af c5 3f f1 60 c3 17 |;....?.`..|
1118 0370: 3b 19 fd af c5 3f f1 60 c3 17 |;....?.`..|
1119 $ hg debugbundle ../rev.hg2.bz
1119 $ hg debugbundle ../rev.hg2.bz
1120 Stream params: sortdict([('Compression', 'GZ')])
1120 Stream params: sortdict([('Compression', 'GZ')])
1121 changegroup -- 'sortdict()'
1121 changegroup -- 'sortdict()'
1122 32af7686d403cf45b5d95f2d70cebea587ac806a
1122 32af7686d403cf45b5d95f2d70cebea587ac806a
1123 9520eea781bcca16c1e15acc0ba14335a0e8e5ba
1123 9520eea781bcca16c1e15acc0ba14335a0e8e5ba
1124 eea13746799a9e0bfd88f29d3c2e9dc9389f524f
1124 eea13746799a9e0bfd88f29d3c2e9dc9389f524f
1125 02de42196ebee42ef284b6780a87cdc96e8eaab6
1125 02de42196ebee42ef284b6780a87cdc96e8eaab6
1126 $ hg unbundle ../rev.hg2.bz
1126 $ hg unbundle ../rev.hg2.bz
1127 adding changesets
1127 adding changesets
1128 adding manifests
1128 adding manifests
1129 adding file changes
1129 adding file changes
1130 added 0 changesets with 0 changes to 3 files
1130 added 0 changesets with 0 changes to 3 files
1131 (run 'hg update' to get a working copy)
1131 (run 'hg update' to get a working copy)
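
The first bytes of the dump above can be read by eye: the "HG20" magic, a big-endian 32-bit length (00 00 00 0e = 14), and the 14-byte stream-parameter blob "Compression=GZ"; everything after offset 0x16 is the compressed part stream, opening with the 78 9c zlib header. A minimal sketch of reading that header, assuming Python 3 and not claiming to mirror Mercurial's own bundle2 parser (stream parameters are space-separated key=value pairs and may be URL-quoted):

    import struct

    def read_stream_header(fh):
        """Illustrative bundle2 stream-header reader (not Mercurial's code)."""
        magic = fh.read(4)
        if magic != b'HG20':
            raise ValueError('not a bundle2 stream: %r' % magic)
        # 4-byte big-endian length of the stream-parameter blob
        (paramssize,) = struct.unpack('>I', fh.read(4))
        params = {}
        blob = fh.read(paramssize)
        for item in blob.split(b' ') if blob else []:
            key, _, value = item.partition(b'=')
            params[key] = value          # both sides may be URL-quoted
        return params                     # fh is now positioned at the parts

    with open('../rev.hg2.bz', 'rb') as fh:
        print(read_stream_header(fh))     # {b'Compression': b'GZ'}
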
1132 Simple case where it just works: BZ
1132 Simple case where it just works: BZ
1133 -----------------------------------
1133 -----------------------------------
1134
1134
1135 $ hg bundle2 --compress BZ --rev '8+7+5+4' ../rev.hg2.bz
1135 $ hg bundle2 --compress BZ --rev '8+7+5+4' ../rev.hg2.bz
1136 $ f --hexdump ../rev.hg2.bz
1136 $ f --hexdump ../rev.hg2.bz
1137 ../rev.hg2.bz:
1137 ../rev.hg2.bz:
1138 0000: 48 47 32 30 00 00 00 0e 43 6f 6d 70 72 65 73 73 |HG20....Compress|
1138 0000: 48 47 32 30 00 00 00 0e 43 6f 6d 70 72 65 73 73 |HG20....Compress|
1139 0010: 69 6f 6e 3d 42 5a 42 5a 68 39 31 41 59 26 53 59 |ion=BZBZh91AY&SY|
1139 0010: 69 6f 6e 3d 42 5a 42 5a 68 39 31 41 59 26 53 59 |ion=BZBZh91AY&SY|
1140 0020: a3 4b 18 3d 00 00 1a 7f ff ff bf 5f f6 ef ef 7f |.K.=......._....|
1140 0020: a3 4b 18 3d 00 00 1a 7f ff ff bf 5f f6 ef ef 7f |.K.=......._....|
1141 0030: f6 3f f7 d1 d9 ff ff f7 6e ff ff 6e f7 f6 bd df |.?......n..n....|
1141 0030: f6 3f f7 d1 d9 ff ff f7 6e ff ff 6e f7 f6 bd df |.?......n..n....|
1142 0040: b5 ab ff cf 67 f6 e7 7b f7 c0 02 d7 33 82 8b 51 |....g..{....3..Q|
1142 0040: b5 ab ff cf 67 f6 e7 7b f7 c0 02 d7 33 82 8b 51 |....g..{....3..Q|
1143 0050: 04 a5 53 d5 3d 27 a0 99 18 4d 0d 34 00 d1 a1 e8 |..S.='...M.4....|
1143 0050: 04 a5 53 d5 3d 27 a0 99 18 4d 0d 34 00 d1 a1 e8 |..S.='...M.4....|
1144 0060: 80 c8 7a 87 a9 a3 43 6a 3d 46 86 26 80 34 3d 40 |..z...Cj=F.&.4=@|
1144 0060: 80 c8 7a 87 a9 a3 43 6a 3d 46 86 26 80 34 3d 40 |..z...Cj=F.&.4=@|
1145 0070: c8 c9 b5 34 f4 8f 48 0f 51 ea 34 34 fd 4d aa 19 |...4..H.Q.44.M..|
1145 0070: c8 c9 b5 34 f4 8f 48 0f 51 ea 34 34 fd 4d aa 19 |...4..H.Q.44.M..|
1146 0080: 03 40 0c 08 da 86 43 d4 f5 0f 42 1e a0 f3 54 33 |.@....C...B...T3|
1146 0080: 03 40 0c 08 da 86 43 d4 f5 0f 42 1e a0 f3 54 33 |.@....C...B...T3|
1147 0090: 54 d3 13 4d 03 40 32 00 00 32 03 26 80 0d 00 0d |T..M.@2..2.&....|
1147 0090: 54 d3 13 4d 03 40 32 00 00 32 03 26 80 0d 00 0d |T..M.@2..2.&....|
1148 00a0: 00 68 c8 c8 03 20 32 30 98 8c 80 00 00 03 4d 00 |.h... 20......M.|
1148 00a0: 00 68 c8 c8 03 20 32 30 98 8c 80 00 00 03 4d 00 |.h... 20......M.|
1149 00b0: c8 00 00 0d 00 00 22 99 a1 34 c2 64 a6 d5 34 1a |......"..4.d..4.|
1149 00b0: c8 00 00 0d 00 00 22 99 a1 34 c2 64 a6 d5 34 1a |......"..4.d..4.|
1150 00c0: 00 00 06 86 83 4d 07 a8 d1 a0 68 01 a0 00 00 00 |.....M....h.....|
1150 00c0: 00 00 06 86 83 4d 07 a8 d1 a0 68 01 a0 00 00 00 |.....M....h.....|
1151 00d0: 00 0d 06 80 00 00 00 0d 00 03 40 00 00 04 a4 a1 |..........@.....|
1151 00d0: 00 0d 06 80 00 00 00 0d 00 03 40 00 00 04 a4 a1 |..........@.....|
1152 00e0: 4d a9 89 89 b4 9a 32 0c 43 46 86 87 a9 8d 41 9a |M.....2.CF....A.|
1152 00e0: 4d a9 89 89 b4 9a 32 0c 43 46 86 87 a9 8d 41 9a |M.....2.CF....A.|
1153 00f0: 98 46 9a 0d 31 32 1a 34 0d 0c 8d a2 0c 98 4d 06 |.F..12.4......M.|
1153 00f0: 98 46 9a 0d 31 32 1a 34 0d 0c 8d a2 0c 98 4d 06 |.F..12.4......M.|
1154 0100: 8c 40 c2 60 8d 0d 0c 20 c9 89 fa a0 d0 d3 21 a1 |.@.`... ......!.|
1154 0100: 8c 40 c2 60 8d 0d 0c 20 c9 89 fa a0 d0 d3 21 a1 |.@.`... ......!.|
1155 0110: ea 34 d3 68 9e a6 d1 74 05 33 cb 66 96 93 28 64 |.4.h...t.3.f..(d|
1155 0110: ea 34 d3 68 9e a6 d1 74 05 33 cb 66 96 93 28 64 |.4.h...t.3.f..(d|
1156 0120: 40 91 22 ac 55 9b ea 40 7b 38 94 e2 f8 06 00 cb |@.".U..@{8......|
1156 0120: 40 91 22 ac 55 9b ea 40 7b 38 94 e2 f8 06 00 cb |@.".U..@{8......|
1157 0130: 28 02 00 4d ab 40 24 10 43 18 cf 64 b4 06 83 0c |(..M.@$.C..d....|
1157 0130: 28 02 00 4d ab 40 24 10 43 18 cf 64 b4 06 83 0c |(..M.@$.C..d....|
1158 0140: 34 6c b4 a3 d4 0a 0a e4 a8 5c 4e 23 c0 c9 7a 31 |4l.......\N#..z1|
1158 0140: 34 6c b4 a3 d4 0a 0a e4 a8 5c 4e 23 c0 c9 7a 31 |4l.......\N#..z1|
1159 0150: 97 87 77 7a 64 88 80 8e 60 97 20 93 0f 8e eb c4 |..wzd...`. .....|
1159 0150: 97 87 77 7a 64 88 80 8e 60 97 20 93 0f 8e eb c4 |..wzd...`. .....|
1160 0160: 62 a4 44 a3 52 20 b2 99 a9 2e e1 d7 29 4a 54 ac |b.D.R ......)JT.|
1160 0160: 62 a4 44 a3 52 20 b2 99 a9 2e e1 d7 29 4a 54 ac |b.D.R ......)JT.|
1161 0170: 44 7a bb cc 04 3d e0 aa bd 6a 33 5e 9b a2 57 36 |Dz...=...j3^..W6|
1161 0170: 44 7a bb cc 04 3d e0 aa bd 6a 33 5e 9b a2 57 36 |Dz...=...j3^..W6|
1162 0180: fa cb 45 bb 6d 3e c1 d9 d9 f5 83 69 8a d0 e0 e2 |..E.m>.....i....|
1162 0180: fa cb 45 bb 6d 3e c1 d9 d9 f5 83 69 8a d0 e0 e2 |..E.m>.....i....|
1163 0190: e7 ae 90 55 24 da 3f ab 78 c0 4c b4 56 a3 9e a4 |...U$.?.x.L.V...|
1163 0190: e7 ae 90 55 24 da 3f ab 78 c0 4c b4 56 a3 9e a4 |...U$.?.x.L.V...|
1164 01a0: af 9c 65 74 86 ec 6d dc 62 dc 33 ca c8 50 dd 9d |..et..m.b.3..P..|
1164 01a0: af 9c 65 74 86 ec 6d dc 62 dc 33 ca c8 50 dd 9d |..et..m.b.3..P..|
1165 01b0: 98 8e 9e 59 20 f3 f0 42 91 4a 09 f5 75 8d 3d a5 |...Y ..B.J..u.=.|
1165 01b0: 98 8e 9e 59 20 f3 f0 42 91 4a 09 f5 75 8d 3d a5 |...Y ..B.J..u.=.|
1166 01c0: a5 15 cb 8d 10 63 b0 c2 2e b2 81 f7 c1 76 0e 53 |.....c.......v.S|
1166 01c0: a5 15 cb 8d 10 63 b0 c2 2e b2 81 f7 c1 76 0e 53 |.....c.......v.S|
1167 01d0: 6c 0e 46 73 b5 ae 67 f9 4c 0b 45 6b a8 32 2a 2f |l.Fs..g.L.Ek.2*/|
1167 01d0: 6c 0e 46 73 b5 ae 67 f9 4c 0b 45 6b a8 32 2a 2f |l.Fs..g.L.Ek.2*/|
1168 01e0: a2 54 a4 44 05 20 a1 38 d1 a4 c6 09 a8 2b 08 99 |.T.D. .8.....+..|
1168 01e0: a2 54 a4 44 05 20 a1 38 d1 a4 c6 09 a8 2b 08 99 |.T.D. .8.....+..|
1169 01f0: a4 14 ae 8d a3 e3 aa 34 27 d8 44 ca c3 5d 21 8b |.......4'.D..]!.|
1169 01f0: a4 14 ae 8d a3 e3 aa 34 27 d8 44 ca c3 5d 21 8b |.......4'.D..]!.|
1170 0200: 1a 1e 97 29 71 2b 09 4a 4a 55 55 94 58 65 b2 bc |...)q+.JJUU.Xe..|
1170 0200: 1a 1e 97 29 71 2b 09 4a 4a 55 55 94 58 65 b2 bc |...)q+.JJUU.Xe..|
1171 0210: f3 a5 90 26 36 76 67 7a 51 98 d6 8a 4a 99 50 b5 |...&6vgzQ...J.P.|
1171 0210: f3 a5 90 26 36 76 67 7a 51 98 d6 8a 4a 99 50 b5 |...&6vgzQ...J.P.|
1172 0220: 99 8f 94 21 17 a9 8b f3 ad 4c 33 d4 2e 40 c8 0c |...!.....L3..@..|
1172 0220: 99 8f 94 21 17 a9 8b f3 ad 4c 33 d4 2e 40 c8 0c |...!.....L3..@..|
1173 0230: 3b 90 53 39 db 48 02 34 83 48 d6 b3 99 13 d2 58 |;.S9.H.4.H.....X|
1173 0230: 3b 90 53 39 db 48 02 34 83 48 d6 b3 99 13 d2 58 |;.S9.H.4.H.....X|
1174 0240: 65 8e 71 ac a9 06 95 f2 c4 8e b4 08 6b d3 0c ae |e.q.........k...|
1174 0240: 65 8e 71 ac a9 06 95 f2 c4 8e b4 08 6b d3 0c ae |e.q.........k...|
1175 0250: d9 90 56 71 43 a7 a2 62 16 3e 50 63 d3 57 3c 2d |..VqC..b.>Pc.W<-|
1175 0250: d9 90 56 71 43 a7 a2 62 16 3e 50 63 d3 57 3c 2d |..VqC..b.>Pc.W<-|
1176 0260: 9f 0f 34 05 08 d8 a6 4b 59 31 54 66 3a 45 0c 8a |..4....KY1Tf:E..|
1176 0260: 9f 0f 34 05 08 d8 a6 4b 59 31 54 66 3a 45 0c 8a |..4....KY1Tf:E..|
1177 0270: c7 90 3a f0 6a 83 1b f5 ca fb 80 2b 50 06 fb 51 |..:.j......+P..Q|
1177 0270: c7 90 3a f0 6a 83 1b f5 ca fb 80 2b 50 06 fb 51 |..:.j......+P..Q|
1178 0280: 7e a6 a4 d4 81 44 82 21 54 00 5b 1a 30 83 62 a3 |~....D.!T.[.0.b.|
1178 0280: 7e a6 a4 d4 81 44 82 21 54 00 5b 1a 30 83 62 a3 |~....D.!T.[.0.b.|
1179 0290: 18 b6 24 19 1e 45 df 4d 5c db a6 af 5b ac 90 fa |..$..E.M\...[...|
1179 0290: 18 b6 24 19 1e 45 df 4d 5c db a6 af 5b ac 90 fa |..$..E.M\...[...|
1180 02a0: 3e ed f9 ec 4c ba 36 ee d8 60 20 a7 c7 3b cb d1 |>...L.6..` ..;..|
1180 02a0: 3e ed f9 ec 4c ba 36 ee d8 60 20 a7 c7 3b cb d1 |>...L.6..` ..;..|
1181 02b0: 90 43 7d 27 16 50 5d ad f4 14 07 0b 90 5c cc 6b |.C}'.P]......\.k|
1181 02b0: 90 43 7d 27 16 50 5d ad f4 14 07 0b 90 5c cc 6b |.C}'.P]......\.k|
1182 02c0: 8d 3f a6 88 f4 34 37 a8 cf 14 63 36 19 f7 3e 28 |.?...47...c6..>(|
1182 02c0: 8d 3f a6 88 f4 34 37 a8 cf 14 63 36 19 f7 3e 28 |.?...47...c6..>(|
1183 02d0: de 99 e8 16 a4 9d 0d 40 a1 a7 24 52 14 a6 72 62 |.......@..$R..rb|
1183 02d0: de 99 e8 16 a4 9d 0d 40 a1 a7 24 52 14 a6 72 62 |.......@..$R..rb|
1184 02e0: 59 5a ca 2d e5 51 90 78 88 d9 c6 c7 21 d0 f7 46 |YZ.-.Q.x....!..F|
1184 02e0: 59 5a ca 2d e5 51 90 78 88 d9 c6 c7 21 d0 f7 46 |YZ.-.Q.x....!..F|
1185 02f0: b2 04 46 44 4e 20 9c 12 b1 03 4e 25 e0 a9 0c 58 |..FDN ....N%...X|
1185 02f0: b2 04 46 44 4e 20 9c 12 b1 03 4e 25 e0 a9 0c 58 |..FDN ....N%...X|
1186 0300: 5b 1d 3c 93 20 01 51 de a9 1c 69 23 32 46 14 b4 |[.<. .Q...i#2F..|
1186 0300: 5b 1d 3c 93 20 01 51 de a9 1c 69 23 32 46 14 b4 |[.<. .Q...i#2F..|
1187 0310: 90 db 17 98 98 50 03 90 29 aa 40 b0 13 d8 43 d2 |.....P..).@...C.|
1187 0310: 90 db 17 98 98 50 03 90 29 aa 40 b0 13 d8 43 d2 |.....P..).@...C.|
1188 0320: 5f c5 9d eb f3 f2 ad 41 e8 7a a9 ed a1 58 84 a6 |_......A.z...X..|
1188 0320: 5f c5 9d eb f3 f2 ad 41 e8 7a a9 ed a1 58 84 a6 |_......A.z...X..|
1189 0330: 42 bf d6 fc 24 82 c1 20 32 26 4a 15 a6 1d 29 7f |B...$.. 2&J...).|
1189 0330: 42 bf d6 fc 24 82 c1 20 32 26 4a 15 a6 1d 29 7f |B...$.. 2&J...).|
1190 0340: 7e f4 3d 07 bc 62 9a 5b ec 44 3d 72 1d 41 8b 5c |~.=..b.[.D=r.A.\|
1190 0340: 7e f4 3d 07 bc 62 9a 5b ec 44 3d 72 1d 41 8b 5c |~.=..b.[.D=r.A.\|
1191 0350: 80 de 0e 62 9a 2e f8 83 00 d5 07 a0 9c c6 74 98 |...b..........t.|
1191 0350: 80 de 0e 62 9a 2e f8 83 00 d5 07 a0 9c c6 74 98 |...b..........t.|
1192 0360: 11 b2 5e a9 38 02 03 ee fd 86 5c f4 86 b3 ae da |..^.8.....\.....|
1192 0360: 11 b2 5e a9 38 02 03 ee fd 86 5c f4 86 b3 ae da |..^.8.....\.....|
1193 0370: 05 94 01 c5 c6 ea 18 e6 ba 2a ba b3 04 5c 96 89 |.........*...\..|
1193 0370: 05 94 01 c5 c6 ea 18 e6 ba 2a ba b3 04 5c 96 89 |.........*...\..|
1194 0380: 72 63 5b 10 11 f6 67 34 98 cb e4 c0 4e fa e6 99 |rc[...g4....N...|
1194 0380: 72 63 5b 10 11 f6 67 34 98 cb e4 c0 4e fa e6 99 |rc[...g4....N...|
1195 0390: 19 6e 50 e8 26 8d 0c 17 e0 be ef e1 8e 02 6f 32 |.nP.&.........o2|
1195 0390: 19 6e 50 e8 26 8d 0c 17 e0 be ef e1 8e 02 6f 32 |.nP.&.........o2|
1196 03a0: 82 dc 26 f8 a1 08 f3 8a 0d f3 c4 75 00 48 73 b8 |..&........u.Hs.|
1196 03a0: 82 dc 26 f8 a1 08 f3 8a 0d f3 c4 75 00 48 73 b8 |..&........u.Hs.|
1197 03b0: be 3b 0d 7f d0 fd c7 78 96 ec e0 03 80 68 4d 8d |.;.....x.....hM.|
1197 03b0: be 3b 0d 7f d0 fd c7 78 96 ec e0 03 80 68 4d 8d |.;.....x.....hM.|
1198 03c0: 43 8c d7 68 58 f9 50 f0 18 cb 21 58 1b 60 cd 1f |C..hX.P...!X.`..|
1198 03c0: 43 8c d7 68 58 f9 50 f0 18 cb 21 58 1b 60 cd 1f |C..hX.P...!X.`..|
1199 03d0: 84 36 2e 16 1f 0a f7 4e 8f eb df 01 2d c2 79 0b |.6.....N....-.y.|
1199 03d0: 84 36 2e 16 1f 0a f7 4e 8f eb df 01 2d c2 79 0b |.6.....N....-.y.|
1200 03e0: f7 24 ea 0d e8 59 86 51 6e 1c 30 a3 ad 2f ee 8c |.$...Y.Qn.0../..|
1200 03e0: f7 24 ea 0d e8 59 86 51 6e 1c 30 a3 ad 2f ee 8c |.$...Y.Qn.0../..|
1201 03f0: 90 c8 84 d5 e8 34 c1 95 b2 c9 f6 4d 87 1c 7d 19 |.....4.....M..}.|
1201 03f0: 90 c8 84 d5 e8 34 c1 95 b2 c9 f6 4d 87 1c 7d 19 |.....4.....M..}.|
1202 0400: d6 41 58 56 7a e0 6c ba 10 c7 e8 33 39 36 96 e7 |.AXVz.l....396..|
1202 0400: d6 41 58 56 7a e0 6c ba 10 c7 e8 33 39 36 96 e7 |.AXVz.l....396..|
1203 0410: d2 f9 59 9a 08 95 48 38 e7 0b b7 0a 24 67 c4 39 |..Y...H8....$g.9|
1203 0410: d2 f9 59 9a 08 95 48 38 e7 0b b7 0a 24 67 c4 39 |..Y...H8....$g.9|
1204 0420: 8b 43 88 57 9c 01 f5 61 b5 e1 27 41 7e af 83 fe |.C.W...a..'A~...|
1204 0420: 8b 43 88 57 9c 01 f5 61 b5 e1 27 41 7e af 83 fe |.C.W...a..'A~...|
1205 0430: 2e e4 8a 70 a1 21 46 96 30 7a |...p.!F.0z|
1205 0430: 2e e4 8a 70 a1 21 46 96 30 7a |...p.!F.0z|
1206 $ hg debugbundle ../rev.hg2.bz
1206 $ hg debugbundle ../rev.hg2.bz
1207 Stream params: sortdict([('Compression', 'BZ')])
1207 Stream params: sortdict([('Compression', 'BZ')])
1208 changegroup -- 'sortdict()'
1208 changegroup -- 'sortdict()'
1209 32af7686d403cf45b5d95f2d70cebea587ac806a
1209 32af7686d403cf45b5d95f2d70cebea587ac806a
1210 9520eea781bcca16c1e15acc0ba14335a0e8e5ba
1210 9520eea781bcca16c1e15acc0ba14335a0e8e5ba
1211 eea13746799a9e0bfd88f29d3c2e9dc9389f524f
1211 eea13746799a9e0bfd88f29d3c2e9dc9389f524f
1212 02de42196ebee42ef284b6780a87cdc96e8eaab6
1212 02de42196ebee42ef284b6780a87cdc96e8eaab6
1213 $ hg unbundle ../rev.hg2.bz
1213 $ hg unbundle ../rev.hg2.bz
1214 adding changesets
1214 adding changesets
1215 adding manifests
1215 adding manifests
1216 adding file changes
1216 adding file changes
1217 added 0 changesets with 0 changes to 3 files
1217 added 0 changesets with 0 changes to 3 files
1218 (run 'hg update' to get a working copy)
1218 (run 'hg update' to get a working copy)
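
The only difference from the GZ case is the payload codec: after the "Compression=BZ" blob the stream opens with the bzip2 magic "BZh" (the "BZh91AY&SY" run in the second row of the dump) instead of the zlib 78 9c header. A hedged sketch of decompressing the remainder once the header has been read, reusing read_stream_header from the earlier sketch; the mapping of parameter values to codecs below is inferred from these dumps, not taken from Mercurial's compression-engine registry:

    import bz2
    import zlib

    def decompress_parts(fh, params):
        """Return the uncompressed bundle2 part stream (illustration only)."""
        comp = params.get(b'Compression', b'UN')
        rest = fh.read()
        if comp == b'GZ':
            d = zlib.decompressobj()
            return d.decompress(rest) + d.flush()   # payload starts with 78 9c
        if comp == b'BZ':
            return bz2.decompress(rest)             # payload starts with 'BZh'
        if comp == b'UN':
            return rest                             # bundle was not compressed
        raise ValueError('unknown Compression parameter: %r' % comp)
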
1219
1219
1220 unknown compression while unbundling
1220 unknown compression while unbundling
1221 ------------------------------------
1221 ------------------------------------
1222
1222
1223 $ hg bundle2 --param Compression=FooBarUnknown --rev '8+7+5+4' ../rev.hg2.bz
1223 $ hg bundle2 --param Compression=FooBarUnknown --rev '8+7+5+4' ../rev.hg2.bz
1224 $ cat ../rev.hg2.bz | hg statbundle2
1224 $ cat ../rev.hg2.bz | hg statbundle2
1225 abort: unknown parameters: Stream Parameter - Compression='FooBarUnknown'
1225 abort: unknown parameters: Stream Parameter - Compression='FooBarUnknown'
1226 [255]
1226 [255]
1227 $ hg unbundle ../rev.hg2.bz
1227 $ hg unbundle ../rev.hg2.bz
1228 abort: ../rev.hg2.bz: unknown bundle feature, Stream Parameter - Compression='FooBarUnknown'
1228 abort: ../rev.hg2.bz: unknown bundle feature, Stream Parameter - Compression='FooBarUnknown'
1229 (see https://mercurial-scm.org/wiki/BundleFeature for more information)
1229 (see https://mercurial-scm.org/wiki/BundleFeature for more information)
1230 [255]
1230 [255]
1231
1231
1232 $ cd ..
1232 $ cd ..
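
Both statbundle2 and unbundle refuse this bundle for the same reason: Compression is a stream-level parameter, so if the receiver does not recognise the engine it names, none of the payload can be read at all. A small pre-flight check in the spirit of the sketches above; the accepted set is only what this test exercises plus the uncompressed case, not an authoritative list of the engines a given Mercurial build supports:

    KNOWN_COMPRESSION = {b'GZ', b'BZ', b'UN'}   # only what this test exercises

    def compression_ok(path):
        """True if the bundle declares a compression this sketch knows about."""
        with open(path, 'rb') as fh:
            params = read_stream_header(fh)      # from the earlier sketch
        return params.get(b'Compression', b'UN') in KNOWN_COMPRESSION

    # compression_ok('../rev.hg2.bz') -> False for the FooBarUnknown bundle above
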
@@ -1,260 +1,258 b''
1 Create an extension to test bundle2 with multiple changegroups
1 Create an extension to test bundle2 with multiple changegroups
2
2
3 $ cat > bundle2.py <<EOF
3 $ cat > bundle2.py <<EOF
4 > """
4 > """
5 > """
5 > """
6 > from mercurial import changegroup, discovery, exchange
6 > from mercurial import changegroup, discovery, exchange
7 >
7 >
8 > def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
8 > def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
9 > b2caps=None, heads=None, common=None,
9 > b2caps=None, heads=None, common=None,
10 > **kwargs):
10 > **kwargs):
11 > # Create two changegroups given the common changesets and heads for the
11 > # Create two changegroups given the common changesets and heads for the
12 > # changegroup part we are asked to produce. Use the parent of each head
12 > # changegroup part we are asked to produce. Use the parent of each head
13 > # in 'heads' as intermediate heads for the first changegroup.
13 > # in 'heads' as intermediate heads for the first changegroup.
14 > intermediates = [repo[r].p1().node() for r in heads]
14 > intermediates = [repo[r].p1().node() for r in heads]
15 > outgoing = discovery.outgoing(repo, common, intermediates)
15 > outgoing = discovery.outgoing(repo, common, intermediates)
16 > cg = changegroup.getchangegroup(repo, source, outgoing,
16 > cg = changegroup.getchangegroup(repo, source, outgoing)
17 > bundlecaps=bundlecaps)
18 > bundler.newpart('output', data='changegroup1')
17 > bundler.newpart('output', data='changegroup1')
19 > bundler.newpart('changegroup', data=cg.getchunks())
18 > bundler.newpart('changegroup', data=cg.getchunks())
20 > outgoing = discovery.outgoing(repo, common + intermediates, heads)
19 > outgoing = discovery.outgoing(repo, common + intermediates, heads)
21 > cg = changegroup.getchangegroup(repo, source, outgoing,
20 > cg = changegroup.getchangegroup(repo, source, outgoing)
22 > bundlecaps=bundlecaps)
23 > bundler.newpart('output', data='changegroup2')
21 > bundler.newpart('output', data='changegroup2')
24 > bundler.newpart('changegroup', data=cg.getchunks())
22 > bundler.newpart('changegroup', data=cg.getchunks())
25 >
23 >
26 > def _pull(repo, *args, **kwargs):
24 > def _pull(repo, *args, **kwargs):
27 > pullop = _orig_pull(repo, *args, **kwargs)
25 > pullop = _orig_pull(repo, *args, **kwargs)
28 > repo.ui.write('pullop.cgresult is %d\n' % pullop.cgresult)
26 > repo.ui.write('pullop.cgresult is %d\n' % pullop.cgresult)
29 > return pullop
27 > return pullop
30 >
28 >
31 > _orig_pull = exchange.pull
29 > _orig_pull = exchange.pull
32 > exchange.pull = _pull
30 > exchange.pull = _pull
33 > exchange.getbundle2partsmapping['changegroup'] = _getbundlechangegrouppart
31 > exchange.getbundle2partsmapping['changegroup'] = _getbundlechangegrouppart
34 > EOF
32 > EOF
35
33
36 $ cat >> $HGRCPATH << EOF
34 $ cat >> $HGRCPATH << EOF
37 > [ui]
35 > [ui]
38 > logtemplate={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline}
36 > logtemplate={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline}
39 > EOF
37 > EOF
40
38
41 Start with a simple repository with a single commit
39 Start with a simple repository with a single commit
42
40
43 $ hg init repo
41 $ hg init repo
44 $ cd repo
42 $ cd repo
45 $ cat > .hg/hgrc << EOF
43 $ cat > .hg/hgrc << EOF
46 > [extensions]
44 > [extensions]
47 > bundle2=$TESTTMP/bundle2.py
45 > bundle2=$TESTTMP/bundle2.py
48 > EOF
46 > EOF
49
47
50 $ echo A > A
48 $ echo A > A
51 $ hg commit -A -m A -q
49 $ hg commit -A -m A -q
52 $ cd ..
50 $ cd ..
53
51
54 Clone
52 Clone
55
53
56 $ hg clone -q repo clone
54 $ hg clone -q repo clone
57
55
58 Add two linear commits
56 Add two linear commits
59
57
60 $ cd repo
58 $ cd repo
61 $ echo B > B
59 $ echo B > B
62 $ hg commit -A -m B -q
60 $ hg commit -A -m B -q
63 $ echo C > C
61 $ echo C > C
64 $ hg commit -A -m C -q
62 $ hg commit -A -m C -q
65
63
66 $ cd ../clone
64 $ cd ../clone
67 $ cat >> .hg/hgrc <<EOF
65 $ cat >> .hg/hgrc <<EOF
68 > [hooks]
66 > [hooks]
69 > pretxnchangegroup = sh -c "printenv.py pretxnchangegroup"
67 > pretxnchangegroup = sh -c "printenv.py pretxnchangegroup"
70 > changegroup = sh -c "printenv.py changegroup"
68 > changegroup = sh -c "printenv.py changegroup"
71 > incoming = sh -c "printenv.py incoming"
69 > incoming = sh -c "printenv.py incoming"
72 > EOF
70 > EOF
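
The shell hooks above simply dump their environment; in the pull output that follows, the interesting variables are HG_NODE (the first changeset added by a changegroup) and HG_NODE_LAST (the last one), which together delimit what each changegroup brought in. The same information reaches in-process hooks; a hedged sketch not used by this test, assuming the usual convention that the HG_* variables arrive as lowercase keyword arguments:

    # reporthook.py -- hypothetical helper, not part of this test
    def report(ui, repo, hooktype, node=None, node_last=None, **kwargs):
        """changegroup hook: list every changeset the changegroup delivered."""
        # new changesets are appended consecutively, so a plain rev range works
        first = repo[node].rev()
        last = repo[node_last or node].rev()
        for rev in range(first, last + 1):
            ui.write('%s delivered %d:%s\n' % (hooktype, rev, repo[rev]))

    # enabled with something like:
    #   [hooks]
    #   changegroup.report = python:/path/to/reporthook.py:report
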
73
71
74 Pull the new commits in the clone
72 Pull the new commits in the clone
75
73
76 $ hg pull
74 $ hg pull
77 pulling from $TESTTMP/repo (glob)
75 pulling from $TESTTMP/repo (glob)
78 searching for changes
76 searching for changes
79 remote: changegroup1
77 remote: changegroup1
80 adding changesets
78 adding changesets
81 adding manifests
79 adding manifests
82 adding file changes
80 adding file changes
83 added 1 changesets with 1 changes to 1 files
81 added 1 changesets with 1 changes to 1 files
84 pretxnchangegroup hook: HG_HOOKNAME=pretxnchangegroup HG_HOOKTYPE=pretxnchangegroup HG_NODE=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_NODE_LAST=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_PENDING=$TESTTMP/clone HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
82 pretxnchangegroup hook: HG_HOOKNAME=pretxnchangegroup HG_HOOKTYPE=pretxnchangegroup HG_NODE=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_NODE_LAST=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_PENDING=$TESTTMP/clone HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
85 remote: changegroup2
83 remote: changegroup2
86 adding changesets
84 adding changesets
87 adding manifests
85 adding manifests
88 adding file changes
86 adding file changes
89 added 1 changesets with 1 changes to 1 files
87 added 1 changesets with 1 changes to 1 files
90 pretxnchangegroup hook: HG_HOOKNAME=pretxnchangegroup HG_HOOKTYPE=pretxnchangegroup HG_NODE=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_NODE_LAST=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_PENDING=$TESTTMP/clone HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
88 pretxnchangegroup hook: HG_HOOKNAME=pretxnchangegroup HG_HOOKTYPE=pretxnchangegroup HG_NODE=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_NODE_LAST=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_PENDING=$TESTTMP/clone HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
91 changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_NODE_LAST=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
89 changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_NODE_LAST=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
92 incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
90 incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
93 changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_NODE_LAST=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
91 changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_NODE_LAST=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
94 incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
92 incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
95 pullop.cgresult is 1
93 pullop.cgresult is 1
96 (run 'hg update' to get a working copy)
94 (run 'hg update' to get a working copy)
97 $ hg update
95 $ hg update
98 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
96 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
99 $ hg log -G
97 $ hg log -G
100 @ 2:f838bfaca5c7 public test C
98 @ 2:f838bfaca5c7 public test C
101 |
99 |
102 o 1:27547f69f254 public test B
100 o 1:27547f69f254 public test B
103 |
101 |
104 o 0:4a2df7238c3b public test A
102 o 0:4a2df7238c3b public test A
105
103
106 Add more changesets with multiple heads to the original repository
104 Add more changesets with multiple heads to the original repository
107
105
108 $ cd ../repo
106 $ cd ../repo
109 $ echo D > D
107 $ echo D > D
110 $ hg commit -A -m D -q
108 $ hg commit -A -m D -q
111 $ hg up -r 1
109 $ hg up -r 1
112 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
110 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
113 $ echo E > E
111 $ echo E > E
114 $ hg commit -A -m E -q
112 $ hg commit -A -m E -q
115 $ echo F > F
113 $ echo F > F
116 $ hg commit -A -m F -q
114 $ hg commit -A -m F -q
117 $ hg up -r 1
115 $ hg up -r 1
118 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
116 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
119 $ echo G > G
117 $ echo G > G
120 $ hg commit -A -m G -q
118 $ hg commit -A -m G -q
121 $ hg up -r 3
119 $ hg up -r 3
122 2 files updated, 0 files merged, 1 files removed, 0 files unresolved
120 2 files updated, 0 files merged, 1 files removed, 0 files unresolved
123 $ echo H > H
121 $ echo H > H
124 $ hg commit -A -m H -q
122 $ hg commit -A -m H -q
125 $ hg log -G
123 $ hg log -G
126 @ 7:5cd59d311f65 draft test H
124 @ 7:5cd59d311f65 draft test H
127 |
125 |
128 | o 6:1d14c3ce6ac0 draft test G
126 | o 6:1d14c3ce6ac0 draft test G
129 | |
127 | |
130 | | o 5:7f219660301f draft test F
128 | | o 5:7f219660301f draft test F
131 | | |
129 | | |
132 | | o 4:8a5212ebc852 draft test E
130 | | o 4:8a5212ebc852 draft test E
133 | |/
131 | |/
134 o | 3:b3325c91a4d9 draft test D
132 o | 3:b3325c91a4d9 draft test D
135 | |
133 | |
136 o | 2:f838bfaca5c7 draft test C
134 o | 2:f838bfaca5c7 draft test C
137 |/
135 |/
138 o 1:27547f69f254 draft test B
136 o 1:27547f69f254 draft test B
139 |
137 |
140 o 0:4a2df7238c3b draft test A
138 o 0:4a2df7238c3b draft test A
141
139
142 New heads are reported during transfer and properly accounted for in
140 New heads are reported during transfer and properly accounted for in
143 pullop.cgresult
141 pullop.cgresult
144
142
145 $ cd ../clone
143 $ cd ../clone
146 $ hg pull
144 $ hg pull
147 pulling from $TESTTMP/repo (glob)
145 pulling from $TESTTMP/repo (glob)
148 searching for changes
146 searching for changes
149 remote: changegroup1
147 remote: changegroup1
150 adding changesets
148 adding changesets
151 adding manifests
149 adding manifests
152 adding file changes
150 adding file changes
153 added 2 changesets with 2 changes to 2 files (+1 heads)
151 added 2 changesets with 2 changes to 2 files (+1 heads)
154 pretxnchangegroup hook: HG_HOOKNAME=pretxnchangegroup HG_HOOKTYPE=pretxnchangegroup HG_NODE=b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e HG_NODE_LAST=8a5212ebc8527f9fb821601504794e3eb11a1ed3 HG_PENDING=$TESTTMP/clone HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
152 pretxnchangegroup hook: HG_HOOKNAME=pretxnchangegroup HG_HOOKTYPE=pretxnchangegroup HG_NODE=b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e HG_NODE_LAST=8a5212ebc8527f9fb821601504794e3eb11a1ed3 HG_PENDING=$TESTTMP/clone HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
155 remote: changegroup2
153 remote: changegroup2
156 adding changesets
154 adding changesets
157 adding manifests
155 adding manifests
158 adding file changes
156 adding file changes
159 added 3 changesets with 3 changes to 3 files (+1 heads)
157 added 3 changesets with 3 changes to 3 files (+1 heads)
160 pretxnchangegroup hook: HG_HOOKNAME=pretxnchangegroup HG_HOOKTYPE=pretxnchangegroup HG_NODE=7f219660301fe4c8a116f714df5e769695cc2b46 HG_NODE_LAST=5cd59d311f6508b8e0ed28a266756c859419c9f1 HG_PENDING=$TESTTMP/clone HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
158 pretxnchangegroup hook: HG_HOOKNAME=pretxnchangegroup HG_HOOKTYPE=pretxnchangegroup HG_NODE=7f219660301fe4c8a116f714df5e769695cc2b46 HG_NODE_LAST=5cd59d311f6508b8e0ed28a266756c859419c9f1 HG_PENDING=$TESTTMP/clone HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
161 changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e HG_NODE_LAST=8a5212ebc8527f9fb821601504794e3eb11a1ed3 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
159 changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e HG_NODE_LAST=8a5212ebc8527f9fb821601504794e3eb11a1ed3 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
162 incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
160 incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
163 incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=8a5212ebc8527f9fb821601504794e3eb11a1ed3 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
161 incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=8a5212ebc8527f9fb821601504794e3eb11a1ed3 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
164 changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=7f219660301fe4c8a116f714df5e769695cc2b46 HG_NODE_LAST=5cd59d311f6508b8e0ed28a266756c859419c9f1 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
162 changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=7f219660301fe4c8a116f714df5e769695cc2b46 HG_NODE_LAST=5cd59d311f6508b8e0ed28a266756c859419c9f1 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
165 incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=7f219660301fe4c8a116f714df5e769695cc2b46 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
163 incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=7f219660301fe4c8a116f714df5e769695cc2b46 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
166 incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=1d14c3ce6ac0582d2809220d33e8cd7a696e0156 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
164 incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=1d14c3ce6ac0582d2809220d33e8cd7a696e0156 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
167 incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=5cd59d311f6508b8e0ed28a266756c859419c9f1 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
165 incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=5cd59d311f6508b8e0ed28a266756c859419c9f1 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
168 pullop.cgresult is 3
166 pullop.cgresult is 3
169 (run 'hg heads' to see heads, 'hg merge' to merge)
167 (run 'hg heads' to see heads, 'hg merge' to merge)
170 $ hg log -G
168 $ hg log -G
171 o 7:5cd59d311f65 public test H
169 o 7:5cd59d311f65 public test H
172 |
170 |
173 | o 6:1d14c3ce6ac0 public test G
171 | o 6:1d14c3ce6ac0 public test G
174 | |
172 | |
175 | | o 5:7f219660301f public test F
173 | | o 5:7f219660301f public test F
176 | | |
174 | | |
177 | | o 4:8a5212ebc852 public test E
175 | | o 4:8a5212ebc852 public test E
178 | |/
176 | |/
179 o | 3:b3325c91a4d9 public test D
177 o | 3:b3325c91a4d9 public test D
180 | |
178 | |
181 @ | 2:f838bfaca5c7 public test C
179 @ | 2:f838bfaca5c7 public test C
182 |/
180 |/
183 o 1:27547f69f254 public test B
181 o 1:27547f69f254 public test B
184 |
182 |
185 o 0:4a2df7238c3b public test A
183 o 0:4a2df7238c3b public test A
186
184
187 Removing a head from the original repository by merging it
185 Removing a head from the original repository by merging it
188
186
189 $ cd ../repo
187 $ cd ../repo
190 $ hg merge -r 6 -q
188 $ hg merge -r 6 -q
191 $ hg commit -m Merge
189 $ hg commit -m Merge
192 $ echo I > I
190 $ echo I > I
193 $ hg commit -A -m H -q
191 $ hg commit -A -m H -q
194 $ hg log -G
192 $ hg log -G
195 @ 9:9d18e5bd9ab0 draft test H
193 @ 9:9d18e5bd9ab0 draft test H
196 |
194 |
197 o 8:71bd7b46de72 draft test Merge
195 o 8:71bd7b46de72 draft test Merge
198 |\
196 |\
199 | o 7:5cd59d311f65 draft test H
197 | o 7:5cd59d311f65 draft test H
200 | |
198 | |
201 o | 6:1d14c3ce6ac0 draft test G
199 o | 6:1d14c3ce6ac0 draft test G
202 | |
200 | |
203 | | o 5:7f219660301f draft test F
201 | | o 5:7f219660301f draft test F
204 | | |
202 | | |
205 +---o 4:8a5212ebc852 draft test E
203 +---o 4:8a5212ebc852 draft test E
206 | |
204 | |
207 | o 3:b3325c91a4d9 draft test D
205 | o 3:b3325c91a4d9 draft test D
208 | |
206 | |
209 | o 2:f838bfaca5c7 draft test C
207 | o 2:f838bfaca5c7 draft test C
210 |/
208 |/
211 o 1:27547f69f254 draft test B
209 o 1:27547f69f254 draft test B
212 |
210 |
213 o 0:4a2df7238c3b draft test A
211 o 0:4a2df7238c3b draft test A
214
212
215 Removed heads are reported during transfer and properly accounted for in
213 Removed heads are reported during transfer and properly accounted for in
216 pullop.cgresult
214 pullop.cgresult
217
215
218 $ cd ../clone
216 $ cd ../clone
219 $ hg pull
217 $ hg pull
220 pulling from $TESTTMP/repo (glob)
218 pulling from $TESTTMP/repo (glob)
221 searching for changes
219 searching for changes
222 remote: changegroup1
220 remote: changegroup1
223 adding changesets
221 adding changesets
224 adding manifests
222 adding manifests
225 adding file changes
223 adding file changes
226 added 1 changesets with 0 changes to 0 files (-1 heads)
224 added 1 changesets with 0 changes to 0 files (-1 heads)
227 pretxnchangegroup hook: HG_HOOKNAME=pretxnchangegroup HG_HOOKTYPE=pretxnchangegroup HG_NODE=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_NODE_LAST=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_PENDING=$TESTTMP/clone HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
225 pretxnchangegroup hook: HG_HOOKNAME=pretxnchangegroup HG_HOOKTYPE=pretxnchangegroup HG_NODE=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_NODE_LAST=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_PENDING=$TESTTMP/clone HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
228 remote: changegroup2
226 remote: changegroup2
229 adding changesets
227 adding changesets
230 adding manifests
228 adding manifests
231 adding file changes
229 adding file changes
232 added 1 changesets with 1 changes to 1 files
230 added 1 changesets with 1 changes to 1 files
233 pretxnchangegroup hook: HG_HOOKNAME=pretxnchangegroup HG_HOOKTYPE=pretxnchangegroup HG_NODE=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_NODE_LAST=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_PENDING=$TESTTMP/clone HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
231 pretxnchangegroup hook: HG_HOOKNAME=pretxnchangegroup HG_HOOKTYPE=pretxnchangegroup HG_NODE=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_NODE_LAST=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_PENDING=$TESTTMP/clone HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
234 changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_NODE_LAST=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
232 changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_NODE_LAST=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
235 incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
233 incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
236 changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_NODE_LAST=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
234 changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_NODE_LAST=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
237 incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
235 incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
238 pullop.cgresult is -2
236 pullop.cgresult is -2
239 (run 'hg update' to get a working copy)
237 (run 'hg update' to get a working copy)
240 $ hg log -G
238 $ hg log -G
241 o 9:9d18e5bd9ab0 public test H
239 o 9:9d18e5bd9ab0 public test H
242 |
240 |
243 o 8:71bd7b46de72 public test Merge
241 o 8:71bd7b46de72 public test Merge
244 |\
242 |\
245 | o 7:5cd59d311f65 public test H
243 | o 7:5cd59d311f65 public test H
246 | |
244 | |
247 o | 6:1d14c3ce6ac0 public test G
245 o | 6:1d14c3ce6ac0 public test G
248 | |
246 | |
249 | | o 5:7f219660301f public test F
247 | | o 5:7f219660301f public test F
250 | | |
248 | | |
251 +---o 4:8a5212ebc852 public test E
249 +---o 4:8a5212ebc852 public test E
252 | |
250 | |
253 | o 3:b3325c91a4d9 public test D
251 | o 3:b3325c91a4d9 public test D
254 | |
252 | |
255 | @ 2:f838bfaca5c7 public test C
253 | @ 2:f838bfaca5c7 public test C
256 |/
254 |/
257 o 1:27547f69f254 public test B
255 o 1:27547f69f254 public test B
258 |
256 |
259 o 0:4a2df7238c3b public test A
257 o 0:4a2df7238c3b public test A
260
258
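
Across the three pulls above, pullop.cgresult came out as 1, 3 and -2. Those values are consistent with a simple convention: each changegroup application reports 1 when the repository's head count is unchanged, 1+n when it adds n heads, -1-n when it removes n (and 0 on error), and a pull that applied several changegroups folds the head deltas into one number. A sketch of that folding, checked against the three pulls above; this illustrates the observed convention and is not Mercurial's own code:

    def combined_result(per_changegroup_results):
        """Fold per-changegroup results into a single pull result (sketch)."""
        delta = 0
        for r in per_changegroup_results:
            if r == 0:                 # any failed changegroup fails the pull
                return 0
            if r > 1:
                delta += r - 1         # heads added by this changegroup
            elif r < -1:
                delta += r + 1         # heads removed by this changegroup
        if delta > 0:
            return 1 + delta
        if delta < 0:
            return -1 + delta
        return 1

    assert combined_result([1, 1]) == 1     # first pull: B then C, heads unchanged
    assert combined_result([2, 2]) == 3     # second pull: +1 head per changegroup
    assert combined_result([-2, 1]) == -2   # third pull: the merge removed a head
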