##// END OF EJS Templates
getchangegroup: take an 'outgoing' object as argument (API)...
Pierre-Yves David -
r29807:d4e02634 default
parent child Browse files
Show More
@@ -1,1048 +1,1047 b''
1 # changegroup.py - Mercurial changegroup manipulation functions
1 # changegroup.py - Mercurial changegroup manipulation functions
2 #
2 #
3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import os
10 import os
11 import struct
11 import struct
12 import tempfile
12 import tempfile
13 import weakref
13 import weakref
14
14
15 from .i18n import _
15 from .i18n import _
16 from .node import (
16 from .node import (
17 hex,
17 hex,
18 nullid,
18 nullid,
19 nullrev,
19 nullrev,
20 short,
20 short,
21 )
21 )
22
22
23 from . import (
23 from . import (
24 branchmap,
24 branchmap,
25 dagutil,
25 dagutil,
26 discovery,
26 discovery,
27 error,
27 error,
28 mdiff,
28 mdiff,
29 phases,
29 phases,
30 util,
30 util,
31 )
31 )
32
32
# struct format strings for the fixed-size per-revision delta headers,
# one per changegroup wire-format version (see the *unpacker classes'
# _deltaheader methods for the field meanings):
#   cg1: node, p1, p2, cs            (delta base is implicit: p1 or prevnode)
#   cg2: node, p1, p2, deltabase, cs (delta base carried explicitly)
#   cg3: cg2 fields plus a 16-bit flags field, explicitly big-endian
_CHANGEGROUPV1_DELTA_HEADER = "20s20s20s20s"
_CHANGEGROUPV2_DELTA_HEADER = "20s20s20s20s20s"
_CHANGEGROUPV3_DELTA_HEADER = ">20s20s20s20s20sH"
36
36
def readexactly(stream, n):
    '''read n bytes from stream.read and abort if less was available'''
    data = stream.read(n)
    if len(data) >= n:
        return data
    # A short read means the peer hung up or the bundle is truncated.
    raise error.Abort(_("stream ended unexpectedly"
                        " (got %d bytes, expected %d)")
                      % (len(data), n))
45
45
def getchunk(stream):
    """return the next chunk from stream as a string"""
    # Each chunk is framed by a 4-byte big-endian length that includes
    # the length field itself.
    header = readexactly(stream, 4)
    length = struct.unpack(">l", header)[0]
    if length > 4:
        return readexactly(stream, length - 4)
    if length:
        # 1..4 (or negative) can never frame a valid payload.
        raise error.Abort(_("invalid chunk length %d") % length)
    # A zero length marks the end of the current chunk group.
    return ""
55
55
def chunkheader(length):
    """return a changegroup chunk header (string)"""
    # The on-the-wire length counts the 4-byte header itself.
    framed = length + 4
    return struct.pack(">l", framed)
59
59
def closechunk():
    """return a changegroup chunk header (string) for a zero-length chunk"""
    # Equivalent to struct.pack(">l", 0); a zero-length header terminates
    # the current sequence of chunks.
    return b"\x00\x00\x00\x00"
63
63
def combineresults(results):
    """logic to combine 0 or more addchangegroup results into one

    Each result follows the addchangegroup convention: 0 for "nothing
    changed", 1 for "same number of heads", 1+n / -1-n for n heads
    added / removed.  The combined value follows the same convention.
    """
    headdelta = 0
    sawzero = False
    for ret in results:
        # If any changegroup result is 0, return 0
        if ret == 0:
            sawzero = True
            break
        if ret < -1:
            headdelta += ret + 1
        elif ret > 1:
            headdelta += ret - 1
    # NOTE(review): when earlier results changed head counts, the head
    # delta takes precedence over a later 0 result (same as the original
    # loop-and-break implementation) -- confirm this is intended.
    if headdelta > 0:
        return 1 + headdelta
    if headdelta < 0:
        return -1 + headdelta
    return 0 if sawzero else 1
82
82
def writechunks(ui, chunks, filename, vfs=None):
    """Write chunks to a file and return its filename.

    The stream is assumed to be a bundle file.
    Existing files will not be overwritten.
    If no filename is specified, a temporary file is created.
    """
    fh = None
    tmpname = None  # temp file to remove again if writing fails midway
    try:
        if not filename:
            fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg")
            fh = os.fdopen(fd, "wb")
            tmpname = filename
        elif vfs:
            fh = vfs.open(filename, "wb")
        else:
            fh = open(filename, "wb")
        for chunk in chunks:
            fh.write(chunk)
        tmpname = None  # success: keep the file
        return filename
    finally:
        if fh is not None:
            fh.close()
        if tmpname is not None:
            # Only the temporary file is cleaned up; caller-named files
            # are left in place even on failure.
            if filename and vfs:
                vfs.unlink(tmpname)
            else:
                os.unlink(tmpname)
114
114
class cg1unpacker(object):
    """Unpacker for cg1 changegroup streams.

    A changegroup unpacker handles the framing of the revision data in
    the wire format. Most consumers will want to use the apply()
    method to add the changes from the changegroup to a repository.

    If you're forwarding a changegroup unmodified to another consumer,
    use getchunks(), which returns an iterator of changegroup
    chunks. This is mostly useful for cases where you need to know the
    data stream has ended by observing the end of the changegroup.

    deltachunk() is useful only if you're applying delta data. Most
    consumers should prefer apply() instead.

    A few other public methods exist. Those are used only for
    bundlerepo and some debug commands - their use is discouraged.
    """
    deltaheader = _CHANGEGROUPV1_DELTA_HEADER
    deltaheadersize = struct.calcsize(deltaheader)
    version = '01'
    _grouplistcount = 1 # One list of files after the manifests

    def __init__(self, fh, alg, extras=None):
        """Wrap stream ``fh``, decompressing with algorithm ``alg``.

        ``alg`` must be a key of util.decompressors ('UN' is mapped to
        no compression, 'BZ' to the headerless-bzip2 variant).
        """
        if alg == 'UN':
            alg = None # get more modern without breaking too much
        if not alg in util.decompressors:
            raise error.Abort(_('unknown stream compression type: %s')
                              % alg)
        if alg == 'BZ':
            # the stream's bzip2 header was consumed upstream — use the
            # decompressor that re-prepends it (presumably; see
            # util.decompressors for the exact handling)
            alg = '_truncatedBZ'
        self._stream = util.decompressors[alg](fh)
        self._type = alg
        self.extras = extras or {}
        # per-chunk progress callback, set by apply()/_unpackmanifests()
        self.callback = None

    # These methods (compressed, read, seek, tell) all appear to only
    # be used by bundlerepo, but it's a little hard to tell.
    def compressed(self):
        return self._type is not None
    def read(self, l):
        return self._stream.read(l)
    def seek(self, pos):
        return self._stream.seek(pos)
    def tell(self):
        return self._stream.tell()
    def close(self):
        return self._stream.close()

    def _chunklength(self):
        """Read a chunk's length header; return its payload length.

        Returns 0 at the end of a chunk group and aborts on a length
        that can't frame a valid payload. Invokes self.callback (if
        set) once per non-empty chunk, for progress reporting.
        """
        d = readexactly(self._stream, 4)
        l = struct.unpack(">l", d)[0]
        if l <= 4:
            if l:
                raise error.Abort(_("invalid chunk length %d") % l)
            return 0
        if self.callback:
            self.callback()
        # the stored length includes the 4-byte header itself
        return l - 4

    def changelogheader(self):
        """v10 does not have a changelog header chunk"""
        return {}

    def manifestheader(self):
        """v10 does not have a manifest header chunk"""
        return {}

    def filelogheader(self):
        """return the header of the filelogs chunk, v10 only has the filename"""
        l = self._chunklength()
        if not l:
            # empty chunk: end of the filelog list
            return {}
        fname = readexactly(self._stream, l)
        return {'filename': fname}

    def _deltaheader(self, headertuple, prevnode):
        """Decode a cg1 delta header tuple.

        cg1 has no explicit delta base: it is the previous node in the
        stream, or p1 for the first delta of a group.
        """
        node, p1, p2, cs = headertuple
        if prevnode is None:
            deltabase = p1
        else:
            deltabase = prevnode
        flags = 0
        return node, p1, p2, deltabase, cs, flags

    def deltachunk(self, prevnode):
        """Read one delta chunk; return its fields as a dict.

        Returns {} at the end of the current chunk group.
        """
        l = self._chunklength()
        if not l:
            return {}
        headerdata = readexactly(self._stream, self.deltaheadersize)
        header = struct.unpack(self.deltaheader, headerdata)
        delta = readexactly(self._stream, l - self.deltaheadersize)
        node, p1, p2, deltabase, cs, flags = self._deltaheader(header, prevnode)
        return {'node': node, 'p1': p1, 'p2': p2, 'cs': cs,
                'deltabase': deltabase, 'delta': delta, 'flags': flags}

    def getchunks(self):
        """returns all the chunks contained in the bundle

        Used when you need to forward the binary stream to a file or another
        network API. To do so, it parses the changegroup data, otherwise it
        would block in case of sshrepo because it doesn't know the end of the
        stream.
        """
        # an empty chunkgroup is the end of the changegroup
        # a changegroup has at least 2 chunkgroups (changelog and manifest).
        # after that, changegroup versions 1 and 2 have a series of groups
        # with one group per file. changegroup 3 has a series of directory
        # manifests before the files.
        count = 0
        emptycount = 0
        while emptycount < self._grouplistcount:
            empty = True
            count += 1
            while True:
                chunk = getchunk(self)
                if not chunk:
                    # only empty groups past changelog+manifest count
                    # towards termination
                    if empty and count > 2:
                        emptycount += 1
                    break
                empty = False
                yield chunkheader(len(chunk))
                pos = 0
                # re-emit the payload in 1MB slices to bound memory use
                while pos < len(chunk):
                    next = pos + 2**20
                    yield chunk[pos:next]
                    pos = next
            yield closechunk()

    def _unpackmanifests(self, repo, revmap, trp, prog, numchanges):
        """Consume the manifest group and add it to the repo.

        ``prog`` is the progress-callback factory defined in apply().
        """
        # We know that we'll never have more manifests than we had
        # changesets.
        self.callback = prog(_('manifests'), numchanges)
        # no need to check for empty manifest group here:
        # if the result of the merge of 1 and 2 is the same in 3 and 4,
        # no new manifest will be created and the manifest group will
        # be empty during the pull
        self.manifestheader()
        repo.manifest.addgroup(self, revmap, trp)
        repo.ui.progress(_('manifests'), None)
        self.callback = None

    def apply(self, repo, srctype, url, emptyok=False,
              targetphase=phases.draft, expectedtotal=None):
        """Add the changegroup returned by source.read() to this repo.
        srctype is a string like 'push', 'pull', or 'unbundle'. url is
        the URL of the repo where this changegroup is coming from.

        Return an integer summarizing the change to this repo:
        - nothing changed or no source: 0
        - more heads than before: 1+added heads (2..n)
        - fewer heads than before: -1-removed heads (-2..-n)
        - number of heads stays the same: 1
        """
        repo = repo.unfiltered()
        def csmap(x):
            repo.ui.debug("add changeset %s\n" % short(x))
            # the linkrev the next changeset will be stored at
            return len(cl)

        def revmap(x):
            return cl.rev(x)

        changesets = files = revisions = 0

        try:
            with repo.transaction("\n".join([srctype,
                                             util.hidepassword(url)])) as tr:
                # The transaction could have been created before and already
                # carries source information. In this case we use the top
                # level data. We overwrite the argument because we need to use
                # the top level value (if they exist) in this function.
                srctype = tr.hookargs.setdefault('source', srctype)
                url = tr.hookargs.setdefault('url', url)
                repo.hook('prechangegroup', throw=True, **tr.hookargs)

                # write changelog data to temp files so concurrent readers
                # will not see an inconsistent view
                cl = repo.changelog
                cl.delayupdate(tr)
                oldheads = cl.heads()

                trp = weakref.proxy(tr)
                # pull off the changeset group
                repo.ui.status(_("adding changesets\n"))
                clstart = len(cl)
                # per-step progress reporter; instances are used as the
                # per-chunk callback by _chunklength()
                class prog(object):
                    def __init__(self, step, total):
                        self._step = step
                        self._total = total
                        self._count = 1
                    def __call__(self):
                        repo.ui.progress(self._step, self._count,
                                         unit=_('chunks'), total=self._total)
                        self._count += 1
                self.callback = prog(_('changesets'), expectedtotal)

                # collect the set of files touched, for progress totals
                # when adding the file changes below
                efiles = set()
                def onchangelog(cl, node):
                    efiles.update(cl.readfiles(node))

                self.changelogheader()
                srccontent = cl.addgroup(self, csmap, trp,
                                         addrevisioncb=onchangelog)
                efiles = len(efiles)

                if not (srccontent or emptyok):
                    raise error.Abort(_("received changelog group is empty"))
                clend = len(cl)
                changesets = clend - clstart
                repo.ui.progress(_('changesets'), None)
                self.callback = None

                # pull off the manifest group
                repo.ui.status(_("adding manifests\n"))
                self._unpackmanifests(repo, revmap, trp, prog, changesets)

                needfiles = {}
                if repo.ui.configbool('server', 'validate', default=False):
                    # validate incoming csets have their manifests
                    for cset in xrange(clstart, clend):
                        mfnode = repo.changelog.read(
                            repo.changelog.node(cset))[0]
                        mfest = repo.manifest.readdelta(mfnode)
                        # store file nodes we must see
                        for f, n in mfest.iteritems():
                            needfiles.setdefault(f, set()).add(n)

                # process the files
                repo.ui.status(_("adding file changes\n"))
                newrevs, newfiles = _addchangegroupfiles(
                    repo, self, revmap, trp, efiles, needfiles)
                revisions += newrevs
                files += newfiles

                # compute the head-count delta; heads that close a branch
                # are not counted
                dh = 0
                if oldheads:
                    heads = cl.heads()
                    dh = len(heads) - len(oldheads)
                    for h in heads:
                        if h not in oldheads and repo[h].closesbranch():
                            dh -= 1
                htext = ""
                if dh:
                    htext = _(" (%+d heads)") % dh

                repo.ui.status(_("added %d changesets"
                                 " with %d changes to %d files%s\n")
                               % (changesets, revisions, files, htext))
                repo.invalidatevolatilesets()

                if changesets > 0:
                    # only seed tr.hookargs with node boundaries if no
                    # outer caller already did; either way the hooks get
                    # the boundaries of *this* changegroup
                    if 'node' not in tr.hookargs:
                        tr.hookargs['node'] = hex(cl.node(clstart))
                        tr.hookargs['node_last'] = hex(cl.node(clend - 1))
                        hookargs = dict(tr.hookargs)
                    else:
                        hookargs = dict(tr.hookargs)
                        hookargs['node'] = hex(cl.node(clstart))
                        hookargs['node_last'] = hex(cl.node(clend - 1))
                    repo.hook('pretxnchangegroup', throw=True, **hookargs)

                added = [cl.node(r) for r in xrange(clstart, clend)]
                publishing = repo.publishing()
                if srctype in ('push', 'serve'):
                    # Old servers can not push the boundary themselves.
                    # New servers won't push the boundary if changeset already
                    # exists locally as secret
                    #
                    # We should not use added here but the list of all change in
                    # the bundle
                    if publishing:
                        phases.advanceboundary(repo, tr, phases.public,
                                               srccontent)
                    else:
                        # Those changesets have been pushed from the
                        # outside, their phases are going to be pushed
                        # alongside. Therefor `targetphase` is
                        # ignored.
                        phases.advanceboundary(repo, tr, phases.draft,
                                               srccontent)
                        phases.retractboundary(repo, tr, phases.draft, added)
                elif srctype != 'strip':
                    # publishing only alter behavior during push
                    #
                    # strip should not touch boundary at all
                    phases.retractboundary(repo, tr, targetphase, added)

                if changesets > 0:
                    if srctype != 'strip':
                        # During strip, branchcache is invalid but
                        # coming call to `destroyed` will repair it.
                        # In other case we can safely update cache on
                        # disk.
                        repo.ui.debug('updating the branch cache\n')
                        branchmap.updatecache(repo.filtered('served'))

                    def runhooks():
                        # These hooks run when the lock releases, not when the
                        # transaction closes. So it's possible for the changelog
                        # to have changed since we last saw it.
                        if clstart >= len(repo):
                            return

                        repo.hook("changegroup", **hookargs)

                        for n in added:
                            args = hookargs.copy()
                            args['node'] = hex(n)
                            del args['node_last']
                            repo.hook("incoming", **args)

                        newheads = [h for h in repo.heads()
                                    if h not in oldheads]
                        repo.ui.log("incoming",
                                    "%s incoming changes - new heads: %s\n",
                                    len(added),
                                    ', '.join([hex(c[:6]) for c in newheads]))

                    tr.addpostclose('changegroup-runhooks-%020i' % clstart,
                                    lambda tr: repo._afterlock(runhooks))
        finally:
            repo.ui.flush()
        # never return 0 here:
        if dh < 0:
            return dh - 1
        else:
            return dh + 1
441
441
class cg2unpacker(cg1unpacker):
    """Unpacker for version '02' changegroup streams.

    cg2 streams add support for generaldelta: the delta base node is
    carried explicitly in the wire header rather than being inferred
    from the previous node, so only the delta header format differs
    from cg1.
    """
    deltaheader = _CHANGEGROUPV2_DELTA_HEADER
    deltaheadersize = struct.calcsize(deltaheader)
    version = '02'

    def _deltaheader(self, headertuple, prevnode):
        # The delta base is explicit in cg2, so prevnode plays no role
        # here; revlog flags are not transmitted and default to 0.
        node, p1, p2, deltabase, cs = headertuple
        return node, p1, p2, deltabase, cs, 0
457
457
class cg3unpacker(cg2unpacker):
    """Unpacker for version '03' changegroup streams.

    cg3 streams add support for exchanging treemanifests and revlog
    flags: the delta header grows a flags field, and an empty chunk
    separates the (root + directory) manifests from the files.
    """
    deltaheader = _CHANGEGROUPV3_DELTA_HEADER
    deltaheadersize = struct.calcsize(deltaheader)
    version = '03'
    _grouplistcount = 2 # One list of manifests and one list of files

    def _deltaheader(self, headertuple, prevnode):
        # cg3 transmits every field, including revlog flags, on the
        # wire; the header tuple already has the expected shape.
        return headertuple

    def _unpackmanifests(self, repo, revmap, trp, prog, numchanges):
        # Root manifests first, exactly as cg2 does...
        super(cg3unpacker, self)._unpackmanifests(repo, revmap, trp, prog,
                                                  numchanges)
        # ...then any number of per-directory manifest groups, ended by
        # an empty filelog header.
        while True:
            chunkdata = self.filelogheader()
            if not chunkdata:
                break
            # If we get here, there are directory manifests in the changegroup
            dirname = chunkdata["filename"]
            repo.ui.debug("adding %s revisions\n" % dirname)
            if not repo.manifest.dirlog(dirname).addgroup(self, revmap, trp):
                raise error.Abort(_("received dir revlog group is empty"))
484
484
class headerlessfixup(object):
    """File-like wrapper that re-prepends already-consumed bytes.

    Serves the buffered prefix ``h`` first; once it is exhausted, reads
    fall through to the underlying file handle ``fh``.
    """
    def __init__(self, fh, h):
        self._fh = fh
        self._h = h

    def read(self, n):
        buffered = self._h
        if not buffered:
            return readexactly(self._fh, n)
        d = buffered[:n]
        self._h = buffered[n:]
        if len(d) < n:
            # Buffered prefix exhausted mid-read: top up from the stream.
            d += readexactly(self._fh, n - len(d))
        return d
496
496
class cg1packer(object):
    """Bundler producing version '01' changegroup streams."""
    deltaheader = _CHANGEGROUPV1_DELTA_HEADER
    version = '01'
    def __init__(self, repo, bundlecaps=None):
        """Given a source repo, construct a bundler.

        bundlecaps is optional and can be used to specify the set of
        capabilities which can be used to build the bundle.
        """
        # Set of capabilities we can use to build the bundle.
        if bundlecaps is None:
            bundlecaps = set()
        self._bundlecaps = bundlecaps
        # experimental config: bundle.reorder
        reorder = repo.ui.config('bundle', 'reorder', 'auto')
        if reorder == 'auto':
            reorder = None
        else:
            reorder = util.parsebool(reorder)
        self._repo = repo
        self._reorder = reorder
        self._progress = repo.ui.progress
        # Per-section size notes are emitted only in verbose (non-debug)
        # mode; otherwise _verbosenote is a no-op.
        if self._repo.ui.verbose and not self._repo.ui.debugflag:
            self._verbosenote = self._repo.ui.note
        else:
            self._verbosenote = lambda s: None

    def close(self):
        # An empty chunk terminates a group.
        return closechunk()

    def fileheader(self, fname):
        # Chunk announcing which file the following deltas belong to.
        return chunkheader(len(fname)) + fname

    # Extracted both for clarity and for overriding in extensions.
    def _sortgroup(self, revlog, nodelist, lookup):
        """Sort nodes for change group and turn them into revnums."""
        # for generaldelta revlogs, we linearize the revs; this will both be
        # much quicker and generate a much smaller bundle
        if (revlog._generaldelta and self._reorder is None) or self._reorder:
            dag = dagutil.revlogdag(revlog)
            return dag.linearize(set(revlog.rev(n) for n in nodelist))
        return sorted([revlog.rev(n) for n in nodelist])

    def group(self, nodelist, revlog, lookup, units=None):
        """Calculate a delta group, yielding a sequence of changegroup chunks
        (strings).

        Given a list of changeset revs, return a set of deltas and
        metadata corresponding to nodes. The first delta is
        first parent(nodelist[0]) -> nodelist[0], the receiver is
        guaranteed to have this parent as it has all history before
        these changesets. In the case firstparent is nullrev the
        changegroup starts with a full revision.

        If units is not None, progress detail will be generated, units specifies
        the type of revlog that is touched (changelog, manifest, etc.).
        """
        # if we don't have any revisions touched by these changesets, bail
        if not nodelist:
            yield self.close()
            return

        revs = self._sortgroup(revlog, nodelist, lookup)

        # add the parent of the first rev
        revs.insert(0, revlog.parentrevs(revs[0])[0])

        # build deltas
        total = len(revs) - 1
        msgbundling = _('bundling')
        for i in xrange(len(revs) - 1):
            if units is not None:
                self._progress(msgbundling, i + 1, unit=units, total=total)
            prev, curr = revs[i], revs[i + 1]
            linknode = lookup(revlog.node(curr))
            for c in self.revchunk(revlog, curr, prev, linknode):
                yield c

        if units is not None:
            self._progress(msgbundling, None)
        yield self.close()

    # filter any nodes that claim to be part of the known set
    def prune(self, revlog, missing, commonrevs):
        rr, rl = revlog.rev, revlog.linkrev
        return [n for n in missing if rl(rr(n)) not in commonrevs]

    def _packmanifests(self, dir, mfnodes, lookuplinknode):
        """Pack flat manifests into a changegroup stream."""
        # cg1 knows nothing about tree manifests, so 'dir' is always ''.
        assert not dir
        for chunk in self.group(mfnodes, self._repo.manifest,
                                lookuplinknode, units=_('manifests')):
            yield chunk

    def _manifestsdone(self):
        # cg1/cg2 have no explicit end-of-manifests marker.
        return ''

    def generate(self, commonrevs, clnodes, fastpathlinkrev, source):
        '''yield a sequence of changegroup chunks (strings)'''
        repo = self._repo
        cl = repo.changelog

        clrevorder = {}
        mfs = {} # needed manifests
        fnodes = {} # needed file nodes
        changedfiles = set()

        # Callback for the changelog, used to collect changed files and
        # manifest nodes.
        # Returns the linkrev node (identity in the changelog case).
        def lookupcl(x):
            c = cl.read(x)
            clrevorder[x] = len(clrevorder)
            n = c[0]
            # record the first changeset introducing this manifest version
            mfs.setdefault(n, x)
            # Record a complete list of potentially-changed files in
            # this manifest.
            changedfiles.update(c[3])
            return x

        self._verbosenote(_('uncompressed size of bundle content:\n'))
        size = 0
        for chunk in self.group(clnodes, cl, lookupcl, units=_('changesets')):
            size += len(chunk)
            yield chunk
        self._verbosenote(_('%8.i (changelog)\n') % size)

        # We need to make sure that the linkrev in the changegroup refers to
        # the first changeset that introduced the manifest or file revision.
        # The fastpath is usually safer than the slowpath, because the filelogs
        # are walked in revlog order.
        #
        # When taking the slowpath with reorder=None and the manifest revlog
        # uses generaldelta, the manifest may be walked in the "wrong" order.
        # Without 'clrevorder', we would get an incorrect linkrev (see fix in
        # cc0ff93d0c0c).
        #
        # When taking the fastpath, we are only vulnerable to reordering
        # of the changelog itself. The changelog never uses generaldelta, so
        # it is only reordered when reorder=True. To handle this case, we
        # simply take the slowpath, which already has the 'clrevorder' logic.
        # This was also fixed in cc0ff93d0c0c.
        fastpathlinkrev = fastpathlinkrev and not self._reorder
        # Treemanifests don't work correctly with fastpathlinkrev
        # either, because we don't discover which directory nodes to
        # send along with files. This could probably be fixed.
        fastpathlinkrev = fastpathlinkrev and (
            'treemanifest' not in repo.requirements)

        for chunk in self.generatemanifests(commonrevs, clrevorder,
                                            fastpathlinkrev, mfs, fnodes):
            yield chunk
        mfs.clear()
        clrevs = set(cl.rev(x) for x in clnodes)

        if not fastpathlinkrev:
            def linknodes(unused, fname):
                return fnodes.get(fname, {})
        else:
            cln = cl.node
            def linknodes(filerevlog, fname):
                llr = filerevlog.linkrev
                fln = filerevlog.node
                revs = ((r, llr(r)) for r in filerevlog)
                return dict((fln(r), cln(lr)) for r, lr in revs if lr in clrevs)

        for chunk in self.generatefiles(changedfiles, linknodes, commonrevs,
                                        source):
            yield chunk

        yield self.close()

        if clnodes:
            repo.hook('outgoing', node=hex(clnodes[0]), source=source)

    def generatemanifests(self, commonrevs, clrevorder, fastpathlinkrev, mfs,
                          fnodes):
        repo = self._repo
        dirlog = repo.manifest.dirlog
        tmfnodes = {'': mfs}

        # Callback for the manifest, used to collect linkrevs for filelog
        # revisions.
        # Returns the linkrev node (collected in lookupcl).
        def makelookupmflinknode(dir):
            if fastpathlinkrev:
                assert not dir
                return mfs.__getitem__

            def lookupmflinknode(x):
                """Callback for looking up the linknode for manifests.

                Returns the linkrev node for the specified manifest.

                SIDE EFFECT:

                1) fclnodes gets populated with the list of relevant
                   file nodes if we're not using fastpathlinkrev
                2) When treemanifests are in use, collects treemanifest nodes
                   to send

                Note that this means manifests must be completely sent to
                the client before you can trust the list of files and
                treemanifests to send.
                """
                clnode = tmfnodes[dir][x]
                mdata = dirlog(dir).readshallowfast(x)
                for p, n, fl in mdata.iterentries():
                    if fl == 't': # subdirectory manifest
                        subdir = dir + p + '/'
                        tmfclnodes = tmfnodes.setdefault(subdir, {})
                        tmfclnode = tmfclnodes.setdefault(n, clnode)
                        # keep the earliest introducing changeset
                        if clrevorder[clnode] < clrevorder[tmfclnode]:
                            tmfclnodes[n] = clnode
                    else:
                        f = dir + p
                        fclnodes = fnodes.setdefault(f, {})
                        fclnode = fclnodes.setdefault(n, clnode)
                        if clrevorder[clnode] < clrevorder[fclnode]:
                            fclnodes[n] = clnode
                return clnode
            return lookupmflinknode

        size = 0
        while tmfnodes:
            # process directories in a stable (sorted) order
            dir = min(tmfnodes)
            nodes = tmfnodes[dir]
            prunednodes = self.prune(dirlog(dir), nodes, commonrevs)
            if not dir or prunednodes:
                for x in self._packmanifests(dir, prunednodes,
                                             makelookupmflinknode(dir)):
                    size += len(x)
                    yield x
            del tmfnodes[dir]
        self._verbosenote(_('%8.i (manifests)\n') % size)
        yield self._manifestsdone()

    # The 'source' parameter is useful for extensions
    def generatefiles(self, changedfiles, linknodes, commonrevs, source):
        repo = self._repo
        progress = self._progress
        msgbundling = _('bundling')

        total = len(changedfiles)
        # for progress output
        msgfiles = _('files')
        for i, fname in enumerate(sorted(changedfiles)):
            filerevlog = repo.file(fname)
            if not filerevlog:
                raise error.Abort(_("empty or missing revlog for %s") % fname)

            linkrevnodes = linknodes(filerevlog, fname)
            # Lookup for filenodes, we collected the linkrev nodes above in the
            # fastpath case and with lookupmf in the slowpath case.
            def lookupfilelog(x):
                return linkrevnodes[x]

            filenodes = self.prune(filerevlog, linkrevnodes, commonrevs)
            if filenodes:
                progress(msgbundling, i + 1, item=fname, unit=msgfiles,
                         total=total)
                h = self.fileheader(fname)
                size = len(h)
                yield h
                for chunk in self.group(filenodes, filerevlog, lookupfilelog):
                    size += len(chunk)
                    yield chunk
                self._verbosenote(_('%8.i %s\n') % (size, fname))
        progress(msgbundling, None)

    def deltaparent(self, revlog, rev, p1, p2, prev):
        # cg1 always deltas against the previous revision in the stream.
        return prev

    def revchunk(self, revlog, rev, prev, linknode):
        node = revlog.node(rev)
        p1, p2 = revlog.parentrevs(rev)
        base = self.deltaparent(revlog, rev, p1, p2, prev)

        prefix = ''
        if revlog.iscensored(base) or revlog.iscensored(rev):
            # Censored revisions are shipped as full text (the tombstone),
            # with a synthesized diff header replacing the whole base.
            try:
                delta = revlog.revision(node)
            except error.CensoredNodeError as e:
                delta = e.tombstone
            if base == nullrev:
                prefix = mdiff.trivialdiffheader(len(delta))
            else:
                baselen = revlog.rawsize(base)
                prefix = mdiff.replacediffheader(baselen, len(delta))
        elif base == nullrev:
            # No usable base: send the full revision.
            delta = revlog.revision(node)
            prefix = mdiff.trivialdiffheader(len(delta))
        else:
            delta = revlog.revdiff(base, rev)
        p1n, p2n = revlog.parents(node)
        basenode = revlog.node(base)
        flags = revlog.flags(rev)
        meta = self.builddeltaheader(node, p1n, p2n, basenode, linknode, flags)
        meta += prefix
        l = len(meta) + len(delta)
        yield chunkheader(l)
        yield meta
        yield delta

    def builddeltaheader(self, node, p1n, p2n, basenode, linknode, flags):
        # do nothing with basenode, it is implicitly the previous one in HG10
        # do nothing with flags, it is implicitly 0 for cg1 and cg2
        return struct.pack(self.deltaheader, node, p1n, p2n, linknode)
807
807
class cg2packer(cg1packer):
    """Bundler producing version '02' changegroup streams.

    cg2 adds generaldelta support: the delta base node is encoded in the
    header instead of being implicitly the previous chunk.
    """
    version = '02'
    deltaheader = _CHANGEGROUPV2_DELTA_HEADER

    def __init__(self, repo, bundlecaps=None):
        super(cg2packer, self).__init__(repo, bundlecaps)
        if self._reorder is None:
            # Since generaldelta is directly supported by cg2, reordering
            # generally doesn't help, so we disable it by default (treating
            # bundle.reorder=auto just like bundle.reorder=False).
            self._reorder = False

    def deltaparent(self, revlog, rev, p1, p2, prev):
        dp = revlog.deltaparent(rev)
        # Use the stored delta base only when the remote is guaranteed to
        # have it (a parent or the previous chunk); otherwise fall back to
        # prev, which also avoids storing full revisions.
        if dp != nullrev and (dp == p1 or dp == p2 or dp == prev):
            return dp
        return prev

    def builddeltaheader(self, node, p1n, p2n, basenode, linknode, flags):
        # Do nothing with flags, it is implicitly 0 in cg1 and cg2
        return struct.pack(self.deltaheader, node, p1n, p2n, basenode, linknode)
831
831
class cg3packer(cg2packer):
    """Bundler producing version '03' changegroup streams.

    cg3 adds revlog flags to the delta header and supports sending tree
    manifest (directory) revlogs, each announced by a file-style header.
    """
    version = '03'
    deltaheader = _CHANGEGROUPV3_DELTA_HEADER

    def _packmanifests(self, dir, mfnodes, lookuplinknode):
        # Sub-directory manifest groups are introduced by their path;
        # the root manifest ('') gets no header, matching cg1/cg2.
        if dir:
            yield self.fileheader(dir)
        for chunk in self.group(mfnodes, self._repo.manifest.dirlog(dir),
                                lookuplinknode, units=_('manifests')):
            yield chunk

    def _manifestsdone(self):
        # cg3 uses an explicit empty chunk to close the manifest section.
        return self.close()

    def builddeltaheader(self, node, p1n, p2n, basenode, linknode, flags):
        return struct.pack(
            self.deltaheader, node, p1n, p2n, basenode, linknode, flags)
849
849
# Map of changegroup version -> (packer class, unpacker class).
_packermap = {
    '01': (cg1packer, cg1unpacker),
    # cg2 adds support for exchanging generaldelta
    '02': (cg2packer, cg2unpacker),
    # cg3 adds support for exchanging revlog flags and treemanifests
    '03': (cg3packer, cg3unpacker),
}
856
856
def allsupportedversions(ui):
    """Return the set of changegroup versions enabled for this ui.

    '03' is only offered when one of the experimental options
    (changegroup3 or treemanifest) is turned on.
    """
    versions = set(_packermap.keys())
    versions.discard('03')
    if (ui.configbool('experimental', 'changegroup3') or
        ui.configbool('experimental', 'treemanifest')):
        versions.add('03')
    return versions
864
864
865 # Changegroup versions that can be applied to the repo
865 # Changegroup versions that can be applied to the repo
# Changegroup versions that can be applied to the repo
def supportedincomingversions(repo):
    versions = allsupportedversions(repo.ui)
    if 'treemanifest' in repo.requirements:
        # A treemanifest repo must be able to receive '03' regardless of
        # the experimental config knobs.
        versions.add('03')
    return versions
871
871
872 # Changegroup versions that can be created from the repo
872 # Changegroup versions that can be created from the repo
# Changegroup versions that can be created from the repo
def supportedoutgoingversions(repo):
    versions = allsupportedversions(repo.ui)
    if 'treemanifest' in repo.requirements:
        # Versions 01 and 02 support only flat manifests and it's just too
        # expensive to convert between the flat manifest and tree manifest on
        # the fly. Since tree manifests are hashed differently, all of history
        # would have to be converted. Instead, we simply don't even pretend to
        # support versions 01 and 02.
        versions.discard('01')
        versions.discard('02')
        versions.add('03')
    return versions
885
885
def safeversion(repo):
    """Return the smallest changegroup version all clients must support.

    For example, all hg versions that support generaldelta also support
    changegroup 02, so a generaldelta repo never needs to offer 01.
    """
    versions = supportedoutgoingversions(repo)
    if 'generaldelta' in repo.requirements:
        versions.discard('01')
    assert versions
    return min(versions)
895
895
def getbundler(version, repo, bundlecaps=None):
    """Instantiate the packer class for 'version' against 'repo'."""
    assert version in supportedoutgoingversions(repo)
    packercls = _packermap[version][0]
    return packercls(repo, bundlecaps)
899
899
def getunbundler(version, fh, alg, extras=None):
    """Instantiate the unpacker class for 'version' over stream 'fh'."""
    unpackercls = _packermap[version][1]
    return unpackercls(fh, alg, extras=extras)
902
902
def _changegroupinfo(repo, nodes, source):
    """Report how many (and, when debugging, which) changesets are bundled."""
    if repo.ui.verbose or source == 'bundle':
        repo.ui.status(_("%d changesets found\n") % len(nodes))
    if not repo.ui.debugflag:
        return
    repo.ui.debug("list of changesets:\n")
    for node in nodes:
        repo.ui.debug("%s\n" % hex(node))
910
910
def getsubsetraw(repo, outgoing, bundler, source, fastpath=False):
    """Generate raw changegroup chunks for an 'outgoing' set.

    Fires the 'preoutgoing' hook before generation starts and returns the
    bundler's chunk generator.
    """
    repo = repo.unfiltered()
    commonrevs = outgoing.common
    csets = outgoing.missing
    heads = outgoing.missingheads
    # We go through the fast path if we get told to, or if all (unfiltered)
    # heads have been requested (since we then know all linkrevs will
    # be pulled by the client).
    heads.sort()
    fastpathlinkrev = fastpath or (
        repo.filtername is None and heads == sorted(repo.heads()))

    repo.hook('preoutgoing', throw=True, source=source)
    _changegroupinfo(repo, csets, source)
    return bundler.generate(commonrevs, csets, fastpathlinkrev, source)
926
926
def getsubset(repo, outgoing, bundler, source, fastpath=False):
    """Like getsubsetraw, but wrap the chunks in an unbundler object."""
    gengroup = getsubsetraw(repo, outgoing, bundler, source, fastpath)
    extras = {'clcount': len(outgoing.missing)}
    return getunbundler(bundler.version, util.chunkbuffer(gengroup), None,
                        extras)
931
931
def changegroupsubset(repo, roots, heads, source, version='01'):
    """Compute a changegroup consisting of all the nodes that are
    descendants of any of the roots and ancestors of any of the heads.
    Return a chunkbuffer object whose read() method will return
    successive changegroup chunks.

    It is fairly complex as determining which filenodes and which
    manifest nodes need to be included for the changeset to be complete
    is non-trivial.

    Another wrinkle is doing the reverse, figuring out which changeset in
    the changegroup a particular filenode or manifestnode belongs to.
    """
    outgoing = discovery.outgoing(repo, missingroots=roots, missingheads=heads)
    return getsubset(repo, outgoing, getbundler(version, repo), source)
948
948
def getlocalchangegroupraw(repo, source, outgoing, bundlecaps=None,
                           version='01'):
    """Like getbundle, but taking a discovery.outgoing as an argument.

    This is only implemented for local repos and reuses potentially
    precomputed sets in outgoing. Returns a raw changegroup generator."""
    # Nothing to bundle at all -> signal that with None rather than an
    # empty generator.
    if outgoing.missing:
        packer = getbundler(version, repo, bundlecaps)
        return getsubsetraw(repo, outgoing, packer, source)
    return None
959
959
def getlocalchangegroup(repo, source, outgoing, bundlecaps=None,
                        version='01'):
    """Like getbundle, but taking a discovery.outgoing as an argument.

    This is only implemented for local repos and reuses potentially
    precomputed sets in outgoing."""
    # Nothing missing means there is nothing to bundle.
    if outgoing.missing:
        packer = getbundler(version, repo, bundlecaps)
        return getsubset(repo, outgoing, packer, source)
    return None
970
970
def computeoutgoing(repo, heads, common):
    """Computes which revs are outgoing given a set of common
    and a set of heads.

    This is a separate function so extensions can have access to
    the logic.

    Returns a discovery.outgoing object.
    """
    cl = repo.changelog
    if common:
        # Drop common nodes the local changelog does not actually know
        # about (discovery may report nodes we never received).
        common = [n for n in common if cl.hasnode(n)]
    else:
        common = [nullid]
    # No explicit heads -> bundle up to all local heads.
    return discovery.outgoing(repo, common, heads or cl.heads())
989
989
def getchangegroup(repo, source, outgoing, bundlecaps=None,
                   version='01'):
    """Like changegroupsubset, but taking a discovery.outgoing as an argument.

    This is only implemented for local repos and reuses potentially
    precomputed sets in outgoing. Returns None when 'outgoing' has no
    missing changesets.
    """
    # NOTE: this function used to take (heads, common) and compute the
    # outgoing set itself; callers must now pass a precomputed
    # discovery.outgoing (see computeoutgoing). The old docstring, which
    # still described the removed heads/common parameters, is fixed here.
    return getlocalchangegroup(repo, source, outgoing, bundlecaps=bundlecaps,
                               version=version)
1003
1002
def changegroup(repo, basenodes, source):
    """Return a changegroup of everything descending from 'basenodes'.

    To avoid a race we use changegroupsubset() (issue1320).
    """
    return changegroupsubset(repo, basenodes, repo.heads(), source)
1007
1006
def _addchangegroupfiles(repo, source, revmap, trp, expectedfiles, needfiles):
    """Apply the filelog part of a changegroup stream to the repo.

    source is the unbundler being consumed; revmap maps changelog nodes to
    revisions; trp is the transaction proxy; expectedfiles is the total
    file count (for progress reporting only); needfiles maps filename ->
    set of filenodes that the incoming changesets reference and must be
    present once the stream has been applied.

    Returns (revisions, files): number of file revisions and number of
    filelogs added.  Raises error.Abort on a truncated, spurious or
    censored stream, or when referenced file data is still missing.
    """
    revisions = 0
    files = 0
    # iter(callable, sentinel): filelogheader() returns {} at end of stream.
    for chunkdata in iter(source.filelogheader, {}):
        files += 1
        f = chunkdata["filename"]
        repo.ui.debug("adding %s revisions\n" % f)
        repo.ui.progress(_('files'), files, unit=_('files'),
                         total=expectedfiles)
        fl = repo.file(f)
        o = len(fl)  # revlog length before addgroup, to count what was added
        try:
            if not fl.addgroup(source, revmap, trp):
                raise error.Abort(_("received file revlog group is empty"))
        except error.CensoredBaseError as e:
            raise error.Abort(_("received delta base is censored: %s") % e)
        revisions += len(fl) - o
        if f in needfiles:
            needs = needfiles[f]
            # Tick off every newly added node; anything not expected is
            # a corrupt/malicious stream.
            for new in xrange(o, len(fl)):
                n = fl.node(new)
                if n in needs:
                    needs.remove(n)
                else:
                    raise error.Abort(
                        _("received spurious file revlog entry"))
            if not needs:
                del needfiles[f]
    repo.ui.progress(_('files'), None)

    # Whatever is left in needfiles was referenced by the incoming
    # changesets but never delivered; verify it already exists locally.
    for f, needs in needfiles.iteritems():
        fl = repo.file(f)
        for n in needs:
            try:
                fl.rev(n)
            except error.LookupError:
                raise error.Abort(
                    _('missing file data for %s:%s - run hg verify') %
                    (f, hex(n)))

    return revisions, files
@@ -1,7282 +1,7283 b''
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import difflib
10 import difflib
11 import errno
11 import errno
12 import operator
12 import operator
13 import os
13 import os
14 import random
14 import random
15 import re
15 import re
16 import shlex
16 import shlex
17 import socket
17 import socket
18 import sys
18 import sys
19 import tempfile
19 import tempfile
20 import time
20 import time
21
21
22 from .i18n import _
22 from .i18n import _
23 from .node import (
23 from .node import (
24 bin,
24 bin,
25 hex,
25 hex,
26 nullhex,
26 nullhex,
27 nullid,
27 nullid,
28 nullrev,
28 nullrev,
29 short,
29 short,
30 )
30 )
31 from . import (
31 from . import (
32 archival,
32 archival,
33 bookmarks,
33 bookmarks,
34 bundle2,
34 bundle2,
35 changegroup,
35 changegroup,
36 cmdutil,
36 cmdutil,
37 commandserver,
37 commandserver,
38 context,
38 context,
39 copies,
39 copies,
40 dagparser,
40 dagparser,
41 dagutil,
41 dagutil,
42 destutil,
42 destutil,
43 discovery,
43 discovery,
44 encoding,
44 encoding,
45 error,
45 error,
46 exchange,
46 exchange,
47 extensions,
47 extensions,
48 fileset,
48 fileset,
49 formatter,
49 formatter,
50 graphmod,
50 graphmod,
51 hbisect,
51 hbisect,
52 help,
52 help,
53 hg,
53 hg,
54 hgweb,
54 hgweb,
55 localrepo,
55 localrepo,
56 lock as lockmod,
56 lock as lockmod,
57 merge as mergemod,
57 merge as mergemod,
58 minirst,
58 minirst,
59 obsolete,
59 obsolete,
60 patch,
60 patch,
61 phases,
61 phases,
62 policy,
62 policy,
63 pvec,
63 pvec,
64 repair,
64 repair,
65 revlog,
65 revlog,
66 revset,
66 revset,
67 scmutil,
67 scmutil,
68 setdiscovery,
68 setdiscovery,
69 simplemerge,
69 simplemerge,
70 sshserver,
70 sshserver,
71 streamclone,
71 streamclone,
72 templatekw,
72 templatekw,
73 templater,
73 templater,
74 treediscovery,
74 treediscovery,
75 ui as uimod,
75 ui as uimod,
76 util,
76 util,
77 )
77 )
78
78
release = lockmod.release

# Command registration table: populated by the @command decorator below.
table = {}

command = cmdutil.command(table)

# label constants
# until 3.5, bookmarks.current was the advertised name, not
# bookmarks.active, so we must use both to avoid breaking old
# custom styles
activebookmarklabel = 'bookmarks.active bookmarks.current'

# common command options
# Each entry is (shortname, longname, default, help[, metavar]).

globalopts = [
    ('R', 'repository', '',
     _('repository root directory or name of overlay bundle file'),
     _('REPO')),
    ('', 'cwd', '',
     _('change working directory'), _('DIR')),
    ('y', 'noninteractive', None,
     _('do not prompt, automatically pick the first choice for all prompts')),
    ('q', 'quiet', None, _('suppress output')),
    ('v', 'verbose', None, _('enable additional output')),
    ('', 'config', [],
     _('set/override config option (use \'section.name=value\')'),
     _('CONFIG')),
    ('', 'debug', None, _('enable debugging output')),
    ('', 'debugger', None, _('start debugger')),
    ('', 'encoding', encoding.encoding, _('set the charset encoding'),
     _('ENCODE')),
    ('', 'encodingmode', encoding.encodingmode,
     _('set the charset encoding mode'), _('MODE')),
    ('', 'traceback', None, _('always print a traceback on exception')),
    ('', 'time', None, _('time how long the command takes')),
    ('', 'profile', None, _('print command execution profile')),
    ('', 'version', None, _('output version information and exit')),
    ('h', 'help', None, _('display help and exit')),
    ('', 'hidden', False, _('consider hidden changesets')),
]

dryrunopts = [('n', 'dry-run', None,
               _('do not perform actions, just print output'))]

remoteopts = [
    ('e', 'ssh', '',
     _('specify ssh command to use'), _('CMD')),
    ('', 'remotecmd', '',
     _('specify hg command to run on the remote side'), _('CMD')),
    ('', 'insecure', None,
     _('do not verify server certificate (ignoring web.cacerts config)')),
]

walkopts = [
    ('I', 'include', [],
     _('include names matching the given patterns'), _('PATTERN')),
    ('X', 'exclude', [],
     _('exclude names matching the given patterns'), _('PATTERN')),
]

commitopts = [
    ('m', 'message', '',
     _('use text as commit message'), _('TEXT')),
    ('l', 'logfile', '',
     _('read commit message from file'), _('FILE')),
]

commitopts2 = [
    ('d', 'date', '',
     _('record the specified date as commit date'), _('DATE')),
    ('u', 'user', '',
     _('record the specified user as committer'), _('USER')),
]

# hidden for now
formatteropts = [
    ('T', 'template', '',
     _('display with template (EXPERIMENTAL)'), _('TEMPLATE')),
]

templateopts = [
    ('', 'style', '',
     _('display using template map file (DEPRECATED)'), _('STYLE')),
    ('T', 'template', '',
     _('display with template'), _('TEMPLATE')),
]

logopts = [
    ('p', 'patch', None, _('show patch')),
    ('g', 'git', None, _('use git extended diff format')),
    ('l', 'limit', '',
     _('limit number of changes displayed'), _('NUM')),
    ('M', 'no-merges', None, _('do not show merges')),
    ('', 'stat', None, _('output diffstat-style summary of changes')),
    ('G', 'graph', None, _("show the revision DAG")),
] + templateopts

diffopts = [
    ('a', 'text', None, _('treat all files as text')),
    ('g', 'git', None, _('use git extended diff format')),
    ('', 'nodates', None, _('omit dates from diff headers'))
]

diffwsopts = [
    ('w', 'ignore-all-space', None,
     _('ignore white space when comparing lines')),
    ('b', 'ignore-space-change', None,
     _('ignore changes in the amount of white space')),
    ('B', 'ignore-blank-lines', None,
     _('ignore changes whose lines are all blank')),
]

diffopts2 = [
    ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
    ('p', 'show-function', None, _('show which function each change is in')),
    ('', 'reverse', None, _('produce a diff that undoes the changes')),
] + diffwsopts + [
    ('U', 'unified', '',
     _('number of lines of context to show'), _('NUM')),
    ('', 'stat', None, _('output diffstat-style summary of changes')),
    ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
]

mergetoolopts = [
    ('t', 'tool', '', _('specify merge tool')),
]

similarityopts = [
    ('s', 'similarity', '',
     _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
]

subrepoopts = [
    ('S', 'subrepos', None,
     _('recurse into subrepositories'))
]

debugrevlogopts = [
    ('c', 'changelog', False, _('open changelog')),
    ('m', 'manifest', False, _('open manifest')),
    ('', 'dir', '', _('open directory manifest')),
]
221
221
222 # Commands start here, listed alphabetically
222 # Commands start here, listed alphabetically
223
223
@command('^add',
    walkopts + subrepoopts + dryrunopts,
    _('[OPTION]... [FILE]...'),
    inferrepo=True)
def add(ui, repo, *pats, **opts):
    """add the specified files on the next commit

    Schedule files to be version controlled and added to the
    repository.

    The files will be added to the repository at the next commit. To
    undo an add before that, see :hg:`forget`.

    If no names are given, add all files to the repository (except
    files matching ``.hgignore``).

    .. container:: verbose

       Examples:

         - New (unknown) files are added
           automatically by :hg:`add`::

             $ ls
             foo.c
             $ hg status
             ? foo.c
             $ hg add
             adding foo.c
             $ hg status
             A foo.c

         - Specific files to be added can be specified::

             $ ls
             bar.c  foo.c
             $ hg status
             ? bar.c
             ? foo.c
             $ hg add bar.c
             $ hg status
             A bar.c
             ? foo.c

    Returns 0 if all files are successfully added.
    """

    m = scmutil.match(repo[None], pats, opts)
    # cmdutil.add returns the list of files it could not add.
    rejected = cmdutil.add(ui, repo, m, "", False, **opts)
    # Exit status 1 when anything was rejected, 0 otherwise.
    return rejected and 1 or 0
274
274
@command('addremove',
    similarityopts + subrepoopts + walkopts + dryrunopts,
    _('[OPTION]... [FILE]...'),
    inferrepo=True)
def addremove(ui, repo, *pats, **opts):
    """add all new files, delete all missing files

    Add all new files and remove all missing files from the
    repository.

    Unless names are given, new files are ignored if they match any of
    the patterns in ``.hgignore``. As with add, these changes take
    effect at the next commit.

    Use the -s/--similarity option to detect renamed files. This
    option takes a percentage between 0 (disabled) and 100 (files must
    be identical) as its parameter. With a parameter greater than 0,
    this compares every removed file with every added file and records
    those similar enough as renames. Detecting renamed files this way
    can be expensive. After using this option, :hg:`status -C` can be
    used to check which files were identified as moved or renamed. If
    not specified, -s/--similarity defaults to 100 and only renames of
    identical files are detected.

    .. container:: verbose

       Examples:

         - A number of files (bar.c and foo.c) are new,
           while foobar.c has been removed (without using :hg:`remove`)
           from the repository::

             $ ls
             bar.c foo.c
             $ hg status
             ! foobar.c
             ? bar.c
             ? foo.c
             $ hg addremove
             adding bar.c
             adding foo.c
             removing foobar.c
             $ hg status
             A bar.c
             A foo.c
             R foobar.c

         - A file foobar.c was moved to foo.c without using :hg:`rename`.
           Afterwards, it was edited slightly::

             $ ls
             foo.c
             $ hg status
             ! foobar.c
             ? foo.c
             $ hg addremove --similarity 90
             removing foobar.c
             adding foo.c
             recording removal of foobar.c as rename to foo.c (94% similar)
             $ hg status -C
             A foo.c
               foobar.c
             R foobar.c

    Returns 0 if all files are successfully added.
    """
    try:
        # Empty string (option not given) falls back to the default of 100.
        sim = float(opts.get('similarity') or 100)
    except ValueError:
        raise error.Abort(_('similarity must be a number'))
    if sim < 0 or sim > 100:
        raise error.Abort(_('similarity must be between 0 and 100'))
    matcher = scmutil.match(repo[None], pats, opts)
    # scmutil.addremove expects similarity as a 0.0-1.0 ratio.
    return scmutil.addremove(repo, matcher, "", opts, similarity=sim / 100.0)
349
349
350 @command('^annotate|blame',
350 @command('^annotate|blame',
351 [('r', 'rev', '', _('annotate the specified revision'), _('REV')),
351 [('r', 'rev', '', _('annotate the specified revision'), _('REV')),
352 ('', 'follow', None,
352 ('', 'follow', None,
353 _('follow copies/renames and list the filename (DEPRECATED)')),
353 _('follow copies/renames and list the filename (DEPRECATED)')),
354 ('', 'no-follow', None, _("don't follow copies and renames")),
354 ('', 'no-follow', None, _("don't follow copies and renames")),
355 ('a', 'text', None, _('treat all files as text')),
355 ('a', 'text', None, _('treat all files as text')),
356 ('u', 'user', None, _('list the author (long with -v)')),
356 ('u', 'user', None, _('list the author (long with -v)')),
357 ('f', 'file', None, _('list the filename')),
357 ('f', 'file', None, _('list the filename')),
358 ('d', 'date', None, _('list the date (short with -q)')),
358 ('d', 'date', None, _('list the date (short with -q)')),
359 ('n', 'number', None, _('list the revision number (default)')),
359 ('n', 'number', None, _('list the revision number (default)')),
360 ('c', 'changeset', None, _('list the changeset')),
360 ('c', 'changeset', None, _('list the changeset')),
361 ('l', 'line-number', None, _('show line number at the first appearance'))
361 ('l', 'line-number', None, _('show line number at the first appearance'))
362 ] + diffwsopts + walkopts + formatteropts,
362 ] + diffwsopts + walkopts + formatteropts,
363 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'),
363 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'),
364 inferrepo=True)
364 inferrepo=True)
365 def annotate(ui, repo, *pats, **opts):
365 def annotate(ui, repo, *pats, **opts):
366 """show changeset information by line for each file
366 """show changeset information by line for each file
367
367
368 List changes in files, showing the revision id responsible for
368 List changes in files, showing the revision id responsible for
369 each line.
369 each line.
370
370
371 This command is useful for discovering when a change was made and
371 This command is useful for discovering when a change was made and
372 by whom.
372 by whom.
373
373
374 If you include --file, --user, or --date, the revision number is
374 If you include --file, --user, or --date, the revision number is
375 suppressed unless you also include --number.
375 suppressed unless you also include --number.
376
376
377 Without the -a/--text option, annotate will avoid processing files
377 Without the -a/--text option, annotate will avoid processing files
378 it detects as binary. With -a, annotate will annotate the file
378 it detects as binary. With -a, annotate will annotate the file
379 anyway, although the results will probably be neither useful
379 anyway, although the results will probably be neither useful
380 nor desirable.
380 nor desirable.
381
381
382 Returns 0 on success.
382 Returns 0 on success.
383 """
383 """
384 if not pats:
384 if not pats:
385 raise error.Abort(_('at least one filename or pattern is required'))
385 raise error.Abort(_('at least one filename or pattern is required'))
386
386
387 if opts.get('follow'):
387 if opts.get('follow'):
388 # --follow is deprecated and now just an alias for -f/--file
388 # --follow is deprecated and now just an alias for -f/--file
389 # to mimic the behavior of Mercurial before version 1.5
389 # to mimic the behavior of Mercurial before version 1.5
390 opts['file'] = True
390 opts['file'] = True
391
391
392 ctx = scmutil.revsingle(repo, opts.get('rev'))
392 ctx = scmutil.revsingle(repo, opts.get('rev'))
393
393
394 fm = ui.formatter('annotate', opts)
394 fm = ui.formatter('annotate', opts)
395 if ui.quiet:
395 if ui.quiet:
396 datefunc = util.shortdate
396 datefunc = util.shortdate
397 else:
397 else:
398 datefunc = util.datestr
398 datefunc = util.datestr
399 if ctx.rev() is None:
399 if ctx.rev() is None:
400 def hexfn(node):
400 def hexfn(node):
401 if node is None:
401 if node is None:
402 return None
402 return None
403 else:
403 else:
404 return fm.hexfunc(node)
404 return fm.hexfunc(node)
405 if opts.get('changeset'):
405 if opts.get('changeset'):
406 # omit "+" suffix which is appended to node hex
406 # omit "+" suffix which is appended to node hex
407 def formatrev(rev):
407 def formatrev(rev):
408 if rev is None:
408 if rev is None:
409 return '%d' % ctx.p1().rev()
409 return '%d' % ctx.p1().rev()
410 else:
410 else:
411 return '%d' % rev
411 return '%d' % rev
412 else:
412 else:
413 def formatrev(rev):
413 def formatrev(rev):
414 if rev is None:
414 if rev is None:
415 return '%d+' % ctx.p1().rev()
415 return '%d+' % ctx.p1().rev()
416 else:
416 else:
417 return '%d ' % rev
417 return '%d ' % rev
418 def formathex(hex):
418 def formathex(hex):
419 if hex is None:
419 if hex is None:
420 return '%s+' % fm.hexfunc(ctx.p1().node())
420 return '%s+' % fm.hexfunc(ctx.p1().node())
421 else:
421 else:
422 return '%s ' % hex
422 return '%s ' % hex
423 else:
423 else:
424 hexfn = fm.hexfunc
424 hexfn = fm.hexfunc
425 formatrev = formathex = str
425 formatrev = formathex = str
426
426
427 opmap = [('user', ' ', lambda x: x[0].user(), ui.shortuser),
427 opmap = [('user', ' ', lambda x: x[0].user(), ui.shortuser),
428 ('number', ' ', lambda x: x[0].rev(), formatrev),
428 ('number', ' ', lambda x: x[0].rev(), formatrev),
429 ('changeset', ' ', lambda x: hexfn(x[0].node()), formathex),
429 ('changeset', ' ', lambda x: hexfn(x[0].node()), formathex),
430 ('date', ' ', lambda x: x[0].date(), util.cachefunc(datefunc)),
430 ('date', ' ', lambda x: x[0].date(), util.cachefunc(datefunc)),
431 ('file', ' ', lambda x: x[0].path(), str),
431 ('file', ' ', lambda x: x[0].path(), str),
432 ('line_number', ':', lambda x: x[1], str),
432 ('line_number', ':', lambda x: x[1], str),
433 ]
433 ]
434 fieldnamemap = {'number': 'rev', 'changeset': 'node'}
434 fieldnamemap = {'number': 'rev', 'changeset': 'node'}
435
435
436 if (not opts.get('user') and not opts.get('changeset')
436 if (not opts.get('user') and not opts.get('changeset')
437 and not opts.get('date') and not opts.get('file')):
437 and not opts.get('date') and not opts.get('file')):
438 opts['number'] = True
438 opts['number'] = True
439
439
440 linenumber = opts.get('line_number') is not None
440 linenumber = opts.get('line_number') is not None
441 if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
441 if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
442 raise error.Abort(_('at least one of -n/-c is required for -l'))
442 raise error.Abort(_('at least one of -n/-c is required for -l'))
443
443
444 if fm:
444 if fm:
445 def makefunc(get, fmt):
445 def makefunc(get, fmt):
446 return get
446 return get
447 else:
447 else:
448 def makefunc(get, fmt):
448 def makefunc(get, fmt):
449 return lambda x: fmt(get(x))
449 return lambda x: fmt(get(x))
450 funcmap = [(makefunc(get, fmt), sep) for op, sep, get, fmt in opmap
450 funcmap = [(makefunc(get, fmt), sep) for op, sep, get, fmt in opmap
451 if opts.get(op)]
451 if opts.get(op)]
452 funcmap[0] = (funcmap[0][0], '') # no separator in front of first column
452 funcmap[0] = (funcmap[0][0], '') # no separator in front of first column
453 fields = ' '.join(fieldnamemap.get(op, op) for op, sep, get, fmt in opmap
453 fields = ' '.join(fieldnamemap.get(op, op) for op, sep, get, fmt in opmap
454 if opts.get(op))
454 if opts.get(op))
455
455
456 def bad(x, y):
456 def bad(x, y):
457 raise error.Abort("%s: %s" % (x, y))
457 raise error.Abort("%s: %s" % (x, y))
458
458
459 m = scmutil.match(ctx, pats, opts, badfn=bad)
459 m = scmutil.match(ctx, pats, opts, badfn=bad)
460
460
461 follow = not opts.get('no_follow')
461 follow = not opts.get('no_follow')
462 diffopts = patch.difffeatureopts(ui, opts, section='annotate',
462 diffopts = patch.difffeatureopts(ui, opts, section='annotate',
463 whitespace=True)
463 whitespace=True)
464 for abs in ctx.walk(m):
464 for abs in ctx.walk(m):
465 fctx = ctx[abs]
465 fctx = ctx[abs]
466 if not opts.get('text') and util.binary(fctx.data()):
466 if not opts.get('text') and util.binary(fctx.data()):
467 fm.plain(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
467 fm.plain(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
468 continue
468 continue
469
469
470 lines = fctx.annotate(follow=follow, linenumber=linenumber,
470 lines = fctx.annotate(follow=follow, linenumber=linenumber,
471 diffopts=diffopts)
471 diffopts=diffopts)
472 if not lines:
472 if not lines:
473 continue
473 continue
474 formats = []
474 formats = []
475 pieces = []
475 pieces = []
476
476
477 for f, sep in funcmap:
477 for f, sep in funcmap:
478 l = [f(n) for n, dummy in lines]
478 l = [f(n) for n, dummy in lines]
479 if fm:
479 if fm:
480 formats.append(['%s' for x in l])
480 formats.append(['%s' for x in l])
481 else:
481 else:
482 sizes = [encoding.colwidth(x) for x in l]
482 sizes = [encoding.colwidth(x) for x in l]
483 ml = max(sizes)
483 ml = max(sizes)
484 formats.append([sep + ' ' * (ml - w) + '%s' for w in sizes])
484 formats.append([sep + ' ' * (ml - w) + '%s' for w in sizes])
485 pieces.append(l)
485 pieces.append(l)
486
486
487 for f, p, l in zip(zip(*formats), zip(*pieces), lines):
487 for f, p, l in zip(zip(*formats), zip(*pieces), lines):
488 fm.startitem()
488 fm.startitem()
489 fm.write(fields, "".join(f), *p)
489 fm.write(fields, "".join(f), *p)
490 fm.write('line', ": %s", l[1])
490 fm.write('line', ": %s", l[1])
491
491
492 if not lines[-1][1].endswith('\n'):
492 if not lines[-1][1].endswith('\n'):
493 fm.plain('\n')
493 fm.plain('\n')
494
494
495 fm.end()
495 fm.end()
496
496
497 @command('archive',
497 @command('archive',
498 [('', 'no-decode', None, _('do not pass files through decoders')),
498 [('', 'no-decode', None, _('do not pass files through decoders')),
499 ('p', 'prefix', '', _('directory prefix for files in archive'),
499 ('p', 'prefix', '', _('directory prefix for files in archive'),
500 _('PREFIX')),
500 _('PREFIX')),
501 ('r', 'rev', '', _('revision to distribute'), _('REV')),
501 ('r', 'rev', '', _('revision to distribute'), _('REV')),
502 ('t', 'type', '', _('type of distribution to create'), _('TYPE')),
502 ('t', 'type', '', _('type of distribution to create'), _('TYPE')),
503 ] + subrepoopts + walkopts,
503 ] + subrepoopts + walkopts,
504 _('[OPTION]... DEST'))
504 _('[OPTION]... DEST'))
505 def archive(ui, repo, dest, **opts):
505 def archive(ui, repo, dest, **opts):
506 '''create an unversioned archive of a repository revision
506 '''create an unversioned archive of a repository revision
507
507
508 By default, the revision used is the parent of the working
508 By default, the revision used is the parent of the working
509 directory; use -r/--rev to specify a different revision.
509 directory; use -r/--rev to specify a different revision.
510
510
511 The archive type is automatically detected based on file
511 The archive type is automatically detected based on file
512 extension (to override, use -t/--type).
512 extension (to override, use -t/--type).
513
513
514 .. container:: verbose
514 .. container:: verbose
515
515
516 Examples:
516 Examples:
517
517
518 - create a zip file containing the 1.0 release::
518 - create a zip file containing the 1.0 release::
519
519
520 hg archive -r 1.0 project-1.0.zip
520 hg archive -r 1.0 project-1.0.zip
521
521
522 - create a tarball excluding .hg files::
522 - create a tarball excluding .hg files::
523
523
524 hg archive project.tar.gz -X ".hg*"
524 hg archive project.tar.gz -X ".hg*"
525
525
526 Valid types are:
526 Valid types are:
527
527
528 :``files``: a directory full of files (default)
528 :``files``: a directory full of files (default)
529 :``tar``: tar archive, uncompressed
529 :``tar``: tar archive, uncompressed
530 :``tbz2``: tar archive, compressed using bzip2
530 :``tbz2``: tar archive, compressed using bzip2
531 :``tgz``: tar archive, compressed using gzip
531 :``tgz``: tar archive, compressed using gzip
532 :``uzip``: zip archive, uncompressed
532 :``uzip``: zip archive, uncompressed
533 :``zip``: zip archive, compressed using deflate
533 :``zip``: zip archive, compressed using deflate
534
534
535 The exact name of the destination archive or directory is given
535 The exact name of the destination archive or directory is given
536 using a format string; see :hg:`help export` for details.
536 using a format string; see :hg:`help export` for details.
537
537
538 Each member added to an archive file has a directory prefix
538 Each member added to an archive file has a directory prefix
539 prepended. Use -p/--prefix to specify a format string for the
539 prepended. Use -p/--prefix to specify a format string for the
540 prefix. The default is the basename of the archive, with suffixes
540 prefix. The default is the basename of the archive, with suffixes
541 removed.
541 removed.
542
542
543 Returns 0 on success.
543 Returns 0 on success.
544 '''
544 '''
545
545
546 ctx = scmutil.revsingle(repo, opts.get('rev'))
546 ctx = scmutil.revsingle(repo, opts.get('rev'))
547 if not ctx:
547 if not ctx:
548 raise error.Abort(_('no working directory: please specify a revision'))
548 raise error.Abort(_('no working directory: please specify a revision'))
549 node = ctx.node()
549 node = ctx.node()
550 dest = cmdutil.makefilename(repo, dest, node)
550 dest = cmdutil.makefilename(repo, dest, node)
551 if os.path.realpath(dest) == repo.root:
551 if os.path.realpath(dest) == repo.root:
552 raise error.Abort(_('repository root cannot be destination'))
552 raise error.Abort(_('repository root cannot be destination'))
553
553
554 kind = opts.get('type') or archival.guesskind(dest) or 'files'
554 kind = opts.get('type') or archival.guesskind(dest) or 'files'
555 prefix = opts.get('prefix')
555 prefix = opts.get('prefix')
556
556
557 if dest == '-':
557 if dest == '-':
558 if kind == 'files':
558 if kind == 'files':
559 raise error.Abort(_('cannot archive plain files to stdout'))
559 raise error.Abort(_('cannot archive plain files to stdout'))
560 dest = cmdutil.makefileobj(repo, dest)
560 dest = cmdutil.makefileobj(repo, dest)
561 if not prefix:
561 if not prefix:
562 prefix = os.path.basename(repo.root) + '-%h'
562 prefix = os.path.basename(repo.root) + '-%h'
563
563
564 prefix = cmdutil.makefilename(repo, prefix, node)
564 prefix = cmdutil.makefilename(repo, prefix, node)
565 matchfn = scmutil.match(ctx, [], opts)
565 matchfn = scmutil.match(ctx, [], opts)
566 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
566 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
567 matchfn, prefix, subrepos=opts.get('subrepos'))
567 matchfn, prefix, subrepos=opts.get('subrepos'))
568
568
569 @command('backout',
569 @command('backout',
570 [('', 'merge', None, _('merge with old dirstate parent after backout')),
570 [('', 'merge', None, _('merge with old dirstate parent after backout')),
571 ('', 'commit', None,
571 ('', 'commit', None,
572 _('commit if no conflicts were encountered (DEPRECATED)')),
572 _('commit if no conflicts were encountered (DEPRECATED)')),
573 ('', 'no-commit', None, _('do not commit')),
573 ('', 'no-commit', None, _('do not commit')),
574 ('', 'parent', '',
574 ('', 'parent', '',
575 _('parent to choose when backing out merge (DEPRECATED)'), _('REV')),
575 _('parent to choose when backing out merge (DEPRECATED)'), _('REV')),
576 ('r', 'rev', '', _('revision to backout'), _('REV')),
576 ('r', 'rev', '', _('revision to backout'), _('REV')),
577 ('e', 'edit', False, _('invoke editor on commit messages')),
577 ('e', 'edit', False, _('invoke editor on commit messages')),
578 ] + mergetoolopts + walkopts + commitopts + commitopts2,
578 ] + mergetoolopts + walkopts + commitopts + commitopts2,
579 _('[OPTION]... [-r] REV'))
579 _('[OPTION]... [-r] REV'))
580 def backout(ui, repo, node=None, rev=None, **opts):
580 def backout(ui, repo, node=None, rev=None, **opts):
581 '''reverse effect of earlier changeset
581 '''reverse effect of earlier changeset
582
582
583 Prepare a new changeset with the effect of REV undone in the
583 Prepare a new changeset with the effect of REV undone in the
584 current working directory. If no conflicts were encountered,
584 current working directory. If no conflicts were encountered,
585 it will be committed immediately.
585 it will be committed immediately.
586
586
587 If REV is the parent of the working directory, then this new changeset
587 If REV is the parent of the working directory, then this new changeset
588 is committed automatically (unless --no-commit is specified).
588 is committed automatically (unless --no-commit is specified).
589
589
590 .. note::
590 .. note::
591
591
592 :hg:`backout` cannot be used to fix either an unwanted or
592 :hg:`backout` cannot be used to fix either an unwanted or
593 incorrect merge.
593 incorrect merge.
594
594
595 .. container:: verbose
595 .. container:: verbose
596
596
597 Examples:
597 Examples:
598
598
599 - Reverse the effect of the parent of the working directory.
599 - Reverse the effect of the parent of the working directory.
600 This backout will be committed immediately::
600 This backout will be committed immediately::
601
601
602 hg backout -r .
602 hg backout -r .
603
603
604 - Reverse the effect of previous bad revision 23::
604 - Reverse the effect of previous bad revision 23::
605
605
606 hg backout -r 23
606 hg backout -r 23
607
607
608 - Reverse the effect of previous bad revision 23 and
608 - Reverse the effect of previous bad revision 23 and
609 leave changes uncommitted::
609 leave changes uncommitted::
610
610
611 hg backout -r 23 --no-commit
611 hg backout -r 23 --no-commit
612 hg commit -m "Backout revision 23"
612 hg commit -m "Backout revision 23"
613
613
614 By default, the pending changeset will have one parent,
614 By default, the pending changeset will have one parent,
615 maintaining a linear history. With --merge, the pending
615 maintaining a linear history. With --merge, the pending
616 changeset will instead have two parents: the old parent of the
616 changeset will instead have two parents: the old parent of the
617 working directory and a new child of REV that simply undoes REV.
617 working directory and a new child of REV that simply undoes REV.
618
618
619 Before version 1.7, the behavior without --merge was equivalent
619 Before version 1.7, the behavior without --merge was equivalent
620 to specifying --merge followed by :hg:`update --clean .` to
620 to specifying --merge followed by :hg:`update --clean .` to
621 cancel the merge and leave the child of REV as a head to be
621 cancel the merge and leave the child of REV as a head to be
622 merged separately.
622 merged separately.
623
623
624 See :hg:`help dates` for a list of formats valid for -d/--date.
624 See :hg:`help dates` for a list of formats valid for -d/--date.
625
625
626 See :hg:`help revert` for a way to restore files to the state
626 See :hg:`help revert` for a way to restore files to the state
627 of another revision.
627 of another revision.
628
628
629 Returns 0 on success, 1 if nothing to backout or there are unresolved
629 Returns 0 on success, 1 if nothing to backout or there are unresolved
630 files.
630 files.
631 '''
631 '''
632 wlock = lock = None
632 wlock = lock = None
633 try:
633 try:
634 wlock = repo.wlock()
634 wlock = repo.wlock()
635 lock = repo.lock()
635 lock = repo.lock()
636 return _dobackout(ui, repo, node, rev, **opts)
636 return _dobackout(ui, repo, node, rev, **opts)
637 finally:
637 finally:
638 release(lock, wlock)
638 release(lock, wlock)
639
639
640 def _dobackout(ui, repo, node=None, rev=None, **opts):
640 def _dobackout(ui, repo, node=None, rev=None, **opts):
641 if opts.get('commit') and opts.get('no_commit'):
641 if opts.get('commit') and opts.get('no_commit'):
642 raise error.Abort(_("cannot use --commit with --no-commit"))
642 raise error.Abort(_("cannot use --commit with --no-commit"))
643 if opts.get('merge') and opts.get('no_commit'):
643 if opts.get('merge') and opts.get('no_commit'):
644 raise error.Abort(_("cannot use --merge with --no-commit"))
644 raise error.Abort(_("cannot use --merge with --no-commit"))
645
645
646 if rev and node:
646 if rev and node:
647 raise error.Abort(_("please specify just one revision"))
647 raise error.Abort(_("please specify just one revision"))
648
648
649 if not rev:
649 if not rev:
650 rev = node
650 rev = node
651
651
652 if not rev:
652 if not rev:
653 raise error.Abort(_("please specify a revision to backout"))
653 raise error.Abort(_("please specify a revision to backout"))
654
654
655 date = opts.get('date')
655 date = opts.get('date')
656 if date:
656 if date:
657 opts['date'] = util.parsedate(date)
657 opts['date'] = util.parsedate(date)
658
658
659 cmdutil.checkunfinished(repo)
659 cmdutil.checkunfinished(repo)
660 cmdutil.bailifchanged(repo)
660 cmdutil.bailifchanged(repo)
661 node = scmutil.revsingle(repo, rev).node()
661 node = scmutil.revsingle(repo, rev).node()
662
662
663 op1, op2 = repo.dirstate.parents()
663 op1, op2 = repo.dirstate.parents()
664 if not repo.changelog.isancestor(node, op1):
664 if not repo.changelog.isancestor(node, op1):
665 raise error.Abort(_('cannot backout change that is not an ancestor'))
665 raise error.Abort(_('cannot backout change that is not an ancestor'))
666
666
667 p1, p2 = repo.changelog.parents(node)
667 p1, p2 = repo.changelog.parents(node)
668 if p1 == nullid:
668 if p1 == nullid:
669 raise error.Abort(_('cannot backout a change with no parents'))
669 raise error.Abort(_('cannot backout a change with no parents'))
670 if p2 != nullid:
670 if p2 != nullid:
671 if not opts.get('parent'):
671 if not opts.get('parent'):
672 raise error.Abort(_('cannot backout a merge changeset'))
672 raise error.Abort(_('cannot backout a merge changeset'))
673 p = repo.lookup(opts['parent'])
673 p = repo.lookup(opts['parent'])
674 if p not in (p1, p2):
674 if p not in (p1, p2):
675 raise error.Abort(_('%s is not a parent of %s') %
675 raise error.Abort(_('%s is not a parent of %s') %
676 (short(p), short(node)))
676 (short(p), short(node)))
677 parent = p
677 parent = p
678 else:
678 else:
679 if opts.get('parent'):
679 if opts.get('parent'):
680 raise error.Abort(_('cannot use --parent on non-merge changeset'))
680 raise error.Abort(_('cannot use --parent on non-merge changeset'))
681 parent = p1
681 parent = p1
682
682
683 # the backout should appear on the same branch
683 # the backout should appear on the same branch
684 branch = repo.dirstate.branch()
684 branch = repo.dirstate.branch()
685 bheads = repo.branchheads(branch)
685 bheads = repo.branchheads(branch)
686 rctx = scmutil.revsingle(repo, hex(parent))
686 rctx = scmutil.revsingle(repo, hex(parent))
687 if not opts.get('merge') and op1 != node:
687 if not opts.get('merge') and op1 != node:
688 dsguard = cmdutil.dirstateguard(repo, 'backout')
688 dsguard = cmdutil.dirstateguard(repo, 'backout')
689 try:
689 try:
690 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
690 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
691 'backout')
691 'backout')
692 stats = mergemod.update(repo, parent, True, True, node, False)
692 stats = mergemod.update(repo, parent, True, True, node, False)
693 repo.setparents(op1, op2)
693 repo.setparents(op1, op2)
694 dsguard.close()
694 dsguard.close()
695 hg._showstats(repo, stats)
695 hg._showstats(repo, stats)
696 if stats[3]:
696 if stats[3]:
697 repo.ui.status(_("use 'hg resolve' to retry unresolved "
697 repo.ui.status(_("use 'hg resolve' to retry unresolved "
698 "file merges\n"))
698 "file merges\n"))
699 return 1
699 return 1
700 finally:
700 finally:
701 ui.setconfig('ui', 'forcemerge', '', '')
701 ui.setconfig('ui', 'forcemerge', '', '')
702 lockmod.release(dsguard)
702 lockmod.release(dsguard)
703 else:
703 else:
704 hg.clean(repo, node, show_stats=False)
704 hg.clean(repo, node, show_stats=False)
705 repo.dirstate.setbranch(branch)
705 repo.dirstate.setbranch(branch)
706 cmdutil.revert(ui, repo, rctx, repo.dirstate.parents())
706 cmdutil.revert(ui, repo, rctx, repo.dirstate.parents())
707
707
708 if opts.get('no_commit'):
708 if opts.get('no_commit'):
709 msg = _("changeset %s backed out, "
709 msg = _("changeset %s backed out, "
710 "don't forget to commit.\n")
710 "don't forget to commit.\n")
711 ui.status(msg % short(node))
711 ui.status(msg % short(node))
712 return 0
712 return 0
713
713
714 def commitfunc(ui, repo, message, match, opts):
714 def commitfunc(ui, repo, message, match, opts):
715 editform = 'backout'
715 editform = 'backout'
716 e = cmdutil.getcommiteditor(editform=editform, **opts)
716 e = cmdutil.getcommiteditor(editform=editform, **opts)
717 if not message:
717 if not message:
718 # we don't translate commit messages
718 # we don't translate commit messages
719 message = "Backed out changeset %s" % short(node)
719 message = "Backed out changeset %s" % short(node)
720 e = cmdutil.getcommiteditor(edit=True, editform=editform)
720 e = cmdutil.getcommiteditor(edit=True, editform=editform)
721 return repo.commit(message, opts.get('user'), opts.get('date'),
721 return repo.commit(message, opts.get('user'), opts.get('date'),
722 match, editor=e)
722 match, editor=e)
723 newnode = cmdutil.commit(ui, repo, commitfunc, [], opts)
723 newnode = cmdutil.commit(ui, repo, commitfunc, [], opts)
724 if not newnode:
724 if not newnode:
725 ui.status(_("nothing changed\n"))
725 ui.status(_("nothing changed\n"))
726 return 1
726 return 1
727 cmdutil.commitstatus(repo, newnode, branch, bheads)
727 cmdutil.commitstatus(repo, newnode, branch, bheads)
728
728
729 def nice(node):
729 def nice(node):
730 return '%d:%s' % (repo.changelog.rev(node), short(node))
730 return '%d:%s' % (repo.changelog.rev(node), short(node))
731 ui.status(_('changeset %s backs out changeset %s\n') %
731 ui.status(_('changeset %s backs out changeset %s\n') %
732 (nice(repo.changelog.tip()), nice(node)))
732 (nice(repo.changelog.tip()), nice(node)))
733 if opts.get('merge') and op1 != node:
733 if opts.get('merge') and op1 != node:
734 hg.clean(repo, op1, show_stats=False)
734 hg.clean(repo, op1, show_stats=False)
735 ui.status(_('merging with changeset %s\n')
735 ui.status(_('merging with changeset %s\n')
736 % nice(repo.changelog.tip()))
736 % nice(repo.changelog.tip()))
737 try:
737 try:
738 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
738 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
739 'backout')
739 'backout')
740 return hg.merge(repo, hex(repo.changelog.tip()))
740 return hg.merge(repo, hex(repo.changelog.tip()))
741 finally:
741 finally:
742 ui.setconfig('ui', 'forcemerge', '', '')
742 ui.setconfig('ui', 'forcemerge', '', '')
743 return 0
743 return 0
744
744
745 @command('bisect',
745 @command('bisect',
746 [('r', 'reset', False, _('reset bisect state')),
746 [('r', 'reset', False, _('reset bisect state')),
747 ('g', 'good', False, _('mark changeset good')),
747 ('g', 'good', False, _('mark changeset good')),
748 ('b', 'bad', False, _('mark changeset bad')),
748 ('b', 'bad', False, _('mark changeset bad')),
749 ('s', 'skip', False, _('skip testing changeset')),
749 ('s', 'skip', False, _('skip testing changeset')),
750 ('e', 'extend', False, _('extend the bisect range')),
750 ('e', 'extend', False, _('extend the bisect range')),
751 ('c', 'command', '', _('use command to check changeset state'), _('CMD')),
751 ('c', 'command', '', _('use command to check changeset state'), _('CMD')),
752 ('U', 'noupdate', False, _('do not update to target'))],
752 ('U', 'noupdate', False, _('do not update to target'))],
753 _("[-gbsr] [-U] [-c CMD] [REV]"))
753 _("[-gbsr] [-U] [-c CMD] [REV]"))
754 def bisect(ui, repo, rev=None, extra=None, command=None,
754 def bisect(ui, repo, rev=None, extra=None, command=None,
755 reset=None, good=None, bad=None, skip=None, extend=None,
755 reset=None, good=None, bad=None, skip=None, extend=None,
756 noupdate=None):
756 noupdate=None):
757 """subdivision search of changesets
757 """subdivision search of changesets
758
758
759 This command helps to find changesets which introduce problems. To
759 This command helps to find changesets which introduce problems. To
760 use, mark the earliest changeset you know exhibits the problem as
760 use, mark the earliest changeset you know exhibits the problem as
761 bad, then mark the latest changeset which is free from the problem
761 bad, then mark the latest changeset which is free from the problem
762 as good. Bisect will update your working directory to a revision
762 as good. Bisect will update your working directory to a revision
763 for testing (unless the -U/--noupdate option is specified). Once
763 for testing (unless the -U/--noupdate option is specified). Once
764 you have performed tests, mark the working directory as good or
764 you have performed tests, mark the working directory as good or
765 bad, and bisect will either update to another candidate changeset
765 bad, and bisect will either update to another candidate changeset
766 or announce that it has found the bad revision.
766 or announce that it has found the bad revision.
767
767
768 As a shortcut, you can also use the revision argument to mark a
768 As a shortcut, you can also use the revision argument to mark a
769 revision as good or bad without checking it out first.
769 revision as good or bad without checking it out first.
770
770
771 If you supply a command, it will be used for automatic bisection.
771 If you supply a command, it will be used for automatic bisection.
772 The environment variable HG_NODE will contain the ID of the
772 The environment variable HG_NODE will contain the ID of the
773 changeset being tested. The exit status of the command will be
773 changeset being tested. The exit status of the command will be
774 used to mark revisions as good or bad: status 0 means good, 125
774 used to mark revisions as good or bad: status 0 means good, 125
775 means to skip the revision, 127 (command not found) will abort the
775 means to skip the revision, 127 (command not found) will abort the
776 bisection, and any other non-zero exit status means the revision
776 bisection, and any other non-zero exit status means the revision
777 is bad.
777 is bad.
778
778
779 .. container:: verbose
779 .. container:: verbose
780
780
781 Some examples:
781 Some examples:
782
782
783 - start a bisection with known bad revision 34, and good revision 12::
783 - start a bisection with known bad revision 34, and good revision 12::
784
784
785 hg bisect --bad 34
785 hg bisect --bad 34
786 hg bisect --good 12
786 hg bisect --good 12
787
787
788 - advance the current bisection by marking current revision as good or
788 - advance the current bisection by marking current revision as good or
789 bad::
789 bad::
790
790
791 hg bisect --good
791 hg bisect --good
792 hg bisect --bad
792 hg bisect --bad
793
793
794 - mark the current revision, or a known revision, to be skipped (e.g. if
794 - mark the current revision, or a known revision, to be skipped (e.g. if
795 that revision is not usable because of another issue)::
795 that revision is not usable because of another issue)::
796
796
797 hg bisect --skip
797 hg bisect --skip
798 hg bisect --skip 23
798 hg bisect --skip 23
799
799
800 - skip all revisions that do not touch directories ``foo`` or ``bar``::
800 - skip all revisions that do not touch directories ``foo`` or ``bar``::
801
801
802 hg bisect --skip "!( file('path:foo') & file('path:bar') )"
802 hg bisect --skip "!( file('path:foo') & file('path:bar') )"
803
803
804 - forget the current bisection::
804 - forget the current bisection::
805
805
806 hg bisect --reset
806 hg bisect --reset
807
807
808 - use 'make && make tests' to automatically find the first broken
808 - use 'make && make tests' to automatically find the first broken
809 revision::
809 revision::
810
810
811 hg bisect --reset
811 hg bisect --reset
812 hg bisect --bad 34
812 hg bisect --bad 34
813 hg bisect --good 12
813 hg bisect --good 12
814 hg bisect --command "make && make tests"
814 hg bisect --command "make && make tests"
815
815
816 - see all changesets whose states are already known in the current
816 - see all changesets whose states are already known in the current
817 bisection::
817 bisection::
818
818
819 hg log -r "bisect(pruned)"
819 hg log -r "bisect(pruned)"
820
820
821 - see the changeset currently being bisected (especially useful
821 - see the changeset currently being bisected (especially useful
822 if running with -U/--noupdate)::
822 if running with -U/--noupdate)::
823
823
824 hg log -r "bisect(current)"
824 hg log -r "bisect(current)"
825
825
826 - see all changesets that took part in the current bisection::
826 - see all changesets that took part in the current bisection::
827
827
828 hg log -r "bisect(range)"
828 hg log -r "bisect(range)"
829
829
830 - you can even get a nice graph::
830 - you can even get a nice graph::
831
831
832 hg log --graph -r "bisect(range)"
832 hg log --graph -r "bisect(range)"
833
833
834 See :hg:`help revsets` for more about the `bisect()` keyword.
834 See :hg:`help revsets` for more about the `bisect()` keyword.
835
835
836 Returns 0 on success.
836 Returns 0 on success.
837 """
837 """
838 def extendbisectrange(nodes, good):
838 def extendbisectrange(nodes, good):
839 # bisect is incomplete when it ends on a merge node and
839 # bisect is incomplete when it ends on a merge node and
840 # one of the parent was not checked.
840 # one of the parent was not checked.
841 parents = repo[nodes[0]].parents()
841 parents = repo[nodes[0]].parents()
842 if len(parents) > 1:
842 if len(parents) > 1:
843 if good:
843 if good:
844 side = state['bad']
844 side = state['bad']
845 else:
845 else:
846 side = state['good']
846 side = state['good']
847 num = len(set(i.node() for i in parents) & set(side))
847 num = len(set(i.node() for i in parents) & set(side))
848 if num == 1:
848 if num == 1:
849 return parents[0].ancestor(parents[1])
849 return parents[0].ancestor(parents[1])
850 return None
850 return None
851
851
852 def print_result(nodes, good):
852 def print_result(nodes, good):
853 displayer = cmdutil.show_changeset(ui, repo, {})
853 displayer = cmdutil.show_changeset(ui, repo, {})
854 if len(nodes) == 1:
854 if len(nodes) == 1:
855 # narrowed it down to a single revision
855 # narrowed it down to a single revision
856 if good:
856 if good:
857 ui.write(_("The first good revision is:\n"))
857 ui.write(_("The first good revision is:\n"))
858 else:
858 else:
859 ui.write(_("The first bad revision is:\n"))
859 ui.write(_("The first bad revision is:\n"))
860 displayer.show(repo[nodes[0]])
860 displayer.show(repo[nodes[0]])
861 extendnode = extendbisectrange(nodes, good)
861 extendnode = extendbisectrange(nodes, good)
862 if extendnode is not None:
862 if extendnode is not None:
863 ui.write(_('Not all ancestors of this changeset have been'
863 ui.write(_('Not all ancestors of this changeset have been'
864 ' checked.\nUse bisect --extend to continue the '
864 ' checked.\nUse bisect --extend to continue the '
865 'bisection from\nthe common ancestor, %s.\n')
865 'bisection from\nthe common ancestor, %s.\n')
866 % extendnode)
866 % extendnode)
867 else:
867 else:
868 # multiple possible revisions
868 # multiple possible revisions
869 if good:
869 if good:
870 ui.write(_("Due to skipped revisions, the first "
870 ui.write(_("Due to skipped revisions, the first "
871 "good revision could be any of:\n"))
871 "good revision could be any of:\n"))
872 else:
872 else:
873 ui.write(_("Due to skipped revisions, the first "
873 ui.write(_("Due to skipped revisions, the first "
874 "bad revision could be any of:\n"))
874 "bad revision could be any of:\n"))
875 for n in nodes:
875 for n in nodes:
876 displayer.show(repo[n])
876 displayer.show(repo[n])
877 displayer.close()
877 displayer.close()
878
878
879 def check_state(state, interactive=True):
879 def check_state(state, interactive=True):
880 if not state['good'] or not state['bad']:
880 if not state['good'] or not state['bad']:
881 if (good or bad or skip or reset) and interactive:
881 if (good or bad or skip or reset) and interactive:
882 return
882 return
883 if not state['good']:
883 if not state['good']:
884 raise error.Abort(_('cannot bisect (no known good revisions)'))
884 raise error.Abort(_('cannot bisect (no known good revisions)'))
885 else:
885 else:
886 raise error.Abort(_('cannot bisect (no known bad revisions)'))
886 raise error.Abort(_('cannot bisect (no known bad revisions)'))
887 return True
887 return True
888
888
889 # backward compatibility
889 # backward compatibility
890 if rev in "good bad reset init".split():
890 if rev in "good bad reset init".split():
891 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
891 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
892 cmd, rev, extra = rev, extra, None
892 cmd, rev, extra = rev, extra, None
893 if cmd == "good":
893 if cmd == "good":
894 good = True
894 good = True
895 elif cmd == "bad":
895 elif cmd == "bad":
896 bad = True
896 bad = True
897 else:
897 else:
898 reset = True
898 reset = True
899 elif extra or good + bad + skip + reset + extend + bool(command) > 1:
899 elif extra or good + bad + skip + reset + extend + bool(command) > 1:
900 raise error.Abort(_('incompatible arguments'))
900 raise error.Abort(_('incompatible arguments'))
901
901
902 cmdutil.checkunfinished(repo)
902 cmdutil.checkunfinished(repo)
903
903
904 if reset:
904 if reset:
905 p = repo.join("bisect.state")
905 p = repo.join("bisect.state")
906 if os.path.exists(p):
906 if os.path.exists(p):
907 os.unlink(p)
907 os.unlink(p)
908 return
908 return
909
909
910 state = hbisect.load_state(repo)
910 state = hbisect.load_state(repo)
911
911
912 if command:
912 if command:
913 changesets = 1
913 changesets = 1
914 if noupdate:
914 if noupdate:
915 try:
915 try:
916 node = state['current'][0]
916 node = state['current'][0]
917 except LookupError:
917 except LookupError:
918 raise error.Abort(_('current bisect revision is unknown - '
918 raise error.Abort(_('current bisect revision is unknown - '
919 'start a new bisect to fix'))
919 'start a new bisect to fix'))
920 else:
920 else:
921 node, p2 = repo.dirstate.parents()
921 node, p2 = repo.dirstate.parents()
922 if p2 != nullid:
922 if p2 != nullid:
923 raise error.Abort(_('current bisect revision is a merge'))
923 raise error.Abort(_('current bisect revision is a merge'))
924 try:
924 try:
925 while changesets:
925 while changesets:
926 # update state
926 # update state
927 state['current'] = [node]
927 state['current'] = [node]
928 hbisect.save_state(repo, state)
928 hbisect.save_state(repo, state)
929 status = ui.system(command, environ={'HG_NODE': hex(node)})
929 status = ui.system(command, environ={'HG_NODE': hex(node)})
930 if status == 125:
930 if status == 125:
931 transition = "skip"
931 transition = "skip"
932 elif status == 0:
932 elif status == 0:
933 transition = "good"
933 transition = "good"
934 # status < 0 means process was killed
934 # status < 0 means process was killed
935 elif status == 127:
935 elif status == 127:
936 raise error.Abort(_("failed to execute %s") % command)
936 raise error.Abort(_("failed to execute %s") % command)
937 elif status < 0:
937 elif status < 0:
938 raise error.Abort(_("%s killed") % command)
938 raise error.Abort(_("%s killed") % command)
939 else:
939 else:
940 transition = "bad"
940 transition = "bad"
941 ctx = scmutil.revsingle(repo, rev, node)
941 ctx = scmutil.revsingle(repo, rev, node)
942 rev = None # clear for future iterations
942 rev = None # clear for future iterations
943 state[transition].append(ctx.node())
943 state[transition].append(ctx.node())
944 ui.status(_('changeset %d:%s: %s\n') % (ctx, ctx, transition))
944 ui.status(_('changeset %d:%s: %s\n') % (ctx, ctx, transition))
945 check_state(state, interactive=False)
945 check_state(state, interactive=False)
946 # bisect
946 # bisect
947 nodes, changesets, bgood = hbisect.bisect(repo.changelog, state)
947 nodes, changesets, bgood = hbisect.bisect(repo.changelog, state)
948 # update to next check
948 # update to next check
949 node = nodes[0]
949 node = nodes[0]
950 if not noupdate:
950 if not noupdate:
951 cmdutil.bailifchanged(repo)
951 cmdutil.bailifchanged(repo)
952 hg.clean(repo, node, show_stats=False)
952 hg.clean(repo, node, show_stats=False)
953 finally:
953 finally:
954 state['current'] = [node]
954 state['current'] = [node]
955 hbisect.save_state(repo, state)
955 hbisect.save_state(repo, state)
956 print_result(nodes, bgood)
956 print_result(nodes, bgood)
957 return
957 return
958
958
959 # update state
959 # update state
960
960
961 if rev:
961 if rev:
962 nodes = [repo.lookup(i) for i in scmutil.revrange(repo, [rev])]
962 nodes = [repo.lookup(i) for i in scmutil.revrange(repo, [rev])]
963 else:
963 else:
964 nodes = [repo.lookup('.')]
964 nodes = [repo.lookup('.')]
965
965
966 if good or bad or skip:
966 if good or bad or skip:
967 if good:
967 if good:
968 state['good'] += nodes
968 state['good'] += nodes
969 elif bad:
969 elif bad:
970 state['bad'] += nodes
970 state['bad'] += nodes
971 elif skip:
971 elif skip:
972 state['skip'] += nodes
972 state['skip'] += nodes
973 hbisect.save_state(repo, state)
973 hbisect.save_state(repo, state)
974
974
975 if not check_state(state):
975 if not check_state(state):
976 return
976 return
977
977
978 # actually bisect
978 # actually bisect
979 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
979 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
980 if extend:
980 if extend:
981 if not changesets:
981 if not changesets:
982 extendnode = extendbisectrange(nodes, good)
982 extendnode = extendbisectrange(nodes, good)
983 if extendnode is not None:
983 if extendnode is not None:
984 ui.write(_("Extending search to changeset %d:%s\n")
984 ui.write(_("Extending search to changeset %d:%s\n")
985 % (extendnode.rev(), extendnode))
985 % (extendnode.rev(), extendnode))
986 state['current'] = [extendnode.node()]
986 state['current'] = [extendnode.node()]
987 hbisect.save_state(repo, state)
987 hbisect.save_state(repo, state)
988 if noupdate:
988 if noupdate:
989 return
989 return
990 cmdutil.bailifchanged(repo)
990 cmdutil.bailifchanged(repo)
991 return hg.clean(repo, extendnode.node())
991 return hg.clean(repo, extendnode.node())
992 raise error.Abort(_("nothing to extend"))
992 raise error.Abort(_("nothing to extend"))
993
993
994 if changesets == 0:
994 if changesets == 0:
995 print_result(nodes, good)
995 print_result(nodes, good)
996 else:
996 else:
997 assert len(nodes) == 1 # only a single node can be tested next
997 assert len(nodes) == 1 # only a single node can be tested next
998 node = nodes[0]
998 node = nodes[0]
999 # compute the approximate number of remaining tests
999 # compute the approximate number of remaining tests
1000 tests, size = 0, 2
1000 tests, size = 0, 2
1001 while size <= changesets:
1001 while size <= changesets:
1002 tests, size = tests + 1, size * 2
1002 tests, size = tests + 1, size * 2
1003 rev = repo.changelog.rev(node)
1003 rev = repo.changelog.rev(node)
1004 ui.write(_("Testing changeset %d:%s "
1004 ui.write(_("Testing changeset %d:%s "
1005 "(%d changesets remaining, ~%d tests)\n")
1005 "(%d changesets remaining, ~%d tests)\n")
1006 % (rev, short(node), changesets, tests))
1006 % (rev, short(node), changesets, tests))
1007 state['current'] = [node]
1007 state['current'] = [node]
1008 hbisect.save_state(repo, state)
1008 hbisect.save_state(repo, state)
1009 if not noupdate:
1009 if not noupdate:
1010 cmdutil.bailifchanged(repo)
1010 cmdutil.bailifchanged(repo)
1011 return hg.clean(repo, node)
1011 return hg.clean(repo, node)
1012
1012
@command('bookmarks|bookmark',
    [('f', 'force', False, _('force')),
    ('r', 'rev', '', _('revision for bookmark action'), _('REV')),
    ('d', 'delete', False, _('delete a given bookmark')),
    ('m', 'rename', '', _('rename a given bookmark'), _('OLD')),
    ('i', 'inactive', False, _('mark a bookmark inactive')),
    ] + formatteropts,
    _('hg bookmarks [OPTIONS]... [NAME]...'))
def bookmark(ui, repo, *names, **opts):
    '''create a new bookmark or list existing bookmarks

    Bookmarks are labels on changesets to help track lines of development.
    Bookmarks are unversioned and can be moved, renamed and deleted.
    Deleting or moving a bookmark has no effect on the associated changesets.

    Creating or updating to a bookmark causes it to be marked as 'active'.
    The active bookmark is indicated with a '*'.
    When a commit is made, the active bookmark will advance to the new commit.
    A plain :hg:`update` will also advance an active bookmark, if possible.
    Updating away from a bookmark will cause it to be deactivated.

    Bookmarks can be pushed and pulled between repositories (see
    :hg:`help push` and :hg:`help pull`). If a shared bookmark has
    diverged, a new 'divergent bookmark' of the form 'name@path' will
    be created. Using :hg:`merge` will resolve the divergence.

    A bookmark named '@' has the special property that :hg:`clone` will
    check it out by default if it exists.

    .. container:: verbose

      Examples:

      - create an active bookmark for a new line of development::

          hg book new-feature

      - create an inactive bookmark as a place marker::

          hg book -i reviewed

      - create an inactive bookmark on another changeset::

          hg book -r .^ tested

      - rename bookmark turkey to dinner::

          hg book -m turkey dinner

      - move the '@' bookmark from another branch::

          hg book -f @
    '''
    force = opts.get('force')
    rev = opts.get('rev')
    delete = opts.get('delete')
    rename = opts.get('rename')
    inactive = opts.get('inactive')

    def checkformat(mark):
        # Normalize a user-supplied bookmark name: strip surrounding
        # whitespace, reject names that were nothing but whitespace, and
        # run the shared label validation (scmutil.checknewlabel aborts on
        # illegal characters / reserved names).
        mark = mark.strip()
        if not mark:
            raise error.Abort(_("bookmark names cannot consist entirely of "
                                "whitespace"))
        scmutil.checknewlabel(repo, mark, 'bookmark')
        return mark

    def checkconflict(repo, mark, cur, force=False, target=None):
        # Abort (or resolve) collisions between the requested bookmark name
        # and existing bookmarks / branch names. 'cur' is the working
        # directory node; 'target' is the node the bookmark is being set to,
        # or None when only the name is being checked (e.g. for --rename).
        # NOTE: reads 'marks' from the enclosing scope, so it must only be
        # called after 'marks = repo._bookmarks' below.
        if mark in marks and not force:
            if target:
                if marks[mark] == target and target == cur:
                    # re-activating a bookmark
                    return
                anc = repo.changelog.ancestors([repo[target].rev()])
                bmctx = repo[marks[mark]]
                # all bookmarks sharing this base name, i.e. 'name' plus any
                # divergent 'name@path' variants
                divs = [repo[b].node() for b in marks
                        if b.split('@', 1)[0] == mark.split('@', 1)[0]]

                # allow resolving a single divergent bookmark even if moving
                # the bookmark across branches when a revision is specified
                # that contains a divergent bookmark
                if bmctx.rev() not in anc and target in divs:
                    bookmarks.deletedivergent(repo, [target], mark)
                    return

                deletefrom = [b for b in divs
                              if repo[b].rev() in anc or b == target]
                bookmarks.deletedivergent(repo, deletefrom, mark)
                if bookmarks.validdest(repo, bmctx, repo[target]):
                    # fast-forward move of an existing bookmark is always
                    # allowed without --force
                    ui.status(_("moving bookmark '%s' forward from %s\n") %
                              (mark, short(bmctx.node())))
                    return
            raise error.Abort(_("bookmark '%s' already exists "
                                "(use -f to force)") % mark)
        if ((mark in repo.branchmap() or mark == repo.dirstate.branch())
            and not force):
            raise error.Abort(
                _("a bookmark cannot have the name of an existing branch"))

    # reject mutually-exclusive option combinations up front
    if delete and rename:
        raise error.Abort(_("--delete and --rename are incompatible"))
    if delete and rev:
        raise error.Abort(_("--rev is incompatible with --delete"))
    if rename and rev:
        raise error.Abort(_("--rev is incompatible with --rename"))
    if not names and (delete or rev):
        raise error.Abort(_("bookmark name required"))

    if delete or rename or names or inactive:
        # Mutating path: take the working-dir lock, then the store lock, and
        # open a 'bookmark' transaction only for the branches that actually
        # modify bookmarks (plain --inactive needs no transaction).
        wlock = lock = tr = None
        try:
            wlock = repo.wlock()
            lock = repo.lock()
            cur = repo.changectx('.').node()
            marks = repo._bookmarks
            if delete:
                tr = repo.transaction('bookmark')
                for mark in names:
                    if mark not in marks:
                        raise error.Abort(_("bookmark '%s' does not exist") %
                                          mark)
                    if mark == repo._activebookmark:
                        bookmarks.deactivate(repo)
                    del marks[mark]

            elif rename:
                tr = repo.transaction('bookmark')
                if not names:
                    raise error.Abort(_("new bookmark name required"))
                elif len(names) > 1:
                    raise error.Abort(_("only one new bookmark name allowed"))
                mark = checkformat(names[0])
                if rename not in marks:
                    raise error.Abort(_("bookmark '%s' does not exist")
                                      % rename)
                checkconflict(repo, mark, cur, force)
                marks[mark] = marks[rename]
                # the renamed bookmark stays active unless --inactive was given
                if repo._activebookmark == rename and not inactive:
                    bookmarks.activate(repo, mark)
                del marks[rename]
            elif names:
                tr = repo.transaction('bookmark')
                newact = None
                for mark in names:
                    mark = checkformat(mark)
                    # the first name on the command line is the activation
                    # candidate
                    if newact is None:
                        newact = mark
                    if inactive and mark == repo._activebookmark:
                        bookmarks.deactivate(repo)
                        return
                    tgt = cur
                    if rev:
                        tgt = scmutil.revsingle(repo, rev).node()
                    checkconflict(repo, mark, cur, force, tgt)
                    marks[mark] = tgt
                if not inactive and cur == marks[newact] and not rev:
                    bookmarks.activate(repo, newact)
                elif cur != tgt and newact == repo._activebookmark:
                    # the active bookmark was moved off the working dir parent
                    bookmarks.deactivate(repo)
            elif inactive:
                if len(marks) == 0:
                    ui.status(_("no bookmarks set\n"))
                elif not repo._activebookmark:
                    ui.status(_("no active bookmark\n"))
                else:
                    bookmarks.deactivate(repo)
            if tr is not None:
                marks.recordchange(tr)
                tr.close()
        finally:
            # release in reverse acquisition order; tr may be None (aborted
            # transactions are rolled back by release)
            lockmod.release(tr, lock, wlock)
    else: # show bookmarks
        fm = ui.formatter('bookmarks', opts)
        hexfn = fm.hexfunc
        marks = repo._bookmarks
        if len(marks) == 0 and not fm:
            ui.status(_("no bookmarks set\n"))
        for bmark, n in sorted(marks.iteritems()):
            active = repo._activebookmark
            if bmark == active:
                prefix, label = '*', activebookmarklabel
            else:
                prefix, label = ' ', ''

            fm.startitem()
            if not ui.quiet:
                fm.plain(' %s ' % prefix, label=label)
            fm.write('bookmark', '%s', bmark, label=label)
            # pad so the rev:node column lines up across bookmark names
            pad = " " * (25 - encoding.colwidth(bmark))
            fm.condwrite(not ui.quiet, 'rev node', pad + ' %d:%s',
                         repo.changelog.rev(n), hexfn(n), label=label)
            fm.data(active=(bmark == active))
            fm.plain('\n')
        fm.end()
1207
1207
@command('branch',
    [('f', 'force', None,
       _('set branch name even if it shadows an existing branch')),
    ('C', 'clean', None, _('reset branch name to parent branch name'))],
    _('[-fC] [NAME]'))
def branch(ui, repo, label=None, **opts):
    """set or show the current branch name

    .. note::

       Branch names are permanent and global. Use :hg:`bookmark` to create a
       light-weight bookmark instead. See :hg:`help glossary` for more
       information about named branches and bookmarks.

    With no argument, show the current branch name. With one argument,
    set the working directory branch name (the branch will not exist
    in the repository until the next commit). Standard practice
    recommends that primary development take place on the 'default'
    branch.

    Unless -f/--force is specified, branch will not let you set a
    branch name that already exists.

    Use -C/--clean to reset the working directory branch to that of
    the parent of the working directory, negating a previous branch
    change.

    Use the command :hg:`update` to switch to an existing branch. Use
    :hg:`commit --close-branch` to mark this branch head as closed.
    When all heads of a branch are closed, the branch will be
    considered closed.

    Returns 0 on success.
    """
    if label:
        label = label.strip()

    # no name and no --clean: just report the current branch
    if not opts.get('clean') and not label:
        ui.write("%s\n" % repo.dirstate.branch())
        return

    with repo.wlock():
        if opts.get('clean'):
            # reset to the first parent's branch
            label = repo[None].p1().branch()
            repo.dirstate.setbranch(label)
            ui.status(_('reset working directory to branch %s\n') % label)
        elif label:
            # without --force, refuse to shadow an existing branch unless a
            # working-dir parent is already on that branch
            if not opts.get('force') and label in repo.branchmap():
                parentbranches = [p.branch() for p in repo[None].parents()]
                if label not in parentbranches:
                    raise error.Abort(_('a branch of the same name already'
                                        ' exists'),
                                      # i18n: "it" refers to an existing branch
                                      hint=_("use 'hg update' to switch to it"))
            scmutil.checknewlabel(repo, label, 'branch')
            repo.dirstate.setbranch(label)
            ui.status(_('marked working directory as branch %s\n') % label)

            # hint at bookmarks when no open named branch besides default
            # exists yet
            haveothers = any(
                not isclosed
                for name, heads, tip, isclosed
                in repo.branchmap().iterbranches()
                if name != "default")
            if not haveothers:
                ui.status(_('(branches are permanent and global, '
                            'did you want a bookmark?)\n'))
1271
1271
@command('branches',
    [('a', 'active', False,
      _('show only branches that have unmerged heads (DEPRECATED)')),
    ('c', 'closed', False, _('show normal and closed branches')),
    ] + formatteropts,
    _('[-c]'))
def branches(ui, repo, active=False, closed=False, **opts):
    """list repository named branches

    List the repository's named branches, indicating which ones are
    inactive. If -c/--closed is specified, also list branches which have
    been marked closed (see :hg:`commit --close-branch`).

    Use the command :hg:`update` to switch to an existing branch.

    Returns 0.
    """

    fm = ui.formatter('branches', opts)
    hexfunc = fm.hexfunc

    # a branch is "active" when one of its heads is a repository head and
    # the branch has not been closed
    allheads = set(repo.heads())
    entries = [(name, repo[tip],
                not isclosed and bool(set(heads) & allheads),
                not isclosed)
               for name, heads, tip, isclosed
               in repo.branchmap().iterbranches()]
    # active branches first, then by descending tip revision
    entries.sort(key=lambda e: (e[2], e[1].rev(), e[0], e[3]),
                 reverse=True)

    for name, ctx, isactive, isopen in entries:
        if active and not isactive:
            continue
        if isactive:
            label, notice = 'branches.active', ''
        elif not isopen:
            if not closed:
                continue
            label, notice = 'branches.closed', _(' (closed)')
        else:
            label, notice = 'branches.inactive', _(' (inactive)')
        current = (name == repo.dirstate.branch())
        if current:
            label = 'branches.current'

        fm.startitem()
        fm.write('branch', '%s', name, label=label)
        rev = ctx.rev()
        # pad so the rev:node column lines up across branch names
        padsize = max(31 - len(str(rev)) - encoding.colwidth(name), 0)
        fmt = ' ' * padsize + ' %d:%s'
        fm.condwrite(not ui.quiet, 'rev node', fmt, rev, hexfunc(ctx.node()),
                     label='log.changeset changeset.%s' % ctx.phasestr())
        fm.data(active=isactive, closed=not isopen, current=current)
        if not ui.quiet:
            fm.plain(notice)
        fm.plain('\n')
    fm.end()
1331
1331
1332 @command('bundle',
1332 @command('bundle',
1333 [('f', 'force', None, _('run even when the destination is unrelated')),
1333 [('f', 'force', None, _('run even when the destination is unrelated')),
1334 ('r', 'rev', [], _('a changeset intended to be added to the destination'),
1334 ('r', 'rev', [], _('a changeset intended to be added to the destination'),
1335 _('REV')),
1335 _('REV')),
1336 ('b', 'branch', [], _('a specific branch you would like to bundle'),
1336 ('b', 'branch', [], _('a specific branch you would like to bundle'),
1337 _('BRANCH')),
1337 _('BRANCH')),
1338 ('', 'base', [],
1338 ('', 'base', [],
1339 _('a base changeset assumed to be available at the destination'),
1339 _('a base changeset assumed to be available at the destination'),
1340 _('REV')),
1340 _('REV')),
1341 ('a', 'all', None, _('bundle all changesets in the repository')),
1341 ('a', 'all', None, _('bundle all changesets in the repository')),
1342 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE')),
1342 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE')),
1343 ] + remoteopts,
1343 ] + remoteopts,
1344 _('[-f] [-t TYPE] [-a] [-r REV]... [--base REV]... FILE [DEST]'))
1344 _('[-f] [-t TYPE] [-a] [-r REV]... [--base REV]... FILE [DEST]'))
1345 def bundle(ui, repo, fname, dest=None, **opts):
1345 def bundle(ui, repo, fname, dest=None, **opts):
1346 """create a changegroup file
1346 """create a changegroup file
1347
1347
1348 Generate a changegroup file collecting changesets to be added
1348 Generate a changegroup file collecting changesets to be added
1349 to a repository.
1349 to a repository.
1350
1350
1351 To create a bundle containing all changesets, use -a/--all
1351 To create a bundle containing all changesets, use -a/--all
1352 (or --base null). Otherwise, hg assumes the destination will have
1352 (or --base null). Otherwise, hg assumes the destination will have
1353 all the nodes you specify with --base parameters. Otherwise, hg
1353 all the nodes you specify with --base parameters. Otherwise, hg
1354 will assume the repository has all the nodes in destination, or
1354 will assume the repository has all the nodes in destination, or
1355 default-push/default if no destination is specified.
1355 default-push/default if no destination is specified.
1356
1356
1357 You can change bundle format with the -t/--type option. You can
1357 You can change bundle format with the -t/--type option. You can
1358 specify a compression, a bundle version or both using a dash
1358 specify a compression, a bundle version or both using a dash
1359 (comp-version). The available compression methods are: none, bzip2,
1359 (comp-version). The available compression methods are: none, bzip2,
1360 and gzip (by default, bundles are compressed using bzip2). The
1360 and gzip (by default, bundles are compressed using bzip2). The
1361 available formats are: v1, v2 (default to most suitable).
1361 available formats are: v1, v2 (default to most suitable).
1362
1362
1363 The bundle file can then be transferred using conventional means
1363 The bundle file can then be transferred using conventional means
1364 and applied to another repository with the unbundle or pull
1364 and applied to another repository with the unbundle or pull
1365 command. This is useful when direct push and pull are not
1365 command. This is useful when direct push and pull are not
1366 available or when exporting an entire repository is undesirable.
1366 available or when exporting an entire repository is undesirable.
1367
1367
1368 Applying bundles preserves all changeset contents including
1368 Applying bundles preserves all changeset contents including
1369 permissions, copy/rename information, and revision history.
1369 permissions, copy/rename information, and revision history.
1370
1370
1371 Returns 0 on success, 1 if no changes found.
1371 Returns 0 on success, 1 if no changes found.
1372 """
1372 """
1373 revs = None
1373 revs = None
1374 if 'rev' in opts:
1374 if 'rev' in opts:
1375 revstrings = opts['rev']
1375 revstrings = opts['rev']
1376 revs = scmutil.revrange(repo, revstrings)
1376 revs = scmutil.revrange(repo, revstrings)
1377 if revstrings and not revs:
1377 if revstrings and not revs:
1378 raise error.Abort(_('no commits to bundle'))
1378 raise error.Abort(_('no commits to bundle'))
1379
1379
1380 bundletype = opts.get('type', 'bzip2').lower()
1380 bundletype = opts.get('type', 'bzip2').lower()
1381 try:
1381 try:
1382 bcompression, cgversion, params = exchange.parsebundlespec(
1382 bcompression, cgversion, params = exchange.parsebundlespec(
1383 repo, bundletype, strict=False)
1383 repo, bundletype, strict=False)
1384 except error.UnsupportedBundleSpecification as e:
1384 except error.UnsupportedBundleSpecification as e:
1385 raise error.Abort(str(e),
1385 raise error.Abort(str(e),
1386 hint=_('see "hg help bundle" for supported '
1386 hint=_('see "hg help bundle" for supported '
1387 'values for --type'))
1387 'values for --type'))
1388
1388
1389 # Packed bundles are a pseudo bundle format for now.
1389 # Packed bundles are a pseudo bundle format for now.
1390 if cgversion == 's1':
1390 if cgversion == 's1':
1391 raise error.Abort(_('packed bundles cannot be produced by "hg bundle"'),
1391 raise error.Abort(_('packed bundles cannot be produced by "hg bundle"'),
1392 hint=_("use 'hg debugcreatestreamclonebundle'"))
1392 hint=_("use 'hg debugcreatestreamclonebundle'"))
1393
1393
1394 if opts.get('all'):
1394 if opts.get('all'):
1395 if dest:
1395 if dest:
1396 raise error.Abort(_("--all is incompatible with specifying "
1396 raise error.Abort(_("--all is incompatible with specifying "
1397 "a destination"))
1397 "a destination"))
1398 if opts.get('base'):
1398 if opts.get('base'):
1399 ui.warn(_("ignoring --base because --all was specified\n"))
1399 ui.warn(_("ignoring --base because --all was specified\n"))
1400 base = ['null']
1400 base = ['null']
1401 else:
1401 else:
1402 base = scmutil.revrange(repo, opts.get('base'))
1402 base = scmutil.revrange(repo, opts.get('base'))
1403 # TODO: get desired bundlecaps from command line.
1403 # TODO: get desired bundlecaps from command line.
1404 bundlecaps = None
1404 bundlecaps = None
1405 if cgversion not in changegroup.supportedoutgoingversions(repo):
1405 if cgversion not in changegroup.supportedoutgoingversions(repo):
1406 raise error.Abort(_("repository does not support bundle version %s") %
1406 raise error.Abort(_("repository does not support bundle version %s") %
1407 cgversion)
1407 cgversion)
1408
1408
1409 if base:
1409 if base:
1410 if dest:
1410 if dest:
1411 raise error.Abort(_("--base is incompatible with specifying "
1411 raise error.Abort(_("--base is incompatible with specifying "
1412 "a destination"))
1412 "a destination"))
1413 common = [repo.lookup(rev) for rev in base]
1413 common = [repo.lookup(rev) for rev in base]
1414 heads = revs and map(repo.lookup, revs) or revs
1414 heads = revs and map(repo.lookup, revs) or revs
1415 cg = changegroup.getchangegroup(repo, 'bundle', heads=heads,
1415 outgoing = discovery.outgoing(repo, common, heads)
1416 common=common, bundlecaps=bundlecaps,
1416 cg = changegroup.getchangegroup(repo, 'bundle', outgoing,
1417 version=cgversion)
1417 bundlecaps=bundlecaps,
1418 version=cgversion)
1418 outgoing = None
1419 outgoing = None
1419 else:
1420 else:
1420 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1421 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1421 dest, branches = hg.parseurl(dest, opts.get('branch'))
1422 dest, branches = hg.parseurl(dest, opts.get('branch'))
1422 other = hg.peer(repo, opts, dest)
1423 other = hg.peer(repo, opts, dest)
1423 revs, checkout = hg.addbranchrevs(repo, repo, branches, revs)
1424 revs, checkout = hg.addbranchrevs(repo, repo, branches, revs)
1424 heads = revs and map(repo.lookup, revs) or revs
1425 heads = revs and map(repo.lookup, revs) or revs
1425 outgoing = discovery.findcommonoutgoing(repo, other,
1426 outgoing = discovery.findcommonoutgoing(repo, other,
1426 onlyheads=heads,
1427 onlyheads=heads,
1427 force=opts.get('force'),
1428 force=opts.get('force'),
1428 portable=True)
1429 portable=True)
1429 cg = changegroup.getlocalchangegroup(repo, 'bundle', outgoing,
1430 cg = changegroup.getlocalchangegroup(repo, 'bundle', outgoing,
1430 bundlecaps, version=cgversion)
1431 bundlecaps, version=cgversion)
1431 if not cg:
1432 if not cg:
1432 scmutil.nochangesfound(ui, repo, outgoing and outgoing.excluded)
1433 scmutil.nochangesfound(ui, repo, outgoing and outgoing.excluded)
1433 return 1
1434 return 1
1434
1435
1435 if cgversion == '01': #bundle1
1436 if cgversion == '01': #bundle1
1436 if bcompression is None:
1437 if bcompression is None:
1437 bcompression = 'UN'
1438 bcompression = 'UN'
1438 bversion = 'HG10' + bcompression
1439 bversion = 'HG10' + bcompression
1439 bcompression = None
1440 bcompression = None
1440 else:
1441 else:
1441 assert cgversion == '02'
1442 assert cgversion == '02'
1442 bversion = 'HG20'
1443 bversion = 'HG20'
1443
1444
1444 bundle2.writebundle(ui, cg, fname, bversion, compression=bcompression)
1445 bundle2.writebundle(ui, cg, fname, bversion, compression=bcompression)
1445
1446
1446 @command('cat',
1447 @command('cat',
1447 [('o', 'output', '',
1448 [('o', 'output', '',
1448 _('print output to file with formatted name'), _('FORMAT')),
1449 _('print output to file with formatted name'), _('FORMAT')),
1449 ('r', 'rev', '', _('print the given revision'), _('REV')),
1450 ('r', 'rev', '', _('print the given revision'), _('REV')),
1450 ('', 'decode', None, _('apply any matching decode filter')),
1451 ('', 'decode', None, _('apply any matching decode filter')),
1451 ] + walkopts,
1452 ] + walkopts,
1452 _('[OPTION]... FILE...'),
1453 _('[OPTION]... FILE...'),
1453 inferrepo=True)
1454 inferrepo=True)
1454 def cat(ui, repo, file1, *pats, **opts):
1455 def cat(ui, repo, file1, *pats, **opts):
1455 """output the current or given revision of files
1456 """output the current or given revision of files
1456
1457
1457 Print the specified files as they were at the given revision. If
1458 Print the specified files as they were at the given revision. If
1458 no revision is given, the parent of the working directory is used.
1459 no revision is given, the parent of the working directory is used.
1459
1460
1460 Output may be to a file, in which case the name of the file is
1461 Output may be to a file, in which case the name of the file is
1461 given using a format string. The formatting rules as follows:
1462 given using a format string. The formatting rules as follows:
1462
1463
1463 :``%%``: literal "%" character
1464 :``%%``: literal "%" character
1464 :``%s``: basename of file being printed
1465 :``%s``: basename of file being printed
1465 :``%d``: dirname of file being printed, or '.' if in repository root
1466 :``%d``: dirname of file being printed, or '.' if in repository root
1466 :``%p``: root-relative path name of file being printed
1467 :``%p``: root-relative path name of file being printed
1467 :``%H``: changeset hash (40 hexadecimal digits)
1468 :``%H``: changeset hash (40 hexadecimal digits)
1468 :``%R``: changeset revision number
1469 :``%R``: changeset revision number
1469 :``%h``: short-form changeset hash (12 hexadecimal digits)
1470 :``%h``: short-form changeset hash (12 hexadecimal digits)
1470 :``%r``: zero-padded changeset revision number
1471 :``%r``: zero-padded changeset revision number
1471 :``%b``: basename of the exporting repository
1472 :``%b``: basename of the exporting repository
1472
1473
1473 Returns 0 on success.
1474 Returns 0 on success.
1474 """
1475 """
1475 ctx = scmutil.revsingle(repo, opts.get('rev'))
1476 ctx = scmutil.revsingle(repo, opts.get('rev'))
1476 m = scmutil.match(ctx, (file1,) + pats, opts)
1477 m = scmutil.match(ctx, (file1,) + pats, opts)
1477
1478
1478 return cmdutil.cat(ui, repo, ctx, m, '', **opts)
1479 return cmdutil.cat(ui, repo, ctx, m, '', **opts)
1479
1480
1480 @command('^clone',
1481 @command('^clone',
1481 [('U', 'noupdate', None, _('the clone will include an empty working '
1482 [('U', 'noupdate', None, _('the clone will include an empty working '
1482 'directory (only a repository)')),
1483 'directory (only a repository)')),
1483 ('u', 'updaterev', '', _('revision, tag, or branch to check out'),
1484 ('u', 'updaterev', '', _('revision, tag, or branch to check out'),
1484 _('REV')),
1485 _('REV')),
1485 ('r', 'rev', [], _('include the specified changeset'), _('REV')),
1486 ('r', 'rev', [], _('include the specified changeset'), _('REV')),
1486 ('b', 'branch', [], _('clone only the specified branch'), _('BRANCH')),
1487 ('b', 'branch', [], _('clone only the specified branch'), _('BRANCH')),
1487 ('', 'pull', None, _('use pull protocol to copy metadata')),
1488 ('', 'pull', None, _('use pull protocol to copy metadata')),
1488 ('', 'uncompressed', None, _('use uncompressed transfer (fast over LAN)')),
1489 ('', 'uncompressed', None, _('use uncompressed transfer (fast over LAN)')),
1489 ] + remoteopts,
1490 ] + remoteopts,
1490 _('[OPTION]... SOURCE [DEST]'),
1491 _('[OPTION]... SOURCE [DEST]'),
1491 norepo=True)
1492 norepo=True)
1492 def clone(ui, source, dest=None, **opts):
1493 def clone(ui, source, dest=None, **opts):
1493 """make a copy of an existing repository
1494 """make a copy of an existing repository
1494
1495
1495 Create a copy of an existing repository in a new directory.
1496 Create a copy of an existing repository in a new directory.
1496
1497
1497 If no destination directory name is specified, it defaults to the
1498 If no destination directory name is specified, it defaults to the
1498 basename of the source.
1499 basename of the source.
1499
1500
1500 The location of the source is added to the new repository's
1501 The location of the source is added to the new repository's
1501 ``.hg/hgrc`` file, as the default to be used for future pulls.
1502 ``.hg/hgrc`` file, as the default to be used for future pulls.
1502
1503
1503 Only local paths and ``ssh://`` URLs are supported as
1504 Only local paths and ``ssh://`` URLs are supported as
1504 destinations. For ``ssh://`` destinations, no working directory or
1505 destinations. For ``ssh://`` destinations, no working directory or
1505 ``.hg/hgrc`` will be created on the remote side.
1506 ``.hg/hgrc`` will be created on the remote side.
1506
1507
1507 If the source repository has a bookmark called '@' set, that
1508 If the source repository has a bookmark called '@' set, that
1508 revision will be checked out in the new repository by default.
1509 revision will be checked out in the new repository by default.
1509
1510
1510 To check out a particular version, use -u/--update, or
1511 To check out a particular version, use -u/--update, or
1511 -U/--noupdate to create a clone with no working directory.
1512 -U/--noupdate to create a clone with no working directory.
1512
1513
1513 To pull only a subset of changesets, specify one or more revisions
1514 To pull only a subset of changesets, specify one or more revisions
1514 identifiers with -r/--rev or branches with -b/--branch. The
1515 identifiers with -r/--rev or branches with -b/--branch. The
1515 resulting clone will contain only the specified changesets and
1516 resulting clone will contain only the specified changesets and
1516 their ancestors. These options (or 'clone src#rev dest') imply
1517 their ancestors. These options (or 'clone src#rev dest') imply
1517 --pull, even for local source repositories.
1518 --pull, even for local source repositories.
1518
1519
1519 .. note::
1520 .. note::
1520
1521
1521 Specifying a tag will include the tagged changeset but not the
1522 Specifying a tag will include the tagged changeset but not the
1522 changeset containing the tag.
1523 changeset containing the tag.
1523
1524
1524 .. container:: verbose
1525 .. container:: verbose
1525
1526
1526 For efficiency, hardlinks are used for cloning whenever the
1527 For efficiency, hardlinks are used for cloning whenever the
1527 source and destination are on the same filesystem (note this
1528 source and destination are on the same filesystem (note this
1528 applies only to the repository data, not to the working
1529 applies only to the repository data, not to the working
1529 directory). Some filesystems, such as AFS, implement hardlinking
1530 directory). Some filesystems, such as AFS, implement hardlinking
1530 incorrectly, but do not report errors. In these cases, use the
1531 incorrectly, but do not report errors. In these cases, use the
1531 --pull option to avoid hardlinking.
1532 --pull option to avoid hardlinking.
1532
1533
1533 In some cases, you can clone repositories and the working
1534 In some cases, you can clone repositories and the working
1534 directory using full hardlinks with ::
1535 directory using full hardlinks with ::
1535
1536
1536 $ cp -al REPO REPOCLONE
1537 $ cp -al REPO REPOCLONE
1537
1538
1538 This is the fastest way to clone, but it is not always safe. The
1539 This is the fastest way to clone, but it is not always safe. The
1539 operation is not atomic (making sure REPO is not modified during
1540 operation is not atomic (making sure REPO is not modified during
1540 the operation is up to you) and you have to make sure your
1541 the operation is up to you) and you have to make sure your
1541 editor breaks hardlinks (Emacs and most Linux Kernel tools do
1542 editor breaks hardlinks (Emacs and most Linux Kernel tools do
1542 so). Also, this is not compatible with certain extensions that
1543 so). Also, this is not compatible with certain extensions that
1543 place their metadata under the .hg directory, such as mq.
1544 place their metadata under the .hg directory, such as mq.
1544
1545
1545 Mercurial will update the working directory to the first applicable
1546 Mercurial will update the working directory to the first applicable
1546 revision from this list:
1547 revision from this list:
1547
1548
1548 a) null if -U or the source repository has no changesets
1549 a) null if -U or the source repository has no changesets
1549 b) if -u . and the source repository is local, the first parent of
1550 b) if -u . and the source repository is local, the first parent of
1550 the source repository's working directory
1551 the source repository's working directory
1551 c) the changeset specified with -u (if a branch name, this means the
1552 c) the changeset specified with -u (if a branch name, this means the
1552 latest head of that branch)
1553 latest head of that branch)
1553 d) the changeset specified with -r
1554 d) the changeset specified with -r
1554 e) the tipmost head specified with -b
1555 e) the tipmost head specified with -b
1555 f) the tipmost head specified with the url#branch source syntax
1556 f) the tipmost head specified with the url#branch source syntax
1556 g) the revision marked with the '@' bookmark, if present
1557 g) the revision marked with the '@' bookmark, if present
1557 h) the tipmost head of the default branch
1558 h) the tipmost head of the default branch
1558 i) tip
1559 i) tip
1559
1560
1560 When cloning from servers that support it, Mercurial may fetch
1561 When cloning from servers that support it, Mercurial may fetch
1561 pre-generated data from a server-advertised URL. When this is done,
1562 pre-generated data from a server-advertised URL. When this is done,
1562 hooks operating on incoming changesets and changegroups may fire twice,
1563 hooks operating on incoming changesets and changegroups may fire twice,
1563 once for the bundle fetched from the URL and another for any additional
1564 once for the bundle fetched from the URL and another for any additional
1564 data not fetched from this URL. In addition, if an error occurs, the
1565 data not fetched from this URL. In addition, if an error occurs, the
1565 repository may be rolled back to a partial clone. This behavior may
1566 repository may be rolled back to a partial clone. This behavior may
1566 change in future releases. See :hg:`help -e clonebundles` for more.
1567 change in future releases. See :hg:`help -e clonebundles` for more.
1567
1568
1568 Examples:
1569 Examples:
1569
1570
1570 - clone a remote repository to a new directory named hg/::
1571 - clone a remote repository to a new directory named hg/::
1571
1572
1572 hg clone http://selenic.com/hg
1573 hg clone http://selenic.com/hg
1573
1574
1574 - create a lightweight local clone::
1575 - create a lightweight local clone::
1575
1576
1576 hg clone project/ project-feature/
1577 hg clone project/ project-feature/
1577
1578
1578 - clone from an absolute path on an ssh server (note double-slash)::
1579 - clone from an absolute path on an ssh server (note double-slash)::
1579
1580
1580 hg clone ssh://user@server//home/projects/alpha/
1581 hg clone ssh://user@server//home/projects/alpha/
1581
1582
1582 - do a high-speed clone over a LAN while checking out a
1583 - do a high-speed clone over a LAN while checking out a
1583 specified version::
1584 specified version::
1584
1585
1585 hg clone --uncompressed http://server/repo -u 1.5
1586 hg clone --uncompressed http://server/repo -u 1.5
1586
1587
1587 - create a repository without changesets after a particular revision::
1588 - create a repository without changesets after a particular revision::
1588
1589
1589 hg clone -r 04e544 experimental/ good/
1590 hg clone -r 04e544 experimental/ good/
1590
1591
1591 - clone (and track) a particular named branch::
1592 - clone (and track) a particular named branch::
1592
1593
1593 hg clone http://selenic.com/hg#stable
1594 hg clone http://selenic.com/hg#stable
1594
1595
1595 See :hg:`help urls` for details on specifying URLs.
1596 See :hg:`help urls` for details on specifying URLs.
1596
1597
1597 Returns 0 on success.
1598 Returns 0 on success.
1598 """
1599 """
1599 if opts.get('noupdate') and opts.get('updaterev'):
1600 if opts.get('noupdate') and opts.get('updaterev'):
1600 raise error.Abort(_("cannot specify both --noupdate and --updaterev"))
1601 raise error.Abort(_("cannot specify both --noupdate and --updaterev"))
1601
1602
1602 r = hg.clone(ui, opts, source, dest,
1603 r = hg.clone(ui, opts, source, dest,
1603 pull=opts.get('pull'),
1604 pull=opts.get('pull'),
1604 stream=opts.get('uncompressed'),
1605 stream=opts.get('uncompressed'),
1605 rev=opts.get('rev'),
1606 rev=opts.get('rev'),
1606 update=opts.get('updaterev') or not opts.get('noupdate'),
1607 update=opts.get('updaterev') or not opts.get('noupdate'),
1607 branch=opts.get('branch'),
1608 branch=opts.get('branch'),
1608 shareopts=opts.get('shareopts'))
1609 shareopts=opts.get('shareopts'))
1609
1610
1610 return r is None
1611 return r is None
1611
1612
1612 @command('^commit|ci',
1613 @command('^commit|ci',
1613 [('A', 'addremove', None,
1614 [('A', 'addremove', None,
1614 _('mark new/missing files as added/removed before committing')),
1615 _('mark new/missing files as added/removed before committing')),
1615 ('', 'close-branch', None,
1616 ('', 'close-branch', None,
1616 _('mark a branch head as closed')),
1617 _('mark a branch head as closed')),
1617 ('', 'amend', None, _('amend the parent of the working directory')),
1618 ('', 'amend', None, _('amend the parent of the working directory')),
1618 ('s', 'secret', None, _('use the secret phase for committing')),
1619 ('s', 'secret', None, _('use the secret phase for committing')),
1619 ('e', 'edit', None, _('invoke editor on commit messages')),
1620 ('e', 'edit', None, _('invoke editor on commit messages')),
1620 ('i', 'interactive', None, _('use interactive mode')),
1621 ('i', 'interactive', None, _('use interactive mode')),
1621 ] + walkopts + commitopts + commitopts2 + subrepoopts,
1622 ] + walkopts + commitopts + commitopts2 + subrepoopts,
1622 _('[OPTION]... [FILE]...'),
1623 _('[OPTION]... [FILE]...'),
1623 inferrepo=True)
1624 inferrepo=True)
1624 def commit(ui, repo, *pats, **opts):
1625 def commit(ui, repo, *pats, **opts):
1625 """commit the specified files or all outstanding changes
1626 """commit the specified files or all outstanding changes
1626
1627
1627 Commit changes to the given files into the repository. Unlike a
1628 Commit changes to the given files into the repository. Unlike a
1628 centralized SCM, this operation is a local operation. See
1629 centralized SCM, this operation is a local operation. See
1629 :hg:`push` for a way to actively distribute your changes.
1630 :hg:`push` for a way to actively distribute your changes.
1630
1631
1631 If a list of files is omitted, all changes reported by :hg:`status`
1632 If a list of files is omitted, all changes reported by :hg:`status`
1632 will be committed.
1633 will be committed.
1633
1634
1634 If you are committing the result of a merge, do not provide any
1635 If you are committing the result of a merge, do not provide any
1635 filenames or -I/-X filters.
1636 filenames or -I/-X filters.
1636
1637
1637 If no commit message is specified, Mercurial starts your
1638 If no commit message is specified, Mercurial starts your
1638 configured editor where you can enter a message. In case your
1639 configured editor where you can enter a message. In case your
1639 commit fails, you will find a backup of your message in
1640 commit fails, you will find a backup of your message in
1640 ``.hg/last-message.txt``.
1641 ``.hg/last-message.txt``.
1641
1642
1642 The --close-branch flag can be used to mark the current branch
1643 The --close-branch flag can be used to mark the current branch
1643 head closed. When all heads of a branch are closed, the branch
1644 head closed. When all heads of a branch are closed, the branch
1644 will be considered closed and no longer listed.
1645 will be considered closed and no longer listed.
1645
1646
1646 The --amend flag can be used to amend the parent of the
1647 The --amend flag can be used to amend the parent of the
1647 working directory with a new commit that contains the changes
1648 working directory with a new commit that contains the changes
1648 in the parent in addition to those currently reported by :hg:`status`,
1649 in the parent in addition to those currently reported by :hg:`status`,
1649 if there are any. The old commit is stored in a backup bundle in
1650 if there are any. The old commit is stored in a backup bundle in
1650 ``.hg/strip-backup`` (see :hg:`help bundle` and :hg:`help unbundle`
1651 ``.hg/strip-backup`` (see :hg:`help bundle` and :hg:`help unbundle`
1651 on how to restore it).
1652 on how to restore it).
1652
1653
1653 Message, user and date are taken from the amended commit unless
1654 Message, user and date are taken from the amended commit unless
1654 specified. When a message isn't specified on the command line,
1655 specified. When a message isn't specified on the command line,
1655 the editor will open with the message of the amended commit.
1656 the editor will open with the message of the amended commit.
1656
1657
1657 It is not possible to amend public changesets (see :hg:`help phases`)
1658 It is not possible to amend public changesets (see :hg:`help phases`)
1658 or changesets that have children.
1659 or changesets that have children.
1659
1660
1660 See :hg:`help dates` for a list of formats valid for -d/--date.
1661 See :hg:`help dates` for a list of formats valid for -d/--date.
1661
1662
1662 Returns 0 on success, 1 if nothing changed.
1663 Returns 0 on success, 1 if nothing changed.
1663
1664
1664 .. container:: verbose
1665 .. container:: verbose
1665
1666
1666 Examples:
1667 Examples:
1667
1668
1668 - commit all files ending in .py::
1669 - commit all files ending in .py::
1669
1670
1670 hg commit --include "set:**.py"
1671 hg commit --include "set:**.py"
1671
1672
1672 - commit all non-binary files::
1673 - commit all non-binary files::
1673
1674
1674 hg commit --exclude "set:binary()"
1675 hg commit --exclude "set:binary()"
1675
1676
1676 - amend the current commit and set the date to now::
1677 - amend the current commit and set the date to now::
1677
1678
1678 hg commit --amend --date now
1679 hg commit --amend --date now
1679 """
1680 """
1680 wlock = lock = None
1681 wlock = lock = None
1681 try:
1682 try:
1682 wlock = repo.wlock()
1683 wlock = repo.wlock()
1683 lock = repo.lock()
1684 lock = repo.lock()
1684 return _docommit(ui, repo, *pats, **opts)
1685 return _docommit(ui, repo, *pats, **opts)
1685 finally:
1686 finally:
1686 release(lock, wlock)
1687 release(lock, wlock)
1687
1688
1688 def _docommit(ui, repo, *pats, **opts):
1689 def _docommit(ui, repo, *pats, **opts):
1689 if opts.get('interactive'):
1690 if opts.get('interactive'):
1690 opts.pop('interactive')
1691 opts.pop('interactive')
1691 cmdutil.dorecord(ui, repo, commit, None, False,
1692 cmdutil.dorecord(ui, repo, commit, None, False,
1692 cmdutil.recordfilter, *pats, **opts)
1693 cmdutil.recordfilter, *pats, **opts)
1693 return
1694 return
1694
1695
1695 if opts.get('subrepos'):
1696 if opts.get('subrepos'):
1696 if opts.get('amend'):
1697 if opts.get('amend'):
1697 raise error.Abort(_('cannot amend with --subrepos'))
1698 raise error.Abort(_('cannot amend with --subrepos'))
1698 # Let --subrepos on the command line override config setting.
1699 # Let --subrepos on the command line override config setting.
1699 ui.setconfig('ui', 'commitsubrepos', True, 'commit')
1700 ui.setconfig('ui', 'commitsubrepos', True, 'commit')
1700
1701
1701 cmdutil.checkunfinished(repo, commit=True)
1702 cmdutil.checkunfinished(repo, commit=True)
1702
1703
1703 branch = repo[None].branch()
1704 branch = repo[None].branch()
1704 bheads = repo.branchheads(branch)
1705 bheads = repo.branchheads(branch)
1705
1706
1706 extra = {}
1707 extra = {}
1707 if opts.get('close_branch'):
1708 if opts.get('close_branch'):
1708 extra['close'] = 1
1709 extra['close'] = 1
1709
1710
1710 if not bheads:
1711 if not bheads:
1711 raise error.Abort(_('can only close branch heads'))
1712 raise error.Abort(_('can only close branch heads'))
1712 elif opts.get('amend'):
1713 elif opts.get('amend'):
1713 if repo[None].parents()[0].p1().branch() != branch and \
1714 if repo[None].parents()[0].p1().branch() != branch and \
1714 repo[None].parents()[0].p2().branch() != branch:
1715 repo[None].parents()[0].p2().branch() != branch:
1715 raise error.Abort(_('can only close branch heads'))
1716 raise error.Abort(_('can only close branch heads'))
1716
1717
1717 if opts.get('amend'):
1718 if opts.get('amend'):
1718 if ui.configbool('ui', 'commitsubrepos'):
1719 if ui.configbool('ui', 'commitsubrepos'):
1719 raise error.Abort(_('cannot amend with ui.commitsubrepos enabled'))
1720 raise error.Abort(_('cannot amend with ui.commitsubrepos enabled'))
1720
1721
1721 old = repo['.']
1722 old = repo['.']
1722 if not old.mutable():
1723 if not old.mutable():
1723 raise error.Abort(_('cannot amend public changesets'))
1724 raise error.Abort(_('cannot amend public changesets'))
1724 if len(repo[None].parents()) > 1:
1725 if len(repo[None].parents()) > 1:
1725 raise error.Abort(_('cannot amend while merging'))
1726 raise error.Abort(_('cannot amend while merging'))
1726 allowunstable = obsolete.isenabled(repo, obsolete.allowunstableopt)
1727 allowunstable = obsolete.isenabled(repo, obsolete.allowunstableopt)
1727 if not allowunstable and old.children():
1728 if not allowunstable and old.children():
1728 raise error.Abort(_('cannot amend changeset with children'))
1729 raise error.Abort(_('cannot amend changeset with children'))
1729
1730
1730 # Currently histedit gets confused if an amend happens while histedit
1731 # Currently histedit gets confused if an amend happens while histedit
1731 # is in progress. Since we have a checkunfinished command, we are
1732 # is in progress. Since we have a checkunfinished command, we are
1732 # temporarily honoring it.
1733 # temporarily honoring it.
1733 #
1734 #
1734 # Note: eventually this guard will be removed. Please do not expect
1735 # Note: eventually this guard will be removed. Please do not expect
1735 # this behavior to remain.
1736 # this behavior to remain.
1736 if not obsolete.isenabled(repo, obsolete.createmarkersopt):
1737 if not obsolete.isenabled(repo, obsolete.createmarkersopt):
1737 cmdutil.checkunfinished(repo)
1738 cmdutil.checkunfinished(repo)
1738
1739
1739 # commitfunc is used only for temporary amend commit by cmdutil.amend
1740 # commitfunc is used only for temporary amend commit by cmdutil.amend
1740 def commitfunc(ui, repo, message, match, opts):
1741 def commitfunc(ui, repo, message, match, opts):
1741 return repo.commit(message,
1742 return repo.commit(message,
1742 opts.get('user') or old.user(),
1743 opts.get('user') or old.user(),
1743 opts.get('date') or old.date(),
1744 opts.get('date') or old.date(),
1744 match,
1745 match,
1745 extra=extra)
1746 extra=extra)
1746
1747
1747 node = cmdutil.amend(ui, repo, commitfunc, old, extra, pats, opts)
1748 node = cmdutil.amend(ui, repo, commitfunc, old, extra, pats, opts)
1748 if node == old.node():
1749 if node == old.node():
1749 ui.status(_("nothing changed\n"))
1750 ui.status(_("nothing changed\n"))
1750 return 1
1751 return 1
1751 else:
1752 else:
1752 def commitfunc(ui, repo, message, match, opts):
1753 def commitfunc(ui, repo, message, match, opts):
1753 backup = ui.backupconfig('phases', 'new-commit')
1754 backup = ui.backupconfig('phases', 'new-commit')
1754 baseui = repo.baseui
1755 baseui = repo.baseui
1755 basebackup = baseui.backupconfig('phases', 'new-commit')
1756 basebackup = baseui.backupconfig('phases', 'new-commit')
1756 try:
1757 try:
1757 if opts.get('secret'):
1758 if opts.get('secret'):
1758 ui.setconfig('phases', 'new-commit', 'secret', 'commit')
1759 ui.setconfig('phases', 'new-commit', 'secret', 'commit')
1759 # Propagate to subrepos
1760 # Propagate to subrepos
1760 baseui.setconfig('phases', 'new-commit', 'secret', 'commit')
1761 baseui.setconfig('phases', 'new-commit', 'secret', 'commit')
1761
1762
1762 editform = cmdutil.mergeeditform(repo[None], 'commit.normal')
1763 editform = cmdutil.mergeeditform(repo[None], 'commit.normal')
1763 editor = cmdutil.getcommiteditor(editform=editform, **opts)
1764 editor = cmdutil.getcommiteditor(editform=editform, **opts)
1764 return repo.commit(message, opts.get('user'), opts.get('date'),
1765 return repo.commit(message, opts.get('user'), opts.get('date'),
1765 match,
1766 match,
1766 editor=editor,
1767 editor=editor,
1767 extra=extra)
1768 extra=extra)
1768 finally:
1769 finally:
1769 ui.restoreconfig(backup)
1770 ui.restoreconfig(backup)
1770 repo.baseui.restoreconfig(basebackup)
1771 repo.baseui.restoreconfig(basebackup)
1771
1772
1772
1773
1773 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
1774 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
1774
1775
1775 if not node:
1776 if not node:
1776 stat = cmdutil.postcommitstatus(repo, pats, opts)
1777 stat = cmdutil.postcommitstatus(repo, pats, opts)
1777 if stat[3]:
1778 if stat[3]:
1778 ui.status(_("nothing changed (%d missing files, see "
1779 ui.status(_("nothing changed (%d missing files, see "
1779 "'hg status')\n") % len(stat[3]))
1780 "'hg status')\n") % len(stat[3]))
1780 else:
1781 else:
1781 ui.status(_("nothing changed\n"))
1782 ui.status(_("nothing changed\n"))
1782 return 1
1783 return 1
1783
1784
1784 cmdutil.commitstatus(repo, node, branch, bheads, opts)
1785 cmdutil.commitstatus(repo, node, branch, bheads, opts)
1785
1786
1786 @command('config|showconfig|debugconfig',
1787 @command('config|showconfig|debugconfig',
1787 [('u', 'untrusted', None, _('show untrusted configuration options')),
1788 [('u', 'untrusted', None, _('show untrusted configuration options')),
1788 ('e', 'edit', None, _('edit user config')),
1789 ('e', 'edit', None, _('edit user config')),
1789 ('l', 'local', None, _('edit repository config')),
1790 ('l', 'local', None, _('edit repository config')),
1790 ('g', 'global', None, _('edit global config'))],
1791 ('g', 'global', None, _('edit global config'))],
1791 _('[-u] [NAME]...'),
1792 _('[-u] [NAME]...'),
1792 optionalrepo=True)
1793 optionalrepo=True)
1793 def config(ui, repo, *values, **opts):
1794 def config(ui, repo, *values, **opts):
1794 """show combined config settings from all hgrc files
1795 """show combined config settings from all hgrc files
1795
1796
1796 With no arguments, print names and values of all config items.
1797 With no arguments, print names and values of all config items.
1797
1798
1798 With one argument of the form section.name, print just the value
1799 With one argument of the form section.name, print just the value
1799 of that config item.
1800 of that config item.
1800
1801
1801 With multiple arguments, print names and values of all config
1802 With multiple arguments, print names and values of all config
1802 items with matching section names.
1803 items with matching section names.
1803
1804
1804 With --edit, start an editor on the user-level config file. With
1805 With --edit, start an editor on the user-level config file. With
1805 --global, edit the system-wide config file. With --local, edit the
1806 --global, edit the system-wide config file. With --local, edit the
1806 repository-level config file.
1807 repository-level config file.
1807
1808
1808 With --debug, the source (filename and line number) is printed
1809 With --debug, the source (filename and line number) is printed
1809 for each config item.
1810 for each config item.
1810
1811
1811 See :hg:`help config` for more information about config files.
1812 See :hg:`help config` for more information about config files.
1812
1813
1813 Returns 0 on success, 1 if NAME does not exist.
1814 Returns 0 on success, 1 if NAME does not exist.
1814
1815
1815 """
1816 """
1816
1817
1817 if opts.get('edit') or opts.get('local') or opts.get('global'):
1818 if opts.get('edit') or opts.get('local') or opts.get('global'):
1818 if opts.get('local') and opts.get('global'):
1819 if opts.get('local') and opts.get('global'):
1819 raise error.Abort(_("can't use --local and --global together"))
1820 raise error.Abort(_("can't use --local and --global together"))
1820
1821
1821 if opts.get('local'):
1822 if opts.get('local'):
1822 if not repo:
1823 if not repo:
1823 raise error.Abort(_("can't use --local outside a repository"))
1824 raise error.Abort(_("can't use --local outside a repository"))
1824 paths = [repo.join('hgrc')]
1825 paths = [repo.join('hgrc')]
1825 elif opts.get('global'):
1826 elif opts.get('global'):
1826 paths = scmutil.systemrcpath()
1827 paths = scmutil.systemrcpath()
1827 else:
1828 else:
1828 paths = scmutil.userrcpath()
1829 paths = scmutil.userrcpath()
1829
1830
1830 for f in paths:
1831 for f in paths:
1831 if os.path.exists(f):
1832 if os.path.exists(f):
1832 break
1833 break
1833 else:
1834 else:
1834 if opts.get('global'):
1835 if opts.get('global'):
1835 samplehgrc = uimod.samplehgrcs['global']
1836 samplehgrc = uimod.samplehgrcs['global']
1836 elif opts.get('local'):
1837 elif opts.get('local'):
1837 samplehgrc = uimod.samplehgrcs['local']
1838 samplehgrc = uimod.samplehgrcs['local']
1838 else:
1839 else:
1839 samplehgrc = uimod.samplehgrcs['user']
1840 samplehgrc = uimod.samplehgrcs['user']
1840
1841
1841 f = paths[0]
1842 f = paths[0]
1842 fp = open(f, "w")
1843 fp = open(f, "w")
1843 fp.write(samplehgrc)
1844 fp.write(samplehgrc)
1844 fp.close()
1845 fp.close()
1845
1846
1846 editor = ui.geteditor()
1847 editor = ui.geteditor()
1847 ui.system("%s \"%s\"" % (editor, f),
1848 ui.system("%s \"%s\"" % (editor, f),
1848 onerr=error.Abort, errprefix=_("edit failed"))
1849 onerr=error.Abort, errprefix=_("edit failed"))
1849 return
1850 return
1850
1851
1851 for f in scmutil.rcpath():
1852 for f in scmutil.rcpath():
1852 ui.debug('read config from: %s\n' % f)
1853 ui.debug('read config from: %s\n' % f)
1853 untrusted = bool(opts.get('untrusted'))
1854 untrusted = bool(opts.get('untrusted'))
1854 if values:
1855 if values:
1855 sections = [v for v in values if '.' not in v]
1856 sections = [v for v in values if '.' not in v]
1856 items = [v for v in values if '.' in v]
1857 items = [v for v in values if '.' in v]
1857 if len(items) > 1 or items and sections:
1858 if len(items) > 1 or items and sections:
1858 raise error.Abort(_('only one config item permitted'))
1859 raise error.Abort(_('only one config item permitted'))
1859 matched = False
1860 matched = False
1860 for section, name, value in ui.walkconfig(untrusted=untrusted):
1861 for section, name, value in ui.walkconfig(untrusted=untrusted):
1861 value = str(value).replace('\n', '\\n')
1862 value = str(value).replace('\n', '\\n')
1862 sectname = section + '.' + name
1863 sectname = section + '.' + name
1863 if values:
1864 if values:
1864 for v in values:
1865 for v in values:
1865 if v == section:
1866 if v == section:
1866 ui.debug('%s: ' %
1867 ui.debug('%s: ' %
1867 ui.configsource(section, name, untrusted))
1868 ui.configsource(section, name, untrusted))
1868 ui.write('%s=%s\n' % (sectname, value))
1869 ui.write('%s=%s\n' % (sectname, value))
1869 matched = True
1870 matched = True
1870 elif v == sectname:
1871 elif v == sectname:
1871 ui.debug('%s: ' %
1872 ui.debug('%s: ' %
1872 ui.configsource(section, name, untrusted))
1873 ui.configsource(section, name, untrusted))
1873 ui.write(value, '\n')
1874 ui.write(value, '\n')
1874 matched = True
1875 matched = True
1875 else:
1876 else:
1876 ui.debug('%s: ' %
1877 ui.debug('%s: ' %
1877 ui.configsource(section, name, untrusted))
1878 ui.configsource(section, name, untrusted))
1878 ui.write('%s=%s\n' % (sectname, value))
1879 ui.write('%s=%s\n' % (sectname, value))
1879 matched = True
1880 matched = True
1880 if matched:
1881 if matched:
1881 return 0
1882 return 0
1882 return 1
1883 return 1
1883
1884
1884 @command('copy|cp',
1885 @command('copy|cp',
1885 [('A', 'after', None, _('record a copy that has already occurred')),
1886 [('A', 'after', None, _('record a copy that has already occurred')),
1886 ('f', 'force', None, _('forcibly copy over an existing managed file')),
1887 ('f', 'force', None, _('forcibly copy over an existing managed file')),
1887 ] + walkopts + dryrunopts,
1888 ] + walkopts + dryrunopts,
1888 _('[OPTION]... [SOURCE]... DEST'))
1889 _('[OPTION]... [SOURCE]... DEST'))
1889 def copy(ui, repo, *pats, **opts):
1890 def copy(ui, repo, *pats, **opts):
1890 """mark files as copied for the next commit
1891 """mark files as copied for the next commit
1891
1892
1892 Mark dest as having copies of source files. If dest is a
1893 Mark dest as having copies of source files. If dest is a
1893 directory, copies are put in that directory. If dest is a file,
1894 directory, copies are put in that directory. If dest is a file,
1894 the source must be a single file.
1895 the source must be a single file.
1895
1896
1896 By default, this command copies the contents of files as they
1897 By default, this command copies the contents of files as they
1897 exist in the working directory. If invoked with -A/--after, the
1898 exist in the working directory. If invoked with -A/--after, the
1898 operation is recorded, but no copying is performed.
1899 operation is recorded, but no copying is performed.
1899
1900
1900 This command takes effect with the next commit. To undo a copy
1901 This command takes effect with the next commit. To undo a copy
1901 before that, see :hg:`revert`.
1902 before that, see :hg:`revert`.
1902
1903
1903 Returns 0 on success, 1 if errors are encountered.
1904 Returns 0 on success, 1 if errors are encountered.
1904 """
1905 """
1905 with repo.wlock(False):
1906 with repo.wlock(False):
1906 return cmdutil.copy(ui, repo, pats, opts)
1907 return cmdutil.copy(ui, repo, pats, opts)
1907
1908
1908 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
1909 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
1909 def debugancestor(ui, repo, *args):
1910 def debugancestor(ui, repo, *args):
1910 """find the ancestor revision of two revisions in a given index"""
1911 """find the ancestor revision of two revisions in a given index"""
1911 if len(args) == 3:
1912 if len(args) == 3:
1912 index, rev1, rev2 = args
1913 index, rev1, rev2 = args
1913 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), index)
1914 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), index)
1914 lookup = r.lookup
1915 lookup = r.lookup
1915 elif len(args) == 2:
1916 elif len(args) == 2:
1916 if not repo:
1917 if not repo:
1917 raise error.Abort(_("there is no Mercurial repository here "
1918 raise error.Abort(_("there is no Mercurial repository here "
1918 "(.hg not found)"))
1919 "(.hg not found)"))
1919 rev1, rev2 = args
1920 rev1, rev2 = args
1920 r = repo.changelog
1921 r = repo.changelog
1921 lookup = repo.lookup
1922 lookup = repo.lookup
1922 else:
1923 else:
1923 raise error.Abort(_('either two or three arguments required'))
1924 raise error.Abort(_('either two or three arguments required'))
1924 a = r.ancestor(lookup(rev1), lookup(rev2))
1925 a = r.ancestor(lookup(rev1), lookup(rev2))
1925 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1926 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1926
1927
1927 @command('debugbuilddag',
1928 @command('debugbuilddag',
1928 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
1929 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
1929 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
1930 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
1930 ('n', 'new-file', None, _('add new file at each rev'))],
1931 ('n', 'new-file', None, _('add new file at each rev'))],
1931 _('[OPTION]... [TEXT]'))
1932 _('[OPTION]... [TEXT]'))
1932 def debugbuilddag(ui, repo, text=None,
1933 def debugbuilddag(ui, repo, text=None,
1933 mergeable_file=False,
1934 mergeable_file=False,
1934 overwritten_file=False,
1935 overwritten_file=False,
1935 new_file=False):
1936 new_file=False):
1936 """builds a repo with a given DAG from scratch in the current empty repo
1937 """builds a repo with a given DAG from scratch in the current empty repo
1937
1938
1938 The description of the DAG is read from stdin if not given on the
1939 The description of the DAG is read from stdin if not given on the
1939 command line.
1940 command line.
1940
1941
1941 Elements:
1942 Elements:
1942
1943
1943 - "+n" is a linear run of n nodes based on the current default parent
1944 - "+n" is a linear run of n nodes based on the current default parent
1944 - "." is a single node based on the current default parent
1945 - "." is a single node based on the current default parent
1945 - "$" resets the default parent to null (implied at the start);
1946 - "$" resets the default parent to null (implied at the start);
1946 otherwise the default parent is always the last node created
1947 otherwise the default parent is always the last node created
1947 - "<p" sets the default parent to the backref p
1948 - "<p" sets the default parent to the backref p
1948 - "*p" is a fork at parent p, which is a backref
1949 - "*p" is a fork at parent p, which is a backref
1949 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
1950 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
1950 - "/p2" is a merge of the preceding node and p2
1951 - "/p2" is a merge of the preceding node and p2
1951 - ":tag" defines a local tag for the preceding node
1952 - ":tag" defines a local tag for the preceding node
1952 - "@branch" sets the named branch for subsequent nodes
1953 - "@branch" sets the named branch for subsequent nodes
1953 - "#...\\n" is a comment up to the end of the line
1954 - "#...\\n" is a comment up to the end of the line
1954
1955
1955 Whitespace between the above elements is ignored.
1956 Whitespace between the above elements is ignored.
1956
1957
1957 A backref is either
1958 A backref is either
1958
1959
1959 - a number n, which references the node curr-n, where curr is the current
1960 - a number n, which references the node curr-n, where curr is the current
1960 node, or
1961 node, or
1961 - the name of a local tag you placed earlier using ":tag", or
1962 - the name of a local tag you placed earlier using ":tag", or
1962 - empty to denote the default parent.
1963 - empty to denote the default parent.
1963
1964
1964 All string valued-elements are either strictly alphanumeric, or must
1965 All string valued-elements are either strictly alphanumeric, or must
1965 be enclosed in double quotes ("..."), with "\\" as escape character.
1966 be enclosed in double quotes ("..."), with "\\" as escape character.
1966 """
1967 """
1967
1968
1968 if text is None:
1969 if text is None:
1969 ui.status(_("reading DAG from stdin\n"))
1970 ui.status(_("reading DAG from stdin\n"))
1970 text = ui.fin.read()
1971 text = ui.fin.read()
1971
1972
1972 cl = repo.changelog
1973 cl = repo.changelog
1973 if len(cl) > 0:
1974 if len(cl) > 0:
1974 raise error.Abort(_('repository is not empty'))
1975 raise error.Abort(_('repository is not empty'))
1975
1976
1976 # determine number of revs in DAG
1977 # determine number of revs in DAG
1977 total = 0
1978 total = 0
1978 for type, data in dagparser.parsedag(text):
1979 for type, data in dagparser.parsedag(text):
1979 if type == 'n':
1980 if type == 'n':
1980 total += 1
1981 total += 1
1981
1982
1982 if mergeable_file:
1983 if mergeable_file:
1983 linesperrev = 2
1984 linesperrev = 2
1984 # make a file with k lines per rev
1985 # make a file with k lines per rev
1985 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
1986 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
1986 initialmergedlines.append("")
1987 initialmergedlines.append("")
1987
1988
1988 tags = []
1989 tags = []
1989
1990
1990 wlock = lock = tr = None
1991 wlock = lock = tr = None
1991 try:
1992 try:
1992 wlock = repo.wlock()
1993 wlock = repo.wlock()
1993 lock = repo.lock()
1994 lock = repo.lock()
1994 tr = repo.transaction("builddag")
1995 tr = repo.transaction("builddag")
1995
1996
1996 at = -1
1997 at = -1
1997 atbranch = 'default'
1998 atbranch = 'default'
1998 nodeids = []
1999 nodeids = []
1999 id = 0
2000 id = 0
2000 ui.progress(_('building'), id, unit=_('revisions'), total=total)
2001 ui.progress(_('building'), id, unit=_('revisions'), total=total)
2001 for type, data in dagparser.parsedag(text):
2002 for type, data in dagparser.parsedag(text):
2002 if type == 'n':
2003 if type == 'n':
2003 ui.note(('node %s\n' % str(data)))
2004 ui.note(('node %s\n' % str(data)))
2004 id, ps = data
2005 id, ps = data
2005
2006
2006 files = []
2007 files = []
2007 fctxs = {}
2008 fctxs = {}
2008
2009
2009 p2 = None
2010 p2 = None
2010 if mergeable_file:
2011 if mergeable_file:
2011 fn = "mf"
2012 fn = "mf"
2012 p1 = repo[ps[0]]
2013 p1 = repo[ps[0]]
2013 if len(ps) > 1:
2014 if len(ps) > 1:
2014 p2 = repo[ps[1]]
2015 p2 = repo[ps[1]]
2015 pa = p1.ancestor(p2)
2016 pa = p1.ancestor(p2)
2016 base, local, other = [x[fn].data() for x in (pa, p1,
2017 base, local, other = [x[fn].data() for x in (pa, p1,
2017 p2)]
2018 p2)]
2018 m3 = simplemerge.Merge3Text(base, local, other)
2019 m3 = simplemerge.Merge3Text(base, local, other)
2019 ml = [l.strip() for l in m3.merge_lines()]
2020 ml = [l.strip() for l in m3.merge_lines()]
2020 ml.append("")
2021 ml.append("")
2021 elif at > 0:
2022 elif at > 0:
2022 ml = p1[fn].data().split("\n")
2023 ml = p1[fn].data().split("\n")
2023 else:
2024 else:
2024 ml = initialmergedlines
2025 ml = initialmergedlines
2025 ml[id * linesperrev] += " r%i" % id
2026 ml[id * linesperrev] += " r%i" % id
2026 mergedtext = "\n".join(ml)
2027 mergedtext = "\n".join(ml)
2027 files.append(fn)
2028 files.append(fn)
2028 fctxs[fn] = context.memfilectx(repo, fn, mergedtext)
2029 fctxs[fn] = context.memfilectx(repo, fn, mergedtext)
2029
2030
2030 if overwritten_file:
2031 if overwritten_file:
2031 fn = "of"
2032 fn = "of"
2032 files.append(fn)
2033 files.append(fn)
2033 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
2034 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
2034
2035
2035 if new_file:
2036 if new_file:
2036 fn = "nf%i" % id
2037 fn = "nf%i" % id
2037 files.append(fn)
2038 files.append(fn)
2038 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
2039 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
2039 if len(ps) > 1:
2040 if len(ps) > 1:
2040 if not p2:
2041 if not p2:
2041 p2 = repo[ps[1]]
2042 p2 = repo[ps[1]]
2042 for fn in p2:
2043 for fn in p2:
2043 if fn.startswith("nf"):
2044 if fn.startswith("nf"):
2044 files.append(fn)
2045 files.append(fn)
2045 fctxs[fn] = p2[fn]
2046 fctxs[fn] = p2[fn]
2046
2047
2047 def fctxfn(repo, cx, path):
2048 def fctxfn(repo, cx, path):
2048 return fctxs.get(path)
2049 return fctxs.get(path)
2049
2050
2050 if len(ps) == 0 or ps[0] < 0:
2051 if len(ps) == 0 or ps[0] < 0:
2051 pars = [None, None]
2052 pars = [None, None]
2052 elif len(ps) == 1:
2053 elif len(ps) == 1:
2053 pars = [nodeids[ps[0]], None]
2054 pars = [nodeids[ps[0]], None]
2054 else:
2055 else:
2055 pars = [nodeids[p] for p in ps]
2056 pars = [nodeids[p] for p in ps]
2056 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
2057 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
2057 date=(id, 0),
2058 date=(id, 0),
2058 user="debugbuilddag",
2059 user="debugbuilddag",
2059 extra={'branch': atbranch})
2060 extra={'branch': atbranch})
2060 nodeid = repo.commitctx(cx)
2061 nodeid = repo.commitctx(cx)
2061 nodeids.append(nodeid)
2062 nodeids.append(nodeid)
2062 at = id
2063 at = id
2063 elif type == 'l':
2064 elif type == 'l':
2064 id, name = data
2065 id, name = data
2065 ui.note(('tag %s\n' % name))
2066 ui.note(('tag %s\n' % name))
2066 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
2067 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
2067 elif type == 'a':
2068 elif type == 'a':
2068 ui.note(('branch %s\n' % data))
2069 ui.note(('branch %s\n' % data))
2069 atbranch = data
2070 atbranch = data
2070 ui.progress(_('building'), id, unit=_('revisions'), total=total)
2071 ui.progress(_('building'), id, unit=_('revisions'), total=total)
2071 tr.close()
2072 tr.close()
2072
2073
2073 if tags:
2074 if tags:
2074 repo.vfs.write("localtags", "".join(tags))
2075 repo.vfs.write("localtags", "".join(tags))
2075 finally:
2076 finally:
2076 ui.progress(_('building'), None)
2077 ui.progress(_('building'), None)
2077 release(tr, lock, wlock)
2078 release(tr, lock, wlock)
2078
2079
2079 @command('debugbundle',
2080 @command('debugbundle',
2080 [('a', 'all', None, _('show all details')),
2081 [('a', 'all', None, _('show all details')),
2081 ('', 'spec', None, _('print the bundlespec of the bundle'))],
2082 ('', 'spec', None, _('print the bundlespec of the bundle'))],
2082 _('FILE'),
2083 _('FILE'),
2083 norepo=True)
2084 norepo=True)
2084 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
2085 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
2085 """lists the contents of a bundle"""
2086 """lists the contents of a bundle"""
2086 with hg.openpath(ui, bundlepath) as f:
2087 with hg.openpath(ui, bundlepath) as f:
2087 if spec:
2088 if spec:
2088 spec = exchange.getbundlespec(ui, f)
2089 spec = exchange.getbundlespec(ui, f)
2089 ui.write('%s\n' % spec)
2090 ui.write('%s\n' % spec)
2090 return
2091 return
2091
2092
2092 gen = exchange.readbundle(ui, f, bundlepath)
2093 gen = exchange.readbundle(ui, f, bundlepath)
2093 if isinstance(gen, bundle2.unbundle20):
2094 if isinstance(gen, bundle2.unbundle20):
2094 return _debugbundle2(ui, gen, all=all, **opts)
2095 return _debugbundle2(ui, gen, all=all, **opts)
2095 _debugchangegroup(ui, gen, all=all, **opts)
2096 _debugchangegroup(ui, gen, all=all, **opts)
2096
2097
2097 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
2098 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
2098 indent_string = ' ' * indent
2099 indent_string = ' ' * indent
2099 if all:
2100 if all:
2100 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
2101 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
2101 % indent_string)
2102 % indent_string)
2102
2103
2103 def showchunks(named):
2104 def showchunks(named):
2104 ui.write("\n%s%s\n" % (indent_string, named))
2105 ui.write("\n%s%s\n" % (indent_string, named))
2105 chain = None
2106 chain = None
2106 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
2107 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
2107 node = chunkdata['node']
2108 node = chunkdata['node']
2108 p1 = chunkdata['p1']
2109 p1 = chunkdata['p1']
2109 p2 = chunkdata['p2']
2110 p2 = chunkdata['p2']
2110 cs = chunkdata['cs']
2111 cs = chunkdata['cs']
2111 deltabase = chunkdata['deltabase']
2112 deltabase = chunkdata['deltabase']
2112 delta = chunkdata['delta']
2113 delta = chunkdata['delta']
2113 ui.write("%s%s %s %s %s %s %s\n" %
2114 ui.write("%s%s %s %s %s %s %s\n" %
2114 (indent_string, hex(node), hex(p1), hex(p2),
2115 (indent_string, hex(node), hex(p1), hex(p2),
2115 hex(cs), hex(deltabase), len(delta)))
2116 hex(cs), hex(deltabase), len(delta)))
2116 chain = node
2117 chain = node
2117
2118
2118 chunkdata = gen.changelogheader()
2119 chunkdata = gen.changelogheader()
2119 showchunks("changelog")
2120 showchunks("changelog")
2120 chunkdata = gen.manifestheader()
2121 chunkdata = gen.manifestheader()
2121 showchunks("manifest")
2122 showchunks("manifest")
2122 for chunkdata in iter(gen.filelogheader, {}):
2123 for chunkdata in iter(gen.filelogheader, {}):
2123 fname = chunkdata['filename']
2124 fname = chunkdata['filename']
2124 showchunks(fname)
2125 showchunks(fname)
2125 else:
2126 else:
2126 if isinstance(gen, bundle2.unbundle20):
2127 if isinstance(gen, bundle2.unbundle20):
2127 raise error.Abort(_('use debugbundle2 for this file'))
2128 raise error.Abort(_('use debugbundle2 for this file'))
2128 chunkdata = gen.changelogheader()
2129 chunkdata = gen.changelogheader()
2129 chain = None
2130 chain = None
2130 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
2131 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
2131 node = chunkdata['node']
2132 node = chunkdata['node']
2132 ui.write("%s%s\n" % (indent_string, hex(node)))
2133 ui.write("%s%s\n" % (indent_string, hex(node)))
2133 chain = node
2134 chain = node
2134
2135
2135 def _debugbundle2(ui, gen, all=None, **opts):
2136 def _debugbundle2(ui, gen, all=None, **opts):
2136 """lists the contents of a bundle2"""
2137 """lists the contents of a bundle2"""
2137 if not isinstance(gen, bundle2.unbundle20):
2138 if not isinstance(gen, bundle2.unbundle20):
2138 raise error.Abort(_('not a bundle2 file'))
2139 raise error.Abort(_('not a bundle2 file'))
2139 ui.write(('Stream params: %s\n' % repr(gen.params)))
2140 ui.write(('Stream params: %s\n' % repr(gen.params)))
2140 for part in gen.iterparts():
2141 for part in gen.iterparts():
2141 ui.write('%s -- %r\n' % (part.type, repr(part.params)))
2142 ui.write('%s -- %r\n' % (part.type, repr(part.params)))
2142 if part.type == 'changegroup':
2143 if part.type == 'changegroup':
2143 version = part.params.get('version', '01')
2144 version = part.params.get('version', '01')
2144 cg = changegroup.getunbundler(version, part, 'UN')
2145 cg = changegroup.getunbundler(version, part, 'UN')
2145 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
2146 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
2146
2147
2147 @command('debugcreatestreamclonebundle', [], 'FILE')
2148 @command('debugcreatestreamclonebundle', [], 'FILE')
2148 def debugcreatestreamclonebundle(ui, repo, fname):
2149 def debugcreatestreamclonebundle(ui, repo, fname):
2149 """create a stream clone bundle file
2150 """create a stream clone bundle file
2150
2151
2151 Stream bundles are special bundles that are essentially archives of
2152 Stream bundles are special bundles that are essentially archives of
2152 revlog files. They are commonly used for cloning very quickly.
2153 revlog files. They are commonly used for cloning very quickly.
2153 """
2154 """
2154 requirements, gen = streamclone.generatebundlev1(repo)
2155 requirements, gen = streamclone.generatebundlev1(repo)
2155 changegroup.writechunks(ui, gen, fname)
2156 changegroup.writechunks(ui, gen, fname)
2156
2157
2157 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
2158 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
2158
2159
2159 @command('debugapplystreamclonebundle', [], 'FILE')
2160 @command('debugapplystreamclonebundle', [], 'FILE')
2160 def debugapplystreamclonebundle(ui, repo, fname):
2161 def debugapplystreamclonebundle(ui, repo, fname):
2161 """apply a stream clone bundle file"""
2162 """apply a stream clone bundle file"""
2162 f = hg.openpath(ui, fname)
2163 f = hg.openpath(ui, fname)
2163 gen = exchange.readbundle(ui, f, fname)
2164 gen = exchange.readbundle(ui, f, fname)
2164 gen.apply(repo)
2165 gen.apply(repo)
2165
2166
2166 @command('debugcheckstate', [], '')
2167 @command('debugcheckstate', [], '')
2167 def debugcheckstate(ui, repo):
2168 def debugcheckstate(ui, repo):
2168 """validate the correctness of the current dirstate"""
2169 """validate the correctness of the current dirstate"""
2169 parent1, parent2 = repo.dirstate.parents()
2170 parent1, parent2 = repo.dirstate.parents()
2170 m1 = repo[parent1].manifest()
2171 m1 = repo[parent1].manifest()
2171 m2 = repo[parent2].manifest()
2172 m2 = repo[parent2].manifest()
2172 errors = 0
2173 errors = 0
2173 for f in repo.dirstate:
2174 for f in repo.dirstate:
2174 state = repo.dirstate[f]
2175 state = repo.dirstate[f]
2175 if state in "nr" and f not in m1:
2176 if state in "nr" and f not in m1:
2176 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
2177 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
2177 errors += 1
2178 errors += 1
2178 if state in "a" and f in m1:
2179 if state in "a" and f in m1:
2179 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
2180 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
2180 errors += 1
2181 errors += 1
2181 if state in "m" and f not in m1 and f not in m2:
2182 if state in "m" and f not in m1 and f not in m2:
2182 ui.warn(_("%s in state %s, but not in either manifest\n") %
2183 ui.warn(_("%s in state %s, but not in either manifest\n") %
2183 (f, state))
2184 (f, state))
2184 errors += 1
2185 errors += 1
2185 for f in m1:
2186 for f in m1:
2186 state = repo.dirstate[f]
2187 state = repo.dirstate[f]
2187 if state not in "nrm":
2188 if state not in "nrm":
2188 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
2189 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
2189 errors += 1
2190 errors += 1
2190 if errors:
2191 if errors:
2191 error = _(".hg/dirstate inconsistent with current parent's manifest")
2192 error = _(".hg/dirstate inconsistent with current parent's manifest")
2192 raise error.Abort(error)
2193 raise error.Abort(error)
2193
2194
2194 @command('debugcommands', [], _('[COMMAND]'), norepo=True)
2195 @command('debugcommands', [], _('[COMMAND]'), norepo=True)
2195 def debugcommands(ui, cmd='', *args):
2196 def debugcommands(ui, cmd='', *args):
2196 """list all available commands and options"""
2197 """list all available commands and options"""
2197 for cmd, vals in sorted(table.iteritems()):
2198 for cmd, vals in sorted(table.iteritems()):
2198 cmd = cmd.split('|')[0].strip('^')
2199 cmd = cmd.split('|')[0].strip('^')
2199 opts = ', '.join([i[1] for i in vals[1]])
2200 opts = ', '.join([i[1] for i in vals[1]])
2200 ui.write('%s: %s\n' % (cmd, opts))
2201 ui.write('%s: %s\n' % (cmd, opts))
2201
2202
2202 @command('debugcomplete',
2203 @command('debugcomplete',
2203 [('o', 'options', None, _('show the command options'))],
2204 [('o', 'options', None, _('show the command options'))],
2204 _('[-o] CMD'),
2205 _('[-o] CMD'),
2205 norepo=True)
2206 norepo=True)
2206 def debugcomplete(ui, cmd='', **opts):
2207 def debugcomplete(ui, cmd='', **opts):
2207 """returns the completion list associated with the given command"""
2208 """returns the completion list associated with the given command"""
2208
2209
2209 if opts.get('options'):
2210 if opts.get('options'):
2210 options = []
2211 options = []
2211 otables = [globalopts]
2212 otables = [globalopts]
2212 if cmd:
2213 if cmd:
2213 aliases, entry = cmdutil.findcmd(cmd, table, False)
2214 aliases, entry = cmdutil.findcmd(cmd, table, False)
2214 otables.append(entry[1])
2215 otables.append(entry[1])
2215 for t in otables:
2216 for t in otables:
2216 for o in t:
2217 for o in t:
2217 if "(DEPRECATED)" in o[3]:
2218 if "(DEPRECATED)" in o[3]:
2218 continue
2219 continue
2219 if o[0]:
2220 if o[0]:
2220 options.append('-%s' % o[0])
2221 options.append('-%s' % o[0])
2221 options.append('--%s' % o[1])
2222 options.append('--%s' % o[1])
2222 ui.write("%s\n" % "\n".join(options))
2223 ui.write("%s\n" % "\n".join(options))
2223 return
2224 return
2224
2225
2225 cmdlist, unused_allcmds = cmdutil.findpossible(cmd, table)
2226 cmdlist, unused_allcmds = cmdutil.findpossible(cmd, table)
2226 if ui.verbose:
2227 if ui.verbose:
2227 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
2228 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
2228 ui.write("%s\n" % "\n".join(sorted(cmdlist)))
2229 ui.write("%s\n" % "\n".join(sorted(cmdlist)))
2229
2230
2230 @command('debugdag',
2231 @command('debugdag',
2231 [('t', 'tags', None, _('use tags as labels')),
2232 [('t', 'tags', None, _('use tags as labels')),
2232 ('b', 'branches', None, _('annotate with branch names')),
2233 ('b', 'branches', None, _('annotate with branch names')),
2233 ('', 'dots', None, _('use dots for runs')),
2234 ('', 'dots', None, _('use dots for runs')),
2234 ('s', 'spaces', None, _('separate elements by spaces'))],
2235 ('s', 'spaces', None, _('separate elements by spaces'))],
2235 _('[OPTION]... [FILE [REV]...]'),
2236 _('[OPTION]... [FILE [REV]...]'),
2236 optionalrepo=True)
2237 optionalrepo=True)
2237 def debugdag(ui, repo, file_=None, *revs, **opts):
2238 def debugdag(ui, repo, file_=None, *revs, **opts):
2238 """format the changelog or an index DAG as a concise textual description
2239 """format the changelog or an index DAG as a concise textual description
2239
2240
2240 If you pass a revlog index, the revlog's DAG is emitted. If you list
2241 If you pass a revlog index, the revlog's DAG is emitted. If you list
2241 revision numbers, they get labeled in the output as rN.
2242 revision numbers, they get labeled in the output as rN.
2242
2243
2243 Otherwise, the changelog DAG of the current repo is emitted.
2244 Otherwise, the changelog DAG of the current repo is emitted.
2244 """
2245 """
2245 spaces = opts.get('spaces')
2246 spaces = opts.get('spaces')
2246 dots = opts.get('dots')
2247 dots = opts.get('dots')
2247 if file_:
2248 if file_:
2248 rlog = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_)
2249 rlog = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_)
2249 revs = set((int(r) for r in revs))
2250 revs = set((int(r) for r in revs))
2250 def events():
2251 def events():
2251 for r in rlog:
2252 for r in rlog:
2252 yield 'n', (r, list(p for p in rlog.parentrevs(r)
2253 yield 'n', (r, list(p for p in rlog.parentrevs(r)
2253 if p != -1))
2254 if p != -1))
2254 if r in revs:
2255 if r in revs:
2255 yield 'l', (r, "r%i" % r)
2256 yield 'l', (r, "r%i" % r)
2256 elif repo:
2257 elif repo:
2257 cl = repo.changelog
2258 cl = repo.changelog
2258 tags = opts.get('tags')
2259 tags = opts.get('tags')
2259 branches = opts.get('branches')
2260 branches = opts.get('branches')
2260 if tags:
2261 if tags:
2261 labels = {}
2262 labels = {}
2262 for l, n in repo.tags().items():
2263 for l, n in repo.tags().items():
2263 labels.setdefault(cl.rev(n), []).append(l)
2264 labels.setdefault(cl.rev(n), []).append(l)
2264 def events():
2265 def events():
2265 b = "default"
2266 b = "default"
2266 for r in cl:
2267 for r in cl:
2267 if branches:
2268 if branches:
2268 newb = cl.read(cl.node(r))[5]['branch']
2269 newb = cl.read(cl.node(r))[5]['branch']
2269 if newb != b:
2270 if newb != b:
2270 yield 'a', newb
2271 yield 'a', newb
2271 b = newb
2272 b = newb
2272 yield 'n', (r, list(p for p in cl.parentrevs(r)
2273 yield 'n', (r, list(p for p in cl.parentrevs(r)
2273 if p != -1))
2274 if p != -1))
2274 if tags:
2275 if tags:
2275 ls = labels.get(r)
2276 ls = labels.get(r)
2276 if ls:
2277 if ls:
2277 for l in ls:
2278 for l in ls:
2278 yield 'l', (r, l)
2279 yield 'l', (r, l)
2279 else:
2280 else:
2280 raise error.Abort(_('need repo for changelog dag'))
2281 raise error.Abort(_('need repo for changelog dag'))
2281
2282
2282 for line in dagparser.dagtextlines(events(),
2283 for line in dagparser.dagtextlines(events(),
2283 addspaces=spaces,
2284 addspaces=spaces,
2284 wraplabels=True,
2285 wraplabels=True,
2285 wrapannotations=True,
2286 wrapannotations=True,
2286 wrapnonlinear=dots,
2287 wrapnonlinear=dots,
2287 usedots=dots,
2288 usedots=dots,
2288 maxlinewidth=70):
2289 maxlinewidth=70):
2289 ui.write(line)
2290 ui.write(line)
2290 ui.write("\n")
2291 ui.write("\n")
2291
2292
@command('debugdata', debugrevlogopts, _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
        if rev is not None:
            raise error.CommandError('debugdata', _('invalid arguments'))
        # with -c/-m/--dir the single positional argument is the revision,
        # not a file name
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
    try:
        ui.write(r.revision(r.lookup(rev)))
    except KeyError:
        raise error.Abort(_('invalid revision identifier %s') % rev)
2306
2307
@command('debugdate',
    [('e', 'extended', None, _('try extended date formats'))],
    _('[-e] DATE [RANGE]'),
    norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        d = util.parsedate(date, util.extendeddateformats)
    else:
        d = util.parsedate(date)
    # d is a (timestamp, tzoffset) pair as produced by util.parsedate
    ui.write(("internal: %s %s\n") % d)
    ui.write(("standard: %s\n") % util.datestr(d))
    if range:
        m = util.matchdate(range)
        ui.write(("match: %s\n") % m(d[0]))
2322
2323
@command('debugdiscovery',
    [('', 'old', None, _('use old-style discovery')),
    ('', 'nonheads', None,
     _('use old-style discovery with non-heads included')),
    ] + remoteopts,
    _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
                                      opts.get('branch'))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(12323)

    def doit(localheads, remoteheads, remote=remote):
        # Run one discovery round and report common heads and
        # subset relationships between local and remote head sets.
        if opts.get('old'):
            if localheads:
                raise error.Abort('cannot use localheads with old style '
                                  'discovery')
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                ui.write(("unpruned common: %s\n") %
                         " ".join(sorted(short(n) for n in common)))
                dag = dagutil.revlogdag(repo.changelog)
                all = dag.ancestorset(dag.internalizeall(common))
                common = dag.externalizeall(dag.headsetofconnecteds(all))
        else:
            common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
        common = set(common)
        rheads = set(hds)
        lheads = set(repo.heads())
        ui.write(("common heads: %s\n") %
                 " ".join(sorted(short(n) for n in common)))
        if lheads <= common:
            ui.write(("local is subset\n"))
        elif rheads <= common:
            ui.write(("remote is subset\n"))

    serverlogs = opts.get('serverlog')
    if serverlogs:
        # Replay discovery from recorded server log lines; fields are
        # semicolon-separated, with the operation code in parts[1].
        for filename in serverlogs:
            with open(filename, 'r') as logfile:
                line = logfile.readline()
                while line:
                    parts = line.strip().split(';')
                    op = parts[1]
                    if op == 'cg':
                        pass
                    elif op == 'cgss':
                        doit(parts[2].split(' '), parts[3].split(' '))
                    elif op == 'unb':
                        doit(parts[3].split(' '), parts[2].split(' '))
                    line = logfile.readline()
    else:
        remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
                                                 opts.get('remote_head'))
        localrevs = opts.get('local_head')
        doit(localrevs, remoterevs)
2388
2389
@command('debugextensions', formatteropts, [], norepo=True)
def debugextensions(ui, **opts):
    '''show information about active extensions'''
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter('debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        extsource = extmod.__file__
        exttestedwith = getattr(extmod, 'testedwith', '').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write('name', '%s\n', extname)
        else:
            fm.write('name', '%s', extname)
            # annotate the name with the extension's tested-with status
            if not exttestedwith:
                fm.plain(_(' (untested!)\n'))
            elif exttestedwith == ['internal'] or hgver in exttestedwith:
                fm.plain('\n')
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(' (%s!)\n' % lasttestedversion)

        fm.condwrite(ui.verbose and extsource, 'source',
                 _(' location: %s\n'), extsource or "")

        fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
                 _(' tested with: %s\n'),
                 fm.formatlist(exttestedwith, name='ver'))

        fm.condwrite(ui.verbose and extbuglink, 'buglink',
                 _(' bug reporting: %s\n'), extbuglink or "")

    fm.end()
2425
2426
@command('debugfileset',
    [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
    _('[-r REV] FILESPEC'))
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    ctx = scmutil.revsingle(repo, opts.get('rev'), None)
    if ui.verbose:
        # show the parsed fileset tree before evaluating it
        tree = fileset.parse(expr)
        ui.note(fileset.prettyformat(tree), "\n")

    for f in ctx.getfileset(expr):
        ui.write("%s\n" % f)
2438
2439
@command('debugfsinfo', [], _('[PATH]'), norepo=True)
def debugfsinfo(ui, path="."):
    """show information detected about current filesystem"""
    # probe file used by util.checkcase below; removed before returning
    util.writefile('.debugfsinfo', '')
    ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
    ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
    ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
    ui.write(('case-sensitive: %s\n') % (util.checkcase('.debugfsinfo')
                                         and 'yes' or 'no'))
    os.unlink('.debugfsinfo')
2449
2450
@command('debuggetbundle',
    [('H', 'head', [], _('id of head node'), _('ID')),
    ('C', 'common', [], _('id of common node'), _('ID')),
    ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
    _('REPO FILE [-H|-C ID]...'),
    norepo=True)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable('getbundle'):
        raise error.Abort("getbundle() not supported by target repository")
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args['bundlecaps'] = None
    bundle = repo.getbundle('debug', **args)

    # map the user-facing compression name to the on-disk bundle type
    bundletype = opts.get('type', 'bzip2').lower()
    btypes = {'none': 'HG10UN',
              'bzip2': 'HG10BZ',
              'gzip': 'HG10GZ',
              'bundle2': 'HG20'}
    bundletype = btypes.get(bundletype)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_('unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
2483
2484
@command('debugignore', [], '[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        includepat = getattr(ignore, 'includepat', None)
        if includepat is not None:
            ui.write("%s\n" % includepat)
        else:
            raise error.Abort(_("no ignore patterns found"))
    else:
        for f in files:
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != '.':
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    # a file is also ignored when one of its parent
                    # directories matches an ignore rule
                    for p in util.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_("%s is ignored\n") % f)
                else:
                    ui.write(_("%s is ignored because of "
                               "containing folder %s\n")
                             % (f, ignored))
                ignorefile, lineno, line = ignoredata
                ui.write(_("(ignore rule in %s, line %d: '%s')\n")
                         % (ignorefile, lineno, line))
            else:
                ui.write(_("%s is not ignored\n") % f)
2528
2529
@command('debugindex', debugrevlogopts +
    [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
    _('[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True)
def debugindex(ui, repo, file_=None, **opts):
    """dump the contents of an index file"""
    r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
    format = opts.get('format', 0)
    if format not in (0, 1):
        raise error.Abort(_("unknown format %d") % format)

    # generaldelta revlogs record a delta parent per revision instead of
    # a chain base, so the column header differs
    generaldelta = r.version & revlog.REVLOGGENERALDELTA
    if generaldelta:
        basehdr = ' delta'
    else:
        basehdr = '  base'

    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    if format == 0:
        ui.write(("   rev    offset  length " + basehdr + " linkrev"
                 " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
    elif format == 1:
        ui.write(("   rev flag   offset   length"
                 "     size " + basehdr + "   link     p1     p2"
                 " %s\n") % "nodeid".rjust(idlen))

    for i in r:
        node = r.node(i)
        if generaldelta:
            base = r.deltaparent(i)
        else:
            base = r.chainbase(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                pp = [nullid, nullid]
            ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                    i, r.start(i), r.length(i), base, r.linkrev(i),
                    shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
        elif format == 1:
            pr = r.parentrevs(i)
            ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
                    base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
2584
2585
@command('debugindexdot', debugrevlogopts,
    _('-c|-m|FILE'), optionalrepo=True)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
    ui.write(("digraph G {\n"))
    for i in r:
        node = r.node(i)
        pp = r.parents(node)
        ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
        # only emit the second parent edge for merges
        if pp[1] != nullid:
            ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
    ui.write("}\n")
2598
2599
@command('debugdeltachain',
    debugrevlogopts + formatteropts,
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``:       revision number
    :``chainid``:   delta chain identifier (numbered by unique base)
    :``chainlen``:  delta chain length to this revision
    :``prevrev``:   previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``:  compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                    (new delta chains typically start at ratio 2.00)
    :``lindist``:   linear distance from base revision in delta chain to end
                    of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                    how much unrelated data is needed to load this delta chain
    """
    r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
    index = r.index
    generaldelta = r.version & revlog.REVLOGGENERALDELTA

    def revinfo(rev):
        # Classify how this revision's delta was stored and total up the
        # compressed size of its whole delta chain.
        # NOTE(review): index entry fields are indexed positionally here;
        # e[1]/e[2] are used as compressed/uncompressed sizes, e[3] as the
        # delta base and e[5]/e[6] as parents — confirm against revlog docs.
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            if e[3] == e[5]:
                deltatype = 'p1'
            elif e[3] == e[6]:
                deltatype = 'p2'
            elif e[3] == rev - 1:
                deltatype = 'prev'
            elif e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'other'
        else:
            if e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter('debugdeltachain', opts)

    fm.plain('    rev  chain# chainlen     prev   delta       '
             'size    rawsize  chainsize     ratio   lindist extradist '
             'extraratio\n')

    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        # assign a stable small id per unique chain base
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = r.start(chainbase)
        revstart = r.start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        chainratio = float(chainsize) / float(uncomp)
        extraratio = float(extradist) / float(chainsize)

        fm.startitem()
        fm.write('rev chainid chainlen prevrev deltatype compsize '
                 'uncompsize chainsize chainratio lindist extradist '
                 'extraratio',
                 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f\n',
                 rev, chainid, len(chain), prevrev, deltatype, comp,
                 uncomp, chainsize, chainratio, lineardist, extradist,
                 extraratio,
                 rev=rev, chainid=chainid, chainlen=len(chain),
                 prevrev=prevrev, deltatype=deltatype, compsize=comp,
                 uncompsize=uncomp, chainsize=chainsize,
                 chainratio=chainratio, lindist=lineardist,
                 extradist=extradist, extraratio=extraratio)

    fm.end()
2699
2700
2700 @command('debuginstall', [] + formatteropts, '', norepo=True)
2701 @command('debuginstall', [] + formatteropts, '', norepo=True)
2701 def debuginstall(ui, **opts):
2702 def debuginstall(ui, **opts):
2702 '''test Mercurial installation
2703 '''test Mercurial installation
2703
2704
2704 Returns 0 on success.
2705 Returns 0 on success.
2705 '''
2706 '''
2706
2707
2707 def writetemp(contents):
2708 def writetemp(contents):
2708 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
2709 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
2709 f = os.fdopen(fd, "wb")
2710 f = os.fdopen(fd, "wb")
2710 f.write(contents)
2711 f.write(contents)
2711 f.close()
2712 f.close()
2712 return name
2713 return name
2713
2714
2714 problems = 0
2715 problems = 0
2715
2716
2716 fm = ui.formatter('debuginstall', opts)
2717 fm = ui.formatter('debuginstall', opts)
2717 fm.startitem()
2718 fm.startitem()
2718
2719
2719 # encoding
2720 # encoding
2720 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
2721 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
2721 err = None
2722 err = None
2722 try:
2723 try:
2723 encoding.fromlocal("test")
2724 encoding.fromlocal("test")
2724 except error.Abort as inst:
2725 except error.Abort as inst:
2725 err = inst
2726 err = inst
2726 problems += 1
2727 problems += 1
2727 fm.condwrite(err, 'encodingerror', _(" %s\n"
2728 fm.condwrite(err, 'encodingerror', _(" %s\n"
2728 " (check that your locale is properly set)\n"), err)
2729 " (check that your locale is properly set)\n"), err)
2729
2730
2730 # Python
2731 # Python
2731 fm.write('pythonexe', _("checking Python executable (%s)\n"),
2732 fm.write('pythonexe', _("checking Python executable (%s)\n"),
2732 sys.executable)
2733 sys.executable)
2733 fm.write('pythonver', _("checking Python version (%s)\n"),
2734 fm.write('pythonver', _("checking Python version (%s)\n"),
2734 ("%s.%s.%s" % sys.version_info[:3]))
2735 ("%s.%s.%s" % sys.version_info[:3]))
2735 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
2736 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
2736 os.path.dirname(os.__file__))
2737 os.path.dirname(os.__file__))
2737
2738
2738 # hg version
2739 # hg version
2739 hgver = util.version()
2740 hgver = util.version()
2740 fm.write('hgver', _("checking Mercurial version (%s)\n"),
2741 fm.write('hgver', _("checking Mercurial version (%s)\n"),
2741 hgver.split('+')[0])
2742 hgver.split('+')[0])
2742 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
2743 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
2743 '+'.join(hgver.split('+')[1:]))
2744 '+'.join(hgver.split('+')[1:]))
2744
2745
2745 # compiled modules
2746 # compiled modules
2746 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
2747 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
2747 policy.policy)
2748 policy.policy)
2748 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
2749 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
2749 os.path.dirname(__file__))
2750 os.path.dirname(__file__))
2750
2751
2751 err = None
2752 err = None
2752 try:
2753 try:
2753 from . import (
2754 from . import (
2754 base85,
2755 base85,
2755 bdiff,
2756 bdiff,
2756 mpatch,
2757 mpatch,
2757 osutil,
2758 osutil,
2758 )
2759 )
2759 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
2760 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
2760 except Exception as inst:
2761 except Exception as inst:
2761 err = inst
2762 err = inst
2762 problems += 1
2763 problems += 1
2763 fm.condwrite(err, 'extensionserror', " %s\n", err)
2764 fm.condwrite(err, 'extensionserror', " %s\n", err)
2764
2765
2765 # templates
2766 # templates
2766 p = templater.templatepaths()
2767 p = templater.templatepaths()
2767 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
2768 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
2768 fm.condwrite(not p, '', _(" no template directories found\n"))
2769 fm.condwrite(not p, '', _(" no template directories found\n"))
2769 if p:
2770 if p:
2770 m = templater.templatepath("map-cmdline.default")
2771 m = templater.templatepath("map-cmdline.default")
2771 if m:
2772 if m:
2772 # template found, check if it is working
2773 # template found, check if it is working
2773 err = None
2774 err = None
2774 try:
2775 try:
2775 templater.templater.frommapfile(m)
2776 templater.templater.frommapfile(m)
2776 except Exception as inst:
2777 except Exception as inst:
2777 err = inst
2778 err = inst
2778 p = None
2779 p = None
2779 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
2780 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
2780 else:
2781 else:
2781 p = None
2782 p = None
2782 fm.condwrite(p, 'defaulttemplate',
2783 fm.condwrite(p, 'defaulttemplate',
2783 _("checking default template (%s)\n"), m)
2784 _("checking default template (%s)\n"), m)
2784 fm.condwrite(not m, 'defaulttemplatenotfound',
2785 fm.condwrite(not m, 'defaulttemplatenotfound',
2785 _(" template '%s' not found\n"), "default")
2786 _(" template '%s' not found\n"), "default")
2786 if not p:
2787 if not p:
2787 problems += 1
2788 problems += 1
2788 fm.condwrite(not p, '',
2789 fm.condwrite(not p, '',
2789 _(" (templates seem to have been installed incorrectly)\n"))
2790 _(" (templates seem to have been installed incorrectly)\n"))
2790
2791
2791 # editor
2792 # editor
2792 editor = ui.geteditor()
2793 editor = ui.geteditor()
2793 editor = util.expandpath(editor)
2794 editor = util.expandpath(editor)
2794 fm.write('editor', _("checking commit editor... (%s)\n"), editor)
2795 fm.write('editor', _("checking commit editor... (%s)\n"), editor)
2795 cmdpath = util.findexe(shlex.split(editor)[0])
2796 cmdpath = util.findexe(shlex.split(editor)[0])
2796 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
2797 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
2797 _(" No commit editor set and can't find %s in PATH\n"
2798 _(" No commit editor set and can't find %s in PATH\n"
2798 " (specify a commit editor in your configuration"
2799 " (specify a commit editor in your configuration"
2799 " file)\n"), not cmdpath and editor == 'vi' and editor)
2800 " file)\n"), not cmdpath and editor == 'vi' and editor)
2800 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
2801 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
2801 _(" Can't find editor '%s' in PATH\n"
2802 _(" Can't find editor '%s' in PATH\n"
2802 " (specify a commit editor in your configuration"
2803 " (specify a commit editor in your configuration"
2803 " file)\n"), not cmdpath and editor)
2804 " file)\n"), not cmdpath and editor)
2804 if not cmdpath and editor != 'vi':
2805 if not cmdpath and editor != 'vi':
2805 problems += 1
2806 problems += 1
2806
2807
2807 # check username
2808 # check username
2808 username = None
2809 username = None
2809 err = None
2810 err = None
2810 try:
2811 try:
2811 username = ui.username()
2812 username = ui.username()
2812 except error.Abort as e:
2813 except error.Abort as e:
2813 err = e
2814 err = e
2814 problems += 1
2815 problems += 1
2815
2816
2816 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
2817 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
2817 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
2818 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
2818 " (specify a username in your configuration file)\n"), err)
2819 " (specify a username in your configuration file)\n"), err)
2819
2820
2820 fm.condwrite(not problems, '',
2821 fm.condwrite(not problems, '',
2821 _("no problems detected\n"))
2822 _("no problems detected\n"))
2822 if not problems:
2823 if not problems:
2823 fm.data(problems=problems)
2824 fm.data(problems=problems)
2824 fm.condwrite(problems, 'problems',
2825 fm.condwrite(problems, 'problems',
2825 _("%s problems detected,"
2826 _("%s problems detected,"
2826 " please check your install!\n"), problems)
2827 " please check your install!\n"), problems)
2827 fm.end()
2828 fm.end()
2828
2829
2829 return problems
2830 return problems
2830
2831
@command('debugknown', [], _('REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable('known'):
        raise error.Abort("known() not supported by target repository")
    # Decode every hex id once and issue a single batched query.
    known = peer.known([bin(nodeid) for nodeid in ids])
    ui.write("%s\n" % "".join("1" if flag else "0" for flag in known))
2843
2844
@command('debuglabelcomplete', [], _('LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # Label completion was superseded by name completion; delegate wholesale.
    debugnamecomplete(ui, repo, *args)
2848
2849
@command('debugmergestate', [], '')
def debugmergestate(ui, repo, *args):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""
    # Render the all-zero hash as the word 'null' for readability.
    def _hashornull(h):
        if h == nullhex:
            return 'null'
        else:
            return h

    # Pretty-print one version of the on-disk merge state. Closes over
    # v1records/v2records, which are read below before this is called.
    def printrecords(version):
        ui.write(('* version %s records\n') % version)
        if version == 1:
            records = v1records
        else:
            records = v2records

        for rtype, record in records:
            # pretty print some record types
            if rtype == 'L':
                ui.write(('local: %s\n') % record)
            elif rtype == 'O':
                ui.write(('other: %s\n') % record)
            elif rtype == 'm':
                # merge-driver record: driver name and its state, NUL-joined
                driver, mdstate = record.split('\0', 1)
                ui.write(('merge driver: %s (state "%s")\n')
                         % (driver, mdstate))
            elif rtype in 'FDC':
                # per-file merge record; fields are NUL-separated:
                # name, state, hash, local, ancestor, ancestor-node, other,
                # then (v2 only) other-node before the flags field.
                # NOTE(review): 'hash' shadows the builtin; harmless here.
                r = record.split('\0')
                f, state, hash, lfile, afile, anode, ofile = r[0:7]
                if version == 1:
                    onode = 'not stored in v1 format'
                    flags = r[7]
                else:
                    onode, flags = r[7:9]
                ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
                         % (f, rtype, state, _hashornull(hash)))
                ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
                ui.write((' ancestor path: %s (node %s)\n')
                         % (afile, _hashornull(anode)))
                ui.write((' other path: %s (node %s)\n')
                         % (ofile, _hashornull(onode)))
            elif rtype == 'f':
                # file-extras record: filename, then alternating key/value
                # pairs, all NUL-separated
                filename, rawextras = record.split('\0', 1)
                extras = rawextras.split('\0')
                i = 0
                extrastrings = []
                while i < len(extras):
                    extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
                    i += 2

                ui.write(('file extras: %s (%s)\n')
                         % (filename, ', '.join(extrastrings)))
            elif rtype == 'l':
                # labels record: local, other, and an optional base label
                labels = record.split('\0', 2)
                labels = [l for l in labels if len(l) > 0]
                ui.write(('labels:\n'))
                ui.write((' local: %s\n' % labels[0]))
                ui.write((' other: %s\n' % labels[1]))
                if len(labels) > 2:
                    ui.write((' base: %s\n' % labels[2]))
            else:
                # unknown record type: dump it raw with NULs made visible
                ui.write(('unrecognized entry: %s\t%s\n')
                         % (rtype, record.replace('\0', '\t')))

    # Avoid mergestate.read() since it may raise an exception for unsupported
    # merge state records. We shouldn't be doing this, but this is OK since this
    # command is pretty low-level.
    ms = mergemod.mergestate(repo)

    # sort so that reasonable information is on top
    v1records = ms._readrecordsv1()
    v2records = ms._readrecordsv2()
    # well-known record types come first, in L, O, m, l order; anything
    # else is sorted after them by record payload
    order = 'LOml'
    def key(r):
        idx = order.find(r[0])
        if idx == -1:
            return (1, r[1])
        else:
            return (0, idx)
    v1records.sort(key=key)
    v2records.sort(key=key)

    if not v1records and not v2records:
        ui.write(('no merge state found\n'))
    elif not v2records:
        ui.note(('no version 2 merge state\n'))
        printrecords(1)
    elif ms._v1v2match(v1records, v2records):
        ui.note(('v1 and v2 states match: using v2\n'))
        printrecords(2)
    else:
        ui.note(('v1 and v2 states mismatch: using v1\n'))
        printrecords(1)
    # with --verbose, always show the v2 records as well
    if ui.verbose:
        printrecords(2)
2947
2948
@command('debugnamecomplete', [], _('NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    candidates = set()
    # The 'branches' namespace is handled separately below so that only
    # open branches are offered, matching historical behavior.
    for nsname, ns in repo.names.iteritems():
        if nsname != 'branches':
            candidates.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(tag)
    # No arguments means "complete everything" (empty prefix).
    prefixes = args or ['']
    completions = set()
    for prefix in prefixes:
        completions.update(n for n in candidates if n.startswith(prefix))
    ui.write('\n'.join(sorted(completions)))
    ui.write('\n')
2967
2968
@command('debuglocks',
         [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
          ('W', 'force-wlock', None,
           _('free the working state lock (DANGEROUS)'))],
         _('[OPTION]...'))
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Returns 0 if no locks are held.

    """

    if opts.get('force_lock'):
        repo.svfs.unlink('lock')
    if opts.get('force_wlock'):
        repo.vfs.unlink('wlock')
    # BUG FIX: this condition previously tested 'force_lock' twice, so
    # running 'hg debuglocks --force-wlock' alone fell through to the
    # reporting code below instead of returning immediately.
    if opts.get('force_lock') or opts.get('force_wlock'):
        return 0

    now = time.time()
    held = 0

    # Report the state of one lock file: returns 1 if held, 0 if free.
    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # we acquired it, so it was free; release immediately
            l.release()
        else:
            try:
                stat = vfs.lstat(name)
                age = now - stat.st_mtime
                user = util.username(stat.st_uid)
                locker = vfs.readlock(name)
                if ":" in locker:
                    # lock contents are "host:pid"; only show the host when
                    # the holder is on a different machine
                    host, pid = locker.split(':')
                    if host == socket.gethostname():
                        locker = 'user %s, process %s' % (user, pid)
                    else:
                        locker = 'user %s, process %s, host %s' \
                                 % (user, pid, host)
                ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
                return 1
            except OSError as e:
                # the lock file vanished between the failed acquire and
                # our stat: treat it as free
                if e.errno != errno.ENOENT:
                    raise

        ui.write(("%-6s free\n") % (name + ":"))
        return 0

    held += report(repo.svfs, "lock", repo.lock)
    held += report(repo.vfs, "wlock", repo.wlock)

    return held
3039
3040
@command('debugobsolete',
         [('', 'flags', 0, _('markers flag')),
          ('', 'record-parents', False,
           _('record parent information for the precursor')),
          ('r', 'rev', [], _('display markers relevant to REV')),
          ('', 'index', False, _('display index of the marker')),
          ('', 'delete', [], _('delete markers specified by indices')),
         ] + commitopts2 + formatteropts,
         _('[OBSOLETED [REPLACEMENT ...]]'))
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    # Parse a full 40-hex-digit node id; aborts on anything shorter,
    # longer, or non-hex.
    def parsenodeid(s):
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.Abort('changeset references must be full hexadecimal '
                              'node identifiers')

    # --delete mode: remove markers by index and return early.
    if opts.get('delete'):
        indices = []
        for v in opts.get('delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.Abort(_('invalid index value: %r') % v,
                                  hint=_('use integers for indices'))

        if repo.currenttransaction():
            raise error.Abort(_('cannot delete obsmarkers in the middle '
                                'of transaction.'))

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            # NOTE(review): 'obsolescense' is misspelled in this user-visible
            # message; left untouched since test output may depend on it.
            ui.write(_('deleted %i obsolescense markers\n') % n)

        return

    if precursor is not None:
        # creation mode: record one marker precursor -> successors
        if opts['rev']:
            raise error.Abort('cannot select revision when creating marker')
        metadata = {}
        metadata['user'] = opts['user'] or ui.username()
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction('debugobsolete')
            try:
                date = opts.get('date')
                if date:
                    date = util.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts['record_parents']:
                    # parents can only be recorded for changesets we have
                    # locally (looked up in the unfiltered repo)
                    if prec not in repo.unfiltered():
                        raise error.Abort('cannot used --record-parents on '
                                          'unknown changesets')
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(tr, prec, succs, opts['flags'],
                                     parents=parents, date=date,
                                     metadata=metadata)
                tr.close()
            except ValueError as exc:
                raise error.Abort(_('bad obsmarker input: %s') % exc)
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # display mode: list markers, optionally restricted to --rev
        if opts['rev']:
            revs = scmutil.revrange(repo, opts['rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(obsolete.getmarkers(repo, nodes=nodes))
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsolete.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get('rev') and opts.get('index'):
            # --index needs positions within the FULL marker list, so
            # iterate everything but only display the selected subset
            markerstoiter = obsolete.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter('debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get('index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
3150
3151
@command('debugpathcomplete',
         [('f', 'full', None, _('complete an entire path')),
          ('n', 'normal', None, _('show only normal files')),
          ('a', 'added', None, _('show only added files')),
          ('r', 'removed', None, _('show only removed files'))],
         _('FILESPEC...'))
def debugpathcomplete(ui, repo, *specs, **opts):
    '''complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used.'''

    # Return (files, dirs) from the dirstate whose repo-relative path
    # starts with 'path'; 'acceptable' is a string of dirstate state
    # characters ('n', 'm', 'a', 'r') to include.
    def complete(path, acceptable):
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(os.getcwd(), path))
        rootdir = repo.root + os.sep
        # reject specs that point outside the repository
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += '/'
        # make the spec repo-relative
        spec = spec[len(rootdir):]
        # dirstate paths always use '/'; on platforms with a different
        # separator, translate in both directions
        fixpaths = os.sep != '/'
        if fixpaths:
            spec = spec.replace(os.sep, '/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        # bind the hot-loop methods once
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.iteritems():
            if f.startswith(spec) and st[0] in acceptable:
                if fixpaths:
                    f = f.replace('/', os.sep)
                if fullpaths:
                    addfile(f)
                    continue
                # without --full, stop at the next path separator and
                # offer the directory prefix instead
                s = f.find(os.sep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # build the set of acceptable dirstate states from the flags;
    # no flags means accept everything ('nmar' below)
    acceptable = ''
    if opts['normal']:
        acceptable += 'nm'
    if opts['added']:
        acceptable += 'a'
    if opts['removed']:
        acceptable += 'r'
    cwd = repo.getcwd()
    if not specs:
        specs = ['.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or 'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write('\n')
3215
3216
@command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    peer = hg.peer(ui, {}, repopath)
    if not keyinfo:
        # Listing mode: dump every key/value pair, escaped for display.
        for key, value in sorted(peer.listkeys(namespace).iteritems()):
            ui.write("%s\t%s\n" % (key.encode('string-escape'),
                                   value.encode('string-escape')))
        return
    # Update mode: conditionally move KEY from OLD to NEW.
    key, old, new = keyinfo
    result = peer.pushkey(namespace, key, old, new)
    ui.status(str(result) + '\n')
    return not result
3236
3237
3237 @command('debugpvec', [], _('A B'))
3238 @command('debugpvec', [], _('A B'))
3238 def debugpvec(ui, repo, a, b=None):
3239 def debugpvec(ui, repo, a, b=None):
3239 ca = scmutil.revsingle(repo, a)
3240 ca = scmutil.revsingle(repo, a)
3240 cb = scmutil.revsingle(repo, b)
3241 cb = scmutil.revsingle(repo, b)
3241 pa = pvec.ctxpvec(ca)
3242 pa = pvec.ctxpvec(ca)
3242 pb = pvec.ctxpvec(cb)
3243 pb = pvec.ctxpvec(cb)
3243 if pa == pb:
3244 if pa == pb:
3244 rel = "="
3245 rel = "="
3245 elif pa > pb:
3246 elif pa > pb:
3246 rel = ">"
3247 rel = ">"
3247 elif pa < pb:
3248 elif pa < pb:
3248 rel = "<"
3249 rel = "<"
3249 elif pa | pb:
3250 elif pa | pb:
3250 rel = "|"
3251 rel = "|"
3251 ui.write(_("a: %s\n") % pa)
3252 ui.write(_("a: %s\n") % pa)
3252 ui.write(_("b: %s\n") % pb)
3253 ui.write(_("b: %s\n") % pb)
3253 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
3254 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
3254 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
3255 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
3255 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
3256 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
3256 pa.distance(pb), rel))
3257 pa.distance(pb), rel))
3257
3258
3258 @command('debugrebuilddirstate|debugrebuildstate',
3259 @command('debugrebuilddirstate|debugrebuildstate',
3259 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
3260 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
3260 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
3261 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
3261 'the working copy parent')),
3262 'the working copy parent')),
3262 ],
3263 ],
3263 _('[-r REV]'))
3264 _('[-r REV]'))
3264 def debugrebuilddirstate(ui, repo, rev, **opts):
3265 def debugrebuilddirstate(ui, repo, rev, **opts):
3265 """rebuild the dirstate as it would look like for the given revision
3266 """rebuild the dirstate as it would look like for the given revision
3266
3267
3267 If no revision is specified the first current parent will be used.
3268 If no revision is specified the first current parent will be used.
3268
3269
3269 The dirstate will be set to the files of the given revision.
3270 The dirstate will be set to the files of the given revision.
3270 The actual working directory content or existing dirstate
3271 The actual working directory content or existing dirstate
3271 information such as adds or removes is not considered.
3272 information such as adds or removes is not considered.
3272
3273
3273 ``minimal`` will only rebuild the dirstate status for files that claim to be
3274 ``minimal`` will only rebuild the dirstate status for files that claim to be
3274 tracked but are not in the parent manifest, or that exist in the parent
3275 tracked but are not in the parent manifest, or that exist in the parent
3275 manifest but are not in the dirstate. It will not change adds, removes, or
3276 manifest but are not in the dirstate. It will not change adds, removes, or
3276 modified files that are in the working copy parent.
3277 modified files that are in the working copy parent.
3277
3278
3278 One use of this command is to make the next :hg:`status` invocation
3279 One use of this command is to make the next :hg:`status` invocation
3279 check the actual file content.
3280 check the actual file content.
3280 """
3281 """
3281 ctx = scmutil.revsingle(repo, rev)
3282 ctx = scmutil.revsingle(repo, rev)
3282 with repo.wlock():
3283 with repo.wlock():
3283 dirstate = repo.dirstate
3284 dirstate = repo.dirstate
3284 changedfiles = None
3285 changedfiles = None
3285 # See command doc for what minimal does.
3286 # See command doc for what minimal does.
3286 if opts.get('minimal'):
3287 if opts.get('minimal'):
3287 manifestfiles = set(ctx.manifest().keys())
3288 manifestfiles = set(ctx.manifest().keys())
3288 dirstatefiles = set(dirstate)
3289 dirstatefiles = set(dirstate)
3289 manifestonly = manifestfiles - dirstatefiles
3290 manifestonly = manifestfiles - dirstatefiles
3290 dsonly = dirstatefiles - manifestfiles
3291 dsonly = dirstatefiles - manifestfiles
3291 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
3292 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
3292 changedfiles = manifestonly | dsnotadded
3293 changedfiles = manifestonly | dsnotadded
3293
3294
3294 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
3295 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
3295
3296
3296 @command('debugrebuildfncache', [], '')
3297 @command('debugrebuildfncache', [], '')
3297 def debugrebuildfncache(ui, repo):
3298 def debugrebuildfncache(ui, repo):
3298 """rebuild the fncache file"""
3299 """rebuild the fncache file"""
3299 repair.rebuildfncache(ui, repo)
3300 repair.rebuildfncache(ui, repo)
3300
3301
3301 @command('debugrename',
3302 @command('debugrename',
3302 [('r', 'rev', '', _('revision to debug'), _('REV'))],
3303 [('r', 'rev', '', _('revision to debug'), _('REV'))],
3303 _('[-r REV] FILE'))
3304 _('[-r REV] FILE'))
3304 def debugrename(ui, repo, file1, *pats, **opts):
3305 def debugrename(ui, repo, file1, *pats, **opts):
3305 """dump rename information"""
3306 """dump rename information"""
3306
3307
3307 ctx = scmutil.revsingle(repo, opts.get('rev'))
3308 ctx = scmutil.revsingle(repo, opts.get('rev'))
3308 m = scmutil.match(ctx, (file1,) + pats, opts)
3309 m = scmutil.match(ctx, (file1,) + pats, opts)
3309 for abs in ctx.walk(m):
3310 for abs in ctx.walk(m):
3310 fctx = ctx[abs]
3311 fctx = ctx[abs]
3311 o = fctx.filelog().renamed(fctx.filenode())
3312 o = fctx.filelog().renamed(fctx.filenode())
3312 rel = m.rel(abs)
3313 rel = m.rel(abs)
3313 if o:
3314 if o:
3314 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
3315 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
3315 else:
3316 else:
3316 ui.write(_("%s not renamed\n") % rel)
3317 ui.write(_("%s not renamed\n") % rel)
3317
3318
3318 @command('debugrevlog', debugrevlogopts +
3319 @command('debugrevlog', debugrevlogopts +
3319 [('d', 'dump', False, _('dump index data'))],
3320 [('d', 'dump', False, _('dump index data'))],
3320 _('-c|-m|FILE'),
3321 _('-c|-m|FILE'),
3321 optionalrepo=True)
3322 optionalrepo=True)
3322 def debugrevlog(ui, repo, file_=None, **opts):
3323 def debugrevlog(ui, repo, file_=None, **opts):
3323 """show data and statistics about a revlog"""
3324 """show data and statistics about a revlog"""
3324 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
3325 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
3325
3326
3326 if opts.get("dump"):
3327 if opts.get("dump"):
3327 numrevs = len(r)
3328 numrevs = len(r)
3328 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
3329 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
3329 " rawsize totalsize compression heads chainlen\n"))
3330 " rawsize totalsize compression heads chainlen\n"))
3330 ts = 0
3331 ts = 0
3331 heads = set()
3332 heads = set()
3332
3333
3333 for rev in xrange(numrevs):
3334 for rev in xrange(numrevs):
3334 dbase = r.deltaparent(rev)
3335 dbase = r.deltaparent(rev)
3335 if dbase == -1:
3336 if dbase == -1:
3336 dbase = rev
3337 dbase = rev
3337 cbase = r.chainbase(rev)
3338 cbase = r.chainbase(rev)
3338 clen = r.chainlen(rev)
3339 clen = r.chainlen(rev)
3339 p1, p2 = r.parentrevs(rev)
3340 p1, p2 = r.parentrevs(rev)
3340 rs = r.rawsize(rev)
3341 rs = r.rawsize(rev)
3341 ts = ts + rs
3342 ts = ts + rs
3342 heads -= set(r.parentrevs(rev))
3343 heads -= set(r.parentrevs(rev))
3343 heads.add(rev)
3344 heads.add(rev)
3344 try:
3345 try:
3345 compression = ts / r.end(rev)
3346 compression = ts / r.end(rev)
3346 except ZeroDivisionError:
3347 except ZeroDivisionError:
3347 compression = 0
3348 compression = 0
3348 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
3349 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
3349 "%11d %5d %8d\n" %
3350 "%11d %5d %8d\n" %
3350 (rev, p1, p2, r.start(rev), r.end(rev),
3351 (rev, p1, p2, r.start(rev), r.end(rev),
3351 r.start(dbase), r.start(cbase),
3352 r.start(dbase), r.start(cbase),
3352 r.start(p1), r.start(p2),
3353 r.start(p1), r.start(p2),
3353 rs, ts, compression, len(heads), clen))
3354 rs, ts, compression, len(heads), clen))
3354 return 0
3355 return 0
3355
3356
3356 v = r.version
3357 v = r.version
3357 format = v & 0xFFFF
3358 format = v & 0xFFFF
3358 flags = []
3359 flags = []
3359 gdelta = False
3360 gdelta = False
3360 if v & revlog.REVLOGNGINLINEDATA:
3361 if v & revlog.REVLOGNGINLINEDATA:
3361 flags.append('inline')
3362 flags.append('inline')
3362 if v & revlog.REVLOGGENERALDELTA:
3363 if v & revlog.REVLOGGENERALDELTA:
3363 gdelta = True
3364 gdelta = True
3364 flags.append('generaldelta')
3365 flags.append('generaldelta')
3365 if not flags:
3366 if not flags:
3366 flags = ['(none)']
3367 flags = ['(none)']
3367
3368
3368 nummerges = 0
3369 nummerges = 0
3369 numfull = 0
3370 numfull = 0
3370 numprev = 0
3371 numprev = 0
3371 nump1 = 0
3372 nump1 = 0
3372 nump2 = 0
3373 nump2 = 0
3373 numother = 0
3374 numother = 0
3374 nump1prev = 0
3375 nump1prev = 0
3375 nump2prev = 0
3376 nump2prev = 0
3376 chainlengths = []
3377 chainlengths = []
3377
3378
3378 datasize = [None, 0, 0L]
3379 datasize = [None, 0, 0L]
3379 fullsize = [None, 0, 0L]
3380 fullsize = [None, 0, 0L]
3380 deltasize = [None, 0, 0L]
3381 deltasize = [None, 0, 0L]
3381
3382
3382 def addsize(size, l):
3383 def addsize(size, l):
3383 if l[0] is None or size < l[0]:
3384 if l[0] is None or size < l[0]:
3384 l[0] = size
3385 l[0] = size
3385 if size > l[1]:
3386 if size > l[1]:
3386 l[1] = size
3387 l[1] = size
3387 l[2] += size
3388 l[2] += size
3388
3389
3389 numrevs = len(r)
3390 numrevs = len(r)
3390 for rev in xrange(numrevs):
3391 for rev in xrange(numrevs):
3391 p1, p2 = r.parentrevs(rev)
3392 p1, p2 = r.parentrevs(rev)
3392 delta = r.deltaparent(rev)
3393 delta = r.deltaparent(rev)
3393 if format > 0:
3394 if format > 0:
3394 addsize(r.rawsize(rev), datasize)
3395 addsize(r.rawsize(rev), datasize)
3395 if p2 != nullrev:
3396 if p2 != nullrev:
3396 nummerges += 1
3397 nummerges += 1
3397 size = r.length(rev)
3398 size = r.length(rev)
3398 if delta == nullrev:
3399 if delta == nullrev:
3399 chainlengths.append(0)
3400 chainlengths.append(0)
3400 numfull += 1
3401 numfull += 1
3401 addsize(size, fullsize)
3402 addsize(size, fullsize)
3402 else:
3403 else:
3403 chainlengths.append(chainlengths[delta] + 1)
3404 chainlengths.append(chainlengths[delta] + 1)
3404 addsize(size, deltasize)
3405 addsize(size, deltasize)
3405 if delta == rev - 1:
3406 if delta == rev - 1:
3406 numprev += 1
3407 numprev += 1
3407 if delta == p1:
3408 if delta == p1:
3408 nump1prev += 1
3409 nump1prev += 1
3409 elif delta == p2:
3410 elif delta == p2:
3410 nump2prev += 1
3411 nump2prev += 1
3411 elif delta == p1:
3412 elif delta == p1:
3412 nump1 += 1
3413 nump1 += 1
3413 elif delta == p2:
3414 elif delta == p2:
3414 nump2 += 1
3415 nump2 += 1
3415 elif delta != nullrev:
3416 elif delta != nullrev:
3416 numother += 1
3417 numother += 1
3417
3418
3418 # Adjust size min value for empty cases
3419 # Adjust size min value for empty cases
3419 for size in (datasize, fullsize, deltasize):
3420 for size in (datasize, fullsize, deltasize):
3420 if size[0] is None:
3421 if size[0] is None:
3421 size[0] = 0
3422 size[0] = 0
3422
3423
3423 numdeltas = numrevs - numfull
3424 numdeltas = numrevs - numfull
3424 numoprev = numprev - nump1prev - nump2prev
3425 numoprev = numprev - nump1prev - nump2prev
3425 totalrawsize = datasize[2]
3426 totalrawsize = datasize[2]
3426 datasize[2] /= numrevs
3427 datasize[2] /= numrevs
3427 fulltotal = fullsize[2]
3428 fulltotal = fullsize[2]
3428 fullsize[2] /= numfull
3429 fullsize[2] /= numfull
3429 deltatotal = deltasize[2]
3430 deltatotal = deltasize[2]
3430 if numrevs - numfull > 0:
3431 if numrevs - numfull > 0:
3431 deltasize[2] /= numrevs - numfull
3432 deltasize[2] /= numrevs - numfull
3432 totalsize = fulltotal + deltatotal
3433 totalsize = fulltotal + deltatotal
3433 avgchainlen = sum(chainlengths) / numrevs
3434 avgchainlen = sum(chainlengths) / numrevs
3434 maxchainlen = max(chainlengths)
3435 maxchainlen = max(chainlengths)
3435 compratio = 1
3436 compratio = 1
3436 if totalsize:
3437 if totalsize:
3437 compratio = totalrawsize / totalsize
3438 compratio = totalrawsize / totalsize
3438
3439
3439 basedfmtstr = '%%%dd\n'
3440 basedfmtstr = '%%%dd\n'
3440 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
3441 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
3441
3442
3442 def dfmtstr(max):
3443 def dfmtstr(max):
3443 return basedfmtstr % len(str(max))
3444 return basedfmtstr % len(str(max))
3444 def pcfmtstr(max, padding=0):
3445 def pcfmtstr(max, padding=0):
3445 return basepcfmtstr % (len(str(max)), ' ' * padding)
3446 return basepcfmtstr % (len(str(max)), ' ' * padding)
3446
3447
3447 def pcfmt(value, total):
3448 def pcfmt(value, total):
3448 if total:
3449 if total:
3449 return (value, 100 * float(value) / total)
3450 return (value, 100 * float(value) / total)
3450 else:
3451 else:
3451 return value, 100.0
3452 return value, 100.0
3452
3453
3453 ui.write(('format : %d\n') % format)
3454 ui.write(('format : %d\n') % format)
3454 ui.write(('flags : %s\n') % ', '.join(flags))
3455 ui.write(('flags : %s\n') % ', '.join(flags))
3455
3456
3456 ui.write('\n')
3457 ui.write('\n')
3457 fmt = pcfmtstr(totalsize)
3458 fmt = pcfmtstr(totalsize)
3458 fmt2 = dfmtstr(totalsize)
3459 fmt2 = dfmtstr(totalsize)
3459 ui.write(('revisions : ') + fmt2 % numrevs)
3460 ui.write(('revisions : ') + fmt2 % numrevs)
3460 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
3461 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
3461 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
3462 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
3462 ui.write(('revisions : ') + fmt2 % numrevs)
3463 ui.write(('revisions : ') + fmt2 % numrevs)
3463 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
3464 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
3464 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
3465 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
3465 ui.write(('revision size : ') + fmt2 % totalsize)
3466 ui.write(('revision size : ') + fmt2 % totalsize)
3466 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
3467 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
3467 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
3468 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
3468
3469
3469 ui.write('\n')
3470 ui.write('\n')
3470 fmt = dfmtstr(max(avgchainlen, compratio))
3471 fmt = dfmtstr(max(avgchainlen, compratio))
3471 ui.write(('avg chain length : ') + fmt % avgchainlen)
3472 ui.write(('avg chain length : ') + fmt % avgchainlen)
3472 ui.write(('max chain length : ') + fmt % maxchainlen)
3473 ui.write(('max chain length : ') + fmt % maxchainlen)
3473 ui.write(('compression ratio : ') + fmt % compratio)
3474 ui.write(('compression ratio : ') + fmt % compratio)
3474
3475
3475 if format > 0:
3476 if format > 0:
3476 ui.write('\n')
3477 ui.write('\n')
3477 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
3478 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
3478 % tuple(datasize))
3479 % tuple(datasize))
3479 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
3480 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
3480 % tuple(fullsize))
3481 % tuple(fullsize))
3481 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
3482 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
3482 % tuple(deltasize))
3483 % tuple(deltasize))
3483
3484
3484 if numdeltas > 0:
3485 if numdeltas > 0:
3485 ui.write('\n')
3486 ui.write('\n')
3486 fmt = pcfmtstr(numdeltas)
3487 fmt = pcfmtstr(numdeltas)
3487 fmt2 = pcfmtstr(numdeltas, 4)
3488 fmt2 = pcfmtstr(numdeltas, 4)
3488 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
3489 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
3489 if numprev > 0:
3490 if numprev > 0:
3490 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
3491 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
3491 numprev))
3492 numprev))
3492 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
3493 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
3493 numprev))
3494 numprev))
3494 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
3495 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
3495 numprev))
3496 numprev))
3496 if gdelta:
3497 if gdelta:
3497 ui.write(('deltas against p1 : ')
3498 ui.write(('deltas against p1 : ')
3498 + fmt % pcfmt(nump1, numdeltas))
3499 + fmt % pcfmt(nump1, numdeltas))
3499 ui.write(('deltas against p2 : ')
3500 ui.write(('deltas against p2 : ')
3500 + fmt % pcfmt(nump2, numdeltas))
3501 + fmt % pcfmt(nump2, numdeltas))
3501 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
3502 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
3502 numdeltas))
3503 numdeltas))
3503
3504
3504 @command('debugrevspec',
3505 @command('debugrevspec',
3505 [('', 'optimize', None, _('print parsed tree after optimizing'))],
3506 [('', 'optimize', None, _('print parsed tree after optimizing'))],
3506 ('REVSPEC'))
3507 ('REVSPEC'))
3507 def debugrevspec(ui, repo, expr, **opts):
3508 def debugrevspec(ui, repo, expr, **opts):
3508 """parse and apply a revision specification
3509 """parse and apply a revision specification
3509
3510
3510 Use --verbose to print the parsed tree before and after aliases
3511 Use --verbose to print the parsed tree before and after aliases
3511 expansion.
3512 expansion.
3512 """
3513 """
3513 if ui.verbose:
3514 if ui.verbose:
3514 tree = revset.parse(expr, lookup=repo.__contains__)
3515 tree = revset.parse(expr, lookup=repo.__contains__)
3515 ui.note(revset.prettyformat(tree), "\n")
3516 ui.note(revset.prettyformat(tree), "\n")
3516 newtree = revset.expandaliases(ui, tree)
3517 newtree = revset.expandaliases(ui, tree)
3517 if newtree != tree:
3518 if newtree != tree:
3518 ui.note(("* expanded:\n"), revset.prettyformat(newtree), "\n")
3519 ui.note(("* expanded:\n"), revset.prettyformat(newtree), "\n")
3519 tree = newtree
3520 tree = newtree
3520 newtree = revset.foldconcat(tree)
3521 newtree = revset.foldconcat(tree)
3521 if newtree != tree:
3522 if newtree != tree:
3522 ui.note(("* concatenated:\n"), revset.prettyformat(newtree), "\n")
3523 ui.note(("* concatenated:\n"), revset.prettyformat(newtree), "\n")
3523 if opts["optimize"]:
3524 if opts["optimize"]:
3524 optimizedtree = revset.optimize(newtree)
3525 optimizedtree = revset.optimize(newtree)
3525 ui.note(("* optimized:\n"),
3526 ui.note(("* optimized:\n"),
3526 revset.prettyformat(optimizedtree), "\n")
3527 revset.prettyformat(optimizedtree), "\n")
3527 func = revset.match(ui, expr, repo)
3528 func = revset.match(ui, expr, repo)
3528 revs = func(repo)
3529 revs = func(repo)
3529 if ui.verbose:
3530 if ui.verbose:
3530 ui.note(("* set:\n"), revset.prettyformatset(revs), "\n")
3531 ui.note(("* set:\n"), revset.prettyformatset(revs), "\n")
3531 for c in revs:
3532 for c in revs:
3532 ui.write("%s\n" % c)
3533 ui.write("%s\n" % c)
3533
3534
3534 @command('debugsetparents', [], _('REV1 [REV2]'))
3535 @command('debugsetparents', [], _('REV1 [REV2]'))
3535 def debugsetparents(ui, repo, rev1, rev2=None):
3536 def debugsetparents(ui, repo, rev1, rev2=None):
3536 """manually set the parents of the current working directory
3537 """manually set the parents of the current working directory
3537
3538
3538 This is useful for writing repository conversion tools, but should
3539 This is useful for writing repository conversion tools, but should
3539 be used with care. For example, neither the working directory nor the
3540 be used with care. For example, neither the working directory nor the
3540 dirstate is updated, so file status may be incorrect after running this
3541 dirstate is updated, so file status may be incorrect after running this
3541 command.
3542 command.
3542
3543
3543 Returns 0 on success.
3544 Returns 0 on success.
3544 """
3545 """
3545
3546
3546 r1 = scmutil.revsingle(repo, rev1).node()
3547 r1 = scmutil.revsingle(repo, rev1).node()
3547 r2 = scmutil.revsingle(repo, rev2, 'null').node()
3548 r2 = scmutil.revsingle(repo, rev2, 'null').node()
3548
3549
3549 with repo.wlock():
3550 with repo.wlock():
3550 repo.setparents(r1, r2)
3551 repo.setparents(r1, r2)
3551
3552
3552 @command('debugdirstate|debugstate',
3553 @command('debugdirstate|debugstate',
3553 [('', 'nodates', None, _('do not display the saved mtime')),
3554 [('', 'nodates', None, _('do not display the saved mtime')),
3554 ('', 'datesort', None, _('sort by saved mtime'))],
3555 ('', 'datesort', None, _('sort by saved mtime'))],
3555 _('[OPTION]...'))
3556 _('[OPTION]...'))
3556 def debugstate(ui, repo, **opts):
3557 def debugstate(ui, repo, **opts):
3557 """show the contents of the current dirstate"""
3558 """show the contents of the current dirstate"""
3558
3559
3559 nodates = opts.get('nodates')
3560 nodates = opts.get('nodates')
3560 datesort = opts.get('datesort')
3561 datesort = opts.get('datesort')
3561
3562
3562 timestr = ""
3563 timestr = ""
3563 if datesort:
3564 if datesort:
3564 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
3565 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
3565 else:
3566 else:
3566 keyfunc = None # sort by filename
3567 keyfunc = None # sort by filename
3567 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
3568 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
3568 if ent[3] == -1:
3569 if ent[3] == -1:
3569 timestr = 'unset '
3570 timestr = 'unset '
3570 elif nodates:
3571 elif nodates:
3571 timestr = 'set '
3572 timestr = 'set '
3572 else:
3573 else:
3573 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
3574 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
3574 time.localtime(ent[3]))
3575 time.localtime(ent[3]))
3575 if ent[1] & 0o20000:
3576 if ent[1] & 0o20000:
3576 mode = 'lnk'
3577 mode = 'lnk'
3577 else:
3578 else:
3578 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
3579 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
3579 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
3580 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
3580 for f in repo.dirstate.copies():
3581 for f in repo.dirstate.copies():
3581 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
3582 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
3582
3583
3583 @command('debugsub',
3584 @command('debugsub',
3584 [('r', 'rev', '',
3585 [('r', 'rev', '',
3585 _('revision to check'), _('REV'))],
3586 _('revision to check'), _('REV'))],
3586 _('[-r REV] [REV]'))
3587 _('[-r REV] [REV]'))
3587 def debugsub(ui, repo, rev=None):
3588 def debugsub(ui, repo, rev=None):
3588 ctx = scmutil.revsingle(repo, rev, None)
3589 ctx = scmutil.revsingle(repo, rev, None)
3589 for k, v in sorted(ctx.substate.items()):
3590 for k, v in sorted(ctx.substate.items()):
3590 ui.write(('path %s\n') % k)
3591 ui.write(('path %s\n') % k)
3591 ui.write((' source %s\n') % v[0])
3592 ui.write((' source %s\n') % v[0])
3592 ui.write((' revision %s\n') % v[1])
3593 ui.write((' revision %s\n') % v[1])
3593
3594
3594 @command('debugsuccessorssets',
3595 @command('debugsuccessorssets',
3595 [],
3596 [],
3596 _('[REV]'))
3597 _('[REV]'))
3597 def debugsuccessorssets(ui, repo, *revs):
3598 def debugsuccessorssets(ui, repo, *revs):
3598 """show set of successors for revision
3599 """show set of successors for revision
3599
3600
3600 A successors set of changeset A is a consistent group of revisions that
3601 A successors set of changeset A is a consistent group of revisions that
3601 succeed A. It contains non-obsolete changesets only.
3602 succeed A. It contains non-obsolete changesets only.
3602
3603
3603 In most cases a changeset A has a single successors set containing a single
3604 In most cases a changeset A has a single successors set containing a single
3604 successor (changeset A replaced by A').
3605 successor (changeset A replaced by A').
3605
3606
3606 A changeset that is made obsolete with no successors are called "pruned".
3607 A changeset that is made obsolete with no successors are called "pruned".
3607 Such changesets have no successors sets at all.
3608 Such changesets have no successors sets at all.
3608
3609
3609 A changeset that has been "split" will have a successors set containing
3610 A changeset that has been "split" will have a successors set containing
3610 more than one successor.
3611 more than one successor.
3611
3612
3612 A changeset that has been rewritten in multiple different ways is called
3613 A changeset that has been rewritten in multiple different ways is called
3613 "divergent". Such changesets have multiple successor sets (each of which
3614 "divergent". Such changesets have multiple successor sets (each of which
3614 may also be split, i.e. have multiple successors).
3615 may also be split, i.e. have multiple successors).
3615
3616
3616 Results are displayed as follows::
3617 Results are displayed as follows::
3617
3618
3618 <rev1>
3619 <rev1>
3619 <successors-1A>
3620 <successors-1A>
3620 <rev2>
3621 <rev2>
3621 <successors-2A>
3622 <successors-2A>
3622 <successors-2B1> <successors-2B2> <successors-2B3>
3623 <successors-2B1> <successors-2B2> <successors-2B3>
3623
3624
3624 Here rev2 has two possible (i.e. divergent) successors sets. The first
3625 Here rev2 has two possible (i.e. divergent) successors sets. The first
3625 holds one element, whereas the second holds three (i.e. the changeset has
3626 holds one element, whereas the second holds three (i.e. the changeset has
3626 been split).
3627 been split).
3627 """
3628 """
3628 # passed to successorssets caching computation from one call to another
3629 # passed to successorssets caching computation from one call to another
3629 cache = {}
3630 cache = {}
3630 ctx2str = str
3631 ctx2str = str
3631 node2str = short
3632 node2str = short
3632 if ui.debug():
3633 if ui.debug():
3633 def ctx2str(ctx):
3634 def ctx2str(ctx):
3634 return ctx.hex()
3635 return ctx.hex()
3635 node2str = hex
3636 node2str = hex
3636 for rev in scmutil.revrange(repo, revs):
3637 for rev in scmutil.revrange(repo, revs):
3637 ctx = repo[rev]
3638 ctx = repo[rev]
3638 ui.write('%s\n'% ctx2str(ctx))
3639 ui.write('%s\n'% ctx2str(ctx))
3639 for succsset in obsolete.successorssets(repo, ctx.node(), cache):
3640 for succsset in obsolete.successorssets(repo, ctx.node(), cache):
3640 if succsset:
3641 if succsset:
3641 ui.write(' ')
3642 ui.write(' ')
3642 ui.write(node2str(succsset[0]))
3643 ui.write(node2str(succsset[0]))
3643 for node in succsset[1:]:
3644 for node in succsset[1:]:
3644 ui.write(' ')
3645 ui.write(' ')
3645 ui.write(node2str(node))
3646 ui.write(node2str(node))
3646 ui.write('\n')
3647 ui.write('\n')
3647
3648
3648 @command('debugtemplate',
3649 @command('debugtemplate',
3649 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
3650 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
3650 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
3651 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
3651 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
3652 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
3652 optionalrepo=True)
3653 optionalrepo=True)
3653 def debugtemplate(ui, repo, tmpl, **opts):
3654 def debugtemplate(ui, repo, tmpl, **opts):
3654 """parse and apply a template
3655 """parse and apply a template
3655
3656
3656 If -r/--rev is given, the template is processed as a log template and
3657 If -r/--rev is given, the template is processed as a log template and
3657 applied to the given changesets. Otherwise, it is processed as a generic
3658 applied to the given changesets. Otherwise, it is processed as a generic
3658 template.
3659 template.
3659
3660
3660 Use --verbose to print the parsed tree.
3661 Use --verbose to print the parsed tree.
3661 """
3662 """
3662 revs = None
3663 revs = None
3663 if opts['rev']:
3664 if opts['rev']:
3664 if repo is None:
3665 if repo is None:
3665 raise error.RepoError(_('there is no Mercurial repository here '
3666 raise error.RepoError(_('there is no Mercurial repository here '
3666 '(.hg not found)'))
3667 '(.hg not found)'))
3667 revs = scmutil.revrange(repo, opts['rev'])
3668 revs = scmutil.revrange(repo, opts['rev'])
3668
3669
3669 props = {}
3670 props = {}
3670 for d in opts['define']:
3671 for d in opts['define']:
3671 try:
3672 try:
3672 k, v = (e.strip() for e in d.split('=', 1))
3673 k, v = (e.strip() for e in d.split('=', 1))
3673 if not k:
3674 if not k:
3674 raise ValueError
3675 raise ValueError
3675 props[k] = v
3676 props[k] = v
3676 except ValueError:
3677 except ValueError:
3677 raise error.Abort(_('malformed keyword definition: %s') % d)
3678 raise error.Abort(_('malformed keyword definition: %s') % d)
3678
3679
3679 if ui.verbose:
3680 if ui.verbose:
3680 aliases = ui.configitems('templatealias')
3681 aliases = ui.configitems('templatealias')
3681 tree = templater.parse(tmpl)
3682 tree = templater.parse(tmpl)
3682 ui.note(templater.prettyformat(tree), '\n')
3683 ui.note(templater.prettyformat(tree), '\n')
3683 newtree = templater.expandaliases(tree, aliases)
3684 newtree = templater.expandaliases(tree, aliases)
3684 if newtree != tree:
3685 if newtree != tree:
3685 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
3686 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
3686
3687
3687 mapfile = None
3688 mapfile = None
3688 if revs is None:
3689 if revs is None:
3689 k = 'debugtemplate'
3690 k = 'debugtemplate'
3690 t = formatter.maketemplater(ui, k, tmpl)
3691 t = formatter.maketemplater(ui, k, tmpl)
3691 ui.write(templater.stringify(t(k, **props)))
3692 ui.write(templater.stringify(t(k, **props)))
3692 else:
3693 else:
3693 displayer = cmdutil.changeset_templater(ui, repo, None, opts, tmpl,
3694 displayer = cmdutil.changeset_templater(ui, repo, None, opts, tmpl,
3694 mapfile, buffered=False)
3695 mapfile, buffered=False)
3695 for r in revs:
3696 for r in revs:
3696 displayer.show(repo[r], **props)
3697 displayer.show(repo[r], **props)
3697 displayer.close()
3698 displayer.close()
3698
3699
@command('debugwalk', walkopts, _('[OPTION]... [FILE]...'), inferrepo=True)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    m = scmutil.match(repo[None], pats, opts)
    items = list(repo.walk(m))
    if not items:
        return
    # identity by default; map to '/' separators when ui.slash is set on
    # platforms whose native separator differs
    f = lambda fn: fn
    if ui.configbool('ui', 'slash') and os.sep != '/':
        f = lambda fn: util.normpath(fn)
    # column widths come from the longest absolute and relative paths;
    # use generators (no throwaway lists) and avoid shadowing builtin abs()
    fmt = 'f %%-%ds %%-%ds %%s' % (
        max(len(abspath) for abspath in items),
        max(len(m.rel(abspath)) for abspath in items))
    for abspath in items:
        line = fmt % (abspath, f(m.rel(abspath)),
                      m.exact(abspath) and 'exact' or '')
        ui.write("%s\n" % line.rstrip())
3715
3716
@command('debugwireargs',
    [('', 'three', '', 'three'),
     ('', 'four', '', 'four'),
     ('', 'five', '', 'five'),
    ] + remoteopts,
    _('REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True)
def debugwireargs(ui, repopath, *vals, **opts):
    # connect to the peer, then strip the connection-related options so
    # only the wire-argument test flags (three/four/five) remain
    repo = hg.peer(ui, opts, repopath)
    for opt in remoteopts:
        del opts[opt[1]]
    # keep only the options that were actually set
    args = dict((k, v) for k, v in opts.iteritems() if v)
    # run twice to check that we don't mess up the stream for the next command
    res1 = repo.debugwireargs(*vals, **args)
    res2 = repo.debugwireargs(*vals, **args)
    ui.write("%s\n" % res1)
    if res1 != res2:
        ui.warn("%s\n" % res2)
3737
3738
@command('^diff',
    [('r', 'rev', [], _('revision'), _('REV')),
     ('c', 'change', '', _('change made by revision'), _('REV'))
    ] + diffopts + diffopts2 + walkopts + subrepoopts,
    _('[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...'),
    inferrepo=True)
def diff(ui, repo, *pats, **opts):
    """diff repository (or selected files)

    Show differences between revisions for the specified files.

    Differences between files are shown using the unified diff format.

    .. note::

       :hg:`diff` may generate unexpected results for merges, as it will
       default to comparing against the working directory's first
       parent changeset if no revisions are specified.

    When two revision arguments are given, then changes are shown
    between those revisions. If only one revision is specified then
    that revision is compared to the working directory, and, when no
    revisions are specified, the working directory files are compared
    to its first parent.

    Alternatively you can specify -c/--change with a revision to see
    the changes in that changeset relative to its first parent.

    Without the -a/--text option, diff will avoid generating diffs of
    files it detects as binary. With -a, diff will generate a diff
    anyway, probably with undesirable results.

    Use the -g/--git option to generate diffs in the git extended diff
    format. For more information, read :hg:`help diffs`.

    .. container:: verbose

      Examples:

      - compare a file in the current working directory to its parent::

          hg diff foo.c

      - compare two historical versions of a directory, with rename info::

          hg diff --git -r 1.0:1.2 lib/

      - get change stats relative to the last change on some date::

          hg diff --stat -r "date('may 2')"

      - diff all newly-added files that contain a keyword::

          hg diff "set:added() and grep(GNU)"

      - compare a revision and its parents::

          hg diff -c 9353 # compare against first parent
          hg diff -r 9353^:9353 # same using revset syntax
          hg diff -r 9353^2:9353 # compare against the second parent

    Returns 0 on success.
    """

    revs = opts.get('rev')
    change = opts.get('change')

    # --rev and --change are mutually exclusive ways of picking endpoints
    if revs and change:
        raise error.Abort(
            _('cannot specify --rev and --change at the same time'))
    if change:
        # -c/--change REV: compare REV against its first parent
        node2 = scmutil.revsingle(repo, change, None).node()
        node1 = repo[node2].p1().node()
    else:
        node1, node2 = scmutil.revpair(repo, revs)

    if opts.get('reverse'):
        node1, node2 = node2, node1

    diffopts = patch.diffallopts(ui, opts)
    m = scmutil.match(repo[node2], pats, opts)
    cmdutil.diffordiffstat(ui, repo, diffopts, node1, node2, m,
                           stat=opts.get('stat'),
                           listsubrepos=opts.get('subrepos'),
                           root=opts.get('root'))
3824
3825
@command('^export',
    [('o', 'output', '',
      _('print output to file with formatted name'), _('FORMAT')),
     ('', 'switch-parent', None, _('diff against the second parent')),
     ('r', 'rev', [], _('revisions to export'), _('REV')),
    ] + diffopts,
    _('[OPTION]... [-o OUTFILESPEC] [-r] [REV]...'))
def export(ui, repo, *changesets, **opts):
    """dump the header and diffs for one or more changesets

    Print the changeset header and diffs for one or more revisions.
    If no revision is given, the parent of the working directory is used.

    The information shown in the changeset header is: author, date,
    branch name (if non-default), changeset hash, parent(s) and commit
    comment.

    .. note::

       :hg:`export` may generate unexpected diff output for merge
       changesets, as it will compare the merge changeset against its
       first parent only.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are as follows:

    :``%%``: literal "%" character
    :``%H``: changeset hash (40 hexadecimal digits)
    :``%N``: number of patches being generated
    :``%R``: changeset revision number
    :``%b``: basename of the exporting repository
    :``%h``: short-form changeset hash (12 hexadecimal digits)
    :``%m``: first line of the commit message (only alphanumeric characters)
    :``%n``: zero-padded sequence number, starting at 1
    :``%r``: zero-padded changeset revision number

    Without the -a/--text option, export will avoid generating diffs
    of files it detects as binary. With -a, export will generate a
    diff anyway, probably with undesirable results.

    Use the -g/--git option to generate diffs in the git extended diff
    format. See :hg:`help diffs` for more information.

    With the --switch-parent option, the diff will be against the
    second parent. It can be useful to review a merge.

    .. container:: verbose

      Examples:

      - use export and import to transplant a bugfix to the current
        branch::

          hg export -r 9353 | hg import -

      - export all the changesets between two revisions to a file with
        rename information::

          hg export --git -r 123:150 > changes.txt

      - split outgoing changes into a series of patches with
        descriptive names::

          hg export -r "outgoing()" -o "%n-%m.patch"

    Returns 0 on success.
    """
    # positional arguments and -r/--rev are combined; default to '.'
    changesets += tuple(opts.get('rev', []))
    if not changesets:
        changesets = ['.']
    revs = scmutil.revrange(repo, changesets)
    if not revs:
        raise error.Abort(_("export requires at least one changeset"))
    ui.note(_('exporting patches:\n') if len(revs) > 1
            else _('exporting patch:\n'))
    cmdutil.export(repo, revs, template=opts.get('output'),
                   switch_parent=opts.get('switch_parent'),
                   opts=patch.diffallopts(ui, opts))
3905
3906
@command('files',
    [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
     ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
    ] + walkopts + formatteropts + subrepoopts,
    _('[OPTION]... [PATTERN]...'))
def files(ui, repo, *pats, **opts):
    """list tracked files

    Print files under Mercurial control in the working directory or
    specified revision whose names match the given patterns (excluding
    removed files).

    If no patterns are given to match, this command prints the names
    of all files under Mercurial control in the working directory.

    .. container:: verbose

      Examples:

      - list all files under the current directory::

          hg files .

      - shows sizes and flags for current revision::

          hg files -vr .

      - list all files named README::

          hg files -I "**/README"

      - list all binary files::

          hg files "set:binary()"

      - find files containing a regular expression::

          hg files "set:grep('bob')"

      - search tracked file contents with xargs and grep::

          hg files -0 | xargs -0 grep foo

    See :hg:`help patterns` and :hg:`help filesets` for more information
    on specifying file patterns.

    Returns 0 if a match is found, 1 otherwise.

    """
    ctx = scmutil.revsingle(repo, opts.get('rev'), None)

    # NUL-terminate entries under --print0 so output is xargs -0 safe
    end = '\0' if opts.get('print0') else '\n'
    fm = ui.formatter('files', opts)
    fmt = '%s' + end

    m = scmutil.match(ctx, pats, opts)
    ret = cmdutil.files(ui, ctx, m, fm, fmt, opts.get('subrepos'))
    fm.end()
    return ret
3969
3970
@command('^forget', walkopts, _('[OPTION]... FILE...'), inferrepo=True)
def forget(ui, repo, *pats, **opts):
    """forget the specified files on the next commit

    Mark the specified files so they will no longer be tracked
    after the next commit.

    This only removes files from the current branch, not from the
    entire project history, and it does not delete them from the
    working directory.

    To delete the file from the working directory, see :hg:`remove`.

    To undo a forget before the next commit, see :hg:`add`.

    .. container:: verbose

      Examples:

      - forget newly-added binary files::

          hg forget "set:added() and binary()"

      - forget files that would be excluded by .hgignore::

          hg forget "set:hgignore()"

    Returns 0 on success.
    """

    if not pats:
        raise error.Abort(_('no files specified'))

    m = scmutil.match(repo[None], pats, opts)
    # cmdutil.forget returns (rejected, forgotten); exit 1 if anything
    # could not be forgotten
    rejected = cmdutil.forget(ui, repo, m, prefix="", explicitonly=False)[0]
    return 1 if rejected else 0
4006
4007
@command(
    'graft',
    [('r', 'rev', [], _('revisions to graft'), _('REV')),
     ('c', 'continue', False, _('resume interrupted graft')),
     ('e', 'edit', False, _('invoke editor on commit messages')),
     ('', 'log', None, _('append graft info to log message')),
     ('f', 'force', False, _('force graft')),
     ('D', 'currentdate', False,
      _('record the current date as commit date')),
     # note: -U is a boolean flag; the stray _('DATE') value label that
     # used to follow its help text made 'hg help graft' show "-U DATE"
     ('U', 'currentuser', False,
      _('record the current user as committer'))]
    + commitopts2 + mergetoolopts + dryrunopts,
    _('[OPTION]... [-r REV]... REV...'))
def graft(ui, repo, *revs, **opts):
    '''copy changes from other branches onto the current branch

    This command uses Mercurial's merge logic to copy individual
    changes from other branches without merging branches in the
    history graph. This is sometimes known as 'backporting' or
    'cherry-picking'. By default, graft will copy user, date, and
    description from the source changesets.

    Changesets that are ancestors of the current revision, that have
    already been grafted, or that are merges will be skipped.

    If --log is specified, log messages will have a comment appended
    of the form::

      (grafted from CHANGESETHASH)

    If --force is specified, revisions will be grafted even if they
    are already ancestors of or have been grafted to the destination.
    This is useful when the revisions have since been backed out.

    If a graft merge results in conflicts, the graft process is
    interrupted so that the current merge can be manually resolved.
    Once all conflicts are addressed, the graft process can be
    continued with the -c/--continue option.

    .. note::

       The -c/--continue option does not reapply earlier options, except
       for --force.

    .. container:: verbose

      Examples:

      - copy a single change to the stable branch and edit its description::

          hg update stable
          hg graft --edit 9393

      - graft a range of changesets with one exception, updating dates::

          hg graft -D "2085::2093 and not 2091"

      - continue a graft after resolving conflicts::

          hg graft -c

      - show the source of a grafted changeset::

          hg log --debug -r .

      - show revisions sorted by date::

          hg log -r "sort(all(), date)"

    See :hg:`help revisions` and :hg:`help revsets` for more about
    specifying revisions.

    Returns 0 on successful completion.
    '''
    # take the working-directory lock for the whole operation; the real
    # work happens in _dograft
    with repo.wlock():
        return _dograft(ui, repo, *revs, **opts)
4083
4084
4084 def _dograft(ui, repo, *revs, **opts):
4085 def _dograft(ui, repo, *revs, **opts):
4085 if revs and opts.get('rev'):
4086 if revs and opts.get('rev'):
4086 ui.warn(_('warning: inconsistent use of --rev might give unexpected '
4087 ui.warn(_('warning: inconsistent use of --rev might give unexpected '
4087 'revision ordering!\n'))
4088 'revision ordering!\n'))
4088
4089
4089 revs = list(revs)
4090 revs = list(revs)
4090 revs.extend(opts.get('rev'))
4091 revs.extend(opts.get('rev'))
4091
4092
4092 if not opts.get('user') and opts.get('currentuser'):
4093 if not opts.get('user') and opts.get('currentuser'):
4093 opts['user'] = ui.username()
4094 opts['user'] = ui.username()
4094 if not opts.get('date') and opts.get('currentdate'):
4095 if not opts.get('date') and opts.get('currentdate'):
4095 opts['date'] = "%d %d" % util.makedate()
4096 opts['date'] = "%d %d" % util.makedate()
4096
4097
4097 editor = cmdutil.getcommiteditor(editform='graft', **opts)
4098 editor = cmdutil.getcommiteditor(editform='graft', **opts)
4098
4099
4099 cont = False
4100 cont = False
4100 if opts.get('continue'):
4101 if opts.get('continue'):
4101 cont = True
4102 cont = True
4102 if revs:
4103 if revs:
4103 raise error.Abort(_("can't specify --continue and revisions"))
4104 raise error.Abort(_("can't specify --continue and revisions"))
4104 # read in unfinished revisions
4105 # read in unfinished revisions
4105 try:
4106 try:
4106 nodes = repo.vfs.read('graftstate').splitlines()
4107 nodes = repo.vfs.read('graftstate').splitlines()
4107 revs = [repo[node].rev() for node in nodes]
4108 revs = [repo[node].rev() for node in nodes]
4108 except IOError as inst:
4109 except IOError as inst:
4109 if inst.errno != errno.ENOENT:
4110 if inst.errno != errno.ENOENT:
4110 raise
4111 raise
4111 cmdutil.wrongtooltocontinue(repo, _('graft'))
4112 cmdutil.wrongtooltocontinue(repo, _('graft'))
4112 else:
4113 else:
4113 cmdutil.checkunfinished(repo)
4114 cmdutil.checkunfinished(repo)
4114 cmdutil.bailifchanged(repo)
4115 cmdutil.bailifchanged(repo)
4115 if not revs:
4116 if not revs:
4116 raise error.Abort(_('no revisions specified'))
4117 raise error.Abort(_('no revisions specified'))
4117 revs = scmutil.revrange(repo, revs)
4118 revs = scmutil.revrange(repo, revs)
4118
4119
4119 skipped = set()
4120 skipped = set()
4120 # check for merges
4121 # check for merges
4121 for rev in repo.revs('%ld and merge()', revs):
4122 for rev in repo.revs('%ld and merge()', revs):
4122 ui.warn(_('skipping ungraftable merge revision %s\n') % rev)
4123 ui.warn(_('skipping ungraftable merge revision %s\n') % rev)
4123 skipped.add(rev)
4124 skipped.add(rev)
4124 revs = [r for r in revs if r not in skipped]
4125 revs = [r for r in revs if r not in skipped]
4125 if not revs:
4126 if not revs:
4126 return -1
4127 return -1
4127
4128
4128 # Don't check in the --continue case, in effect retaining --force across
4129 # Don't check in the --continue case, in effect retaining --force across
4129 # --continues. That's because without --force, any revisions we decided to
4130 # --continues. That's because without --force, any revisions we decided to
4130 # skip would have been filtered out here, so they wouldn't have made their
4131 # skip would have been filtered out here, so they wouldn't have made their
4131 # way to the graftstate. With --force, any revisions we would have otherwise
4132 # way to the graftstate. With --force, any revisions we would have otherwise
4132 # skipped would not have been filtered out, and if they hadn't been applied
4133 # skipped would not have been filtered out, and if they hadn't been applied
4133 # already, they'd have been in the graftstate.
4134 # already, they'd have been in the graftstate.
4134 if not (cont or opts.get('force')):
4135 if not (cont or opts.get('force')):
4135 # check for ancestors of dest branch
4136 # check for ancestors of dest branch
4136 crev = repo['.'].rev()
4137 crev = repo['.'].rev()
4137 ancestors = repo.changelog.ancestors([crev], inclusive=True)
4138 ancestors = repo.changelog.ancestors([crev], inclusive=True)
4138 # Cannot use x.remove(y) on smart set, this has to be a list.
4139 # Cannot use x.remove(y) on smart set, this has to be a list.
4139 # XXX make this lazy in the future
4140 # XXX make this lazy in the future
4140 revs = list(revs)
4141 revs = list(revs)
4141 # don't mutate while iterating, create a copy
4142 # don't mutate while iterating, create a copy
4142 for rev in list(revs):
4143 for rev in list(revs):
4143 if rev in ancestors:
4144 if rev in ancestors:
4144 ui.warn(_('skipping ancestor revision %d:%s\n') %
4145 ui.warn(_('skipping ancestor revision %d:%s\n') %
4145 (rev, repo[rev]))
4146 (rev, repo[rev]))
4146 # XXX remove on list is slow
4147 # XXX remove on list is slow
4147 revs.remove(rev)
4148 revs.remove(rev)
4148 if not revs:
4149 if not revs:
4149 return -1
4150 return -1
4150
4151
4151 # analyze revs for earlier grafts
4152 # analyze revs for earlier grafts
4152 ids = {}
4153 ids = {}
4153 for ctx in repo.set("%ld", revs):
4154 for ctx in repo.set("%ld", revs):
4154 ids[ctx.hex()] = ctx.rev()
4155 ids[ctx.hex()] = ctx.rev()
4155 n = ctx.extra().get('source')
4156 n = ctx.extra().get('source')
4156 if n:
4157 if n:
4157 ids[n] = ctx.rev()
4158 ids[n] = ctx.rev()
4158
4159
4159 # check ancestors for earlier grafts
4160 # check ancestors for earlier grafts
4160 ui.debug('scanning for duplicate grafts\n')
4161 ui.debug('scanning for duplicate grafts\n')
4161
4162
4162 for rev in repo.changelog.findmissingrevs(revs, [crev]):
4163 for rev in repo.changelog.findmissingrevs(revs, [crev]):
4163 ctx = repo[rev]
4164 ctx = repo[rev]
4164 n = ctx.extra().get('source')
4165 n = ctx.extra().get('source')
4165 if n in ids:
4166 if n in ids:
4166 try:
4167 try:
4167 r = repo[n].rev()
4168 r = repo[n].rev()
4168 except error.RepoLookupError:
4169 except error.RepoLookupError:
4169 r = None
4170 r = None
4170 if r in revs:
4171 if r in revs:
4171 ui.warn(_('skipping revision %d:%s '
4172 ui.warn(_('skipping revision %d:%s '
4172 '(already grafted to %d:%s)\n')
4173 '(already grafted to %d:%s)\n')
4173 % (r, repo[r], rev, ctx))
4174 % (r, repo[r], rev, ctx))
4174 revs.remove(r)
4175 revs.remove(r)
4175 elif ids[n] in revs:
4176 elif ids[n] in revs:
4176 if r is None:
4177 if r is None:
4177 ui.warn(_('skipping already grafted revision %d:%s '
4178 ui.warn(_('skipping already grafted revision %d:%s '
4178 '(%d:%s also has unknown origin %s)\n')
4179 '(%d:%s also has unknown origin %s)\n')
4179 % (ids[n], repo[ids[n]], rev, ctx, n[:12]))
4180 % (ids[n], repo[ids[n]], rev, ctx, n[:12]))
4180 else:
4181 else:
4181 ui.warn(_('skipping already grafted revision %d:%s '
4182 ui.warn(_('skipping already grafted revision %d:%s '
4182 '(%d:%s also has origin %d:%s)\n')
4183 '(%d:%s also has origin %d:%s)\n')
4183 % (ids[n], repo[ids[n]], rev, ctx, r, n[:12]))
4184 % (ids[n], repo[ids[n]], rev, ctx, r, n[:12]))
4184 revs.remove(ids[n])
4185 revs.remove(ids[n])
4185 elif ctx.hex() in ids:
4186 elif ctx.hex() in ids:
4186 r = ids[ctx.hex()]
4187 r = ids[ctx.hex()]
4187 ui.warn(_('skipping already grafted revision %d:%s '
4188 ui.warn(_('skipping already grafted revision %d:%s '
4188 '(was grafted from %d:%s)\n') %
4189 '(was grafted from %d:%s)\n') %
4189 (r, repo[r], rev, ctx))
4190 (r, repo[r], rev, ctx))
4190 revs.remove(r)
4191 revs.remove(r)
4191 if not revs:
4192 if not revs:
4192 return -1
4193 return -1
4193
4194
4194 for pos, ctx in enumerate(repo.set("%ld", revs)):
4195 for pos, ctx in enumerate(repo.set("%ld", revs)):
4195 desc = '%d:%s "%s"' % (ctx.rev(), ctx,
4196 desc = '%d:%s "%s"' % (ctx.rev(), ctx,
4196 ctx.description().split('\n', 1)[0])
4197 ctx.description().split('\n', 1)[0])
4197 names = repo.nodetags(ctx.node()) + repo.nodebookmarks(ctx.node())
4198 names = repo.nodetags(ctx.node()) + repo.nodebookmarks(ctx.node())
4198 if names:
4199 if names:
4199 desc += ' (%s)' % ' '.join(names)
4200 desc += ' (%s)' % ' '.join(names)
4200 ui.status(_('grafting %s\n') % desc)
4201 ui.status(_('grafting %s\n') % desc)
4201 if opts.get('dry_run'):
4202 if opts.get('dry_run'):
4202 continue
4203 continue
4203
4204
4204 source = ctx.extra().get('source')
4205 source = ctx.extra().get('source')
4205 extra = {}
4206 extra = {}
4206 if source:
4207 if source:
4207 extra['source'] = source
4208 extra['source'] = source
4208 extra['intermediate-source'] = ctx.hex()
4209 extra['intermediate-source'] = ctx.hex()
4209 else:
4210 else:
4210 extra['source'] = ctx.hex()
4211 extra['source'] = ctx.hex()
4211 user = ctx.user()
4212 user = ctx.user()
4212 if opts.get('user'):
4213 if opts.get('user'):
4213 user = opts['user']
4214 user = opts['user']
4214 date = ctx.date()
4215 date = ctx.date()
4215 if opts.get('date'):
4216 if opts.get('date'):
4216 date = opts['date']
4217 date = opts['date']
4217 message = ctx.description()
4218 message = ctx.description()
4218 if opts.get('log'):
4219 if opts.get('log'):
4219 message += '\n(grafted from %s)' % ctx.hex()
4220 message += '\n(grafted from %s)' % ctx.hex()
4220
4221
4221 # we don't merge the first commit when continuing
4222 # we don't merge the first commit when continuing
4222 if not cont:
4223 if not cont:
4223 # perform the graft merge with p1(rev) as 'ancestor'
4224 # perform the graft merge with p1(rev) as 'ancestor'
4224 try:
4225 try:
4225 # ui.forcemerge is an internal variable, do not document
4226 # ui.forcemerge is an internal variable, do not document
4226 repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
4227 repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
4227 'graft')
4228 'graft')
4228 stats = mergemod.graft(repo, ctx, ctx.p1(),
4229 stats = mergemod.graft(repo, ctx, ctx.p1(),
4229 ['local', 'graft'])
4230 ['local', 'graft'])
4230 finally:
4231 finally:
4231 repo.ui.setconfig('ui', 'forcemerge', '', 'graft')
4232 repo.ui.setconfig('ui', 'forcemerge', '', 'graft')
4232 # report any conflicts
4233 # report any conflicts
4233 if stats and stats[3] > 0:
4234 if stats and stats[3] > 0:
4234 # write out state for --continue
4235 # write out state for --continue
4235 nodelines = [repo[rev].hex() + "\n" for rev in revs[pos:]]
4236 nodelines = [repo[rev].hex() + "\n" for rev in revs[pos:]]
4236 repo.vfs.write('graftstate', ''.join(nodelines))
4237 repo.vfs.write('graftstate', ''.join(nodelines))
4237 extra = ''
4238 extra = ''
4238 if opts.get('user'):
4239 if opts.get('user'):
4239 extra += ' --user %s' % util.shellquote(opts['user'])
4240 extra += ' --user %s' % util.shellquote(opts['user'])
4240 if opts.get('date'):
4241 if opts.get('date'):
4241 extra += ' --date %s' % util.shellquote(opts['date'])
4242 extra += ' --date %s' % util.shellquote(opts['date'])
4242 if opts.get('log'):
4243 if opts.get('log'):
4243 extra += ' --log'
4244 extra += ' --log'
4244 hint=_("use 'hg resolve' and 'hg graft --continue%s'") % extra
4245 hint=_("use 'hg resolve' and 'hg graft --continue%s'") % extra
4245 raise error.Abort(
4246 raise error.Abort(
4246 _("unresolved conflicts, can't continue"),
4247 _("unresolved conflicts, can't continue"),
4247 hint=hint)
4248 hint=hint)
4248 else:
4249 else:
4249 cont = False
4250 cont = False
4250
4251
4251 # commit
4252 # commit
4252 node = repo.commit(text=message, user=user,
4253 node = repo.commit(text=message, user=user,
4253 date=date, extra=extra, editor=editor)
4254 date=date, extra=extra, editor=editor)
4254 if node is None:
4255 if node is None:
4255 ui.warn(
4256 ui.warn(
4256 _('note: graft of %d:%s created no changes to commit\n') %
4257 _('note: graft of %d:%s created no changes to commit\n') %
4257 (ctx.rev(), ctx))
4258 (ctx.rev(), ctx))
4258
4259
4259 # remove state when we complete successfully
4260 # remove state when we complete successfully
4260 if not opts.get('dry_run'):
4261 if not opts.get('dry_run'):
4261 util.unlinkpath(repo.join('graftstate'), ignoremissing=True)
4262 util.unlinkpath(repo.join('graftstate'), ignoremissing=True)
4262
4263
4263 return 0
4264 return 0
4264
4265
@command('grep',
    [('0', 'print0', None, _('end fields with NUL')),
    ('', 'all', None, _('print all revisions that match')),
    ('a', 'text', None, _('treat all files as text')),
    ('f', 'follow', None,
     _('follow changeset history,'
       ' or file history across copies and renames')),
    ('i', 'ignore-case', None, _('ignore case when matching')),
    ('l', 'files-with-matches', None,
     _('print only filenames and revisions that match')),
    ('n', 'line-number', None, _('print matching line numbers')),
    ('r', 'rev', [],
     _('only search files changed within revision range'), _('REV')),
    ('u', 'user', None, _('list the author (long with -v)')),
    ('d', 'date', None, _('list the date (short with -q)')),
    ] + walkopts,
    _('[OPTION]... PATTERN [FILE]...'),
    inferrepo=True)
def grep(ui, repo, pattern, *pats, **opts):
    """search for a pattern in specified files and revisions

    Search revisions of files for a regular expression.

    This command behaves differently than Unix grep. It only accepts
    Python/Perl regexps. It searches repository history, not the
    working directory. It always prints the revision number in which a
    match appears.

    By default, grep only prints output for the first revision of a
    file in which it finds a match. To get it to print every revision
    that contains a change in match status ("-" for a match that
    becomes a non-match, or "+" for a non-match that becomes a match),
    use the --all flag.

    Returns 0 if a match is found, 1 otherwise.
    """
    # Multiline mode so ^/$ anchor per line; -i adds case folding.
    reflags = re.M
    if opts.get('ignore_case'):
        reflags |= re.I
    try:
        regexp = util.re.compile(pattern, reflags)
    except re.error as inst:
        ui.warn(_("grep: invalid match pattern: %s\n") % inst)
        return 1
    # Field/record separators; -0 switches both to NUL for xargs -0.
    sep, eol = ':', '\n'
    if opts.get('print0'):
        sep = eol = '\0'

    # Filelog lookups repeat heavily across revisions; memoize them.
    getfile = util.lrucachefunc(repo.file)

    def matchlines(body):
        # Yield (linenum, colstart, colend, line) for each matching line
        # of 'body'.  Only the first match per line is reported here;
        # linestate.__iter__ re-scans the line for additional matches.
        begin = 0
        linenum = 0
        while begin < len(body):
            match = regexp.search(body, begin)
            if not match:
                break
            mstart, mend = match.span()
            linenum += body.count('\n', begin, mstart) + 1
            lstart = body.rfind('\n', begin, mstart) + 1 or begin
            begin = body.find('\n', mend) + 1 or len(body) + 1
            lend = begin - 1
            yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]

    class linestate(object):
        # One matched line.  Equality is on line text only, so
        # difflib can detect lines whose match status changed between
        # a parent and child revision (--all mode).
        def __init__(self, line, linenum, colstart, colend):
            self.line = line
            self.linenum = linenum
            self.colstart = colstart
            self.colend = colend

        def __hash__(self):
            return hash((self.linenum, self.line))

        def __eq__(self, other):
            return self.line == other.line

        def __iter__(self):
            # Yield (text, label) fragments so every match on the line
            # gets the 'grep.match' color label, not just the first.
            yield (self.line[:self.colstart], '')
            yield (self.line[self.colstart:self.colend], 'grep.match')
            rest = self.line[self.colend:]
            while rest != '':
                match = regexp.search(rest)
                if not match:
                    yield (rest, '')
                    break
                mstart, mend = match.span()
                yield (rest[:mstart], '')
                yield (rest[mstart:mend], 'grep.match')
                rest = rest[mend:]

    # matches: rev -> {filename: [linestate, ...]}
    # copies:  rev -> {filename: copy-source} (only with --follow)
    matches = {}
    copies = {}
    def grepbody(fn, rev, body):
        # Record every matching line of 'fn'@'rev' into 'matches'.
        matches[rev].setdefault(fn, [])
        m = matches[rev][fn]
        for lnum, cstart, cend, line in matchlines(body):
            s = linestate(line, lnum, cstart, cend)
            m.append(s)

    def difflinestates(a, b):
        # Diff two linestate lists, yielding ('+'/'-', linestate) for
        # lines whose match status changed (--all output).
        sm = difflib.SequenceMatcher(None, a, b)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag == 'insert':
                for i in xrange(blo, bhi):
                    yield ('+', b[i])
            elif tag == 'delete':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
            elif tag == 'replace':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
                for i in xrange(blo, bhi):
                    yield ('+', b[i])

    def display(fn, ctx, pstates, states):
        # Print matches for one file at one revision.  Returns True if
        # anything was printed.
        rev = ctx.rev()
        if ui.quiet:
            datefunc = util.shortdate
        else:
            datefunc = util.datestr
        found = False
        @util.cachefunc
        def binary():
            # Lazily read the file only if -a/--text was not given.
            flog = getfile(fn)
            return util.binary(flog.read(ctx.filenode(fn)))

        if opts.get('all'):
            iter = difflinestates(pstates, states)
        else:
            iter = [('', l) for l in states]
        for change, l in iter:
            cols = [(fn, 'grep.filename'), (str(rev), 'grep.rev')]

            if opts.get('line_number'):
                cols.append((str(l.linenum), 'grep.linenumber'))
            if opts.get('all'):
                cols.append((change, 'grep.change'))
            if opts.get('user'):
                cols.append((ui.shortuser(ctx.user()), 'grep.user'))
            if opts.get('date'):
                cols.append((datefunc(ctx.date()), 'grep.date'))
            for col, label in cols[:-1]:
                ui.write(col, label=label)
                ui.write(sep, label='grep.sep')
            ui.write(cols[-1][0], label=cols[-1][1])
            if not opts.get('files_with_matches'):
                ui.write(sep, label='grep.sep')
                if not opts.get('text') and binary():
                    ui.write(_(" Binary file matches"))
                else:
                    for s, label in l:
                        ui.write(s, label=label)
            ui.write(eol)
            found = True
            if opts.get('files_with_matches'):
                # -l: one line per file is enough.
                break
        return found

    # skip: files already reported (default mode stops at first match).
    skip = {}
    revfiles = {}
    matchfn = scmutil.match(repo[None], pats, opts)
    found = False
    follow = opts.get('follow')

    def prep(ctx, fns):
        # walkchangerevs prefetch hook: collect match data for 'ctx'
        # and its first parent so the main loop can diff them.
        rev = ctx.rev()
        pctx = ctx.p1()
        parent = pctx.rev()
        matches.setdefault(rev, {})
        matches.setdefault(parent, {})
        files = revfiles.setdefault(rev, [])
        for fn in fns:
            flog = getfile(fn)
            try:
                fnode = ctx.filenode(fn)
            except error.LookupError:
                continue

            copied = flog.renamed(fnode)
            copy = follow and copied and copied[0]
            if copy:
                copies.setdefault(rev, {})[fn] = copy
            if fn in skip:
                # Propagate the skip across a rename.
                if copy:
                    skip[copy] = True
                continue
            files.append(fn)

            if fn not in matches[rev]:
                grepbody(fn, rev, flog.read(fnode))

            pfn = copy or fn
            if pfn not in matches[parent]:
                try:
                    fnode = pctx.filenode(pfn)
                    grepbody(pfn, parent, flog.read(fnode))
                except error.LookupError:
                    # File does not exist in the parent; no pstates.
                    pass

    for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
        rev = ctx.rev()
        parent = ctx.p1().rev()
        for fn in sorted(revfiles.get(rev, [])):
            states = matches[rev][fn]
            copy = copies.get(rev, {}).get(fn)
            if fn in skip:
                if copy:
                    skip[copy] = True
                continue
            pstates = matches.get(parent, {}).get(copy or fn, [])
            if pstates or states:
                r = display(fn, ctx, pstates, states)
                found = found or r
                if r and not opts.get('all'):
                    # Default mode: first matching revision per file.
                    skip[fn] = True
                    if copy:
                        skip[copy] = True
        # Free per-revision state as soon as it has been displayed.
        del matches[rev]
        del revfiles[rev]

    return not found
4487
4488
@command('heads',
    [('r', 'rev', '',
    _('show only heads which are descendants of STARTREV'), _('STARTREV')),
    ('t', 'topo', False, _('show topological heads only')),
    ('a', 'active', False, _('show active branchheads only (DEPRECATED)')),
    ('c', 'closed', False, _('show normal and closed branch heads')),
    ] + templateopts,
    _('[-ct] [-r STARTREV] [REV]...'))
def heads(ui, repo, *branchrevs, **opts):
    """show branch heads

    With no arguments, show all open branch heads in the repository.
    Branch heads are changesets that have no descendants on the
    same branch. They are where development generally takes place and
    are the usual targets for update and merge operations.

    If one or more REVs are given, only open branch heads on the
    branches associated with the specified changesets are shown. This
    means that you can use :hg:`heads .` to see the heads on the
    currently checked-out branch.

    If -c/--closed is specified, also show branch heads marked closed
    (see :hg:`commit --close-branch`).

    If STARTREV is specified, only those heads that are descendants of
    STARTREV will be displayed.

    If -t/--topo is specified, named branch mechanics will be ignored and only
    topological heads (changesets with no children) will be shown.

    Returns 0 if matching heads are found, 1 if not.
    """

    # Optional anchor: only heads descending from STARTREV qualify.
    start = None
    if 'rev' in opts:
        start = scmutil.revsingle(repo, opts['rev'], None).node()

    if opts.get('topo'):
        # Topological heads ignore named branches entirely.
        headctxs = [repo[node] for node in repo.heads(start)]
    else:
        nodes = []
        for branch in repo.branchmap():
            nodes.extend(repo.branchheads(branch, start, opts.get('closed')))
        headctxs = [repo[node] for node in nodes]

    if branchrevs:
        # Restrict to the branches named by the given revisions.
        branches = set(repo[br].branch() for br in branchrevs)
        headctxs = [h for h in headctxs if h.branch() in branches]

    if opts.get('active') and branchrevs:
        # DEPRECATED -a: additionally require true DAG heads.
        dagheads = repo.heads(start)
        headctxs = [h for h in headctxs if h.node() in dagheads]

    if branchrevs:
        # Warn about requested branches that yielded no heads at all.
        haveheads = set(h.branch() for h in headctxs)
        missing = branches - haveheads
        if missing:
            headless = ', '.join(b for b in missing)
            msg = _('no open branch heads found on branches %s')
            if opts.get('rev'):
                msg += _(' (started at %s)') % opts['rev']
            ui.warn((msg + '\n') % headless)

    if not headctxs:
        return 1

    # Show newest heads first.
    displayer = cmdutil.show_changeset(ui, repo, opts)
    for ctx in sorted(headctxs, key=lambda c: -c.rev()):
        displayer.show(ctx)
    displayer.close()
4558
4559
@command('help',
    [('e', 'extension', None, _('show only help for extensions')),
    ('c', 'command', None, _('show only help for commands')),
    ('k', 'keyword', None, _('show topics matching keyword')),
    ('s', 'system', [], _('show help for specific platform(s)')),
    ],
    _('[-ecks] [TOPIC]'),
    norepo=True)
def help_(ui, name=None, **opts):
    """show help for a given topic or a help overview

    With no arguments, print a list of commands with short help messages.

    Given a topic, extension, or command name, print help for that
    topic.

    Returns 0 if successful.
    """

    # Wrap to ui.textwidth, but never wider than the terminal allows.
    textwidth = ui.configint('ui', 'textwidth', 78)
    termwidth = ui.termwidth() - 2
    if textwidth <= 0 or termwidth < textwidth:
        textwidth = termwidth

    # Platform keywords decide which conditional help sections survive
    # minirst filtering; -s overrides autodetection.
    keep = opts.get('system') or []
    if not keep:
        if sys.platform.startswith('win'):
            keep.append('windows')
        elif sys.platform == 'OpenVMS':
            keep.append('vms')
        elif sys.platform == 'plan9':
            keep.append('plan9')
        else:
            keep.append('unix')
            keep.append(sys.platform.lower())
    if ui.verbose:
        keep.append('verbose')

    # Dotted names address a section ("topic.section") or a subtopic,
    # possibly with its own section ("topic.subtopic.section").
    section = subtopic = None
    if name and '.' in name:
        name, remaining = name.split('.', 1)
        remaining = encoding.lower(remaining)
        if '.' in remaining:
            subtopic, section = remaining.split('.', 1)
        elif name in help.subtopics:
            subtopic = remaining
        else:
            section = remaining

    text = help.help_(ui, name, subtopic=subtopic, **opts)

    formatted, pruned = minirst.format(text, textwidth, keep=keep,
                                       section=section)

    # We could have been given a weird ".foo" section without a name
    # to look for, or we could have simply failed to found "foo.bar"
    # because bar isn't a section of foo
    if section and not (formatted and name):
        raise error.Abort(_("help section not found"))

    # Re-render with a marker telling the reader whether verbose-only
    # material was omitted from (or is present in) the output.
    keep.append('omitted' if 'verbose' in pruned else 'notomitted')
    formatted, pruned = minirst.format(text, textwidth, keep=keep,
                                       section=section)
    ui.write(formatted)
4628
4629
4629
4630
4630 @command('identify|id',
4631 @command('identify|id',
4631 [('r', 'rev', '',
4632 [('r', 'rev', '',
4632 _('identify the specified revision'), _('REV')),
4633 _('identify the specified revision'), _('REV')),
4633 ('n', 'num', None, _('show local revision number')),
4634 ('n', 'num', None, _('show local revision number')),
4634 ('i', 'id', None, _('show global revision id')),
4635 ('i', 'id', None, _('show global revision id')),
4635 ('b', 'branch', None, _('show branch')),
4636 ('b', 'branch', None, _('show branch')),
4636 ('t', 'tags', None, _('show tags')),
4637 ('t', 'tags', None, _('show tags')),
4637 ('B', 'bookmarks', None, _('show bookmarks')),
4638 ('B', 'bookmarks', None, _('show bookmarks')),
4638 ] + remoteopts,
4639 ] + remoteopts,
4639 _('[-nibtB] [-r REV] [SOURCE]'),
4640 _('[-nibtB] [-r REV] [SOURCE]'),
4640 optionalrepo=True)
4641 optionalrepo=True)
4641 def identify(ui, repo, source=None, rev=None,
4642 def identify(ui, repo, source=None, rev=None,
4642 num=None, id=None, branch=None, tags=None, bookmarks=None, **opts):
4643 num=None, id=None, branch=None, tags=None, bookmarks=None, **opts):
4643 """identify the working directory or specified revision
4644 """identify the working directory or specified revision
4644
4645
4645 Print a summary identifying the repository state at REV using one or
4646 Print a summary identifying the repository state at REV using one or
4646 two parent hash identifiers, followed by a "+" if the working
4647 two parent hash identifiers, followed by a "+" if the working
4647 directory has uncommitted changes, the branch name (if not default),
4648 directory has uncommitted changes, the branch name (if not default),
4648 a list of tags, and a list of bookmarks.
4649 a list of tags, and a list of bookmarks.
4649
4650
4650 When REV is not given, print a summary of the current state of the
4651 When REV is not given, print a summary of the current state of the
4651 repository.
4652 repository.
4652
4653
4653 Specifying a path to a repository root or Mercurial bundle will
4654 Specifying a path to a repository root or Mercurial bundle will
4654 cause lookup to operate on that repository/bundle.
4655 cause lookup to operate on that repository/bundle.
4655
4656
4656 .. container:: verbose
4657 .. container:: verbose
4657
4658
4658 Examples:
4659 Examples:
4659
4660
4660 - generate a build identifier for the working directory::
4661 - generate a build identifier for the working directory::
4661
4662
4662 hg id --id > build-id.dat
4663 hg id --id > build-id.dat
4663
4664
4664 - find the revision corresponding to a tag::
4665 - find the revision corresponding to a tag::
4665
4666
4666 hg id -n -r 1.3
4667 hg id -n -r 1.3
4667
4668
4668 - check the most recent revision of a remote repository::
4669 - check the most recent revision of a remote repository::
4669
4670
4670 hg id -r tip http://selenic.com/hg/
4671 hg id -r tip http://selenic.com/hg/
4671
4672
4672 See :hg:`log` for generating more information about specific revisions,
4673 See :hg:`log` for generating more information about specific revisions,
4673 including full hash identifiers.
4674 including full hash identifiers.
4674
4675
4675 Returns 0 if successful.
4676 Returns 0 if successful.
4676 """
4677 """
4677
4678
4678 if not repo and not source:
4679 if not repo and not source:
4679 raise error.Abort(_("there is no Mercurial repository here "
4680 raise error.Abort(_("there is no Mercurial repository here "
4680 "(.hg not found)"))
4681 "(.hg not found)"))
4681
4682
4682 if ui.debugflag:
4683 if ui.debugflag:
4683 hexfunc = hex
4684 hexfunc = hex
4684 else:
4685 else:
4685 hexfunc = short
4686 hexfunc = short
4686 default = not (num or id or branch or tags or bookmarks)
4687 default = not (num or id or branch or tags or bookmarks)
4687 output = []
4688 output = []
4688 revs = []
4689 revs = []
4689
4690
4690 if source:
4691 if source:
4691 source, branches = hg.parseurl(ui.expandpath(source))
4692 source, branches = hg.parseurl(ui.expandpath(source))
4692 peer = hg.peer(repo or ui, opts, source) # only pass ui when no repo
4693 peer = hg.peer(repo or ui, opts, source) # only pass ui when no repo
4693 repo = peer.local()
4694 repo = peer.local()
4694 revs, checkout = hg.addbranchrevs(repo, peer, branches, None)
4695 revs, checkout = hg.addbranchrevs(repo, peer, branches, None)
4695
4696
4696 if not repo:
4697 if not repo:
4697 if num or branch or tags:
4698 if num or branch or tags:
4698 raise error.Abort(
4699 raise error.Abort(
4699 _("can't query remote revision number, branch, or tags"))
4700 _("can't query remote revision number, branch, or tags"))
4700 if not rev and revs:
4701 if not rev and revs:
4701 rev = revs[0]
4702 rev = revs[0]
4702 if not rev:
4703 if not rev:
4703 rev = "tip"
4704 rev = "tip"
4704
4705
4705 remoterev = peer.lookup(rev)
4706 remoterev = peer.lookup(rev)
4706 if default or id:
4707 if default or id:
4707 output = [hexfunc(remoterev)]
4708 output = [hexfunc(remoterev)]
4708
4709
4709 def getbms():
4710 def getbms():
4710 bms = []
4711 bms = []
4711
4712
4712 if 'bookmarks' in peer.listkeys('namespaces'):
4713 if 'bookmarks' in peer.listkeys('namespaces'):
4713 hexremoterev = hex(remoterev)
4714 hexremoterev = hex(remoterev)
4714 bms = [bm for bm, bmr in peer.listkeys('bookmarks').iteritems()
4715 bms = [bm for bm, bmr in peer.listkeys('bookmarks').iteritems()
4715 if bmr == hexremoterev]
4716 if bmr == hexremoterev]
4716
4717
4717 return sorted(bms)
4718 return sorted(bms)
4718
4719
4719 if bookmarks:
4720 if bookmarks:
4720 output.extend(getbms())
4721 output.extend(getbms())
4721 elif default and not ui.quiet:
4722 elif default and not ui.quiet:
4722 # multiple bookmarks for a single parent separated by '/'
4723 # multiple bookmarks for a single parent separated by '/'
4723 bm = '/'.join(getbms())
4724 bm = '/'.join(getbms())
4724 if bm:
4725 if bm:
4725 output.append(bm)
4726 output.append(bm)
4726 else:
4727 else:
4727 ctx = scmutil.revsingle(repo, rev, None)
4728 ctx = scmutil.revsingle(repo, rev, None)
4728
4729
4729 if ctx.rev() is None:
4730 if ctx.rev() is None:
4730 ctx = repo[None]
4731 ctx = repo[None]
4731 parents = ctx.parents()
4732 parents = ctx.parents()
4732 taglist = []
4733 taglist = []
4733 for p in parents:
4734 for p in parents:
4734 taglist.extend(p.tags())
4735 taglist.extend(p.tags())
4735
4736
4736 changed = ""
4737 changed = ""
4737 if default or id or num:
4738 if default or id or num:
4738 if (any(repo.status())
4739 if (any(repo.status())
4739 or any(ctx.sub(s).dirty() for s in ctx.substate)):
4740 or any(ctx.sub(s).dirty() for s in ctx.substate)):
4740 changed = '+'
4741 changed = '+'
4741 if default or id:
4742 if default or id:
4742 output = ["%s%s" %
4743 output = ["%s%s" %
4743 ('+'.join([hexfunc(p.node()) for p in parents]), changed)]
4744 ('+'.join([hexfunc(p.node()) for p in parents]), changed)]
4744 if num:
4745 if num:
4745 output.append("%s%s" %
4746 output.append("%s%s" %
4746 ('+'.join([str(p.rev()) for p in parents]), changed))
4747 ('+'.join([str(p.rev()) for p in parents]), changed))
4747 else:
4748 else:
4748 if default or id:
4749 if default or id:
4749 output = [hexfunc(ctx.node())]
4750 output = [hexfunc(ctx.node())]
4750 if num:
4751 if num:
4751 output.append(str(ctx.rev()))
4752 output.append(str(ctx.rev()))
4752 taglist = ctx.tags()
4753 taglist = ctx.tags()
4753
4754
4754 if default and not ui.quiet:
4755 if default and not ui.quiet:
4755 b = ctx.branch()
4756 b = ctx.branch()
4756 if b != 'default':
4757 if b != 'default':
4757 output.append("(%s)" % b)
4758 output.append("(%s)" % b)
4758
4759
4759 # multiple tags for a single parent separated by '/'
4760 # multiple tags for a single parent separated by '/'
4760 t = '/'.join(taglist)
4761 t = '/'.join(taglist)
4761 if t:
4762 if t:
4762 output.append(t)
4763 output.append(t)
4763
4764
4764 # multiple bookmarks for a single parent separated by '/'
4765 # multiple bookmarks for a single parent separated by '/'
4765 bm = '/'.join(ctx.bookmarks())
4766 bm = '/'.join(ctx.bookmarks())
4766 if bm:
4767 if bm:
4767 output.append(bm)
4768 output.append(bm)
4768 else:
4769 else:
4769 if branch:
4770 if branch:
4770 output.append(ctx.branch())
4771 output.append(ctx.branch())
4771
4772
4772 if tags:
4773 if tags:
4773 output.extend(taglist)
4774 output.extend(taglist)
4774
4775
4775 if bookmarks:
4776 if bookmarks:
4776 output.extend(ctx.bookmarks())
4777 output.extend(ctx.bookmarks())
4777
4778
4778 ui.write("%s\n" % ' '.join(output))
4779 ui.write("%s\n" % ' '.join(output))
4779
4780
4780 @command('import|patch',
4781 @command('import|patch',
4781 [('p', 'strip', 1,
4782 [('p', 'strip', 1,
4782 _('directory strip option for patch. This has the same '
4783 _('directory strip option for patch. This has the same '
4783 'meaning as the corresponding patch option'), _('NUM')),
4784 'meaning as the corresponding patch option'), _('NUM')),
4784 ('b', 'base', '', _('base path (DEPRECATED)'), _('PATH')),
4785 ('b', 'base', '', _('base path (DEPRECATED)'), _('PATH')),
4785 ('e', 'edit', False, _('invoke editor on commit messages')),
4786 ('e', 'edit', False, _('invoke editor on commit messages')),
4786 ('f', 'force', None,
4787 ('f', 'force', None,
4787 _('skip check for outstanding uncommitted changes (DEPRECATED)')),
4788 _('skip check for outstanding uncommitted changes (DEPRECATED)')),
4788 ('', 'no-commit', None,
4789 ('', 'no-commit', None,
4789 _("don't commit, just update the working directory")),
4790 _("don't commit, just update the working directory")),
4790 ('', 'bypass', None,
4791 ('', 'bypass', None,
4791 _("apply patch without touching the working directory")),
4792 _("apply patch without touching the working directory")),
4792 ('', 'partial', None,
4793 ('', 'partial', None,
4793 _('commit even if some hunks fail')),
4794 _('commit even if some hunks fail')),
4794 ('', 'exact', None,
4795 ('', 'exact', None,
4795 _('abort if patch would apply lossily')),
4796 _('abort if patch would apply lossily')),
4796 ('', 'prefix', '',
4797 ('', 'prefix', '',
4797 _('apply patch to subdirectory'), _('DIR')),
4798 _('apply patch to subdirectory'), _('DIR')),
4798 ('', 'import-branch', None,
4799 ('', 'import-branch', None,
4799 _('use any branch information in patch (implied by --exact)'))] +
4800 _('use any branch information in patch (implied by --exact)'))] +
4800 commitopts + commitopts2 + similarityopts,
4801 commitopts + commitopts2 + similarityopts,
4801 _('[OPTION]... PATCH...'))
4802 _('[OPTION]... PATCH...'))
4802 def import_(ui, repo, patch1=None, *patches, **opts):
4803 def import_(ui, repo, patch1=None, *patches, **opts):
4803 """import an ordered set of patches
4804 """import an ordered set of patches
4804
4805
4805 Import a list of patches and commit them individually (unless
4806 Import a list of patches and commit them individually (unless
4806 --no-commit is specified).
4807 --no-commit is specified).
4807
4808
4808 To read a patch from standard input, use "-" as the patch name. If
4809 To read a patch from standard input, use "-" as the patch name. If
4809 a URL is specified, the patch will be downloaded from there.
4810 a URL is specified, the patch will be downloaded from there.
4810
4811
4811 Import first applies changes to the working directory (unless
4812 Import first applies changes to the working directory (unless
4812 --bypass is specified), import will abort if there are outstanding
4813 --bypass is specified), import will abort if there are outstanding
4813 changes.
4814 changes.
4814
4815
4815 Use --bypass to apply and commit patches directly to the
4816 Use --bypass to apply and commit patches directly to the
4816 repository, without affecting the working directory. Without
4817 repository, without affecting the working directory. Without
4817 --exact, patches will be applied on top of the working directory
4818 --exact, patches will be applied on top of the working directory
4818 parent revision.
4819 parent revision.
4819
4820
4820 You can import a patch straight from a mail message. Even patches
4821 You can import a patch straight from a mail message. Even patches
4821 as attachments work (to use the body part, it must have type
4822 as attachments work (to use the body part, it must have type
4822 text/plain or text/x-patch). From and Subject headers of email
4823 text/plain or text/x-patch). From and Subject headers of email
4823 message are used as default committer and commit message. All
4824 message are used as default committer and commit message. All
4824 text/plain body parts before first diff are added to the commit
4825 text/plain body parts before first diff are added to the commit
4825 message.
4826 message.
4826
4827
4827 If the imported patch was generated by :hg:`export`, user and
4828 If the imported patch was generated by :hg:`export`, user and
4828 description from patch override values from message headers and
4829 description from patch override values from message headers and
4829 body. Values given on command line with -m/--message and -u/--user
4830 body. Values given on command line with -m/--message and -u/--user
4830 override these.
4831 override these.
4831
4832
4832 If --exact is specified, import will set the working directory to
4833 If --exact is specified, import will set the working directory to
4833 the parent of each patch before applying it, and will abort if the
4834 the parent of each patch before applying it, and will abort if the
4834 resulting changeset has a different ID than the one recorded in
4835 resulting changeset has a different ID than the one recorded in
4835 the patch. This will guard against various ways that portable
4836 the patch. This will guard against various ways that portable
4836 patch formats and mail systems might fail to transfer Mercurial
4837 patch formats and mail systems might fail to transfer Mercurial
4837 data or metadata. See :hg:`bundle` for lossless transmission.
4838 data or metadata. See :hg:`bundle` for lossless transmission.
4838
4839
4839 Use --partial to ensure a changeset will be created from the patch
4840 Use --partial to ensure a changeset will be created from the patch
4840 even if some hunks fail to apply. Hunks that fail to apply will be
4841 even if some hunks fail to apply. Hunks that fail to apply will be
4841 written to a <target-file>.rej file. Conflicts can then be resolved
4842 written to a <target-file>.rej file. Conflicts can then be resolved
4842 by hand before :hg:`commit --amend` is run to update the created
4843 by hand before :hg:`commit --amend` is run to update the created
4843 changeset. This flag exists to let people import patches that
4844 changeset. This flag exists to let people import patches that
4844 partially apply without losing the associated metadata (author,
4845 partially apply without losing the associated metadata (author,
4845 date, description, ...).
4846 date, description, ...).
4846
4847
4847 .. note::
4848 .. note::
4848
4849
4849 When no hunks apply cleanly, :hg:`import --partial` will create
4850 When no hunks apply cleanly, :hg:`import --partial` will create
4850 an empty changeset, importing only the patch metadata.
4851 an empty changeset, importing only the patch metadata.
4851
4852
4852 With -s/--similarity, hg will attempt to discover renames and
4853 With -s/--similarity, hg will attempt to discover renames and
4853 copies in the patch in the same way as :hg:`addremove`.
4854 copies in the patch in the same way as :hg:`addremove`.
4854
4855
4855 It is possible to use external patch programs to perform the patch
4856 It is possible to use external patch programs to perform the patch
4856 by setting the ``ui.patch`` configuration option. For the default
4857 by setting the ``ui.patch`` configuration option. For the default
4857 internal tool, the fuzz can also be configured via ``patch.fuzz``.
4858 internal tool, the fuzz can also be configured via ``patch.fuzz``.
4858 See :hg:`help config` for more information about configuration
4859 See :hg:`help config` for more information about configuration
4859 files and how to use these options.
4860 files and how to use these options.
4860
4861
4861 See :hg:`help dates` for a list of formats valid for -d/--date.
4862 See :hg:`help dates` for a list of formats valid for -d/--date.
4862
4863
4863 .. container:: verbose
4864 .. container:: verbose
4864
4865
4865 Examples:
4866 Examples:
4866
4867
4867 - import a traditional patch from a website and detect renames::
4868 - import a traditional patch from a website and detect renames::
4868
4869
4869 hg import -s 80 http://example.com/bugfix.patch
4870 hg import -s 80 http://example.com/bugfix.patch
4870
4871
4871 - import a changeset from an hgweb server::
4872 - import a changeset from an hgweb server::
4872
4873
4873 hg import http://www.selenic.com/hg/rev/5ca8c111e9aa
4874 hg import http://www.selenic.com/hg/rev/5ca8c111e9aa
4874
4875
4875 - import all the patches in an Unix-style mbox::
4876 - import all the patches in an Unix-style mbox::
4876
4877
4877 hg import incoming-patches.mbox
4878 hg import incoming-patches.mbox
4878
4879
4879 - attempt to exactly restore an exported changeset (not always
4880 - attempt to exactly restore an exported changeset (not always
4880 possible)::
4881 possible)::
4881
4882
4882 hg import --exact proposed-fix.patch
4883 hg import --exact proposed-fix.patch
4883
4884
4884 - use an external tool to apply a patch which is too fuzzy for
4885 - use an external tool to apply a patch which is too fuzzy for
4885 the default internal tool.
4886 the default internal tool.
4886
4887
4887 hg import --config ui.patch="patch --merge" fuzzy.patch
4888 hg import --config ui.patch="patch --merge" fuzzy.patch
4888
4889
4889 - change the default fuzzing from 2 to a less strict 7
4890 - change the default fuzzing from 2 to a less strict 7
4890
4891
4891 hg import --config ui.fuzz=7 fuzz.patch
4892 hg import --config ui.fuzz=7 fuzz.patch
4892
4893
4893 Returns 0 on success, 1 on partial success (see --partial).
4894 Returns 0 on success, 1 on partial success (see --partial).
4894 """
4895 """
4895
4896
4896 if not patch1:
4897 if not patch1:
4897 raise error.Abort(_('need at least one patch to import'))
4898 raise error.Abort(_('need at least one patch to import'))
4898
4899
4899 patches = (patch1,) + patches
4900 patches = (patch1,) + patches
4900
4901
4901 date = opts.get('date')
4902 date = opts.get('date')
4902 if date:
4903 if date:
4903 opts['date'] = util.parsedate(date)
4904 opts['date'] = util.parsedate(date)
4904
4905
4905 exact = opts.get('exact')
4906 exact = opts.get('exact')
4906 update = not opts.get('bypass')
4907 update = not opts.get('bypass')
4907 if not update and opts.get('no_commit'):
4908 if not update and opts.get('no_commit'):
4908 raise error.Abort(_('cannot use --no-commit with --bypass'))
4909 raise error.Abort(_('cannot use --no-commit with --bypass'))
4909 try:
4910 try:
4910 sim = float(opts.get('similarity') or 0)
4911 sim = float(opts.get('similarity') or 0)
4911 except ValueError:
4912 except ValueError:
4912 raise error.Abort(_('similarity must be a number'))
4913 raise error.Abort(_('similarity must be a number'))
4913 if sim < 0 or sim > 100:
4914 if sim < 0 or sim > 100:
4914 raise error.Abort(_('similarity must be between 0 and 100'))
4915 raise error.Abort(_('similarity must be between 0 and 100'))
4915 if sim and not update:
4916 if sim and not update:
4916 raise error.Abort(_('cannot use --similarity with --bypass'))
4917 raise error.Abort(_('cannot use --similarity with --bypass'))
4917 if exact:
4918 if exact:
4918 if opts.get('edit'):
4919 if opts.get('edit'):
4919 raise error.Abort(_('cannot use --exact with --edit'))
4920 raise error.Abort(_('cannot use --exact with --edit'))
4920 if opts.get('prefix'):
4921 if opts.get('prefix'):
4921 raise error.Abort(_('cannot use --exact with --prefix'))
4922 raise error.Abort(_('cannot use --exact with --prefix'))
4922
4923
4923 base = opts["base"]
4924 base = opts["base"]
4924 wlock = dsguard = lock = tr = None
4925 wlock = dsguard = lock = tr = None
4925 msgs = []
4926 msgs = []
4926 ret = 0
4927 ret = 0
4927
4928
4928
4929
4929 try:
4930 try:
4930 wlock = repo.wlock()
4931 wlock = repo.wlock()
4931
4932
4932 if update:
4933 if update:
4933 cmdutil.checkunfinished(repo)
4934 cmdutil.checkunfinished(repo)
4934 if (exact or not opts.get('force')):
4935 if (exact or not opts.get('force')):
4935 cmdutil.bailifchanged(repo)
4936 cmdutil.bailifchanged(repo)
4936
4937
4937 if not opts.get('no_commit'):
4938 if not opts.get('no_commit'):
4938 lock = repo.lock()
4939 lock = repo.lock()
4939 tr = repo.transaction('import')
4940 tr = repo.transaction('import')
4940 else:
4941 else:
4941 dsguard = cmdutil.dirstateguard(repo, 'import')
4942 dsguard = cmdutil.dirstateguard(repo, 'import')
4942 parents = repo[None].parents()
4943 parents = repo[None].parents()
4943 for patchurl in patches:
4944 for patchurl in patches:
4944 if patchurl == '-':
4945 if patchurl == '-':
4945 ui.status(_('applying patch from stdin\n'))
4946 ui.status(_('applying patch from stdin\n'))
4946 patchfile = ui.fin
4947 patchfile = ui.fin
4947 patchurl = 'stdin' # for error message
4948 patchurl = 'stdin' # for error message
4948 else:
4949 else:
4949 patchurl = os.path.join(base, patchurl)
4950 patchurl = os.path.join(base, patchurl)
4950 ui.status(_('applying %s\n') % patchurl)
4951 ui.status(_('applying %s\n') % patchurl)
4951 patchfile = hg.openpath(ui, patchurl)
4952 patchfile = hg.openpath(ui, patchurl)
4952
4953
4953 haspatch = False
4954 haspatch = False
4954 for hunk in patch.split(patchfile):
4955 for hunk in patch.split(patchfile):
4955 (msg, node, rej) = cmdutil.tryimportone(ui, repo, hunk,
4956 (msg, node, rej) = cmdutil.tryimportone(ui, repo, hunk,
4956 parents, opts,
4957 parents, opts,
4957 msgs, hg.clean)
4958 msgs, hg.clean)
4958 if msg:
4959 if msg:
4959 haspatch = True
4960 haspatch = True
4960 ui.note(msg + '\n')
4961 ui.note(msg + '\n')
4961 if update or exact:
4962 if update or exact:
4962 parents = repo[None].parents()
4963 parents = repo[None].parents()
4963 else:
4964 else:
4964 parents = [repo[node]]
4965 parents = [repo[node]]
4965 if rej:
4966 if rej:
4966 ui.write_err(_("patch applied partially\n"))
4967 ui.write_err(_("patch applied partially\n"))
4967 ui.write_err(_("(fix the .rej files and run "
4968 ui.write_err(_("(fix the .rej files and run "
4968 "`hg commit --amend`)\n"))
4969 "`hg commit --amend`)\n"))
4969 ret = 1
4970 ret = 1
4970 break
4971 break
4971
4972
4972 if not haspatch:
4973 if not haspatch:
4973 raise error.Abort(_('%s: no diffs found') % patchurl)
4974 raise error.Abort(_('%s: no diffs found') % patchurl)
4974
4975
4975 if tr:
4976 if tr:
4976 tr.close()
4977 tr.close()
4977 if msgs:
4978 if msgs:
4978 repo.savecommitmessage('\n* * *\n'.join(msgs))
4979 repo.savecommitmessage('\n* * *\n'.join(msgs))
4979 if dsguard:
4980 if dsguard:
4980 dsguard.close()
4981 dsguard.close()
4981 return ret
4982 return ret
4982 finally:
4983 finally:
4983 if tr:
4984 if tr:
4984 tr.release()
4985 tr.release()
4985 release(lock, dsguard, wlock)
4986 release(lock, dsguard, wlock)
4986
4987
4987 @command('incoming|in',
4988 @command('incoming|in',
4988 [('f', 'force', None,
4989 [('f', 'force', None,
4989 _('run even if remote repository is unrelated')),
4990 _('run even if remote repository is unrelated')),
4990 ('n', 'newest-first', None, _('show newest record first')),
4991 ('n', 'newest-first', None, _('show newest record first')),
4991 ('', 'bundle', '',
4992 ('', 'bundle', '',
4992 _('file to store the bundles into'), _('FILE')),
4993 _('file to store the bundles into'), _('FILE')),
4993 ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
4994 ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
4994 ('B', 'bookmarks', False, _("compare bookmarks")),
4995 ('B', 'bookmarks', False, _("compare bookmarks")),
4995 ('b', 'branch', [],
4996 ('b', 'branch', [],
4996 _('a specific branch you would like to pull'), _('BRANCH')),
4997 _('a specific branch you would like to pull'), _('BRANCH')),
4997 ] + logopts + remoteopts + subrepoopts,
4998 ] + logopts + remoteopts + subrepoopts,
4998 _('[-p] [-n] [-M] [-f] [-r REV]... [--bundle FILENAME] [SOURCE]'))
4999 _('[-p] [-n] [-M] [-f] [-r REV]... [--bundle FILENAME] [SOURCE]'))
4999 def incoming(ui, repo, source="default", **opts):
5000 def incoming(ui, repo, source="default", **opts):
5000 """show new changesets found in source
5001 """show new changesets found in source
5001
5002
5002 Show new changesets found in the specified path/URL or the default
5003 Show new changesets found in the specified path/URL or the default
5003 pull location. These are the changesets that would have been pulled
5004 pull location. These are the changesets that would have been pulled
5004 if a pull at the time you issued this command.
5005 if a pull at the time you issued this command.
5005
5006
5006 See pull for valid source format details.
5007 See pull for valid source format details.
5007
5008
5008 .. container:: verbose
5009 .. container:: verbose
5009
5010
5010 With -B/--bookmarks, the result of bookmark comparison between
5011 With -B/--bookmarks, the result of bookmark comparison between
5011 local and remote repositories is displayed. With -v/--verbose,
5012 local and remote repositories is displayed. With -v/--verbose,
5012 status is also displayed for each bookmark like below::
5013 status is also displayed for each bookmark like below::
5013
5014
5014 BM1 01234567890a added
5015 BM1 01234567890a added
5015 BM2 1234567890ab advanced
5016 BM2 1234567890ab advanced
5016 BM3 234567890abc diverged
5017 BM3 234567890abc diverged
5017 BM4 34567890abcd changed
5018 BM4 34567890abcd changed
5018
5019
5019 The action taken locally when pulling depends on the
5020 The action taken locally when pulling depends on the
5020 status of each bookmark:
5021 status of each bookmark:
5021
5022
5022 :``added``: pull will create it
5023 :``added``: pull will create it
5023 :``advanced``: pull will update it
5024 :``advanced``: pull will update it
5024 :``diverged``: pull will create a divergent bookmark
5025 :``diverged``: pull will create a divergent bookmark
5025 :``changed``: result depends on remote changesets
5026 :``changed``: result depends on remote changesets
5026
5027
5027 From the point of view of pulling behavior, bookmark
5028 From the point of view of pulling behavior, bookmark
5028 existing only in the remote repository are treated as ``added``,
5029 existing only in the remote repository are treated as ``added``,
5029 even if it is in fact locally deleted.
5030 even if it is in fact locally deleted.
5030
5031
5031 .. container:: verbose
5032 .. container:: verbose
5032
5033
5033 For remote repository, using --bundle avoids downloading the
5034 For remote repository, using --bundle avoids downloading the
5034 changesets twice if the incoming is followed by a pull.
5035 changesets twice if the incoming is followed by a pull.
5035
5036
5036 Examples:
5037 Examples:
5037
5038
5038 - show incoming changes with patches and full description::
5039 - show incoming changes with patches and full description::
5039
5040
5040 hg incoming -vp
5041 hg incoming -vp
5041
5042
5042 - show incoming changes excluding merges, store a bundle::
5043 - show incoming changes excluding merges, store a bundle::
5043
5044
5044 hg in -vpM --bundle incoming.hg
5045 hg in -vpM --bundle incoming.hg
5045 hg pull incoming.hg
5046 hg pull incoming.hg
5046
5047
5047 - briefly list changes inside a bundle::
5048 - briefly list changes inside a bundle::
5048
5049
5049 hg in changes.hg -T "{desc|firstline}\\n"
5050 hg in changes.hg -T "{desc|firstline}\\n"
5050
5051
5051 Returns 0 if there are incoming changes, 1 otherwise.
5052 Returns 0 if there are incoming changes, 1 otherwise.
5052 """
5053 """
5053 if opts.get('graph'):
5054 if opts.get('graph'):
5054 cmdutil.checkunsupportedgraphflags([], opts)
5055 cmdutil.checkunsupportedgraphflags([], opts)
5055 def display(other, chlist, displayer):
5056 def display(other, chlist, displayer):
5056 revdag = cmdutil.graphrevs(other, chlist, opts)
5057 revdag = cmdutil.graphrevs(other, chlist, opts)
5057 cmdutil.displaygraph(ui, repo, revdag, displayer,
5058 cmdutil.displaygraph(ui, repo, revdag, displayer,
5058 graphmod.asciiedges)
5059 graphmod.asciiedges)
5059
5060
5060 hg._incoming(display, lambda: 1, ui, repo, source, opts, buffered=True)
5061 hg._incoming(display, lambda: 1, ui, repo, source, opts, buffered=True)
5061 return 0
5062 return 0
5062
5063
5063 if opts.get('bundle') and opts.get('subrepos'):
5064 if opts.get('bundle') and opts.get('subrepos'):
5064 raise error.Abort(_('cannot combine --bundle and --subrepos'))
5065 raise error.Abort(_('cannot combine --bundle and --subrepos'))
5065
5066
5066 if opts.get('bookmarks'):
5067 if opts.get('bookmarks'):
5067 source, branches = hg.parseurl(ui.expandpath(source),
5068 source, branches = hg.parseurl(ui.expandpath(source),
5068 opts.get('branch'))
5069 opts.get('branch'))
5069 other = hg.peer(repo, opts, source)
5070 other = hg.peer(repo, opts, source)
5070 if 'bookmarks' not in other.listkeys('namespaces'):
5071 if 'bookmarks' not in other.listkeys('namespaces'):
5071 ui.warn(_("remote doesn't support bookmarks\n"))
5072 ui.warn(_("remote doesn't support bookmarks\n"))
5072 return 0
5073 return 0
5073 ui.status(_('comparing with %s\n') % util.hidepassword(source))
5074 ui.status(_('comparing with %s\n') % util.hidepassword(source))
5074 return bookmarks.incoming(ui, repo, other)
5075 return bookmarks.incoming(ui, repo, other)
5075
5076
5076 repo._subtoppath = ui.expandpath(source)
5077 repo._subtoppath = ui.expandpath(source)
5077 try:
5078 try:
5078 return hg.incoming(ui, repo, source, opts)
5079 return hg.incoming(ui, repo, source, opts)
5079 finally:
5080 finally:
5080 del repo._subtoppath
5081 del repo._subtoppath
5081
5082
5082
5083
5083 @command('^init', remoteopts, _('[-e CMD] [--remotecmd CMD] [DEST]'),
5084 @command('^init', remoteopts, _('[-e CMD] [--remotecmd CMD] [DEST]'),
5084 norepo=True)
5085 norepo=True)
5085 def init(ui, dest=".", **opts):
5086 def init(ui, dest=".", **opts):
5086 """create a new repository in the given directory
5087 """create a new repository in the given directory
5087
5088
5088 Initialize a new repository in the given directory. If the given
5089 Initialize a new repository in the given directory. If the given
5089 directory does not exist, it will be created.
5090 directory does not exist, it will be created.
5090
5091
5091 If no directory is given, the current directory is used.
5092 If no directory is given, the current directory is used.
5092
5093
5093 It is possible to specify an ``ssh://`` URL as the destination.
5094 It is possible to specify an ``ssh://`` URL as the destination.
5094 See :hg:`help urls` for more information.
5095 See :hg:`help urls` for more information.
5095
5096
5096 Returns 0 on success.
5097 Returns 0 on success.
5097 """
5098 """
5098 hg.peer(ui, opts, ui.expandpath(dest), create=True)
5099 hg.peer(ui, opts, ui.expandpath(dest), create=True)
5099
5100
5100 @command('locate',
5101 @command('locate',
5101 [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
5102 [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
5102 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
5103 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
5103 ('f', 'fullpath', None, _('print complete paths from the filesystem root')),
5104 ('f', 'fullpath', None, _('print complete paths from the filesystem root')),
5104 ] + walkopts,
5105 ] + walkopts,
5105 _('[OPTION]... [PATTERN]...'))
5106 _('[OPTION]... [PATTERN]...'))
5106 def locate(ui, repo, *pats, **opts):
5107 def locate(ui, repo, *pats, **opts):
5107 """locate files matching specific patterns (DEPRECATED)
5108 """locate files matching specific patterns (DEPRECATED)
5108
5109
5109 Print files under Mercurial control in the working directory whose
5110 Print files under Mercurial control in the working directory whose
5110 names match the given patterns.
5111 names match the given patterns.
5111
5112
5112 By default, this command searches all directories in the working
5113 By default, this command searches all directories in the working
5113 directory. To search just the current directory and its
5114 directory. To search just the current directory and its
5114 subdirectories, use "--include .".
5115 subdirectories, use "--include .".
5115
5116
5116 If no patterns are given to match, this command prints the names
5117 If no patterns are given to match, this command prints the names
5117 of all files under Mercurial control in the working directory.
5118 of all files under Mercurial control in the working directory.
5118
5119
5119 If you want to feed the output of this command into the "xargs"
5120 If you want to feed the output of this command into the "xargs"
5120 command, use the -0 option to both this command and "xargs". This
5121 command, use the -0 option to both this command and "xargs". This
5121 will avoid the problem of "xargs" treating single filenames that
5122 will avoid the problem of "xargs" treating single filenames that
5122 contain whitespace as multiple filenames.
5123 contain whitespace as multiple filenames.
5123
5124
5124 See :hg:`help files` for a more versatile command.
5125 See :hg:`help files` for a more versatile command.
5125
5126
5126 Returns 0 if a match is found, 1 otherwise.
5127 Returns 0 if a match is found, 1 otherwise.
5127 """
5128 """
5128 if opts.get('print0'):
5129 if opts.get('print0'):
5129 end = '\0'
5130 end = '\0'
5130 else:
5131 else:
5131 end = '\n'
5132 end = '\n'
5132 rev = scmutil.revsingle(repo, opts.get('rev'), None).node()
5133 rev = scmutil.revsingle(repo, opts.get('rev'), None).node()
5133
5134
5134 ret = 1
5135 ret = 1
5135 ctx = repo[rev]
5136 ctx = repo[rev]
5136 m = scmutil.match(ctx, pats, opts, default='relglob',
5137 m = scmutil.match(ctx, pats, opts, default='relglob',
5137 badfn=lambda x, y: False)
5138 badfn=lambda x, y: False)
5138
5139
5139 for abs in ctx.matches(m):
5140 for abs in ctx.matches(m):
5140 if opts.get('fullpath'):
5141 if opts.get('fullpath'):
5141 ui.write(repo.wjoin(abs), end)
5142 ui.write(repo.wjoin(abs), end)
5142 else:
5143 else:
5143 ui.write(((pats and m.rel(abs)) or abs), end)
5144 ui.write(((pats and m.rel(abs)) or abs), end)
5144 ret = 0
5145 ret = 0
5145
5146
5146 return ret
5147 return ret
5147
5148
@command('^log|history',
    [('f', 'follow', None,
     _('follow changeset history, or file history across copies and renames')),
    ('', 'follow-first', None,
     _('only follow the first parent of merge changesets (DEPRECATED)')),
    ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
    ('C', 'copies', None, _('show copied files')),
    ('k', 'keyword', [],
     _('do case-insensitive search for a given text'), _('TEXT')),
    ('r', 'rev', [], _('show the specified revision or revset'), _('REV')),
    ('', 'removed', None, _('include revisions where files were removed')),
    ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
    ('u', 'user', [], _('revisions committed by user'), _('USER')),
    ('', 'only-branch', [],
     _('show only changesets within the given named branch (DEPRECATED)'),
     _('BRANCH')),
    ('b', 'branch', [],
     _('show changesets within the given named branch'), _('BRANCH')),
    ('P', 'prune', [],
     _('do not display revision or any of its ancestors'), _('REV')),
    ] + logopts + walkopts,
    _('[OPTION]... [FILE]'),
    inferrepo=True)
def log(ui, repo, *pats, **opts):
    """show revision history of entire repository or files

    Print the revision history of the specified files or the entire
    project.

    If no revision range is specified, the default is ``tip:0`` unless
    --follow is set, in which case the working directory parent is
    used as the starting revision.

    File history is shown without following rename or copy history of
    files. Use -f/--follow with a filename to follow history across
    renames and copies. --follow without a filename will only show
    ancestors or descendants of the starting revision.

    By default this command prints revision number and changeset id,
    tags, non-trivial parents, user, date and time, and a summary for
    each commit. When the -v/--verbose switch is used, the list of
    changed files and full commit message are shown.

    With --graph the revisions are shown as an ASCII art DAG with the most
    recent changeset at the top.
    'o' is a changeset, '@' is a working directory parent, 'x' is obsolete,
    and '+' represents a fork where the changeset from the lines below is a
    parent of the 'o' merge on the same line.

    .. note::

       :hg:`log --patch` may generate unexpected diff output for merge
       changesets, as it will only compare the merge changeset against
       its first parent. Also, only files different from BOTH parents
       will appear in files:.

    .. note::

       For performance reasons, :hg:`log FILE` may omit duplicate changes
       made on branches and will not show removals or mode changes. To
       see all such changes, use the --removed switch.

    .. container:: verbose

      Some examples:

      - changesets with full descriptions and file lists::

          hg log -v

      - changesets ancestral to the working directory::

          hg log -f

      - last 10 commits on the current branch::

          hg log -l 10 -b .

      - changesets showing all modifications of a file, including removals::

          hg log --removed file.c

      - all changesets that touch a directory, with diffs, excluding merges::

          hg log -Mp lib/

      - all revision numbers that match a keyword::

          hg log -k bug --template "{rev}\\n"

      - the full hash identifier of the working directory parent::

          hg log -r . --template "{node}\\n"

      - list available log templates::

          hg log -T list

      - check if a given changeset is included in a tagged release::

          hg log -r "a21ccf and ancestor(1.9)"

      - find all changesets by some user in a date range::

          hg log -k alice -d "may 2008 to jul 2008"

      - summary of all changesets after the last tag::

          hg log -r "last(tagged())::" --template "{desc|firstline}\\n"

    See :hg:`help dates` for a list of formats valid for -d/--date.

    See :hg:`help revisions` and :hg:`help revsets` for more about
    specifying and ordering revisions.

    See :hg:`help templates` for more about pre-packaged styles and
    specifying custom templates.

    Returns 0 on success.

    """
    # --follow combined with explicit -r: rewrite the revset so that
    # ancestors of the requested revisions are walked, newest first,
    # then drop the flag so downstream code sees only the revset.
    if opts.get('follow') and opts.get('rev'):
        opts['rev'] = [revset.formatspec('reverse(::%lr)', opts.get('rev'))]
        del opts['follow']

    # --graph output is handled entirely by cmdutil.graphlog.
    if opts.get('graph'):
        return cmdutil.graphlog(ui, repo, *pats, **opts)

    # Resolve the options/patterns into the revisions to show, the
    # revset expression used, and a per-revision file matcher factory.
    revs, expr, filematcher = cmdutil.getlogrevs(repo, pats, opts)
    limit = cmdutil.loglimit(opts)
    count = 0

    # With -C/--copies, build a rename-lookup function. When -r is
    # given, cap the lookup at the highest requested revision + 1 so
    # rename detection does not scan past the displayed range.
    getrenamed = None
    if opts.get('copies'):
        endrev = None
        if opts.get('rev'):
            endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
        getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)

    displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
    for rev in revs:
        if count == limit:
            break
        ctx = repo[rev]
        copies = None
        # Rename info is skipped for the null revision (rev == 0 is
        # falsy here and also has no parent to have copied from).
        if getrenamed is not None and rev:
            copies = []
            for fn in ctx.files():
                rename = getrenamed(fn, rev)
                if rename:
                    copies.append((fn, rename[0]))
        if filematcher:
            revmatchfn = filematcher(ctx.rev())
        else:
            revmatchfn = None
        displayer.show(ctx, copies=copies, matchfn=revmatchfn)
        # flush() returns whether the changeset was actually emitted;
        # only emitted changesets count toward the --limit.
        if displayer.flush(ctx):
            count += 1

    displayer.close()
5308
5309
@command('manifest',
    [('r', 'rev', '', _('revision to display'), _('REV')),
     ('', 'all', False, _("list files from all revisions"))]
     + formatteropts,
    _('[-r REV]'))
def manifest(ui, repo, node=None, rev=None, **opts):
    """output the current or given revision of the project manifest

    Print a list of version controlled files for the given revision.
    If no revision is given, the first parent of the working directory
    is used, or the null revision if no revision is checked out.

    With -v, print file permissions, symlink and executable bits.
    With --debug, print file revision hashes.

    If option --all is specified, the list of all files from all revisions
    is printed. This includes deleted and renamed files.

    Returns 0 on success.
    """

    fm = ui.formatter('manifest', opts)

    if opts.get('all'):
        # --all is mutually exclusive with a specific revision.
        if rev or node:
            raise error.Abort(_("can't specify a revision with --all"))

        # Enumerate every tracked file ever by scanning the store's
        # data files: each "data/<path>.i" revlog index corresponds to
        # one versioned file; strip the prefix/suffix to recover <path>.
        res = []
        prefix = "data/"
        suffix = ".i"
        plen = len(prefix)
        slen = len(suffix)
        # Lock so the store's file list is stable while scanning.
        with repo.lock():
            for fn, b, size in repo.store.datafiles():
                if size != 0 and fn[-slen:] == suffix and fn[:plen] == prefix:
                    res.append(fn[plen:-slen])
        for f in res:
            fm.startitem()
            fm.write("path", '%s\n', f)
        fm.end()
        return

    if rev and node:
        raise error.Abort(_("please specify just one revision"))

    # The revision may arrive positionally (node) or via -r (rev);
    # normalize to a single value.
    if not node:
        node = rev

    # Display glyph and octal mode for each file flag:
    # 'l' = symlink, 'x' = executable, '' = regular file.
    char = {'l': '@', 'x': '*', '': ''}
    mode = {'l': '644', 'x': '755', '': '644'}
    ctx = scmutil.revsingle(repo, node)
    mf = ctx.manifest()
    for f in ctx:
        fm.startitem()
        fl = ctx[f].flags()
        # Hash only with --debug; mode/type glyph only with --verbose.
        fm.condwrite(ui.debugflag, 'hash', '%s ', hex(mf[f]))
        fm.condwrite(ui.verbose, 'mode type', '%s %1s ', mode[fl], char[fl])
        fm.write('path', '%s\n', f)
    fm.end()
5368
5369
@command('^merge',
    [('f', 'force', None,
      _('force a merge including outstanding changes (DEPRECATED)')),
    ('r', 'rev', '', _('revision to merge'), _('REV')),
    ('P', 'preview', None,
     _('review revisions to merge (no merge is performed)'))
     ] + mergetoolopts,
    _('[-P] [[-r] REV]'))
def merge(ui, repo, node=None, **opts):
    """merge another revision into working directory

    The current working directory is updated with all changes made in
    the requested revision since the last common predecessor revision.

    Files that changed between either parent are marked as changed for
    the next commit and a commit must be performed before any further
    updates to the repository are allowed. The next commit will have
    two parents.

    ``--tool`` can be used to specify the merge tool used for file
    merges. It overrides the HGMERGE environment variable and your
    configuration files. See :hg:`help merge-tools` for options.

    If no revision is specified, the working directory's parent is a
    head revision, and the current branch contains exactly one other
    head, the other head is merged with by default. Otherwise, an
    explicit revision with which to merge with must be provided.

    See :hg:`help resolve` for information on handling file conflicts.

    To undo an uncommitted merge, use :hg:`update --clean .` which
    will check out a clean copy of the original merge parent, losing
    all changes.

    Returns 0 on success, 1 if there are unresolved files.
    """

    # The revision can be given positionally or via -r, but not both.
    if opts.get('rev') and node:
        raise error.Abort(_("please specify just one revision"))
    if not node:
        node = opts.get('rev')

    if node:
        node = scmutil.revsingle(repo, node).node()

    # No revision given at all: let destutil pick the default merge
    # destination (e.g. the single other head of the current branch).
    if not node:
        node = repo[destutil.destmerge(repo)].node()

    if opts.get('preview'):
        # find nodes that are ancestors of p2 but not of p1
        p1 = repo.lookup('.')
        p2 = repo.lookup(node)
        nodes = repo.changelog.findmissing(common=[p1], heads=[p2])

        displayer = cmdutil.show_changeset(ui, repo, opts)
        for node in nodes:
            displayer.show(repo[node])
        displayer.close()
        # Preview only: report and exit without touching the working dir.
        return 0

    try:
        # ui.forcemerge is an internal variable, do not document
        repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''), 'merge')
        force = opts.get('force')
        return hg.merge(repo, node, force=force, mergeforce=force)
    finally:
        # Always clear the internal override, even if the merge raised.
        ui.setconfig('ui', 'forcemerge', '', 'merge')
5436
5437
@command('outgoing|out',
    [('f', 'force', None, _('run even when the destination is unrelated')),
    ('r', 'rev', [],
     _('a changeset intended to be included in the destination'), _('REV')),
    ('n', 'newest-first', None, _('show newest record first')),
    ('B', 'bookmarks', False, _('compare bookmarks')),
    ('b', 'branch', [], _('a specific branch you would like to push'),
     _('BRANCH')),
    ] + logopts + remoteopts + subrepoopts,
    _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]'))
def outgoing(ui, repo, dest=None, **opts):
    """show changesets not found in the destination

    Show changesets not found in the specified destination repository
    or the default push location. These are the changesets that would
    be pushed if a push was requested.

    See pull for details of valid destination formats.

    .. container:: verbose

      With -B/--bookmarks, the result of bookmark comparison between
      local and remote repositories is displayed. With -v/--verbose,
      status is also displayed for each bookmark like below::

        BM1               01234567890a added
        BM2                            deleted
        BM3               234567890abc advanced
        BM4               34567890abcd diverged
        BM5               4567890abcde changed

      The action taken when pushing depends on the
      status of each bookmark:

      :``added``: push with ``-B`` will create it
      :``deleted``: push with ``-B`` will delete it
      :``advanced``: push will update it
      :``diverged``: push with ``-B`` will update it
      :``changed``: push with ``-B`` will update it

      From the point of view of pushing behavior, bookmarks
      existing only in the remote repository are treated as
      ``deleted``, even if it is in fact added remotely.

    Returns 0 if there are outgoing changes, 1 otherwise.
    """
    if opts.get('graph'):
        # --graph path: compute outgoing changesets and render the DAG.
        cmdutil.checkunsupportedgraphflags([], opts)
        o, other = hg._outgoing(ui, repo, dest, opts)
        if not o:
            # Nothing outgoing; still run the hooks with the empty set.
            cmdutil.outgoinghooks(ui, repo, other, opts, o)
            return

        revdag = cmdutil.graphrevs(repo, o, opts)
        displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
        cmdutil.displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges)
        cmdutil.outgoinghooks(ui, repo, other, opts, o)
        return 0

    if opts.get('bookmarks'):
        # -B path: compare bookmarks with the remote instead of changesets.
        dest = ui.expandpath(dest or 'default-push', dest or 'default')
        dest, branches = hg.parseurl(dest, opts.get('branch'))
        other = hg.peer(repo, opts, dest)
        if 'bookmarks' not in other.listkeys('namespaces'):
            ui.warn(_("remote doesn't support bookmarks\n"))
            return 0
        ui.status(_('comparing with %s\n') % util.hidepassword(dest))
        return bookmarks.outgoing(ui, repo, other)

    # Record the destination for subrepos, then delegate to hg.outgoing;
    # clean the attribute up afterwards in all cases.
    repo._subtoppath = ui.expandpath(dest or 'default-push', dest or 'default')
    try:
        return hg.outgoing(ui, repo, dest, opts)
    finally:
        del repo._subtoppath
5511
5512
@command('parents',
    [('r', 'rev', '', _('show parents of the specified revision'), _('REV')),
    ] + templateopts,
    _('[-r REV] [FILE]'),
    inferrepo=True)
def parents(ui, repo, file_=None, **opts):
    """show the parents of the working directory or revision (DEPRECATED)

    Print the working directory's parent revisions. If a revision is
    given via -r/--rev, the parent of that revision will be printed.
    If a file argument is given, the revision in which the file was
    last changed (before the working directory revision or the
    argument to --rev if given) is printed.

    This command is equivalent to::

        hg log -r "p1()+p2()" or
        hg log -r "p1(REV)+p2(REV)" or
        hg log -r "max(::p1() and file(FILE))+max(::p2() and file(FILE))" or
        hg log -r "max(::p1(REV) and file(FILE))+max(::p2(REV) and file(FILE))"

    See :hg:`summary` and :hg:`help revsets` for related information.

    Returns 0 on success.
    """

    ctx = scmutil.revsingle(repo, opts.get('rev'), None)

    if file_:
        # A single explicit filename is required; patterns are rejected.
        m = scmutil.match(ctx, (file_,), opts)
        if m.anypats() or len(m.files()) != 1:
            raise error.Abort(_('can only specify an explicit filename'))
        file_ = m.files()[0]
        # Collect the file's node in each parent that actually has it.
        filenodes = []
        for cp in ctx.parents():
            if not cp:
                continue
            try:
                filenodes.append(cp.filenode(file_))
            except error.LookupError:
                # File not present in this parent; skip it.
                pass
        if not filenodes:
            raise error.Abort(_("'%s' not found in manifest!") % file_)
        # Map each file node back to the changeset that introduced it.
        p = []
        for fn in filenodes:
            fctx = repo.filectx(file_, fileid=fn)
            p.append(fctx.node())
    else:
        p = [cp.node() for cp in ctx.parents()]

    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in p:
        # Skip the null parent (root changesets / single-parent cases).
        if n != nullid:
            displayer.show(repo[n])
    displayer.close()
5567
5568
@command('paths', formatteropts, _('[NAME]'), optionalrepo=True)
def paths(ui, repo, search=None, **opts):
    """show aliases for remote repositories

    Show definition of symbolic path name NAME. If no name is given,
    show definition of all available names.

    Option -q/--quiet suppresses all output when searching for NAME
    and shows only the path names when listing all definitions.

    Path names are defined in the [paths] section of your
    configuration file and in ``/etc/mercurial/hgrc``. If run inside a
    repository, ``.hg/hgrc`` is used, too.

    The path names ``default`` and ``default-push`` have a special
    meaning.  When performing a push or pull operation, they are used
    as fallbacks if no location is specified on the command-line.
    When ``default-push`` is set, it will be used for push and
    ``default`` will be used for pull; otherwise ``default`` is used
    as the fallback for both.  When cloning a repository, the clone
    source is written as ``default`` in ``.hg/hgrc``.

    .. note::

       ``default`` and ``default-push`` apply to all inbound (e.g.
       :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email`
       and :hg:`bundle`) operations.

    See :hg:`help urls` for more information.

    Returns 0 on success.
    """
    # With a NAME argument, keep only the matching path; otherwise
    # list all configured paths in sorted order.
    if search:
        pathitems = [(name, path) for name, path in ui.paths.iteritems()
                     if name == search]
    else:
        pathitems = sorted(ui.paths.iteritems())

    fm = ui.formatter('paths', opts)
    # With a machine-readable formatter (e.g. -Tjson) emit the raw URL;
    # for plain output hide any embedded password.
    if fm:
        hidepassword = str
    else:
        hidepassword = util.hidepassword
    if ui.quiet:
        namefmt = '%s\n'
    else:
        namefmt = '%s = '
    # Sub-options are shown only when listing all paths non-quietly.
    showsubopts = not search and not ui.quiet

    for name, path in pathitems:
        fm.startitem()
        fm.condwrite(not search, 'name', namefmt, name)
        fm.condwrite(not ui.quiet, 'url', '%s\n', hidepassword(path.rawloc))
        for subopt, value in sorted(path.suboptions.items()):
            # 'name' and 'url' are reserved keys written above; a
            # sub-option must never collide with them.
            assert subopt not in ('name', 'url')
            if showsubopts:
                fm.plain('%s:%s = ' % (name, subopt))
            fm.condwrite(showsubopts, subopt, '%s\n', value)

    fm.end()

    # Searching for a name that does not exist is an error (exit 1).
    if search and not pathitems:
        if not ui.quiet:
            ui.warn(_("not found!\n"))
        return 1
    else:
        return 0
5635
5636
5636 @command('phase',
5637 @command('phase',
5637 [('p', 'public', False, _('set changeset phase to public')),
5638 [('p', 'public', False, _('set changeset phase to public')),
5638 ('d', 'draft', False, _('set changeset phase to draft')),
5639 ('d', 'draft', False, _('set changeset phase to draft')),
5639 ('s', 'secret', False, _('set changeset phase to secret')),
5640 ('s', 'secret', False, _('set changeset phase to secret')),
5640 ('f', 'force', False, _('allow to move boundary backward')),
5641 ('f', 'force', False, _('allow to move boundary backward')),
5641 ('r', 'rev', [], _('target revision'), _('REV')),
5642 ('r', 'rev', [], _('target revision'), _('REV')),
5642 ],
5643 ],
5643 _('[-p|-d|-s] [-f] [-r] [REV...]'))
5644 _('[-p|-d|-s] [-f] [-r] [REV...]'))
5644 def phase(ui, repo, *revs, **opts):
5645 def phase(ui, repo, *revs, **opts):
5645 """set or show the current phase name
5646 """set or show the current phase name
5646
5647
5647 With no argument, show the phase name of the current revision(s).
5648 With no argument, show the phase name of the current revision(s).
5648
5649
5649 With one of -p/--public, -d/--draft or -s/--secret, change the
5650 With one of -p/--public, -d/--draft or -s/--secret, change the
5650 phase value of the specified revisions.
5651 phase value of the specified revisions.
5651
5652
5652 Unless -f/--force is specified, :hg:`phase` won't move changeset from a
5653 Unless -f/--force is specified, :hg:`phase` won't move changeset from a
5653 lower phase to an higher phase. Phases are ordered as follows::
5654 lower phase to an higher phase. Phases are ordered as follows::
5654
5655
5655 public < draft < secret
5656 public < draft < secret
5656
5657
5657 Returns 0 on success, 1 if some phases could not be changed.
5658 Returns 0 on success, 1 if some phases could not be changed.
5658
5659
5659 (For more information about the phases concept, see :hg:`help phases`.)
5660 (For more information about the phases concept, see :hg:`help phases`.)
5660 """
5661 """
5661 # search for a unique phase argument
5662 # search for a unique phase argument
5662 targetphase = None
5663 targetphase = None
5663 for idx, name in enumerate(phases.phasenames):
5664 for idx, name in enumerate(phases.phasenames):
5664 if opts[name]:
5665 if opts[name]:
5665 if targetphase is not None:
5666 if targetphase is not None:
5666 raise error.Abort(_('only one phase can be specified'))
5667 raise error.Abort(_('only one phase can be specified'))
5667 targetphase = idx
5668 targetphase = idx
5668
5669
5669 # look for specified revision
5670 # look for specified revision
5670 revs = list(revs)
5671 revs = list(revs)
5671 revs.extend(opts['rev'])
5672 revs.extend(opts['rev'])
5672 if not revs:
5673 if not revs:
5673 # display both parents as the second parent phase can influence
5674 # display both parents as the second parent phase can influence
5674 # the phase of a merge commit
5675 # the phase of a merge commit
5675 revs = [c.rev() for c in repo[None].parents()]
5676 revs = [c.rev() for c in repo[None].parents()]
5676
5677
5677 revs = scmutil.revrange(repo, revs)
5678 revs = scmutil.revrange(repo, revs)
5678
5679
5679 lock = None
5680 lock = None
5680 ret = 0
5681 ret = 0
5681 if targetphase is None:
5682 if targetphase is None:
5682 # display
5683 # display
5683 for r in revs:
5684 for r in revs:
5684 ctx = repo[r]
5685 ctx = repo[r]
5685 ui.write('%i: %s\n' % (ctx.rev(), ctx.phasestr()))
5686 ui.write('%i: %s\n' % (ctx.rev(), ctx.phasestr()))
5686 else:
5687 else:
5687 tr = None
5688 tr = None
5688 lock = repo.lock()
5689 lock = repo.lock()
5689 try:
5690 try:
5690 tr = repo.transaction("phase")
5691 tr = repo.transaction("phase")
5691 # set phase
5692 # set phase
5692 if not revs:
5693 if not revs:
5693 raise error.Abort(_('empty revision set'))
5694 raise error.Abort(_('empty revision set'))
5694 nodes = [repo[r].node() for r in revs]
5695 nodes = [repo[r].node() for r in revs]
5695 # moving revision from public to draft may hide them
5696 # moving revision from public to draft may hide them
5696 # We have to check result on an unfiltered repository
5697 # We have to check result on an unfiltered repository
5697 unfi = repo.unfiltered()
5698 unfi = repo.unfiltered()
5698 getphase = unfi._phasecache.phase
5699 getphase = unfi._phasecache.phase
5699 olddata = [getphase(unfi, r) for r in unfi]
5700 olddata = [getphase(unfi, r) for r in unfi]
5700 phases.advanceboundary(repo, tr, targetphase, nodes)
5701 phases.advanceboundary(repo, tr, targetphase, nodes)
5701 if opts['force']:
5702 if opts['force']:
5702 phases.retractboundary(repo, tr, targetphase, nodes)
5703 phases.retractboundary(repo, tr, targetphase, nodes)
5703 tr.close()
5704 tr.close()
5704 finally:
5705 finally:
5705 if tr is not None:
5706 if tr is not None:
5706 tr.release()
5707 tr.release()
5707 lock.release()
5708 lock.release()
5708 getphase = unfi._phasecache.phase
5709 getphase = unfi._phasecache.phase
5709 newdata = [getphase(unfi, r) for r in unfi]
5710 newdata = [getphase(unfi, r) for r in unfi]
5710 changes = sum(newdata[r] != olddata[r] for r in unfi)
5711 changes = sum(newdata[r] != olddata[r] for r in unfi)
5711 cl = unfi.changelog
5712 cl = unfi.changelog
5712 rejected = [n for n in nodes
5713 rejected = [n for n in nodes
5713 if newdata[cl.rev(n)] < targetphase]
5714 if newdata[cl.rev(n)] < targetphase]
5714 if rejected:
5715 if rejected:
5715 ui.warn(_('cannot move %i changesets to a higher '
5716 ui.warn(_('cannot move %i changesets to a higher '
5716 'phase, use --force\n') % len(rejected))
5717 'phase, use --force\n') % len(rejected))
5717 ret = 1
5718 ret = 1
5718 if changes:
5719 if changes:
5719 msg = _('phase changed for %i changesets\n') % changes
5720 msg = _('phase changed for %i changesets\n') % changes
5720 if ret:
5721 if ret:
5721 ui.status(msg)
5722 ui.status(msg)
5722 else:
5723 else:
5723 ui.note(msg)
5724 ui.note(msg)
5724 else:
5725 else:
5725 ui.warn(_('no phases changed\n'))
5726 ui.warn(_('no phases changed\n'))
5726 return ret
5727 return ret
5727
5728
5728 def postincoming(ui, repo, modheads, optupdate, checkout, brev):
5729 def postincoming(ui, repo, modheads, optupdate, checkout, brev):
5729 """Run after a changegroup has been added via pull/unbundle
5730 """Run after a changegroup has been added via pull/unbundle
5730
5731
5731 This takes arguments below:
5732 This takes arguments below:
5732
5733
5733 :modheads: change of heads by pull/unbundle
5734 :modheads: change of heads by pull/unbundle
5734 :optupdate: updating working directory is needed or not
5735 :optupdate: updating working directory is needed or not
5735 :checkout: update destination revision (or None to default destination)
5736 :checkout: update destination revision (or None to default destination)
5736 :brev: a name, which might be a bookmark to be activated after updating
5737 :brev: a name, which might be a bookmark to be activated after updating
5737 """
5738 """
5738 if modheads == 0:
5739 if modheads == 0:
5739 return
5740 return
5740 if optupdate:
5741 if optupdate:
5741 try:
5742 try:
5742 return hg.updatetotally(ui, repo, checkout, brev)
5743 return hg.updatetotally(ui, repo, checkout, brev)
5743 except error.UpdateAbort as inst:
5744 except error.UpdateAbort as inst:
5744 msg = _("not updating: %s") % str(inst)
5745 msg = _("not updating: %s") % str(inst)
5745 hint = inst.hint
5746 hint = inst.hint
5746 raise error.UpdateAbort(msg, hint=hint)
5747 raise error.UpdateAbort(msg, hint=hint)
5747 if modheads > 1:
5748 if modheads > 1:
5748 currentbranchheads = len(repo.branchheads())
5749 currentbranchheads = len(repo.branchheads())
5749 if currentbranchheads == modheads:
5750 if currentbranchheads == modheads:
5750 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
5751 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
5751 elif currentbranchheads > 1:
5752 elif currentbranchheads > 1:
5752 ui.status(_("(run 'hg heads .' to see heads, 'hg merge' to "
5753 ui.status(_("(run 'hg heads .' to see heads, 'hg merge' to "
5753 "merge)\n"))
5754 "merge)\n"))
5754 else:
5755 else:
5755 ui.status(_("(run 'hg heads' to see heads)\n"))
5756 ui.status(_("(run 'hg heads' to see heads)\n"))
5756 else:
5757 else:
5757 ui.status(_("(run 'hg update' to get a working copy)\n"))
5758 ui.status(_("(run 'hg update' to get a working copy)\n"))
5758
5759
5759 @command('^pull',
5760 @command('^pull',
5760 [('u', 'update', None,
5761 [('u', 'update', None,
5761 _('update to new branch head if changesets were pulled')),
5762 _('update to new branch head if changesets were pulled')),
5762 ('f', 'force', None, _('run even when remote repository is unrelated')),
5763 ('f', 'force', None, _('run even when remote repository is unrelated')),
5763 ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
5764 ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
5764 ('B', 'bookmark', [], _("bookmark to pull"), _('BOOKMARK')),
5765 ('B', 'bookmark', [], _("bookmark to pull"), _('BOOKMARK')),
5765 ('b', 'branch', [], _('a specific branch you would like to pull'),
5766 ('b', 'branch', [], _('a specific branch you would like to pull'),
5766 _('BRANCH')),
5767 _('BRANCH')),
5767 ] + remoteopts,
5768 ] + remoteopts,
5768 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]'))
5769 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]'))
5769 def pull(ui, repo, source="default", **opts):
5770 def pull(ui, repo, source="default", **opts):
5770 """pull changes from the specified source
5771 """pull changes from the specified source
5771
5772
5772 Pull changes from a remote repository to a local one.
5773 Pull changes from a remote repository to a local one.
5773
5774
5774 This finds all changes from the repository at the specified path
5775 This finds all changes from the repository at the specified path
5775 or URL and adds them to a local repository (the current one unless
5776 or URL and adds them to a local repository (the current one unless
5776 -R is specified). By default, this does not update the copy of the
5777 -R is specified). By default, this does not update the copy of the
5777 project in the working directory.
5778 project in the working directory.
5778
5779
5779 Use :hg:`incoming` if you want to see what would have been added
5780 Use :hg:`incoming` if you want to see what would have been added
5780 by a pull at the time you issued this command. If you then decide
5781 by a pull at the time you issued this command. If you then decide
5781 to add those changes to the repository, you should use :hg:`pull
5782 to add those changes to the repository, you should use :hg:`pull
5782 -r X` where ``X`` is the last changeset listed by :hg:`incoming`.
5783 -r X` where ``X`` is the last changeset listed by :hg:`incoming`.
5783
5784
5784 If SOURCE is omitted, the 'default' path will be used.
5785 If SOURCE is omitted, the 'default' path will be used.
5785 See :hg:`help urls` for more information.
5786 See :hg:`help urls` for more information.
5786
5787
5787 Specifying bookmark as ``.`` is equivalent to specifying the active
5788 Specifying bookmark as ``.`` is equivalent to specifying the active
5788 bookmark's name.
5789 bookmark's name.
5789
5790
5790 Returns 0 on success, 1 if an update had unresolved files.
5791 Returns 0 on success, 1 if an update had unresolved files.
5791 """
5792 """
5792 source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
5793 source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
5793 ui.status(_('pulling from %s\n') % util.hidepassword(source))
5794 ui.status(_('pulling from %s\n') % util.hidepassword(source))
5794 other = hg.peer(repo, opts, source)
5795 other = hg.peer(repo, opts, source)
5795 try:
5796 try:
5796 revs, checkout = hg.addbranchrevs(repo, other, branches,
5797 revs, checkout = hg.addbranchrevs(repo, other, branches,
5797 opts.get('rev'))
5798 opts.get('rev'))
5798
5799
5799
5800
5800 pullopargs = {}
5801 pullopargs = {}
5801 if opts.get('bookmark'):
5802 if opts.get('bookmark'):
5802 if not revs:
5803 if not revs:
5803 revs = []
5804 revs = []
5804 # The list of bookmark used here is not the one used to actually
5805 # The list of bookmark used here is not the one used to actually
5805 # update the bookmark name. This can result in the revision pulled
5806 # update the bookmark name. This can result in the revision pulled
5806 # not ending up with the name of the bookmark because of a race
5807 # not ending up with the name of the bookmark because of a race
5807 # condition on the server. (See issue 4689 for details)
5808 # condition on the server. (See issue 4689 for details)
5808 remotebookmarks = other.listkeys('bookmarks')
5809 remotebookmarks = other.listkeys('bookmarks')
5809 pullopargs['remotebookmarks'] = remotebookmarks
5810 pullopargs['remotebookmarks'] = remotebookmarks
5810 for b in opts['bookmark']:
5811 for b in opts['bookmark']:
5811 b = repo._bookmarks.expandname(b)
5812 b = repo._bookmarks.expandname(b)
5812 if b not in remotebookmarks:
5813 if b not in remotebookmarks:
5813 raise error.Abort(_('remote bookmark %s not found!') % b)
5814 raise error.Abort(_('remote bookmark %s not found!') % b)
5814 revs.append(remotebookmarks[b])
5815 revs.append(remotebookmarks[b])
5815
5816
5816 if revs:
5817 if revs:
5817 try:
5818 try:
5818 # When 'rev' is a bookmark name, we cannot guarantee that it
5819 # When 'rev' is a bookmark name, we cannot guarantee that it
5819 # will be updated with that name because of a race condition
5820 # will be updated with that name because of a race condition
5820 # server side. (See issue 4689 for details)
5821 # server side. (See issue 4689 for details)
5821 oldrevs = revs
5822 oldrevs = revs
5822 revs = [] # actually, nodes
5823 revs = [] # actually, nodes
5823 for r in oldrevs:
5824 for r in oldrevs:
5824 node = other.lookup(r)
5825 node = other.lookup(r)
5825 revs.append(node)
5826 revs.append(node)
5826 if r == checkout:
5827 if r == checkout:
5827 checkout = node
5828 checkout = node
5828 except error.CapabilityError:
5829 except error.CapabilityError:
5829 err = _("other repository doesn't support revision lookup, "
5830 err = _("other repository doesn't support revision lookup, "
5830 "so a rev cannot be specified.")
5831 "so a rev cannot be specified.")
5831 raise error.Abort(err)
5832 raise error.Abort(err)
5832
5833
5833 pullopargs.update(opts.get('opargs', {}))
5834 pullopargs.update(opts.get('opargs', {}))
5834 modheads = exchange.pull(repo, other, heads=revs,
5835 modheads = exchange.pull(repo, other, heads=revs,
5835 force=opts.get('force'),
5836 force=opts.get('force'),
5836 bookmarks=opts.get('bookmark', ()),
5837 bookmarks=opts.get('bookmark', ()),
5837 opargs=pullopargs).cgresult
5838 opargs=pullopargs).cgresult
5838
5839
5839 # brev is a name, which might be a bookmark to be activated at
5840 # brev is a name, which might be a bookmark to be activated at
5840 # the end of the update. In other words, it is an explicit
5841 # the end of the update. In other words, it is an explicit
5841 # destination of the update
5842 # destination of the update
5842 brev = None
5843 brev = None
5843
5844
5844 if checkout:
5845 if checkout:
5845 checkout = str(repo.changelog.rev(checkout))
5846 checkout = str(repo.changelog.rev(checkout))
5846
5847
5847 # order below depends on implementation of
5848 # order below depends on implementation of
5848 # hg.addbranchrevs(). opts['bookmark'] is ignored,
5849 # hg.addbranchrevs(). opts['bookmark'] is ignored,
5849 # because 'checkout' is determined without it.
5850 # because 'checkout' is determined without it.
5850 if opts.get('rev'):
5851 if opts.get('rev'):
5851 brev = opts['rev'][0]
5852 brev = opts['rev'][0]
5852 elif opts.get('branch'):
5853 elif opts.get('branch'):
5853 brev = opts['branch'][0]
5854 brev = opts['branch'][0]
5854 else:
5855 else:
5855 brev = branches[0]
5856 brev = branches[0]
5856 repo._subtoppath = source
5857 repo._subtoppath = source
5857 try:
5858 try:
5858 ret = postincoming(ui, repo, modheads, opts.get('update'),
5859 ret = postincoming(ui, repo, modheads, opts.get('update'),
5859 checkout, brev)
5860 checkout, brev)
5860
5861
5861 finally:
5862 finally:
5862 del repo._subtoppath
5863 del repo._subtoppath
5863
5864
5864 finally:
5865 finally:
5865 other.close()
5866 other.close()
5866 return ret
5867 return ret
5867
5868
5868 @command('^push',
5869 @command('^push',
5869 [('f', 'force', None, _('force push')),
5870 [('f', 'force', None, _('force push')),
5870 ('r', 'rev', [],
5871 ('r', 'rev', [],
5871 _('a changeset intended to be included in the destination'),
5872 _('a changeset intended to be included in the destination'),
5872 _('REV')),
5873 _('REV')),
5873 ('B', 'bookmark', [], _("bookmark to push"), _('BOOKMARK')),
5874 ('B', 'bookmark', [], _("bookmark to push"), _('BOOKMARK')),
5874 ('b', 'branch', [],
5875 ('b', 'branch', [],
5875 _('a specific branch you would like to push'), _('BRANCH')),
5876 _('a specific branch you would like to push'), _('BRANCH')),
5876 ('', 'new-branch', False, _('allow pushing a new branch')),
5877 ('', 'new-branch', False, _('allow pushing a new branch')),
5877 ] + remoteopts,
5878 ] + remoteopts,
5878 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]'))
5879 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]'))
5879 def push(ui, repo, dest=None, **opts):
5880 def push(ui, repo, dest=None, **opts):
5880 """push changes to the specified destination
5881 """push changes to the specified destination
5881
5882
5882 Push changesets from the local repository to the specified
5883 Push changesets from the local repository to the specified
5883 destination.
5884 destination.
5884
5885
5885 This operation is symmetrical to pull: it is identical to a pull
5886 This operation is symmetrical to pull: it is identical to a pull
5886 in the destination repository from the current one.
5887 in the destination repository from the current one.
5887
5888
5888 By default, push will not allow creation of new heads at the
5889 By default, push will not allow creation of new heads at the
5889 destination, since multiple heads would make it unclear which head
5890 destination, since multiple heads would make it unclear which head
5890 to use. In this situation, it is recommended to pull and merge
5891 to use. In this situation, it is recommended to pull and merge
5891 before pushing.
5892 before pushing.
5892
5893
5893 Use --new-branch if you want to allow push to create a new named
5894 Use --new-branch if you want to allow push to create a new named
5894 branch that is not present at the destination. This allows you to
5895 branch that is not present at the destination. This allows you to
5895 only create a new branch without forcing other changes.
5896 only create a new branch without forcing other changes.
5896
5897
5897 .. note::
5898 .. note::
5898
5899
5899 Extra care should be taken with the -f/--force option,
5900 Extra care should be taken with the -f/--force option,
5900 which will push all new heads on all branches, an action which will
5901 which will push all new heads on all branches, an action which will
5901 almost always cause confusion for collaborators.
5902 almost always cause confusion for collaborators.
5902
5903
5903 If -r/--rev is used, the specified revision and all its ancestors
5904 If -r/--rev is used, the specified revision and all its ancestors
5904 will be pushed to the remote repository.
5905 will be pushed to the remote repository.
5905
5906
5906 If -B/--bookmark is used, the specified bookmarked revision, its
5907 If -B/--bookmark is used, the specified bookmarked revision, its
5907 ancestors, and the bookmark will be pushed to the remote
5908 ancestors, and the bookmark will be pushed to the remote
5908 repository. Specifying ``.`` is equivalent to specifying the active
5909 repository. Specifying ``.`` is equivalent to specifying the active
5909 bookmark's name.
5910 bookmark's name.
5910
5911
5911 Please see :hg:`help urls` for important details about ``ssh://``
5912 Please see :hg:`help urls` for important details about ``ssh://``
5912 URLs. If DESTINATION is omitted, a default path will be used.
5913 URLs. If DESTINATION is omitted, a default path will be used.
5913
5914
5914 Returns 0 if push was successful, 1 if nothing to push.
5915 Returns 0 if push was successful, 1 if nothing to push.
5915 """
5916 """
5916
5917
5917 if opts.get('bookmark'):
5918 if opts.get('bookmark'):
5918 ui.setconfig('bookmarks', 'pushing', opts['bookmark'], 'push')
5919 ui.setconfig('bookmarks', 'pushing', opts['bookmark'], 'push')
5919 for b in opts['bookmark']:
5920 for b in opts['bookmark']:
5920 # translate -B options to -r so changesets get pushed
5921 # translate -B options to -r so changesets get pushed
5921 b = repo._bookmarks.expandname(b)
5922 b = repo._bookmarks.expandname(b)
5922 if b in repo._bookmarks:
5923 if b in repo._bookmarks:
5923 opts.setdefault('rev', []).append(b)
5924 opts.setdefault('rev', []).append(b)
5924 else:
5925 else:
5925 # if we try to push a deleted bookmark, translate it to null
5926 # if we try to push a deleted bookmark, translate it to null
5926 # this lets simultaneous -r, -b options continue working
5927 # this lets simultaneous -r, -b options continue working
5927 opts.setdefault('rev', []).append("null")
5928 opts.setdefault('rev', []).append("null")
5928
5929
5929 path = ui.paths.getpath(dest, default=('default-push', 'default'))
5930 path = ui.paths.getpath(dest, default=('default-push', 'default'))
5930 if not path:
5931 if not path:
5931 raise error.Abort(_('default repository not configured!'),
5932 raise error.Abort(_('default repository not configured!'),
5932 hint=_('see the "path" section in "hg help config"'))
5933 hint=_('see the "path" section in "hg help config"'))
5933 dest = path.pushloc or path.loc
5934 dest = path.pushloc or path.loc
5934 branches = (path.branch, opts.get('branch') or [])
5935 branches = (path.branch, opts.get('branch') or [])
5935 ui.status(_('pushing to %s\n') % util.hidepassword(dest))
5936 ui.status(_('pushing to %s\n') % util.hidepassword(dest))
5936 revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
5937 revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
5937 other = hg.peer(repo, opts, dest)
5938 other = hg.peer(repo, opts, dest)
5938
5939
5939 if revs:
5940 if revs:
5940 revs = [repo.lookup(r) for r in scmutil.revrange(repo, revs)]
5941 revs = [repo.lookup(r) for r in scmutil.revrange(repo, revs)]
5941 if not revs:
5942 if not revs:
5942 raise error.Abort(_("specified revisions evaluate to an empty set"),
5943 raise error.Abort(_("specified revisions evaluate to an empty set"),
5943 hint=_("use different revision arguments"))
5944 hint=_("use different revision arguments"))
5944 elif path.pushrev:
5945 elif path.pushrev:
5945 # It doesn't make any sense to specify ancestor revisions. So limit
5946 # It doesn't make any sense to specify ancestor revisions. So limit
5946 # to DAG heads to make discovery simpler.
5947 # to DAG heads to make discovery simpler.
5947 expr = revset.formatspec('heads(%r)', path.pushrev)
5948 expr = revset.formatspec('heads(%r)', path.pushrev)
5948 revs = scmutil.revrange(repo, [expr])
5949 revs = scmutil.revrange(repo, [expr])
5949 revs = [repo[rev].node() for rev in revs]
5950 revs = [repo[rev].node() for rev in revs]
5950 if not revs:
5951 if not revs:
5951 raise error.Abort(_('default push revset for path evaluates to an '
5952 raise error.Abort(_('default push revset for path evaluates to an '
5952 'empty set'))
5953 'empty set'))
5953
5954
5954 repo._subtoppath = dest
5955 repo._subtoppath = dest
5955 try:
5956 try:
5956 # push subrepos depth-first for coherent ordering
5957 # push subrepos depth-first for coherent ordering
5957 c = repo['']
5958 c = repo['']
5958 subs = c.substate # only repos that are committed
5959 subs = c.substate # only repos that are committed
5959 for s in sorted(subs):
5960 for s in sorted(subs):
5960 result = c.sub(s).push(opts)
5961 result = c.sub(s).push(opts)
5961 if result == 0:
5962 if result == 0:
5962 return not result
5963 return not result
5963 finally:
5964 finally:
5964 del repo._subtoppath
5965 del repo._subtoppath
5965 pushop = exchange.push(repo, other, opts.get('force'), revs=revs,
5966 pushop = exchange.push(repo, other, opts.get('force'), revs=revs,
5966 newbranch=opts.get('new_branch'),
5967 newbranch=opts.get('new_branch'),
5967 bookmarks=opts.get('bookmark', ()),
5968 bookmarks=opts.get('bookmark', ()),
5968 opargs=opts.get('opargs'))
5969 opargs=opts.get('opargs'))
5969
5970
5970 result = not pushop.cgresult
5971 result = not pushop.cgresult
5971
5972
5972 if pushop.bkresult is not None:
5973 if pushop.bkresult is not None:
5973 if pushop.bkresult == 2:
5974 if pushop.bkresult == 2:
5974 result = 2
5975 result = 2
5975 elif not result and pushop.bkresult:
5976 elif not result and pushop.bkresult:
5976 result = 2
5977 result = 2
5977
5978
5978 return result
5979 return result
5979
5980
5980 @command('recover', [])
5981 @command('recover', [])
5981 def recover(ui, repo):
5982 def recover(ui, repo):
5982 """roll back an interrupted transaction
5983 """roll back an interrupted transaction
5983
5984
5984 Recover from an interrupted commit or pull.
5985 Recover from an interrupted commit or pull.
5985
5986
5986 This command tries to fix the repository status after an
5987 This command tries to fix the repository status after an
5987 interrupted operation. It should only be necessary when Mercurial
5988 interrupted operation. It should only be necessary when Mercurial
5988 suggests it.
5989 suggests it.
5989
5990
5990 Returns 0 if successful, 1 if nothing to recover or verify fails.
5991 Returns 0 if successful, 1 if nothing to recover or verify fails.
5991 """
5992 """
5992 if repo.recover():
5993 if repo.recover():
5993 return hg.verify(repo)
5994 return hg.verify(repo)
5994 return 1
5995 return 1
5995
5996
5996 @command('^remove|rm',
5997 @command('^remove|rm',
5997 [('A', 'after', None, _('record delete for missing files')),
5998 [('A', 'after', None, _('record delete for missing files')),
5998 ('f', 'force', None,
5999 ('f', 'force', None,
5999 _('forget added files, delete modified files')),
6000 _('forget added files, delete modified files')),
6000 ] + subrepoopts + walkopts,
6001 ] + subrepoopts + walkopts,
6001 _('[OPTION]... FILE...'),
6002 _('[OPTION]... FILE...'),
6002 inferrepo=True)
6003 inferrepo=True)
6003 def remove(ui, repo, *pats, **opts):
6004 def remove(ui, repo, *pats, **opts):
6004 """remove the specified files on the next commit
6005 """remove the specified files on the next commit
6005
6006
6006 Schedule the indicated files for removal from the current branch.
6007 Schedule the indicated files for removal from the current branch.
6007
6008
6008 This command schedules the files to be removed at the next commit.
6009 This command schedules the files to be removed at the next commit.
6009 To undo a remove before that, see :hg:`revert`. To undo added
6010 To undo a remove before that, see :hg:`revert`. To undo added
6010 files, see :hg:`forget`.
6011 files, see :hg:`forget`.
6011
6012
6012 .. container:: verbose
6013 .. container:: verbose
6013
6014
6014 -A/--after can be used to remove only files that have already
6015 -A/--after can be used to remove only files that have already
6015 been deleted, -f/--force can be used to force deletion, and -Af
6016 been deleted, -f/--force can be used to force deletion, and -Af
6016 can be used to remove files from the next revision without
6017 can be used to remove files from the next revision without
6017 deleting them from the working directory.
6018 deleting them from the working directory.
6018
6019
6019 The following table details the behavior of remove for different
6020 The following table details the behavior of remove for different
6020 file states (columns) and option combinations (rows). The file
6021 file states (columns) and option combinations (rows). The file
6021 states are Added [A], Clean [C], Modified [M] and Missing [!]
6022 states are Added [A], Clean [C], Modified [M] and Missing [!]
6022 (as reported by :hg:`status`). The actions are Warn, Remove
6023 (as reported by :hg:`status`). The actions are Warn, Remove
6023 (from branch) and Delete (from disk):
6024 (from branch) and Delete (from disk):
6024
6025
6025 ========= == == == ==
6026 ========= == == == ==
6026 opt/state A C M !
6027 opt/state A C M !
6027 ========= == == == ==
6028 ========= == == == ==
6028 none W RD W R
6029 none W RD W R
6029 -f R RD RD R
6030 -f R RD RD R
6030 -A W W W R
6031 -A W W W R
6031 -Af R R R R
6032 -Af R R R R
6032 ========= == == == ==
6033 ========= == == == ==
6033
6034
6034 .. note::
6035 .. note::
6035
6036
6036 :hg:`remove` never deletes files in Added [A] state from the
6037 :hg:`remove` never deletes files in Added [A] state from the
6037 working directory, not even if ``--force`` is specified.
6038 working directory, not even if ``--force`` is specified.
6038
6039
6039 Returns 0 on success, 1 if any warnings encountered.
6040 Returns 0 on success, 1 if any warnings encountered.
6040 """
6041 """
6041
6042
6042 after, force = opts.get('after'), opts.get('force')
6043 after, force = opts.get('after'), opts.get('force')
6043 if not pats and not after:
6044 if not pats and not after:
6044 raise error.Abort(_('no files specified'))
6045 raise error.Abort(_('no files specified'))
6045
6046
6046 m = scmutil.match(repo[None], pats, opts)
6047 m = scmutil.match(repo[None], pats, opts)
6047 subrepos = opts.get('subrepos')
6048 subrepos = opts.get('subrepos')
6048 return cmdutil.remove(ui, repo, m, "", after, force, subrepos)
6049 return cmdutil.remove(ui, repo, m, "", after, force, subrepos)
6049
6050
@command('rename|move|mv',
    [('A', 'after', None, _('record a rename that has already occurred')),
    ('f', 'force', None, _('forcibly copy over an existing managed file')),
    ] + walkopts + dryrunopts,
    _('[OPTION]... SOURCE... DEST'))
def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion. If dest
    is a directory, copies are put in that directory. If dest is a
    file, there can only be one source.

    By default, this command copies the contents of files as they
    exist in the working directory. If invoked with -A/--after, the
    operation is recorded, but no copying is performed.

    This command takes effect at the next commit. To undo a rename
    before that, see :hg:`revert`.

    Returns 0 on success, 1 if errors are encountered.
    """
    # Hold the working-directory lock for the duration of the copy+remove
    # pair; equivalent to the context-manager form, releasing on exit.
    wlock = repo.wlock(False)
    try:
        # cmdutil.copy with rename=True performs both the copy-recording
        # and the scheduling of the sources for removal.
        return cmdutil.copy(ui, repo, pats, opts, rename=True)
    finally:
        wlock.release()
6073
6074
@command('resolve',
    [('a', 'all', None, _('select all unresolved files')),
    ('l', 'list', None, _('list state of files needing merge')),
    ('m', 'mark', None, _('mark files as resolved')),
    ('u', 'unmark', None, _('mark files as unresolved')),
    ('n', 'no-status', None, _('hide status prefix'))]
    + mergetoolopts + walkopts + formatteropts,
    _('[OPTION]... [FILE]...'),
    inferrepo=True)
def resolve(ui, repo, *pats, **opts):
    """redo merges or set/view the merge status of files

    Merges with unresolved conflicts are often the result of
    non-interactive merging using the ``internal:merge`` configuration
    setting, or a command-line merge tool like ``diff3``. The resolve
    command is used to manage the files involved in a merge, after
    :hg:`merge` has been run, and before :hg:`commit` is run (i.e. the
    working directory must have two parents). See :hg:`help
    merge-tools` for information on configuring merge tools.

    The resolve command can be used in the following ways:

    - :hg:`resolve [--tool TOOL] FILE...`: attempt to re-merge the specified
      files, discarding any previous merge attempts. Re-merging is not
      performed for files already marked as resolved. Use ``--all/-a``
      to select all unresolved files. ``--tool`` can be used to specify
      the merge tool used for the given files. It overrides the HGMERGE
      environment variable and your configuration files. Previous file
      contents are saved with a ``.orig`` suffix.

    - :hg:`resolve -m [FILE]`: mark a file as having been resolved
      (e.g. after having manually fixed-up the files). The default is
      to mark all unresolved files.

    - :hg:`resolve -u [FILE]...`: mark a file as unresolved. The
      default is to mark all resolved files.

    - :hg:`resolve -l`: list files which had or still have conflicts.
      In the printed list, ``U`` = unresolved and ``R`` = resolved.

    .. note::

       Mercurial will not let you commit files with unresolved merge
       conflicts. You must use :hg:`resolve -m ...` before you can
       commit after a conflicting merge.

    Returns 0 on success, 1 if any files fail a resolve attempt.
    """

    # Pull the boolean mode flags into locals; 'list' becomes 'show' and
    # 'no_status' becomes 'nostatus' for readability below.
    flaglist = 'all mark unmark list no_status'.split()
    all, mark, unmark, show, nostatus = \
        [opts.get(o) for o in flaglist]

    # --list, --mark and --unmark select mutually exclusive modes.
    if (show and (mark or unmark)) or (mark and unmark):
        raise error.Abort(_("too many options specified"))
    if pats and all:
        raise error.Abort(_("can't specify --all and patterns"))
    if not (all or pats or show or mark or unmark):
        raise error.Abort(_('no files or directories specified'),
                          hint=('use --all to re-merge all unresolved files'))

    # --list: read-only report of the merge state; no wlock is taken here.
    if show:
        fm = ui.formatter('resolve', opts)
        ms = mergemod.mergestate.read(repo)
        m = scmutil.match(repo[None], pats, opts)
        for f in ms:
            if not m(f):
                continue
            # label per state: 'u'nresolved, 'r'esolved, 'd'river-resolved
            l = 'resolve.' + {'u': 'unresolved', 'r': 'resolved',
                              'd': 'driverresolved'}[ms[f]]
            fm.startitem()
            fm.condwrite(not nostatus, 'status', '%s ', ms[f].upper(), label=l)
            fm.write('path', '%s\n', f, label=l)
        fm.end()
        return 0

    # All mutating modes run under the working-directory lock.
    with repo.wlock():
        ms = mergemod.mergestate.read(repo)

        # resolve only makes sense with an active merge state or a second
        # dirstate parent (i.e. an uncommitted merge).
        if not (ms.active() or repo.dirstate.p2() != nullid):
            raise error.Abort(
                _('resolve command not applicable when not merging'))

        wctx = repo[None]

        # If a merge driver is configured and hasn't preprocessed yet,
        # give it a chance to run first.
        if ms.mergedriver and ms.mdstate() == 'u':
            proceed = mergemod.driverpreprocess(repo, ms, wctx)
            ms.commit()
            # allow mark and unmark to go through
            if not mark and not unmark and not proceed:
                return 1

        m = scmutil.match(wctx, pats, opts)
        ret = 0          # exit code: 1 if any resolve attempt fails
        didwork = False  # whether any file matched the patterns
        runconclude = False

        # Files whose premerge deferred the actual (possibly interactive)
        # merge; they are completed in a second pass below.
        tocomplete = []
        for f in ms:
            if not m(f):
                continue

            didwork = True

            # don't let driver-resolved files be marked, and run the conclude
            # step if asked to resolve
            if ms[f] == "d":
                exact = m.exact(f)
                if mark:
                    if exact:
                        ui.warn(_('not marking %s as it is driver-resolved\n')
                                % f)
                elif unmark:
                    if exact:
                        ui.warn(_('not unmarking %s as it is driver-resolved\n')
                                % f)
                else:
                    runconclude = True
                continue

            if mark:
                ms.mark(f, "r")
            elif unmark:
                ms.mark(f, "u")
            else:
                # backup pre-resolve (merge uses .orig for its own purposes)
                a = repo.wjoin(f)
                try:
                    util.copyfile(a, a + ".resolve")
                except (IOError, OSError) as inst:
                    # a missing working copy of f is fine; anything else is not
                    if inst.errno != errno.ENOENT:
                        raise

                try:
                    # preresolve file
                    ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                                 'resolve')
                    complete, r = ms.preresolve(f, wctx)
                    if not complete:
                        tocomplete.append(f)
                    elif r:
                        ret = 1
                finally:
                    # always restore forcemerge and persist the merge state
                    ui.setconfig('ui', 'forcemerge', '', 'resolve')
                    ms.commit()

                # replace filemerge's .orig file with our resolve file, but only
                # for merges that are complete
                if complete:
                    try:
                        util.rename(a + ".resolve",
                                    scmutil.origpath(ui, repo, a))
                    except OSError as inst:
                        if inst.errno != errno.ENOENT:
                            raise

        # Second pass: finish the merges deferred by preresolve above.
        for f in tocomplete:
            try:
                # resolve file
                ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                             'resolve')
                r = ms.resolve(f, wctx)
                if r:
                    ret = 1
            finally:
                ui.setconfig('ui', 'forcemerge', '', 'resolve')
                ms.commit()

            # replace filemerge's .orig file with our resolve file
            a = repo.wjoin(f)
            try:
                util.rename(a + ".resolve", scmutil.origpath(ui, repo, a))
            except OSError as inst:
                if inst.errno != errno.ENOENT:
                    raise

        ms.commit()
        ms.recordactions()

        if not didwork and pats:
            # Nothing matched: suggest a corrected invocation when the
            # patterns look like bare paths (no 'kind:' prefix).
            hint = None
            if not any([p for p in pats if p.find(':') >= 0]):
                pats = ['path:%s' % p for p in pats]
                m = scmutil.match(wctx, pats, opts)
                for f in ms:
                    if not m(f):
                        continue
                    flags = ''.join(['-%s ' % o[0] for o in flaglist
                                                  if opts.get(o)])
                    hint = _("(try: hg resolve %s%s)\n") % (
                             flags,
                             ' '.join(pats))
                    break
            ui.warn(_("arguments do not match paths that need resolving\n"))
            if hint:
                ui.warn(hint)
        elif ms.mergedriver and ms.mdstate() != 's':
            # run conclude step when either a driver-resolved file is requested
            # or there are no driver-resolved files
            # we can't use 'ret' to determine whether any files are unresolved
            # because we might not have tried to resolve some
            if ((runconclude or not list(ms.driverresolved()))
                and not list(ms.unresolved())):
                proceed = mergemod.driverconclude(repo, ms, wctx)
                ms.commit()
                if not proceed:
                    return 1

        # Nudge users into finishing an unfinished operation
        unresolvedf = list(ms.unresolved())
        driverresolvedf = list(ms.driverresolved())
        if not unresolvedf and not driverresolvedf:
            ui.status(_('(no more unresolved files)\n'))
            cmdutil.checkafterresolved(repo)
        elif not unresolvedf:
            ui.status(_('(no more unresolved files -- '
                        'run "hg resolve --all" to conclude)\n'))

    return ret
6293
6294
@command('revert',
    [('a', 'all', None, _('revert all changes when no arguments given')),
    ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
    ('r', 'rev', '', _('revert to the specified revision'), _('REV')),
    ('C', 'no-backup', None, _('do not save backup copies of files')),
    ('i', 'interactive', None,
            _('interactively select the changes (EXPERIMENTAL)')),
    ] + walkopts + dryrunopts,
    _('[OPTION]... [-r REV] [NAME]...'))
def revert(ui, repo, *pats, **opts):
    """restore files to their checkout state

    .. note::

       To check out earlier revisions, you should use :hg:`update REV`.
       To cancel an uncommitted merge (and lose your changes),
       use :hg:`update --clean .`.

    With no revision specified, revert the specified files or directories
    to the contents they had in the parent of the working directory.
    This restores the contents of files to an unmodified
    state and unschedules adds, removes, copies, and renames. If the
    working directory has two parents, you must explicitly specify a
    revision.

    Using the -r/--rev or -d/--date options, revert the given files or
    directories to their states as of a specific revision. Because
    revert does not change the working directory parents, this will
    cause these files to appear modified. This can be helpful to "back
    out" some or all of an earlier change. See :hg:`backout` for a
    related method.

    Modified files are saved with a .orig suffix before reverting.
    To disable these backups, use --no-backup. It is possible to store
    the backup files in a custom directory relative to the root of the
    repository by setting the ``ui.origbackuppath`` configuration
    option.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    See :hg:`help backout` for a way to reverse the effect of an
    earlier changeset.

    Returns 0 on success.
    """

    # --date is translated into an equivalent --rev; the two are exclusive.
    if opts.get("date"):
        if opts.get("rev"):
            raise error.Abort(_("you can't specify a revision and a date"))
        opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])

    parent, p2 = repo.dirstate.parents()
    if not opts.get('rev') and p2 != nullid:
        # revert after merge is a trap for new users (issue2915)
        raise error.Abort(_('uncommitted merge with no revision specified'),
                          hint=_("use 'hg update' or see 'hg help revert'"))

    # Resolve the target revision (defaults to the working dir parent).
    ctx = scmutil.revsingle(repo, opts.get('rev'))

    # Without file patterns, --all or --interactive, refuse to run and
    # construct the most helpful hint for the user's situation.
    if (not (pats or opts.get('include') or opts.get('exclude') or
             opts.get('all') or opts.get('interactive'))):
        msg = _("no files or directories specified")
        if p2 != nullid:
            hint = _("uncommitted merge, use --all to discard all changes,"
                     " or 'hg update -C .' to abort the merge")
            raise error.Abort(msg, hint=hint)
        # any non-empty status category means the working dir is dirty
        dirty = any(repo.status())
        node = ctx.node()
        if node != parent:
            # reverting to a revision other than the wdir parent
            if dirty:
                hint = _("uncommitted changes, use --all to discard all"
                         " changes, or 'hg update %s' to update") % ctx.rev()
            else:
                hint = _("use --all to revert all files,"
                         " or 'hg update %s' to update") % ctx.rev()
        elif dirty:
            hint = _("uncommitted changes, use --all to discard all changes")
        else:
            hint = _("use --all to revert all files")
        raise error.Abort(msg, hint=hint)

    # Delegate the actual working-copy manipulation to cmdutil.revert.
    return cmdutil.revert(ui, repo, ctx, (parent, p2), *pats, **opts)
6376
6377
@command('rollback', dryrunopts +
         [('f', 'force', False, _('ignore safety measures'))])
def rollback(ui, repo, **opts):
    """roll back the last transaction (DANGEROUS) (DEPRECATED)

    Please use :hg:`commit --amend` instead of rollback to correct
    mistakes in the last commit.

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback. It will also
    restore the dirstate at the time of the last transaction, losing
    any dirstate changes since that time. This command does not alter
    the working directory.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository.

    .. container:: verbose

      For example, the following commands are transactional, and their
      effects can be rolled back:

      - commit
      - import
      - pull
      - push (with this repository as the destination)
      - unbundle

      To avoid permanent data loss, rollback will refuse to rollback a
      commit transaction if it isn't checked out. Use --force to
      override this protection.

      The rollback command can be entirely disabled by setting the
      ``ui.rollback`` configuration setting to false. If you're here
      because you want to use rollback and it's disabled, you can
      re-enable the command by setting ``ui.rollback`` to true.

    This command is not intended for use on public repositories. Once
    changes are visible for pull by other users, rolling a transaction
    back locally is ineffective (someone else may already have pulled
    the changes). Furthermore, a race is possible with readers of the
    repository; for example an in-progress pull from the repository
    may fail if a rollback is performed.

    Returns 0 on success, 1 if no rollback data is available.
    """
    # Rollback can be disabled site-wide via ui.rollback; honour that
    # before touching the repository at all.
    rollback_allowed = ui.configbool('ui', 'rollback', True)
    if not rollback_allowed:
        raise error.Abort(_('rollback is disabled because it is unsafe'),
                          hint=('see `hg help -v rollback` for information'))
    # The repository object implements the actual transaction undo.
    dryrun = opts.get('dry_run')
    force = opts.get('force')
    return repo.rollback(dryrun=dryrun, force=force)
6429
6430
@command('root', [])
def root(ui, repo):
    """print the root (top) of the current working directory

    Print the root directory of the current repository.

    Returns 0 on success.
    """
    # Emit the repository root path followed by a newline.
    ui.write('%s\n' % repo.root)
6439
6440
@command('^serve',
    [('A', 'accesslog', '', _('name of access log file to write to'),
     _('FILE')),
    ('d', 'daemon', None, _('run server in background')),
    ('', 'daemon-postexec', [], _('used internally by daemon mode')),
    ('E', 'errorlog', '', _('name of error log file to write to'), _('FILE')),
    # use string type, then we can check if something was passed
    ('p', 'port', '', _('port to listen on (default: 8000)'), _('PORT')),
    ('a', 'address', '', _('address to listen on (default: all interfaces)'),
     _('ADDR')),
    ('', 'prefix', '', _('prefix path to serve from (default: server root)'),
     _('PREFIX')),
    ('n', 'name', '',
     _('name to show in web pages (default: working directory)'), _('NAME')),
    ('', 'web-conf', '',
     _('name of the hgweb config file (see "hg help hgweb")'), _('FILE')),
    ('', 'webdir-conf', '', _('name of the hgweb config file (DEPRECATED)'),
     _('FILE')),
    ('', 'pid-file', '', _('name of file to write process ID to'), _('FILE')),
    ('', 'stdio', None, _('for remote clients')),
    ('', 'cmdserver', '', _('for remote clients'), _('MODE')),
    ('t', 'templates', '', _('web templates to use'), _('TEMPLATE')),
    ('', 'style', '', _('template style to use'), _('STYLE')),
    ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
    ('', 'certificate', '', _('SSL certificate file'), _('FILE'))],
    _('[OPTION]...'),
    optionalrepo=True)
def serve(ui, repo, **opts):
    """start stand-alone webserver

    Start a local HTTP repository browser and pull server. You can use
    this for ad-hoc sharing and browsing of repositories. It is
    recommended to use a real web server to serve a repository for
    longer periods of time.

    Please note that the server does not implement access control.
    This means that, by default, anybody can read from the server and
    nobody can write to it by default. Set the ``web.allow_push``
    option to ``*`` to allow everybody to push to the server. You
    should use a real web server if you need to authenticate users.

    By default, the server logs accesses to stdout and errors to
    stderr. Use the -A/--accesslog and -E/--errorlog options to log to
    files.

    To have the server choose a free port number to listen on, specify
    a port number of 0; in this case, the server will print the port
    number it uses.

    Returns 0 on success.
    """

    # --stdio and --cmdserver select incompatible wire protocols.
    if opts["stdio"] and opts["cmdserver"]:
        raise error.Abort(_("cannot use --stdio with --cmdserver"))

    if opts["stdio"]:
        # Serving over stdio requires an actual repository to expose.
        if repo is None:
            raise error.RepoError(_("there is no Mercurial repository here"
                                    " (.hg not found)"))
        # Speak the ssh protocol over stdin/stdout for the remote client
        # (serve_forever is expected to run until the session ends).
        server = sshserver.sshserver(ui, repo)
        server.serve_forever()

    # Choose the service implementation: command server or hgweb.
    if opts["cmdserver"]:
        factory = commandserver.createservice
    else:
        factory = hgweb.createservice
    service = factory(ui, repo, opts)
    # cmdutil.service handles daemonization, pid files, and the run loop.
    return cmdutil.service(opts, initfn=service.init, runfn=service.run)
6507
6508
6508 @command('^status|st',
6509 @command('^status|st',
6509 [('A', 'all', None, _('show status of all files')),
6510 [('A', 'all', None, _('show status of all files')),
6510 ('m', 'modified', None, _('show only modified files')),
6511 ('m', 'modified', None, _('show only modified files')),
6511 ('a', 'added', None, _('show only added files')),
6512 ('a', 'added', None, _('show only added files')),
6512 ('r', 'removed', None, _('show only removed files')),
6513 ('r', 'removed', None, _('show only removed files')),
6513 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
6514 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
6514 ('c', 'clean', None, _('show only files without changes')),
6515 ('c', 'clean', None, _('show only files without changes')),
6515 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
6516 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
6516 ('i', 'ignored', None, _('show only ignored files')),
6517 ('i', 'ignored', None, _('show only ignored files')),
6517 ('n', 'no-status', None, _('hide status prefix')),
6518 ('n', 'no-status', None, _('hide status prefix')),
6518 ('C', 'copies', None, _('show source of copied files')),
6519 ('C', 'copies', None, _('show source of copied files')),
6519 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
6520 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
6520 ('', 'rev', [], _('show difference from revision'), _('REV')),
6521 ('', 'rev', [], _('show difference from revision'), _('REV')),
6521 ('', 'change', '', _('list the changed files of a revision'), _('REV')),
6522 ('', 'change', '', _('list the changed files of a revision'), _('REV')),
6522 ] + walkopts + subrepoopts + formatteropts,
6523 ] + walkopts + subrepoopts + formatteropts,
6523 _('[OPTION]... [FILE]...'),
6524 _('[OPTION]... [FILE]...'),
6524 inferrepo=True)
6525 inferrepo=True)
6525 def status(ui, repo, *pats, **opts):
6526 def status(ui, repo, *pats, **opts):
6526 """show changed files in the working directory
6527 """show changed files in the working directory
6527
6528
6528 Show status of files in the repository. If names are given, only
6529 Show status of files in the repository. If names are given, only
6529 files that match are shown. Files that are clean or ignored or
6530 files that match are shown. Files that are clean or ignored or
6530 the source of a copy/move operation, are not listed unless
6531 the source of a copy/move operation, are not listed unless
6531 -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
6532 -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
6532 Unless options described with "show only ..." are given, the
6533 Unless options described with "show only ..." are given, the
6533 options -mardu are used.
6534 options -mardu are used.
6534
6535
6535 Option -q/--quiet hides untracked (unknown and ignored) files
6536 Option -q/--quiet hides untracked (unknown and ignored) files
6536 unless explicitly requested with -u/--unknown or -i/--ignored.
6537 unless explicitly requested with -u/--unknown or -i/--ignored.
6537
6538
6538 .. note::
6539 .. note::
6539
6540
6540 :hg:`status` may appear to disagree with diff if permissions have
6541 :hg:`status` may appear to disagree with diff if permissions have
6541 changed or a merge has occurred. The standard diff format does
6542 changed or a merge has occurred. The standard diff format does
6542 not report permission changes and diff only reports changes
6543 not report permission changes and diff only reports changes
6543 relative to one merge parent.
6544 relative to one merge parent.
6544
6545
6545 If one revision is given, it is used as the base revision.
6546 If one revision is given, it is used as the base revision.
6546 If two revisions are given, the differences between them are
6547 If two revisions are given, the differences between them are
6547 shown. The --change option can also be used as a shortcut to list
6548 shown. The --change option can also be used as a shortcut to list
6548 the changed files of a revision from its first parent.
6549 the changed files of a revision from its first parent.
6549
6550
6550 The codes used to show the status of files are::
6551 The codes used to show the status of files are::
6551
6552
6552 M = modified
6553 M = modified
6553 A = added
6554 A = added
6554 R = removed
6555 R = removed
6555 C = clean
6556 C = clean
6556 ! = missing (deleted by non-hg command, but still tracked)
6557 ! = missing (deleted by non-hg command, but still tracked)
6557 ? = not tracked
6558 ? = not tracked
6558 I = ignored
6559 I = ignored
6559 = origin of the previous file (with --copies)
6560 = origin of the previous file (with --copies)
6560
6561
6561 .. container:: verbose
6562 .. container:: verbose
6562
6563
6563 Examples:
6564 Examples:
6564
6565
6565 - show changes in the working directory relative to a
6566 - show changes in the working directory relative to a
6566 changeset::
6567 changeset::
6567
6568
6568 hg status --rev 9353
6569 hg status --rev 9353
6569
6570
6570 - show changes in the working directory relative to the
6571 - show changes in the working directory relative to the
6571 current directory (see :hg:`help patterns` for more information)::
6572 current directory (see :hg:`help patterns` for more information)::
6572
6573
6573 hg status re:
6574 hg status re:
6574
6575
6575 - show all changes including copies in an existing changeset::
6576 - show all changes including copies in an existing changeset::
6576
6577
6577 hg status --copies --change 9353
6578 hg status --copies --change 9353
6578
6579
6579 - get a NUL separated list of added files, suitable for xargs::
6580 - get a NUL separated list of added files, suitable for xargs::
6580
6581
6581 hg status -an0
6582 hg status -an0
6582
6583
6583 Returns 0 on success.
6584 Returns 0 on success.
6584 """
6585 """
6585
6586
6586 revs = opts.get('rev')
6587 revs = opts.get('rev')
6587 change = opts.get('change')
6588 change = opts.get('change')
6588
6589
6589 if revs and change:
6590 if revs and change:
6590 msg = _('cannot specify --rev and --change at the same time')
6591 msg = _('cannot specify --rev and --change at the same time')
6591 raise error.Abort(msg)
6592 raise error.Abort(msg)
6592 elif change:
6593 elif change:
6593 node2 = scmutil.revsingle(repo, change, None).node()
6594 node2 = scmutil.revsingle(repo, change, None).node()
6594 node1 = repo[node2].p1().node()
6595 node1 = repo[node2].p1().node()
6595 else:
6596 else:
6596 node1, node2 = scmutil.revpair(repo, revs)
6597 node1, node2 = scmutil.revpair(repo, revs)
6597
6598
6598 if pats:
6599 if pats:
6599 cwd = repo.getcwd()
6600 cwd = repo.getcwd()
6600 else:
6601 else:
6601 cwd = ''
6602 cwd = ''
6602
6603
6603 if opts.get('print0'):
6604 if opts.get('print0'):
6604 end = '\0'
6605 end = '\0'
6605 else:
6606 else:
6606 end = '\n'
6607 end = '\n'
6607 copy = {}
6608 copy = {}
6608 states = 'modified added removed deleted unknown ignored clean'.split()
6609 states = 'modified added removed deleted unknown ignored clean'.split()
6609 show = [k for k in states if opts.get(k)]
6610 show = [k for k in states if opts.get(k)]
6610 if opts.get('all'):
6611 if opts.get('all'):
6611 show += ui.quiet and (states[:4] + ['clean']) or states
6612 show += ui.quiet and (states[:4] + ['clean']) or states
6612 if not show:
6613 if not show:
6613 if ui.quiet:
6614 if ui.quiet:
6614 show = states[:4]
6615 show = states[:4]
6615 else:
6616 else:
6616 show = states[:5]
6617 show = states[:5]
6617
6618
6618 m = scmutil.match(repo[node2], pats, opts)
6619 m = scmutil.match(repo[node2], pats, opts)
6619 stat = repo.status(node1, node2, m,
6620 stat = repo.status(node1, node2, m,
6620 'ignored' in show, 'clean' in show, 'unknown' in show,
6621 'ignored' in show, 'clean' in show, 'unknown' in show,
6621 opts.get('subrepos'))
6622 opts.get('subrepos'))
6622 changestates = zip(states, 'MAR!?IC', stat)
6623 changestates = zip(states, 'MAR!?IC', stat)
6623
6624
6624 if (opts.get('all') or opts.get('copies')
6625 if (opts.get('all') or opts.get('copies')
6625 or ui.configbool('ui', 'statuscopies')) and not opts.get('no_status'):
6626 or ui.configbool('ui', 'statuscopies')) and not opts.get('no_status'):
6626 copy = copies.pathcopies(repo[node1], repo[node2], m)
6627 copy = copies.pathcopies(repo[node1], repo[node2], m)
6627
6628
6628 fm = ui.formatter('status', opts)
6629 fm = ui.formatter('status', opts)
6629 fmt = '%s' + end
6630 fmt = '%s' + end
6630 showchar = not opts.get('no_status')
6631 showchar = not opts.get('no_status')
6631
6632
6632 for state, char, files in changestates:
6633 for state, char, files in changestates:
6633 if state in show:
6634 if state in show:
6634 label = 'status.' + state
6635 label = 'status.' + state
6635 for f in files:
6636 for f in files:
6636 fm.startitem()
6637 fm.startitem()
6637 fm.condwrite(showchar, 'status', '%s ', char, label=label)
6638 fm.condwrite(showchar, 'status', '%s ', char, label=label)
6638 fm.write('path', fmt, repo.pathto(f, cwd), label=label)
6639 fm.write('path', fmt, repo.pathto(f, cwd), label=label)
6639 if f in copy:
6640 if f in copy:
6640 fm.write("copy", ' %s' + end, repo.pathto(copy[f], cwd),
6641 fm.write("copy", ' %s' + end, repo.pathto(copy[f], cwd),
6641 label='status.copied')
6642 label='status.copied')
6642 fm.end()
6643 fm.end()
6643
6644
6644 @command('^summary|sum',
6645 @command('^summary|sum',
6645 [('', 'remote', None, _('check for push and pull'))], '[--remote]')
6646 [('', 'remote', None, _('check for push and pull'))], '[--remote]')
6646 def summary(ui, repo, **opts):
6647 def summary(ui, repo, **opts):
6647 """summarize working directory state
6648 """summarize working directory state
6648
6649
6649 This generates a brief summary of the working directory state,
6650 This generates a brief summary of the working directory state,
6650 including parents, branch, commit status, phase and available updates.
6651 including parents, branch, commit status, phase and available updates.
6651
6652
6652 With the --remote option, this will check the default paths for
6653 With the --remote option, this will check the default paths for
6653 incoming and outgoing changes. This can be time-consuming.
6654 incoming and outgoing changes. This can be time-consuming.
6654
6655
6655 Returns 0 on success.
6656 Returns 0 on success.
6656 """
6657 """
6657
6658
6658 ctx = repo[None]
6659 ctx = repo[None]
6659 parents = ctx.parents()
6660 parents = ctx.parents()
6660 pnode = parents[0].node()
6661 pnode = parents[0].node()
6661 marks = []
6662 marks = []
6662
6663
6663 ms = None
6664 ms = None
6664 try:
6665 try:
6665 ms = mergemod.mergestate.read(repo)
6666 ms = mergemod.mergestate.read(repo)
6666 except error.UnsupportedMergeRecords as e:
6667 except error.UnsupportedMergeRecords as e:
6667 s = ' '.join(e.recordtypes)
6668 s = ' '.join(e.recordtypes)
6668 ui.warn(
6669 ui.warn(
6669 _('warning: merge state has unsupported record types: %s\n') % s)
6670 _('warning: merge state has unsupported record types: %s\n') % s)
6670 unresolved = 0
6671 unresolved = 0
6671 else:
6672 else:
6672 unresolved = [f for f in ms if ms[f] == 'u']
6673 unresolved = [f for f in ms if ms[f] == 'u']
6673
6674
6674 for p in parents:
6675 for p in parents:
6675 # label with log.changeset (instead of log.parent) since this
6676 # label with log.changeset (instead of log.parent) since this
6676 # shows a working directory parent *changeset*:
6677 # shows a working directory parent *changeset*:
6677 # i18n: column positioning for "hg summary"
6678 # i18n: column positioning for "hg summary"
6678 ui.write(_('parent: %d:%s ') % (p.rev(), str(p)),
6679 ui.write(_('parent: %d:%s ') % (p.rev(), str(p)),
6679 label='log.changeset changeset.%s' % p.phasestr())
6680 label='log.changeset changeset.%s' % p.phasestr())
6680 ui.write(' '.join(p.tags()), label='log.tag')
6681 ui.write(' '.join(p.tags()), label='log.tag')
6681 if p.bookmarks():
6682 if p.bookmarks():
6682 marks.extend(p.bookmarks())
6683 marks.extend(p.bookmarks())
6683 if p.rev() == -1:
6684 if p.rev() == -1:
6684 if not len(repo):
6685 if not len(repo):
6685 ui.write(_(' (empty repository)'))
6686 ui.write(_(' (empty repository)'))
6686 else:
6687 else:
6687 ui.write(_(' (no revision checked out)'))
6688 ui.write(_(' (no revision checked out)'))
6688 ui.write('\n')
6689 ui.write('\n')
6689 if p.description():
6690 if p.description():
6690 ui.status(' ' + p.description().splitlines()[0].strip() + '\n',
6691 ui.status(' ' + p.description().splitlines()[0].strip() + '\n',
6691 label='log.summary')
6692 label='log.summary')
6692
6693
6693 branch = ctx.branch()
6694 branch = ctx.branch()
6694 bheads = repo.branchheads(branch)
6695 bheads = repo.branchheads(branch)
6695 # i18n: column positioning for "hg summary"
6696 # i18n: column positioning for "hg summary"
6696 m = _('branch: %s\n') % branch
6697 m = _('branch: %s\n') % branch
6697 if branch != 'default':
6698 if branch != 'default':
6698 ui.write(m, label='log.branch')
6699 ui.write(m, label='log.branch')
6699 else:
6700 else:
6700 ui.status(m, label='log.branch')
6701 ui.status(m, label='log.branch')
6701
6702
6702 if marks:
6703 if marks:
6703 active = repo._activebookmark
6704 active = repo._activebookmark
6704 # i18n: column positioning for "hg summary"
6705 # i18n: column positioning for "hg summary"
6705 ui.write(_('bookmarks:'), label='log.bookmark')
6706 ui.write(_('bookmarks:'), label='log.bookmark')
6706 if active is not None:
6707 if active is not None:
6707 if active in marks:
6708 if active in marks:
6708 ui.write(' *' + active, label=activebookmarklabel)
6709 ui.write(' *' + active, label=activebookmarklabel)
6709 marks.remove(active)
6710 marks.remove(active)
6710 else:
6711 else:
6711 ui.write(' [%s]' % active, label=activebookmarklabel)
6712 ui.write(' [%s]' % active, label=activebookmarklabel)
6712 for m in marks:
6713 for m in marks:
6713 ui.write(' ' + m, label='log.bookmark')
6714 ui.write(' ' + m, label='log.bookmark')
6714 ui.write('\n', label='log.bookmark')
6715 ui.write('\n', label='log.bookmark')
6715
6716
6716 status = repo.status(unknown=True)
6717 status = repo.status(unknown=True)
6717
6718
6718 c = repo.dirstate.copies()
6719 c = repo.dirstate.copies()
6719 copied, renamed = [], []
6720 copied, renamed = [], []
6720 for d, s in c.iteritems():
6721 for d, s in c.iteritems():
6721 if s in status.removed:
6722 if s in status.removed:
6722 status.removed.remove(s)
6723 status.removed.remove(s)
6723 renamed.append(d)
6724 renamed.append(d)
6724 else:
6725 else:
6725 copied.append(d)
6726 copied.append(d)
6726 if d in status.added:
6727 if d in status.added:
6727 status.added.remove(d)
6728 status.added.remove(d)
6728
6729
6729 subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
6730 subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
6730
6731
6731 labels = [(ui.label(_('%d modified'), 'status.modified'), status.modified),
6732 labels = [(ui.label(_('%d modified'), 'status.modified'), status.modified),
6732 (ui.label(_('%d added'), 'status.added'), status.added),
6733 (ui.label(_('%d added'), 'status.added'), status.added),
6733 (ui.label(_('%d removed'), 'status.removed'), status.removed),
6734 (ui.label(_('%d removed'), 'status.removed'), status.removed),
6734 (ui.label(_('%d renamed'), 'status.copied'), renamed),
6735 (ui.label(_('%d renamed'), 'status.copied'), renamed),
6735 (ui.label(_('%d copied'), 'status.copied'), copied),
6736 (ui.label(_('%d copied'), 'status.copied'), copied),
6736 (ui.label(_('%d deleted'), 'status.deleted'), status.deleted),
6737 (ui.label(_('%d deleted'), 'status.deleted'), status.deleted),
6737 (ui.label(_('%d unknown'), 'status.unknown'), status.unknown),
6738 (ui.label(_('%d unknown'), 'status.unknown'), status.unknown),
6738 (ui.label(_('%d unresolved'), 'resolve.unresolved'), unresolved),
6739 (ui.label(_('%d unresolved'), 'resolve.unresolved'), unresolved),
6739 (ui.label(_('%d subrepos'), 'status.modified'), subs)]
6740 (ui.label(_('%d subrepos'), 'status.modified'), subs)]
6740 t = []
6741 t = []
6741 for l, s in labels:
6742 for l, s in labels:
6742 if s:
6743 if s:
6743 t.append(l % len(s))
6744 t.append(l % len(s))
6744
6745
6745 t = ', '.join(t)
6746 t = ', '.join(t)
6746 cleanworkdir = False
6747 cleanworkdir = False
6747
6748
6748 if repo.vfs.exists('graftstate'):
6749 if repo.vfs.exists('graftstate'):
6749 t += _(' (graft in progress)')
6750 t += _(' (graft in progress)')
6750 if repo.vfs.exists('updatestate'):
6751 if repo.vfs.exists('updatestate'):
6751 t += _(' (interrupted update)')
6752 t += _(' (interrupted update)')
6752 elif len(parents) > 1:
6753 elif len(parents) > 1:
6753 t += _(' (merge)')
6754 t += _(' (merge)')
6754 elif branch != parents[0].branch():
6755 elif branch != parents[0].branch():
6755 t += _(' (new branch)')
6756 t += _(' (new branch)')
6756 elif (parents[0].closesbranch() and
6757 elif (parents[0].closesbranch() and
6757 pnode in repo.branchheads(branch, closed=True)):
6758 pnode in repo.branchheads(branch, closed=True)):
6758 t += _(' (head closed)')
6759 t += _(' (head closed)')
6759 elif not (status.modified or status.added or status.removed or renamed or
6760 elif not (status.modified or status.added or status.removed or renamed or
6760 copied or subs):
6761 copied or subs):
6761 t += _(' (clean)')
6762 t += _(' (clean)')
6762 cleanworkdir = True
6763 cleanworkdir = True
6763 elif pnode not in bheads:
6764 elif pnode not in bheads:
6764 t += _(' (new branch head)')
6765 t += _(' (new branch head)')
6765
6766
6766 if parents:
6767 if parents:
6767 pendingphase = max(p.phase() for p in parents)
6768 pendingphase = max(p.phase() for p in parents)
6768 else:
6769 else:
6769 pendingphase = phases.public
6770 pendingphase = phases.public
6770
6771
6771 if pendingphase > phases.newcommitphase(ui):
6772 if pendingphase > phases.newcommitphase(ui):
6772 t += ' (%s)' % phases.phasenames[pendingphase]
6773 t += ' (%s)' % phases.phasenames[pendingphase]
6773
6774
6774 if cleanworkdir:
6775 if cleanworkdir:
6775 # i18n: column positioning for "hg summary"
6776 # i18n: column positioning for "hg summary"
6776 ui.status(_('commit: %s\n') % t.strip())
6777 ui.status(_('commit: %s\n') % t.strip())
6777 else:
6778 else:
6778 # i18n: column positioning for "hg summary"
6779 # i18n: column positioning for "hg summary"
6779 ui.write(_('commit: %s\n') % t.strip())
6780 ui.write(_('commit: %s\n') % t.strip())
6780
6781
6781 # all ancestors of branch heads - all ancestors of parent = new csets
6782 # all ancestors of branch heads - all ancestors of parent = new csets
6782 new = len(repo.changelog.findmissing([pctx.node() for pctx in parents],
6783 new = len(repo.changelog.findmissing([pctx.node() for pctx in parents],
6783 bheads))
6784 bheads))
6784
6785
6785 if new == 0:
6786 if new == 0:
6786 # i18n: column positioning for "hg summary"
6787 # i18n: column positioning for "hg summary"
6787 ui.status(_('update: (current)\n'))
6788 ui.status(_('update: (current)\n'))
6788 elif pnode not in bheads:
6789 elif pnode not in bheads:
6789 # i18n: column positioning for "hg summary"
6790 # i18n: column positioning for "hg summary"
6790 ui.write(_('update: %d new changesets (update)\n') % new)
6791 ui.write(_('update: %d new changesets (update)\n') % new)
6791 else:
6792 else:
6792 # i18n: column positioning for "hg summary"
6793 # i18n: column positioning for "hg summary"
6793 ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
6794 ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
6794 (new, len(bheads)))
6795 (new, len(bheads)))
6795
6796
6796 t = []
6797 t = []
6797 draft = len(repo.revs('draft()'))
6798 draft = len(repo.revs('draft()'))
6798 if draft:
6799 if draft:
6799 t.append(_('%d draft') % draft)
6800 t.append(_('%d draft') % draft)
6800 secret = len(repo.revs('secret()'))
6801 secret = len(repo.revs('secret()'))
6801 if secret:
6802 if secret:
6802 t.append(_('%d secret') % secret)
6803 t.append(_('%d secret') % secret)
6803
6804
6804 if draft or secret:
6805 if draft or secret:
6805 ui.status(_('phases: %s\n') % ', '.join(t))
6806 ui.status(_('phases: %s\n') % ', '.join(t))
6806
6807
6807 if obsolete.isenabled(repo, obsolete.createmarkersopt):
6808 if obsolete.isenabled(repo, obsolete.createmarkersopt):
6808 for trouble in ("unstable", "divergent", "bumped"):
6809 for trouble in ("unstable", "divergent", "bumped"):
6809 numtrouble = len(repo.revs(trouble + "()"))
6810 numtrouble = len(repo.revs(trouble + "()"))
6810 # We write all the possibilities to ease translation
6811 # We write all the possibilities to ease translation
6811 troublemsg = {
6812 troublemsg = {
6812 "unstable": _("unstable: %d changesets"),
6813 "unstable": _("unstable: %d changesets"),
6813 "divergent": _("divergent: %d changesets"),
6814 "divergent": _("divergent: %d changesets"),
6814 "bumped": _("bumped: %d changesets"),
6815 "bumped": _("bumped: %d changesets"),
6815 }
6816 }
6816 if numtrouble > 0:
6817 if numtrouble > 0:
6817 ui.status(troublemsg[trouble] % numtrouble + "\n")
6818 ui.status(troublemsg[trouble] % numtrouble + "\n")
6818
6819
6819 cmdutil.summaryhooks(ui, repo)
6820 cmdutil.summaryhooks(ui, repo)
6820
6821
6821 if opts.get('remote'):
6822 if opts.get('remote'):
6822 needsincoming, needsoutgoing = True, True
6823 needsincoming, needsoutgoing = True, True
6823 else:
6824 else:
6824 needsincoming, needsoutgoing = False, False
6825 needsincoming, needsoutgoing = False, False
6825 for i, o in cmdutil.summaryremotehooks(ui, repo, opts, None):
6826 for i, o in cmdutil.summaryremotehooks(ui, repo, opts, None):
6826 if i:
6827 if i:
6827 needsincoming = True
6828 needsincoming = True
6828 if o:
6829 if o:
6829 needsoutgoing = True
6830 needsoutgoing = True
6830 if not needsincoming and not needsoutgoing:
6831 if not needsincoming and not needsoutgoing:
6831 return
6832 return
6832
6833
6833 def getincoming():
6834 def getincoming():
6834 source, branches = hg.parseurl(ui.expandpath('default'))
6835 source, branches = hg.parseurl(ui.expandpath('default'))
6835 sbranch = branches[0]
6836 sbranch = branches[0]
6836 try:
6837 try:
6837 other = hg.peer(repo, {}, source)
6838 other = hg.peer(repo, {}, source)
6838 except error.RepoError:
6839 except error.RepoError:
6839 if opts.get('remote'):
6840 if opts.get('remote'):
6840 raise
6841 raise
6841 return source, sbranch, None, None, None
6842 return source, sbranch, None, None, None
6842 revs, checkout = hg.addbranchrevs(repo, other, branches, None)
6843 revs, checkout = hg.addbranchrevs(repo, other, branches, None)
6843 if revs:
6844 if revs:
6844 revs = [other.lookup(rev) for rev in revs]
6845 revs = [other.lookup(rev) for rev in revs]
6845 ui.debug('comparing with %s\n' % util.hidepassword(source))
6846 ui.debug('comparing with %s\n' % util.hidepassword(source))
6846 repo.ui.pushbuffer()
6847 repo.ui.pushbuffer()
6847 commoninc = discovery.findcommonincoming(repo, other, heads=revs)
6848 commoninc = discovery.findcommonincoming(repo, other, heads=revs)
6848 repo.ui.popbuffer()
6849 repo.ui.popbuffer()
6849 return source, sbranch, other, commoninc, commoninc[1]
6850 return source, sbranch, other, commoninc, commoninc[1]
6850
6851
6851 if needsincoming:
6852 if needsincoming:
6852 source, sbranch, sother, commoninc, incoming = getincoming()
6853 source, sbranch, sother, commoninc, incoming = getincoming()
6853 else:
6854 else:
6854 source = sbranch = sother = commoninc = incoming = None
6855 source = sbranch = sother = commoninc = incoming = None
6855
6856
6856 def getoutgoing():
6857 def getoutgoing():
6857 dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
6858 dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
6858 dbranch = branches[0]
6859 dbranch = branches[0]
6859 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
6860 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
6860 if source != dest:
6861 if source != dest:
6861 try:
6862 try:
6862 dother = hg.peer(repo, {}, dest)
6863 dother = hg.peer(repo, {}, dest)
6863 except error.RepoError:
6864 except error.RepoError:
6864 if opts.get('remote'):
6865 if opts.get('remote'):
6865 raise
6866 raise
6866 return dest, dbranch, None, None
6867 return dest, dbranch, None, None
6867 ui.debug('comparing with %s\n' % util.hidepassword(dest))
6868 ui.debug('comparing with %s\n' % util.hidepassword(dest))
6868 elif sother is None:
6869 elif sother is None:
6869 # there is no explicit destination peer, but source one is invalid
6870 # there is no explicit destination peer, but source one is invalid
6870 return dest, dbranch, None, None
6871 return dest, dbranch, None, None
6871 else:
6872 else:
6872 dother = sother
6873 dother = sother
6873 if (source != dest or (sbranch is not None and sbranch != dbranch)):
6874 if (source != dest or (sbranch is not None and sbranch != dbranch)):
6874 common = None
6875 common = None
6875 else:
6876 else:
6876 common = commoninc
6877 common = commoninc
6877 if revs:
6878 if revs:
6878 revs = [repo.lookup(rev) for rev in revs]
6879 revs = [repo.lookup(rev) for rev in revs]
6879 repo.ui.pushbuffer()
6880 repo.ui.pushbuffer()
6880 outgoing = discovery.findcommonoutgoing(repo, dother, onlyheads=revs,
6881 outgoing = discovery.findcommonoutgoing(repo, dother, onlyheads=revs,
6881 commoninc=common)
6882 commoninc=common)
6882 repo.ui.popbuffer()
6883 repo.ui.popbuffer()
6883 return dest, dbranch, dother, outgoing
6884 return dest, dbranch, dother, outgoing
6884
6885
6885 if needsoutgoing:
6886 if needsoutgoing:
6886 dest, dbranch, dother, outgoing = getoutgoing()
6887 dest, dbranch, dother, outgoing = getoutgoing()
6887 else:
6888 else:
6888 dest = dbranch = dother = outgoing = None
6889 dest = dbranch = dother = outgoing = None
6889
6890
6890 if opts.get('remote'):
6891 if opts.get('remote'):
6891 t = []
6892 t = []
6892 if incoming:
6893 if incoming:
6893 t.append(_('1 or more incoming'))
6894 t.append(_('1 or more incoming'))
6894 o = outgoing.missing
6895 o = outgoing.missing
6895 if o:
6896 if o:
6896 t.append(_('%d outgoing') % len(o))
6897 t.append(_('%d outgoing') % len(o))
6897 other = dother or sother
6898 other = dother or sother
6898 if 'bookmarks' in other.listkeys('namespaces'):
6899 if 'bookmarks' in other.listkeys('namespaces'):
6899 counts = bookmarks.summary(repo, other)
6900 counts = bookmarks.summary(repo, other)
6900 if counts[0] > 0:
6901 if counts[0] > 0:
6901 t.append(_('%d incoming bookmarks') % counts[0])
6902 t.append(_('%d incoming bookmarks') % counts[0])
6902 if counts[1] > 0:
6903 if counts[1] > 0:
6903 t.append(_('%d outgoing bookmarks') % counts[1])
6904 t.append(_('%d outgoing bookmarks') % counts[1])
6904
6905
6905 if t:
6906 if t:
6906 # i18n: column positioning for "hg summary"
6907 # i18n: column positioning for "hg summary"
6907 ui.write(_('remote: %s\n') % (', '.join(t)))
6908 ui.write(_('remote: %s\n') % (', '.join(t)))
6908 else:
6909 else:
6909 # i18n: column positioning for "hg summary"
6910 # i18n: column positioning for "hg summary"
6910 ui.status(_('remote: (synced)\n'))
6911 ui.status(_('remote: (synced)\n'))
6911
6912
6912 cmdutil.summaryremotehooks(ui, repo, opts,
6913 cmdutil.summaryremotehooks(ui, repo, opts,
6913 ((source, sbranch, sother, commoninc),
6914 ((source, sbranch, sother, commoninc),
6914 (dest, dbranch, dother, outgoing)))
6915 (dest, dbranch, dother, outgoing)))
6915
6916
6916 @command('tag',
6917 @command('tag',
6917 [('f', 'force', None, _('force tag')),
6918 [('f', 'force', None, _('force tag')),
6918 ('l', 'local', None, _('make the tag local')),
6919 ('l', 'local', None, _('make the tag local')),
6919 ('r', 'rev', '', _('revision to tag'), _('REV')),
6920 ('r', 'rev', '', _('revision to tag'), _('REV')),
6920 ('', 'remove', None, _('remove a tag')),
6921 ('', 'remove', None, _('remove a tag')),
6921 # -l/--local is already there, commitopts cannot be used
6922 # -l/--local is already there, commitopts cannot be used
6922 ('e', 'edit', None, _('invoke editor on commit messages')),
6923 ('e', 'edit', None, _('invoke editor on commit messages')),
6923 ('m', 'message', '', _('use text as commit message'), _('TEXT')),
6924 ('m', 'message', '', _('use text as commit message'), _('TEXT')),
6924 ] + commitopts2,
6925 ] + commitopts2,
6925 _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...'))
6926 _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...'))
6926 def tag(ui, repo, name1, *names, **opts):
6927 def tag(ui, repo, name1, *names, **opts):
6927 """add one or more tags for the current or given revision
6928 """add one or more tags for the current or given revision
6928
6929
6929 Name a particular revision using <name>.
6930 Name a particular revision using <name>.
6930
6931
6931 Tags are used to name particular revisions of the repository and are
6932 Tags are used to name particular revisions of the repository and are
6932 very useful to compare different revisions, to go back to significant
6933 very useful to compare different revisions, to go back to significant
6933 earlier versions or to mark branch points as releases, etc. Changing
6934 earlier versions or to mark branch points as releases, etc. Changing
6934 an existing tag is normally disallowed; use -f/--force to override.
6935 an existing tag is normally disallowed; use -f/--force to override.
6935
6936
6936 If no revision is given, the parent of the working directory is
6937 If no revision is given, the parent of the working directory is
6937 used.
6938 used.
6938
6939
6939 To facilitate version control, distribution, and merging of tags,
6940 To facilitate version control, distribution, and merging of tags,
6940 they are stored as a file named ".hgtags" which is managed similarly
6941 they are stored as a file named ".hgtags" which is managed similarly
6941 to other project files and can be hand-edited if necessary. This
6942 to other project files and can be hand-edited if necessary. This
6942 also means that tagging creates a new commit. The file
6943 also means that tagging creates a new commit. The file
6943 ".hg/localtags" is used for local tags (not shared among
6944 ".hg/localtags" is used for local tags (not shared among
6944 repositories).
6945 repositories).
6945
6946
6946 Tag commits are usually made at the head of a branch. If the parent
6947 Tag commits are usually made at the head of a branch. If the parent
6947 of the working directory is not a branch head, :hg:`tag` aborts; use
6948 of the working directory is not a branch head, :hg:`tag` aborts; use
6948 -f/--force to force the tag commit to be based on a non-head
6949 -f/--force to force the tag commit to be based on a non-head
6949 changeset.
6950 changeset.
6950
6951
6951 See :hg:`help dates` for a list of formats valid for -d/--date.
6952 See :hg:`help dates` for a list of formats valid for -d/--date.
6952
6953
6953 Since tag names have priority over branch names during revision
6954 Since tag names have priority over branch names during revision
6954 lookup, using an existing branch name as a tag name is discouraged.
6955 lookup, using an existing branch name as a tag name is discouraged.
6955
6956
6956 Returns 0 on success.
6957 Returns 0 on success.
6957 """
6958 """
6958 wlock = lock = None
6959 wlock = lock = None
6959 try:
6960 try:
6960 wlock = repo.wlock()
6961 wlock = repo.wlock()
6961 lock = repo.lock()
6962 lock = repo.lock()
6962 rev_ = "."
6963 rev_ = "."
6963 names = [t.strip() for t in (name1,) + names]
6964 names = [t.strip() for t in (name1,) + names]
6964 if len(names) != len(set(names)):
6965 if len(names) != len(set(names)):
6965 raise error.Abort(_('tag names must be unique'))
6966 raise error.Abort(_('tag names must be unique'))
6966 for n in names:
6967 for n in names:
6967 scmutil.checknewlabel(repo, n, 'tag')
6968 scmutil.checknewlabel(repo, n, 'tag')
6968 if not n:
6969 if not n:
6969 raise error.Abort(_('tag names cannot consist entirely of '
6970 raise error.Abort(_('tag names cannot consist entirely of '
6970 'whitespace'))
6971 'whitespace'))
6971 if opts.get('rev') and opts.get('remove'):
6972 if opts.get('rev') and opts.get('remove'):
6972 raise error.Abort(_("--rev and --remove are incompatible"))
6973 raise error.Abort(_("--rev and --remove are incompatible"))
6973 if opts.get('rev'):
6974 if opts.get('rev'):
6974 rev_ = opts['rev']
6975 rev_ = opts['rev']
6975 message = opts.get('message')
6976 message = opts.get('message')
6976 if opts.get('remove'):
6977 if opts.get('remove'):
6977 if opts.get('local'):
6978 if opts.get('local'):
6978 expectedtype = 'local'
6979 expectedtype = 'local'
6979 else:
6980 else:
6980 expectedtype = 'global'
6981 expectedtype = 'global'
6981
6982
6982 for n in names:
6983 for n in names:
6983 if not repo.tagtype(n):
6984 if not repo.tagtype(n):
6984 raise error.Abort(_("tag '%s' does not exist") % n)
6985 raise error.Abort(_("tag '%s' does not exist") % n)
6985 if repo.tagtype(n) != expectedtype:
6986 if repo.tagtype(n) != expectedtype:
6986 if expectedtype == 'global':
6987 if expectedtype == 'global':
6987 raise error.Abort(_("tag '%s' is not a global tag") % n)
6988 raise error.Abort(_("tag '%s' is not a global tag") % n)
6988 else:
6989 else:
6989 raise error.Abort(_("tag '%s' is not a local tag") % n)
6990 raise error.Abort(_("tag '%s' is not a local tag") % n)
6990 rev_ = 'null'
6991 rev_ = 'null'
6991 if not message:
6992 if not message:
6992 # we don't translate commit messages
6993 # we don't translate commit messages
6993 message = 'Removed tag %s' % ', '.join(names)
6994 message = 'Removed tag %s' % ', '.join(names)
6994 elif not opts.get('force'):
6995 elif not opts.get('force'):
6995 for n in names:
6996 for n in names:
6996 if n in repo.tags():
6997 if n in repo.tags():
6997 raise error.Abort(_("tag '%s' already exists "
6998 raise error.Abort(_("tag '%s' already exists "
6998 "(use -f to force)") % n)
6999 "(use -f to force)") % n)
6999 if not opts.get('local'):
7000 if not opts.get('local'):
7000 p1, p2 = repo.dirstate.parents()
7001 p1, p2 = repo.dirstate.parents()
7001 if p2 != nullid:
7002 if p2 != nullid:
7002 raise error.Abort(_('uncommitted merge'))
7003 raise error.Abort(_('uncommitted merge'))
7003 bheads = repo.branchheads()
7004 bheads = repo.branchheads()
7004 if not opts.get('force') and bheads and p1 not in bheads:
7005 if not opts.get('force') and bheads and p1 not in bheads:
7005 raise error.Abort(_('not at a branch head (use -f to force)'))
7006 raise error.Abort(_('not at a branch head (use -f to force)'))
7006 r = scmutil.revsingle(repo, rev_).node()
7007 r = scmutil.revsingle(repo, rev_).node()
7007
7008
7008 if not message:
7009 if not message:
7009 # we don't translate commit messages
7010 # we don't translate commit messages
7010 message = ('Added tag %s for changeset %s' %
7011 message = ('Added tag %s for changeset %s' %
7011 (', '.join(names), short(r)))
7012 (', '.join(names), short(r)))
7012
7013
7013 date = opts.get('date')
7014 date = opts.get('date')
7014 if date:
7015 if date:
7015 date = util.parsedate(date)
7016 date = util.parsedate(date)
7016
7017
7017 if opts.get('remove'):
7018 if opts.get('remove'):
7018 editform = 'tag.remove'
7019 editform = 'tag.remove'
7019 else:
7020 else:
7020 editform = 'tag.add'
7021 editform = 'tag.add'
7021 editor = cmdutil.getcommiteditor(editform=editform, **opts)
7022 editor = cmdutil.getcommiteditor(editform=editform, **opts)
7022
7023
7023 # don't allow tagging the null rev
7024 # don't allow tagging the null rev
7024 if (not opts.get('remove') and
7025 if (not opts.get('remove') and
7025 scmutil.revsingle(repo, rev_).rev() == nullrev):
7026 scmutil.revsingle(repo, rev_).rev() == nullrev):
7026 raise error.Abort(_("cannot tag null revision"))
7027 raise error.Abort(_("cannot tag null revision"))
7027
7028
7028 repo.tag(names, r, message, opts.get('local'), opts.get('user'), date,
7029 repo.tag(names, r, message, opts.get('local'), opts.get('user'), date,
7029 editor=editor)
7030 editor=editor)
7030 finally:
7031 finally:
7031 release(lock, wlock)
7032 release(lock, wlock)
7032
7033
7033 @command('tags', formatteropts, '')
7034 @command('tags', formatteropts, '')
7034 def tags(ui, repo, **opts):
7035 def tags(ui, repo, **opts):
7035 """list repository tags
7036 """list repository tags
7036
7037
7037 This lists both regular and local tags. When the -v/--verbose
7038 This lists both regular and local tags. When the -v/--verbose
7038 switch is used, a third column "local" is printed for local tags.
7039 switch is used, a third column "local" is printed for local tags.
7039 When the -q/--quiet switch is used, only the tag name is printed.
7040 When the -q/--quiet switch is used, only the tag name is printed.
7040
7041
7041 Returns 0 on success.
7042 Returns 0 on success.
7042 """
7043 """
7043
7044
7044 fm = ui.formatter('tags', opts)
7045 fm = ui.formatter('tags', opts)
7045 hexfunc = fm.hexfunc
7046 hexfunc = fm.hexfunc
7046 tagtype = ""
7047 tagtype = ""
7047
7048
7048 for t, n in reversed(repo.tagslist()):
7049 for t, n in reversed(repo.tagslist()):
7049 hn = hexfunc(n)
7050 hn = hexfunc(n)
7050 label = 'tags.normal'
7051 label = 'tags.normal'
7051 tagtype = ''
7052 tagtype = ''
7052 if repo.tagtype(t) == 'local':
7053 if repo.tagtype(t) == 'local':
7053 label = 'tags.local'
7054 label = 'tags.local'
7054 tagtype = 'local'
7055 tagtype = 'local'
7055
7056
7056 fm.startitem()
7057 fm.startitem()
7057 fm.write('tag', '%s', t, label=label)
7058 fm.write('tag', '%s', t, label=label)
7058 fmt = " " * (30 - encoding.colwidth(t)) + ' %5d:%s'
7059 fmt = " " * (30 - encoding.colwidth(t)) + ' %5d:%s'
7059 fm.condwrite(not ui.quiet, 'rev node', fmt,
7060 fm.condwrite(not ui.quiet, 'rev node', fmt,
7060 repo.changelog.rev(n), hn, label=label)
7061 repo.changelog.rev(n), hn, label=label)
7061 fm.condwrite(ui.verbose and tagtype, 'type', ' %s',
7062 fm.condwrite(ui.verbose and tagtype, 'type', ' %s',
7062 tagtype, label=label)
7063 tagtype, label=label)
7063 fm.plain('\n')
7064 fm.plain('\n')
7064 fm.end()
7065 fm.end()
7065
7066
7066 @command('tip',
7067 @command('tip',
7067 [('p', 'patch', None, _('show patch')),
7068 [('p', 'patch', None, _('show patch')),
7068 ('g', 'git', None, _('use git extended diff format')),
7069 ('g', 'git', None, _('use git extended diff format')),
7069 ] + templateopts,
7070 ] + templateopts,
7070 _('[-p] [-g]'))
7071 _('[-p] [-g]'))
7071 def tip(ui, repo, **opts):
7072 def tip(ui, repo, **opts):
7072 """show the tip revision (DEPRECATED)
7073 """show the tip revision (DEPRECATED)
7073
7074
7074 The tip revision (usually just called the tip) is the changeset
7075 The tip revision (usually just called the tip) is the changeset
7075 most recently added to the repository (and therefore the most
7076 most recently added to the repository (and therefore the most
7076 recently changed head).
7077 recently changed head).
7077
7078
7078 If you have just made a commit, that commit will be the tip. If
7079 If you have just made a commit, that commit will be the tip. If
7079 you have just pulled changes from another repository, the tip of
7080 you have just pulled changes from another repository, the tip of
7080 that repository becomes the current tip. The "tip" tag is special
7081 that repository becomes the current tip. The "tip" tag is special
7081 and cannot be renamed or assigned to a different changeset.
7082 and cannot be renamed or assigned to a different changeset.
7082
7083
7083 This command is deprecated, please use :hg:`heads` instead.
7084 This command is deprecated, please use :hg:`heads` instead.
7084
7085
7085 Returns 0 on success.
7086 Returns 0 on success.
7086 """
7087 """
7087 displayer = cmdutil.show_changeset(ui, repo, opts)
7088 displayer = cmdutil.show_changeset(ui, repo, opts)
7088 displayer.show(repo['tip'])
7089 displayer.show(repo['tip'])
7089 displayer.close()
7090 displayer.close()
7090
7091
7091 @command('unbundle',
7092 @command('unbundle',
7092 [('u', 'update', None,
7093 [('u', 'update', None,
7093 _('update to new branch head if changesets were unbundled'))],
7094 _('update to new branch head if changesets were unbundled'))],
7094 _('[-u] FILE...'))
7095 _('[-u] FILE...'))
7095 def unbundle(ui, repo, fname1, *fnames, **opts):
7096 def unbundle(ui, repo, fname1, *fnames, **opts):
7096 """apply one or more changegroup files
7097 """apply one or more changegroup files
7097
7098
7098 Apply one or more compressed changegroup files generated by the
7099 Apply one or more compressed changegroup files generated by the
7099 bundle command.
7100 bundle command.
7100
7101
7101 Returns 0 on success, 1 if an update has unresolved files.
7102 Returns 0 on success, 1 if an update has unresolved files.
7102 """
7103 """
7103 fnames = (fname1,) + fnames
7104 fnames = (fname1,) + fnames
7104
7105
7105 with repo.lock():
7106 with repo.lock():
7106 for fname in fnames:
7107 for fname in fnames:
7107 f = hg.openpath(ui, fname)
7108 f = hg.openpath(ui, fname)
7108 gen = exchange.readbundle(ui, f, fname)
7109 gen = exchange.readbundle(ui, f, fname)
7109 if isinstance(gen, bundle2.unbundle20):
7110 if isinstance(gen, bundle2.unbundle20):
7110 tr = repo.transaction('unbundle')
7111 tr = repo.transaction('unbundle')
7111 try:
7112 try:
7112 op = bundle2.applybundle(repo, gen, tr, source='unbundle',
7113 op = bundle2.applybundle(repo, gen, tr, source='unbundle',
7113 url='bundle:' + fname)
7114 url='bundle:' + fname)
7114 tr.close()
7115 tr.close()
7115 except error.BundleUnknownFeatureError as exc:
7116 except error.BundleUnknownFeatureError as exc:
7116 raise error.Abort(_('%s: unknown bundle feature, %s')
7117 raise error.Abort(_('%s: unknown bundle feature, %s')
7117 % (fname, exc),
7118 % (fname, exc),
7118 hint=_("see https://mercurial-scm.org/"
7119 hint=_("see https://mercurial-scm.org/"
7119 "wiki/BundleFeature for more "
7120 "wiki/BundleFeature for more "
7120 "information"))
7121 "information"))
7121 finally:
7122 finally:
7122 if tr:
7123 if tr:
7123 tr.release()
7124 tr.release()
7124 changes = [r.get('return', 0)
7125 changes = [r.get('return', 0)
7125 for r in op.records['changegroup']]
7126 for r in op.records['changegroup']]
7126 modheads = changegroup.combineresults(changes)
7127 modheads = changegroup.combineresults(changes)
7127 elif isinstance(gen, streamclone.streamcloneapplier):
7128 elif isinstance(gen, streamclone.streamcloneapplier):
7128 raise error.Abort(
7129 raise error.Abort(
7129 _('packed bundles cannot be applied with '
7130 _('packed bundles cannot be applied with '
7130 '"hg unbundle"'),
7131 '"hg unbundle"'),
7131 hint=_('use "hg debugapplystreamclonebundle"'))
7132 hint=_('use "hg debugapplystreamclonebundle"'))
7132 else:
7133 else:
7133 modheads = gen.apply(repo, 'unbundle', 'bundle:' + fname)
7134 modheads = gen.apply(repo, 'unbundle', 'bundle:' + fname)
7134
7135
7135 return postincoming(ui, repo, modheads, opts.get('update'), None, None)
7136 return postincoming(ui, repo, modheads, opts.get('update'), None, None)
7136
7137
7137 @command('^update|up|checkout|co',
7138 @command('^update|up|checkout|co',
7138 [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
7139 [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
7139 ('c', 'check', None, _('require clean working directory')),
7140 ('c', 'check', None, _('require clean working directory')),
7140 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
7141 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
7141 ('r', 'rev', '', _('revision'), _('REV'))
7142 ('r', 'rev', '', _('revision'), _('REV'))
7142 ] + mergetoolopts,
7143 ] + mergetoolopts,
7143 _('[-c] [-C] [-d DATE] [[-r] REV]'))
7144 _('[-c] [-C] [-d DATE] [[-r] REV]'))
7144 def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False,
7145 def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False,
7145 tool=None):
7146 tool=None):
7146 """update working directory (or switch revisions)
7147 """update working directory (or switch revisions)
7147
7148
7148 Update the repository's working directory to the specified
7149 Update the repository's working directory to the specified
7149 changeset. If no changeset is specified, update to the tip of the
7150 changeset. If no changeset is specified, update to the tip of the
7150 current named branch and move the active bookmark (see :hg:`help
7151 current named branch and move the active bookmark (see :hg:`help
7151 bookmarks`).
7152 bookmarks`).
7152
7153
7153 Update sets the working directory's parent revision to the specified
7154 Update sets the working directory's parent revision to the specified
7154 changeset (see :hg:`help parents`).
7155 changeset (see :hg:`help parents`).
7155
7156
7156 If the changeset is not a descendant or ancestor of the working
7157 If the changeset is not a descendant or ancestor of the working
7157 directory's parent, the update is aborted. With the -c/--check
7158 directory's parent, the update is aborted. With the -c/--check
7158 option, the working directory is checked for uncommitted changes; if
7159 option, the working directory is checked for uncommitted changes; if
7159 none are found, the working directory is updated to the specified
7160 none are found, the working directory is updated to the specified
7160 changeset.
7161 changeset.
7161
7162
7162 .. container:: verbose
7163 .. container:: verbose
7163
7164
7164 The following rules apply when the working directory contains
7165 The following rules apply when the working directory contains
7165 uncommitted changes:
7166 uncommitted changes:
7166
7167
7167 1. If neither -c/--check nor -C/--clean is specified, and if
7168 1. If neither -c/--check nor -C/--clean is specified, and if
7168 the requested changeset is an ancestor or descendant of
7169 the requested changeset is an ancestor or descendant of
7169 the working directory's parent, the uncommitted changes
7170 the working directory's parent, the uncommitted changes
7170 are merged into the requested changeset and the merged
7171 are merged into the requested changeset and the merged
7171 result is left uncommitted. If the requested changeset is
7172 result is left uncommitted. If the requested changeset is
7172 not an ancestor or descendant (that is, it is on another
7173 not an ancestor or descendant (that is, it is on another
7173 branch), the update is aborted and the uncommitted changes
7174 branch), the update is aborted and the uncommitted changes
7174 are preserved.
7175 are preserved.
7175
7176
7176 2. With the -c/--check option, the update is aborted and the
7177 2. With the -c/--check option, the update is aborted and the
7177 uncommitted changes are preserved.
7178 uncommitted changes are preserved.
7178
7179
7179 3. With the -C/--clean option, uncommitted changes are discarded and
7180 3. With the -C/--clean option, uncommitted changes are discarded and
7180 the working directory is updated to the requested changeset.
7181 the working directory is updated to the requested changeset.
7181
7182
7182 To cancel an uncommitted merge (and lose your changes), use
7183 To cancel an uncommitted merge (and lose your changes), use
7183 :hg:`update --clean .`.
7184 :hg:`update --clean .`.
7184
7185
7185 Use null as the changeset to remove the working directory (like
7186 Use null as the changeset to remove the working directory (like
7186 :hg:`clone -U`).
7187 :hg:`clone -U`).
7187
7188
7188 If you want to revert just one file to an older revision, use
7189 If you want to revert just one file to an older revision, use
7189 :hg:`revert [-r REV] NAME`.
7190 :hg:`revert [-r REV] NAME`.
7190
7191
7191 See :hg:`help dates` for a list of formats valid for -d/--date.
7192 See :hg:`help dates` for a list of formats valid for -d/--date.
7192
7193
7193 Returns 0 on success, 1 if there are unresolved files.
7194 Returns 0 on success, 1 if there are unresolved files.
7194 """
7195 """
7195 if rev and node:
7196 if rev and node:
7196 raise error.Abort(_("please specify just one revision"))
7197 raise error.Abort(_("please specify just one revision"))
7197
7198
7198 if rev is None or rev == '':
7199 if rev is None or rev == '':
7199 rev = node
7200 rev = node
7200
7201
7201 if date and rev is not None:
7202 if date and rev is not None:
7202 raise error.Abort(_("you can't specify a revision and a date"))
7203 raise error.Abort(_("you can't specify a revision and a date"))
7203
7204
7204 if check and clean:
7205 if check and clean:
7205 raise error.Abort(_("cannot specify both -c/--check and -C/--clean"))
7206 raise error.Abort(_("cannot specify both -c/--check and -C/--clean"))
7206
7207
7207 with repo.wlock():
7208 with repo.wlock():
7208 cmdutil.clearunfinished(repo)
7209 cmdutil.clearunfinished(repo)
7209
7210
7210 if date:
7211 if date:
7211 rev = cmdutil.finddate(ui, repo, date)
7212 rev = cmdutil.finddate(ui, repo, date)
7212
7213
7213 # if we defined a bookmark, we have to remember the original name
7214 # if we defined a bookmark, we have to remember the original name
7214 brev = rev
7215 brev = rev
7215 rev = scmutil.revsingle(repo, rev, rev).rev()
7216 rev = scmutil.revsingle(repo, rev, rev).rev()
7216
7217
7217 if check:
7218 if check:
7218 cmdutil.bailifchanged(repo, merge=False)
7219 cmdutil.bailifchanged(repo, merge=False)
7219
7220
7220 repo.ui.setconfig('ui', 'forcemerge', tool, 'update')
7221 repo.ui.setconfig('ui', 'forcemerge', tool, 'update')
7221
7222
7222 return hg.updatetotally(ui, repo, rev, brev, clean=clean, check=check)
7223 return hg.updatetotally(ui, repo, rev, brev, clean=clean, check=check)
7223
7224
7224 @command('verify', [])
7225 @command('verify', [])
7225 def verify(ui, repo):
7226 def verify(ui, repo):
7226 """verify the integrity of the repository
7227 """verify the integrity of the repository
7227
7228
7228 Verify the integrity of the current repository.
7229 Verify the integrity of the current repository.
7229
7230
7230 This will perform an extensive check of the repository's
7231 This will perform an extensive check of the repository's
7231 integrity, validating the hashes and checksums of each entry in
7232 integrity, validating the hashes and checksums of each entry in
7232 the changelog, manifest, and tracked files, as well as the
7233 the changelog, manifest, and tracked files, as well as the
7233 integrity of their crosslinks and indices.
7234 integrity of their crosslinks and indices.
7234
7235
7235 Please see https://mercurial-scm.org/wiki/RepositoryCorruption
7236 Please see https://mercurial-scm.org/wiki/RepositoryCorruption
7236 for more information about recovery from corruption of the
7237 for more information about recovery from corruption of the
7237 repository.
7238 repository.
7238
7239
7239 Returns 0 on success, 1 if errors are encountered.
7240 Returns 0 on success, 1 if errors are encountered.
7240 """
7241 """
7241 return hg.verify(repo)
7242 return hg.verify(repo)
7242
7243
7243 @command('version', [], norepo=True)
7244 @command('version', [], norepo=True)
7244 def version_(ui):
7245 def version_(ui):
7245 """output version and copyright information"""
7246 """output version and copyright information"""
7246 ui.write(_("Mercurial Distributed SCM (version %s)\n")
7247 ui.write(_("Mercurial Distributed SCM (version %s)\n")
7247 % util.version())
7248 % util.version())
7248 ui.status(_(
7249 ui.status(_(
7249 "(see https://mercurial-scm.org for more information)\n"
7250 "(see https://mercurial-scm.org for more information)\n"
7250 "\nCopyright (C) 2005-2016 Matt Mackall and others\n"
7251 "\nCopyright (C) 2005-2016 Matt Mackall and others\n"
7251 "This is free software; see the source for copying conditions. "
7252 "This is free software; see the source for copying conditions. "
7252 "There is NO\nwarranty; "
7253 "There is NO\nwarranty; "
7253 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
7254 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
7254 ))
7255 ))
7255
7256
7256 ui.note(_("\nEnabled extensions:\n\n"))
7257 ui.note(_("\nEnabled extensions:\n\n"))
7257 if ui.verbose:
7258 if ui.verbose:
7258 # format names and versions into columns
7259 # format names and versions into columns
7259 names = []
7260 names = []
7260 vers = []
7261 vers = []
7261 place = []
7262 place = []
7262 for name, module in extensions.extensions():
7263 for name, module in extensions.extensions():
7263 names.append(name)
7264 names.append(name)
7264 vers.append(extensions.moduleversion(module))
7265 vers.append(extensions.moduleversion(module))
7265 if extensions.ismoduleinternal(module):
7266 if extensions.ismoduleinternal(module):
7266 place.append(_("internal"))
7267 place.append(_("internal"))
7267 else:
7268 else:
7268 place.append(_("external"))
7269 place.append(_("external"))
7269 if names:
7270 if names:
7270 maxnamelen = max(len(n) for n in names)
7271 maxnamelen = max(len(n) for n in names)
7271 for i, name in enumerate(names):
7272 for i, name in enumerate(names):
7272 ui.write(" %-*s %s %s\n" %
7273 ui.write(" %-*s %s %s\n" %
7273 (maxnamelen, name, place[i], vers[i]))
7274 (maxnamelen, name, place[i], vers[i]))
7274
7275
7275 def loadcmdtable(ui, name, cmdtable):
7276 def loadcmdtable(ui, name, cmdtable):
7276 """Load command functions from specified cmdtable
7277 """Load command functions from specified cmdtable
7277 """
7278 """
7278 overrides = [cmd for cmd in cmdtable if cmd in table]
7279 overrides = [cmd for cmd in cmdtable if cmd in table]
7279 if overrides:
7280 if overrides:
7280 ui.warn(_("extension '%s' overrides commands: %s\n")
7281 ui.warn(_("extension '%s' overrides commands: %s\n")
7281 % (name, " ".join(overrides)))
7282 % (name, " ".join(overrides)))
7282 table.update(cmdtable)
7283 table.update(cmdtable)
@@ -1,1941 +1,1942 b''
1 # exchange.py - utility to exchange data between repos.
1 # exchange.py - utility to exchange data between repos.
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import hashlib
11 import hashlib
12
12
13 from .i18n import _
13 from .i18n import _
14 from .node import (
14 from .node import (
15 hex,
15 hex,
16 nullid,
16 nullid,
17 )
17 )
18 from . import (
18 from . import (
19 base85,
19 base85,
20 bookmarks as bookmod,
20 bookmarks as bookmod,
21 bundle2,
21 bundle2,
22 changegroup,
22 changegroup,
23 discovery,
23 discovery,
24 error,
24 error,
25 lock as lockmod,
25 lock as lockmod,
26 obsolete,
26 obsolete,
27 phases,
27 phases,
28 pushkey,
28 pushkey,
29 scmutil,
29 scmutil,
30 sslutil,
30 sslutil,
31 streamclone,
31 streamclone,
32 tags,
32 tags,
33 url as urlmod,
33 url as urlmod,
34 util,
34 util,
35 )
35 )
36
36
37 urlerr = util.urlerr
37 urlerr = util.urlerr
38 urlreq = util.urlreq
38 urlreq = util.urlreq
39
39
40 # Maps bundle compression human names to internal representation.
40 # Maps bundle compression human names to internal representation.
41 _bundlespeccompressions = {'none': None,
41 _bundlespeccompressions = {'none': None,
42 'bzip2': 'BZ',
42 'bzip2': 'BZ',
43 'gzip': 'GZ',
43 'gzip': 'GZ',
44 }
44 }
45
45
46 # Maps bundle version human names to changegroup versions.
46 # Maps bundle version human names to changegroup versions.
47 _bundlespeccgversions = {'v1': '01',
47 _bundlespeccgversions = {'v1': '01',
48 'v2': '02',
48 'v2': '02',
49 'packed1': 's1',
49 'packed1': 's1',
50 'bundle2': '02', #legacy
50 'bundle2': '02', #legacy
51 }
51 }
52
52
53 def parsebundlespec(repo, spec, strict=True, externalnames=False):
53 def parsebundlespec(repo, spec, strict=True, externalnames=False):
54 """Parse a bundle string specification into parts.
54 """Parse a bundle string specification into parts.
55
55
56 Bundle specifications denote a well-defined bundle/exchange format.
56 Bundle specifications denote a well-defined bundle/exchange format.
57 The content of a given specification should not change over time in
57 The content of a given specification should not change over time in
58 order to ensure that bundles produced by a newer version of Mercurial are
58 order to ensure that bundles produced by a newer version of Mercurial are
59 readable from an older version.
59 readable from an older version.
60
60
61 The string currently has the form:
61 The string currently has the form:
62
62
63 <compression>-<type>[;<parameter0>[;<parameter1>]]
63 <compression>-<type>[;<parameter0>[;<parameter1>]]
64
64
65 Where <compression> is one of the supported compression formats
65 Where <compression> is one of the supported compression formats
66 and <type> is (currently) a version string. A ";" can follow the type and
66 and <type> is (currently) a version string. A ";" can follow the type and
67 all text afterwards is interpretted as URI encoded, ";" delimited key=value
67 all text afterwards is interpretted as URI encoded, ";" delimited key=value
68 pairs.
68 pairs.
69
69
70 If ``strict`` is True (the default) <compression> is required. Otherwise,
70 If ``strict`` is True (the default) <compression> is required. Otherwise,
71 it is optional.
71 it is optional.
72
72
73 If ``externalnames`` is False (the default), the human-centric names will
73 If ``externalnames`` is False (the default), the human-centric names will
74 be converted to their internal representation.
74 be converted to their internal representation.
75
75
76 Returns a 3-tuple of (compression, version, parameters). Compression will
76 Returns a 3-tuple of (compression, version, parameters). Compression will
77 be ``None`` if not in strict mode and a compression isn't defined.
77 be ``None`` if not in strict mode and a compression isn't defined.
78
78
79 An ``InvalidBundleSpecification`` is raised when the specification is
79 An ``InvalidBundleSpecification`` is raised when the specification is
80 not syntactically well formed.
80 not syntactically well formed.
81
81
82 An ``UnsupportedBundleSpecification`` is raised when the compression or
82 An ``UnsupportedBundleSpecification`` is raised when the compression or
83 bundle type/version is not recognized.
83 bundle type/version is not recognized.
84
84
85 Note: this function will likely eventually return a more complex data
85 Note: this function will likely eventually return a more complex data
86 structure, including bundle2 part information.
86 structure, including bundle2 part information.
87 """
87 """
88 def parseparams(s):
88 def parseparams(s):
89 if ';' not in s:
89 if ';' not in s:
90 return s, {}
90 return s, {}
91
91
92 params = {}
92 params = {}
93 version, paramstr = s.split(';', 1)
93 version, paramstr = s.split(';', 1)
94
94
95 for p in paramstr.split(';'):
95 for p in paramstr.split(';'):
96 if '=' not in p:
96 if '=' not in p:
97 raise error.InvalidBundleSpecification(
97 raise error.InvalidBundleSpecification(
98 _('invalid bundle specification: '
98 _('invalid bundle specification: '
99 'missing "=" in parameter: %s') % p)
99 'missing "=" in parameter: %s') % p)
100
100
101 key, value = p.split('=', 1)
101 key, value = p.split('=', 1)
102 key = urlreq.unquote(key)
102 key = urlreq.unquote(key)
103 value = urlreq.unquote(value)
103 value = urlreq.unquote(value)
104 params[key] = value
104 params[key] = value
105
105
106 return version, params
106 return version, params
107
107
108
108
109 if strict and '-' not in spec:
109 if strict and '-' not in spec:
110 raise error.InvalidBundleSpecification(
110 raise error.InvalidBundleSpecification(
111 _('invalid bundle specification; '
111 _('invalid bundle specification; '
112 'must be prefixed with compression: %s') % spec)
112 'must be prefixed with compression: %s') % spec)
113
113
114 if '-' in spec:
114 if '-' in spec:
115 compression, version = spec.split('-', 1)
115 compression, version = spec.split('-', 1)
116
116
117 if compression not in _bundlespeccompressions:
117 if compression not in _bundlespeccompressions:
118 raise error.UnsupportedBundleSpecification(
118 raise error.UnsupportedBundleSpecification(
119 _('%s compression is not supported') % compression)
119 _('%s compression is not supported') % compression)
120
120
121 version, params = parseparams(version)
121 version, params = parseparams(version)
122
122
123 if version not in _bundlespeccgversions:
123 if version not in _bundlespeccgversions:
124 raise error.UnsupportedBundleSpecification(
124 raise error.UnsupportedBundleSpecification(
125 _('%s is not a recognized bundle version') % version)
125 _('%s is not a recognized bundle version') % version)
126 else:
126 else:
127 # Value could be just the compression or just the version, in which
127 # Value could be just the compression or just the version, in which
128 # case some defaults are assumed (but only when not in strict mode).
128 # case some defaults are assumed (but only when not in strict mode).
129 assert not strict
129 assert not strict
130
130
131 spec, params = parseparams(spec)
131 spec, params = parseparams(spec)
132
132
133 if spec in _bundlespeccompressions:
133 if spec in _bundlespeccompressions:
134 compression = spec
134 compression = spec
135 version = 'v1'
135 version = 'v1'
136 if 'generaldelta' in repo.requirements:
136 if 'generaldelta' in repo.requirements:
137 version = 'v2'
137 version = 'v2'
138 elif spec in _bundlespeccgversions:
138 elif spec in _bundlespeccgversions:
139 if spec == 'packed1':
139 if spec == 'packed1':
140 compression = 'none'
140 compression = 'none'
141 else:
141 else:
142 compression = 'bzip2'
142 compression = 'bzip2'
143 version = spec
143 version = spec
144 else:
144 else:
145 raise error.UnsupportedBundleSpecification(
145 raise error.UnsupportedBundleSpecification(
146 _('%s is not a recognized bundle specification') % spec)
146 _('%s is not a recognized bundle specification') % spec)
147
147
148 # The specification for packed1 can optionally declare the data formats
148 # The specification for packed1 can optionally declare the data formats
149 # required to apply it. If we see this metadata, compare against what the
149 # required to apply it. If we see this metadata, compare against what the
150 # repo supports and error if the bundle isn't compatible.
150 # repo supports and error if the bundle isn't compatible.
151 if version == 'packed1' and 'requirements' in params:
151 if version == 'packed1' and 'requirements' in params:
152 requirements = set(params['requirements'].split(','))
152 requirements = set(params['requirements'].split(','))
153 missingreqs = requirements - repo.supportedformats
153 missingreqs = requirements - repo.supportedformats
154 if missingreqs:
154 if missingreqs:
155 raise error.UnsupportedBundleSpecification(
155 raise error.UnsupportedBundleSpecification(
156 _('missing support for repository features: %s') %
156 _('missing support for repository features: %s') %
157 ', '.join(sorted(missingreqs)))
157 ', '.join(sorted(missingreqs)))
158
158
159 if not externalnames:
159 if not externalnames:
160 compression = _bundlespeccompressions[compression]
160 compression = _bundlespeccompressions[compression]
161 version = _bundlespeccgversions[version]
161 version = _bundlespeccgversions[version]
162 return compression, version, params
162 return compression, version, params
163
163
164 def readbundle(ui, fh, fname, vfs=None):
164 def readbundle(ui, fh, fname, vfs=None):
165 header = changegroup.readexactly(fh, 4)
165 header = changegroup.readexactly(fh, 4)
166
166
167 alg = None
167 alg = None
168 if not fname:
168 if not fname:
169 fname = "stream"
169 fname = "stream"
170 if not header.startswith('HG') and header.startswith('\0'):
170 if not header.startswith('HG') and header.startswith('\0'):
171 fh = changegroup.headerlessfixup(fh, header)
171 fh = changegroup.headerlessfixup(fh, header)
172 header = "HG10"
172 header = "HG10"
173 alg = 'UN'
173 alg = 'UN'
174 elif vfs:
174 elif vfs:
175 fname = vfs.join(fname)
175 fname = vfs.join(fname)
176
176
177 magic, version = header[0:2], header[2:4]
177 magic, version = header[0:2], header[2:4]
178
178
179 if magic != 'HG':
179 if magic != 'HG':
180 raise error.Abort(_('%s: not a Mercurial bundle') % fname)
180 raise error.Abort(_('%s: not a Mercurial bundle') % fname)
181 if version == '10':
181 if version == '10':
182 if alg is None:
182 if alg is None:
183 alg = changegroup.readexactly(fh, 2)
183 alg = changegroup.readexactly(fh, 2)
184 return changegroup.cg1unpacker(fh, alg)
184 return changegroup.cg1unpacker(fh, alg)
185 elif version.startswith('2'):
185 elif version.startswith('2'):
186 return bundle2.getunbundler(ui, fh, magicstring=magic + version)
186 return bundle2.getunbundler(ui, fh, magicstring=magic + version)
187 elif version == 'S1':
187 elif version == 'S1':
188 return streamclone.streamcloneapplier(fh)
188 return streamclone.streamcloneapplier(fh)
189 else:
189 else:
190 raise error.Abort(_('%s: unknown bundle version %s') % (fname, version))
190 raise error.Abort(_('%s: unknown bundle version %s') % (fname, version))
191
191
192 def getbundlespec(ui, fh):
192 def getbundlespec(ui, fh):
193 """Infer the bundlespec from a bundle file handle.
193 """Infer the bundlespec from a bundle file handle.
194
194
195 The input file handle is seeked and the original seek position is not
195 The input file handle is seeked and the original seek position is not
196 restored.
196 restored.
197 """
197 """
198 def speccompression(alg):
198 def speccompression(alg):
199 for k, v in _bundlespeccompressions.items():
199 for k, v in _bundlespeccompressions.items():
200 if v == alg:
200 if v == alg:
201 return k
201 return k
202 return None
202 return None
203
203
204 b = readbundle(ui, fh, None)
204 b = readbundle(ui, fh, None)
205 if isinstance(b, changegroup.cg1unpacker):
205 if isinstance(b, changegroup.cg1unpacker):
206 alg = b._type
206 alg = b._type
207 if alg == '_truncatedBZ':
207 if alg == '_truncatedBZ':
208 alg = 'BZ'
208 alg = 'BZ'
209 comp = speccompression(alg)
209 comp = speccompression(alg)
210 if not comp:
210 if not comp:
211 raise error.Abort(_('unknown compression algorithm: %s') % alg)
211 raise error.Abort(_('unknown compression algorithm: %s') % alg)
212 return '%s-v1' % comp
212 return '%s-v1' % comp
213 elif isinstance(b, bundle2.unbundle20):
213 elif isinstance(b, bundle2.unbundle20):
214 if 'Compression' in b.params:
214 if 'Compression' in b.params:
215 comp = speccompression(b.params['Compression'])
215 comp = speccompression(b.params['Compression'])
216 if not comp:
216 if not comp:
217 raise error.Abort(_('unknown compression algorithm: %s') % comp)
217 raise error.Abort(_('unknown compression algorithm: %s') % comp)
218 else:
218 else:
219 comp = 'none'
219 comp = 'none'
220
220
221 version = None
221 version = None
222 for part in b.iterparts():
222 for part in b.iterparts():
223 if part.type == 'changegroup':
223 if part.type == 'changegroup':
224 version = part.params['version']
224 version = part.params['version']
225 if version in ('01', '02'):
225 if version in ('01', '02'):
226 version = 'v2'
226 version = 'v2'
227 else:
227 else:
228 raise error.Abort(_('changegroup version %s does not have '
228 raise error.Abort(_('changegroup version %s does not have '
229 'a known bundlespec') % version,
229 'a known bundlespec') % version,
230 hint=_('try upgrading your Mercurial '
230 hint=_('try upgrading your Mercurial '
231 'client'))
231 'client'))
232
232
233 if not version:
233 if not version:
234 raise error.Abort(_('could not identify changegroup version in '
234 raise error.Abort(_('could not identify changegroup version in '
235 'bundle'))
235 'bundle'))
236
236
237 return '%s-%s' % (comp, version)
237 return '%s-%s' % (comp, version)
238 elif isinstance(b, streamclone.streamcloneapplier):
238 elif isinstance(b, streamclone.streamcloneapplier):
239 requirements = streamclone.readbundle1header(fh)[2]
239 requirements = streamclone.readbundle1header(fh)[2]
240 params = 'requirements=%s' % ','.join(sorted(requirements))
240 params = 'requirements=%s' % ','.join(sorted(requirements))
241 return 'none-packed1;%s' % urlreq.quote(params)
241 return 'none-packed1;%s' % urlreq.quote(params)
242 else:
242 else:
243 raise error.Abort(_('unknown bundle type: %s') % b)
243 raise error.Abort(_('unknown bundle type: %s') % b)
244
244
245 def buildobsmarkerspart(bundler, markers):
245 def buildobsmarkerspart(bundler, markers):
246 """add an obsmarker part to the bundler with <markers>
246 """add an obsmarker part to the bundler with <markers>
247
247
248 No part is created if markers is empty.
248 No part is created if markers is empty.
249 Raises ValueError if the bundler doesn't support any known obsmarker format.
249 Raises ValueError if the bundler doesn't support any known obsmarker format.
250 """
250 """
251 if markers:
251 if markers:
252 remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
252 remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
253 version = obsolete.commonversion(remoteversions)
253 version = obsolete.commonversion(remoteversions)
254 if version is None:
254 if version is None:
255 raise ValueError('bundler does not support common obsmarker format')
255 raise ValueError('bundler does not support common obsmarker format')
256 stream = obsolete.encodemarkers(markers, True, version=version)
256 stream = obsolete.encodemarkers(markers, True, version=version)
257 return bundler.newpart('obsmarkers', data=stream)
257 return bundler.newpart('obsmarkers', data=stream)
258 return None
258 return None
259
259
260 def _forcebundle1(op):
260 def _forcebundle1(op):
261 """return true if a pull/push must use bundle1
261 """return true if a pull/push must use bundle1
262
262
263 This function is used to allow testing of the older bundle version"""
263 This function is used to allow testing of the older bundle version"""
264 ui = op.repo.ui
264 ui = op.repo.ui
265 forcebundle1 = False
265 forcebundle1 = False
266 # The goal is this config is to allow developper to choose the bundle
266 # The goal is this config is to allow developper to choose the bundle
267 # version used during exchanged. This is especially handy during test.
267 # version used during exchanged. This is especially handy during test.
268 # Value is a list of bundle version to be picked from, highest version
268 # Value is a list of bundle version to be picked from, highest version
269 # should be used.
269 # should be used.
270 #
270 #
271 # developer config: devel.legacy.exchange
271 # developer config: devel.legacy.exchange
272 exchange = ui.configlist('devel', 'legacy.exchange')
272 exchange = ui.configlist('devel', 'legacy.exchange')
273 forcebundle1 = 'bundle2' not in exchange and 'bundle1' in exchange
273 forcebundle1 = 'bundle2' not in exchange and 'bundle1' in exchange
274 return forcebundle1 or not op.remote.capable('bundle2')
274 return forcebundle1 or not op.remote.capable('bundle2')
275
275
276 class pushoperation(object):
276 class pushoperation(object):
277 """A object that represent a single push operation
277 """A object that represent a single push operation
278
278
279 Its purpose is to carry push related state and very common operations.
279 Its purpose is to carry push related state and very common operations.
280
280
281 A new pushoperation should be created at the beginning of each push and
281 A new pushoperation should be created at the beginning of each push and
282 discarded afterward.
282 discarded afterward.
283 """
283 """
284
284
285 def __init__(self, repo, remote, force=False, revs=None, newbranch=False,
285 def __init__(self, repo, remote, force=False, revs=None, newbranch=False,
286 bookmarks=()):
286 bookmarks=()):
287 # repo we push from
287 # repo we push from
288 self.repo = repo
288 self.repo = repo
289 self.ui = repo.ui
289 self.ui = repo.ui
290 # repo we push to
290 # repo we push to
291 self.remote = remote
291 self.remote = remote
292 # force option provided
292 # force option provided
293 self.force = force
293 self.force = force
294 # revs to be pushed (None is "all")
294 # revs to be pushed (None is "all")
295 self.revs = revs
295 self.revs = revs
296 # bookmark explicitly pushed
296 # bookmark explicitly pushed
297 self.bookmarks = bookmarks
297 self.bookmarks = bookmarks
298 # allow push of new branch
298 # allow push of new branch
299 self.newbranch = newbranch
299 self.newbranch = newbranch
300 # did a local lock get acquired?
300 # did a local lock get acquired?
301 self.locallocked = None
301 self.locallocked = None
302 # step already performed
302 # step already performed
303 # (used to check what steps have been already performed through bundle2)
303 # (used to check what steps have been already performed through bundle2)
304 self.stepsdone = set()
304 self.stepsdone = set()
305 # Integer version of the changegroup push result
305 # Integer version of the changegroup push result
306 # - None means nothing to push
306 # - None means nothing to push
307 # - 0 means HTTP error
307 # - 0 means HTTP error
308 # - 1 means we pushed and remote head count is unchanged *or*
308 # - 1 means we pushed and remote head count is unchanged *or*
309 # we have outgoing changesets but refused to push
309 # we have outgoing changesets but refused to push
310 # - other values as described by addchangegroup()
310 # - other values as described by addchangegroup()
311 self.cgresult = None
311 self.cgresult = None
312 # Boolean value for the bookmark push
312 # Boolean value for the bookmark push
313 self.bkresult = None
313 self.bkresult = None
314 # discover.outgoing object (contains common and outgoing data)
314 # discover.outgoing object (contains common and outgoing data)
315 self.outgoing = None
315 self.outgoing = None
316 # all remote heads before the push
316 # all remote heads before the push
317 self.remoteheads = None
317 self.remoteheads = None
318 # testable as a boolean indicating if any nodes are missing locally.
318 # testable as a boolean indicating if any nodes are missing locally.
319 self.incoming = None
319 self.incoming = None
320 # phases changes that must be pushed along side the changesets
320 # phases changes that must be pushed along side the changesets
321 self.outdatedphases = None
321 self.outdatedphases = None
322 # phases changes that must be pushed if changeset push fails
322 # phases changes that must be pushed if changeset push fails
323 self.fallbackoutdatedphases = None
323 self.fallbackoutdatedphases = None
324 # outgoing obsmarkers
324 # outgoing obsmarkers
325 self.outobsmarkers = set()
325 self.outobsmarkers = set()
326 # outgoing bookmarks
326 # outgoing bookmarks
327 self.outbookmarks = []
327 self.outbookmarks = []
328 # transaction manager
328 # transaction manager
329 self.trmanager = None
329 self.trmanager = None
330 # map { pushkey partid -> callback handling failure}
330 # map { pushkey partid -> callback handling failure}
331 # used to handle exception from mandatory pushkey part failure
331 # used to handle exception from mandatory pushkey part failure
332 self.pkfailcb = {}
332 self.pkfailcb = {}
333
333
334 @util.propertycache
334 @util.propertycache
335 def futureheads(self):
335 def futureheads(self):
336 """future remote heads if the changeset push succeeds"""
336 """future remote heads if the changeset push succeeds"""
337 return self.outgoing.missingheads
337 return self.outgoing.missingheads
338
338
339 @util.propertycache
339 @util.propertycache
340 def fallbackheads(self):
340 def fallbackheads(self):
341 """future remote heads if the changeset push fails"""
341 """future remote heads if the changeset push fails"""
342 if self.revs is None:
342 if self.revs is None:
343 # not target to push, all common are relevant
343 # not target to push, all common are relevant
344 return self.outgoing.commonheads
344 return self.outgoing.commonheads
345 unfi = self.repo.unfiltered()
345 unfi = self.repo.unfiltered()
346 # I want cheads = heads(::missingheads and ::commonheads)
346 # I want cheads = heads(::missingheads and ::commonheads)
347 # (missingheads is revs with secret changeset filtered out)
347 # (missingheads is revs with secret changeset filtered out)
348 #
348 #
349 # This can be expressed as:
349 # This can be expressed as:
350 # cheads = ( (missingheads and ::commonheads)
350 # cheads = ( (missingheads and ::commonheads)
351 # + (commonheads and ::missingheads))"
351 # + (commonheads and ::missingheads))"
352 # )
352 # )
353 #
353 #
354 # while trying to push we already computed the following:
354 # while trying to push we already computed the following:
355 # common = (::commonheads)
355 # common = (::commonheads)
356 # missing = ((commonheads::missingheads) - commonheads)
356 # missing = ((commonheads::missingheads) - commonheads)
357 #
357 #
358 # We can pick:
358 # We can pick:
359 # * missingheads part of common (::commonheads)
359 # * missingheads part of common (::commonheads)
360 common = self.outgoing.common
360 common = self.outgoing.common
361 nm = self.repo.changelog.nodemap
361 nm = self.repo.changelog.nodemap
362 cheads = [node for node in self.revs if nm[node] in common]
362 cheads = [node for node in self.revs if nm[node] in common]
363 # and
363 # and
364 # * commonheads parents on missing
364 # * commonheads parents on missing
365 revset = unfi.set('%ln and parents(roots(%ln))',
365 revset = unfi.set('%ln and parents(roots(%ln))',
366 self.outgoing.commonheads,
366 self.outgoing.commonheads,
367 self.outgoing.missing)
367 self.outgoing.missing)
368 cheads.extend(c.node() for c in revset)
368 cheads.extend(c.node() for c in revset)
369 return cheads
369 return cheads
370
370
371 @property
371 @property
372 def commonheads(self):
372 def commonheads(self):
373 """set of all common heads after changeset bundle push"""
373 """set of all common heads after changeset bundle push"""
374 if self.cgresult:
374 if self.cgresult:
375 return self.futureheads
375 return self.futureheads
376 else:
376 else:
377 return self.fallbackheads
377 return self.fallbackheads
378
378
379 # mapping of message used when pushing bookmark
379 # mapping of message used when pushing bookmark
380 bookmsgmap = {'update': (_("updating bookmark %s\n"),
380 bookmsgmap = {'update': (_("updating bookmark %s\n"),
381 _('updating bookmark %s failed!\n')),
381 _('updating bookmark %s failed!\n')),
382 'export': (_("exporting bookmark %s\n"),
382 'export': (_("exporting bookmark %s\n"),
383 _('exporting bookmark %s failed!\n')),
383 _('exporting bookmark %s failed!\n')),
384 'delete': (_("deleting remote bookmark %s\n"),
384 'delete': (_("deleting remote bookmark %s\n"),
385 _('deleting remote bookmark %s failed!\n')),
385 _('deleting remote bookmark %s failed!\n')),
386 }
386 }
387
387
388
388
389 def push(repo, remote, force=False, revs=None, newbranch=False, bookmarks=(),
389 def push(repo, remote, force=False, revs=None, newbranch=False, bookmarks=(),
390 opargs=None):
390 opargs=None):
391 '''Push outgoing changesets (limited by revs) from a local
391 '''Push outgoing changesets (limited by revs) from a local
392 repository to remote. Return an integer:
392 repository to remote. Return an integer:
393 - None means nothing to push
393 - None means nothing to push
394 - 0 means HTTP error
394 - 0 means HTTP error
395 - 1 means we pushed and remote head count is unchanged *or*
395 - 1 means we pushed and remote head count is unchanged *or*
396 we have outgoing changesets but refused to push
396 we have outgoing changesets but refused to push
397 - other values as described by addchangegroup()
397 - other values as described by addchangegroup()
398 '''
398 '''
399 if opargs is None:
399 if opargs is None:
400 opargs = {}
400 opargs = {}
401 pushop = pushoperation(repo, remote, force, revs, newbranch, bookmarks,
401 pushop = pushoperation(repo, remote, force, revs, newbranch, bookmarks,
402 **opargs)
402 **opargs)
403 if pushop.remote.local():
403 if pushop.remote.local():
404 missing = (set(pushop.repo.requirements)
404 missing = (set(pushop.repo.requirements)
405 - pushop.remote.local().supported)
405 - pushop.remote.local().supported)
406 if missing:
406 if missing:
407 msg = _("required features are not"
407 msg = _("required features are not"
408 " supported in the destination:"
408 " supported in the destination:"
409 " %s") % (', '.join(sorted(missing)))
409 " %s") % (', '.join(sorted(missing)))
410 raise error.Abort(msg)
410 raise error.Abort(msg)
411
411
412 # there are two ways to push to remote repo:
412 # there are two ways to push to remote repo:
413 #
413 #
414 # addchangegroup assumes local user can lock remote
414 # addchangegroup assumes local user can lock remote
415 # repo (local filesystem, old ssh servers).
415 # repo (local filesystem, old ssh servers).
416 #
416 #
417 # unbundle assumes local user cannot lock remote repo (new ssh
417 # unbundle assumes local user cannot lock remote repo (new ssh
418 # servers, http servers).
418 # servers, http servers).
419
419
420 if not pushop.remote.canpush():
420 if not pushop.remote.canpush():
421 raise error.Abort(_("destination does not support push"))
421 raise error.Abort(_("destination does not support push"))
422 # get local lock as we might write phase data
422 # get local lock as we might write phase data
423 localwlock = locallock = None
423 localwlock = locallock = None
424 try:
424 try:
425 # bundle2 push may receive a reply bundle touching bookmarks or other
425 # bundle2 push may receive a reply bundle touching bookmarks or other
426 # things requiring the wlock. Take it now to ensure proper ordering.
426 # things requiring the wlock. Take it now to ensure proper ordering.
427 maypushback = pushop.ui.configbool('experimental', 'bundle2.pushback')
427 maypushback = pushop.ui.configbool('experimental', 'bundle2.pushback')
428 if (not _forcebundle1(pushop)) and maypushback:
428 if (not _forcebundle1(pushop)) and maypushback:
429 localwlock = pushop.repo.wlock()
429 localwlock = pushop.repo.wlock()
430 locallock = pushop.repo.lock()
430 locallock = pushop.repo.lock()
431 pushop.locallocked = True
431 pushop.locallocked = True
432 except IOError as err:
432 except IOError as err:
433 pushop.locallocked = False
433 pushop.locallocked = False
434 if err.errno != errno.EACCES:
434 if err.errno != errno.EACCES:
435 raise
435 raise
436 # source repo cannot be locked.
436 # source repo cannot be locked.
437 # We do not abort the push, but just disable the local phase
437 # We do not abort the push, but just disable the local phase
438 # synchronisation.
438 # synchronisation.
439 msg = 'cannot lock source repository: %s\n' % err
439 msg = 'cannot lock source repository: %s\n' % err
440 pushop.ui.debug(msg)
440 pushop.ui.debug(msg)
441 try:
441 try:
442 if pushop.locallocked:
442 if pushop.locallocked:
443 pushop.trmanager = transactionmanager(pushop.repo,
443 pushop.trmanager = transactionmanager(pushop.repo,
444 'push-response',
444 'push-response',
445 pushop.remote.url())
445 pushop.remote.url())
446 pushop.repo.checkpush(pushop)
446 pushop.repo.checkpush(pushop)
447 lock = None
447 lock = None
448 unbundle = pushop.remote.capable('unbundle')
448 unbundle = pushop.remote.capable('unbundle')
449 if not unbundle:
449 if not unbundle:
450 lock = pushop.remote.lock()
450 lock = pushop.remote.lock()
451 try:
451 try:
452 _pushdiscovery(pushop)
452 _pushdiscovery(pushop)
453 if not _forcebundle1(pushop):
453 if not _forcebundle1(pushop):
454 _pushbundle2(pushop)
454 _pushbundle2(pushop)
455 _pushchangeset(pushop)
455 _pushchangeset(pushop)
456 _pushsyncphase(pushop)
456 _pushsyncphase(pushop)
457 _pushobsolete(pushop)
457 _pushobsolete(pushop)
458 _pushbookmark(pushop)
458 _pushbookmark(pushop)
459 finally:
459 finally:
460 if lock is not None:
460 if lock is not None:
461 lock.release()
461 lock.release()
462 if pushop.trmanager:
462 if pushop.trmanager:
463 pushop.trmanager.close()
463 pushop.trmanager.close()
464 finally:
464 finally:
465 if pushop.trmanager:
465 if pushop.trmanager:
466 pushop.trmanager.release()
466 pushop.trmanager.release()
467 if locallock is not None:
467 if locallock is not None:
468 locallock.release()
468 locallock.release()
469 if localwlock is not None:
469 if localwlock is not None:
470 localwlock.release()
470 localwlock.release()
471
471
472 return pushop
472 return pushop
473
473
474 # list of steps to perform discovery before push
474 # list of steps to perform discovery before push
475 pushdiscoveryorder = []
475 pushdiscoveryorder = []
476
476
477 # Mapping between step name and function
477 # Mapping between step name and function
478 #
478 #
479 # This exists to help extensions wrap steps if necessary
479 # This exists to help extensions wrap steps if necessary
480 pushdiscoverymapping = {}
480 pushdiscoverymapping = {}
481
481
482 def pushdiscovery(stepname):
482 def pushdiscovery(stepname):
483 """decorator for function performing discovery before push
483 """decorator for function performing discovery before push
484
484
485 The function is added to the step -> function mapping and appended to the
485 The function is added to the step -> function mapping and appended to the
486 list of steps. Beware that decorated function will be added in order (this
486 list of steps. Beware that decorated function will be added in order (this
487 may matter).
487 may matter).
488
488
489 You can only use this decorator for a new step, if you want to wrap a step
489 You can only use this decorator for a new step, if you want to wrap a step
490 from an extension, change the pushdiscovery dictionary directly."""
490 from an extension, change the pushdiscovery dictionary directly."""
491 def dec(func):
491 def dec(func):
492 assert stepname not in pushdiscoverymapping
492 assert stepname not in pushdiscoverymapping
493 pushdiscoverymapping[stepname] = func
493 pushdiscoverymapping[stepname] = func
494 pushdiscoveryorder.append(stepname)
494 pushdiscoveryorder.append(stepname)
495 return func
495 return func
496 return dec
496 return dec
497
497
498 def _pushdiscovery(pushop):
498 def _pushdiscovery(pushop):
499 """Run all discovery steps"""
499 """Run all discovery steps"""
500 for stepname in pushdiscoveryorder:
500 for stepname in pushdiscoveryorder:
501 step = pushdiscoverymapping[stepname]
501 step = pushdiscoverymapping[stepname]
502 step(pushop)
502 step(pushop)
503
503
504 @pushdiscovery('changeset')
504 @pushdiscovery('changeset')
505 def _pushdiscoverychangeset(pushop):
505 def _pushdiscoverychangeset(pushop):
506 """discover the changeset that need to be pushed"""
506 """discover the changeset that need to be pushed"""
507 fci = discovery.findcommonincoming
507 fci = discovery.findcommonincoming
508 commoninc = fci(pushop.repo, pushop.remote, force=pushop.force)
508 commoninc = fci(pushop.repo, pushop.remote, force=pushop.force)
509 common, inc, remoteheads = commoninc
509 common, inc, remoteheads = commoninc
510 fco = discovery.findcommonoutgoing
510 fco = discovery.findcommonoutgoing
511 outgoing = fco(pushop.repo, pushop.remote, onlyheads=pushop.revs,
511 outgoing = fco(pushop.repo, pushop.remote, onlyheads=pushop.revs,
512 commoninc=commoninc, force=pushop.force)
512 commoninc=commoninc, force=pushop.force)
513 pushop.outgoing = outgoing
513 pushop.outgoing = outgoing
514 pushop.remoteheads = remoteheads
514 pushop.remoteheads = remoteheads
515 pushop.incoming = inc
515 pushop.incoming = inc
516
516
517 @pushdiscovery('phase')
517 @pushdiscovery('phase')
518 def _pushdiscoveryphase(pushop):
518 def _pushdiscoveryphase(pushop):
519 """discover the phase that needs to be pushed
519 """discover the phase that needs to be pushed
520
520
521 (computed for both success and failure case for changesets push)"""
521 (computed for both success and failure case for changesets push)"""
522 outgoing = pushop.outgoing
522 outgoing = pushop.outgoing
523 unfi = pushop.repo.unfiltered()
523 unfi = pushop.repo.unfiltered()
524 remotephases = pushop.remote.listkeys('phases')
524 remotephases = pushop.remote.listkeys('phases')
525 publishing = remotephases.get('publishing', False)
525 publishing = remotephases.get('publishing', False)
526 if (pushop.ui.configbool('ui', '_usedassubrepo', False)
526 if (pushop.ui.configbool('ui', '_usedassubrepo', False)
527 and remotephases # server supports phases
527 and remotephases # server supports phases
528 and not pushop.outgoing.missing # no changesets to be pushed
528 and not pushop.outgoing.missing # no changesets to be pushed
529 and publishing):
529 and publishing):
530 # When:
530 # When:
531 # - this is a subrepo push
531 # - this is a subrepo push
532 # - and remote support phase
532 # - and remote support phase
533 # - and no changeset are to be pushed
533 # - and no changeset are to be pushed
534 # - and remote is publishing
534 # - and remote is publishing
535 # We may be in issue 3871 case!
535 # We may be in issue 3871 case!
536 # We drop the possible phase synchronisation done by
536 # We drop the possible phase synchronisation done by
537 # courtesy to publish changesets possibly locally draft
537 # courtesy to publish changesets possibly locally draft
538 # on the remote.
538 # on the remote.
539 remotephases = {'publishing': 'True'}
539 remotephases = {'publishing': 'True'}
540 ana = phases.analyzeremotephases(pushop.repo,
540 ana = phases.analyzeremotephases(pushop.repo,
541 pushop.fallbackheads,
541 pushop.fallbackheads,
542 remotephases)
542 remotephases)
543 pheads, droots = ana
543 pheads, droots = ana
544 extracond = ''
544 extracond = ''
545 if not publishing:
545 if not publishing:
546 extracond = ' and public()'
546 extracond = ' and public()'
547 revset = 'heads((%%ln::%%ln) %s)' % extracond
547 revset = 'heads((%%ln::%%ln) %s)' % extracond
548 # Get the list of all revs draft on remote by public here.
548 # Get the list of all revs draft on remote by public here.
549 # XXX Beware that revset break if droots is not strictly
549 # XXX Beware that revset break if droots is not strictly
550 # XXX root we may want to ensure it is but it is costly
550 # XXX root we may want to ensure it is but it is costly
551 fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
551 fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
552 if not outgoing.missing:
552 if not outgoing.missing:
553 future = fallback
553 future = fallback
554 else:
554 else:
555 # adds changeset we are going to push as draft
555 # adds changeset we are going to push as draft
556 #
556 #
557 # should not be necessary for publishing server, but because of an
557 # should not be necessary for publishing server, but because of an
558 # issue fixed in xxxxx we have to do it anyway.
558 # issue fixed in xxxxx we have to do it anyway.
559 fdroots = list(unfi.set('roots(%ln + %ln::)',
559 fdroots = list(unfi.set('roots(%ln + %ln::)',
560 outgoing.missing, droots))
560 outgoing.missing, droots))
561 fdroots = [f.node() for f in fdroots]
561 fdroots = [f.node() for f in fdroots]
562 future = list(unfi.set(revset, fdroots, pushop.futureheads))
562 future = list(unfi.set(revset, fdroots, pushop.futureheads))
563 pushop.outdatedphases = future
563 pushop.outdatedphases = future
564 pushop.fallbackoutdatedphases = fallback
564 pushop.fallbackoutdatedphases = fallback
565
565
566 @pushdiscovery('obsmarker')
566 @pushdiscovery('obsmarker')
567 def _pushdiscoveryobsmarkers(pushop):
567 def _pushdiscoveryobsmarkers(pushop):
568 if (obsolete.isenabled(pushop.repo, obsolete.exchangeopt)
568 if (obsolete.isenabled(pushop.repo, obsolete.exchangeopt)
569 and pushop.repo.obsstore
569 and pushop.repo.obsstore
570 and 'obsolete' in pushop.remote.listkeys('namespaces')):
570 and 'obsolete' in pushop.remote.listkeys('namespaces')):
571 repo = pushop.repo
571 repo = pushop.repo
572 # very naive computation, that can be quite expensive on big repo.
572 # very naive computation, that can be quite expensive on big repo.
573 # However: evolution is currently slow on them anyway.
573 # However: evolution is currently slow on them anyway.
574 nodes = (c.node() for c in repo.set('::%ln', pushop.futureheads))
574 nodes = (c.node() for c in repo.set('::%ln', pushop.futureheads))
575 pushop.outobsmarkers = pushop.repo.obsstore.relevantmarkers(nodes)
575 pushop.outobsmarkers = pushop.repo.obsstore.relevantmarkers(nodes)
576
576
577 @pushdiscovery('bookmarks')
577 @pushdiscovery('bookmarks')
578 def _pushdiscoverybookmarks(pushop):
578 def _pushdiscoverybookmarks(pushop):
579 ui = pushop.ui
579 ui = pushop.ui
580 repo = pushop.repo.unfiltered()
580 repo = pushop.repo.unfiltered()
581 remote = pushop.remote
581 remote = pushop.remote
582 ui.debug("checking for updated bookmarks\n")
582 ui.debug("checking for updated bookmarks\n")
583 ancestors = ()
583 ancestors = ()
584 if pushop.revs:
584 if pushop.revs:
585 revnums = map(repo.changelog.rev, pushop.revs)
585 revnums = map(repo.changelog.rev, pushop.revs)
586 ancestors = repo.changelog.ancestors(revnums, inclusive=True)
586 ancestors = repo.changelog.ancestors(revnums, inclusive=True)
587 remotebookmark = remote.listkeys('bookmarks')
587 remotebookmark = remote.listkeys('bookmarks')
588
588
589 explicit = set([repo._bookmarks.expandname(bookmark)
589 explicit = set([repo._bookmarks.expandname(bookmark)
590 for bookmark in pushop.bookmarks])
590 for bookmark in pushop.bookmarks])
591
591
592 comp = bookmod.compare(repo, repo._bookmarks, remotebookmark, srchex=hex)
592 comp = bookmod.compare(repo, repo._bookmarks, remotebookmark, srchex=hex)
593 addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp
593 addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp
594 for b, scid, dcid in advsrc:
594 for b, scid, dcid in advsrc:
595 if b in explicit:
595 if b in explicit:
596 explicit.remove(b)
596 explicit.remove(b)
597 if not ancestors or repo[scid].rev() in ancestors:
597 if not ancestors or repo[scid].rev() in ancestors:
598 pushop.outbookmarks.append((b, dcid, scid))
598 pushop.outbookmarks.append((b, dcid, scid))
599 # search added bookmark
599 # search added bookmark
600 for b, scid, dcid in addsrc:
600 for b, scid, dcid in addsrc:
601 if b in explicit:
601 if b in explicit:
602 explicit.remove(b)
602 explicit.remove(b)
603 pushop.outbookmarks.append((b, '', scid))
603 pushop.outbookmarks.append((b, '', scid))
604 # search for overwritten bookmark
604 # search for overwritten bookmark
605 for b, scid, dcid in advdst + diverge + differ:
605 for b, scid, dcid in advdst + diverge + differ:
606 if b in explicit:
606 if b in explicit:
607 explicit.remove(b)
607 explicit.remove(b)
608 pushop.outbookmarks.append((b, dcid, scid))
608 pushop.outbookmarks.append((b, dcid, scid))
609 # search for bookmark to delete
609 # search for bookmark to delete
610 for b, scid, dcid in adddst:
610 for b, scid, dcid in adddst:
611 if b in explicit:
611 if b in explicit:
612 explicit.remove(b)
612 explicit.remove(b)
613 # treat as "deleted locally"
613 # treat as "deleted locally"
614 pushop.outbookmarks.append((b, dcid, ''))
614 pushop.outbookmarks.append((b, dcid, ''))
615 # identical bookmarks shouldn't get reported
615 # identical bookmarks shouldn't get reported
616 for b, scid, dcid in same:
616 for b, scid, dcid in same:
617 if b in explicit:
617 if b in explicit:
618 explicit.remove(b)
618 explicit.remove(b)
619
619
620 if explicit:
620 if explicit:
621 explicit = sorted(explicit)
621 explicit = sorted(explicit)
622 # we should probably list all of them
622 # we should probably list all of them
623 ui.warn(_('bookmark %s does not exist on the local '
623 ui.warn(_('bookmark %s does not exist on the local '
624 'or remote repository!\n') % explicit[0])
624 'or remote repository!\n') % explicit[0])
625 pushop.bkresult = 2
625 pushop.bkresult = 2
626
626
627 pushop.outbookmarks.sort()
627 pushop.outbookmarks.sort()
628
628
def _pushcheckoutgoing(pushop):
    """Validate that the outgoing set of ``pushop`` may be pushed.

    Returns False when there is nothing to push. Unless the push is
    forced, aborts when an outgoing head is obsolete/troubled or when the
    push would create new remote heads.
    """
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    if not outgoing.missing:
        # nothing to push
        scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
        return False
    # something to push
    if not pushop.force:
        # if repo.obsstore == False --> no obsolete
        # then, save the iteration
        if unfi.obsstore:
            # these messages are bound to locals for 80-char-limit reasons
            obsmsg = _("push includes obsolete changeset: %s!")
            troublemsgs = {
                "unstable": _("push includes unstable changeset: %s!"),
                "bumped": _("push includes bumped changeset: %s!"),
                "divergent": _("push includes divergent changeset: %s!"),
            }
            # If there is at least one obsolete or unstable changeset in
            # missing, at least one of the missingheads will be obsolete or
            # unstable. So checking heads only is ok.
            for headnode in outgoing.missingheads:
                headctx = unfi[headnode]
                if headctx.obsolete():
                    raise error.Abort(obsmsg % headctx)
                elif headctx.troubled():
                    raise error.Abort(
                        troublemsgs[headctx.troubles()[0]] % headctx)

        discovery.checkheads(pushop)
    return True
659
659
# List of names of steps to perform for an outgoing bundle2, order matters.
# (populated by the @b2partsgenerator decorator below)
b2partsgenorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
b2partsgenmapping = {}
667
667
def b2partsgenerator(stepname, idx=None):
    """decorator for function generating bundle2 part

    The function is added to the step -> function mapping and appended to the
    list of steps. Beware that decorated functions will be added in order
    (this may matter).

    You can only use this decorator for new steps, if you want to wrap a step
    from an extension, attack the b2partsgenmapping dictionary directly."""
    def register(func):
        assert stepname not in b2partsgenmapping
        b2partsgenmapping[stepname] = func
        # inserting at len() is equivalent to appending, which unifies
        # the "no explicit index" and "explicit index" cases
        position = len(b2partsgenorder) if idx is None else idx
        b2partsgenorder.insert(position, stepname)
        return func
    return register
686
686
687 def _pushb2ctxcheckheads(pushop, bundler):
687 def _pushb2ctxcheckheads(pushop, bundler):
688 """Generate race condition checking parts
688 """Generate race condition checking parts
689
689
690 Exists as an independent function to aid extensions
690 Exists as an independent function to aid extensions
691 """
691 """
692 if not pushop.force:
692 if not pushop.force:
693 bundler.newpart('check:heads', data=iter(pushop.remoteheads))
693 bundler.newpart('check:heads', data=iter(pushop.remoteheads))
694
694
@b2partsgenerator('changeset')
def _pushb2ctx(pushop, bundler):
    """handle changegroup push through bundle2

    addchangegroup result is stored in the ``pushop.cgresult`` attribute.
    Returns a reply handler (called with the bundle2 reply) or None when
    the step is skipped or there is nothing to push.
    """
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    # Send known heads to the server for race detection.
    if not _pushcheckoutgoing(pushop):
        return
    pushop.repo.prepushoutgoinghooks(pushop)

    _pushb2ctxcheckheads(pushop, bundler)

    # negotiate the changegroup version: default to '01', upgrade to the
    # highest version both sides support
    b2caps = bundle2.bundle2caps(pushop.remote)
    version = '01'
    cgversions = b2caps.get('changegroup')
    if cgversions:  # 3.1 and 3.2 ship with an empty value
        cgversions = [v for v in cgversions
                      if v in changegroup.supportedoutgoingversions(
                          pushop.repo)]
        if not cgversions:
            raise ValueError(_('no common changegroup version'))
        version = max(cgversions)
    cg = changegroup.getlocalchangegroupraw(pushop.repo, 'push',
                                            pushop.outgoing,
                                            version=version)
    cgpart = bundler.newpart('changegroup', data=cg)
    if cgversions:
        cgpart.addparam('version', version)
    if 'treemanifest' in pushop.repo.requirements:
        cgpart.addparam('treemanifest', '1')
    def handlereply(op):
        """extract addchangegroup returns from server reply"""
        cgreplies = op.records.getreplies(cgpart.id)
        assert len(cgreplies['changegroup']) == 1
        pushop.cgresult = cgreplies['changegroup'][0]['return']
    return handlereply
735
735
@b2partsgenerator('phase')
def _pushb2phases(pushop, bundler):
    """handle phase push through bundle2

    Adds one ``pushkey`` part per head that must be turned public on the
    remote, registers a failure callback per part, and returns a reply
    handler that reports ignored/failed updates. Returns None when the
    step was already done or the remote lacks pushkey support.
    """
    if 'phases' in pushop.stepsdone:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    # idiom/consistency fix: was "not 'pushkey' in b2caps"; siblings such
    # as _pushb2bookmarks already use the "not in" form
    if 'pushkey' not in b2caps:
        return
    pushop.stepsdone.add('phases')
    part2node = []

    def handlefailure(pushop, exc):
        # map the failing part id back to the node it was pushing
        targetid = int(exc.partid)
        for partid, node in part2node:
            if partid == targetid:
                raise error.Abort(_('updating %s to public failed') % node)

    enc = pushkey.encode
    for newremotehead in pushop.outdatedphases:
        part = bundler.newpart('pushkey')
        part.addparam('namespace', enc('phases'))
        part.addparam('key', enc(newremotehead.hex()))
        part.addparam('old', enc(str(phases.draft)))
        part.addparam('new', enc(str(phases.public)))
        part2node.append((part.id, newremotehead))
        pushop.pkfailcb[part.id] = handlefailure

    def handlereply(op):
        """warn about every pushkey part the server ignored or refused"""
        for partid, node in part2node:
            partrep = op.records.getreplies(partid)
            results = partrep['pushkey']
            assert len(results) <= 1
            msg = None
            if not results:
                msg = _('server ignored update of %s to public!\n') % node
            elif not int(results[0]['return']):
                msg = _('updating %s to public failed!\n') % node
            if msg is not None:
                pushop.ui.warn(msg)
    return handlereply
776
776
@b2partsgenerator('obsmarkers')
def _pushb2obsmarkers(pushop, bundler):
    """add an obsolescence-markers part to the outgoing bundle2

    Skipped when the step already ran or when no obsmarker version is
    common to both sides.
    """
    if 'obsmarkers' in pushop.stepsdone:
        return
    remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
    if obsolete.commonversion(remoteversions) is None:
        return
    pushop.stepsdone.add('obsmarkers')
    if not pushop.outobsmarkers:
        return
    sortedmarkers = sorted(pushop.outobsmarkers)
    buildobsmarkerspart(bundler, sortedmarkers)
788
788
@b2partsgenerator('bookmarks')
def _pushb2bookmarks(pushop, bundler):
    """handle bookmark push through bundle2

    One ``pushkey`` part is emitted per outgoing bookmark change.  A
    failure callback is registered for each part so a server-side pushkey
    failure aborts with a bookmark-specific message.  Returns a reply
    handler, or None when the step was already done or the remote lacks
    pushkey support.
    """
    if 'bookmarks' in pushop.stepsdone:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    if 'pushkey' not in b2caps:
        return
    pushop.stepsdone.add('bookmarks')
    part2book = []
    enc = pushkey.encode

    def handlefailure(pushop, exc):
        # look up the bookmark behind the failing part id
        targetid = int(exc.partid)
        for partid, book, action in part2book:
            if partid == targetid:
                raise error.Abort(bookmsgmap[action][1].rstrip() % book)
        # we should not be called for a part we did not generate
        assert False

    for book, old, new in pushop.outbookmarks:
        part = bundler.newpart('pushkey')
        part.addparam('namespace', enc('bookmarks'))
        part.addparam('key', enc(book))
        part.addparam('old', enc(old))
        part.addparam('new', enc(new))
        # empty old -> bookmark is new on remote; empty new -> deletion
        action = 'update'
        if not old:
            action = 'export'
        elif not new:
            action = 'delete'
        part2book.append((part.id, book, action))
        pushop.pkfailcb[part.id] = handlefailure

    def handlereply(op):
        """report per-bookmark results and set pushop.bkresult"""
        ui = pushop.ui
        for partid, book, action in part2book:
            partrep = op.records.getreplies(partid)
            results = partrep['pushkey']
            assert len(results) <= 1
            if not results:
                pushop.ui.warn(_('server ignored bookmark %s update\n') % book)
            else:
                ret = int(results[0]['return'])
                if ret:
                    ui.status(bookmsgmap[action][0] % book)
                else:
                    ui.warn(bookmsgmap[action][1] % book)
        if pushop.bkresult is not None:
            pushop.bkresult = 1
    return handlereply
840
840
841
841
def _pushbundle2(pushop):
    """push data to the remote using bundle2

    The only currently supported type of data is changegroup but this will
    evolve in the future."""
    bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
    # pushback (server sending data back) requires a transaction manager
    # and the experimental config knob
    pushback = (pushop.trmanager
                and pushop.ui.configbool('experimental', 'bundle2.pushback'))

    # create reply capability
    capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo,
                                                      allowpushback=pushback))
    bundler.newpart('replycaps', data=capsblob)
    replyhandlers = []
    # run every registered part generator in declared order; a generator
    # may hand back a callable to process the server's reply later
    for partgenname in b2partsgenorder:
        partgen = b2partsgenmapping[partgenname]
        ret = partgen(pushop, bundler)
        if callable(ret):
            replyhandlers.append(ret)
    # do not push if nothing to push (the replycaps part is always there,
    # hence the <= 1 test)
    if bundler.nbparts <= 1:
        return
    stream = util.chunkbuffer(bundler.getchunks())
    try:
        try:
            reply = pushop.remote.unbundle(
                stream, ['force'], pushop.remote.url())
        except error.BundleValueError as exc:
            raise error.Abort(_('missing support for %s') % exc)
        try:
            trgetter = None
            if pushback:
                trgetter = pushop.trmanager.transaction
            op = bundle2.processbundle(pushop.repo, reply, trgetter)
        except error.BundleValueError as exc:
            raise error.Abort(_('missing support for %s') % exc)
        except bundle2.AbortFromPart as exc:
            pushop.ui.status(_('remote: %s\n') % exc)
            raise error.Abort(_('push failed on remote'), hint=exc.hint)
    except error.PushkeyFailed as exc:
        # delegate to the callback registered for this part id, if any
        partid = int(exc.partid)
        if partid not in pushop.pkfailcb:
            raise
        pushop.pkfailcb[partid](pushop, exc)
    for rephand in replyhandlers:
        rephand(op)
888
888
def _pushchangeset(pushop):
    """Make the actual push of changeset bundle to remote repo

    Non-bundle2 (legacy) push path: builds a changegroup locally and
    transfers it with either ``unbundle`` or ``addchangegroup`` depending
    on remote capabilities.  Stores the result in ``pushop.cgresult``.
    """
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    if not _pushcheckoutgoing(pushop):
        return
    pushop.repo.prepushoutgoinghooks(pushop)
    outgoing = pushop.outgoing
    unbundle = pushop.remote.capable('unbundle')
    # TODO: get bundlecaps from remote
    bundlecaps = None
    # create a changegroup from local
    if pushop.revs is None and not (outgoing.excluded
                                    or pushop.repo.changelog.filteredrevs):
        # push everything,
        # use the fast path, no race possible on push
        bundler = changegroup.cg1packer(pushop.repo, bundlecaps)
        cg = changegroup.getsubset(pushop.repo,
                                   outgoing,
                                   bundler,
                                   'push',
                                   fastpath=True)
    else:
        cg = changegroup.getlocalchangegroup(pushop.repo, 'push', outgoing,
                                             bundlecaps)

    # apply changegroup to remote
    if unbundle:
        # local repo finds heads on server, finds out what
        # revs it must push. once revs transferred, if server
        # finds it has different heads (someone else won
        # commit/push race), server aborts.
        if pushop.force:
            remoteheads = ['force']
        else:
            remoteheads = pushop.remoteheads
        # ssh: return remote's addchangegroup()
        # http: return remote's addchangegroup() or 0 for error
        pushop.cgresult = pushop.remote.unbundle(cg, remoteheads,
                                                 pushop.repo.url())
    else:
        # we return an integer indicating remote head count
        # change
        pushop.cgresult = pushop.remote.addchangegroup(cg, 'push',
                                                       pushop.repo.url())
935
935
def _pushsyncphase(pushop):
    """synchronise phase information locally and remotely"""
    cheads = pushop.commonheads
    # even when we don't push, exchanging phase data is useful
    remotephases = pushop.remote.listkeys('phases')
    if (pushop.ui.configbool('ui', '_usedassubrepo', False)
        and remotephases    # server supports phases
        and pushop.cgresult is None # nothing was pushed
        and remotephases.get('publishing', False)):
        # When:
        # - this is a subrepo push
        # - and remote support phase
        # - and no changeset was pushed
        # - and remote is publishing
        # We may be in issue 3871 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        remotephases = {'publishing': 'True'}
    if not remotephases: # old server or public only reply from non-publishing
        _localphasemove(pushop, cheads)
        # don't push any phase data as there is nothing to push
    else:
        ana = phases.analyzeremotephases(pushop.repo, cheads,
                                         remotephases)
        pheads, droots = ana
        ### Apply remote phase on local
        if remotephases.get('publishing', False):
            _localphasemove(pushop, cheads)
        else: # publish = False
            _localphasemove(pushop, pheads)
            _localphasemove(pushop, cheads, phases.draft)
        ### Apply local phase on remote

        if pushop.cgresult:
            if 'phases' in pushop.stepsdone:
                # phases already pushed though bundle2
                return
            outdated = pushop.outdatedphases
        else:
            outdated = pushop.fallbackoutdatedphases

        pushop.stepsdone.add('phases')

        # filter heads already turned public by the push
        outdated = [c for c in outdated if c.node() not in pheads]
        # fallback to independent pushkey command
        for newremotehead in outdated:
            r = pushop.remote.pushkey('phases',
                                      newremotehead.hex(),
                                      str(phases.draft),
                                      str(phases.public))
            if not r:
                pushop.ui.warn(_('updating %s to public failed!\n')
                               % newremotehead)
991
991
def _localphasemove(pushop, nodes, phase=phases.public):
    """move <nodes> to <phase> in the local source repo"""
    if pushop.trmanager:
        phases.advanceboundary(pushop.repo,
                               pushop.trmanager.transaction(),
                               phase,
                               nodes)
        return
    # repo is not locked, do not change any phases!
    # Inform the user that phases should have been moved when applicable.
    skipped = [node for node in nodes
               if phase < pushop.repo[node].phase()]
    phasestr = phases.phasenames[phase]
    if skipped:
        pushop.ui.status(_('cannot lock source repo, skipping '
                           'local %s phase update\n') % phasestr)
1008
1008
def _pushobsolete(pushop):
    """utility function to push obsolete markers to a remote"""
    if 'obsmarkers' in pushop.stepsdone:
        return
    repo = pushop.repo
    remote = pushop.remote
    pushop.stepsdone.add('obsmarkers')
    if not pushop.outobsmarkers:
        return
    pushop.ui.debug('try to push obsolete markers to remote\n')
    remotedata = obsolete._pushkeyescape(sorted(pushop.outobsmarkers))
    # reverse sort to ensure we end with dump0
    results = [remote.pushkey('obsolete', key, '', remotedata[key])
               for key in sorted(remotedata, reverse=True)]
    if not all(results):
        repo.ui.warn(_('failed to push some obsolete markers!\n'))
1027
1027
1028 def _pushbookmark(pushop):
1028 def _pushbookmark(pushop):
1029 """Update bookmark position on remote"""
1029 """Update bookmark position on remote"""
1030 if pushop.cgresult == 0 or 'bookmarks' in pushop.stepsdone:
1030 if pushop.cgresult == 0 or 'bookmarks' in pushop.stepsdone:
1031 return
1031 return
1032 pushop.stepsdone.add('bookmarks')
1032 pushop.stepsdone.add('bookmarks')
1033 ui = pushop.ui
1033 ui = pushop.ui
1034 remote = pushop.remote
1034 remote = pushop.remote
1035
1035
1036 for b, old, new in pushop.outbookmarks:
1036 for b, old, new in pushop.outbookmarks:
1037 action = 'update'
1037 action = 'update'
1038 if not old:
1038 if not old:
1039 action = 'export'
1039 action = 'export'
1040 elif not new:
1040 elif not new:
1041 action = 'delete'
1041 action = 'delete'
1042 if remote.pushkey('bookmarks', b, old, new):
1042 if remote.pushkey('bookmarks', b, old, new):
1043 ui.status(bookmsgmap[action][0] % b)
1043 ui.status(bookmsgmap[action][0] % b)
1044 else:
1044 else:
1045 ui.warn(bookmsgmap[action][1] % b)
1045 ui.warn(bookmsgmap[action][1] % b)
1046 # discovery can have set the value form invalid entry
1046 # discovery can have set the value form invalid entry
1047 if pushop.bkresult is not None:
1047 if pushop.bkresult is not None:
1048 pushop.bkresult = 1
1048 pushop.bkresult = 1
1049
1049
1050 class pulloperation(object):
1050 class pulloperation(object):
1051 """A object that represent a single pull operation
1051 """A object that represent a single pull operation
1052
1052
1053 It purpose is to carry pull related state and very common operation.
1053 It purpose is to carry pull related state and very common operation.
1054
1054
1055 A new should be created at the beginning of each pull and discarded
1055 A new should be created at the beginning of each pull and discarded
1056 afterward.
1056 afterward.
1057 """
1057 """
1058
1058
    def __init__(self, repo, remote, heads=None, force=False, bookmarks=(),
                 remotebookmarks=None, streamclonerequested=None):
        """Initialize pull state; discovery-related fields start as None
        and are filled in as the pull proceeds."""
        # repo we pull into
        self.repo = repo
        # repo we pull from
        self.remote = remote
        # revision we try to pull (None is "all")
        self.heads = heads
        # bookmark pulled explicitly
        self.explicitbookmarks = [repo._bookmarks.expandname(bookmark)
                                  for bookmark in bookmarks]
        # do we force pull?
        self.force = force
        # whether a streaming clone was requested
        self.streamclonerequested = streamclonerequested
        # transaction manager
        self.trmanager = None
        # set of common changeset between local and remote before pull
        self.common = None
        # set of pulled head
        self.rheads = None
        # list of missing changeset to fetch remotely
        self.fetch = None
        # remote bookmarks data
        self.remotebookmarks = remotebookmarks
        # result of changegroup pulling (used as return code by pull)
        self.cgresult = None
        # list of step already done
        self.stepsdone = set()
        # Whether we attempted a clone from pre-generated bundles.
        self.clonebundleattempted = False
1090
1090
1091 @util.propertycache
1091 @util.propertycache
1092 def pulledsubset(self):
1092 def pulledsubset(self):
1093 """heads of the set of changeset target by the pull"""
1093 """heads of the set of changeset target by the pull"""
1094 # compute target subset
1094 # compute target subset
1095 if self.heads is None:
1095 if self.heads is None:
1096 # We pulled every thing possible
1096 # We pulled every thing possible
1097 # sync on everything common
1097 # sync on everything common
1098 c = set(self.common)
1098 c = set(self.common)
1099 ret = list(self.common)
1099 ret = list(self.common)
1100 for n in self.rheads:
1100 for n in self.rheads:
1101 if n not in c:
1101 if n not in c:
1102 ret.append(n)
1102 ret.append(n)
1103 return ret
1103 return ret
1104 else:
1104 else:
1105 # We pulled a specific subset
1105 # We pulled a specific subset
1106 # sync on this subset
1106 # sync on this subset
1107 return self.heads
1107 return self.heads
1108
1108
1109 @util.propertycache
1109 @util.propertycache
1110 def canusebundle2(self):
1110 def canusebundle2(self):
1111 return not _forcebundle1(self)
1111 return not _forcebundle1(self)
1112
1112
1113 @util.propertycache
1113 @util.propertycache
1114 def remotebundle2caps(self):
1114 def remotebundle2caps(self):
1115 return bundle2.bundle2caps(self.remote)
1115 return bundle2.bundle2caps(self.remote)
1116
1116
1117 def gettransaction(self):
1117 def gettransaction(self):
1118 # deprecated; talk to trmanager directly
1118 # deprecated; talk to trmanager directly
1119 return self.trmanager.transaction()
1119 return self.trmanager.transaction()
1120
1120
1121 class transactionmanager(object):
1121 class transactionmanager(object):
1122 """An object to manage the life cycle of a transaction
1122 """An object to manage the life cycle of a transaction
1123
1123
1124 It creates the transaction on demand and calls the appropriate hooks when
1124 It creates the transaction on demand and calls the appropriate hooks when
1125 closing the transaction."""
1125 closing the transaction."""
1126 def __init__(self, repo, source, url):
1126 def __init__(self, repo, source, url):
1127 self.repo = repo
1127 self.repo = repo
1128 self.source = source
1128 self.source = source
1129 self.url = url
1129 self.url = url
1130 self._tr = None
1130 self._tr = None
1131
1131
1132 def transaction(self):
1132 def transaction(self):
1133 """Return an open transaction object, constructing if necessary"""
1133 """Return an open transaction object, constructing if necessary"""
1134 if not self._tr:
1134 if not self._tr:
1135 trname = '%s\n%s' % (self.source, util.hidepassword(self.url))
1135 trname = '%s\n%s' % (self.source, util.hidepassword(self.url))
1136 self._tr = self.repo.transaction(trname)
1136 self._tr = self.repo.transaction(trname)
1137 self._tr.hookargs['source'] = self.source
1137 self._tr.hookargs['source'] = self.source
1138 self._tr.hookargs['url'] = self.url
1138 self._tr.hookargs['url'] = self.url
1139 return self._tr
1139 return self._tr
1140
1140
1141 def close(self):
1141 def close(self):
1142 """close transaction if created"""
1142 """close transaction if created"""
1143 if self._tr is not None:
1143 if self._tr is not None:
1144 self._tr.close()
1144 self._tr.close()
1145
1145
1146 def release(self):
1146 def release(self):
1147 """release transaction if created"""
1147 """release transaction if created"""
1148 if self._tr is not None:
1148 if self._tr is not None:
1149 self._tr.release()
1149 self._tr.release()
1150
1150
1151 def pull(repo, remote, heads=None, force=False, bookmarks=(), opargs=None,
1151 def pull(repo, remote, heads=None, force=False, bookmarks=(), opargs=None,
1152 streamclonerequested=None):
1152 streamclonerequested=None):
1153 """Fetch repository data from a remote.
1153 """Fetch repository data from a remote.
1154
1154
1155 This is the main function used to retrieve data from a remote repository.
1155 This is the main function used to retrieve data from a remote repository.
1156
1156
1157 ``repo`` is the local repository to clone into.
1157 ``repo`` is the local repository to clone into.
1158 ``remote`` is a peer instance.
1158 ``remote`` is a peer instance.
1159 ``heads`` is an iterable of revisions we want to pull. ``None`` (the
1159 ``heads`` is an iterable of revisions we want to pull. ``None`` (the
1160 default) means to pull everything from the remote.
1160 default) means to pull everything from the remote.
1161 ``bookmarks`` is an iterable of bookmarks requesting to be pulled. By
1161 ``bookmarks`` is an iterable of bookmarks requesting to be pulled. By
1162 default, all remote bookmarks are pulled.
1162 default, all remote bookmarks are pulled.
1163 ``opargs`` are additional keyword arguments to pass to ``pulloperation``
1163 ``opargs`` are additional keyword arguments to pass to ``pulloperation``
1164 initialization.
1164 initialization.
1165 ``streamclonerequested`` is a boolean indicating whether a "streaming
1165 ``streamclonerequested`` is a boolean indicating whether a "streaming
1166 clone" is requested. A "streaming clone" is essentially a raw file copy
1166 clone" is requested. A "streaming clone" is essentially a raw file copy
1167 of revlogs from the server. This only works when the local repository is
1167 of revlogs from the server. This only works when the local repository is
1168 empty. The default value of ``None`` means to respect the server
1168 empty. The default value of ``None`` means to respect the server
1169 configuration for preferring stream clones.
1169 configuration for preferring stream clones.
1170
1170
1171 Returns the ``pulloperation`` created for this pull.
1171 Returns the ``pulloperation`` created for this pull.
1172 """
1172 """
1173 if opargs is None:
1173 if opargs is None:
1174 opargs = {}
1174 opargs = {}
1175 pullop = pulloperation(repo, remote, heads, force, bookmarks=bookmarks,
1175 pullop = pulloperation(repo, remote, heads, force, bookmarks=bookmarks,
1176 streamclonerequested=streamclonerequested, **opargs)
1176 streamclonerequested=streamclonerequested, **opargs)
1177 if pullop.remote.local():
1177 if pullop.remote.local():
1178 missing = set(pullop.remote.requirements) - pullop.repo.supported
1178 missing = set(pullop.remote.requirements) - pullop.repo.supported
1179 if missing:
1179 if missing:
1180 msg = _("required features are not"
1180 msg = _("required features are not"
1181 " supported in the destination:"
1181 " supported in the destination:"
1182 " %s") % (', '.join(sorted(missing)))
1182 " %s") % (', '.join(sorted(missing)))
1183 raise error.Abort(msg)
1183 raise error.Abort(msg)
1184
1184
1185 lock = pullop.repo.lock()
1185 lock = pullop.repo.lock()
1186 try:
1186 try:
1187 pullop.trmanager = transactionmanager(repo, 'pull', remote.url())
1187 pullop.trmanager = transactionmanager(repo, 'pull', remote.url())
1188 streamclone.maybeperformlegacystreamclone(pullop)
1188 streamclone.maybeperformlegacystreamclone(pullop)
1189 # This should ideally be in _pullbundle2(). However, it needs to run
1189 # This should ideally be in _pullbundle2(). However, it needs to run
1190 # before discovery to avoid extra work.
1190 # before discovery to avoid extra work.
1191 _maybeapplyclonebundle(pullop)
1191 _maybeapplyclonebundle(pullop)
1192 _pulldiscovery(pullop)
1192 _pulldiscovery(pullop)
1193 if pullop.canusebundle2:
1193 if pullop.canusebundle2:
1194 _pullbundle2(pullop)
1194 _pullbundle2(pullop)
1195 _pullchangeset(pullop)
1195 _pullchangeset(pullop)
1196 _pullphase(pullop)
1196 _pullphase(pullop)
1197 _pullbookmarks(pullop)
1197 _pullbookmarks(pullop)
1198 _pullobsolete(pullop)
1198 _pullobsolete(pullop)
1199 pullop.trmanager.close()
1199 pullop.trmanager.close()
1200 finally:
1200 finally:
1201 pullop.trmanager.release()
1201 pullop.trmanager.release()
1202 lock.release()
1202 lock.release()
1203
1203
1204 return pullop
1204 return pullop
1205
1205
1206 # list of steps to perform discovery before pull
1206 # list of steps to perform discovery before pull
1207 pulldiscoveryorder = []
1207 pulldiscoveryorder = []
1208
1208
1209 # Mapping between step name and function
1209 # Mapping between step name and function
1210 #
1210 #
1211 # This exists to help extensions wrap steps if necessary
1211 # This exists to help extensions wrap steps if necessary
1212 pulldiscoverymapping = {}
1212 pulldiscoverymapping = {}
1213
1213
1214 def pulldiscovery(stepname):
1214 def pulldiscovery(stepname):
1215 """decorator for function performing discovery before pull
1215 """decorator for function performing discovery before pull
1216
1216
1217 The function is added to the step -> function mapping and appended to the
1217 The function is added to the step -> function mapping and appended to the
1218 list of steps. Beware that decorated function will be added in order (this
1218 list of steps. Beware that decorated function will be added in order (this
1219 may matter).
1219 may matter).
1220
1220
1221 You can only use this decorator for a new step, if you want to wrap a step
1221 You can only use this decorator for a new step, if you want to wrap a step
1222 from an extension, change the pulldiscovery dictionary directly."""
1222 from an extension, change the pulldiscovery dictionary directly."""
1223 def dec(func):
1223 def dec(func):
1224 assert stepname not in pulldiscoverymapping
1224 assert stepname not in pulldiscoverymapping
1225 pulldiscoverymapping[stepname] = func
1225 pulldiscoverymapping[stepname] = func
1226 pulldiscoveryorder.append(stepname)
1226 pulldiscoveryorder.append(stepname)
1227 return func
1227 return func
1228 return dec
1228 return dec
1229
1229
1230 def _pulldiscovery(pullop):
1230 def _pulldiscovery(pullop):
1231 """Run all discovery steps"""
1231 """Run all discovery steps"""
1232 for stepname in pulldiscoveryorder:
1232 for stepname in pulldiscoveryorder:
1233 step = pulldiscoverymapping[stepname]
1233 step = pulldiscoverymapping[stepname]
1234 step(pullop)
1234 step(pullop)
1235
1235
1236 @pulldiscovery('b1:bookmarks')
1236 @pulldiscovery('b1:bookmarks')
1237 def _pullbookmarkbundle1(pullop):
1237 def _pullbookmarkbundle1(pullop):
1238 """fetch bookmark data in bundle1 case
1238 """fetch bookmark data in bundle1 case
1239
1239
1240 If not using bundle2, we have to fetch bookmarks before changeset
1240 If not using bundle2, we have to fetch bookmarks before changeset
1241 discovery to reduce the chance and impact of race conditions."""
1241 discovery to reduce the chance and impact of race conditions."""
1242 if pullop.remotebookmarks is not None:
1242 if pullop.remotebookmarks is not None:
1243 return
1243 return
1244 if pullop.canusebundle2 and 'listkeys' in pullop.remotebundle2caps:
1244 if pullop.canusebundle2 and 'listkeys' in pullop.remotebundle2caps:
1245 # all known bundle2 servers now support listkeys, but lets be nice with
1245 # all known bundle2 servers now support listkeys, but lets be nice with
1246 # new implementation.
1246 # new implementation.
1247 return
1247 return
1248 pullop.remotebookmarks = pullop.remote.listkeys('bookmarks')
1248 pullop.remotebookmarks = pullop.remote.listkeys('bookmarks')
1249
1249
1250
1250
1251 @pulldiscovery('changegroup')
1251 @pulldiscovery('changegroup')
1252 def _pulldiscoverychangegroup(pullop):
1252 def _pulldiscoverychangegroup(pullop):
1253 """discovery phase for the pull
1253 """discovery phase for the pull
1254
1254
1255 Current handle changeset discovery only, will change handle all discovery
1255 Current handle changeset discovery only, will change handle all discovery
1256 at some point."""
1256 at some point."""
1257 tmp = discovery.findcommonincoming(pullop.repo,
1257 tmp = discovery.findcommonincoming(pullop.repo,
1258 pullop.remote,
1258 pullop.remote,
1259 heads=pullop.heads,
1259 heads=pullop.heads,
1260 force=pullop.force)
1260 force=pullop.force)
1261 common, fetch, rheads = tmp
1261 common, fetch, rheads = tmp
1262 nm = pullop.repo.unfiltered().changelog.nodemap
1262 nm = pullop.repo.unfiltered().changelog.nodemap
1263 if fetch and rheads:
1263 if fetch and rheads:
1264 # If a remote heads in filtered locally, lets drop it from the unknown
1264 # If a remote heads in filtered locally, lets drop it from the unknown
1265 # remote heads and put in back in common.
1265 # remote heads and put in back in common.
1266 #
1266 #
1267 # This is a hackish solution to catch most of "common but locally
1267 # This is a hackish solution to catch most of "common but locally
1268 # hidden situation". We do not performs discovery on unfiltered
1268 # hidden situation". We do not performs discovery on unfiltered
1269 # repository because it end up doing a pathological amount of round
1269 # repository because it end up doing a pathological amount of round
1270 # trip for w huge amount of changeset we do not care about.
1270 # trip for w huge amount of changeset we do not care about.
1271 #
1271 #
1272 # If a set of such "common but filtered" changeset exist on the server
1272 # If a set of such "common but filtered" changeset exist on the server
1273 # but are not including a remote heads, we'll not be able to detect it,
1273 # but are not including a remote heads, we'll not be able to detect it,
1274 scommon = set(common)
1274 scommon = set(common)
1275 filteredrheads = []
1275 filteredrheads = []
1276 for n in rheads:
1276 for n in rheads:
1277 if n in nm:
1277 if n in nm:
1278 if n not in scommon:
1278 if n not in scommon:
1279 common.append(n)
1279 common.append(n)
1280 else:
1280 else:
1281 filteredrheads.append(n)
1281 filteredrheads.append(n)
1282 if not filteredrheads:
1282 if not filteredrheads:
1283 fetch = []
1283 fetch = []
1284 rheads = filteredrheads
1284 rheads = filteredrheads
1285 pullop.common = common
1285 pullop.common = common
1286 pullop.fetch = fetch
1286 pullop.fetch = fetch
1287 pullop.rheads = rheads
1287 pullop.rheads = rheads
1288
1288
1289 def _pullbundle2(pullop):
1289 def _pullbundle2(pullop):
1290 """pull data using bundle2
1290 """pull data using bundle2
1291
1291
1292 For now, the only supported data are changegroup."""
1292 For now, the only supported data are changegroup."""
1293 kwargs = {'bundlecaps': caps20to10(pullop.repo)}
1293 kwargs = {'bundlecaps': caps20to10(pullop.repo)}
1294
1294
1295 streaming, streamreqs = streamclone.canperformstreamclone(pullop)
1295 streaming, streamreqs = streamclone.canperformstreamclone(pullop)
1296
1296
1297 # pulling changegroup
1297 # pulling changegroup
1298 pullop.stepsdone.add('changegroup')
1298 pullop.stepsdone.add('changegroup')
1299
1299
1300 kwargs['common'] = pullop.common
1300 kwargs['common'] = pullop.common
1301 kwargs['heads'] = pullop.heads or pullop.rheads
1301 kwargs['heads'] = pullop.heads or pullop.rheads
1302 kwargs['cg'] = pullop.fetch
1302 kwargs['cg'] = pullop.fetch
1303 if 'listkeys' in pullop.remotebundle2caps:
1303 if 'listkeys' in pullop.remotebundle2caps:
1304 kwargs['listkeys'] = ['phases']
1304 kwargs['listkeys'] = ['phases']
1305 if pullop.remotebookmarks is None:
1305 if pullop.remotebookmarks is None:
1306 # make sure to always includes bookmark data when migrating
1306 # make sure to always includes bookmark data when migrating
1307 # `hg incoming --bundle` to using this function.
1307 # `hg incoming --bundle` to using this function.
1308 kwargs['listkeys'].append('bookmarks')
1308 kwargs['listkeys'].append('bookmarks')
1309
1309
1310 # If this is a full pull / clone and the server supports the clone bundles
1310 # If this is a full pull / clone and the server supports the clone bundles
1311 # feature, tell the server whether we attempted a clone bundle. The
1311 # feature, tell the server whether we attempted a clone bundle. The
1312 # presence of this flag indicates the client supports clone bundles. This
1312 # presence of this flag indicates the client supports clone bundles. This
1313 # will enable the server to treat clients that support clone bundles
1313 # will enable the server to treat clients that support clone bundles
1314 # differently from those that don't.
1314 # differently from those that don't.
1315 if (pullop.remote.capable('clonebundles')
1315 if (pullop.remote.capable('clonebundles')
1316 and pullop.heads is None and list(pullop.common) == [nullid]):
1316 and pullop.heads is None and list(pullop.common) == [nullid]):
1317 kwargs['cbattempted'] = pullop.clonebundleattempted
1317 kwargs['cbattempted'] = pullop.clonebundleattempted
1318
1318
1319 if streaming:
1319 if streaming:
1320 pullop.repo.ui.status(_('streaming all changes\n'))
1320 pullop.repo.ui.status(_('streaming all changes\n'))
1321 elif not pullop.fetch:
1321 elif not pullop.fetch:
1322 pullop.repo.ui.status(_("no changes found\n"))
1322 pullop.repo.ui.status(_("no changes found\n"))
1323 pullop.cgresult = 0
1323 pullop.cgresult = 0
1324 else:
1324 else:
1325 if pullop.heads is None and list(pullop.common) == [nullid]:
1325 if pullop.heads is None and list(pullop.common) == [nullid]:
1326 pullop.repo.ui.status(_("requesting all changes\n"))
1326 pullop.repo.ui.status(_("requesting all changes\n"))
1327 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1327 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1328 remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
1328 remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
1329 if obsolete.commonversion(remoteversions) is not None:
1329 if obsolete.commonversion(remoteversions) is not None:
1330 kwargs['obsmarkers'] = True
1330 kwargs['obsmarkers'] = True
1331 pullop.stepsdone.add('obsmarkers')
1331 pullop.stepsdone.add('obsmarkers')
1332 _pullbundle2extraprepare(pullop, kwargs)
1332 _pullbundle2extraprepare(pullop, kwargs)
1333 bundle = pullop.remote.getbundle('pull', **kwargs)
1333 bundle = pullop.remote.getbundle('pull', **kwargs)
1334 try:
1334 try:
1335 op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
1335 op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
1336 except error.BundleValueError as exc:
1336 except error.BundleValueError as exc:
1337 raise error.Abort(_('missing support for %s') % exc)
1337 raise error.Abort(_('missing support for %s') % exc)
1338
1338
1339 if pullop.fetch:
1339 if pullop.fetch:
1340 results = [cg['return'] for cg in op.records['changegroup']]
1340 results = [cg['return'] for cg in op.records['changegroup']]
1341 pullop.cgresult = changegroup.combineresults(results)
1341 pullop.cgresult = changegroup.combineresults(results)
1342
1342
1343 # processing phases change
1343 # processing phases change
1344 for namespace, value in op.records['listkeys']:
1344 for namespace, value in op.records['listkeys']:
1345 if namespace == 'phases':
1345 if namespace == 'phases':
1346 _pullapplyphases(pullop, value)
1346 _pullapplyphases(pullop, value)
1347
1347
1348 # processing bookmark update
1348 # processing bookmark update
1349 for namespace, value in op.records['listkeys']:
1349 for namespace, value in op.records['listkeys']:
1350 if namespace == 'bookmarks':
1350 if namespace == 'bookmarks':
1351 pullop.remotebookmarks = value
1351 pullop.remotebookmarks = value
1352
1352
1353 # bookmark data were either already there or pulled in the bundle
1353 # bookmark data were either already there or pulled in the bundle
1354 if pullop.remotebookmarks is not None:
1354 if pullop.remotebookmarks is not None:
1355 _pullbookmarks(pullop)
1355 _pullbookmarks(pullop)
1356
1356
1357 def _pullbundle2extraprepare(pullop, kwargs):
1357 def _pullbundle2extraprepare(pullop, kwargs):
1358 """hook function so that extensions can extend the getbundle call"""
1358 """hook function so that extensions can extend the getbundle call"""
1359 pass
1359 pass
1360
1360
1361 def _pullchangeset(pullop):
1361 def _pullchangeset(pullop):
1362 """pull changeset from unbundle into the local repo"""
1362 """pull changeset from unbundle into the local repo"""
1363 # We delay the open of the transaction as late as possible so we
1363 # We delay the open of the transaction as late as possible so we
1364 # don't open transaction for nothing or you break future useful
1364 # don't open transaction for nothing or you break future useful
1365 # rollback call
1365 # rollback call
1366 if 'changegroup' in pullop.stepsdone:
1366 if 'changegroup' in pullop.stepsdone:
1367 return
1367 return
1368 pullop.stepsdone.add('changegroup')
1368 pullop.stepsdone.add('changegroup')
1369 if not pullop.fetch:
1369 if not pullop.fetch:
1370 pullop.repo.ui.status(_("no changes found\n"))
1370 pullop.repo.ui.status(_("no changes found\n"))
1371 pullop.cgresult = 0
1371 pullop.cgresult = 0
1372 return
1372 return
1373 pullop.gettransaction()
1373 pullop.gettransaction()
1374 if pullop.heads is None and list(pullop.common) == [nullid]:
1374 if pullop.heads is None and list(pullop.common) == [nullid]:
1375 pullop.repo.ui.status(_("requesting all changes\n"))
1375 pullop.repo.ui.status(_("requesting all changes\n"))
1376 elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
1376 elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
1377 # issue1320, avoid a race if remote changed after discovery
1377 # issue1320, avoid a race if remote changed after discovery
1378 pullop.heads = pullop.rheads
1378 pullop.heads = pullop.rheads
1379
1379
1380 if pullop.remote.capable('getbundle'):
1380 if pullop.remote.capable('getbundle'):
1381 # TODO: get bundlecaps from remote
1381 # TODO: get bundlecaps from remote
1382 cg = pullop.remote.getbundle('pull', common=pullop.common,
1382 cg = pullop.remote.getbundle('pull', common=pullop.common,
1383 heads=pullop.heads or pullop.rheads)
1383 heads=pullop.heads or pullop.rheads)
1384 elif pullop.heads is None:
1384 elif pullop.heads is None:
1385 cg = pullop.remote.changegroup(pullop.fetch, 'pull')
1385 cg = pullop.remote.changegroup(pullop.fetch, 'pull')
1386 elif not pullop.remote.capable('changegroupsubset'):
1386 elif not pullop.remote.capable('changegroupsubset'):
1387 raise error.Abort(_("partial pull cannot be done because "
1387 raise error.Abort(_("partial pull cannot be done because "
1388 "other repository doesn't support "
1388 "other repository doesn't support "
1389 "changegroupsubset."))
1389 "changegroupsubset."))
1390 else:
1390 else:
1391 cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
1391 cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
1392 pullop.cgresult = cg.apply(pullop.repo, 'pull', pullop.remote.url())
1392 pullop.cgresult = cg.apply(pullop.repo, 'pull', pullop.remote.url())
1393
1393
1394 def _pullphase(pullop):
1394 def _pullphase(pullop):
1395 # Get remote phases data from remote
1395 # Get remote phases data from remote
1396 if 'phases' in pullop.stepsdone:
1396 if 'phases' in pullop.stepsdone:
1397 return
1397 return
1398 remotephases = pullop.remote.listkeys('phases')
1398 remotephases = pullop.remote.listkeys('phases')
1399 _pullapplyphases(pullop, remotephases)
1399 _pullapplyphases(pullop, remotephases)
1400
1400
1401 def _pullapplyphases(pullop, remotephases):
1401 def _pullapplyphases(pullop, remotephases):
1402 """apply phase movement from observed remote state"""
1402 """apply phase movement from observed remote state"""
1403 if 'phases' in pullop.stepsdone:
1403 if 'phases' in pullop.stepsdone:
1404 return
1404 return
1405 pullop.stepsdone.add('phases')
1405 pullop.stepsdone.add('phases')
1406 publishing = bool(remotephases.get('publishing', False))
1406 publishing = bool(remotephases.get('publishing', False))
1407 if remotephases and not publishing:
1407 if remotephases and not publishing:
1408 # remote is new and unpublishing
1408 # remote is new and unpublishing
1409 pheads, _dr = phases.analyzeremotephases(pullop.repo,
1409 pheads, _dr = phases.analyzeremotephases(pullop.repo,
1410 pullop.pulledsubset,
1410 pullop.pulledsubset,
1411 remotephases)
1411 remotephases)
1412 dheads = pullop.pulledsubset
1412 dheads = pullop.pulledsubset
1413 else:
1413 else:
1414 # Remote is old or publishing all common changesets
1414 # Remote is old or publishing all common changesets
1415 # should be seen as public
1415 # should be seen as public
1416 pheads = pullop.pulledsubset
1416 pheads = pullop.pulledsubset
1417 dheads = []
1417 dheads = []
1418 unfi = pullop.repo.unfiltered()
1418 unfi = pullop.repo.unfiltered()
1419 phase = unfi._phasecache.phase
1419 phase = unfi._phasecache.phase
1420 rev = unfi.changelog.nodemap.get
1420 rev = unfi.changelog.nodemap.get
1421 public = phases.public
1421 public = phases.public
1422 draft = phases.draft
1422 draft = phases.draft
1423
1423
1424 # exclude changesets already public locally and update the others
1424 # exclude changesets already public locally and update the others
1425 pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
1425 pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
1426 if pheads:
1426 if pheads:
1427 tr = pullop.gettransaction()
1427 tr = pullop.gettransaction()
1428 phases.advanceboundary(pullop.repo, tr, public, pheads)
1428 phases.advanceboundary(pullop.repo, tr, public, pheads)
1429
1429
1430 # exclude changesets already draft locally and update the others
1430 # exclude changesets already draft locally and update the others
1431 dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
1431 dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
1432 if dheads:
1432 if dheads:
1433 tr = pullop.gettransaction()
1433 tr = pullop.gettransaction()
1434 phases.advanceboundary(pullop.repo, tr, draft, dheads)
1434 phases.advanceboundary(pullop.repo, tr, draft, dheads)
1435
1435
1436 def _pullbookmarks(pullop):
1436 def _pullbookmarks(pullop):
1437 """process the remote bookmark information to update the local one"""
1437 """process the remote bookmark information to update the local one"""
1438 if 'bookmarks' in pullop.stepsdone:
1438 if 'bookmarks' in pullop.stepsdone:
1439 return
1439 return
1440 pullop.stepsdone.add('bookmarks')
1440 pullop.stepsdone.add('bookmarks')
1441 repo = pullop.repo
1441 repo = pullop.repo
1442 remotebookmarks = pullop.remotebookmarks
1442 remotebookmarks = pullop.remotebookmarks
1443 bookmod.updatefromremote(repo.ui, repo, remotebookmarks,
1443 bookmod.updatefromremote(repo.ui, repo, remotebookmarks,
1444 pullop.remote.url(),
1444 pullop.remote.url(),
1445 pullop.gettransaction,
1445 pullop.gettransaction,
1446 explicit=pullop.explicitbookmarks)
1446 explicit=pullop.explicitbookmarks)
1447
1447
1448 def _pullobsolete(pullop):
1448 def _pullobsolete(pullop):
1449 """utility function to pull obsolete markers from a remote
1449 """utility function to pull obsolete markers from a remote
1450
1450
1451 The `gettransaction` is function that return the pull transaction, creating
1451 The `gettransaction` is function that return the pull transaction, creating
1452 one if necessary. We return the transaction to inform the calling code that
1452 one if necessary. We return the transaction to inform the calling code that
1453 a new transaction have been created (when applicable).
1453 a new transaction have been created (when applicable).
1454
1454
1455 Exists mostly to allow overriding for experimentation purpose"""
1455 Exists mostly to allow overriding for experimentation purpose"""
1456 if 'obsmarkers' in pullop.stepsdone:
1456 if 'obsmarkers' in pullop.stepsdone:
1457 return
1457 return
1458 pullop.stepsdone.add('obsmarkers')
1458 pullop.stepsdone.add('obsmarkers')
1459 tr = None
1459 tr = None
1460 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1460 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1461 pullop.repo.ui.debug('fetching remote obsolete markers\n')
1461 pullop.repo.ui.debug('fetching remote obsolete markers\n')
1462 remoteobs = pullop.remote.listkeys('obsolete')
1462 remoteobs = pullop.remote.listkeys('obsolete')
1463 if 'dump0' in remoteobs:
1463 if 'dump0' in remoteobs:
1464 tr = pullop.gettransaction()
1464 tr = pullop.gettransaction()
1465 markers = []
1465 markers = []
1466 for key in sorted(remoteobs, reverse=True):
1466 for key in sorted(remoteobs, reverse=True):
1467 if key.startswith('dump'):
1467 if key.startswith('dump'):
1468 data = base85.b85decode(remoteobs[key])
1468 data = base85.b85decode(remoteobs[key])
1469 version, newmarks = obsolete._readmarkers(data)
1469 version, newmarks = obsolete._readmarkers(data)
1470 markers += newmarks
1470 markers += newmarks
1471 if markers:
1471 if markers:
1472 pullop.repo.obsstore.add(tr, markers)
1472 pullop.repo.obsstore.add(tr, markers)
1473 pullop.repo.invalidatevolatilesets()
1473 pullop.repo.invalidatevolatilesets()
1474 return tr
1474 return tr
1475
1475
1476 def caps20to10(repo):
1476 def caps20to10(repo):
1477 """return a set with appropriate options to use bundle20 during getbundle"""
1477 """return a set with appropriate options to use bundle20 during getbundle"""
1478 caps = set(['HG20'])
1478 caps = set(['HG20'])
1479 capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo))
1479 capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo))
1480 caps.add('bundle2=' + urlreq.quote(capsblob))
1480 caps.add('bundle2=' + urlreq.quote(capsblob))
1481 return caps
1481 return caps
1482
1482
1483 # List of names of steps to perform for a bundle2 for getbundle, order matters.
1483 # List of names of steps to perform for a bundle2 for getbundle, order matters.
1484 getbundle2partsorder = []
1484 getbundle2partsorder = []
1485
1485
1486 # Mapping between step name and function
1486 # Mapping between step name and function
1487 #
1487 #
1488 # This exists to help extensions wrap steps if necessary
1488 # This exists to help extensions wrap steps if necessary
1489 getbundle2partsmapping = {}
1489 getbundle2partsmapping = {}
1490
1490
1491 def getbundle2partsgenerator(stepname, idx=None):
1491 def getbundle2partsgenerator(stepname, idx=None):
1492 """decorator for function generating bundle2 part for getbundle
1492 """decorator for function generating bundle2 part for getbundle
1493
1493
1494 The function is added to the step -> function mapping and appended to the
1494 The function is added to the step -> function mapping and appended to the
1495 list of steps. Beware that decorated functions will be added in order
1495 list of steps. Beware that decorated functions will be added in order
1496 (this may matter).
1496 (this may matter).
1497
1497
1498 You can only use this decorator for new steps, if you want to wrap a step
1498 You can only use this decorator for new steps, if you want to wrap a step
1499 from an extension, attack the getbundle2partsmapping dictionary directly."""
1499 from an extension, attack the getbundle2partsmapping dictionary directly."""
1500 def dec(func):
1500 def dec(func):
1501 assert stepname not in getbundle2partsmapping
1501 assert stepname not in getbundle2partsmapping
1502 getbundle2partsmapping[stepname] = func
1502 getbundle2partsmapping[stepname] = func
1503 if idx is None:
1503 if idx is None:
1504 getbundle2partsorder.append(stepname)
1504 getbundle2partsorder.append(stepname)
1505 else:
1505 else:
1506 getbundle2partsorder.insert(idx, stepname)
1506 getbundle2partsorder.insert(idx, stepname)
1507 return func
1507 return func
1508 return dec
1508 return dec
1509
1509
1510 def bundle2requested(bundlecaps):
1510 def bundle2requested(bundlecaps):
1511 if bundlecaps is not None:
1511 if bundlecaps is not None:
1512 return any(cap.startswith('HG2') for cap in bundlecaps)
1512 return any(cap.startswith('HG2') for cap in bundlecaps)
1513 return False
1513 return False
1514
1514
1515 def getbundle(repo, source, heads=None, common=None, bundlecaps=None,
1515 def getbundle(repo, source, heads=None, common=None, bundlecaps=None,
1516 **kwargs):
1516 **kwargs):
1517 """return a full bundle (with potentially multiple kind of parts)
1517 """return a full bundle (with potentially multiple kind of parts)
1518
1518
1519 Could be a bundle HG10 or a bundle HG20 depending on bundlecaps
1519 Could be a bundle HG10 or a bundle HG20 depending on bundlecaps
1520 passed. For now, the bundle can contain only changegroup, but this will
1520 passed. For now, the bundle can contain only changegroup, but this will
1521 changes when more part type will be available for bundle2.
1521 changes when more part type will be available for bundle2.
1522
1522
1523 This is different from changegroup.getchangegroup that only returns an HG10
1523 This is different from changegroup.getchangegroup that only returns an HG10
1524 changegroup bundle. They may eventually get reunited in the future when we
1524 changegroup bundle. They may eventually get reunited in the future when we
1525 have a clearer idea of the API we what to query different data.
1525 have a clearer idea of the API we what to query different data.
1526
1526
1527 The implementation is at a very early stage and will get massive rework
1527 The implementation is at a very early stage and will get massive rework
1528 when the API of bundle is refined.
1528 when the API of bundle is refined.
1529 """
1529 """
1530 usebundle2 = bundle2requested(bundlecaps)
1530 usebundle2 = bundle2requested(bundlecaps)
1531 # bundle10 case
1531 # bundle10 case
1532 if not usebundle2:
1532 if not usebundle2:
1533 if bundlecaps and not kwargs.get('cg', True):
1533 if bundlecaps and not kwargs.get('cg', True):
1534 raise ValueError(_('request for bundle10 must include changegroup'))
1534 raise ValueError(_('request for bundle10 must include changegroup'))
1535
1535
1536 if kwargs:
1536 if kwargs:
1537 raise ValueError(_('unsupported getbundle arguments: %s')
1537 raise ValueError(_('unsupported getbundle arguments: %s')
1538 % ', '.join(sorted(kwargs.keys())))
1538 % ', '.join(sorted(kwargs.keys())))
1539 return changegroup.getchangegroup(repo, source, heads=heads,
1539 outgoing = changegroup.computeoutgoing(repo, heads, common)
1540 common=common, bundlecaps=bundlecaps)
1540 return changegroup.getchangegroup(repo, source, outgoing,
1541 bundlecaps=bundlecaps)
1541
1542
1542 # bundle20 case
1543 # bundle20 case
1543 b2caps = {}
1544 b2caps = {}
1544 for bcaps in bundlecaps:
1545 for bcaps in bundlecaps:
1545 if bcaps.startswith('bundle2='):
1546 if bcaps.startswith('bundle2='):
1546 blob = urlreq.unquote(bcaps[len('bundle2='):])
1547 blob = urlreq.unquote(bcaps[len('bundle2='):])
1547 b2caps.update(bundle2.decodecaps(blob))
1548 b2caps.update(bundle2.decodecaps(blob))
1548 bundler = bundle2.bundle20(repo.ui, b2caps)
1549 bundler = bundle2.bundle20(repo.ui, b2caps)
1549
1550
1550 kwargs['heads'] = heads
1551 kwargs['heads'] = heads
1551 kwargs['common'] = common
1552 kwargs['common'] = common
1552
1553
1553 for name in getbundle2partsorder:
1554 for name in getbundle2partsorder:
1554 func = getbundle2partsmapping[name]
1555 func = getbundle2partsmapping[name]
1555 func(bundler, repo, source, bundlecaps=bundlecaps, b2caps=b2caps,
1556 func(bundler, repo, source, bundlecaps=bundlecaps, b2caps=b2caps,
1556 **kwargs)
1557 **kwargs)
1557
1558
1558 return util.chunkbuffer(bundler.getchunks())
1559 return util.chunkbuffer(bundler.getchunks())
1559
1560
1560 @getbundle2partsgenerator('changegroup')
1561 @getbundle2partsgenerator('changegroup')
1561 def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
1562 def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
1562 b2caps=None, heads=None, common=None, **kwargs):
1563 b2caps=None, heads=None, common=None, **kwargs):
1563 """add a changegroup part to the requested bundle"""
1564 """add a changegroup part to the requested bundle"""
1564 cg = None
1565 cg = None
1565 if kwargs.get('cg', True):
1566 if kwargs.get('cg', True):
1566 # build changegroup bundle here.
1567 # build changegroup bundle here.
1567 version = '01'
1568 version = '01'
1568 cgversions = b2caps.get('changegroup')
1569 cgversions = b2caps.get('changegroup')
1569 if cgversions: # 3.1 and 3.2 ship with an empty value
1570 if cgversions: # 3.1 and 3.2 ship with an empty value
1570 cgversions = [v for v in cgversions
1571 cgversions = [v for v in cgversions
1571 if v in changegroup.supportedoutgoingversions(repo)]
1572 if v in changegroup.supportedoutgoingversions(repo)]
1572 if not cgversions:
1573 if not cgversions:
1573 raise ValueError(_('no common changegroup version'))
1574 raise ValueError(_('no common changegroup version'))
1574 version = max(cgversions)
1575 version = max(cgversions)
1575 outgoing = changegroup.computeoutgoing(repo, heads, common)
1576 outgoing = changegroup.computeoutgoing(repo, heads, common)
1576 cg = changegroup.getlocalchangegroupraw(repo, source, outgoing,
1577 cg = changegroup.getlocalchangegroupraw(repo, source, outgoing,
1577 bundlecaps=bundlecaps,
1578 bundlecaps=bundlecaps,
1578 version=version)
1579 version=version)
1579
1580
1580 if cg:
1581 if cg:
1581 part = bundler.newpart('changegroup', data=cg)
1582 part = bundler.newpart('changegroup', data=cg)
1582 if cgversions:
1583 if cgversions:
1583 part.addparam('version', version)
1584 part.addparam('version', version)
1584 part.addparam('nbchanges', str(len(outgoing.missing)), mandatory=False)
1585 part.addparam('nbchanges', str(len(outgoing.missing)), mandatory=False)
1585 if 'treemanifest' in repo.requirements:
1586 if 'treemanifest' in repo.requirements:
1586 part.addparam('treemanifest', '1')
1587 part.addparam('treemanifest', '1')
1587
1588
1588 @getbundle2partsgenerator('listkeys')
1589 @getbundle2partsgenerator('listkeys')
1589 def _getbundlelistkeysparts(bundler, repo, source, bundlecaps=None,
1590 def _getbundlelistkeysparts(bundler, repo, source, bundlecaps=None,
1590 b2caps=None, **kwargs):
1591 b2caps=None, **kwargs):
1591 """add parts containing listkeys namespaces to the requested bundle"""
1592 """add parts containing listkeys namespaces to the requested bundle"""
1592 listkeys = kwargs.get('listkeys', ())
1593 listkeys = kwargs.get('listkeys', ())
1593 for namespace in listkeys:
1594 for namespace in listkeys:
1594 part = bundler.newpart('listkeys')
1595 part = bundler.newpart('listkeys')
1595 part.addparam('namespace', namespace)
1596 part.addparam('namespace', namespace)
1596 keys = repo.listkeys(namespace).items()
1597 keys = repo.listkeys(namespace).items()
1597 part.data = pushkey.encodekeys(keys)
1598 part.data = pushkey.encodekeys(keys)
1598
1599
1599 @getbundle2partsgenerator('obsmarkers')
1600 @getbundle2partsgenerator('obsmarkers')
1600 def _getbundleobsmarkerpart(bundler, repo, source, bundlecaps=None,
1601 def _getbundleobsmarkerpart(bundler, repo, source, bundlecaps=None,
1601 b2caps=None, heads=None, **kwargs):
1602 b2caps=None, heads=None, **kwargs):
1602 """add an obsolescence markers part to the requested bundle"""
1603 """add an obsolescence markers part to the requested bundle"""
1603 if kwargs.get('obsmarkers', False):
1604 if kwargs.get('obsmarkers', False):
1604 if heads is None:
1605 if heads is None:
1605 heads = repo.heads()
1606 heads = repo.heads()
1606 subset = [c.node() for c in repo.set('::%ln', heads)]
1607 subset = [c.node() for c in repo.set('::%ln', heads)]
1607 markers = repo.obsstore.relevantmarkers(subset)
1608 markers = repo.obsstore.relevantmarkers(subset)
1608 markers = sorted(markers)
1609 markers = sorted(markers)
1609 buildobsmarkerspart(bundler, markers)
1610 buildobsmarkerspart(bundler, markers)
1610
1611
1611 @getbundle2partsgenerator('hgtagsfnodes')
1612 @getbundle2partsgenerator('hgtagsfnodes')
1612 def _getbundletagsfnodes(bundler, repo, source, bundlecaps=None,
1613 def _getbundletagsfnodes(bundler, repo, source, bundlecaps=None,
1613 b2caps=None, heads=None, common=None,
1614 b2caps=None, heads=None, common=None,
1614 **kwargs):
1615 **kwargs):
1615 """Transfer the .hgtags filenodes mapping.
1616 """Transfer the .hgtags filenodes mapping.
1616
1617
1617 Only values for heads in this bundle will be transferred.
1618 Only values for heads in this bundle will be transferred.
1618
1619
1619 The part data consists of pairs of 20 byte changeset node and .hgtags
1620 The part data consists of pairs of 20 byte changeset node and .hgtags
1620 filenodes raw values.
1621 filenodes raw values.
1621 """
1622 """
1622 # Don't send unless:
1623 # Don't send unless:
1623 # - changeset are being exchanged,
1624 # - changeset are being exchanged,
1624 # - the client supports it.
1625 # - the client supports it.
1625 if not (kwargs.get('cg', True) and 'hgtagsfnodes' in b2caps):
1626 if not (kwargs.get('cg', True) and 'hgtagsfnodes' in b2caps):
1626 return
1627 return
1627
1628
1628 outgoing = changegroup.computeoutgoing(repo, heads, common)
1629 outgoing = changegroup.computeoutgoing(repo, heads, common)
1629
1630
1630 if not outgoing.missingheads:
1631 if not outgoing.missingheads:
1631 return
1632 return
1632
1633
1633 cache = tags.hgtagsfnodescache(repo.unfiltered())
1634 cache = tags.hgtagsfnodescache(repo.unfiltered())
1634 chunks = []
1635 chunks = []
1635
1636
1636 # .hgtags fnodes are only relevant for head changesets. While we could
1637 # .hgtags fnodes are only relevant for head changesets. While we could
1637 # transfer values for all known nodes, there will likely be little to
1638 # transfer values for all known nodes, there will likely be little to
1638 # no benefit.
1639 # no benefit.
1639 #
1640 #
1640 # We don't bother using a generator to produce output data because
1641 # We don't bother using a generator to produce output data because
1641 # a) we only have 40 bytes per head and even esoteric numbers of heads
1642 # a) we only have 40 bytes per head and even esoteric numbers of heads
1642 # consume little memory (1M heads is 40MB) b) we don't want to send the
1643 # consume little memory (1M heads is 40MB) b) we don't want to send the
1643 # part if we don't have entries and knowing if we have entries requires
1644 # part if we don't have entries and knowing if we have entries requires
1644 # cache lookups.
1645 # cache lookups.
1645 for node in outgoing.missingheads:
1646 for node in outgoing.missingheads:
1646 # Don't compute missing, as this may slow down serving.
1647 # Don't compute missing, as this may slow down serving.
1647 fnode = cache.getfnode(node, computemissing=False)
1648 fnode = cache.getfnode(node, computemissing=False)
1648 if fnode is not None:
1649 if fnode is not None:
1649 chunks.extend([node, fnode])
1650 chunks.extend([node, fnode])
1650
1651
1651 if chunks:
1652 if chunks:
1652 bundler.newpart('hgtagsfnodes', data=''.join(chunks))
1653 bundler.newpart('hgtagsfnodes', data=''.join(chunks))
1653
1654
1654 def check_heads(repo, their_heads, context):
1655 def check_heads(repo, their_heads, context):
1655 """check if the heads of a repo have been modified
1656 """check if the heads of a repo have been modified
1656
1657
1657 Used by peer for unbundling.
1658 Used by peer for unbundling.
1658 """
1659 """
1659 heads = repo.heads()
1660 heads = repo.heads()
1660 heads_hash = hashlib.sha1(''.join(sorted(heads))).digest()
1661 heads_hash = hashlib.sha1(''.join(sorted(heads))).digest()
1661 if not (their_heads == ['force'] or their_heads == heads or
1662 if not (their_heads == ['force'] or their_heads == heads or
1662 their_heads == ['hashed', heads_hash]):
1663 their_heads == ['hashed', heads_hash]):
1663 # someone else committed/pushed/unbundled while we
1664 # someone else committed/pushed/unbundled while we
1664 # were transferring data
1665 # were transferring data
1665 raise error.PushRaced('repository changed while %s - '
1666 raise error.PushRaced('repository changed while %s - '
1666 'please try again' % context)
1667 'please try again' % context)
1667
1668
1668 def unbundle(repo, cg, heads, source, url):
1669 def unbundle(repo, cg, heads, source, url):
1669 """Apply a bundle to a repo.
1670 """Apply a bundle to a repo.
1670
1671
1671 this function makes sure the repo is locked during the application and have
1672 this function makes sure the repo is locked during the application and have
1672 mechanism to check that no push race occurred between the creation of the
1673 mechanism to check that no push race occurred between the creation of the
1673 bundle and its application.
1674 bundle and its application.
1674
1675
1675 If the push was raced as PushRaced exception is raised."""
1676 If the push was raced as PushRaced exception is raised."""
1676 r = 0
1677 r = 0
1677 # need a transaction when processing a bundle2 stream
1678 # need a transaction when processing a bundle2 stream
1678 # [wlock, lock, tr] - needs to be an array so nested functions can modify it
1679 # [wlock, lock, tr] - needs to be an array so nested functions can modify it
1679 lockandtr = [None, None, None]
1680 lockandtr = [None, None, None]
1680 recordout = None
1681 recordout = None
1681 # quick fix for output mismatch with bundle2 in 3.4
1682 # quick fix for output mismatch with bundle2 in 3.4
1682 captureoutput = repo.ui.configbool('experimental', 'bundle2-output-capture',
1683 captureoutput = repo.ui.configbool('experimental', 'bundle2-output-capture',
1683 False)
1684 False)
1684 if url.startswith('remote:http:') or url.startswith('remote:https:'):
1685 if url.startswith('remote:http:') or url.startswith('remote:https:'):
1685 captureoutput = True
1686 captureoutput = True
1686 try:
1687 try:
1687 check_heads(repo, heads, 'uploading changes')
1688 check_heads(repo, heads, 'uploading changes')
1688 # push can proceed
1689 # push can proceed
1689 if util.safehasattr(cg, 'params'):
1690 if util.safehasattr(cg, 'params'):
1690 r = None
1691 r = None
1691 try:
1692 try:
1692 def gettransaction():
1693 def gettransaction():
1693 if not lockandtr[2]:
1694 if not lockandtr[2]:
1694 lockandtr[0] = repo.wlock()
1695 lockandtr[0] = repo.wlock()
1695 lockandtr[1] = repo.lock()
1696 lockandtr[1] = repo.lock()
1696 lockandtr[2] = repo.transaction(source)
1697 lockandtr[2] = repo.transaction(source)
1697 lockandtr[2].hookargs['source'] = source
1698 lockandtr[2].hookargs['source'] = source
1698 lockandtr[2].hookargs['url'] = url
1699 lockandtr[2].hookargs['url'] = url
1699 lockandtr[2].hookargs['bundle2'] = '1'
1700 lockandtr[2].hookargs['bundle2'] = '1'
1700 return lockandtr[2]
1701 return lockandtr[2]
1701
1702
1702 # Do greedy locking by default until we're satisfied with lazy
1703 # Do greedy locking by default until we're satisfied with lazy
1703 # locking.
1704 # locking.
1704 if not repo.ui.configbool('experimental', 'bundle2lazylocking'):
1705 if not repo.ui.configbool('experimental', 'bundle2lazylocking'):
1705 gettransaction()
1706 gettransaction()
1706
1707
1707 op = bundle2.bundleoperation(repo, gettransaction,
1708 op = bundle2.bundleoperation(repo, gettransaction,
1708 captureoutput=captureoutput)
1709 captureoutput=captureoutput)
1709 try:
1710 try:
1710 op = bundle2.processbundle(repo, cg, op=op)
1711 op = bundle2.processbundle(repo, cg, op=op)
1711 finally:
1712 finally:
1712 r = op.reply
1713 r = op.reply
1713 if captureoutput and r is not None:
1714 if captureoutput and r is not None:
1714 repo.ui.pushbuffer(error=True, subproc=True)
1715 repo.ui.pushbuffer(error=True, subproc=True)
1715 def recordout(output):
1716 def recordout(output):
1716 r.newpart('output', data=output, mandatory=False)
1717 r.newpart('output', data=output, mandatory=False)
1717 if lockandtr[2] is not None:
1718 if lockandtr[2] is not None:
1718 lockandtr[2].close()
1719 lockandtr[2].close()
1719 except BaseException as exc:
1720 except BaseException as exc:
1720 exc.duringunbundle2 = True
1721 exc.duringunbundle2 = True
1721 if captureoutput and r is not None:
1722 if captureoutput and r is not None:
1722 parts = exc._bundle2salvagedoutput = r.salvageoutput()
1723 parts = exc._bundle2salvagedoutput = r.salvageoutput()
1723 def recordout(output):
1724 def recordout(output):
1724 part = bundle2.bundlepart('output', data=output,
1725 part = bundle2.bundlepart('output', data=output,
1725 mandatory=False)
1726 mandatory=False)
1726 parts.append(part)
1727 parts.append(part)
1727 raise
1728 raise
1728 else:
1729 else:
1729 lockandtr[1] = repo.lock()
1730 lockandtr[1] = repo.lock()
1730 r = cg.apply(repo, source, url)
1731 r = cg.apply(repo, source, url)
1731 finally:
1732 finally:
1732 lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0])
1733 lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0])
1733 if recordout is not None:
1734 if recordout is not None:
1734 recordout(repo.ui.popbuffer())
1735 recordout(repo.ui.popbuffer())
1735 return r
1736 return r
1736
1737
1737 def _maybeapplyclonebundle(pullop):
1738 def _maybeapplyclonebundle(pullop):
1738 """Apply a clone bundle from a remote, if possible."""
1739 """Apply a clone bundle from a remote, if possible."""
1739
1740
1740 repo = pullop.repo
1741 repo = pullop.repo
1741 remote = pullop.remote
1742 remote = pullop.remote
1742
1743
1743 if not repo.ui.configbool('ui', 'clonebundles', True):
1744 if not repo.ui.configbool('ui', 'clonebundles', True):
1744 return
1745 return
1745
1746
1746 # Only run if local repo is empty.
1747 # Only run if local repo is empty.
1747 if len(repo):
1748 if len(repo):
1748 return
1749 return
1749
1750
1750 if pullop.heads:
1751 if pullop.heads:
1751 return
1752 return
1752
1753
1753 if not remote.capable('clonebundles'):
1754 if not remote.capable('clonebundles'):
1754 return
1755 return
1755
1756
1756 res = remote._call('clonebundles')
1757 res = remote._call('clonebundles')
1757
1758
1758 # If we call the wire protocol command, that's good enough to record the
1759 # If we call the wire protocol command, that's good enough to record the
1759 # attempt.
1760 # attempt.
1760 pullop.clonebundleattempted = True
1761 pullop.clonebundleattempted = True
1761
1762
1762 entries = parseclonebundlesmanifest(repo, res)
1763 entries = parseclonebundlesmanifest(repo, res)
1763 if not entries:
1764 if not entries:
1764 repo.ui.note(_('no clone bundles available on remote; '
1765 repo.ui.note(_('no clone bundles available on remote; '
1765 'falling back to regular clone\n'))
1766 'falling back to regular clone\n'))
1766 return
1767 return
1767
1768
1768 entries = filterclonebundleentries(repo, entries)
1769 entries = filterclonebundleentries(repo, entries)
1769 if not entries:
1770 if not entries:
1770 # There is a thundering herd concern here. However, if a server
1771 # There is a thundering herd concern here. However, if a server
1771 # operator doesn't advertise bundles appropriate for its clients,
1772 # operator doesn't advertise bundles appropriate for its clients,
1772 # they deserve what's coming. Furthermore, from a client's
1773 # they deserve what's coming. Furthermore, from a client's
1773 # perspective, no automatic fallback would mean not being able to
1774 # perspective, no automatic fallback would mean not being able to
1774 # clone!
1775 # clone!
1775 repo.ui.warn(_('no compatible clone bundles available on server; '
1776 repo.ui.warn(_('no compatible clone bundles available on server; '
1776 'falling back to regular clone\n'))
1777 'falling back to regular clone\n'))
1777 repo.ui.warn(_('(you may want to report this to the server '
1778 repo.ui.warn(_('(you may want to report this to the server '
1778 'operator)\n'))
1779 'operator)\n'))
1779 return
1780 return
1780
1781
1781 entries = sortclonebundleentries(repo.ui, entries)
1782 entries = sortclonebundleentries(repo.ui, entries)
1782
1783
1783 url = entries[0]['URL']
1784 url = entries[0]['URL']
1784 repo.ui.status(_('applying clone bundle from %s\n') % url)
1785 repo.ui.status(_('applying clone bundle from %s\n') % url)
1785 if trypullbundlefromurl(repo.ui, repo, url):
1786 if trypullbundlefromurl(repo.ui, repo, url):
1786 repo.ui.status(_('finished applying clone bundle\n'))
1787 repo.ui.status(_('finished applying clone bundle\n'))
1787 # Bundle failed.
1788 # Bundle failed.
1788 #
1789 #
1789 # We abort by default to avoid the thundering herd of
1790 # We abort by default to avoid the thundering herd of
1790 # clients flooding a server that was expecting expensive
1791 # clients flooding a server that was expecting expensive
1791 # clone load to be offloaded.
1792 # clone load to be offloaded.
1792 elif repo.ui.configbool('ui', 'clonebundlefallback', False):
1793 elif repo.ui.configbool('ui', 'clonebundlefallback', False):
1793 repo.ui.warn(_('falling back to normal clone\n'))
1794 repo.ui.warn(_('falling back to normal clone\n'))
1794 else:
1795 else:
1795 raise error.Abort(_('error applying bundle'),
1796 raise error.Abort(_('error applying bundle'),
1796 hint=_('if this error persists, consider contacting '
1797 hint=_('if this error persists, consider contacting '
1797 'the server operator or disable clone '
1798 'the server operator or disable clone '
1798 'bundles via '
1799 'bundles via '
1799 '"--config ui.clonebundles=false"'))
1800 '"--config ui.clonebundles=false"'))
1800
1801
1801 def parseclonebundlesmanifest(repo, s):
1802 def parseclonebundlesmanifest(repo, s):
1802 """Parses the raw text of a clone bundles manifest.
1803 """Parses the raw text of a clone bundles manifest.
1803
1804
1804 Returns a list of dicts. The dicts have a ``URL`` key corresponding
1805 Returns a list of dicts. The dicts have a ``URL`` key corresponding
1805 to the URL and other keys are the attributes for the entry.
1806 to the URL and other keys are the attributes for the entry.
1806 """
1807 """
1807 m = []
1808 m = []
1808 for line in s.splitlines():
1809 for line in s.splitlines():
1809 fields = line.split()
1810 fields = line.split()
1810 if not fields:
1811 if not fields:
1811 continue
1812 continue
1812 attrs = {'URL': fields[0]}
1813 attrs = {'URL': fields[0]}
1813 for rawattr in fields[1:]:
1814 for rawattr in fields[1:]:
1814 key, value = rawattr.split('=', 1)
1815 key, value = rawattr.split('=', 1)
1815 key = urlreq.unquote(key)
1816 key = urlreq.unquote(key)
1816 value = urlreq.unquote(value)
1817 value = urlreq.unquote(value)
1817 attrs[key] = value
1818 attrs[key] = value
1818
1819
1819 # Parse BUNDLESPEC into components. This makes client-side
1820 # Parse BUNDLESPEC into components. This makes client-side
1820 # preferences easier to specify since you can prefer a single
1821 # preferences easier to specify since you can prefer a single
1821 # component of the BUNDLESPEC.
1822 # component of the BUNDLESPEC.
1822 if key == 'BUNDLESPEC':
1823 if key == 'BUNDLESPEC':
1823 try:
1824 try:
1824 comp, version, params = parsebundlespec(repo, value,
1825 comp, version, params = parsebundlespec(repo, value,
1825 externalnames=True)
1826 externalnames=True)
1826 attrs['COMPRESSION'] = comp
1827 attrs['COMPRESSION'] = comp
1827 attrs['VERSION'] = version
1828 attrs['VERSION'] = version
1828 except error.InvalidBundleSpecification:
1829 except error.InvalidBundleSpecification:
1829 pass
1830 pass
1830 except error.UnsupportedBundleSpecification:
1831 except error.UnsupportedBundleSpecification:
1831 pass
1832 pass
1832
1833
1833 m.append(attrs)
1834 m.append(attrs)
1834
1835
1835 return m
1836 return m
1836
1837
1837 def filterclonebundleentries(repo, entries):
1838 def filterclonebundleentries(repo, entries):
1838 """Remove incompatible clone bundle manifest entries.
1839 """Remove incompatible clone bundle manifest entries.
1839
1840
1840 Accepts a list of entries parsed with ``parseclonebundlesmanifest``
1841 Accepts a list of entries parsed with ``parseclonebundlesmanifest``
1841 and returns a new list consisting of only the entries that this client
1842 and returns a new list consisting of only the entries that this client
1842 should be able to apply.
1843 should be able to apply.
1843
1844
1844 There is no guarantee we'll be able to apply all returned entries because
1845 There is no guarantee we'll be able to apply all returned entries because
1845 the metadata we use to filter on may be missing or wrong.
1846 the metadata we use to filter on may be missing or wrong.
1846 """
1847 """
1847 newentries = []
1848 newentries = []
1848 for entry in entries:
1849 for entry in entries:
1849 spec = entry.get('BUNDLESPEC')
1850 spec = entry.get('BUNDLESPEC')
1850 if spec:
1851 if spec:
1851 try:
1852 try:
1852 parsebundlespec(repo, spec, strict=True)
1853 parsebundlespec(repo, spec, strict=True)
1853 except error.InvalidBundleSpecification as e:
1854 except error.InvalidBundleSpecification as e:
1854 repo.ui.debug(str(e) + '\n')
1855 repo.ui.debug(str(e) + '\n')
1855 continue
1856 continue
1856 except error.UnsupportedBundleSpecification as e:
1857 except error.UnsupportedBundleSpecification as e:
1857 repo.ui.debug('filtering %s because unsupported bundle '
1858 repo.ui.debug('filtering %s because unsupported bundle '
1858 'spec: %s\n' % (entry['URL'], str(e)))
1859 'spec: %s\n' % (entry['URL'], str(e)))
1859 continue
1860 continue
1860
1861
1861 if 'REQUIRESNI' in entry and not sslutil.hassni:
1862 if 'REQUIRESNI' in entry and not sslutil.hassni:
1862 repo.ui.debug('filtering %s because SNI not supported\n' %
1863 repo.ui.debug('filtering %s because SNI not supported\n' %
1863 entry['URL'])
1864 entry['URL'])
1864 continue
1865 continue
1865
1866
1866 newentries.append(entry)
1867 newentries.append(entry)
1867
1868
1868 return newentries
1869 return newentries
1869
1870
1870 def sortclonebundleentries(ui, entries):
1871 def sortclonebundleentries(ui, entries):
1871 prefers = ui.configlist('ui', 'clonebundleprefers', default=[])
1872 prefers = ui.configlist('ui', 'clonebundleprefers', default=[])
1872 if not prefers:
1873 if not prefers:
1873 return list(entries)
1874 return list(entries)
1874
1875
1875 prefers = [p.split('=', 1) for p in prefers]
1876 prefers = [p.split('=', 1) for p in prefers]
1876
1877
1877 # Our sort function.
1878 # Our sort function.
1878 def compareentry(a, b):
1879 def compareentry(a, b):
1879 for prefkey, prefvalue in prefers:
1880 for prefkey, prefvalue in prefers:
1880 avalue = a.get(prefkey)
1881 avalue = a.get(prefkey)
1881 bvalue = b.get(prefkey)
1882 bvalue = b.get(prefkey)
1882
1883
1883 # Special case for b missing attribute and a matches exactly.
1884 # Special case for b missing attribute and a matches exactly.
1884 if avalue is not None and bvalue is None and avalue == prefvalue:
1885 if avalue is not None and bvalue is None and avalue == prefvalue:
1885 return -1
1886 return -1
1886
1887
1887 # Special case for a missing attribute and b matches exactly.
1888 # Special case for a missing attribute and b matches exactly.
1888 if bvalue is not None and avalue is None and bvalue == prefvalue:
1889 if bvalue is not None and avalue is None and bvalue == prefvalue:
1889 return 1
1890 return 1
1890
1891
1891 # We can't compare unless attribute present on both.
1892 # We can't compare unless attribute present on both.
1892 if avalue is None or bvalue is None:
1893 if avalue is None or bvalue is None:
1893 continue
1894 continue
1894
1895
1895 # Same values should fall back to next attribute.
1896 # Same values should fall back to next attribute.
1896 if avalue == bvalue:
1897 if avalue == bvalue:
1897 continue
1898 continue
1898
1899
1899 # Exact matches come first.
1900 # Exact matches come first.
1900 if avalue == prefvalue:
1901 if avalue == prefvalue:
1901 return -1
1902 return -1
1902 if bvalue == prefvalue:
1903 if bvalue == prefvalue:
1903 return 1
1904 return 1
1904
1905
1905 # Fall back to next attribute.
1906 # Fall back to next attribute.
1906 continue
1907 continue
1907
1908
1908 # If we got here we couldn't sort by attributes and prefers. Fall
1909 # If we got here we couldn't sort by attributes and prefers. Fall
1909 # back to index order.
1910 # back to index order.
1910 return 0
1911 return 0
1911
1912
1912 return sorted(entries, cmp=compareentry)
1913 return sorted(entries, cmp=compareentry)
1913
1914
1914 def trypullbundlefromurl(ui, repo, url):
1915 def trypullbundlefromurl(ui, repo, url):
1915 """Attempt to apply a bundle from a URL."""
1916 """Attempt to apply a bundle from a URL."""
1916 lock = repo.lock()
1917 lock = repo.lock()
1917 try:
1918 try:
1918 tr = repo.transaction('bundleurl')
1919 tr = repo.transaction('bundleurl')
1919 try:
1920 try:
1920 try:
1921 try:
1921 fh = urlmod.open(ui, url)
1922 fh = urlmod.open(ui, url)
1922 cg = readbundle(ui, fh, 'stream')
1923 cg = readbundle(ui, fh, 'stream')
1923
1924
1924 if isinstance(cg, bundle2.unbundle20):
1925 if isinstance(cg, bundle2.unbundle20):
1925 bundle2.processbundle(repo, cg, lambda: tr)
1926 bundle2.processbundle(repo, cg, lambda: tr)
1926 elif isinstance(cg, streamclone.streamcloneapplier):
1927 elif isinstance(cg, streamclone.streamcloneapplier):
1927 cg.apply(repo)
1928 cg.apply(repo)
1928 else:
1929 else:
1929 cg.apply(repo, 'clonebundles', url)
1930 cg.apply(repo, 'clonebundles', url)
1930 tr.close()
1931 tr.close()
1931 return True
1932 return True
1932 except urlerr.httperror as e:
1933 except urlerr.httperror as e:
1933 ui.warn(_('HTTP error fetching bundle: %s\n') % str(e))
1934 ui.warn(_('HTTP error fetching bundle: %s\n') % str(e))
1934 except urlerr.urlerror as e:
1935 except urlerr.urlerror as e:
1935 ui.warn(_('error fetching bundle: %s\n') % e.reason[1])
1936 ui.warn(_('error fetching bundle: %s\n') % e.reason[1])
1936
1937
1937 return False
1938 return False
1938 finally:
1939 finally:
1939 tr.release()
1940 tr.release()
1940 finally:
1941 finally:
1941 lock.release()
1942 lock.release()
@@ -1,259 +1,260 b''
1 Create an extension to test bundle2 with multiple changegroups
1 Create an extension to test bundle2 with multiple changegroups
2
2
3 $ cat > bundle2.py <<EOF
3 $ cat > bundle2.py <<EOF
4 > """
4 > """
5 > """
5 > """
6 > from mercurial import changegroup, exchange
6 > from mercurial import changegroup, discovery, exchange
7 >
7 >
8 > def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
8 > def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
9 > b2caps=None, heads=None, common=None,
9 > b2caps=None, heads=None, common=None,
10 > **kwargs):
10 > **kwargs):
11 > # Create two changegroups given the common changesets and heads for the
11 > # Create two changegroups given the common changesets and heads for the
12 > # changegroup part we are being requested. Use the parent of each head
12 > # changegroup part we are being requested. Use the parent of each head
13 > # in 'heads' as intermediate heads for the first changegroup.
13 > # in 'heads' as intermediate heads for the first changegroup.
14 > intermediates = [repo[r].p1().node() for r in heads]
14 > intermediates = [repo[r].p1().node() for r in heads]
15 > cg = changegroup.getchangegroup(repo, source, heads=intermediates,
15 > outgoing = discovery.outgoing(repo, common, intermediates)
16 > common=common, bundlecaps=bundlecaps)
16 > cg = changegroup.getchangegroup(repo, source, outgoing,
17 > bundlecaps=bundlecaps)
17 > bundler.newpart('output', data='changegroup1')
18 > bundler.newpart('output', data='changegroup1')
18 > bundler.newpart('changegroup', data=cg.getchunks())
19 > bundler.newpart('changegroup', data=cg.getchunks())
19 > cg = changegroup.getchangegroup(repo, source, heads=heads,
20 > outgoing = discovery.outgoing(repo, common + intermediates, heads)
20 > common=common + intermediates,
21 > cg = changegroup.getchangegroup(repo, source, outgoing,
21 > bundlecaps=bundlecaps)
22 > bundlecaps=bundlecaps)
22 > bundler.newpart('output', data='changegroup2')
23 > bundler.newpart('output', data='changegroup2')
23 > bundler.newpart('changegroup', data=cg.getchunks())
24 > bundler.newpart('changegroup', data=cg.getchunks())
24 >
25 >
25 > def _pull(repo, *args, **kwargs):
26 > def _pull(repo, *args, **kwargs):
26 > pullop = _orig_pull(repo, *args, **kwargs)
27 > pullop = _orig_pull(repo, *args, **kwargs)
27 > repo.ui.write('pullop.cgresult is %d\n' % pullop.cgresult)
28 > repo.ui.write('pullop.cgresult is %d\n' % pullop.cgresult)
28 > return pullop
29 > return pullop
29 >
30 >
30 > _orig_pull = exchange.pull
31 > _orig_pull = exchange.pull
31 > exchange.pull = _pull
32 > exchange.pull = _pull
32 > exchange.getbundle2partsmapping['changegroup'] = _getbundlechangegrouppart
33 > exchange.getbundle2partsmapping['changegroup'] = _getbundlechangegrouppart
33 > EOF
34 > EOF
34
35
35 $ cat >> $HGRCPATH << EOF
36 $ cat >> $HGRCPATH << EOF
36 > [ui]
37 > [ui]
37 > logtemplate={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline}
38 > logtemplate={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline}
38 > EOF
39 > EOF
39
40
40 Start with a simple repository with a single commit
41 Start with a simple repository with a single commit
41
42
42 $ hg init repo
43 $ hg init repo
43 $ cd repo
44 $ cd repo
44 $ cat > .hg/hgrc << EOF
45 $ cat > .hg/hgrc << EOF
45 > [extensions]
46 > [extensions]
46 > bundle2=$TESTTMP/bundle2.py
47 > bundle2=$TESTTMP/bundle2.py
47 > EOF
48 > EOF
48
49
49 $ echo A > A
50 $ echo A > A
50 $ hg commit -A -m A -q
51 $ hg commit -A -m A -q
51 $ cd ..
52 $ cd ..
52
53
53 Clone
54 Clone
54
55
55 $ hg clone -q repo clone
56 $ hg clone -q repo clone
56
57
57 Add two linear commits
58 Add two linear commits
58
59
59 $ cd repo
60 $ cd repo
60 $ echo B > B
61 $ echo B > B
61 $ hg commit -A -m B -q
62 $ hg commit -A -m B -q
62 $ echo C > C
63 $ echo C > C
63 $ hg commit -A -m C -q
64 $ hg commit -A -m C -q
64
65
65 $ cd ../clone
66 $ cd ../clone
66 $ cat >> .hg/hgrc <<EOF
67 $ cat >> .hg/hgrc <<EOF
67 > [hooks]
68 > [hooks]
68 > pretxnchangegroup = sh -c "printenv.py pretxnchangegroup"
69 > pretxnchangegroup = sh -c "printenv.py pretxnchangegroup"
69 > changegroup = sh -c "printenv.py changegroup"
70 > changegroup = sh -c "printenv.py changegroup"
70 > incoming = sh -c "printenv.py incoming"
71 > incoming = sh -c "printenv.py incoming"
71 > EOF
72 > EOF
72
73
73 Pull the new commits in the clone
74 Pull the new commits in the clone
74
75
75 $ hg pull
76 $ hg pull
76 pulling from $TESTTMP/repo (glob)
77 pulling from $TESTTMP/repo (glob)
77 searching for changes
78 searching for changes
78 remote: changegroup1
79 remote: changegroup1
79 adding changesets
80 adding changesets
80 adding manifests
81 adding manifests
81 adding file changes
82 adding file changes
82 added 1 changesets with 1 changes to 1 files
83 added 1 changesets with 1 changes to 1 files
83 pretxnchangegroup hook: HG_NODE=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_NODE_LAST=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_PENDING=$TESTTMP/clone HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
84 pretxnchangegroup hook: HG_NODE=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_NODE_LAST=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_PENDING=$TESTTMP/clone HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
84 remote: changegroup2
85 remote: changegroup2
85 adding changesets
86 adding changesets
86 adding manifests
87 adding manifests
87 adding file changes
88 adding file changes
88 added 1 changesets with 1 changes to 1 files
89 added 1 changesets with 1 changes to 1 files
89 pretxnchangegroup hook: HG_NODE=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_NODE_LAST=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_PENDING=$TESTTMP/clone HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
90 pretxnchangegroup hook: HG_NODE=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_NODE_LAST=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_PENDING=$TESTTMP/clone HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
90 changegroup hook: HG_NODE=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_NODE_LAST=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
91 changegroup hook: HG_NODE=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_NODE_LAST=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
91 incoming hook: HG_NODE=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
92 incoming hook: HG_NODE=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
92 changegroup hook: HG_NODE=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_NODE_LAST=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
93 changegroup hook: HG_NODE=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_NODE_LAST=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
93 incoming hook: HG_NODE=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
94 incoming hook: HG_NODE=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
94 pullop.cgresult is 1
95 pullop.cgresult is 1
95 (run 'hg update' to get a working copy)
96 (run 'hg update' to get a working copy)
96 $ hg update
97 $ hg update
97 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
98 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
98 $ hg log -G
99 $ hg log -G
99 @ 2:f838bfaca5c7 public test C
100 @ 2:f838bfaca5c7 public test C
100 |
101 |
101 o 1:27547f69f254 public test B
102 o 1:27547f69f254 public test B
102 |
103 |
103 o 0:4a2df7238c3b public test A
104 o 0:4a2df7238c3b public test A
104
105
105 Add more changesets with multiple heads to the original repository
106 Add more changesets with multiple heads to the original repository
106
107
107 $ cd ../repo
108 $ cd ../repo
108 $ echo D > D
109 $ echo D > D
109 $ hg commit -A -m D -q
110 $ hg commit -A -m D -q
110 $ hg up -r 1
111 $ hg up -r 1
111 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
112 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
112 $ echo E > E
113 $ echo E > E
113 $ hg commit -A -m E -q
114 $ hg commit -A -m E -q
114 $ echo F > F
115 $ echo F > F
115 $ hg commit -A -m F -q
116 $ hg commit -A -m F -q
116 $ hg up -r 1
117 $ hg up -r 1
117 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
118 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
118 $ echo G > G
119 $ echo G > G
119 $ hg commit -A -m G -q
120 $ hg commit -A -m G -q
120 $ hg up -r 3
121 $ hg up -r 3
121 2 files updated, 0 files merged, 1 files removed, 0 files unresolved
122 2 files updated, 0 files merged, 1 files removed, 0 files unresolved
122 $ echo H > H
123 $ echo H > H
123 $ hg commit -A -m H -q
124 $ hg commit -A -m H -q
124 $ hg log -G
125 $ hg log -G
125 @ 7:5cd59d311f65 draft test H
126 @ 7:5cd59d311f65 draft test H
126 |
127 |
127 | o 6:1d14c3ce6ac0 draft test G
128 | o 6:1d14c3ce6ac0 draft test G
128 | |
129 | |
129 | | o 5:7f219660301f draft test F
130 | | o 5:7f219660301f draft test F
130 | | |
131 | | |
131 | | o 4:8a5212ebc852 draft test E
132 | | o 4:8a5212ebc852 draft test E
132 | |/
133 | |/
133 o | 3:b3325c91a4d9 draft test D
134 o | 3:b3325c91a4d9 draft test D
134 | |
135 | |
135 o | 2:f838bfaca5c7 draft test C
136 o | 2:f838bfaca5c7 draft test C
136 |/
137 |/
137 o 1:27547f69f254 draft test B
138 o 1:27547f69f254 draft test B
138 |
139 |
139 o 0:4a2df7238c3b draft test A
140 o 0:4a2df7238c3b draft test A
140
141
141 New heads are reported during transfer and properly accounted for in
142 New heads are reported during transfer and properly accounted for in
142 pullop.cgresult
143 pullop.cgresult
143
144
144 $ cd ../clone
145 $ cd ../clone
145 $ hg pull
146 $ hg pull
146 pulling from $TESTTMP/repo (glob)
147 pulling from $TESTTMP/repo (glob)
147 searching for changes
148 searching for changes
148 remote: changegroup1
149 remote: changegroup1
149 adding changesets
150 adding changesets
150 adding manifests
151 adding manifests
151 adding file changes
152 adding file changes
152 added 2 changesets with 2 changes to 2 files (+1 heads)
153 added 2 changesets with 2 changes to 2 files (+1 heads)
153 pretxnchangegroup hook: HG_NODE=b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e HG_NODE_LAST=8a5212ebc8527f9fb821601504794e3eb11a1ed3 HG_PENDING=$TESTTMP/clone HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
154 pretxnchangegroup hook: HG_NODE=b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e HG_NODE_LAST=8a5212ebc8527f9fb821601504794e3eb11a1ed3 HG_PENDING=$TESTTMP/clone HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
154 remote: changegroup2
155 remote: changegroup2
155 adding changesets
156 adding changesets
156 adding manifests
157 adding manifests
157 adding file changes
158 adding file changes
158 added 3 changesets with 3 changes to 3 files (+1 heads)
159 added 3 changesets with 3 changes to 3 files (+1 heads)
159 pretxnchangegroup hook: HG_NODE=7f219660301fe4c8a116f714df5e769695cc2b46 HG_NODE_LAST=5cd59d311f6508b8e0ed28a266756c859419c9f1 HG_PENDING=$TESTTMP/clone HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
160 pretxnchangegroup hook: HG_NODE=7f219660301fe4c8a116f714df5e769695cc2b46 HG_NODE_LAST=5cd59d311f6508b8e0ed28a266756c859419c9f1 HG_PENDING=$TESTTMP/clone HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
160 changegroup hook: HG_NODE=b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e HG_NODE_LAST=8a5212ebc8527f9fb821601504794e3eb11a1ed3 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
161 changegroup hook: HG_NODE=b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e HG_NODE_LAST=8a5212ebc8527f9fb821601504794e3eb11a1ed3 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
161 incoming hook: HG_NODE=b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
162 incoming hook: HG_NODE=b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
162 incoming hook: HG_NODE=8a5212ebc8527f9fb821601504794e3eb11a1ed3 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
163 incoming hook: HG_NODE=8a5212ebc8527f9fb821601504794e3eb11a1ed3 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
163 changegroup hook: HG_NODE=7f219660301fe4c8a116f714df5e769695cc2b46 HG_NODE_LAST=5cd59d311f6508b8e0ed28a266756c859419c9f1 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
164 changegroup hook: HG_NODE=7f219660301fe4c8a116f714df5e769695cc2b46 HG_NODE_LAST=5cd59d311f6508b8e0ed28a266756c859419c9f1 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
164 incoming hook: HG_NODE=7f219660301fe4c8a116f714df5e769695cc2b46 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
165 incoming hook: HG_NODE=7f219660301fe4c8a116f714df5e769695cc2b46 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
165 incoming hook: HG_NODE=1d14c3ce6ac0582d2809220d33e8cd7a696e0156 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
166 incoming hook: HG_NODE=1d14c3ce6ac0582d2809220d33e8cd7a696e0156 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
166 incoming hook: HG_NODE=5cd59d311f6508b8e0ed28a266756c859419c9f1 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
167 incoming hook: HG_NODE=5cd59d311f6508b8e0ed28a266756c859419c9f1 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
167 pullop.cgresult is 3
168 pullop.cgresult is 3
168 (run 'hg heads' to see heads, 'hg merge' to merge)
169 (run 'hg heads' to see heads, 'hg merge' to merge)
169 $ hg log -G
170 $ hg log -G
170 o 7:5cd59d311f65 public test H
171 o 7:5cd59d311f65 public test H
171 |
172 |
172 | o 6:1d14c3ce6ac0 public test G
173 | o 6:1d14c3ce6ac0 public test G
173 | |
174 | |
174 | | o 5:7f219660301f public test F
175 | | o 5:7f219660301f public test F
175 | | |
176 | | |
176 | | o 4:8a5212ebc852 public test E
177 | | o 4:8a5212ebc852 public test E
177 | |/
178 | |/
178 o | 3:b3325c91a4d9 public test D
179 o | 3:b3325c91a4d9 public test D
179 | |
180 | |
180 @ | 2:f838bfaca5c7 public test C
181 @ | 2:f838bfaca5c7 public test C
181 |/
182 |/
182 o 1:27547f69f254 public test B
183 o 1:27547f69f254 public test B
183 |
184 |
184 o 0:4a2df7238c3b public test A
185 o 0:4a2df7238c3b public test A
185
186
186 Removing a head from the original repository by merging it
187 Removing a head from the original repository by merging it
187
188
188 $ cd ../repo
189 $ cd ../repo
189 $ hg merge -r 6 -q
190 $ hg merge -r 6 -q
190 $ hg commit -m Merge
191 $ hg commit -m Merge
191 $ echo I > I
192 $ echo I > I
192 $ hg commit -A -m H -q
193 $ hg commit -A -m H -q
193 $ hg log -G
194 $ hg log -G
194 @ 9:9d18e5bd9ab0 draft test H
195 @ 9:9d18e5bd9ab0 draft test H
195 |
196 |
196 o 8:71bd7b46de72 draft test Merge
197 o 8:71bd7b46de72 draft test Merge
197 |\
198 |\
198 | o 7:5cd59d311f65 draft test H
199 | o 7:5cd59d311f65 draft test H
199 | |
200 | |
200 o | 6:1d14c3ce6ac0 draft test G
201 o | 6:1d14c3ce6ac0 draft test G
201 | |
202 | |
202 | | o 5:7f219660301f draft test F
203 | | o 5:7f219660301f draft test F
203 | | |
204 | | |
204 +---o 4:8a5212ebc852 draft test E
205 +---o 4:8a5212ebc852 draft test E
205 | |
206 | |
206 | o 3:b3325c91a4d9 draft test D
207 | o 3:b3325c91a4d9 draft test D
207 | |
208 | |
208 | o 2:f838bfaca5c7 draft test C
209 | o 2:f838bfaca5c7 draft test C
209 |/
210 |/
210 o 1:27547f69f254 draft test B
211 o 1:27547f69f254 draft test B
211 |
212 |
212 o 0:4a2df7238c3b draft test A
213 o 0:4a2df7238c3b draft test A
213
214
214 Removed heads are reported during transfer and properly accounted for in
215 Removed heads are reported during transfer and properly accounted for in
215 pullop.cgresult
216 pullop.cgresult
216
217
217 $ cd ../clone
218 $ cd ../clone
218 $ hg pull
219 $ hg pull
219 pulling from $TESTTMP/repo (glob)
220 pulling from $TESTTMP/repo (glob)
220 searching for changes
221 searching for changes
221 remote: changegroup1
222 remote: changegroup1
222 adding changesets
223 adding changesets
223 adding manifests
224 adding manifests
224 adding file changes
225 adding file changes
225 added 1 changesets with 0 changes to 0 files (-1 heads)
226 added 1 changesets with 0 changes to 0 files (-1 heads)
226 pretxnchangegroup hook: HG_NODE=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_NODE_LAST=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_PENDING=$TESTTMP/clone HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
227 pretxnchangegroup hook: HG_NODE=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_NODE_LAST=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_PENDING=$TESTTMP/clone HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
227 remote: changegroup2
228 remote: changegroup2
228 adding changesets
229 adding changesets
229 adding manifests
230 adding manifests
230 adding file changes
231 adding file changes
231 added 1 changesets with 1 changes to 1 files
232 added 1 changesets with 1 changes to 1 files
232 pretxnchangegroup hook: HG_NODE=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_NODE_LAST=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_PENDING=$TESTTMP/clone HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
233 pretxnchangegroup hook: HG_NODE=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_NODE_LAST=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_PENDING=$TESTTMP/clone HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
233 changegroup hook: HG_NODE=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_NODE_LAST=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
234 changegroup hook: HG_NODE=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_NODE_LAST=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
234 incoming hook: HG_NODE=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
235 incoming hook: HG_NODE=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
235 changegroup hook: HG_NODE=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_NODE_LAST=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
236 changegroup hook: HG_NODE=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_NODE_LAST=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
236 incoming hook: HG_NODE=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
237 incoming hook: HG_NODE=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob)
237 pullop.cgresult is -2
238 pullop.cgresult is -2
238 (run 'hg update' to get a working copy)
239 (run 'hg update' to get a working copy)
239 $ hg log -G
240 $ hg log -G
240 o 9:9d18e5bd9ab0 public test H
241 o 9:9d18e5bd9ab0 public test H
241 |
242 |
242 o 8:71bd7b46de72 public test Merge
243 o 8:71bd7b46de72 public test Merge
243 |\
244 |\
244 | o 7:5cd59d311f65 public test H
245 | o 7:5cd59d311f65 public test H
245 | |
246 | |
246 o | 6:1d14c3ce6ac0 public test G
247 o | 6:1d14c3ce6ac0 public test G
247 | |
248 | |
248 | | o 5:7f219660301f public test F
249 | | o 5:7f219660301f public test F
249 | | |
250 | | |
250 +---o 4:8a5212ebc852 public test E
251 +---o 4:8a5212ebc852 public test E
251 | |
252 | |
252 | o 3:b3325c91a4d9 public test D
253 | o 3:b3325c91a4d9 public test D
253 | |
254 | |
254 | @ 2:f838bfaca5c7 public test C
255 | @ 2:f838bfaca5c7 public test C
255 |/
256 |/
256 o 1:27547f69f254 public test B
257 o 1:27547f69f254 public test B
257 |
258 |
258 o 0:4a2df7238c3b public test A
259 o 0:4a2df7238c3b public test A
259
260
@@ -1,590 +1,590 b''
1 #require killdaemons
1 #require killdaemons
2
2
3 Create an extension to test bundle2 remote-changegroup parts
3 Create an extension to test bundle2 remote-changegroup parts
4
4
5 $ cat > bundle2.py << EOF
5 $ cat > bundle2.py << EOF
6 > """A small extension to test bundle2 remote-changegroup parts.
6 > """A small extension to test bundle2 remote-changegroup parts.
7 >
7 >
8 > Current bundle2 implementation doesn't provide a way to generate those
8 > Current bundle2 implementation doesn't provide a way to generate those
9 > parts, so they must be created by extensions.
9 > parts, so they must be created by extensions.
10 > """
10 > """
11 > from mercurial import bundle2, changegroup, exchange, util
11 > from mercurial import bundle2, changegroup, discovery, exchange, util
12 >
12 >
13 > def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
13 > def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
14 > b2caps=None, heads=None, common=None,
14 > b2caps=None, heads=None, common=None,
15 > **kwargs):
15 > **kwargs):
16 > """this function replaces the changegroup part handler for getbundle.
16 > """this function replaces the changegroup part handler for getbundle.
17 > It allows to create a set of arbitrary parts containing changegroups
17 > It allows to create a set of arbitrary parts containing changegroups
18 > and remote-changegroups, as described in a bundle2maker file in the
18 > and remote-changegroups, as described in a bundle2maker file in the
19 > repository .hg/ directory.
19 > repository .hg/ directory.
20 >
20 >
21 > Each line of that bundle2maker file contain a description of the
21 > Each line of that bundle2maker file contain a description of the
22 > part to add:
22 > part to add:
23 > - changegroup common_revset heads_revset
23 > - changegroup common_revset heads_revset
24 > Creates a changegroup part based, using common_revset and
24 > Creates a changegroup part based, using common_revset and
25 > heads_revset for changegroup.getchangegroup.
25 > heads_revset for outgoing
26 > - remote-changegroup url file
26 > - remote-changegroup url file
27 > Creates a remote-changegroup part for a bundle at the given
27 > Creates a remote-changegroup part for a bundle at the given
28 > url. Size and digest, as required by the client, are computed
28 > url. Size and digest, as required by the client, are computed
29 > from the given file.
29 > from the given file.
30 > - raw-remote-changegroup <python expression>
30 > - raw-remote-changegroup <python expression>
31 > Creates a remote-changegroup part with the data given in the
31 > Creates a remote-changegroup part with the data given in the
32 > python expression as parameters. The python expression is
32 > python expression as parameters. The python expression is
33 > evaluated with eval, and is expected to be a dict.
33 > evaluated with eval, and is expected to be a dict.
34 > """
34 > """
35 > def newpart(name, data=''):
35 > def newpart(name, data=''):
36 > """wrapper around bundler.newpart adding an extra part making the
36 > """wrapper around bundler.newpart adding an extra part making the
37 > client output information about each processed part"""
37 > client output information about each processed part"""
38 > bundler.newpart('output', data=name)
38 > bundler.newpart('output', data=name)
39 > part = bundler.newpart(name, data=data)
39 > part = bundler.newpart(name, data=data)
40 > return part
40 > return part
41 >
41 >
42 > for line in open(repo.join('bundle2maker'), 'r'):
42 > for line in open(repo.join('bundle2maker'), 'r'):
43 > line = line.strip()
43 > line = line.strip()
44 > try:
44 > try:
45 > verb, args = line.split(None, 1)
45 > verb, args = line.split(None, 1)
46 > except ValueError:
46 > except ValueError:
47 > verb, args = line, ''
47 > verb, args = line, ''
48 > if verb == 'remote-changegroup':
48 > if verb == 'remote-changegroup':
49 > url, file = args.split()
49 > url, file = args.split()
50 > bundledata = open(file, 'rb').read()
50 > bundledata = open(file, 'rb').read()
51 > digest = util.digester.preferred(b2caps['digests'])
51 > digest = util.digester.preferred(b2caps['digests'])
52 > d = util.digester([digest], bundledata)
52 > d = util.digester([digest], bundledata)
53 > part = newpart('remote-changegroup')
53 > part = newpart('remote-changegroup')
54 > part.addparam('url', url)
54 > part.addparam('url', url)
55 > part.addparam('size', str(len(bundledata)))
55 > part.addparam('size', str(len(bundledata)))
56 > part.addparam('digests', digest)
56 > part.addparam('digests', digest)
57 > part.addparam('digest:%s' % digest, d[digest])
57 > part.addparam('digest:%s' % digest, d[digest])
58 > elif verb == 'raw-remote-changegroup':
58 > elif verb == 'raw-remote-changegroup':
59 > part = newpart('remote-changegroup')
59 > part = newpart('remote-changegroup')
60 > for k, v in eval(args).items():
60 > for k, v in eval(args).items():
61 > part.addparam(k, str(v))
61 > part.addparam(k, str(v))
62 > elif verb == 'changegroup':
62 > elif verb == 'changegroup':
63 > _common, heads = args.split()
63 > _common, heads = args.split()
64 > common.extend(repo.lookup(r) for r in repo.revs(_common))
64 > common.extend(repo.lookup(r) for r in repo.revs(_common))
65 > heads = [repo.lookup(r) for r in repo.revs(heads)]
65 > heads = [repo.lookup(r) for r in repo.revs(heads)]
66 > cg = changegroup.getchangegroup(repo, 'changegroup',
66 > outgoing = discovery.outgoing(repo, common, heads)
67 > heads=heads, common=common)
67 > cg = changegroup.getchangegroup(repo, 'changegroup', outgoing)
68 > newpart('changegroup', cg.getchunks())
68 > newpart('changegroup', cg.getchunks())
69 > else:
69 > else:
70 > raise Exception('unknown verb')
70 > raise Exception('unknown verb')
71 >
71 >
72 > exchange.getbundle2partsmapping['changegroup'] = _getbundlechangegrouppart
72 > exchange.getbundle2partsmapping['changegroup'] = _getbundlechangegrouppart
73 > EOF
73 > EOF
74
74
75 Start a simple HTTP server to serve bundles
75 Start a simple HTTP server to serve bundles
76
76
77 $ python "$TESTDIR/dumbhttp.py" -p $HGPORT --pid dumb.pid
77 $ python "$TESTDIR/dumbhttp.py" -p $HGPORT --pid dumb.pid
78 $ cat dumb.pid >> $DAEMON_PIDS
78 $ cat dumb.pid >> $DAEMON_PIDS
79
79
80 $ cat >> $HGRCPATH << EOF
80 $ cat >> $HGRCPATH << EOF
81 > [ui]
81 > [ui]
82 > ssh=python "$TESTDIR/dummyssh"
82 > ssh=python "$TESTDIR/dummyssh"
83 > logtemplate={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline}
83 > logtemplate={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline}
84 > EOF
84 > EOF
85
85
86 $ hg init repo
86 $ hg init repo
87
87
88 $ hg -R repo unbundle $TESTDIR/bundles/rebase.hg
88 $ hg -R repo unbundle $TESTDIR/bundles/rebase.hg
89 adding changesets
89 adding changesets
90 adding manifests
90 adding manifests
91 adding file changes
91 adding file changes
92 added 8 changesets with 7 changes to 7 files (+2 heads)
92 added 8 changesets with 7 changes to 7 files (+2 heads)
93 (run 'hg heads' to see heads, 'hg merge' to merge)
93 (run 'hg heads' to see heads, 'hg merge' to merge)
94
94
95 $ hg -R repo log -G
95 $ hg -R repo log -G
96 o 7:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> H
96 o 7:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> H
97 |
97 |
98 | o 6:eea13746799a draft Nicolas Dumazet <nicdumz.commits@gmail.com> G
98 | o 6:eea13746799a draft Nicolas Dumazet <nicdumz.commits@gmail.com> G
99 |/|
99 |/|
100 o | 5:24b6387c8c8c draft Nicolas Dumazet <nicdumz.commits@gmail.com> F
100 o | 5:24b6387c8c8c draft Nicolas Dumazet <nicdumz.commits@gmail.com> F
101 | |
101 | |
102 | o 4:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
102 | o 4:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
103 |/
103 |/
104 | o 3:32af7686d403 draft Nicolas Dumazet <nicdumz.commits@gmail.com> D
104 | o 3:32af7686d403 draft Nicolas Dumazet <nicdumz.commits@gmail.com> D
105 | |
105 | |
106 | o 2:5fddd98957c8 draft Nicolas Dumazet <nicdumz.commits@gmail.com> C
106 | o 2:5fddd98957c8 draft Nicolas Dumazet <nicdumz.commits@gmail.com> C
107 | |
107 | |
108 | o 1:42ccdea3bb16 draft Nicolas Dumazet <nicdumz.commits@gmail.com> B
108 | o 1:42ccdea3bb16 draft Nicolas Dumazet <nicdumz.commits@gmail.com> B
109 |/
109 |/
110 o 0:cd010b8cd998 draft Nicolas Dumazet <nicdumz.commits@gmail.com> A
110 o 0:cd010b8cd998 draft Nicolas Dumazet <nicdumz.commits@gmail.com> A
111
111
112 $ hg clone repo orig
112 $ hg clone repo orig
113 updating to branch default
113 updating to branch default
114 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
114 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
115
115
116 $ cat > repo/.hg/hgrc << EOF
116 $ cat > repo/.hg/hgrc << EOF
117 > [extensions]
117 > [extensions]
118 > bundle2=$TESTTMP/bundle2.py
118 > bundle2=$TESTTMP/bundle2.py
119 > EOF
119 > EOF
120
120
121 Test a pull with an remote-changegroup
121 Test a pull with an remote-changegroup
122
122
123 $ hg bundle -R repo --type v1 --base '0:4' -r '5:7' bundle.hg
123 $ hg bundle -R repo --type v1 --base '0:4' -r '5:7' bundle.hg
124 3 changesets found
124 3 changesets found
125 $ cat > repo/.hg/bundle2maker << EOF
125 $ cat > repo/.hg/bundle2maker << EOF
126 > remote-changegroup http://localhost:$HGPORT/bundle.hg bundle.hg
126 > remote-changegroup http://localhost:$HGPORT/bundle.hg bundle.hg
127 > EOF
127 > EOF
128 $ hg clone orig clone -r 3 -r 4
128 $ hg clone orig clone -r 3 -r 4
129 adding changesets
129 adding changesets
130 adding manifests
130 adding manifests
131 adding file changes
131 adding file changes
132 added 5 changesets with 5 changes to 5 files (+1 heads)
132 added 5 changesets with 5 changes to 5 files (+1 heads)
133 updating to branch default
133 updating to branch default
134 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
134 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
135 $ hg pull -R clone ssh://user@dummy/repo
135 $ hg pull -R clone ssh://user@dummy/repo
136 pulling from ssh://user@dummy/repo
136 pulling from ssh://user@dummy/repo
137 searching for changes
137 searching for changes
138 remote: remote-changegroup
138 remote: remote-changegroup
139 adding changesets
139 adding changesets
140 adding manifests
140 adding manifests
141 adding file changes
141 adding file changes
142 added 3 changesets with 2 changes to 2 files (+1 heads)
142 added 3 changesets with 2 changes to 2 files (+1 heads)
143 (run 'hg heads .' to see heads, 'hg merge' to merge)
143 (run 'hg heads .' to see heads, 'hg merge' to merge)
144 $ hg -R clone log -G
144 $ hg -R clone log -G
145 o 7:02de42196ebe public Nicolas Dumazet <nicdumz.commits@gmail.com> H
145 o 7:02de42196ebe public Nicolas Dumazet <nicdumz.commits@gmail.com> H
146 |
146 |
147 | o 6:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> G
147 | o 6:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> G
148 |/|
148 |/|
149 o | 5:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
149 o | 5:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
150 | |
150 | |
151 | o 4:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
151 | o 4:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
152 |/
152 |/
153 | @ 3:32af7686d403 public Nicolas Dumazet <nicdumz.commits@gmail.com> D
153 | @ 3:32af7686d403 public Nicolas Dumazet <nicdumz.commits@gmail.com> D
154 | |
154 | |
155 | o 2:5fddd98957c8 public Nicolas Dumazet <nicdumz.commits@gmail.com> C
155 | o 2:5fddd98957c8 public Nicolas Dumazet <nicdumz.commits@gmail.com> C
156 | |
156 | |
157 | o 1:42ccdea3bb16 public Nicolas Dumazet <nicdumz.commits@gmail.com> B
157 | o 1:42ccdea3bb16 public Nicolas Dumazet <nicdumz.commits@gmail.com> B
158 |/
158 |/
159 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
159 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
160
160
161 $ rm -rf clone
161 $ rm -rf clone
162
162
163 Test a pull with an remote-changegroup and a following changegroup
163 Test a pull with an remote-changegroup and a following changegroup
164
164
165 $ hg bundle -R repo --type v1 --base 2 -r '3:4' bundle2.hg
165 $ hg bundle -R repo --type v1 --base 2 -r '3:4' bundle2.hg
166 2 changesets found
166 2 changesets found
167 $ cat > repo/.hg/bundle2maker << EOF
167 $ cat > repo/.hg/bundle2maker << EOF
168 > remote-changegroup http://localhost:$HGPORT/bundle2.hg bundle2.hg
168 > remote-changegroup http://localhost:$HGPORT/bundle2.hg bundle2.hg
169 > changegroup 0:4 5:7
169 > changegroup 0:4 5:7
170 > EOF
170 > EOF
171 $ hg clone orig clone -r 2
171 $ hg clone orig clone -r 2
172 adding changesets
172 adding changesets
173 adding manifests
173 adding manifests
174 adding file changes
174 adding file changes
175 added 3 changesets with 3 changes to 3 files
175 added 3 changesets with 3 changes to 3 files
176 updating to branch default
176 updating to branch default
177 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
177 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
178 $ hg pull -R clone ssh://user@dummy/repo
178 $ hg pull -R clone ssh://user@dummy/repo
179 pulling from ssh://user@dummy/repo
179 pulling from ssh://user@dummy/repo
180 searching for changes
180 searching for changes
181 remote: remote-changegroup
181 remote: remote-changegroup
182 adding changesets
182 adding changesets
183 adding manifests
183 adding manifests
184 adding file changes
184 adding file changes
185 added 2 changesets with 2 changes to 2 files (+1 heads)
185 added 2 changesets with 2 changes to 2 files (+1 heads)
186 remote: changegroup
186 remote: changegroup
187 adding changesets
187 adding changesets
188 adding manifests
188 adding manifests
189 adding file changes
189 adding file changes
190 added 3 changesets with 2 changes to 2 files (+1 heads)
190 added 3 changesets with 2 changes to 2 files (+1 heads)
191 (run 'hg heads' to see heads, 'hg merge' to merge)
191 (run 'hg heads' to see heads, 'hg merge' to merge)
192 $ hg -R clone log -G
192 $ hg -R clone log -G
193 o 7:02de42196ebe public Nicolas Dumazet <nicdumz.commits@gmail.com> H
193 o 7:02de42196ebe public Nicolas Dumazet <nicdumz.commits@gmail.com> H
194 |
194 |
195 | o 6:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> G
195 | o 6:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> G
196 |/|
196 |/|
197 o | 5:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
197 o | 5:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
198 | |
198 | |
199 | o 4:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
199 | o 4:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
200 |/
200 |/
201 | o 3:32af7686d403 public Nicolas Dumazet <nicdumz.commits@gmail.com> D
201 | o 3:32af7686d403 public Nicolas Dumazet <nicdumz.commits@gmail.com> D
202 | |
202 | |
203 | @ 2:5fddd98957c8 public Nicolas Dumazet <nicdumz.commits@gmail.com> C
203 | @ 2:5fddd98957c8 public Nicolas Dumazet <nicdumz.commits@gmail.com> C
204 | |
204 | |
205 | o 1:42ccdea3bb16 public Nicolas Dumazet <nicdumz.commits@gmail.com> B
205 | o 1:42ccdea3bb16 public Nicolas Dumazet <nicdumz.commits@gmail.com> B
206 |/
206 |/
207 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
207 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
208
208
209 $ rm -rf clone
209 $ rm -rf clone
210
210
211 Test a pull with a changegroup followed by an remote-changegroup
211 Test a pull with a changegroup followed by an remote-changegroup
212
212
213 $ hg bundle -R repo --type v1 --base '0:4' -r '5:7' bundle3.hg
213 $ hg bundle -R repo --type v1 --base '0:4' -r '5:7' bundle3.hg
214 3 changesets found
214 3 changesets found
215 $ cat > repo/.hg/bundle2maker << EOF
215 $ cat > repo/.hg/bundle2maker << EOF
216 > changegroup 000000000000 :4
216 > changegroup 000000000000 :4
217 > remote-changegroup http://localhost:$HGPORT/bundle3.hg bundle3.hg
217 > remote-changegroup http://localhost:$HGPORT/bundle3.hg bundle3.hg
218 > EOF
218 > EOF
219 $ hg clone orig clone -r 2
219 $ hg clone orig clone -r 2
220 adding changesets
220 adding changesets
221 adding manifests
221 adding manifests
222 adding file changes
222 adding file changes
223 added 3 changesets with 3 changes to 3 files
223 added 3 changesets with 3 changes to 3 files
224 updating to branch default
224 updating to branch default
225 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
225 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
226 $ hg pull -R clone ssh://user@dummy/repo
226 $ hg pull -R clone ssh://user@dummy/repo
227 pulling from ssh://user@dummy/repo
227 pulling from ssh://user@dummy/repo
228 searching for changes
228 searching for changes
229 remote: changegroup
229 remote: changegroup
230 adding changesets
230 adding changesets
231 adding manifests
231 adding manifests
232 adding file changes
232 adding file changes
233 added 2 changesets with 2 changes to 2 files (+1 heads)
233 added 2 changesets with 2 changes to 2 files (+1 heads)
234 remote: remote-changegroup
234 remote: remote-changegroup
235 adding changesets
235 adding changesets
236 adding manifests
236 adding manifests
237 adding file changes
237 adding file changes
238 added 3 changesets with 2 changes to 2 files (+1 heads)
238 added 3 changesets with 2 changes to 2 files (+1 heads)
239 (run 'hg heads' to see heads, 'hg merge' to merge)
239 (run 'hg heads' to see heads, 'hg merge' to merge)
240 $ hg -R clone log -G
240 $ hg -R clone log -G
241 o 7:02de42196ebe public Nicolas Dumazet <nicdumz.commits@gmail.com> H
241 o 7:02de42196ebe public Nicolas Dumazet <nicdumz.commits@gmail.com> H
242 |
242 |
243 | o 6:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> G
243 | o 6:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> G
244 |/|
244 |/|
245 o | 5:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
245 o | 5:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
246 | |
246 | |
247 | o 4:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
247 | o 4:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
248 |/
248 |/
249 | o 3:32af7686d403 public Nicolas Dumazet <nicdumz.commits@gmail.com> D
249 | o 3:32af7686d403 public Nicolas Dumazet <nicdumz.commits@gmail.com> D
250 | |
250 | |
251 | @ 2:5fddd98957c8 public Nicolas Dumazet <nicdumz.commits@gmail.com> C
251 | @ 2:5fddd98957c8 public Nicolas Dumazet <nicdumz.commits@gmail.com> C
252 | |
252 | |
253 | o 1:42ccdea3bb16 public Nicolas Dumazet <nicdumz.commits@gmail.com> B
253 | o 1:42ccdea3bb16 public Nicolas Dumazet <nicdumz.commits@gmail.com> B
254 |/
254 |/
255 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
255 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
256
256
257 $ rm -rf clone
257 $ rm -rf clone
258
258
259 Test a pull with two remote-changegroups and a changegroup
259 Test a pull with two remote-changegroups and a changegroup
260
260
261 $ hg bundle -R repo --type v1 --base 2 -r '3:4' bundle4.hg
261 $ hg bundle -R repo --type v1 --base 2 -r '3:4' bundle4.hg
262 2 changesets found
262 2 changesets found
263 $ hg bundle -R repo --type v1 --base '3:4' -r '5:6' bundle5.hg
263 $ hg bundle -R repo --type v1 --base '3:4' -r '5:6' bundle5.hg
264 2 changesets found
264 2 changesets found
265 $ cat > repo/.hg/bundle2maker << EOF
265 $ cat > repo/.hg/bundle2maker << EOF
266 > remote-changegroup http://localhost:$HGPORT/bundle4.hg bundle4.hg
266 > remote-changegroup http://localhost:$HGPORT/bundle4.hg bundle4.hg
267 > remote-changegroup http://localhost:$HGPORT/bundle5.hg bundle5.hg
267 > remote-changegroup http://localhost:$HGPORT/bundle5.hg bundle5.hg
268 > changegroup 0:6 7
268 > changegroup 0:6 7
269 > EOF
269 > EOF
270 $ hg clone orig clone -r 2
270 $ hg clone orig clone -r 2
271 adding changesets
271 adding changesets
272 adding manifests
272 adding manifests
273 adding file changes
273 adding file changes
274 added 3 changesets with 3 changes to 3 files
274 added 3 changesets with 3 changes to 3 files
275 updating to branch default
275 updating to branch default
276 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
276 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
277 $ hg pull -R clone ssh://user@dummy/repo
277 $ hg pull -R clone ssh://user@dummy/repo
278 pulling from ssh://user@dummy/repo
278 pulling from ssh://user@dummy/repo
279 searching for changes
279 searching for changes
280 remote: remote-changegroup
280 remote: remote-changegroup
281 adding changesets
281 adding changesets
282 adding manifests
282 adding manifests
283 adding file changes
283 adding file changes
284 added 2 changesets with 2 changes to 2 files (+1 heads)
284 added 2 changesets with 2 changes to 2 files (+1 heads)
285 remote: remote-changegroup
285 remote: remote-changegroup
286 adding changesets
286 adding changesets
287 adding manifests
287 adding manifests
288 adding file changes
288 adding file changes
289 added 2 changesets with 1 changes to 1 files
289 added 2 changesets with 1 changes to 1 files
290 remote: changegroup
290 remote: changegroup
291 adding changesets
291 adding changesets
292 adding manifests
292 adding manifests
293 adding file changes
293 adding file changes
294 added 1 changesets with 1 changes to 1 files (+1 heads)
294 added 1 changesets with 1 changes to 1 files (+1 heads)
295 (run 'hg heads' to see heads, 'hg merge' to merge)
295 (run 'hg heads' to see heads, 'hg merge' to merge)
296 $ hg -R clone log -G
296 $ hg -R clone log -G
297 o 7:02de42196ebe public Nicolas Dumazet <nicdumz.commits@gmail.com> H
297 o 7:02de42196ebe public Nicolas Dumazet <nicdumz.commits@gmail.com> H
298 |
298 |
299 | o 6:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> G
299 | o 6:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> G
300 |/|
300 |/|
301 o | 5:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
301 o | 5:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
302 | |
302 | |
303 | o 4:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
303 | o 4:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
304 |/
304 |/
305 | o 3:32af7686d403 public Nicolas Dumazet <nicdumz.commits@gmail.com> D
305 | o 3:32af7686d403 public Nicolas Dumazet <nicdumz.commits@gmail.com> D
306 | |
306 | |
307 | @ 2:5fddd98957c8 public Nicolas Dumazet <nicdumz.commits@gmail.com> C
307 | @ 2:5fddd98957c8 public Nicolas Dumazet <nicdumz.commits@gmail.com> C
308 | |
308 | |
309 | o 1:42ccdea3bb16 public Nicolas Dumazet <nicdumz.commits@gmail.com> B
309 | o 1:42ccdea3bb16 public Nicolas Dumazet <nicdumz.commits@gmail.com> B
310 |/
310 |/
311 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
311 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
312
312
313 $ rm -rf clone
313 $ rm -rf clone
314
314
315 Hash digest tests
315 Hash digest tests
316
316
317 $ hg bundle -R repo --type v1 -a bundle6.hg
317 $ hg bundle -R repo --type v1 -a bundle6.hg
318 8 changesets found
318 8 changesets found
319
319
320 $ cat > repo/.hg/bundle2maker << EOF
320 $ cat > repo/.hg/bundle2maker << EOF
321 > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle6.hg', 'size': 1663, 'digests': 'sha1', 'digest:sha1': '2c880cfec23cff7d8f80c2f12958d1563cbdaba6'}
321 > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle6.hg', 'size': 1663, 'digests': 'sha1', 'digest:sha1': '2c880cfec23cff7d8f80c2f12958d1563cbdaba6'}
322 > EOF
322 > EOF
323 $ hg clone ssh://user@dummy/repo clone
323 $ hg clone ssh://user@dummy/repo clone
324 requesting all changes
324 requesting all changes
325 remote: remote-changegroup
325 remote: remote-changegroup
326 adding changesets
326 adding changesets
327 adding manifests
327 adding manifests
328 adding file changes
328 adding file changes
329 added 8 changesets with 7 changes to 7 files (+2 heads)
329 added 8 changesets with 7 changes to 7 files (+2 heads)
330 updating to branch default
330 updating to branch default
331 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
331 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
332 $ rm -rf clone
332 $ rm -rf clone
333
333
334 $ cat > repo/.hg/bundle2maker << EOF
334 $ cat > repo/.hg/bundle2maker << EOF
335 > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle6.hg', 'size': 1663, 'digests': 'md5', 'digest:md5': 'e22172c2907ef88794b7bea6642c2394'}
335 > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle6.hg', 'size': 1663, 'digests': 'md5', 'digest:md5': 'e22172c2907ef88794b7bea6642c2394'}
336 > EOF
336 > EOF
337 $ hg clone ssh://user@dummy/repo clone
337 $ hg clone ssh://user@dummy/repo clone
338 requesting all changes
338 requesting all changes
339 remote: remote-changegroup
339 remote: remote-changegroup
340 adding changesets
340 adding changesets
341 adding manifests
341 adding manifests
342 adding file changes
342 adding file changes
343 added 8 changesets with 7 changes to 7 files (+2 heads)
343 added 8 changesets with 7 changes to 7 files (+2 heads)
344 updating to branch default
344 updating to branch default
345 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
345 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
346 $ rm -rf clone
346 $ rm -rf clone
347
347
348 Hash digest mismatch throws an error
348 Hash digest mismatch throws an error
349
349
350 $ cat > repo/.hg/bundle2maker << EOF
350 $ cat > repo/.hg/bundle2maker << EOF
351 > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle6.hg', 'size': 1663, 'digests': 'sha1', 'digest:sha1': '0' * 40}
351 > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle6.hg', 'size': 1663, 'digests': 'sha1', 'digest:sha1': '0' * 40}
352 > EOF
352 > EOF
353 $ hg clone ssh://user@dummy/repo clone
353 $ hg clone ssh://user@dummy/repo clone
354 requesting all changes
354 requesting all changes
355 remote: remote-changegroup
355 remote: remote-changegroup
356 adding changesets
356 adding changesets
357 adding manifests
357 adding manifests
358 adding file changes
358 adding file changes
359 added 8 changesets with 7 changes to 7 files (+2 heads)
359 added 8 changesets with 7 changes to 7 files (+2 heads)
360 transaction abort!
360 transaction abort!
361 rollback completed
361 rollback completed
362 abort: bundle at http://localhost:$HGPORT/bundle6.hg is corrupted:
362 abort: bundle at http://localhost:$HGPORT/bundle6.hg is corrupted:
363 sha1 mismatch: expected 0000000000000000000000000000000000000000, got 2c880cfec23cff7d8f80c2f12958d1563cbdaba6
363 sha1 mismatch: expected 0000000000000000000000000000000000000000, got 2c880cfec23cff7d8f80c2f12958d1563cbdaba6
364 [255]
364 [255]
365
365
366 Multiple hash digests can be given
366 Multiple hash digests can be given
367
367
368 $ cat > repo/.hg/bundle2maker << EOF
368 $ cat > repo/.hg/bundle2maker << EOF
369 > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle6.hg', 'size': 1663, 'digests': 'md5 sha1', 'digest:md5': 'e22172c2907ef88794b7bea6642c2394', 'digest:sha1': '2c880cfec23cff7d8f80c2f12958d1563cbdaba6'}
369 > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle6.hg', 'size': 1663, 'digests': 'md5 sha1', 'digest:md5': 'e22172c2907ef88794b7bea6642c2394', 'digest:sha1': '2c880cfec23cff7d8f80c2f12958d1563cbdaba6'}
370 > EOF
370 > EOF
371 $ hg clone ssh://user@dummy/repo clone
371 $ hg clone ssh://user@dummy/repo clone
372 requesting all changes
372 requesting all changes
373 remote: remote-changegroup
373 remote: remote-changegroup
374 adding changesets
374 adding changesets
375 adding manifests
375 adding manifests
376 adding file changes
376 adding file changes
377 added 8 changesets with 7 changes to 7 files (+2 heads)
377 added 8 changesets with 7 changes to 7 files (+2 heads)
378 updating to branch default
378 updating to branch default
379 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
379 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
380 $ rm -rf clone
380 $ rm -rf clone
381
381
382 If either of the multiple hash digests mismatches, an error is thrown
382 If either of the multiple hash digests mismatches, an error is thrown
383
383
384 $ cat > repo/.hg/bundle2maker << EOF
384 $ cat > repo/.hg/bundle2maker << EOF
385 > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle6.hg', 'size': 1663, 'digests': 'md5 sha1', 'digest:md5': '0' * 32, 'digest:sha1': '2c880cfec23cff7d8f80c2f12958d1563cbdaba6'}
385 > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle6.hg', 'size': 1663, 'digests': 'md5 sha1', 'digest:md5': '0' * 32, 'digest:sha1': '2c880cfec23cff7d8f80c2f12958d1563cbdaba6'}
386 > EOF
386 > EOF
387 $ hg clone ssh://user@dummy/repo clone
387 $ hg clone ssh://user@dummy/repo clone
388 requesting all changes
388 requesting all changes
389 remote: remote-changegroup
389 remote: remote-changegroup
390 adding changesets
390 adding changesets
391 adding manifests
391 adding manifests
392 adding file changes
392 adding file changes
393 added 8 changesets with 7 changes to 7 files (+2 heads)
393 added 8 changesets with 7 changes to 7 files (+2 heads)
394 transaction abort!
394 transaction abort!
395 rollback completed
395 rollback completed
396 abort: bundle at http://localhost:$HGPORT/bundle6.hg is corrupted:
396 abort: bundle at http://localhost:$HGPORT/bundle6.hg is corrupted:
397 md5 mismatch: expected 00000000000000000000000000000000, got e22172c2907ef88794b7bea6642c2394
397 md5 mismatch: expected 00000000000000000000000000000000, got e22172c2907ef88794b7bea6642c2394
398 [255]
398 [255]
399
399
400 $ cat > repo/.hg/bundle2maker << EOF
400 $ cat > repo/.hg/bundle2maker << EOF
401 > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle6.hg', 'size': 1663, 'digests': 'md5 sha1', 'digest:md5': 'e22172c2907ef88794b7bea6642c2394', 'digest:sha1': '0' * 40}
401 > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle6.hg', 'size': 1663, 'digests': 'md5 sha1', 'digest:md5': 'e22172c2907ef88794b7bea6642c2394', 'digest:sha1': '0' * 40}
402 > EOF
402 > EOF
403 $ hg clone ssh://user@dummy/repo clone
403 $ hg clone ssh://user@dummy/repo clone
404 requesting all changes
404 requesting all changes
405 remote: remote-changegroup
405 remote: remote-changegroup
406 adding changesets
406 adding changesets
407 adding manifests
407 adding manifests
408 adding file changes
408 adding file changes
409 added 8 changesets with 7 changes to 7 files (+2 heads)
409 added 8 changesets with 7 changes to 7 files (+2 heads)
410 transaction abort!
410 transaction abort!
411 rollback completed
411 rollback completed
412 abort: bundle at http://localhost:$HGPORT/bundle6.hg is corrupted:
412 abort: bundle at http://localhost:$HGPORT/bundle6.hg is corrupted:
413 sha1 mismatch: expected 0000000000000000000000000000000000000000, got 2c880cfec23cff7d8f80c2f12958d1563cbdaba6
413 sha1 mismatch: expected 0000000000000000000000000000000000000000, got 2c880cfec23cff7d8f80c2f12958d1563cbdaba6
414 [255]
414 [255]
415
415
416 Corruption tests
416 Corruption tests
417
417
418 $ hg clone orig clone -r 2
418 $ hg clone orig clone -r 2
419 adding changesets
419 adding changesets
420 adding manifests
420 adding manifests
421 adding file changes
421 adding file changes
422 added 3 changesets with 3 changes to 3 files
422 added 3 changesets with 3 changes to 3 files
423 updating to branch default
423 updating to branch default
424 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
424 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
425
425
426 $ cat > repo/.hg/bundle2maker << EOF
426 $ cat > repo/.hg/bundle2maker << EOF
427 > remote-changegroup http://localhost:$HGPORT/bundle4.hg bundle4.hg
427 > remote-changegroup http://localhost:$HGPORT/bundle4.hg bundle4.hg
428 > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle5.hg', 'size': 578, 'digests': 'sha1', 'digest:sha1': '0' * 40}
428 > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle5.hg', 'size': 578, 'digests': 'sha1', 'digest:sha1': '0' * 40}
429 > changegroup 0:6 7
429 > changegroup 0:6 7
430 > EOF
430 > EOF
431 $ hg pull -R clone ssh://user@dummy/repo
431 $ hg pull -R clone ssh://user@dummy/repo
432 pulling from ssh://user@dummy/repo
432 pulling from ssh://user@dummy/repo
433 searching for changes
433 searching for changes
434 remote: remote-changegroup
434 remote: remote-changegroup
435 adding changesets
435 adding changesets
436 adding manifests
436 adding manifests
437 adding file changes
437 adding file changes
438 added 2 changesets with 2 changes to 2 files (+1 heads)
438 added 2 changesets with 2 changes to 2 files (+1 heads)
439 remote: remote-changegroup
439 remote: remote-changegroup
440 adding changesets
440 adding changesets
441 adding manifests
441 adding manifests
442 adding file changes
442 adding file changes
443 added 2 changesets with 1 changes to 1 files
443 added 2 changesets with 1 changes to 1 files
444 transaction abort!
444 transaction abort!
445 rollback completed
445 rollback completed
446 abort: bundle at http://localhost:$HGPORT/bundle5.hg is corrupted:
446 abort: bundle at http://localhost:$HGPORT/bundle5.hg is corrupted:
447 sha1 mismatch: expected 0000000000000000000000000000000000000000, got f29485d6bfd37db99983cfc95ecb52f8ca396106
447 sha1 mismatch: expected 0000000000000000000000000000000000000000, got f29485d6bfd37db99983cfc95ecb52f8ca396106
448 [255]
448 [255]
449
449
450 The entire transaction has been rolled back in the pull above
450 The entire transaction has been rolled back in the pull above
451
451
452 $ hg -R clone log -G
452 $ hg -R clone log -G
453 @ 2:5fddd98957c8 public Nicolas Dumazet <nicdumz.commits@gmail.com> C
453 @ 2:5fddd98957c8 public Nicolas Dumazet <nicdumz.commits@gmail.com> C
454 |
454 |
455 o 1:42ccdea3bb16 public Nicolas Dumazet <nicdumz.commits@gmail.com> B
455 o 1:42ccdea3bb16 public Nicolas Dumazet <nicdumz.commits@gmail.com> B
456 |
456 |
457 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
457 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
458
458
459
459
460 No params
460 No params
461
461
462 $ cat > repo/.hg/bundle2maker << EOF
462 $ cat > repo/.hg/bundle2maker << EOF
463 > raw-remote-changegroup {}
463 > raw-remote-changegroup {}
464 > EOF
464 > EOF
465 $ hg pull -R clone ssh://user@dummy/repo
465 $ hg pull -R clone ssh://user@dummy/repo
466 pulling from ssh://user@dummy/repo
466 pulling from ssh://user@dummy/repo
467 searching for changes
467 searching for changes
468 remote: remote-changegroup
468 remote: remote-changegroup
469 abort: remote-changegroup: missing "url" param
469 abort: remote-changegroup: missing "url" param
470 [255]
470 [255]
471
471
472 Missing size
472 Missing size
473
473
474 $ cat > repo/.hg/bundle2maker << EOF
474 $ cat > repo/.hg/bundle2maker << EOF
475 > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle4.hg'}
475 > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle4.hg'}
476 > EOF
476 > EOF
477 $ hg pull -R clone ssh://user@dummy/repo
477 $ hg pull -R clone ssh://user@dummy/repo
478 pulling from ssh://user@dummy/repo
478 pulling from ssh://user@dummy/repo
479 searching for changes
479 searching for changes
480 remote: remote-changegroup
480 remote: remote-changegroup
481 abort: remote-changegroup: missing "size" param
481 abort: remote-changegroup: missing "size" param
482 [255]
482 [255]
483
483
484 Invalid size
484 Invalid size
485
485
486 $ cat > repo/.hg/bundle2maker << EOF
486 $ cat > repo/.hg/bundle2maker << EOF
487 > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle4.hg', 'size': 'foo'}
487 > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle4.hg', 'size': 'foo'}
488 > EOF
488 > EOF
489 $ hg pull -R clone ssh://user@dummy/repo
489 $ hg pull -R clone ssh://user@dummy/repo
490 pulling from ssh://user@dummy/repo
490 pulling from ssh://user@dummy/repo
491 searching for changes
491 searching for changes
492 remote: remote-changegroup
492 remote: remote-changegroup
493 abort: remote-changegroup: invalid value for param "size"
493 abort: remote-changegroup: invalid value for param "size"
494 [255]
494 [255]
495
495
496 Size mismatch
496 Size mismatch
497
497
498 $ cat > repo/.hg/bundle2maker << EOF
498 $ cat > repo/.hg/bundle2maker << EOF
499 > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle4.hg', 'size': 42}
499 > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle4.hg', 'size': 42}
500 > EOF
500 > EOF
501 $ hg pull -R clone ssh://user@dummy/repo
501 $ hg pull -R clone ssh://user@dummy/repo
502 pulling from ssh://user@dummy/repo
502 pulling from ssh://user@dummy/repo
503 searching for changes
503 searching for changes
504 remote: remote-changegroup
504 remote: remote-changegroup
505 adding changesets
505 adding changesets
506 adding manifests
506 adding manifests
507 adding file changes
507 adding file changes
508 added 2 changesets with 2 changes to 2 files (+1 heads)
508 added 2 changesets with 2 changes to 2 files (+1 heads)
509 transaction abort!
509 transaction abort!
510 rollback completed
510 rollback completed
511 abort: bundle at http://localhost:$HGPORT/bundle4.hg is corrupted:
511 abort: bundle at http://localhost:$HGPORT/bundle4.hg is corrupted:
512 size mismatch: expected 42, got 581
512 size mismatch: expected 42, got 581
513 [255]
513 [255]
514
514
515 Unknown digest
515 Unknown digest
516
516
517 $ cat > repo/.hg/bundle2maker << EOF
517 $ cat > repo/.hg/bundle2maker << EOF
518 > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle4.hg', 'size': 581, 'digests': 'foo', 'digest:foo': 'bar'}
518 > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle4.hg', 'size': 581, 'digests': 'foo', 'digest:foo': 'bar'}
519 > EOF
519 > EOF
520 $ hg pull -R clone ssh://user@dummy/repo
520 $ hg pull -R clone ssh://user@dummy/repo
521 pulling from ssh://user@dummy/repo
521 pulling from ssh://user@dummy/repo
522 searching for changes
522 searching for changes
523 remote: remote-changegroup
523 remote: remote-changegroup
524 abort: missing support for remote-changegroup - digest:foo
524 abort: missing support for remote-changegroup - digest:foo
525 [255]
525 [255]
526
526
527 Missing digest
527 Missing digest
528
528
529 $ cat > repo/.hg/bundle2maker << EOF
529 $ cat > repo/.hg/bundle2maker << EOF
530 > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle4.hg', 'size': 581, 'digests': 'sha1'}
530 > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle4.hg', 'size': 581, 'digests': 'sha1'}
531 > EOF
531 > EOF
532 $ hg pull -R clone ssh://user@dummy/repo
532 $ hg pull -R clone ssh://user@dummy/repo
533 pulling from ssh://user@dummy/repo
533 pulling from ssh://user@dummy/repo
534 searching for changes
534 searching for changes
535 remote: remote-changegroup
535 remote: remote-changegroup
536 abort: remote-changegroup: missing "digest:sha1" param
536 abort: remote-changegroup: missing "digest:sha1" param
537 [255]
537 [255]
538
538
539 Not an HTTP url
539 Not an HTTP url
540
540
541 $ cat > repo/.hg/bundle2maker << EOF
541 $ cat > repo/.hg/bundle2maker << EOF
542 > raw-remote-changegroup {'url': 'ssh://localhost:$HGPORT/bundle4.hg', 'size': 581}
542 > raw-remote-changegroup {'url': 'ssh://localhost:$HGPORT/bundle4.hg', 'size': 581}
543 > EOF
543 > EOF
544 $ hg pull -R clone ssh://user@dummy/repo
544 $ hg pull -R clone ssh://user@dummy/repo
545 pulling from ssh://user@dummy/repo
545 pulling from ssh://user@dummy/repo
546 searching for changes
546 searching for changes
547 remote: remote-changegroup
547 remote: remote-changegroup
548 abort: remote-changegroup does not support ssh urls
548 abort: remote-changegroup does not support ssh urls
549 [255]
549 [255]
550
550
551 Not a bundle
551 Not a bundle
552
552
553 $ cat > notbundle.hg << EOF
553 $ cat > notbundle.hg << EOF
554 > foo
554 > foo
555 > EOF
555 > EOF
556 $ cat > repo/.hg/bundle2maker << EOF
556 $ cat > repo/.hg/bundle2maker << EOF
557 > remote-changegroup http://localhost:$HGPORT/notbundle.hg notbundle.hg
557 > remote-changegroup http://localhost:$HGPORT/notbundle.hg notbundle.hg
558 > EOF
558 > EOF
559 $ hg pull -R clone ssh://user@dummy/repo
559 $ hg pull -R clone ssh://user@dummy/repo
560 pulling from ssh://user@dummy/repo
560 pulling from ssh://user@dummy/repo
561 searching for changes
561 searching for changes
562 remote: remote-changegroup
562 remote: remote-changegroup
563 abort: http://localhost:$HGPORT/notbundle.hg: not a Mercurial bundle
563 abort: http://localhost:$HGPORT/notbundle.hg: not a Mercurial bundle
564 [255]
564 [255]
565
565
566 Not a bundle 1.0
566 Not a bundle 1.0
567
567
568 $ cat > notbundle10.hg << EOF
568 $ cat > notbundle10.hg << EOF
569 > HG20
569 > HG20
570 > EOF
570 > EOF
571 $ cat > repo/.hg/bundle2maker << EOF
571 $ cat > repo/.hg/bundle2maker << EOF
572 > remote-changegroup http://localhost:$HGPORT/notbundle10.hg notbundle10.hg
572 > remote-changegroup http://localhost:$HGPORT/notbundle10.hg notbundle10.hg
573 > EOF
573 > EOF
574 $ hg pull -R clone ssh://user@dummy/repo
574 $ hg pull -R clone ssh://user@dummy/repo
575 pulling from ssh://user@dummy/repo
575 pulling from ssh://user@dummy/repo
576 searching for changes
576 searching for changes
577 remote: remote-changegroup
577 remote: remote-changegroup
578 abort: http://localhost:$HGPORT/notbundle10.hg: not a bundle version 1.0
578 abort: http://localhost:$HGPORT/notbundle10.hg: not a bundle version 1.0
579 [255]
579 [255]
580
580
581 $ hg -R clone log -G
581 $ hg -R clone log -G
582 @ 2:5fddd98957c8 public Nicolas Dumazet <nicdumz.commits@gmail.com> C
582 @ 2:5fddd98957c8 public Nicolas Dumazet <nicdumz.commits@gmail.com> C
583 |
583 |
584 o 1:42ccdea3bb16 public Nicolas Dumazet <nicdumz.commits@gmail.com> B
584 o 1:42ccdea3bb16 public Nicolas Dumazet <nicdumz.commits@gmail.com> B
585 |
585 |
586 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
586 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
587
587
588 $ rm -rf clone
588 $ rm -rf clone
589
589
590 $ killdaemons.py
590 $ killdaemons.py
General Comments 0
You need to be logged in to leave comments. Login now