localrepo: allow narrowmatch() to accept matcher to intersect with...
Martin von Zweigbergk
r40437:4fd0fac4 default
@@ -1,1362 +1,1361 b''
# changegroup.py - Mercurial changegroup manipulation functions
#
# Copyright 2006 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import os
import struct
import weakref

from .i18n import _
from .node import (
    hex,
    nullid,
    nullrev,
    short,
)

from . import (
    error,
    match as matchmod,
    mdiff,
    phases,
    pycompat,
    repository,
    util,
)

_CHANGEGROUPV1_DELTA_HEADER = struct.Struct("20s20s20s20s")
_CHANGEGROUPV2_DELTA_HEADER = struct.Struct("20s20s20s20s20s")
_CHANGEGROUPV3_DELTA_HEADER = struct.Struct(">20s20s20s20s20sH")

LFS_REQUIREMENT = 'lfs'

readexactly = util.readexactly

def getchunk(stream):
    """return the next chunk from stream as a string"""
    d = readexactly(stream, 4)
    l = struct.unpack(">l", d)[0]
    if l <= 4:
        if l:
            raise error.Abort(_("invalid chunk length %d") % l)
        return ""
    return readexactly(stream, l - 4)

def chunkheader(length):
    """return a changegroup chunk header (string)"""
    return struct.pack(">l", length + 4)

def closechunk():
    """return a changegroup chunk header (string) for a zero-length chunk"""
    return struct.pack(">l", 0)

def _fileheader(path):
    """Obtain a changegroup chunk header for a named path."""
    return chunkheader(len(path)) + path

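# Editor's illustrative sketch (not part of the original module): the helpers
# above frame each chunk as a 4-byte big-endian length that counts the four
# header bytes themselves, followed by the payload; a zero length terminates a
# group. The stdlib-only round-trip below mirrors chunkheader()/getchunk().
def _demo_chunk_framing(payload=b'example payload'):
    import io
    framed = struct.pack(">l", len(payload) + 4) + payload    # chunkheader() + data
    stream = io.BytesIO(framed + struct.pack(">l", 0))        # closechunk() terminator
    length = struct.unpack(">l", stream.read(4))[0]
    assert stream.read(length - 4) == payload                 # same math as getchunk()
    assert struct.unpack(">l", stream.read(4))[0] == 0        # end-of-group marker
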
def writechunks(ui, chunks, filename, vfs=None):
    """Write chunks to a file and return its filename.

    The stream is assumed to be a bundle file.
    Existing files will not be overwritten.
    If no filename is specified, a temporary file is created.
    """
    fh = None
    cleanup = None
    try:
        if filename:
            if vfs:
                fh = vfs.open(filename, "wb")
            else:
                # Increase default buffer size because default is usually
                # small (4k is common on Linux).
                fh = open(filename, "wb", 131072)
        else:
            fd, filename = pycompat.mkstemp(prefix="hg-bundle-", suffix=".hg")
            fh = os.fdopen(fd, r"wb")
        cleanup = filename
        for c in chunks:
            fh.write(c)
        cleanup = None
        return filename
    finally:
        if fh is not None:
            fh.close()
        if cleanup is not None:
            if filename and vfs:
                vfs.unlink(cleanup)
            else:
                os.unlink(cleanup)

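# Editor's illustrative sketch (not part of the original module): persisting an
# already-framed chunk iterable with writechunks(). In the code above `ui` is
# only passed through and never consulted, and with vfs=None a plain open() is
# used; the filename below is hypothetical.
def _demo_writechunks(ui=None):
    chunks = [chunkheader(len(b'data')) + b'data', closechunk()]
    return writechunks(ui, chunks, "demo-bundle.hg", vfs=None)
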
class cg1unpacker(object):
    """Unpacker for cg1 changegroup streams.

    A changegroup unpacker handles the framing of the revision data in
    the wire format. Most consumers will want to use the apply()
    method to add the changes from the changegroup to a repository.

    If you're forwarding a changegroup unmodified to another consumer,
    use getchunks(), which returns an iterator of changegroup
    chunks. This is mostly useful for cases where you need to know the
    data stream has ended by observing the end of the changegroup.

    deltachunk() is useful only if you're applying delta data. Most
    consumers should prefer apply() instead.

    A few other public methods exist. Those are used only for
    bundlerepo and some debug commands - their use is discouraged.
    """
    deltaheader = _CHANGEGROUPV1_DELTA_HEADER
    deltaheadersize = deltaheader.size
    version = '01'
    _grouplistcount = 1 # One list of files after the manifests

    def __init__(self, fh, alg, extras=None):
        if alg is None:
            alg = 'UN'
        if alg not in util.compengines.supportedbundletypes:
            raise error.Abort(_('unknown stream compression type: %s')
                              % alg)
        if alg == 'BZ':
            alg = '_truncatedBZ'

        compengine = util.compengines.forbundletype(alg)
        self._stream = compengine.decompressorreader(fh)
        self._type = alg
        self.extras = extras or {}
        self.callback = None

    # These methods (compressed, read, seek, tell) all appear to only
    # be used by bundlerepo, but it's a little hard to tell.
    def compressed(self):
        return self._type is not None and self._type != 'UN'
    def read(self, l):
        return self._stream.read(l)
    def seek(self, pos):
        return self._stream.seek(pos)
    def tell(self):
        return self._stream.tell()
    def close(self):
        return self._stream.close()

    def _chunklength(self):
        d = readexactly(self._stream, 4)
        l = struct.unpack(">l", d)[0]
        if l <= 4:
            if l:
                raise error.Abort(_("invalid chunk length %d") % l)
            return 0
        if self.callback:
            self.callback()
        return l - 4

    def changelogheader(self):
        """v10 does not have a changelog header chunk"""
        return {}

    def manifestheader(self):
        """v10 does not have a manifest header chunk"""
        return {}

    def filelogheader(self):
        """return the header of the filelogs chunk, v10 only has the filename"""
        l = self._chunklength()
        if not l:
            return {}
        fname = readexactly(self._stream, l)
        return {'filename': fname}

    def _deltaheader(self, headertuple, prevnode):
        node, p1, p2, cs = headertuple
        if prevnode is None:
            deltabase = p1
        else:
            deltabase = prevnode
        flags = 0
        return node, p1, p2, deltabase, cs, flags

    def deltachunk(self, prevnode):
        l = self._chunklength()
        if not l:
            return {}
        headerdata = readexactly(self._stream, self.deltaheadersize)
        header = self.deltaheader.unpack(headerdata)
        delta = readexactly(self._stream, l - self.deltaheadersize)
        node, p1, p2, deltabase, cs, flags = self._deltaheader(header, prevnode)
        return (node, p1, p2, cs, deltabase, delta, flags)

    def getchunks(self):
        """returns all the chunks contained in the bundle

        Used when you need to forward the binary stream to a file or another
        network API. To do so, it parses the changegroup data; otherwise it
        would block in the sshrepo case because it doesn't know where the
        stream ends.
        """
        # For changegroup 1 and 2, we expect 3 parts: changelog, manifestlog,
        # and a list of filelogs. For changegroup 3, we expect 4 parts:
        # changelog, manifestlog, a list of tree manifestlogs, and a list of
        # filelogs.
        #
        # Changelog and manifestlog parts are terminated with empty chunks. The
        # tree and file parts are a list of entry sections. Each entry section
        # is a series of chunks terminating in an empty chunk. The list of these
        # entry sections is terminated in yet another empty chunk, so we know
        # we've reached the end of the tree/file list when we reach an empty
        # chunk that was preceded by no non-empty chunks.
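        #
        # Schematically (editor's illustration, not in the original source),
        # a cg3 stream therefore looks like:
        #
        #   <changelog chunks...> <empty>
        #   <manifest chunks...>  <empty>
        #   [<tree header> <chunks...> <empty>]* <empty>
        #   [<file header> <chunks...> <empty>]* <empty>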

        parts = 0
        while parts < 2 + self._grouplistcount:
            noentries = True
            while True:
                chunk = getchunk(self)
                if not chunk:
                    # The first two empty chunks represent the end of the
                    # changelog and the manifestlog portions. The remaining
                    # empty chunks represent either A) the end of individual
                    # tree or file entries in the file list, or B) the end of
                    # the entire list. It's the end of the entire list if there
                    # were no entries (i.e. noentries is True).
                    if parts < 2:
                        parts += 1
                    elif noentries:
                        parts += 1
                    break
                noentries = False
                yield chunkheader(len(chunk))
                pos = 0
                while pos < len(chunk):
                    next = pos + 2**20
                    yield chunk[pos:next]
                    pos = next
            yield closechunk()

    def _unpackmanifests(self, repo, revmap, trp, prog):
        self.callback = prog.increment
        # no need to check for empty manifest group here:
        # if the result of the merge of 1 and 2 is the same in 3 and 4,
        # no new manifest will be created and the manifest group will
        # be empty during the pull
        self.manifestheader()
        deltas = self.deltaiter()
        repo.manifestlog.getstorage(b'').addgroup(deltas, revmap, trp)
        prog.complete()
        self.callback = None

    def apply(self, repo, tr, srctype, url, targetphase=phases.draft,
              expectedtotal=None):
        """Add the changegroup returned by source.read() to this repo.
        srctype is a string like 'push', 'pull', or 'unbundle'. url is
        the URL of the repo where this changegroup is coming from.

        Return an integer summarizing the change to this repo:
        - nothing changed or no source: 0
        - more heads than before: 1+added heads (2..n)
        - fewer heads than before: -1-removed heads (-2..-n)
        - number of heads stays the same: 1
        """
        repo = repo.unfiltered()
        def csmap(x):
            repo.ui.debug("add changeset %s\n" % short(x))
            return len(cl)

        def revmap(x):
            return cl.rev(x)

        changesets = files = revisions = 0

        try:
            # The transaction may already carry source information. In this
            # case we use the top level data. We overwrite the argument
            # because we need to use the top level value (if they exist)
            # in this function.
            srctype = tr.hookargs.setdefault('source', srctype)
            url = tr.hookargs.setdefault('url', url)
            repo.hook('prechangegroup',
                      throw=True, **pycompat.strkwargs(tr.hookargs))

            # write changelog data to temp files so concurrent readers
            # will not see an inconsistent view
            cl = repo.changelog
            cl.delayupdate(tr)
            oldheads = set(cl.heads())

            trp = weakref.proxy(tr)
            # pull off the changeset group
            repo.ui.status(_("adding changesets\n"))
            clstart = len(cl)
            progress = repo.ui.makeprogress(_('changesets'), unit=_('chunks'),
                                            total=expectedtotal)
            self.callback = progress.increment

            efiles = set()
            def onchangelog(cl, node):
                efiles.update(cl.readfiles(node))

            self.changelogheader()
            deltas = self.deltaiter()
            cgnodes = cl.addgroup(deltas, csmap, trp, addrevisioncb=onchangelog)
            efiles = len(efiles)

            if not cgnodes:
                repo.ui.develwarn('applied empty changelog from changegroup',
                                  config='warn-empty-changegroup')
            clend = len(cl)
            changesets = clend - clstart
            progress.complete()
            self.callback = None

            # pull off the manifest group
            repo.ui.status(_("adding manifests\n"))
            # We know that we'll never have more manifests than we had
            # changesets.
            progress = repo.ui.makeprogress(_('manifests'), unit=_('chunks'),
                                            total=changesets)
            self._unpackmanifests(repo, revmap, trp, progress)

            needfiles = {}
            if repo.ui.configbool('server', 'validate'):
                cl = repo.changelog
                ml = repo.manifestlog
                # validate incoming csets have their manifests
                for cset in pycompat.xrange(clstart, clend):
                    mfnode = cl.changelogrevision(cset).manifest
                    mfest = ml[mfnode].readdelta()
                    # store file cgnodes we must see
                    for f, n in mfest.iteritems():
                        needfiles.setdefault(f, set()).add(n)

            # process the files
            repo.ui.status(_("adding file changes\n"))
            newrevs, newfiles = _addchangegroupfiles(
                repo, self, revmap, trp, efiles, needfiles)
            revisions += newrevs
            files += newfiles

            deltaheads = 0
            if oldheads:
                heads = cl.heads()
                deltaheads = len(heads) - len(oldheads)
                for h in heads:
                    if h not in oldheads and repo[h].closesbranch():
                        deltaheads -= 1
            htext = ""
            if deltaheads:
                htext = _(" (%+d heads)") % deltaheads

            repo.ui.status(_("added %d changesets"
                             " with %d changes to %d files%s\n")
                           % (changesets, revisions, files, htext))
            repo.invalidatevolatilesets()

            if changesets > 0:
                if 'node' not in tr.hookargs:
                    tr.hookargs['node'] = hex(cl.node(clstart))
                    tr.hookargs['node_last'] = hex(cl.node(clend - 1))
                    hookargs = dict(tr.hookargs)
                else:
                    hookargs = dict(tr.hookargs)
                    hookargs['node'] = hex(cl.node(clstart))
                    hookargs['node_last'] = hex(cl.node(clend - 1))
                repo.hook('pretxnchangegroup',
                          throw=True, **pycompat.strkwargs(hookargs))

            added = [cl.node(r) for r in pycompat.xrange(clstart, clend)]
            phaseall = None
            if srctype in ('push', 'serve'):
                # Old servers cannot push the boundary themselves.
                # New servers won't push the boundary if changeset already
                # exists locally as secret
                #
                # We should not use added here but the list of all changes in
                # the bundle
                if repo.publishing():
                    targetphase = phaseall = phases.public
                else:
                    # closer target phase computation

                    # Those changesets have been pushed from the
                    # outside, their phases are going to be pushed
                    # alongside. Therefore `targetphase` is
                    # ignored.
                    targetphase = phaseall = phases.draft
            if added:
                phases.registernew(repo, tr, targetphase, added)
            if phaseall is not None:
                phases.advanceboundary(repo, tr, phaseall, cgnodes)

            if changesets > 0:

                def runhooks():
                    # These hooks run when the lock releases, not when the
                    # transaction closes. So it's possible for the changelog
                    # to have changed since we last saw it.
                    if clstart >= len(repo):
                        return

                    repo.hook("changegroup", **pycompat.strkwargs(hookargs))

                    for n in added:
                        args = hookargs.copy()
                        args['node'] = hex(n)
                        del args['node_last']
                        repo.hook("incoming", **pycompat.strkwargs(args))

                    newheads = [h for h in repo.heads()
                                if h not in oldheads]
                    repo.ui.log("incoming",
                                "%d incoming changes - new heads: %s\n",
                                len(added),
                                ', '.join([hex(c[:6]) for c in newheads]))

                tr.addpostclose('changegroup-runhooks-%020i' % clstart,
                                lambda tr: repo._afterlock(runhooks))
        finally:
            repo.ui.flush()
        # never return 0 here:
        if deltaheads < 0:
            ret = deltaheads - 1
        else:
            ret = deltaheads + 1
        return ret

    def deltaiter(self):
        """
        returns an iterator of the deltas in this changegroup

        Useful for passing to the underlying storage system to be stored.
        """
        chain = None
        for chunkdata in iter(lambda: self.deltachunk(chain), {}):
            # Chunkdata: (node, p1, p2, cs, deltabase, delta, flags)
            yield chunkdata
            chain = chunkdata[0]

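# Editor's illustrative sketch (not part of the original module): reading the
# changelog deltas out of an uncompressed cg1 stream with the class above. The
# file object `fh` is hypothetical and must be positioned at the start of the
# changegroup payload (after any bundle headers).
def _demo_iter_changelog_deltas(fh):
    unpacker = cg1unpacker(fh, 'UN')
    unpacker.changelogheader()            # cg1 has no changelog header chunk
    for node, p1, p2, cs, deltabase, delta, flags in unpacker.deltaiter():
        # each tuple matches the layout documented in deltaiter()
        yield node, deltabase, len(delta)
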
class cg2unpacker(cg1unpacker):
    """Unpacker for cg2 streams.

    cg2 streams add support for generaldelta, so the delta header
    format is slightly different. All other features about the data
    remain the same.
    """
    deltaheader = _CHANGEGROUPV2_DELTA_HEADER
    deltaheadersize = deltaheader.size
    version = '02'

    def _deltaheader(self, headertuple, prevnode):
        node, p1, p2, deltabase, cs = headertuple
        flags = 0
        return node, p1, p2, deltabase, cs, flags

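# Editor's illustrative sketch (not part of the original module): the wire
# layout of a cg3 delta header is exactly the struct defined at the top of
# this file - five 20-byte nodes followed by a 16-bit flags field. Packing and
# unpacking a dummy header shows the field order consumed by _deltaheader().
def _demo_cg3_deltaheader():
    node, p1, p2, deltabase, cs = (b'\x11' * 20, b'\x22' * 20, b'\x33' * 20,
                                   b'\x44' * 20, b'\x55' * 20)
    raw = _CHANGEGROUPV3_DELTA_HEADER.pack(node, p1, p2, deltabase, cs, 0)
    assert len(raw) == _CHANGEGROUPV3_DELTA_HEADER.size == 102
    assert _CHANGEGROUPV3_DELTA_HEADER.unpack(raw)[-1] == 0   # flags field
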
class cg3unpacker(cg2unpacker):
    """Unpacker for cg3 streams.

    cg3 streams add support for exchanging treemanifests and revlog
    flags. It adds the revlog flags to the delta header and an empty chunk
    separating manifests and files.
    """
    deltaheader = _CHANGEGROUPV3_DELTA_HEADER
    deltaheadersize = deltaheader.size
    version = '03'
    _grouplistcount = 2 # One list of manifests and one list of files

    def _deltaheader(self, headertuple, prevnode):
        node, p1, p2, deltabase, cs, flags = headertuple
        return node, p1, p2, deltabase, cs, flags

    def _unpackmanifests(self, repo, revmap, trp, prog):
        super(cg3unpacker, self)._unpackmanifests(repo, revmap, trp, prog)
        for chunkdata in iter(self.filelogheader, {}):
            # If we get here, there are directory manifests in the changegroup
            d = chunkdata["filename"]
            repo.ui.debug("adding %s revisions\n" % d)
            deltas = self.deltaiter()
            if not repo.manifestlog.getstorage(d).addgroup(deltas, revmap, trp):
                raise error.Abort(_("received dir revlog group is empty"))

class headerlessfixup(object):
    def __init__(self, fh, h):
        self._h = h
        self._fh = fh
    def read(self, n):
        if self._h:
            d, self._h = self._h[:n], self._h[n:]
            if len(d) < n:
                d += readexactly(self._fh, n - len(d))
            return d
        return readexactly(self._fh, n)

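# Editor's illustrative sketch (not part of the original module): the wrapper
# above re-prepends header bytes that a caller has already consumed while
# sniffing the stream type, so downstream readers see an intact stream.
def _demo_headerlessfixup():
    import io
    fh = io.BytesIO(b'HG20rest-of-stream')
    sniffed = fh.read(4)                       # caller peeked at the magic
    fixed = headerlessfixup(fh, sniffed)
    assert fixed.read(4) == b'HG20'            # served from the saved header
    assert fixed.read(4) == b'rest'            # then from the real stream
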
def _revisiondeltatochunks(delta, headerfn):
    """Serialize a revisiondelta to changegroup chunks."""

    # The captured revision delta may be encoded as a delta against
    # a base revision or as a full revision. The changegroup format
    # requires that everything on the wire be deltas. So for full
    # revisions, we need to invent a header that says to rewrite
    # data.

    if delta.delta is not None:
        prefix, data = b'', delta.delta
    elif delta.basenode == nullid:
        data = delta.revision
        prefix = mdiff.trivialdiffheader(len(data))
    else:
        data = delta.revision
        prefix = mdiff.replacediffheader(delta.baserevisionsize,
                                         len(data))

    meta = headerfn(delta)

    yield chunkheader(len(meta) + len(prefix) + len(data))
    yield meta
    if prefix:
        yield prefix
    yield data

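# Editor's illustrative sketch (not part of the original module): feeding the
# serializer above a minimal stand-in delta object. The stand-in only carries
# the attributes the function actually reads, and the no-op header function is
# hypothetical - real callers pass the packer's _builddeltaheader.
def _demo_revisiondeltatochunks():
    import types
    fakedelta = types.SimpleNamespace(delta=b'patch bytes', basenode=None,
                                      revision=None, baserevisionsize=None)
    chunks = list(_revisiondeltatochunks(fakedelta, lambda d: b''))
    # chunkheader(), empty meta, then the delta payload itself
    assert chunks[0] == chunkheader(len(b'patch bytes'))
    assert chunks[-1] == b'patch bytes'
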
520 def _sortnodesellipsis(store, nodes, cl, lookup):
520 def _sortnodesellipsis(store, nodes, cl, lookup):
521 """Sort nodes for changegroup generation."""
521 """Sort nodes for changegroup generation."""
522 # Ellipses serving mode.
522 # Ellipses serving mode.
523 #
523 #
524 # In a perfect world, we'd generate better ellipsis-ified graphs
524 # In a perfect world, we'd generate better ellipsis-ified graphs
525 # for non-changelog revlogs. In practice, we haven't started doing
525 # for non-changelog revlogs. In practice, we haven't started doing
526 # that yet, so the resulting DAGs for the manifestlog and filelogs
526 # that yet, so the resulting DAGs for the manifestlog and filelogs
527 # are actually full of bogus parentage on all the ellipsis
527 # are actually full of bogus parentage on all the ellipsis
528 # nodes. This has the side effect that, while the contents are
528 # nodes. This has the side effect that, while the contents are
529 # correct, the individual DAGs might be completely out of whack in
529 # correct, the individual DAGs might be completely out of whack in
530 # a case like 882681bc3166 and its ancestors (back about 10
530 # a case like 882681bc3166 and its ancestors (back about 10
531 # revisions or so) in the main hg repo.
531 # revisions or so) in the main hg repo.
532 #
532 #
533 # The one invariant we *know* holds is that the new (potentially
533 # The one invariant we *know* holds is that the new (potentially
534 # bogus) DAG shape will be valid if we order the nodes in the
534 # bogus) DAG shape will be valid if we order the nodes in the
535 # order that they're introduced in dramatis personae by the
535 # order that they're introduced in dramatis personae by the
536 # changelog, so what we do is we sort the non-changelog histories
536 # changelog, so what we do is we sort the non-changelog histories
537 # by the order in which they are used by the changelog.
537 # by the order in which they are used by the changelog.
538 key = lambda n: cl.rev(lookup(n))
538 key = lambda n: cl.rev(lookup(n))
539 return sorted(nodes, key=key)
539 return sorted(nodes, key=key)
540
540
541 def _resolvenarrowrevisioninfo(cl, store, ischangelog, rev, linkrev,
541 def _resolvenarrowrevisioninfo(cl, store, ischangelog, rev, linkrev,
542 linknode, clrevtolocalrev, fullclnodes,
542 linknode, clrevtolocalrev, fullclnodes,
543 precomputedellipsis):
543 precomputedellipsis):
544 linkparents = precomputedellipsis[linkrev]
544 linkparents = precomputedellipsis[linkrev]
545 def local(clrev):
545 def local(clrev):
546 """Turn a changelog revnum into a local revnum.
546 """Turn a changelog revnum into a local revnum.
547
547
548 The ellipsis dag is stored as revnums on the changelog,
548 The ellipsis dag is stored as revnums on the changelog,
549 but when we're producing ellipsis entries for
549 but when we're producing ellipsis entries for
550 non-changelog revlogs, we need to turn those numbers into
550 non-changelog revlogs, we need to turn those numbers into
551 something local. This does that for us, and during the
551 something local. This does that for us, and during the
552 changelog sending phase will also expand the stored
552 changelog sending phase will also expand the stored
553 mappings as needed.
553 mappings as needed.
554 """
554 """
555 if clrev == nullrev:
555 if clrev == nullrev:
556 return nullrev
556 return nullrev
557
557
558 if ischangelog:
558 if ischangelog:
559 return clrev
559 return clrev
560
560
561 # Walk the ellipsis-ized changelog breadth-first looking for a
561 # Walk the ellipsis-ized changelog breadth-first looking for a
562 # change that has been linked from the current revlog.
562 # change that has been linked from the current revlog.
563 #
563 #
564 # For a flat manifest revlog only a single step should be necessary
564 # For a flat manifest revlog only a single step should be necessary
565 # as all relevant changelog entries are relevant to the flat
565 # as all relevant changelog entries are relevant to the flat
566 # manifest.
566 # manifest.
567 #
567 #
568 # For a filelog or tree manifest dirlog however not every changelog
568 # For a filelog or tree manifest dirlog however not every changelog
569 # entry will have been relevant, so we need to skip some changelog
569 # entry will have been relevant, so we need to skip some changelog
570 # nodes even after ellipsis-izing.
570 # nodes even after ellipsis-izing.
571 walk = [clrev]
571 walk = [clrev]
572 while walk:
572 while walk:
573 p = walk[0]
573 p = walk[0]
574 walk = walk[1:]
574 walk = walk[1:]
575 if p in clrevtolocalrev:
575 if p in clrevtolocalrev:
576 return clrevtolocalrev[p]
576 return clrevtolocalrev[p]
577 elif p in fullclnodes:
577 elif p in fullclnodes:
578 walk.extend([pp for pp in cl.parentrevs(p)
578 walk.extend([pp for pp in cl.parentrevs(p)
579 if pp != nullrev])
579 if pp != nullrev])
580 elif p in precomputedellipsis:
580 elif p in precomputedellipsis:
581 walk.extend([pp for pp in precomputedellipsis[p]
581 walk.extend([pp for pp in precomputedellipsis[p]
582 if pp != nullrev])
582 if pp != nullrev])
583 else:
583 else:
584 # In this case, we've got an ellipsis with parents
584 # In this case, we've got an ellipsis with parents
585 # outside the current bundle (likely an
585 # outside the current bundle (likely an
586 # incremental pull). We "know" that we can use the
586 # incremental pull). We "know" that we can use the
587 # value of this same revlog at whatever revision
587 # value of this same revlog at whatever revision
588 # is pointed to by linknode. "Know" is in scare
588 # is pointed to by linknode. "Know" is in scare
589 # quotes because I haven't done enough examination
589 # quotes because I haven't done enough examination
590 # of edge cases to convince myself this is really
590 # of edge cases to convince myself this is really
591 # a fact - it works for all the (admittedly
591 # a fact - it works for all the (admittedly
592 # thorough) cases in our testsuite, but I would be
592 # thorough) cases in our testsuite, but I would be
593 # somewhat unsurprised to find a case in the wild
593 # somewhat unsurprised to find a case in the wild
594 # where this breaks down a bit. That said, I don't
594 # where this breaks down a bit. That said, I don't
595 # know if it would hurt anything.
595 # know if it would hurt anything.
596 for i in pycompat.xrange(rev, 0, -1):
596 for i in pycompat.xrange(rev, 0, -1):
597 if store.linkrev(i) == clrev:
597 if store.linkrev(i) == clrev:
598 return i
598 return i
599 # We failed to resolve a parent for this node, so
599 # We failed to resolve a parent for this node, so
600 # we crash the changegroup construction.
600 # we crash the changegroup construction.
601 raise error.Abort(
601 raise error.Abort(
602 'unable to resolve parent while packing %r %r'
602 'unable to resolve parent while packing %r %r'
603 ' for changeset %r' % (store.indexfile, rev, clrev))
603 ' for changeset %r' % (store.indexfile, rev, clrev))
604
604
605 return nullrev
605 return nullrev
606
606
607 if not linkparents or (
607 if not linkparents or (
608 store.parentrevs(rev) == (nullrev, nullrev)):
608 store.parentrevs(rev) == (nullrev, nullrev)):
609 p1, p2 = nullrev, nullrev
609 p1, p2 = nullrev, nullrev
610 elif len(linkparents) == 1:
610 elif len(linkparents) == 1:
611 p1, = sorted(local(p) for p in linkparents)
611 p1, = sorted(local(p) for p in linkparents)
612 p2 = nullrev
612 p2 = nullrev
613 else:
613 else:
614 p1, p2 = sorted(local(p) for p in linkparents)
614 p1, p2 = sorted(local(p) for p in linkparents)
615
615
616 p1node, p2node = store.node(p1), store.node(p2)
616 p1node, p2node = store.node(p1), store.node(p2)
617
617
618 return p1node, p2node, linknode
618 return p1node, p2node, linknode
619
619
620 def deltagroup(repo, store, nodes, ischangelog, lookup, forcedeltaparentprev,
620 def deltagroup(repo, store, nodes, ischangelog, lookup, forcedeltaparentprev,
621 topic=None,
621 topic=None,
622 ellipses=False, clrevtolocalrev=None, fullclnodes=None,
622 ellipses=False, clrevtolocalrev=None, fullclnodes=None,
623 precomputedellipsis=None):
623 precomputedellipsis=None):
624 """Calculate deltas for a set of revisions.
624 """Calculate deltas for a set of revisions.
625
625
626 Is a generator of ``revisiondelta`` instances.
626 Is a generator of ``revisiondelta`` instances.
627
627
628 If topic is not None, progress detail will be generated using this
628 If topic is not None, progress detail will be generated using this
629 topic name (e.g. changesets, manifests, etc).
629 topic name (e.g. changesets, manifests, etc).
630 """
630 """
631 if not nodes:
631 if not nodes:
632 return
632 return
633
633
634 cl = repo.changelog
634 cl = repo.changelog
635
635
636 if ischangelog:
636 if ischangelog:
637 # `hg log` shows changesets in storage order. To preserve order
637 # `hg log` shows changesets in storage order. To preserve order
638 # across clones, send out changesets in storage order.
638 # across clones, send out changesets in storage order.
639 nodesorder = 'storage'
639 nodesorder = 'storage'
640 elif ellipses:
640 elif ellipses:
641 nodes = _sortnodesellipsis(store, nodes, cl, lookup)
641 nodes = _sortnodesellipsis(store, nodes, cl, lookup)
642 nodesorder = 'nodes'
642 nodesorder = 'nodes'
643 else:
643 else:
644 nodesorder = None
644 nodesorder = None
645
645
646 # Perform ellipses filtering and revision massaging. We do this before
646 # Perform ellipses filtering and revision massaging. We do this before
647 # emitrevisions() because a) filtering out revisions creates less work
647 # emitrevisions() because a) filtering out revisions creates less work
648 # for emitrevisions() b) dropping revisions would break emitrevisions()'s
648 # for emitrevisions() b) dropping revisions would break emitrevisions()'s
649 # assumptions about delta choices and we would possibly send a delta
649 # assumptions about delta choices and we would possibly send a delta
650 # referencing a missing base revision.
650 # referencing a missing base revision.
651 #
651 #
652 # Also, calling lookup() has side-effects with regards to populating
652 # Also, calling lookup() has side-effects with regards to populating
653 # data structures. If we don't call lookup() for each node or if we call
653 # data structures. If we don't call lookup() for each node or if we call
654 # lookup() after the first pass through each node, things can break -
654 # lookup() after the first pass through each node, things can break -
655 # possibly intermittently depending on the python hash seed! For that
655 # possibly intermittently depending on the python hash seed! For that
656 # reason, we store a mapping of all linknodes during the initial node
656 # reason, we store a mapping of all linknodes during the initial node
657 # pass rather than use lookup() on the output side.
657 # pass rather than use lookup() on the output side.
658 if ellipses:
658 if ellipses:
659 filtered = []
659 filtered = []
660 adjustedparents = {}
660 adjustedparents = {}
661 linknodes = {}
661 linknodes = {}
662
662
663 for node in nodes:
663 for node in nodes:
664 rev = store.rev(node)
664 rev = store.rev(node)
665 linknode = lookup(node)
665 linknode = lookup(node)
666 linkrev = cl.rev(linknode)
666 linkrev = cl.rev(linknode)
667 clrevtolocalrev[linkrev] = rev
667 clrevtolocalrev[linkrev] = rev
668
668
669 # If linknode is in fullclnodes, it means the corresponding
669 # If linknode is in fullclnodes, it means the corresponding
670 # changeset was a full changeset and is being sent unaltered.
670 # changeset was a full changeset and is being sent unaltered.
671 if linknode in fullclnodes:
671 if linknode in fullclnodes:
672 linknodes[node] = linknode
672 linknodes[node] = linknode
673
673
674 # If the corresponding changeset wasn't in the set computed
674 # If the corresponding changeset wasn't in the set computed
675 # as relevant to us, it should be dropped outright.
675 # as relevant to us, it should be dropped outright.
676 elif linkrev not in precomputedellipsis:
676 elif linkrev not in precomputedellipsis:
677 continue
677 continue
678
678
679 else:
679 else:
680 # We could probably do this later and avoid the dict
680 # We could probably do this later and avoid the dict
681 # holding state. But it likely doesn't matter.
681 # holding state. But it likely doesn't matter.
682 p1node, p2node, linknode = _resolvenarrowrevisioninfo(
682 p1node, p2node, linknode = _resolvenarrowrevisioninfo(
683 cl, store, ischangelog, rev, linkrev, linknode,
683 cl, store, ischangelog, rev, linkrev, linknode,
684 clrevtolocalrev, fullclnodes, precomputedellipsis)
684 clrevtolocalrev, fullclnodes, precomputedellipsis)
685
685
686 adjustedparents[node] = (p1node, p2node)
686 adjustedparents[node] = (p1node, p2node)
687 linknodes[node] = linknode
687 linknodes[node] = linknode
688
688
689 filtered.append(node)
689 filtered.append(node)
690
690
691 nodes = filtered
691 nodes = filtered
692
692
693 # We expect the first pass to be fast, so we only engage the progress
693 # We expect the first pass to be fast, so we only engage the progress
694 # meter for constructing the revision deltas.
694 # meter for constructing the revision deltas.
695 progress = None
695 progress = None
696 if topic is not None:
696 if topic is not None:
697 progress = repo.ui.makeprogress(topic, unit=_('chunks'),
697 progress = repo.ui.makeprogress(topic, unit=_('chunks'),
698 total=len(nodes))
698 total=len(nodes))
699
699
700 revisions = store.emitrevisions(
700 revisions = store.emitrevisions(
701 nodes,
701 nodes,
702 nodesorder=nodesorder,
702 nodesorder=nodesorder,
703 revisiondata=True,
703 revisiondata=True,
704 assumehaveparentrevisions=not ellipses,
704 assumehaveparentrevisions=not ellipses,
705 deltaprevious=forcedeltaparentprev)
705 deltaprevious=forcedeltaparentprev)
706
706
707 for i, revision in enumerate(revisions):
707 for i, revision in enumerate(revisions):
708 if progress:
708 if progress:
709 progress.update(i + 1)
709 progress.update(i + 1)
710
710
711 if ellipses:
711 if ellipses:
712 linknode = linknodes[revision.node]
712 linknode = linknodes[revision.node]
713
713
714 if revision.node in adjustedparents:
714 if revision.node in adjustedparents:
715 p1node, p2node = adjustedparents[revision.node]
715 p1node, p2node = adjustedparents[revision.node]
716 revision.p1node = p1node
716 revision.p1node = p1node
717 revision.p2node = p2node
717 revision.p2node = p2node
718 revision.flags |= repository.REVISION_FLAG_ELLIPSIS
718 revision.flags |= repository.REVISION_FLAG_ELLIPSIS
719
719
720 else:
720 else:
721 linknode = lookup(revision.node)
721 linknode = lookup(revision.node)
722
722
723 revision.linknode = linknode
723 revision.linknode = linknode
724 yield revision
724 yield revision
725
725
726 if progress:
726 if progress:
727 progress.complete()
727 progress.complete()
728
728
729 class cgpacker(object):
729 class cgpacker(object):
730 def __init__(self, repo, filematcher, version,
730 def __init__(self, repo, filematcher, version,
731 builddeltaheader, manifestsend,
731 builddeltaheader, manifestsend,
732 forcedeltaparentprev=False,
732 forcedeltaparentprev=False,
733 bundlecaps=None, ellipses=False,
733 bundlecaps=None, ellipses=False,
734 shallow=False, ellipsisroots=None, fullnodes=None):
734 shallow=False, ellipsisroots=None, fullnodes=None):
735 """Given a source repo, construct a bundler.
735 """Given a source repo, construct a bundler.
736
736
737 filematcher is a matcher that matches on files to include in the
737 filematcher is a matcher that matches on files to include in the
738 changegroup. Used to facilitate sparse changegroups.
738 changegroup. Used to facilitate sparse changegroups.
739
739
740 forcedeltaparentprev indicates whether delta parents must be against
740 forcedeltaparentprev indicates whether delta parents must be against
741 the previous revision in a delta group. This should only be used for
741 the previous revision in a delta group. This should only be used for
742 compatibility with changegroup version 1.
742 compatibility with changegroup version 1.
743
743
744 builddeltaheader is a callable that constructs the header for a group
744 builddeltaheader is a callable that constructs the header for a group
745 delta.
745 delta.
746
746
747 manifestsend is a chunk to send after manifests have been fully emitted.
747 manifestsend is a chunk to send after manifests have been fully emitted.
748
748
749 ellipses indicates whether ellipsis serving mode is enabled.
749 ellipses indicates whether ellipsis serving mode is enabled.
750
750
751 bundlecaps is optional and can be used to specify the set of
751 bundlecaps is optional and can be used to specify the set of
752 capabilities which can be used to build the bundle. While bundlecaps is
752 capabilities which can be used to build the bundle. While bundlecaps is
753 unused in core Mercurial, extensions rely on this feature to communicate
753 unused in core Mercurial, extensions rely on this feature to communicate
754 capabilities to customize the changegroup packer.
754 capabilities to customize the changegroup packer.
755
755
756 shallow indicates whether shallow data might be sent. The packer may
756 shallow indicates whether shallow data might be sent. The packer may
757 need to pack file contents not introduced by the changes being packed.
757 need to pack file contents not introduced by the changes being packed.
758
758
759 fullnodes is the set of changelog nodes which should not be ellipsis
759 fullnodes is the set of changelog nodes which should not be ellipsis
760 nodes. We store this rather than the set of nodes that should be
760 nodes. We store this rather than the set of nodes that should be
761 ellipsis because for very large histories we expect this to be
761 ellipsis because for very large histories we expect this to be
762 significantly smaller.
762 significantly smaller.
763 """
763 """
764 assert filematcher
764 assert filematcher
765 self._filematcher = filematcher
765 self._filematcher = filematcher
766
766
767 self.version = version
767 self.version = version
768 self._forcedeltaparentprev = forcedeltaparentprev
768 self._forcedeltaparentprev = forcedeltaparentprev
769 self._builddeltaheader = builddeltaheader
769 self._builddeltaheader = builddeltaheader
770 self._manifestsend = manifestsend
770 self._manifestsend = manifestsend
771 self._ellipses = ellipses
771 self._ellipses = ellipses
772
772
773 # Set of capabilities we can use to build the bundle.
773 # Set of capabilities we can use to build the bundle.
774 if bundlecaps is None:
774 if bundlecaps is None:
775 bundlecaps = set()
775 bundlecaps = set()
776 self._bundlecaps = bundlecaps
776 self._bundlecaps = bundlecaps
777 self._isshallow = shallow
777 self._isshallow = shallow
778 self._fullclnodes = fullnodes
778 self._fullclnodes = fullnodes
779
779
780 # Maps ellipsis revs to their roots at the changelog level.
780 # Maps ellipsis revs to their roots at the changelog level.
781 self._precomputedellipsis = ellipsisroots
781 self._precomputedellipsis = ellipsisroots
782
782
783 self._repo = repo
783 self._repo = repo
784
784
785 if self._repo.ui.verbose and not self._repo.ui.debugflag:
785 if self._repo.ui.verbose and not self._repo.ui.debugflag:
786 self._verbosenote = self._repo.ui.note
786 self._verbosenote = self._repo.ui.note
787 else:
787 else:
788 self._verbosenote = lambda s: None
788 self._verbosenote = lambda s: None
789
789
790 def generate(self, commonrevs, clnodes, fastpathlinkrev, source,
790 def generate(self, commonrevs, clnodes, fastpathlinkrev, source,
791 changelog=True):
791 changelog=True):
792 """Yield a sequence of changegroup byte chunks.
792 """Yield a sequence of changegroup byte chunks.
793 If changelog is False, changelog data won't be added to changegroup
793 If changelog is False, changelog data won't be added to changegroup
794 """
794 """
795
795
796 repo = self._repo
796 repo = self._repo
797 cl = repo.changelog
797 cl = repo.changelog
798
798
799 self._verbosenote(_('uncompressed size of bundle content:\n'))
799 self._verbosenote(_('uncompressed size of bundle content:\n'))
800 size = 0
800 size = 0
801
801
802 clstate, deltas = self._generatechangelog(cl, clnodes)
802 clstate, deltas = self._generatechangelog(cl, clnodes)
803 for delta in deltas:
803 for delta in deltas:
804 if changelog:
804 if changelog:
805 for chunk in _revisiondeltatochunks(delta,
805 for chunk in _revisiondeltatochunks(delta,
806 self._builddeltaheader):
806 self._builddeltaheader):
807 size += len(chunk)
807 size += len(chunk)
808 yield chunk
808 yield chunk
809
809
810 close = closechunk()
810 close = closechunk()
811 size += len(close)
811 size += len(close)
812 yield closechunk()
812 yield closechunk()
813
813
814 self._verbosenote(_('%8.i (changelog)\n') % size)
814 self._verbosenote(_('%8.i (changelog)\n') % size)
815
815
816 clrevorder = clstate['clrevorder']
816 clrevorder = clstate['clrevorder']
817 manifests = clstate['manifests']
817 manifests = clstate['manifests']
818 changedfiles = clstate['changedfiles']
818 changedfiles = clstate['changedfiles']
819
819
820 # We need to make sure that the linkrev in the changegroup refers to
820 # We need to make sure that the linkrev in the changegroup refers to
821 # the first changeset that introduced the manifest or file revision.
821 # the first changeset that introduced the manifest or file revision.
822 # The fastpath is usually safer than the slowpath, because the filelogs
822 # The fastpath is usually safer than the slowpath, because the filelogs
823 # are walked in revlog order.
823 # are walked in revlog order.
824 #
824 #
825 # When taking the slowpath when the manifest revlog uses generaldelta,
825 # When taking the slowpath when the manifest revlog uses generaldelta,
826 # the manifest may be walked in the "wrong" order. Without 'clrevorder',
826 # the manifest may be walked in the "wrong" order. Without 'clrevorder',
827 # we would get an incorrect linkrev (see fix in cc0ff93d0c0c).
827 # we would get an incorrect linkrev (see fix in cc0ff93d0c0c).
828 #
828 #
829 # When taking the fastpath, we are only vulnerable to reordering
829 # When taking the fastpath, we are only vulnerable to reordering
830 # of the changelog itself. The changelog never uses generaldelta and is
830 # of the changelog itself. The changelog never uses generaldelta and is
831 # never reordered. To handle this case, we simply take the slowpath,
831 # never reordered. To handle this case, we simply take the slowpath,
832 # which already has the 'clrevorder' logic. This was also fixed in
832 # which already has the 'clrevorder' logic. This was also fixed in
833 # cc0ff93d0c0c.
833 # cc0ff93d0c0c.
834
834
835 # Treemanifests don't work correctly with fastpathlinkrev
835 # Treemanifests don't work correctly with fastpathlinkrev
836 # either, because we don't discover which directory nodes to
836 # either, because we don't discover which directory nodes to
837 # send along with files. This could probably be fixed.
837 # send along with files. This could probably be fixed.
838 fastpathlinkrev = fastpathlinkrev and (
838 fastpathlinkrev = fastpathlinkrev and (
839 'treemanifest' not in repo.requirements)
839 'treemanifest' not in repo.requirements)
840
840
841 fnodes = {} # needed file nodes
841 fnodes = {} # needed file nodes
842
842
843 size = 0
843 size = 0
844 it = self.generatemanifests(
844 it = self.generatemanifests(
845 commonrevs, clrevorder, fastpathlinkrev, manifests, fnodes, source,
845 commonrevs, clrevorder, fastpathlinkrev, manifests, fnodes, source,
846 clstate['clrevtomanifestrev'])
846 clstate['clrevtomanifestrev'])
847
847
848 for tree, deltas in it:
848 for tree, deltas in it:
849 if tree:
849 if tree:
850 assert self.version == b'03'
850 assert self.version == b'03'
851 chunk = _fileheader(tree)
851 chunk = _fileheader(tree)
852 size += len(chunk)
852 size += len(chunk)
853 yield chunk
853 yield chunk
854
854
855 for delta in deltas:
855 for delta in deltas:
856 chunks = _revisiondeltatochunks(delta, self._builddeltaheader)
856 chunks = _revisiondeltatochunks(delta, self._builddeltaheader)
857 for chunk in chunks:
857 for chunk in chunks:
858 size += len(chunk)
858 size += len(chunk)
859 yield chunk
859 yield chunk
860
860
861 close = closechunk()
861 close = closechunk()
862 size += len(close)
862 size += len(close)
863 yield close
863 yield close
864
864
865 self._verbosenote(_('%8.i (manifests)\n') % size)
865 self._verbosenote(_('%8.i (manifests)\n') % size)
866 yield self._manifestsend
866 yield self._manifestsend
867
867
868 mfdicts = None
868 mfdicts = None
869 if self._ellipses and self._isshallow:
869 if self._ellipses and self._isshallow:
870 mfdicts = [(self._repo.manifestlog[n].read(), lr)
870 mfdicts = [(self._repo.manifestlog[n].read(), lr)
871 for (n, lr) in manifests.iteritems()]
871 for (n, lr) in manifests.iteritems()]
872
872
873 manifests.clear()
873 manifests.clear()
874 clrevs = set(cl.rev(x) for x in clnodes)
874 clrevs = set(cl.rev(x) for x in clnodes)
875
875
876 it = self.generatefiles(changedfiles, commonrevs,
876 it = self.generatefiles(changedfiles, commonrevs,
877 source, mfdicts, fastpathlinkrev,
877 source, mfdicts, fastpathlinkrev,
878 fnodes, clrevs)
878 fnodes, clrevs)
879
879
880 for path, deltas in it:
880 for path, deltas in it:
881 h = _fileheader(path)
881 h = _fileheader(path)
882 size = len(h)
882 size = len(h)
883 yield h
883 yield h
884
884
885 for delta in deltas:
885 for delta in deltas:
886 chunks = _revisiondeltatochunks(delta, self._builddeltaheader)
886 chunks = _revisiondeltatochunks(delta, self._builddeltaheader)
887 for chunk in chunks:
887 for chunk in chunks:
888 size += len(chunk)
888 size += len(chunk)
889 yield chunk
889 yield chunk
890
890
891 close = closechunk()
891 close = closechunk()
892 size += len(close)
892 size += len(close)
893 yield close
893 yield close
894
894
895 self._verbosenote(_('%8.i %s\n') % (size, path))
895 self._verbosenote(_('%8.i %s\n') % (size, path))
896
896
897 yield closechunk()
897 yield closechunk()
898
898
899 if clnodes:
899 if clnodes:
900 repo.hook('outgoing', node=hex(clnodes[0]), source=source)
900 repo.hook('outgoing', node=hex(clnodes[0]), source=source)
901
901
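# Editor's illustration -- not part of this changeset. At the chunk level,
# generate() above emits: changelog deltas, an empty chunk, manifest deltas
# (plus tree headers for cg3), an empty chunk, then one section per file
# (file header, deltas, empty chunk), terminated by a final empty chunk. The
# standalone sketch below builds a toy stream with that envelope using this
# module's own chunk helpers; the payloads are fake bytes, not real deltas.
def _streamlayout_sketch():
    def chunk(data):
        # a well-formed chunk is a 4-byte length header followed by the data
        return chunkheader(len(data)) + data

    parts = [
        chunk(b'fake changelog delta'),  # changelog section
        closechunk(),                    # ...terminated by an empty chunk
        chunk(b'fake manifest delta'),   # manifest section
        closechunk(),
        _fileheader(b'foo.c'),           # one file section (made-up path)
        chunk(b'fake filelog delta'),
        closechunk(),                    # end of that file
        closechunk(),                    # end of the changegroup
    ]
    # getchunk() at the top of this module reads such a stream back one
    # chunk at a time (e.g. wrapped in io.BytesIO).
    return b''.join(parts)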
902 def _generatechangelog(self, cl, nodes):
902 def _generatechangelog(self, cl, nodes):
903 """Generate data for changelog chunks.
903 """Generate data for changelog chunks.
904
904
905 Returns a 2-tuple of a dict containing state and an iterable of
905 Returns a 2-tuple of a dict containing state and an iterable of
906 revision deltas. The state will not be fully populated until the
906 revision deltas. The state will not be fully populated until the
907 delta stream has been fully consumed.
907 delta stream has been fully consumed.
908 """
908 """
909 clrevorder = {}
909 clrevorder = {}
910 manifests = {}
910 manifests = {}
911 mfl = self._repo.manifestlog
911 mfl = self._repo.manifestlog
912 changedfiles = set()
912 changedfiles = set()
913 clrevtomanifestrev = {}
913 clrevtomanifestrev = {}
914
914
915 # Callback for the changelog, used to collect changed files and
915 # Callback for the changelog, used to collect changed files and
916 # manifest nodes.
916 # manifest nodes.
917 # Returns the linkrev node (identity in the changelog case).
917 # Returns the linkrev node (identity in the changelog case).
918 def lookupcl(x):
918 def lookupcl(x):
919 c = cl.changelogrevision(x)
919 c = cl.changelogrevision(x)
920 clrevorder[x] = len(clrevorder)
920 clrevorder[x] = len(clrevorder)
921
921
922 if self._ellipses:
922 if self._ellipses:
923 # Only update manifests if x is going to be sent. Otherwise we
923 # Only update manifests if x is going to be sent. Otherwise we
924 # end up with bogus linkrevs specified for manifests and
924 # end up with bogus linkrevs specified for manifests and
925 # we skip some manifest nodes that we should otherwise
925 # we skip some manifest nodes that we should otherwise
926 # have sent.
926 # have sent.
927 if (x in self._fullclnodes
927 if (x in self._fullclnodes
928 or cl.rev(x) in self._precomputedellipsis):
928 or cl.rev(x) in self._precomputedellipsis):
929
929
930 manifestnode = c.manifest
930 manifestnode = c.manifest
931 # Record the first changeset introducing this manifest
931 # Record the first changeset introducing this manifest
932 # version.
932 # version.
933 manifests.setdefault(manifestnode, x)
933 manifests.setdefault(manifestnode, x)
934 # Set this narrow-specific dict so we have the lowest
934 # Set this narrow-specific dict so we have the lowest
935 # manifest revnum to look up for this cl revnum. (Part of
935 # manifest revnum to look up for this cl revnum. (Part of
936 # mapping changelog ellipsis parents to manifest ellipsis
936 # mapping changelog ellipsis parents to manifest ellipsis
937 # parents)
937 # parents)
938 clrevtomanifestrev.setdefault(
938 clrevtomanifestrev.setdefault(
939 cl.rev(x), mfl.rev(manifestnode))
939 cl.rev(x), mfl.rev(manifestnode))
940 # We can't trust the changed files list in the changeset if the
940 # We can't trust the changed files list in the changeset if the
941 # client requested a shallow clone.
941 # client requested a shallow clone.
942 if self._isshallow:
942 if self._isshallow:
943 changedfiles.update(mfl[c.manifest].read().keys())
943 changedfiles.update(mfl[c.manifest].read().keys())
944 else:
944 else:
945 changedfiles.update(c.files)
945 changedfiles.update(c.files)
946 else:
946 else:
947 # record the first changeset introducing this manifest version
947 # record the first changeset introducing this manifest version
948 manifests.setdefault(c.manifest, x)
948 manifests.setdefault(c.manifest, x)
949 # Record a complete list of potentially-changed files in
949 # Record a complete list of potentially-changed files in
950 # this manifest.
950 # this manifest.
951 changedfiles.update(c.files)
951 changedfiles.update(c.files)
952
952
953 return x
953 return x
954
954
955 state = {
955 state = {
956 'clrevorder': clrevorder,
956 'clrevorder': clrevorder,
957 'manifests': manifests,
957 'manifests': manifests,
958 'changedfiles': changedfiles,
958 'changedfiles': changedfiles,
959 'clrevtomanifestrev': clrevtomanifestrev,
959 'clrevtomanifestrev': clrevtomanifestrev,
960 }
960 }
961
961
962 gen = deltagroup(
962 gen = deltagroup(
963 self._repo, cl, nodes, True, lookupcl,
963 self._repo, cl, nodes, True, lookupcl,
964 self._forcedeltaparentprev,
964 self._forcedeltaparentprev,
965 ellipses=self._ellipses,
965 ellipses=self._ellipses,
966 topic=_('changesets'),
966 topic=_('changesets'),
967 clrevtolocalrev={},
967 clrevtolocalrev={},
968 fullclnodes=self._fullclnodes,
968 fullclnodes=self._fullclnodes,
969 precomputedellipsis=self._precomputedellipsis)
969 precomputedellipsis=self._precomputedellipsis)
970
970
971 return state, gen
971 return state, gen
972
972
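# Editor's illustration -- not part of this changeset. The contract stated in
# the docstring above (state only complete once the deltas are consumed) comes
# from lookupcl() running per node during iteration. A self-contained toy of
# the same pattern:
def _lazystate_sketch():
    state = {'seen': []}

    def gen(items):
        for item in items:
            state['seen'].append(item)   # side effect, like lookupcl()
            yield item

    deltas = gen(['a', 'b', 'c'])
    assert state['seen'] == []               # nothing recorded yet
    list(deltas)                             # consume the generator...
    assert state['seen'] == ['a', 'b', 'c']  # ...now the state is complete
    return state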
973 def generatemanifests(self, commonrevs, clrevorder, fastpathlinkrev,
973 def generatemanifests(self, commonrevs, clrevorder, fastpathlinkrev,
974 manifests, fnodes, source, clrevtolocalrev):
974 manifests, fnodes, source, clrevtolocalrev):
975 """Returns an iterator of changegroup chunks containing manifests.
975 """Returns an iterator of changegroup chunks containing manifests.
976
976
977 `source` is unused here, but is used by extensions like remotefilelog to
977 `source` is unused here, but is used by extensions like remotefilelog to
978 change what is sent based on pulls vs. pushes, etc.
978 change what is sent based on pulls vs. pushes, etc.
979 """
979 """
980 repo = self._repo
980 repo = self._repo
981 mfl = repo.manifestlog
981 mfl = repo.manifestlog
982 tmfnodes = {'': manifests}
982 tmfnodes = {'': manifests}
983
983
984 # Callback for the manifest, used to collect linkrevs for filelog
984 # Callback for the manifest, used to collect linkrevs for filelog
985 # revisions.
985 # revisions.
986 # Returns the linkrev node (collected in lookupcl).
986 # Returns the linkrev node (collected in lookupcl).
987 def makelookupmflinknode(tree, nodes):
987 def makelookupmflinknode(tree, nodes):
988 if fastpathlinkrev:
988 if fastpathlinkrev:
989 assert not tree
989 assert not tree
990 return manifests.__getitem__
990 return manifests.__getitem__
991
991
992 def lookupmflinknode(x):
992 def lookupmflinknode(x):
993 """Callback for looking up the linknode for manifests.
993 """Callback for looking up the linknode for manifests.
994
994
995 Returns the linkrev node for the specified manifest.
995 Returns the linkrev node for the specified manifest.
996
996
997 SIDE EFFECT:
997 SIDE EFFECT:
998
998
999 1) fclnodes gets populated with the list of relevant
999 1) fclnodes gets populated with the list of relevant
1000 file nodes if we're not using fastpathlinkrev
1000 file nodes if we're not using fastpathlinkrev
1001 2) When treemanifests are in use, collects treemanifest nodes
1001 2) When treemanifests are in use, collects treemanifest nodes
1002 to send
1002 to send
1003
1003
1004 Note that this means manifests must be completely sent to
1004 Note that this means manifests must be completely sent to
1005 the client before you can trust the list of files and
1005 the client before you can trust the list of files and
1006 treemanifests to send.
1006 treemanifests to send.
1007 """
1007 """
1008 clnode = nodes[x]
1008 clnode = nodes[x]
1009 mdata = mfl.get(tree, x).readfast(shallow=True)
1009 mdata = mfl.get(tree, x).readfast(shallow=True)
1010 for p, n, fl in mdata.iterentries():
1010 for p, n, fl in mdata.iterentries():
1011 if fl == 't': # subdirectory manifest
1011 if fl == 't': # subdirectory manifest
1012 subtree = tree + p + '/'
1012 subtree = tree + p + '/'
1013 tmfclnodes = tmfnodes.setdefault(subtree, {})
1013 tmfclnodes = tmfnodes.setdefault(subtree, {})
1014 tmfclnode = tmfclnodes.setdefault(n, clnode)
1014 tmfclnode = tmfclnodes.setdefault(n, clnode)
1015 if clrevorder[clnode] < clrevorder[tmfclnode]:
1015 if clrevorder[clnode] < clrevorder[tmfclnode]:
1016 tmfclnodes[n] = clnode
1016 tmfclnodes[n] = clnode
1017 else:
1017 else:
1018 f = tree + p
1018 f = tree + p
1019 fclnodes = fnodes.setdefault(f, {})
1019 fclnodes = fnodes.setdefault(f, {})
1020 fclnode = fclnodes.setdefault(n, clnode)
1020 fclnode = fclnodes.setdefault(n, clnode)
1021 if clrevorder[clnode] < clrevorder[fclnode]:
1021 if clrevorder[clnode] < clrevorder[fclnode]:
1022 fclnodes[n] = clnode
1022 fclnodes[n] = clnode
1023 return clnode
1023 return clnode
1024 return lookupmflinknode
1024 return lookupmflinknode
1025
1025
1026 while tmfnodes:
1026 while tmfnodes:
1027 tree, nodes = tmfnodes.popitem()
1027 tree, nodes = tmfnodes.popitem()
1028 store = mfl.getstorage(tree)
1028 store = mfl.getstorage(tree)
1029
1029
1030 if not self._filematcher.visitdir(store.tree[:-1] or '.'):
1030 if not self._filematcher.visitdir(store.tree[:-1] or '.'):
1031 # No nodes to send because this directory is out of
1031 # No nodes to send because this directory is out of
1032 # the client's view of the repository (probably
1032 # the client's view of the repository (probably
1033 # because of narrow clones).
1033 # because of narrow clones).
1034 prunednodes = []
1034 prunednodes = []
1035 else:
1035 else:
1036 # Avoid sending any manifest nodes we can prove the
1036 # Avoid sending any manifest nodes we can prove the
1037 # client already has by checking linkrevs. See the
1037 # client already has by checking linkrevs. See the
1038 # related comment in generatefiles().
1038 # related comment in generatefiles().
1039 prunednodes = self._prunemanifests(store, nodes, commonrevs)
1039 prunednodes = self._prunemanifests(store, nodes, commonrevs)
1040 if tree and not prunednodes:
1040 if tree and not prunednodes:
1041 continue
1041 continue
1042
1042
1043 lookupfn = makelookupmflinknode(tree, nodes)
1043 lookupfn = makelookupmflinknode(tree, nodes)
1044
1044
1045 deltas = deltagroup(
1045 deltas = deltagroup(
1046 self._repo, store, prunednodes, False, lookupfn,
1046 self._repo, store, prunednodes, False, lookupfn,
1047 self._forcedeltaparentprev,
1047 self._forcedeltaparentprev,
1048 ellipses=self._ellipses,
1048 ellipses=self._ellipses,
1049 topic=_('manifests'),
1049 topic=_('manifests'),
1050 clrevtolocalrev=clrevtolocalrev,
1050 clrevtolocalrev=clrevtolocalrev,
1051 fullclnodes=self._fullclnodes,
1051 fullclnodes=self._fullclnodes,
1052 precomputedellipsis=self._precomputedellipsis)
1052 precomputedellipsis=self._precomputedellipsis)
1053
1053
1054 yield tree, deltas
1054 yield tree, deltas
1055
1055
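# Editor's illustration -- not part of this changeset. The linknode
# bookkeeping in makelookupmflinknode() keeps, for every manifest/file node,
# the earliest changeset (per clrevorder) that introduced it. A minimal,
# self-contained rendition of that rule with made-up names:
def _earliestintroducer_sketch():
    clrevorder = {'clA': 0, 'clB': 1}        # order the changelog was walked
    fclnodes = {}
    for clnode, filenode in [('clB', 'n1'), ('clA', 'n1')]:
        known = fclnodes.setdefault(filenode, clnode)
        if clrevorder[clnode] < clrevorder[known]:
            fclnodes[filenode] = clnode      # an earlier introducer wins
    assert fclnodes == {'n1': 'clA'}
    return fclnodes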
1056 def _prunemanifests(self, store, nodes, commonrevs):
1056 def _prunemanifests(self, store, nodes, commonrevs):
1057 # This is split out as a separate method to allow filtering
1057 # This is split out as a separate method to allow filtering
1058 # commonrevs in extension code.
1058 # commonrevs in extension code.
1059 #
1059 #
1060 # TODO(augie): this shouldn't be required, instead we should
1060 # TODO(augie): this shouldn't be required, instead we should
1061 # make filtering of revisions to send delegated to the store
1061 # make filtering of revisions to send delegated to the store
1062 # layer.
1062 # layer.
1063 frev, flr = store.rev, store.linkrev
1063 frev, flr = store.rev, store.linkrev
1064 return [n for n in nodes if flr(frev(n)) not in commonrevs]
1064 return [n for n in nodes if flr(frev(n)) not in commonrevs]
1065
1065
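# Editor's illustration -- not part of this changeset. _prunemanifests() drops
# any node whose linkrev the client already has, roughly like this toy filter
# over a made-up node -> linkrev mapping:
def _prunebylinkrev_sketch():
    linkrevs = {'n1': 5, 'n2': 9, 'n3': 12}  # hypothetical node -> linkrev
    commonrevs = {5, 9}                      # revs the client already has
    kept = [n for n, lr in sorted(linkrevs.items()) if lr not in commonrevs]
    assert kept == ['n3']
    return kept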
1066 # The 'source' parameter is useful for extensions
1066 # The 'source' parameter is useful for extensions
1067 def generatefiles(self, changedfiles, commonrevs, source,
1067 def generatefiles(self, changedfiles, commonrevs, source,
1068 mfdicts, fastpathlinkrev, fnodes, clrevs):
1068 mfdicts, fastpathlinkrev, fnodes, clrevs):
1069 changedfiles = list(filter(self._filematcher, changedfiles))
1069 changedfiles = list(filter(self._filematcher, changedfiles))
1070
1070
1071 if not fastpathlinkrev:
1071 if not fastpathlinkrev:
1072 def normallinknodes(unused, fname):
1072 def normallinknodes(unused, fname):
1073 return fnodes.get(fname, {})
1073 return fnodes.get(fname, {})
1074 else:
1074 else:
1075 cln = self._repo.changelog.node
1075 cln = self._repo.changelog.node
1076
1076
1077 def normallinknodes(store, fname):
1077 def normallinknodes(store, fname):
1078 flinkrev = store.linkrev
1078 flinkrev = store.linkrev
1079 fnode = store.node
1079 fnode = store.node
1080 revs = ((r, flinkrev(r)) for r in store)
1080 revs = ((r, flinkrev(r)) for r in store)
1081 return dict((fnode(r), cln(lr))
1081 return dict((fnode(r), cln(lr))
1082 for r, lr in revs if lr in clrevs)
1082 for r, lr in revs if lr in clrevs)
1083
1083
1084 clrevtolocalrev = {}
1084 clrevtolocalrev = {}
1085
1085
1086 if self._isshallow:
1086 if self._isshallow:
1087 # In a shallow clone, the linknodes callback needs to also include
1087 # In a shallow clone, the linknodes callback needs to also include
1088 # those file nodes that are in the manifests we sent but weren't
1088 # those file nodes that are in the manifests we sent but weren't
1089 # introduced by those manifests.
1089 # introduced by those manifests.
1090 commonctxs = [self._repo[c] for c in commonrevs]
1090 commonctxs = [self._repo[c] for c in commonrevs]
1091 clrev = self._repo.changelog.rev
1091 clrev = self._repo.changelog.rev
1092
1092
1093 def linknodes(flog, fname):
1093 def linknodes(flog, fname):
1094 for c in commonctxs:
1094 for c in commonctxs:
1095 try:
1095 try:
1096 fnode = c.filenode(fname)
1096 fnode = c.filenode(fname)
1097 clrevtolocalrev[c.rev()] = flog.rev(fnode)
1097 clrevtolocalrev[c.rev()] = flog.rev(fnode)
1098 except error.ManifestLookupError:
1098 except error.ManifestLookupError:
1099 pass
1099 pass
1100 links = normallinknodes(flog, fname)
1100 links = normallinknodes(flog, fname)
1101 if len(links) != len(mfdicts):
1101 if len(links) != len(mfdicts):
1102 for mf, lr in mfdicts:
1102 for mf, lr in mfdicts:
1103 fnode = mf.get(fname, None)
1103 fnode = mf.get(fname, None)
1104 if fnode in links:
1104 if fnode in links:
1105 links[fnode] = min(links[fnode], lr, key=clrev)
1105 links[fnode] = min(links[fnode], lr, key=clrev)
1106 elif fnode:
1106 elif fnode:
1107 links[fnode] = lr
1107 links[fnode] = lr
1108 return links
1108 return links
1109 else:
1109 else:
1110 linknodes = normallinknodes
1110 linknodes = normallinknodes
1111
1111
1112 repo = self._repo
1112 repo = self._repo
1113 progress = repo.ui.makeprogress(_('files'), unit=_('files'),
1113 progress = repo.ui.makeprogress(_('files'), unit=_('files'),
1114 total=len(changedfiles))
1114 total=len(changedfiles))
1115 for i, fname in enumerate(sorted(changedfiles)):
1115 for i, fname in enumerate(sorted(changedfiles)):
1116 filerevlog = repo.file(fname)
1116 filerevlog = repo.file(fname)
1117 if not filerevlog:
1117 if not filerevlog:
1118 raise error.Abort(_("empty or missing file data for %s") %
1118 raise error.Abort(_("empty or missing file data for %s") %
1119 fname)
1119 fname)
1120
1120
1121 clrevtolocalrev.clear()
1121 clrevtolocalrev.clear()
1122
1122
1123 linkrevnodes = linknodes(filerevlog, fname)
1123 linkrevnodes = linknodes(filerevlog, fname)
1124 # Lookup function for filenodes; we collected the linkrev nodes above in the
1124 # Lookup function for filenodes; we collected the linkrev nodes above in the
1125 # fastpath case and with lookupmf in the slowpath case.
1125 # fastpath case and with lookupmf in the slowpath case.
1126 def lookupfilelog(x):
1126 def lookupfilelog(x):
1127 return linkrevnodes[x]
1127 return linkrevnodes[x]
1128
1128
1129 frev, flr = filerevlog.rev, filerevlog.linkrev
1129 frev, flr = filerevlog.rev, filerevlog.linkrev
1130 # Skip sending any filenode we know the client already
1130 # Skip sending any filenode we know the client already
1131 # has. This avoids over-sending files relatively
1131 # has. This avoids over-sending files relatively
1132 # inexpensively, so it's not a problem if we under-filter
1132 # inexpensively, so it's not a problem if we under-filter
1133 # here.
1133 # here.
1134 filenodes = [n for n in linkrevnodes
1134 filenodes = [n for n in linkrevnodes
1135 if flr(frev(n)) not in commonrevs]
1135 if flr(frev(n)) not in commonrevs]
1136
1136
1137 if not filenodes:
1137 if not filenodes:
1138 continue
1138 continue
1139
1139
1140 progress.update(i + 1, item=fname)
1140 progress.update(i + 1, item=fname)
1141
1141
1142 deltas = deltagroup(
1142 deltas = deltagroup(
1143 self._repo, filerevlog, filenodes, False, lookupfilelog,
1143 self._repo, filerevlog, filenodes, False, lookupfilelog,
1144 self._forcedeltaparentprev,
1144 self._forcedeltaparentprev,
1145 ellipses=self._ellipses,
1145 ellipses=self._ellipses,
1146 clrevtolocalrev=clrevtolocalrev,
1146 clrevtolocalrev=clrevtolocalrev,
1147 fullclnodes=self._fullclnodes,
1147 fullclnodes=self._fullclnodes,
1148 precomputedellipsis=self._precomputedellipsis)
1148 precomputedellipsis=self._precomputedellipsis)
1149
1149
1150 yield fname, deltas
1150 yield fname, deltas
1151
1151
1152 progress.complete()
1152 progress.complete()
1153
1153
1154 def _makecg1packer(repo, filematcher, bundlecaps, ellipses=False,
1154 def _makecg1packer(repo, filematcher, bundlecaps, ellipses=False,
1155 shallow=False, ellipsisroots=None, fullnodes=None):
1155 shallow=False, ellipsisroots=None, fullnodes=None):
1156 builddeltaheader = lambda d: _CHANGEGROUPV1_DELTA_HEADER.pack(
1156 builddeltaheader = lambda d: _CHANGEGROUPV1_DELTA_HEADER.pack(
1157 d.node, d.p1node, d.p2node, d.linknode)
1157 d.node, d.p1node, d.p2node, d.linknode)
1158
1158
1159 return cgpacker(repo, filematcher, b'01',
1159 return cgpacker(repo, filematcher, b'01',
1160 builddeltaheader=builddeltaheader,
1160 builddeltaheader=builddeltaheader,
1161 manifestsend=b'',
1161 manifestsend=b'',
1162 forcedeltaparentprev=True,
1162 forcedeltaparentprev=True,
1163 bundlecaps=bundlecaps,
1163 bundlecaps=bundlecaps,
1164 ellipses=ellipses,
1164 ellipses=ellipses,
1165 shallow=shallow,
1165 shallow=shallow,
1166 ellipsisroots=ellipsisroots,
1166 ellipsisroots=ellipsisroots,
1167 fullnodes=fullnodes)
1167 fullnodes=fullnodes)
1168
1168
1169 def _makecg2packer(repo, filematcher, bundlecaps, ellipses=False,
1169 def _makecg2packer(repo, filematcher, bundlecaps, ellipses=False,
1170 shallow=False, ellipsisroots=None, fullnodes=None):
1170 shallow=False, ellipsisroots=None, fullnodes=None):
1171 builddeltaheader = lambda d: _CHANGEGROUPV2_DELTA_HEADER.pack(
1171 builddeltaheader = lambda d: _CHANGEGROUPV2_DELTA_HEADER.pack(
1172 d.node, d.p1node, d.p2node, d.basenode, d.linknode)
1172 d.node, d.p1node, d.p2node, d.basenode, d.linknode)
1173
1173
1174 return cgpacker(repo, filematcher, b'02',
1174 return cgpacker(repo, filematcher, b'02',
1175 builddeltaheader=builddeltaheader,
1175 builddeltaheader=builddeltaheader,
1176 manifestsend=b'',
1176 manifestsend=b'',
1177 bundlecaps=bundlecaps,
1177 bundlecaps=bundlecaps,
1178 ellipses=ellipses,
1178 ellipses=ellipses,
1179 shallow=shallow,
1179 shallow=shallow,
1180 ellipsisroots=ellipsisroots,
1180 ellipsisroots=ellipsisroots,
1181 fullnodes=fullnodes)
1181 fullnodes=fullnodes)
1182
1182
1183 def _makecg3packer(repo, filematcher, bundlecaps, ellipses=False,
1183 def _makecg3packer(repo, filematcher, bundlecaps, ellipses=False,
1184 shallow=False, ellipsisroots=None, fullnodes=None):
1184 shallow=False, ellipsisroots=None, fullnodes=None):
1185 builddeltaheader = lambda d: _CHANGEGROUPV3_DELTA_HEADER.pack(
1185 builddeltaheader = lambda d: _CHANGEGROUPV3_DELTA_HEADER.pack(
1186 d.node, d.p1node, d.p2node, d.basenode, d.linknode, d.flags)
1186 d.node, d.p1node, d.p2node, d.basenode, d.linknode, d.flags)
1187
1187
1188 return cgpacker(repo, filematcher, b'03',
1188 return cgpacker(repo, filematcher, b'03',
1189 builddeltaheader=builddeltaheader,
1189 builddeltaheader=builddeltaheader,
1190 manifestsend=closechunk(),
1190 manifestsend=closechunk(),
1191 bundlecaps=bundlecaps,
1191 bundlecaps=bundlecaps,
1192 ellipses=ellipses,
1192 ellipses=ellipses,
1193 shallow=shallow,
1193 shallow=shallow,
1194 ellipsisroots=ellipsisroots,
1194 ellipsisroots=ellipsisroots,
1195 fullnodes=fullnodes)
1195 fullnodes=fullnodes)
1196
1196
1197 _packermap = {'01': (_makecg1packer, cg1unpacker),
1197 _packermap = {'01': (_makecg1packer, cg1unpacker),
1198 # cg2 adds support for exchanging generaldelta
1198 # cg2 adds support for exchanging generaldelta
1199 '02': (_makecg2packer, cg2unpacker),
1199 '02': (_makecg2packer, cg2unpacker),
1200 # cg3 adds support for exchanging revlog flags and treemanifests
1200 # cg3 adds support for exchanging revlog flags and treemanifests
1201 '03': (_makecg3packer, cg3unpacker),
1201 '03': (_makecg3packer, cg3unpacker),
1202 }
1202 }
1203
1203
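# Editor's sketch -- not part of this changeset. getbundler() and
# getunbundler() below index this table: slot 0 is the packer factory, slot 1
# the unpacker class. Roughly (arguments mirror those two functions; alg=None
# as in makechangegroup() below):
def _packermap_sketch(repo, fh):
    makepacker, unpackercls = _packermap['02']
    packer = makepacker(repo, matchmod.alwaysmatcher(repo.root, ''), None)
    unpacker = unpackercls(fh, None)
    return packer, unpacker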
1204 def allsupportedversions(repo):
1204 def allsupportedversions(repo):
1205 versions = set(_packermap.keys())
1205 versions = set(_packermap.keys())
1206 if not (repo.ui.configbool('experimental', 'changegroup3') or
1206 if not (repo.ui.configbool('experimental', 'changegroup3') or
1207 repo.ui.configbool('experimental', 'treemanifest') or
1207 repo.ui.configbool('experimental', 'treemanifest') or
1208 'treemanifest' in repo.requirements):
1208 'treemanifest' in repo.requirements):
1209 versions.discard('03')
1209 versions.discard('03')
1210 return versions
1210 return versions
1211
1211
1212 # Changegroup versions that can be applied to the repo
1212 # Changegroup versions that can be applied to the repo
1213 def supportedincomingversions(repo):
1213 def supportedincomingversions(repo):
1214 return allsupportedversions(repo)
1214 return allsupportedversions(repo)
1215
1215
1216 # Changegroup versions that can be created from the repo
1216 # Changegroup versions that can be created from the repo
1217 def supportedoutgoingversions(repo):
1217 def supportedoutgoingversions(repo):
1218 versions = allsupportedversions(repo)
1218 versions = allsupportedversions(repo)
1219 if 'treemanifest' in repo.requirements:
1219 if 'treemanifest' in repo.requirements:
1220 # Versions 01 and 02 support only flat manifests and it's just too
1220 # Versions 01 and 02 support only flat manifests and it's just too
1221 # expensive to convert between the flat manifest and tree manifest on
1221 # expensive to convert between the flat manifest and tree manifest on
1222 # the fly. Since tree manifests are hashed differently, all of history
1222 # the fly. Since tree manifests are hashed differently, all of history
1223 # would have to be converted. Instead, we simply don't even pretend to
1223 # would have to be converted. Instead, we simply don't even pretend to
1224 # support versions 01 and 02.
1224 # support versions 01 and 02.
1225 versions.discard('01')
1225 versions.discard('01')
1226 versions.discard('02')
1226 versions.discard('02')
1227 if repository.NARROW_REQUIREMENT in repo.requirements:
1227 if repository.NARROW_REQUIREMENT in repo.requirements:
1228 # Versions 01 and 02 don't support revlog flags, and we need to
1228 # Versions 01 and 02 don't support revlog flags, and we need to
1229 # support that for stripping and unbundling to work.
1229 # support that for stripping and unbundling to work.
1230 versions.discard('01')
1230 versions.discard('01')
1231 versions.discard('02')
1231 versions.discard('02')
1232 if LFS_REQUIREMENT in repo.requirements:
1232 if LFS_REQUIREMENT in repo.requirements:
1233 # Versions 01 and 02 don't support revlog flags, and we need to
1233 # Versions 01 and 02 don't support revlog flags, and we need to
1234 # mark LFS entries with REVIDX_EXTSTORED.
1234 # mark LFS entries with REVIDX_EXTSTORED.
1235 versions.discard('01')
1235 versions.discard('01')
1236 versions.discard('02')
1236 versions.discard('02')
1237
1237
1238 return versions
1238 return versions
1239
1239
1240 def localversion(repo):
1240 def localversion(repo):
1241 # Finds the best version to use for bundles that are meant to be used
1241 # Finds the best version to use for bundles that are meant to be used
1242 # locally, such as those from strip and shelve, and temporary bundles.
1242 # locally, such as those from strip and shelve, and temporary bundles.
1243 return max(supportedoutgoingversions(repo))
1243 return max(supportedoutgoingversions(repo))
1244
1244
1245 def safeversion(repo):
1245 def safeversion(repo):
1246 # Finds the smallest version that it's safe to assume clients of the repo
1246 # Finds the smallest version that it's safe to assume clients of the repo
1247 # will support. For example, all hg versions that support generaldelta also
1247 # will support. For example, all hg versions that support generaldelta also
1248 # support changegroup 02.
1248 # support changegroup 02.
1249 versions = supportedoutgoingversions(repo)
1249 versions = supportedoutgoingversions(repo)
1250 if 'generaldelta' in repo.requirements:
1250 if 'generaldelta' in repo.requirements:
1251 versions.discard('01')
1251 versions.discard('01')
1252 assert versions
1252 assert versions
1253 return min(versions)
1253 return min(versions)
1254
1254
1255 def getbundler(version, repo, bundlecaps=None, filematcher=None,
1255 def getbundler(version, repo, bundlecaps=None, filematcher=None,
1256 ellipses=False, shallow=False, ellipsisroots=None,
1256 ellipses=False, shallow=False, ellipsisroots=None,
1257 fullnodes=None):
1257 fullnodes=None):
1258 assert version in supportedoutgoingversions(repo)
1258 assert version in supportedoutgoingversions(repo)
1259
1259
1260 if filematcher is None:
1260 if filematcher is None:
1261 filematcher = matchmod.alwaysmatcher(repo.root, '')
1261 filematcher = matchmod.alwaysmatcher(repo.root, '')
1262
1262
1263 if version == '01' and not filematcher.always():
1263 if version == '01' and not filematcher.always():
1264 raise error.ProgrammingError('version 01 changegroups do not support '
1264 raise error.ProgrammingError('version 01 changegroups do not support '
1265 'sparse file matchers')
1265 'sparse file matchers')
1266
1266
1267 if ellipses and version in (b'01', b'02'):
1267 if ellipses and version in (b'01', b'02'):
1268 raise error.Abort(
1268 raise error.Abort(
1269 _('ellipsis nodes require at least cg3 on client and server, '
1269 _('ellipsis nodes require at least cg3 on client and server, '
1270 'but negotiated version %s') % version)
1270 'but negotiated version %s') % version)
1271
1271
1272 # Requested files could include files not in the local store. So
1272 # Requested files could include files not in the local store. So
1273 # filter those out.
1273 # filter those out.
1274 filematcher = matchmod.intersectmatchers(repo.narrowmatch(),
1275 filematcher)
1274 filematcher = repo.narrowmatch(filematcher)
1276
1275
1277 fn = _packermap[version][0]
1276 fn = _packermap[version][0]
1278 return fn(repo, filematcher, bundlecaps, ellipses=ellipses,
1277 return fn(repo, filematcher, bundlecaps, ellipses=ellipses,
1279 shallow=shallow, ellipsisroots=ellipsisroots,
1278 shallow=shallow, ellipsisroots=ellipsisroots,
1280 fullnodes=fullnodes)
1279 fullnodes=fullnodes)
1281
1280
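# Editor's sketch -- not part of this changeset's diff, only of its intent.
# The repo.narrowmatch(filematcher) call above relies on the companion
# localrepo change allowing narrowmatch() to take an optional matcher to
# intersect with. Conceptually (names simplified; not the literal localrepo
# implementation):
#
#     def narrowmatch(self, match=None):
#         if match:
#             return matchmod.intersectmatchers(match, self._narrowmatch)
#         return self._narrowmatch
#
# so getbundler() no longer has to call matchmod.intersectmatchers() itself.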
1282 def getunbundler(version, fh, alg, extras=None):
1281 def getunbundler(version, fh, alg, extras=None):
1283 return _packermap[version][1](fh, alg, extras=extras)
1282 return _packermap[version][1](fh, alg, extras=extras)
1284
1283
1285 def _changegroupinfo(repo, nodes, source):
1284 def _changegroupinfo(repo, nodes, source):
1286 if repo.ui.verbose or source == 'bundle':
1285 if repo.ui.verbose or source == 'bundle':
1287 repo.ui.status(_("%d changesets found\n") % len(nodes))
1286 repo.ui.status(_("%d changesets found\n") % len(nodes))
1288 if repo.ui.debugflag:
1287 if repo.ui.debugflag:
1289 repo.ui.debug("list of changesets:\n")
1288 repo.ui.debug("list of changesets:\n")
1290 for node in nodes:
1289 for node in nodes:
1291 repo.ui.debug("%s\n" % hex(node))
1290 repo.ui.debug("%s\n" % hex(node))
1292
1291
1293 def makechangegroup(repo, outgoing, version, source, fastpath=False,
1292 def makechangegroup(repo, outgoing, version, source, fastpath=False,
1294 bundlecaps=None):
1293 bundlecaps=None):
1295 cgstream = makestream(repo, outgoing, version, source,
1294 cgstream = makestream(repo, outgoing, version, source,
1296 fastpath=fastpath, bundlecaps=bundlecaps)
1295 fastpath=fastpath, bundlecaps=bundlecaps)
1297 return getunbundler(version, util.chunkbuffer(cgstream), None,
1296 return getunbundler(version, util.chunkbuffer(cgstream), None,
1298 {'clcount': len(outgoing.missing) })
1297 {'clcount': len(outgoing.missing) })
1299
1298
1300 def makestream(repo, outgoing, version, source, fastpath=False,
1299 def makestream(repo, outgoing, version, source, fastpath=False,
1301 bundlecaps=None, filematcher=None):
1300 bundlecaps=None, filematcher=None):
1302 bundler = getbundler(version, repo, bundlecaps=bundlecaps,
1301 bundler = getbundler(version, repo, bundlecaps=bundlecaps,
1303 filematcher=filematcher)
1302 filematcher=filematcher)
1304
1303
1305 repo = repo.unfiltered()
1304 repo = repo.unfiltered()
1306 commonrevs = outgoing.common
1305 commonrevs = outgoing.common
1307 csets = outgoing.missing
1306 csets = outgoing.missing
1308 heads = outgoing.missingheads
1307 heads = outgoing.missingheads
1309 # We go through the fast path if we get told to, or if all (unfiltered)
1308 # We go through the fast path if we get told to, or if all (unfiltered)
1310 # heads have been requested (since we then know all linkrevs will
1309 # heads have been requested (since we then know all linkrevs will
1311 # be pulled by the client).
1310 # be pulled by the client).
1312 heads.sort()
1311 heads.sort()
1313 fastpathlinkrev = fastpath or (
1312 fastpathlinkrev = fastpath or (
1314 repo.filtername is None and heads == sorted(repo.heads()))
1313 repo.filtername is None and heads == sorted(repo.heads()))
1315
1314
1316 repo.hook('preoutgoing', throw=True, source=source)
1315 repo.hook('preoutgoing', throw=True, source=source)
1317 _changegroupinfo(repo, csets, source)
1316 _changegroupinfo(repo, csets, source)
1318 return bundler.generate(commonrevs, csets, fastpathlinkrev, source)
1317 return bundler.generate(commonrevs, csets, fastpathlinkrev, source)
1319
1318
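# Editor's sketch -- not part of this changeset. A typical caller builds the
# `outgoing` argument with discovery.findcommonoutgoing(); everything else
# here is hypothetical glue for illustration, and '02' assumes a repo without
# the treemanifest/narrow/lfs requirements (see supportedoutgoingversions
# above).
def _makechangegroup_sketch(repo, other):
    from . import discovery   # local import, for this illustration only
    outgoing = discovery.findcommonoutgoing(repo, other)
    return makechangegroup(repo, outgoing, '02', 'push')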
1320 def _addchangegroupfiles(repo, source, revmap, trp, expectedfiles, needfiles):
1319 def _addchangegroupfiles(repo, source, revmap, trp, expectedfiles, needfiles):
1321 revisions = 0
1320 revisions = 0
1322 files = 0
1321 files = 0
1323 progress = repo.ui.makeprogress(_('files'), unit=_('files'),
1322 progress = repo.ui.makeprogress(_('files'), unit=_('files'),
1324 total=expectedfiles)
1323 total=expectedfiles)
1325 for chunkdata in iter(source.filelogheader, {}):
1324 for chunkdata in iter(source.filelogheader, {}):
1326 files += 1
1325 files += 1
1327 f = chunkdata["filename"]
1326 f = chunkdata["filename"]
1328 repo.ui.debug("adding %s revisions\n" % f)
1327 repo.ui.debug("adding %s revisions\n" % f)
1329 progress.increment()
1328 progress.increment()
1330 fl = repo.file(f)
1329 fl = repo.file(f)
1331 o = len(fl)
1330 o = len(fl)
1332 try:
1331 try:
1333 deltas = source.deltaiter()
1332 deltas = source.deltaiter()
1334 if not fl.addgroup(deltas, revmap, trp):
1333 if not fl.addgroup(deltas, revmap, trp):
1335 raise error.Abort(_("received file revlog group is empty"))
1334 raise error.Abort(_("received file revlog group is empty"))
1336 except error.CensoredBaseError as e:
1335 except error.CensoredBaseError as e:
1337 raise error.Abort(_("received delta base is censored: %s") % e)
1336 raise error.Abort(_("received delta base is censored: %s") % e)
1338 revisions += len(fl) - o
1337 revisions += len(fl) - o
1339 if f in needfiles:
1338 if f in needfiles:
1340 needs = needfiles[f]
1339 needs = needfiles[f]
1341 for new in pycompat.xrange(o, len(fl)):
1340 for new in pycompat.xrange(o, len(fl)):
1342 n = fl.node(new)
1341 n = fl.node(new)
1343 if n in needs:
1342 if n in needs:
1344 needs.remove(n)
1343 needs.remove(n)
1345 else:
1344 else:
1346 raise error.Abort(
1345 raise error.Abort(
1347 _("received spurious file revlog entry"))
1346 _("received spurious file revlog entry"))
1348 if not needs:
1347 if not needs:
1349 del needfiles[f]
1348 del needfiles[f]
1350 progress.complete()
1349 progress.complete()
1351
1350
1352 for f, needs in needfiles.iteritems():
1351 for f, needs in needfiles.iteritems():
1353 fl = repo.file(f)
1352 fl = repo.file(f)
1354 for n in needs:
1353 for n in needs:
1355 try:
1354 try:
1356 fl.rev(n)
1355 fl.rev(n)
1357 except error.LookupError:
1356 except error.LookupError:
1358 raise error.Abort(
1357 raise error.Abort(
1359 _('missing file data for %s:%s - run hg verify') %
1358 _('missing file data for %s:%s - run hg verify') %
1360 (f, hex(n)))
1359 (f, hex(n)))
1361
1360
1362 return revisions, files
1361 return revisions, files
@@ -1,5993 +1,5992 b''
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import difflib
10 import difflib
11 import errno
11 import errno
12 import os
12 import os
13 import re
13 import re
14 import sys
14 import sys
15
15
16 from .i18n import _
16 from .i18n import _
17 from .node import (
17 from .node import (
18 hex,
18 hex,
19 nullid,
19 nullid,
20 nullrev,
20 nullrev,
21 short,
21 short,
22 wdirhex,
22 wdirhex,
23 wdirrev,
23 wdirrev,
24 )
24 )
25 from . import (
25 from . import (
26 archival,
26 archival,
27 bookmarks,
27 bookmarks,
28 bundle2,
28 bundle2,
29 changegroup,
29 changegroup,
30 cmdutil,
30 cmdutil,
31 copies,
31 copies,
32 debugcommands as debugcommandsmod,
32 debugcommands as debugcommandsmod,
33 destutil,
33 destutil,
34 dirstateguard,
34 dirstateguard,
35 discovery,
35 discovery,
36 encoding,
36 encoding,
37 error,
37 error,
38 exchange,
38 exchange,
39 extensions,
39 extensions,
40 filemerge,
40 filemerge,
41 formatter,
41 formatter,
42 graphmod,
42 graphmod,
43 hbisect,
43 hbisect,
44 help,
44 help,
45 hg,
45 hg,
46 logcmdutil,
46 logcmdutil,
47 match as matchmod,
48 merge as mergemod,
47 merge as mergemod,
49 narrowspec,
48 narrowspec,
50 obsolete,
49 obsolete,
51 obsutil,
50 obsutil,
52 patch,
51 patch,
53 phases,
52 phases,
54 pycompat,
53 pycompat,
55 rcutil,
54 rcutil,
56 registrar,
55 registrar,
57 repair,
56 repair,
58 revsetlang,
57 revsetlang,
59 rewriteutil,
58 rewriteutil,
60 scmutil,
59 scmutil,
61 server,
60 server,
62 state as statemod,
61 state as statemod,
63 streamclone,
62 streamclone,
64 tags as tagsmod,
63 tags as tagsmod,
65 templatekw,
64 templatekw,
66 ui as uimod,
65 ui as uimod,
67 util,
66 util,
68 wireprotoserver,
67 wireprotoserver,
69 )
68 )
70 from .utils import (
69 from .utils import (
71 dateutil,
70 dateutil,
72 stringutil,
71 stringutil,
73 )
72 )
74
73
75 table = {}
74 table = {}
76 table.update(debugcommandsmod.command._table)
75 table.update(debugcommandsmod.command._table)
77
76
78 command = registrar.command(table)
77 command = registrar.command(table)
79 INTENT_READONLY = registrar.INTENT_READONLY
78 INTENT_READONLY = registrar.INTENT_READONLY
80
79
81 # common command options
80 # common command options
82
81
83 globalopts = [
82 globalopts = [
84 ('R', 'repository', '',
83 ('R', 'repository', '',
85 _('repository root directory or name of overlay bundle file'),
84 _('repository root directory or name of overlay bundle file'),
86 _('REPO')),
85 _('REPO')),
87 ('', 'cwd', '',
86 ('', 'cwd', '',
88 _('change working directory'), _('DIR')),
87 _('change working directory'), _('DIR')),
89 ('y', 'noninteractive', None,
88 ('y', 'noninteractive', None,
90 _('do not prompt, automatically pick the first choice for all prompts')),
89 _('do not prompt, automatically pick the first choice for all prompts')),
91 ('q', 'quiet', None, _('suppress output')),
90 ('q', 'quiet', None, _('suppress output')),
92 ('v', 'verbose', None, _('enable additional output')),
91 ('v', 'verbose', None, _('enable additional output')),
93 ('', 'color', '',
92 ('', 'color', '',
94 # i18n: 'always', 'auto', 'never', and 'debug' are keywords
93 # i18n: 'always', 'auto', 'never', and 'debug' are keywords
95 # and should not be translated
94 # and should not be translated
96 _("when to colorize (boolean, always, auto, never, or debug)"),
95 _("when to colorize (boolean, always, auto, never, or debug)"),
97 _('TYPE')),
96 _('TYPE')),
98 ('', 'config', [],
97 ('', 'config', [],
99 _('set/override config option (use \'section.name=value\')'),
98 _('set/override config option (use \'section.name=value\')'),
100 _('CONFIG')),
99 _('CONFIG')),
101 ('', 'debug', None, _('enable debugging output')),
100 ('', 'debug', None, _('enable debugging output')),
102 ('', 'debugger', None, _('start debugger')),
101 ('', 'debugger', None, _('start debugger')),
103 ('', 'encoding', encoding.encoding, _('set the charset encoding'),
102 ('', 'encoding', encoding.encoding, _('set the charset encoding'),
104 _('ENCODE')),
103 _('ENCODE')),
105 ('', 'encodingmode', encoding.encodingmode,
104 ('', 'encodingmode', encoding.encodingmode,
106 _('set the charset encoding mode'), _('MODE')),
105 _('set the charset encoding mode'), _('MODE')),
107 ('', 'traceback', None, _('always print a traceback on exception')),
106 ('', 'traceback', None, _('always print a traceback on exception')),
108 ('', 'time', None, _('time how long the command takes')),
107 ('', 'time', None, _('time how long the command takes')),
109 ('', 'profile', None, _('print command execution profile')),
108 ('', 'profile', None, _('print command execution profile')),
110 ('', 'version', None, _('output version information and exit')),
109 ('', 'version', None, _('output version information and exit')),
111 ('h', 'help', None, _('display help and exit')),
110 ('h', 'help', None, _('display help and exit')),
112 ('', 'hidden', False, _('consider hidden changesets')),
111 ('', 'hidden', False, _('consider hidden changesets')),
113 ('', 'pager', 'auto',
112 ('', 'pager', 'auto',
114 _("when to paginate (boolean, always, auto, or never)"), _('TYPE')),
113 _("when to paginate (boolean, always, auto, or never)"), _('TYPE')),
115 ]
114 ]
116
115
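# Editor's note -- not part of this changeset. Each entry above is a 4- or
# 5-tuple of (short name, long name, default, help text[, value label]); the
# per-command lists further down (walkopts + subrepoopts + dryrunopts, etc.)
# use the same shape. A hypothetical extra flag would look like:
#
#     ('', 'example-flag', None, _('illustrative flag, not a real option')),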
117 dryrunopts = cmdutil.dryrunopts
116 dryrunopts = cmdutil.dryrunopts
118 remoteopts = cmdutil.remoteopts
117 remoteopts = cmdutil.remoteopts
119 walkopts = cmdutil.walkopts
118 walkopts = cmdutil.walkopts
120 commitopts = cmdutil.commitopts
119 commitopts = cmdutil.commitopts
121 commitopts2 = cmdutil.commitopts2
120 commitopts2 = cmdutil.commitopts2
122 formatteropts = cmdutil.formatteropts
121 formatteropts = cmdutil.formatteropts
123 templateopts = cmdutil.templateopts
122 templateopts = cmdutil.templateopts
124 logopts = cmdutil.logopts
123 logopts = cmdutil.logopts
125 diffopts = cmdutil.diffopts
124 diffopts = cmdutil.diffopts
126 diffwsopts = cmdutil.diffwsopts
125 diffwsopts = cmdutil.diffwsopts
127 diffopts2 = cmdutil.diffopts2
126 diffopts2 = cmdutil.diffopts2
128 mergetoolopts = cmdutil.mergetoolopts
127 mergetoolopts = cmdutil.mergetoolopts
129 similarityopts = cmdutil.similarityopts
128 similarityopts = cmdutil.similarityopts
130 subrepoopts = cmdutil.subrepoopts
129 subrepoopts = cmdutil.subrepoopts
131 debugrevlogopts = cmdutil.debugrevlogopts
130 debugrevlogopts = cmdutil.debugrevlogopts
132
131
133 # Commands start here, listed alphabetically
132 # Commands start here, listed alphabetically
134
133
135 @command('^add',
134 @command('^add',
136 walkopts + subrepoopts + dryrunopts,
135 walkopts + subrepoopts + dryrunopts,
137 _('[OPTION]... [FILE]...'),
136 _('[OPTION]... [FILE]...'),
138 inferrepo=True)
137 inferrepo=True)
139 def add(ui, repo, *pats, **opts):
138 def add(ui, repo, *pats, **opts):
140 """add the specified files on the next commit
139 """add the specified files on the next commit
141
140
142 Schedule files to be version controlled and added to the
141 Schedule files to be version controlled and added to the
143 repository.
142 repository.
144
143
145 The files will be added to the repository at the next commit. To
144 The files will be added to the repository at the next commit. To
146 undo an add before that, see :hg:`forget`.
145 undo an add before that, see :hg:`forget`.
147
146
148 If no names are given, add all files to the repository (except
147 If no names are given, add all files to the repository (except
149 files matching ``.hgignore``).
148 files matching ``.hgignore``).
150
149
151 .. container:: verbose
150 .. container:: verbose
152
151
153 Examples:
152 Examples:
154
153
155 - New (unknown) files are added
154 - New (unknown) files are added
156 automatically by :hg:`add`::
155 automatically by :hg:`add`::
157
156
158 $ ls
157 $ ls
159 foo.c
158 foo.c
160 $ hg status
159 $ hg status
161 ? foo.c
160 ? foo.c
162 $ hg add
161 $ hg add
163 adding foo.c
162 adding foo.c
164 $ hg status
163 $ hg status
165 A foo.c
164 A foo.c
166
165
167 - Specific files to be added can be specified::
166 - Specific files to be added can be specified::
168
167
169 $ ls
168 $ ls
170 bar.c foo.c
169 bar.c foo.c
171 $ hg status
170 $ hg status
172 ? bar.c
171 ? bar.c
173 ? foo.c
172 ? foo.c
174 $ hg add bar.c
173 $ hg add bar.c
175 $ hg status
174 $ hg status
176 A bar.c
175 A bar.c
177 ? foo.c
176 ? foo.c
178
177
179 Returns 0 if all files are successfully added.
178 Returns 0 if all files are successfully added.
180 """
179 """
181
180
182 m = scmutil.match(repo[None], pats, pycompat.byteskwargs(opts))
181 m = scmutil.match(repo[None], pats, pycompat.byteskwargs(opts))
183 rejected = cmdutil.add(ui, repo, m, "", False, **opts)
182 rejected = cmdutil.add(ui, repo, m, "", False, **opts)
184 return rejected and 1 or 0
183 return rejected and 1 or 0
185
184
186 @command('addremove',
185 @command('addremove',
187 similarityopts + subrepoopts + walkopts + dryrunopts,
186 similarityopts + subrepoopts + walkopts + dryrunopts,
188 _('[OPTION]... [FILE]...'),
187 _('[OPTION]... [FILE]...'),
189 inferrepo=True)
188 inferrepo=True)
190 def addremove(ui, repo, *pats, **opts):
189 def addremove(ui, repo, *pats, **opts):
191 """add all new files, delete all missing files
190 """add all new files, delete all missing files
192
191
193 Add all new files and remove all missing files from the
192 Add all new files and remove all missing files from the
194 repository.
193 repository.
195
194
196 Unless names are given, new files are ignored if they match any of
195 Unless names are given, new files are ignored if they match any of
197 the patterns in ``.hgignore``. As with add, these changes take
196 the patterns in ``.hgignore``. As with add, these changes take
198 effect at the next commit.
197 effect at the next commit.
199
198
200 Use the -s/--similarity option to detect renamed files. This
199 Use the -s/--similarity option to detect renamed files. This
201 option takes a percentage between 0 (disabled) and 100 (files must
200 option takes a percentage between 0 (disabled) and 100 (files must
202 be identical) as its parameter. With a parameter greater than 0,
201 be identical) as its parameter. With a parameter greater than 0,
203 this compares every removed file with every added file and records
202 this compares every removed file with every added file and records
204 those similar enough as renames. Detecting renamed files this way
203 those similar enough as renames. Detecting renamed files this way
205 can be expensive. After using this option, :hg:`status -C` can be
204 can be expensive. After using this option, :hg:`status -C` can be
206 used to check which files were identified as moved or renamed. If
205 used to check which files were identified as moved or renamed. If
207 not specified, -s/--similarity defaults to 100 and only renames of
206 not specified, -s/--similarity defaults to 100 and only renames of
208 identical files are detected.
207 identical files are detected.
209
208
210 .. container:: verbose
209 .. container:: verbose
211
210
212 Examples:
211 Examples:
213
212
214 - A number of files (bar.c and foo.c) are new,
213 - A number of files (bar.c and foo.c) are new,
215 while foobar.c has been removed (without using :hg:`remove`)
214 while foobar.c has been removed (without using :hg:`remove`)
216 from the repository::
215 from the repository::
217
216
218 $ ls
217 $ ls
219 bar.c foo.c
218 bar.c foo.c
220 $ hg status
219 $ hg status
221 ! foobar.c
220 ! foobar.c
222 ? bar.c
221 ? bar.c
223 ? foo.c
222 ? foo.c
224 $ hg addremove
223 $ hg addremove
225 adding bar.c
224 adding bar.c
226 adding foo.c
225 adding foo.c
227 removing foobar.c
226 removing foobar.c
228 $ hg status
227 $ hg status
229 A bar.c
228 A bar.c
230 A foo.c
229 A foo.c
231 R foobar.c
230 R foobar.c
232
231
233 - A file foobar.c was moved to foo.c without using :hg:`rename`.
232 - A file foobar.c was moved to foo.c without using :hg:`rename`.
234 Afterwards, it was edited slightly::
233 Afterwards, it was edited slightly::
235
234
236 $ ls
235 $ ls
237 foo.c
236 foo.c
238 $ hg status
237 $ hg status
239 ! foobar.c
238 ! foobar.c
240 ? foo.c
239 ? foo.c
241 $ hg addremove --similarity 90
240 $ hg addremove --similarity 90
242 removing foobar.c
241 removing foobar.c
243 adding foo.c
242 adding foo.c
244 recording removal of foobar.c as rename to foo.c (94% similar)
243 recording removal of foobar.c as rename to foo.c (94% similar)
245 $ hg status -C
244 $ hg status -C
246 A foo.c
245 A foo.c
247 foobar.c
246 foobar.c
248 R foobar.c
247 R foobar.c
249
248
250 Returns 0 if all files are successfully added.
249 Returns 0 if all files are successfully added.
251 """
250 """
252 opts = pycompat.byteskwargs(opts)
251 opts = pycompat.byteskwargs(opts)
253 if not opts.get('similarity'):
252 if not opts.get('similarity'):
254 opts['similarity'] = '100'
253 opts['similarity'] = '100'
255 matcher = scmutil.match(repo[None], pats, opts)
254 matcher = scmutil.match(repo[None], pats, opts)
256 return scmutil.addremove(repo, matcher, "", opts)
255 return scmutil.addremove(repo, matcher, "", opts)
257
256
258 @command('^annotate|blame',
257 @command('^annotate|blame',
259 [('r', 'rev', '', _('annotate the specified revision'), _('REV')),
258 [('r', 'rev', '', _('annotate the specified revision'), _('REV')),
260 ('', 'follow', None,
259 ('', 'follow', None,
261 _('follow copies/renames and list the filename (DEPRECATED)')),
260 _('follow copies/renames and list the filename (DEPRECATED)')),
262 ('', 'no-follow', None, _("don't follow copies and renames")),
261 ('', 'no-follow', None, _("don't follow copies and renames")),
263 ('a', 'text', None, _('treat all files as text')),
262 ('a', 'text', None, _('treat all files as text')),
264 ('u', 'user', None, _('list the author (long with -v)')),
263 ('u', 'user', None, _('list the author (long with -v)')),
265 ('f', 'file', None, _('list the filename')),
264 ('f', 'file', None, _('list the filename')),
266 ('d', 'date', None, _('list the date (short with -q)')),
265 ('d', 'date', None, _('list the date (short with -q)')),
267 ('n', 'number', None, _('list the revision number (default)')),
266 ('n', 'number', None, _('list the revision number (default)')),
268 ('c', 'changeset', None, _('list the changeset')),
267 ('c', 'changeset', None, _('list the changeset')),
269 ('l', 'line-number', None, _('show line number at the first appearance')),
268 ('l', 'line-number', None, _('show line number at the first appearance')),
270 ('', 'skip', [], _('revision to not display (EXPERIMENTAL)'), _('REV')),
269 ('', 'skip', [], _('revision to not display (EXPERIMENTAL)'), _('REV')),
271 ] + diffwsopts + walkopts + formatteropts,
270 ] + diffwsopts + walkopts + formatteropts,
272 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'),
271 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'),
273 inferrepo=True)
272 inferrepo=True)
274 def annotate(ui, repo, *pats, **opts):
273 def annotate(ui, repo, *pats, **opts):
275 """show changeset information by line for each file
274 """show changeset information by line for each file
276
275
277 List changes in files, showing the revision id responsible for
276 List changes in files, showing the revision id responsible for
278 each line.
277 each line.
279
278
280 This command is useful for discovering when a change was made and
279 This command is useful for discovering when a change was made and
281 by whom.
280 by whom.
282
281
283 If you include --file, --user, or --date, the revision number is
282 If you include --file, --user, or --date, the revision number is
284 suppressed unless you also include --number.
283 suppressed unless you also include --number.
285
284
286 Without the -a/--text option, annotate will avoid processing files
285 Without the -a/--text option, annotate will avoid processing files
287 it detects as binary. With -a, annotate will annotate the file
286 it detects as binary. With -a, annotate will annotate the file
288 anyway, although the results will probably be neither useful
287 anyway, although the results will probably be neither useful
289 nor desirable.
288 nor desirable.
290
289
291 .. container:: verbose
290 .. container:: verbose
292
291
293 Template:
292 Template:
294
293
295 The following keywords are supported in addition to the common template
294 The following keywords are supported in addition to the common template
296 keywords and functions. See also :hg:`help templates`.
295 keywords and functions. See also :hg:`help templates`.
297
296
298 :lines: List of lines with annotation data.
297 :lines: List of lines with annotation data.
299 :path: String. Repository-absolute path of the specified file.
298 :path: String. Repository-absolute path of the specified file.
300
299
301 And each entry of ``{lines}`` provides the following sub-keywords in
300 And each entry of ``{lines}`` provides the following sub-keywords in
302 addition to ``{date}``, ``{node}``, ``{rev}``, ``{user}``, etc.
301 addition to ``{date}``, ``{node}``, ``{rev}``, ``{user}``, etc.
303
302
304 :line: String. Line content.
303 :line: String. Line content.
305 :lineno: Integer. Line number at that revision.
304 :lineno: Integer. Line number at that revision.
306 :path: String. Repository-absolute path of the file at that revision.
305 :path: String. Repository-absolute path of the file at that revision.
307
306
308 See :hg:`help templates.operators` for the list expansion syntax.
307 See :hg:`help templates.operators` for the list expansion syntax.
309
308
310 Returns 0 on success.
309 Returns 0 on success.
311 """
310 """
312 opts = pycompat.byteskwargs(opts)
311 opts = pycompat.byteskwargs(opts)
313 if not pats:
312 if not pats:
314 raise error.Abort(_('at least one filename or pattern is required'))
313 raise error.Abort(_('at least one filename or pattern is required'))
315
314
316 if opts.get('follow'):
315 if opts.get('follow'):
317 # --follow is deprecated and now just an alias for -f/--file
316 # --follow is deprecated and now just an alias for -f/--file
318 # to mimic the behavior of Mercurial before version 1.5
317 # to mimic the behavior of Mercurial before version 1.5
319 opts['file'] = True
318 opts['file'] = True
320
319
321 rev = opts.get('rev')
320 rev = opts.get('rev')
322 if rev:
321 if rev:
323 repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
322 repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
324 ctx = scmutil.revsingle(repo, rev)
323 ctx = scmutil.revsingle(repo, rev)
325
324
326 rootfm = ui.formatter('annotate', opts)
325 rootfm = ui.formatter('annotate', opts)
327 if ui.debugflag:
326 if ui.debugflag:
328 shorthex = pycompat.identity
327 shorthex = pycompat.identity
329 else:
328 else:
330 def shorthex(h):
329 def shorthex(h):
331 return h[:12]
330 return h[:12]
332 if ui.quiet:
331 if ui.quiet:
333 datefunc = dateutil.shortdate
332 datefunc = dateutil.shortdate
334 else:
333 else:
335 datefunc = dateutil.datestr
334 datefunc = dateutil.datestr
336 if ctx.rev() is None:
335 if ctx.rev() is None:
337 if opts.get('changeset'):
336 if opts.get('changeset'):
338 # omit "+" suffix which is appended to node hex
337 # omit "+" suffix which is appended to node hex
339 def formatrev(rev):
338 def formatrev(rev):
340 if rev == wdirrev:
339 if rev == wdirrev:
341 return '%d' % ctx.p1().rev()
340 return '%d' % ctx.p1().rev()
342 else:
341 else:
343 return '%d' % rev
342 return '%d' % rev
344 else:
343 else:
345 def formatrev(rev):
344 def formatrev(rev):
346 if rev == wdirrev:
345 if rev == wdirrev:
347 return '%d+' % ctx.p1().rev()
346 return '%d+' % ctx.p1().rev()
348 else:
347 else:
349 return '%d ' % rev
348 return '%d ' % rev
350 def formathex(h):
349 def formathex(h):
351 if h == wdirhex:
350 if h == wdirhex:
352 return '%s+' % shorthex(hex(ctx.p1().node()))
351 return '%s+' % shorthex(hex(ctx.p1().node()))
353 else:
352 else:
354 return '%s ' % shorthex(h)
353 return '%s ' % shorthex(h)
355 else:
354 else:
356 formatrev = b'%d'.__mod__
355 formatrev = b'%d'.__mod__
357 formathex = shorthex
356 formathex = shorthex
358
357
    opmap = [('user', ' ', lambda x: x.fctx.user(), ui.shortuser),
             ('rev', ' ', lambda x: scmutil.intrev(x.fctx), formatrev),
             ('node', ' ', lambda x: hex(scmutil.binnode(x.fctx)), formathex),
             ('date', ' ', lambda x: x.fctx.date(), util.cachefunc(datefunc)),
             ('path', ' ', lambda x: x.fctx.path(), pycompat.bytestr),
             ('lineno', ':', lambda x: x.lineno, pycompat.bytestr),
            ]
    opnamemap = {'rev': 'number', 'node': 'changeset', 'path': 'file',
                 'lineno': 'line_number'}

    if (not opts.get('user') and not opts.get('changeset')
        and not opts.get('date') and not opts.get('file')):
        opts['number'] = True

    linenumber = opts.get('line_number') is not None
    if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
        raise error.Abort(_('at least one of -n/-c is required for -l'))

    ui.pager('annotate')

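    # With a plain formatter every column value is pre-rendered to text by its
    # formatter function; structured formatters (json, templates, ...) get the
    # raw value and render it themselves.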
    if rootfm.isplain():
        def makefunc(get, fmt):
            return lambda x: fmt(get(x))
    else:
        def makefunc(get, fmt):
            return get
    datahint = rootfm.datahint()
    funcmap = [(makefunc(get, fmt), sep) for fn, sep, get, fmt in opmap
               if opts.get(opnamemap.get(fn, fn)) or fn in datahint]
    funcmap[0] = (funcmap[0][0], '') # no separator in front of first column
    fields = ' '.join(fn for fn, sep, get, fmt in opmap
                      if opts.get(opnamemap.get(fn, fn)) or fn in datahint)

    def bad(x, y):
        raise error.Abort("%s: %s" % (x, y))

    m = scmutil.match(ctx, pats, opts, badfn=bad)

    follow = not opts.get('no_follow')
    diffopts = patch.difffeatureopts(ui, opts, section='annotate',
                                     whitespace=True)
    skiprevs = opts.get('skip')
    if skiprevs:
        skiprevs = scmutil.revrange(repo, skiprevs)

    for abs in ctx.walk(m):
        fctx = ctx[abs]
        rootfm.startitem()
        rootfm.data(path=abs)
        if not opts.get('text') and fctx.isbinary():
            rootfm.plain(_("%s: binary file\n")
                         % ((pats and m.rel(abs)) or abs))
            continue

        fm = rootfm.nested('lines', tmpl='{rev}: {line}')
        lines = fctx.annotate(follow=follow, skiprevs=skiprevs,
                              diffopts=diffopts)
        if not lines:
            fm.end()
            continue
        formats = []
        pieces = []

        for f, sep in funcmap:
            l = [f(n) for n in lines]
            if fm.isplain():
                sizes = [encoding.colwidth(x) for x in l]
                ml = max(sizes)
                formats.append([sep + ' ' * (ml - w) + '%s' for w in sizes])
            else:
                formats.append(['%s' for x in l])
            pieces.append(l)

        for f, p, n in zip(zip(*formats), zip(*pieces), lines):
            fm.startitem()
            fm.context(fctx=n.fctx)
            fm.write(fields, "".join(f), *p)
            if n.skip:
                fmt = "* %s"
            else:
                fmt = ": %s"
            fm.write('line', fmt, n.text)

        if not lines[-1].text.endswith('\n'):
            fm.plain('\n')
        fm.end()

    rootfm.end()

@command('archive',
    [('', 'no-decode', None, _('do not pass files through decoders')),
    ('p', 'prefix', '', _('directory prefix for files in archive'),
     _('PREFIX')),
    ('r', 'rev', '', _('revision to distribute'), _('REV')),
    ('t', 'type', '', _('type of distribution to create'), _('TYPE')),
    ] + subrepoopts + walkopts,
    _('[OPTION]... DEST'))
def archive(ui, repo, dest, **opts):
    '''create an unversioned archive of a repository revision

    By default, the revision used is the parent of the working
    directory; use -r/--rev to specify a different revision.

    The archive type is automatically detected based on file
    extension (to override, use -t/--type).

    .. container:: verbose

      Examples:

      - create a zip file containing the 1.0 release::

          hg archive -r 1.0 project-1.0.zip

      - create a tarball excluding .hg files::

          hg archive project.tar.gz -X ".hg*"

    Valid types are:

    :``files``: a directory full of files (default)
    :``tar``:   tar archive, uncompressed
    :``tbz2``:  tar archive, compressed using bzip2
    :``tgz``:   tar archive, compressed using gzip
    :``uzip``:  zip archive, uncompressed
    :``zip``:   zip archive, compressed using deflate

    The exact name of the destination archive or directory is given
    using a format string; see :hg:`help export` for details.

    Each member added to an archive file has a directory prefix
    prepended. Use -p/--prefix to specify a format string for the
    prefix. The default is the basename of the archive, with suffixes
    removed.

    Returns 0 on success.
    '''

    opts = pycompat.byteskwargs(opts)
    rev = opts.get('rev')
    if rev:
        repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
    ctx = scmutil.revsingle(repo, rev)
    if not ctx:
        raise error.Abort(_('no working directory: please specify a revision'))
    node = ctx.node()
    dest = cmdutil.makefilename(ctx, dest)
    if os.path.realpath(dest) == repo.root:
        raise error.Abort(_('repository root cannot be destination'))

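    # Pick the archive type: an explicit -t/--type wins, then a guess based on
    # the destination file name, falling back to a plain 'files' directory.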
    kind = opts.get('type') or archival.guesskind(dest) or 'files'
    prefix = opts.get('prefix')

    if dest == '-':
        if kind == 'files':
            raise error.Abort(_('cannot archive plain files to stdout'))
        dest = cmdutil.makefileobj(ctx, dest)
        if not prefix:
            prefix = os.path.basename(repo.root) + '-%h'

    prefix = cmdutil.makefilename(ctx, prefix)
    match = scmutil.match(ctx, [], opts)
    archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
                     match, prefix, subrepos=opts.get('subrepos'))

@command('backout',
    [('', 'merge', None, _('merge with old dirstate parent after backout')),
    ('', 'commit', None,
     _('commit if no conflicts were encountered (DEPRECATED)')),
    ('', 'no-commit', None, _('do not commit')),
    ('', 'parent', '',
     _('parent to choose when backing out merge (DEPRECATED)'), _('REV')),
    ('r', 'rev', '', _('revision to backout'), _('REV')),
    ('e', 'edit', False, _('invoke editor on commit messages')),
    ] + mergetoolopts + walkopts + commitopts + commitopts2,
    _('[OPTION]... [-r] REV'))
def backout(ui, repo, node=None, rev=None, **opts):
    '''reverse effect of earlier changeset

    Prepare a new changeset with the effect of REV undone in the
    current working directory. If no conflicts were encountered,
    it will be committed immediately.

    If REV is the parent of the working directory, then this new changeset
    is committed automatically (unless --no-commit is specified).

    .. note::

       :hg:`backout` cannot be used to fix either an unwanted or
       incorrect merge.

    .. container:: verbose

      Examples:

      - Reverse the effect of the parent of the working directory.
        This backout will be committed immediately::

          hg backout -r .

      - Reverse the effect of previous bad revision 23::

          hg backout -r 23

      - Reverse the effect of previous bad revision 23 and
        leave changes uncommitted::

          hg backout -r 23 --no-commit
          hg commit -m "Backout revision 23"

      By default, the pending changeset will have one parent,
      maintaining a linear history. With --merge, the pending
      changeset will instead have two parents: the old parent of the
      working directory and a new child of REV that simply undoes REV.

      Before version 1.7, the behavior without --merge was equivalent
      to specifying --merge followed by :hg:`update --clean .` to
      cancel the merge and leave the child of REV as a head to be
      merged separately.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    See :hg:`help revert` for a way to restore files to the state
    of another revision.

    Returns 0 on success, 1 if nothing to backout or there are unresolved
    files.
    '''
    with repo.wlock(), repo.lock():
        return _dobackout(ui, repo, node, rev, **opts)

def _dobackout(ui, repo, node=None, rev=None, **opts):
    opts = pycompat.byteskwargs(opts)
    if opts.get('commit') and opts.get('no_commit'):
        raise error.Abort(_("cannot use --commit with --no-commit"))
    if opts.get('merge') and opts.get('no_commit'):
        raise error.Abort(_("cannot use --merge with --no-commit"))

    if rev and node:
        raise error.Abort(_("please specify just one revision"))

    if not rev:
        rev = node

    if not rev:
        raise error.Abort(_("please specify a revision to backout"))

    date = opts.get('date')
    if date:
        opts['date'] = dateutil.parsedate(date)

    cmdutil.checkunfinished(repo)
    cmdutil.bailifchanged(repo)
    node = scmutil.revsingle(repo, rev).node()

    op1, op2 = repo.dirstate.parents()
    if not repo.changelog.isancestor(node, op1):
        raise error.Abort(_('cannot backout change that is not an ancestor'))

    p1, p2 = repo.changelog.parents(node)
    if p1 == nullid:
        raise error.Abort(_('cannot backout a change with no parents'))
    if p2 != nullid:
        if not opts.get('parent'):
            raise error.Abort(_('cannot backout a merge changeset'))
        p = repo.lookup(opts['parent'])
        if p not in (p1, p2):
            raise error.Abort(_('%s is not a parent of %s') %
                              (short(p), short(node)))
        parent = p
    else:
        if opts.get('parent'):
            raise error.Abort(_('cannot use --parent on non-merge changeset'))
        parent = p1

    # the backout should appear on the same branch
    branch = repo.dirstate.branch()
    bheads = repo.branchheads(branch)
    rctx = scmutil.revsingle(repo, hex(parent))
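    # When not preparing a --merge and REV is not the working directory
    # parent, apply the backout in place: merge REV's parent into the working
    # directory using REV as the merge ancestor, then restore the original
    # dirstate parents so the result commits as a single-parent changeset.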
    if not opts.get('merge') and op1 != node:
        with dirstateguard.dirstateguard(repo, 'backout'):
            overrides = {('ui', 'forcemerge'): opts.get('tool', '')}
            with ui.configoverride(overrides, 'backout'):
                stats = mergemod.update(repo, parent, True, True, node, False)
            repo.setparents(op1, op2)
        hg._showstats(repo, stats)
        if stats.unresolvedcount:
            repo.ui.status(_("use 'hg resolve' to retry unresolved "
                             "file merges\n"))
            return 1
    else:
        hg.clean(repo, node, show_stats=False)
        repo.dirstate.setbranch(branch)
        cmdutil.revert(ui, repo, rctx, repo.dirstate.parents())

    if opts.get('no_commit'):
        msg = _("changeset %s backed out, "
                "don't forget to commit.\n")
        ui.status(msg % short(node))
        return 0

    def commitfunc(ui, repo, message, match, opts):
        editform = 'backout'
        e = cmdutil.getcommiteditor(editform=editform,
                                    **pycompat.strkwargs(opts))
        if not message:
            # we don't translate commit messages
            message = "Backed out changeset %s" % short(node)
            e = cmdutil.getcommiteditor(edit=True, editform=editform)
        return repo.commit(message, opts.get('user'), opts.get('date'),
                           match, editor=e)
    newnode = cmdutil.commit(ui, repo, commitfunc, [], opts)
    if not newnode:
        ui.status(_("nothing changed\n"))
        return 1
    cmdutil.commitstatus(repo, newnode, branch, bheads)

    def nice(node):
        return '%d:%s' % (repo.changelog.rev(node), short(node))
    ui.status(_('changeset %s backs out changeset %s\n') %
              (nice(repo.changelog.tip()), nice(node)))
    if opts.get('merge') and op1 != node:
        hg.clean(repo, op1, show_stats=False)
        ui.status(_('merging with changeset %s\n')
                  % nice(repo.changelog.tip()))
        overrides = {('ui', 'forcemerge'): opts.get('tool', '')}
        with ui.configoverride(overrides, 'backout'):
            return hg.merge(repo, hex(repo.changelog.tip()))
    return 0

@command('bisect',
    [('r', 'reset', False, _('reset bisect state')),
    ('g', 'good', False, _('mark changeset good')),
    ('b', 'bad', False, _('mark changeset bad')),
    ('s', 'skip', False, _('skip testing changeset')),
    ('e', 'extend', False, _('extend the bisect range')),
    ('c', 'command', '', _('use command to check changeset state'), _('CMD')),
    ('U', 'noupdate', False, _('do not update to target'))],
    _("[-gbsr] [-U] [-c CMD] [REV]"))
def bisect(ui, repo, rev=None, extra=None, command=None,
           reset=None, good=None, bad=None, skip=None, extend=None,
           noupdate=None):
    """subdivision search of changesets

    This command helps to find changesets which introduce problems. To
    use, mark the earliest changeset you know exhibits the problem as
    bad, then mark the latest changeset which is free from the problem
    as good. Bisect will update your working directory to a revision
    for testing (unless the -U/--noupdate option is specified). Once
    you have performed tests, mark the working directory as good or
    bad, and bisect will either update to another candidate changeset
    or announce that it has found the bad revision.

    As a shortcut, you can also use the revision argument to mark a
    revision as good or bad without checking it out first.

    If you supply a command, it will be used for automatic bisection.
    The environment variable HG_NODE will contain the ID of the
    changeset being tested. The exit status of the command will be
    used to mark revisions as good or bad: status 0 means good, 125
    means to skip the revision, 127 (command not found) will abort the
    bisection, and any other non-zero exit status means the revision
    is bad.

    .. container:: verbose

      Some examples:

      - start a bisection with known bad revision 34, and good revision 12::

          hg bisect --bad 34
          hg bisect --good 12

      - advance the current bisection by marking current revision as good or
        bad::

          hg bisect --good
          hg bisect --bad

      - mark the current revision, or a known revision, to be skipped (e.g. if
        that revision is not usable because of another issue)::

          hg bisect --skip
          hg bisect --skip 23

      - skip all revisions that do not touch directories ``foo`` or ``bar``::

          hg bisect --skip "!( file('path:foo') & file('path:bar') )"

      - forget the current bisection::

          hg bisect --reset

      - use 'make && make tests' to automatically find the first broken
        revision::

          hg bisect --reset
          hg bisect --bad 34
          hg bisect --good 12
          hg bisect --command "make && make tests"

      - see all changesets whose states are already known in the current
        bisection::

          hg log -r "bisect(pruned)"

      - see the changeset currently being bisected (especially useful
        if running with -U/--noupdate)::

          hg log -r "bisect(current)"

      - see all changesets that took part in the current bisection::

          hg log -r "bisect(range)"

      - you can even get a nice graph::

          hg log --graph -r "bisect(range)"

      See :hg:`help revisions.bisect` for more about the `bisect()` predicate.

    Returns 0 on success.
    """
    # backward compatibility
    if rev in "good bad reset init".split():
        ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
        cmd, rev, extra = rev, extra, None
        if cmd == "good":
            good = True
        elif cmd == "bad":
            bad = True
        else:
            reset = True
    elif extra:
        raise error.Abort(_('incompatible arguments'))

    incompatibles = {
        '--bad': bad,
        '--command': bool(command),
        '--extend': extend,
        '--good': good,
        '--reset': reset,
        '--skip': skip,
    }

    enabled = [x for x in incompatibles if incompatibles[x]]

    if len(enabled) > 1:
        raise error.Abort(_('%s and %s are incompatible') %
                          tuple(sorted(enabled)[0:2]))

    if reset:
        hbisect.resetstate(repo)
        return

    state = hbisect.load_state(repo)

    # update state
    if good or bad or skip:
        if rev:
            nodes = [repo[i].node() for i in scmutil.revrange(repo, [rev])]
        else:
            nodes = [repo.lookup('.')]
        if good:
            state['good'] += nodes
        elif bad:
            state['bad'] += nodes
        elif skip:
            state['skip'] += nodes
        hbisect.save_state(repo, state)
        if not (state['good'] and state['bad']):
            return

    def mayupdate(repo, node, show_stats=True):
        """commonly used update sequence"""
        if noupdate:
            return
        cmdutil.checkunfinished(repo)
        cmdutil.bailifchanged(repo)
        return hg.clean(repo, node, show_stats=show_stats)

    displayer = logcmdutil.changesetdisplayer(ui, repo, {})

    if command:
        changesets = 1
        if noupdate:
            try:
                node = state['current'][0]
            except LookupError:
                raise error.Abort(_('current bisect revision is unknown - '
                                    'start a new bisect to fix'))
        else:
            node, p2 = repo.dirstate.parents()
            if p2 != nullid:
                raise error.Abort(_('current bisect revision is a merge'))
        if rev:
            node = repo[scmutil.revsingle(repo, rev, node)].node()
        try:
            while changesets:
                # update state
                state['current'] = [node]
                hbisect.save_state(repo, state)
                status = ui.system(command, environ={'HG_NODE': hex(node)},
                                   blockedtag='bisect_check')
                if status == 125:
                    transition = "skip"
                elif status == 0:
                    transition = "good"
                # status < 0 means process was killed
                elif status == 127:
                    raise error.Abort(_("failed to execute %s") % command)
                elif status < 0:
                    raise error.Abort(_("%s killed") % command)
                else:
                    transition = "bad"
                state[transition].append(node)
                ctx = repo[node]
                ui.status(_('changeset %d:%s: %s\n') % (ctx.rev(), ctx,
                                                        transition))
                hbisect.checkstate(state)
                # bisect
                nodes, changesets, bgood = hbisect.bisect(repo, state)
                # update to next check
                node = nodes[0]
                mayupdate(repo, node, show_stats=False)
        finally:
            state['current'] = [node]
            hbisect.save_state(repo, state)
        hbisect.printresult(ui, repo, state, displayer, nodes, bgood)
        return

    hbisect.checkstate(state)

    # actually bisect
    nodes, changesets, good = hbisect.bisect(repo, state)
    if extend:
        if not changesets:
            extendnode = hbisect.extendrange(repo, state, nodes, good)
            if extendnode is not None:
                ui.write(_("Extending search to changeset %d:%s\n")
                         % (extendnode.rev(), extendnode))
                state['current'] = [extendnode.node()]
                hbisect.save_state(repo, state)
                return mayupdate(repo, extendnode.node())
        raise error.Abort(_("nothing to extend"))

    if changesets == 0:
        hbisect.printresult(ui, repo, state, displayer, nodes, good)
    else:
        assert len(nodes) == 1 # only a single node can be tested next
        node = nodes[0]
        # compute the approximate number of remaining tests
        tests, size = 0, 2
        while size <= changesets:
            tests, size = tests + 1, size * 2
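        # i.e. tests == floor(log2(changesets)) for changesets >= 2; with 100
        # candidates left, for example, the loop runs six times
        # (size = 2, 4, 8, 16, 32, 64), so about six more tests are expected.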
        rev = repo.changelog.rev(node)
        ui.write(_("Testing changeset %d:%s "
                   "(%d changesets remaining, ~%d tests)\n")
                 % (rev, short(node), changesets, tests))
        state['current'] = [node]
        hbisect.save_state(repo, state)
        return mayupdate(repo, node)

@command('bookmarks|bookmark',
    [('f', 'force', False, _('force')),
    ('r', 'rev', '', _('revision for bookmark action'), _('REV')),
    ('d', 'delete', False, _('delete a given bookmark')),
    ('m', 'rename', '', _('rename a given bookmark'), _('OLD')),
    ('i', 'inactive', False, _('mark a bookmark inactive')),
    ('l', 'list', False, _('list existing bookmarks')),
    ] + formatteropts,
    _('hg bookmarks [OPTIONS]... [NAME]...'))
def bookmark(ui, repo, *names, **opts):
    '''create a new bookmark or list existing bookmarks

    Bookmarks are labels on changesets to help track lines of development.
    Bookmarks are unversioned and can be moved, renamed and deleted.
    Deleting or moving a bookmark has no effect on the associated changesets.

    Creating or updating to a bookmark causes it to be marked as 'active'.
    The active bookmark is indicated with a '*'.
    When a commit is made, the active bookmark will advance to the new commit.
    A plain :hg:`update` will also advance an active bookmark, if possible.
    Updating away from a bookmark will cause it to be deactivated.

    Bookmarks can be pushed and pulled between repositories (see
    :hg:`help push` and :hg:`help pull`). If a shared bookmark has
    diverged, a new 'divergent bookmark' of the form 'name@path' will
    be created. Using :hg:`merge` will resolve the divergence.

    Specifying bookmark as '.' to -m/-d/-l options is equivalent to specifying
    the active bookmark's name.

    A bookmark named '@' has the special property that :hg:`clone` will
    check it out by default if it exists.

    .. container:: verbose

      Template:

      The following keywords are supported in addition to the common template
      keywords and functions such as ``{bookmark}``. See also
      :hg:`help templates`.

      :active: Boolean. True if the bookmark is active.

      Examples:

      - create an active bookmark for a new line of development::

          hg book new-feature

      - create an inactive bookmark as a place marker::

          hg book -i reviewed

      - create an inactive bookmark on another changeset::

          hg book -r .^ tested

      - rename bookmark turkey to dinner::

          hg book -m turkey dinner

      - move the '@' bookmark from another branch::

          hg book -f @

      - print only the active bookmark name::

          hg book -ql .
    '''
    opts = pycompat.byteskwargs(opts)
    force = opts.get('force')
    rev = opts.get('rev')
    inactive = opts.get('inactive') # meaning add/rename to inactive bookmark

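    # Exactly one action is performed per run: an explicit --delete, --rename
    # or --list wins; otherwise NAME or --rev arguments imply 'add', a bare
    # --inactive deactivates the current bookmark, and with no arguments the
    # bookmarks are simply listed.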
    selactions = [k for k in ['delete', 'rename', 'list'] if opts.get(k)]
    if len(selactions) > 1:
        raise error.Abort(_('--%s and --%s are incompatible')
                          % tuple(selactions[:2]))
    if selactions:
        action = selactions[0]
    elif names or rev:
        action = 'add'
    elif inactive:
        action = 'inactive' # meaning deactivate
    else:
        action = 'list'

    if rev and action in {'delete', 'rename', 'list'}:
        raise error.Abort(_("--rev is incompatible with --%s") % action)
    if inactive and action in {'delete', 'list'}:
        raise error.Abort(_("--inactive is incompatible with --%s") % action)
    if not names and action in {'add', 'delete'}:
        raise error.Abort(_("bookmark name required"))

    if action in {'add', 'delete', 'rename', 'inactive'}:
        with repo.wlock(), repo.lock(), repo.transaction('bookmark') as tr:
            if action == 'delete':
                names = pycompat.maplist(repo._bookmarks.expandname, names)
                bookmarks.delete(repo, tr, names)
            elif action == 'rename':
                if not names:
                    raise error.Abort(_("new bookmark name required"))
                elif len(names) > 1:
                    raise error.Abort(_("only one new bookmark name allowed"))
                oldname = repo._bookmarks.expandname(opts['rename'])
                bookmarks.rename(repo, tr, oldname, names[0], force, inactive)
            elif action == 'add':
                bookmarks.addbookmarks(repo, tr, names, rev, force, inactive)
            elif action == 'inactive':
                if len(repo._bookmarks) == 0:
                    ui.status(_("no bookmarks set\n"))
                elif not repo._activebookmark:
                    ui.status(_("no active bookmark\n"))
                else:
                    bookmarks.deactivate(repo)
    elif action == 'list':
        names = pycompat.maplist(repo._bookmarks.expandname, names)
        with ui.formatter('bookmarks', opts) as fm:
            bookmarks.printbookmarks(ui, repo, fm, names)
    else:
        raise error.ProgrammingError('invalid action: %s' % action)

@command('branch',
    [('f', 'force', None,
     _('set branch name even if it shadows an existing branch')),
    ('C', 'clean', None, _('reset branch name to parent branch name')),
    ('r', 'rev', [], _('change branches of the given revs (EXPERIMENTAL)')),
    ],
    _('[-fC] [NAME]'))
def branch(ui, repo, label=None, **opts):
    """set or show the current branch name

    .. note::

       Branch names are permanent and global. Use :hg:`bookmark` to create a
       light-weight bookmark instead. See :hg:`help glossary` for more
       information about named branches and bookmarks.

    With no argument, show the current branch name. With one argument,
    set the working directory branch name (the branch will not exist
    in the repository until the next commit). Standard practice
    recommends that primary development take place on the 'default'
    branch.

    Unless -f/--force is specified, branch will not let you set a
    branch name that already exists.

    Use -C/--clean to reset the working directory branch to that of
    the parent of the working directory, negating a previous branch
    change.

    Use the command :hg:`update` to switch to an existing branch. Use
    :hg:`commit --close-branch` to mark this branch head as closed.
    When all heads of a branch are closed, the branch will be
    considered closed.

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)
    revs = opts.get('rev')
    if label:
        label = label.strip()

    if not opts.get('clean') and not label:
        if revs:
            raise error.Abort(_("no branch name specified for the revisions"))
        ui.write("%s\n" % repo.dirstate.branch())
        return

    with repo.wlock():
        if opts.get('clean'):
            label = repo[None].p1().branch()
            repo.dirstate.setbranch(label)
            ui.status(_('reset working directory to branch %s\n') % label)
        elif label:

            scmutil.checknewlabel(repo, label, 'branch')
            if revs:
                return cmdutil.changebranch(ui, repo, revs, label)

            if not opts.get('force') and label in repo.branchmap():
                if label not in [p.branch() for p in repo[None].parents()]:
                    raise error.Abort(_('a branch of the same name already'
                                        ' exists'),
                                      # i18n: "it" refers to an existing branch
                                      hint=_("use 'hg update' to switch to it"))

            repo.dirstate.setbranch(label)
            ui.status(_('marked working directory as branch %s\n') % label)

            # find any open named branches aside from default
            others = [n for n, h, t, c in repo.branchmap().iterbranches()
                      if n != "default" and not c]
            if not others:
                ui.status(_('(branches are permanent and global, '
                            'did you want a bookmark?)\n'))

@command('branches',
    [('a', 'active', False,
      _('show only branches that have unmerged heads (DEPRECATED)')),
    ('c', 'closed', False, _('show normal and closed branches')),
    ] + formatteropts,
    _('[-c]'),
    intents={INTENT_READONLY})
def branches(ui, repo, active=False, closed=False, **opts):
    """list repository named branches

    List the repository's named branches, indicating which ones are
    inactive. If -c/--closed is specified, also list branches which have
    been marked closed (see :hg:`commit --close-branch`).

    Use the command :hg:`update` to switch to an existing branch.

    .. container:: verbose

      Template:

      The following keywords are supported in addition to the common template
      keywords and functions such as ``{branch}``. See also
      :hg:`help templates`.

      :active: Boolean. True if the branch is active.
      :closed: Boolean. True if the branch is closed.
      :current: Boolean. True if it is the current branch.

    Returns 0.
    """

    opts = pycompat.byteskwargs(opts)
    ui.pager('branches')
    fm = ui.formatter('branches', opts)
    hexfunc = fm.hexfunc

    allheads = set(repo.heads())
    branches = []
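    # Classify each named branch: it is active if at least one of its open
    # heads is also a repository head; closed branches are shown later only
    # when -c/--closed is given.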
1157 for tag, heads, tip, isclosed in repo.branchmap().iterbranches():
1156 for tag, heads, tip, isclosed in repo.branchmap().iterbranches():
1158 isactive = False
1157 isactive = False
1159 if not isclosed:
1158 if not isclosed:
1160 openheads = set(repo.branchmap().iteropen(heads))
1159 openheads = set(repo.branchmap().iteropen(heads))
1161 isactive = bool(openheads & allheads)
1160 isactive = bool(openheads & allheads)
1162 branches.append((tag, repo[tip], isactive, not isclosed))
1161 branches.append((tag, repo[tip], isactive, not isclosed))
1163 branches.sort(key=lambda i: (i[2], i[1].rev(), i[0], i[3]),
1162 branches.sort(key=lambda i: (i[2], i[1].rev(), i[0], i[3]),
1164 reverse=True)
1163 reverse=True)
1165
1164
1166 for tag, ctx, isactive, isopen in branches:
1165 for tag, ctx, isactive, isopen in branches:
1167 if active and not isactive:
1166 if active and not isactive:
1168 continue
1167 continue
1169 if isactive:
1168 if isactive:
1170 label = 'branches.active'
1169 label = 'branches.active'
1171 notice = ''
1170 notice = ''
1172 elif not isopen:
1171 elif not isopen:
1173 if not closed:
1172 if not closed:
1174 continue
1173 continue
1175 label = 'branches.closed'
1174 label = 'branches.closed'
1176 notice = _(' (closed)')
1175 notice = _(' (closed)')
1177 else:
1176 else:
1178 label = 'branches.inactive'
1177 label = 'branches.inactive'
1179 notice = _(' (inactive)')
1178 notice = _(' (inactive)')
1180 current = (tag == repo.dirstate.branch())
1179 current = (tag == repo.dirstate.branch())
1181 if current:
1180 if current:
1182 label = 'branches.current'
1181 label = 'branches.current'
1183
1182
1184 fm.startitem()
1183 fm.startitem()
1185 fm.write('branch', '%s', tag, label=label)
1184 fm.write('branch', '%s', tag, label=label)
1186 rev = ctx.rev()
1185 rev = ctx.rev()
1187 padsize = max(31 - len("%d" % rev) - encoding.colwidth(tag), 0)
1186 padsize = max(31 - len("%d" % rev) - encoding.colwidth(tag), 0)
1188 fmt = ' ' * padsize + ' %d:%s'
1187 fmt = ' ' * padsize + ' %d:%s'
1189 fm.condwrite(not ui.quiet, 'rev node', fmt, rev, hexfunc(ctx.node()),
1188 fm.condwrite(not ui.quiet, 'rev node', fmt, rev, hexfunc(ctx.node()),
1190 label='log.changeset changeset.%s' % ctx.phasestr())
1189 label='log.changeset changeset.%s' % ctx.phasestr())
1191 fm.context(ctx=ctx)
1190 fm.context(ctx=ctx)
1192 fm.data(active=isactive, closed=not isopen, current=current)
1191 fm.data(active=isactive, closed=not isopen, current=current)
1193 if not ui.quiet:
1192 if not ui.quiet:
1194 fm.plain(notice)
1193 fm.plain(notice)
1195 fm.plain('\n')
1194 fm.plain('\n')
1196 fm.end()
1195 fm.end()
1197
1196
@command('bundle',
    [('f', 'force', None, _('run even when the destination is unrelated')),
    ('r', 'rev', [], _('a changeset intended to be added to the destination'),
     _('REV')),
    ('b', 'branch', [], _('a specific branch you would like to bundle'),
     _('BRANCH')),
    ('', 'base', [],
     _('a base changeset assumed to be available at the destination'),
     _('REV')),
    ('a', 'all', None, _('bundle all changesets in the repository')),
    ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE')),
    ] + remoteopts,
    _('[-f] [-t BUNDLESPEC] [-a] [-r REV]... [--base REV]... FILE [DEST]'))
def bundle(ui, repo, fname, dest=None, **opts):
    """create a bundle file

    Generate a bundle file containing data to be transferred to another
    repository.

    To create a bundle containing all changesets, use -a/--all
    (or --base null). Otherwise, hg assumes the destination will have
    all the nodes you specify with --base parameters. Otherwise, hg
    will assume the repository has all the nodes in destination, or
    default-push/default if no destination is specified, where destination
    is the repository you provide through DEST option.

    You can change bundle format with the -t/--type option. See
    :hg:`help bundlespec` for documentation on this format. By default,
    the most appropriate format is used and compression defaults to
    bzip2.

    The bundle file can then be transferred using conventional means
    and applied to another repository with the unbundle or pull
    command. This is useful when direct push and pull are not
    available or when exporting an entire repository is undesirable.

    Applying bundles preserves all changeset contents including
    permissions, copy/rename information, and revision history.

    Returns 0 on success, 1 if no changes found.
    """
    opts = pycompat.byteskwargs(opts)
    revs = None
    if 'rev' in opts:
        revstrings = opts['rev']
        revs = scmutil.revrange(repo, revstrings)
        if revstrings and not revs:
            raise error.Abort(_('no commits to bundle'))

    bundletype = opts.get('type', 'bzip2').lower()
    try:
        bundlespec = exchange.parsebundlespec(repo, bundletype, strict=False)
    except error.UnsupportedBundleSpecification as e:
        raise error.Abort(pycompat.bytestr(e),
                          hint=_("see 'hg help bundlespec' for supported "
                                 "values for --type"))
    cgversion = bundlespec.contentopts["cg.version"]

    # Packed bundles are a pseudo bundle format for now.
    if cgversion == 's1':
        raise error.Abort(_('packed bundles cannot be produced by "hg bundle"'),
                          hint=_("use 'hg debugcreatestreamclonebundle'"))

    if opts.get('all'):
        if dest:
            raise error.Abort(_("--all is incompatible with specifying "
                                "a destination"))
        if opts.get('base'):
            ui.warn(_("ignoring --base because --all was specified\n"))
        base = [nullrev]
    else:
        base = scmutil.revrange(repo, opts.get('base'))
    if cgversion not in changegroup.supportedoutgoingversions(repo):
        raise error.Abort(_("repository does not support bundle version %s") %
                          cgversion)

    if base:
        if dest:
            raise error.Abort(_("--base is incompatible with specifying "
                                "a destination"))
        common = [repo[rev].node() for rev in base]
        heads = [repo[r].node() for r in revs] if revs else None
        outgoing = discovery.outgoing(repo, common, heads)
    else:
        dest = ui.expandpath(dest or 'default-push', dest or 'default')
        dest, branches = hg.parseurl(dest, opts.get('branch'))
        other = hg.peer(repo, opts, dest)
        revs = [repo[r].hex() for r in revs]
        revs, checkout = hg.addbranchrevs(repo, repo, branches, revs)
        heads = revs and pycompat.maplist(repo.lookup, revs) or revs
        outgoing = discovery.findcommonoutgoing(repo, other,
                                                onlyheads=heads,
                                                force=opts.get('force'),
                                                portable=True)

    if not outgoing.missing:
        scmutil.nochangesfound(ui, repo, not base and outgoing.excluded)
        return 1

    if cgversion == '01': #bundle1
        bversion = 'HG10' + bundlespec.wirecompression
        bcompression = None
    elif cgversion in ('02', '03'):
        bversion = 'HG20'
        bcompression = bundlespec.wirecompression
    else:
        raise error.ProgrammingError(
            'bundle: unexpected changegroup version %s' % cgversion)

    # TODO compression options should be derived from bundlespec parsing.
    # This is a temporary hack to allow adjusting bundle compression
    # level without a) formalizing the bundlespec changes to declare it
    # b) introducing a command flag.
    compopts = {}
    complevel = ui.configint('experimental',
                             'bundlecomplevel.' + bundlespec.compression)
    if complevel is None:
        complevel = ui.configint('experimental', 'bundlecomplevel')
    if complevel is not None:
        compopts['level'] = complevel

    # Allow overriding the bundling of obsmarker in phases through
    # configuration while we don't have a bundle version that include them
    if repo.ui.configbool('experimental', 'evolution.bundle-obsmarker'):
        bundlespec.contentopts['obsolescence'] = True
    if repo.ui.configbool('experimental', 'bundle-phases'):
        bundlespec.contentopts['phases'] = True

    bundle2.writenewbundle(ui, repo, 'bundle', fname, bversion, outgoing,
                           bundlespec.contentopts, compression=bcompression,
                           compopts=compopts)

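# [editor's sketch] The compression-level lookup above falls back from the
# per-algorithm key 'experimental.bundlecomplevel.<compression>' to the
# generic 'experimental.bundlecomplevel'. A rough stand-in using a plain
# dict in place of ui.configint() (hypothetical, for illustration only):
def _complevel_sketch(config, compression):
    level = config.get('experimental.bundlecomplevel.' + compression)
    if level is None:
        level = config.get('experimental.bundlecomplevel')
    return {} if level is None else {'level': level}

# e.g. _complevel_sketch({'experimental.bundlecomplevel': 9}, 'zstd')
# yields {'level': 9}, mirroring how compopts is built above.
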
@command('cat',
    [('o', 'output', '',
     _('print output to file with formatted name'), _('FORMAT')),
    ('r', 'rev', '', _('print the given revision'), _('REV')),
    ('', 'decode', None, _('apply any matching decode filter')),
    ] + walkopts + formatteropts,
    _('[OPTION]... FILE...'),
    inferrepo=True,
    intents={INTENT_READONLY})
def cat(ui, repo, file1, *pats, **opts):
    """output the current or given revision of files

    Print the specified files as they were at the given revision. If
    no revision is given, the parent of the working directory is used.

    Output may be to a file, in which case the name of the file is
    given using a template string. See :hg:`help templates`. In addition
    to the common template keywords, the following formatting rules are
    supported:

    :``%%``: literal "%" character
    :``%s``: basename of file being printed
    :``%d``: dirname of file being printed, or '.' if in repository root
    :``%p``: root-relative path name of file being printed
    :``%H``: changeset hash (40 hexadecimal digits)
    :``%R``: changeset revision number
    :``%h``: short-form changeset hash (12 hexadecimal digits)
    :``%r``: zero-padded changeset revision number
    :``%b``: basename of the exporting repository
    :``\\``: literal "\\" character

    .. container:: verbose

      Template:

      The following keywords are supported in addition to the common template
      keywords and functions. See also :hg:`help templates`.

      :data: String. File content.
      :path: String. Repository-absolute path of the file.

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)
    rev = opts.get('rev')
    if rev:
        repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
    ctx = scmutil.revsingle(repo, rev)
    m = scmutil.match(ctx, (file1,) + pats, opts)
    fntemplate = opts.pop('output', '')
    if cmdutil.isstdiofilename(fntemplate):
        fntemplate = ''

    if fntemplate:
        fm = formatter.nullformatter(ui, 'cat', opts)
    else:
        ui.pager('cat')
        fm = ui.formatter('cat', opts)
    with fm:
        return cmdutil.cat(ui, repo, ctx, m, fm, fntemplate, '',
                           **pycompat.strkwargs(opts))

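# [editor's sketch] The -o/--output rules documented above (%s, %d, %p, ...)
# turn one template into one file name per file and revision. The real
# expansion is done inside cmdutil; this is only a rough illustration of a
# few of the documented keys, with made-up input values:
def _cat_output_sketch(template, path, rev):
    import posixpath
    repl = {'%s': posixpath.basename(path),
            '%d': posixpath.dirname(path) or '.',
            '%p': path,
            '%R': '%d' % rev,
            '%%': '%'}
    for key, value in repl.items():
        template = template.replace(key, value)
    return template

# _cat_output_sketch('out/%R/%s', 'lib/util.py', 42) -> 'out/42/util.py'
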
@command('^clone',
    [('U', 'noupdate', None, _('the clone will include an empty working '
                               'directory (only a repository)')),
    ('u', 'updaterev', '', _('revision, tag, or branch to check out'),
     _('REV')),
    ('r', 'rev', [], _('do not clone everything, but include this changeset'
                       ' and its ancestors'), _('REV')),
    ('b', 'branch', [], _('do not clone everything, but include this branch\'s'
                          ' changesets and their ancestors'), _('BRANCH')),
    ('', 'pull', None, _('use pull protocol to copy metadata')),
    ('', 'uncompressed', None,
     _('an alias to --stream (DEPRECATED)')),
    ('', 'stream', None,
     _('clone with minimal data processing')),
    ] + remoteopts,
    _('[OPTION]... SOURCE [DEST]'),
    norepo=True)
def clone(ui, source, dest=None, **opts):
    """make a copy of an existing repository

    Create a copy of an existing repository in a new directory.

    If no destination directory name is specified, it defaults to the
    basename of the source.

    The location of the source is added to the new repository's
    ``.hg/hgrc`` file, as the default to be used for future pulls.

    Only local paths and ``ssh://`` URLs are supported as
    destinations. For ``ssh://`` destinations, no working directory or
    ``.hg/hgrc`` will be created on the remote side.

    If the source repository has a bookmark called '@' set, that
    revision will be checked out in the new repository by default.

    To check out a particular version, use -u/--update, or
    -U/--noupdate to create a clone with no working directory.

    To pull only a subset of changesets, specify one or more revisions
    identifiers with -r/--rev or branches with -b/--branch. The
    resulting clone will contain only the specified changesets and
    their ancestors. These options (or 'clone src#rev dest') imply
    --pull, even for local source repositories.

    In normal clone mode, the remote normalizes repository data into a common
    exchange format and the receiving end translates this data into its local
    storage format. --stream activates a different clone mode that essentially
    copies repository files from the remote with minimal data processing. This
    significantly reduces the CPU cost of a clone both remotely and locally.
    However, it often increases the transferred data size by 30-40%. This can
    result in substantially faster clones where I/O throughput is plentiful,
    especially for larger repositories. A side-effect of --stream clones is
    that storage settings and requirements on the remote are applied locally:
    a modern client may inherit legacy or inefficient storage used by the
    remote or a legacy Mercurial client may not be able to clone from a
    modern Mercurial remote.

    .. note::

       Specifying a tag will include the tagged changeset but not the
       changeset containing the tag.

    .. container:: verbose

      For efficiency, hardlinks are used for cloning whenever the
      source and destination are on the same filesystem (note this
      applies only to the repository data, not to the working
      directory). Some filesystems, such as AFS, implement hardlinking
      incorrectly, but do not report errors. In these cases, use the
      --pull option to avoid hardlinking.

      Mercurial will update the working directory to the first applicable
      revision from this list:

      a) null if -U or the source repository has no changesets
      b) if -u . and the source repository is local, the first parent of
         the source repository's working directory
      c) the changeset specified with -u (if a branch name, this means the
         latest head of that branch)
      d) the changeset specified with -r
      e) the tipmost head specified with -b
      f) the tipmost head specified with the url#branch source syntax
      g) the revision marked with the '@' bookmark, if present
      h) the tipmost head of the default branch
      i) tip

      When cloning from servers that support it, Mercurial may fetch
      pre-generated data from a server-advertised URL or inline from the
      same stream. When this is done, hooks operating on incoming changesets
      and changegroups may fire more than once, once for each pre-generated
      bundle and as well as for any additional remaining data. In addition,
      if an error occurs, the repository may be rolled back to a partial
      clone. This behavior may change in future releases.
      See :hg:`help -e clonebundles` for more.

      Examples:

      - clone a remote repository to a new directory named hg/::

          hg clone https://www.mercurial-scm.org/repo/hg/

      - create a lightweight local clone::

          hg clone project/ project-feature/

      - clone from an absolute path on an ssh server (note double-slash)::

          hg clone ssh://user@server//home/projects/alpha/

      - do a streaming clone while checking out a specified version::

          hg clone --stream http://server/repo -u 1.5

      - create a repository without changesets after a particular revision::

          hg clone -r 04e544 experimental/ good/

      - clone (and track) a particular named branch::

          hg clone https://www.mercurial-scm.org/repo/hg/#stable

    See :hg:`help urls` for details on specifying URLs.

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)
    if opts.get('noupdate') and opts.get('updaterev'):
        raise error.Abort(_("cannot specify both --noupdate and --updaterev"))

    # --include/--exclude can come from narrow or sparse.
    includepats, excludepats = None, None

    # hg.clone() differentiates between None and an empty set. So make sure
    # patterns are sets if narrow is requested without patterns.
    if opts.get('narrow'):
        includepats = set()
        excludepats = set()

        if opts.get('include'):
            includepats = narrowspec.parsepatterns(opts.get('include'))
        if opts.get('exclude'):
            excludepats = narrowspec.parsepatterns(opts.get('exclude'))

    r = hg.clone(ui, opts, source, dest,
                 pull=opts.get('pull'),
                 stream=opts.get('stream') or opts.get('uncompressed'),
                 revs=opts.get('rev'),
                 update=opts.get('updaterev') or not opts.get('noupdate'),
                 branch=opts.get('branch'),
                 shareopts=opts.get('shareopts'),
                 storeincludepats=includepats,
                 storeexcludepats=excludepats)

    return r is None

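# [editor's sketch] As the comment in clone() notes, hg.clone() distinguishes
# "no narrowing requested" (None) from "narrow clone with no patterns yet"
# (an empty set). A stand-alone restatement of that branch with opts as a
# plain dict; the real code additionally runs the patterns through
# narrowspec.parsepatterns (illustration only, not the hg.clone() API):
def _narrow_patterns_sketch(opts):
    includepats, excludepats = None, None
    if opts.get('narrow'):
        includepats = set(opts.get('include') or [])
        excludepats = set(opts.get('exclude') or [])
    return includepats, excludepats

# _narrow_patterns_sketch({})                -> (None, None)
# _narrow_patterns_sketch({'narrow': True})  -> (set(), set())
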
@command('^commit|ci',
    [('A', 'addremove', None,
     _('mark new/missing files as added/removed before committing')),
    ('', 'close-branch', None,
     _('mark a branch head as closed')),
    ('', 'amend', None, _('amend the parent of the working directory')),
    ('s', 'secret', None, _('use the secret phase for committing')),
    ('e', 'edit', None, _('invoke editor on commit messages')),
    ('i', 'interactive', None, _('use interactive mode')),
    ] + walkopts + commitopts + commitopts2 + subrepoopts,
    _('[OPTION]... [FILE]...'),
    inferrepo=True)
def commit(ui, repo, *pats, **opts):
    """commit the specified files or all outstanding changes

    Commit changes to the given files into the repository. Unlike a
    centralized SCM, this operation is a local operation. See
    :hg:`push` for a way to actively distribute your changes.

    If a list of files is omitted, all changes reported by :hg:`status`
    will be committed.

    If you are committing the result of a merge, do not provide any
    filenames or -I/-X filters.

    If no commit message is specified, Mercurial starts your
    configured editor where you can enter a message. In case your
    commit fails, you will find a backup of your message in
    ``.hg/last-message.txt``.

    The --close-branch flag can be used to mark the current branch
    head closed. When all heads of a branch are closed, the branch
    will be considered closed and no longer listed.

    The --amend flag can be used to amend the parent of the
    working directory with a new commit that contains the changes
    in the parent in addition to those currently reported by :hg:`status`,
    if there are any. The old commit is stored in a backup bundle in
    ``.hg/strip-backup`` (see :hg:`help bundle` and :hg:`help unbundle`
    on how to restore it).

    Message, user and date are taken from the amended commit unless
    specified. When a message isn't specified on the command line,
    the editor will open with the message of the amended commit.

    It is not possible to amend public changesets (see :hg:`help phases`)
    or changesets that have children.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    Returns 0 on success, 1 if nothing changed.

    .. container:: verbose

      Examples:

      - commit all files ending in .py::

          hg commit --include "set:**.py"

      - commit all non-binary files::

          hg commit --exclude "set:binary()"

      - amend the current commit and set the date to now::

          hg commit --amend --date now
    """
    with repo.wlock(), repo.lock():
        return _docommit(ui, repo, *pats, **opts)

def _docommit(ui, repo, *pats, **opts):
    if opts.get(r'interactive'):
        opts.pop(r'interactive')
        ret = cmdutil.dorecord(ui, repo, commit, None, False,
                               cmdutil.recordfilter, *pats,
                               **opts)
        # ret can be 0 (no changes to record) or the value returned by
        # commit(), 1 if nothing changed or None on success.
        return 1 if ret == 0 else ret

    opts = pycompat.byteskwargs(opts)
    if opts.get('subrepos'):
        if opts.get('amend'):
            raise error.Abort(_('cannot amend with --subrepos'))
        # Let --subrepos on the command line override config setting.
        ui.setconfig('ui', 'commitsubrepos', True, 'commit')

    cmdutil.checkunfinished(repo, commit=True)

    branch = repo[None].branch()
    bheads = repo.branchheads(branch)

    extra = {}
    if opts.get('close_branch'):
        extra['close'] = '1'

        if not bheads:
            raise error.Abort(_('can only close branch heads'))
        elif opts.get('amend'):
            if repo[None].parents()[0].p1().branch() != branch and \
               repo[None].parents()[0].p2().branch() != branch:
                raise error.Abort(_('can only close branch heads'))

    if opts.get('amend'):
        if ui.configbool('ui', 'commitsubrepos'):
            raise error.Abort(_('cannot amend with ui.commitsubrepos enabled'))

        old = repo['.']
        rewriteutil.precheck(repo, [old.rev()], 'amend')

        # Currently histedit gets confused if an amend happens while histedit
        # is in progress. Since we have a checkunfinished command, we are
        # temporarily honoring it.
        #
        # Note: eventually this guard will be removed. Please do not expect
        # this behavior to remain.
        if not obsolete.isenabled(repo, obsolete.createmarkersopt):
            cmdutil.checkunfinished(repo)

        node = cmdutil.amend(ui, repo, old, extra, pats, opts)
        if node == old.node():
            ui.status(_("nothing changed\n"))
            return 1
    else:
        def commitfunc(ui, repo, message, match, opts):
            overrides = {}
            if opts.get('secret'):
                overrides[('phases', 'new-commit')] = 'secret'

            baseui = repo.baseui
            with baseui.configoverride(overrides, 'commit'):
                with ui.configoverride(overrides, 'commit'):
                    editform = cmdutil.mergeeditform(repo[None],
                                                     'commit.normal')
                    editor = cmdutil.getcommiteditor(
                        editform=editform, **pycompat.strkwargs(opts))
                    return repo.commit(message,
                                       opts.get('user'),
                                       opts.get('date'),
                                       match,
                                       editor=editor,
                                       extra=extra)

        node = cmdutil.commit(ui, repo, commitfunc, pats, opts)

        if not node:
            stat = cmdutil.postcommitstatus(repo, pats, opts)
            if stat[3]:
                ui.status(_("nothing changed (%d missing files, see "
                            "'hg status')\n") % len(stat[3]))
            else:
                ui.status(_("nothing changed\n"))
            return 1

    cmdutil.commitstatus(repo, node, branch, bheads, opts)

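# [editor's sketch] In the interactive branch of _docommit() above, dorecord()
# can return 0 when there was nothing to record, while commit() itself signals
# success with None; the command normalizes that to the usual exit codes. A
# tiny stand-alone restatement of that mapping (illustrative only):
def _normalize_record_result_sketch(ret):
    # 0 from dorecord means "no changes to record" -> report 1, just as
    # commit reports "nothing changed"; anything else is passed through.
    return 1 if ret == 0 else ret

# _normalize_record_result_sketch(0)    -> 1
# _normalize_record_result_sketch(None) -> None (success)
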
@command('config|showconfig|debugconfig',
    [('u', 'untrusted', None, _('show untrusted configuration options')),
     ('e', 'edit', None, _('edit user config')),
     ('l', 'local', None, _('edit repository config')),
     ('g', 'global', None, _('edit global config'))] + formatteropts,
    _('[-u] [NAME]...'),
    optionalrepo=True,
    intents={INTENT_READONLY})
def config(ui, repo, *values, **opts):
    """show combined config settings from all hgrc files

    With no arguments, print names and values of all config items.

    With one argument of the form section.name, print just the value
    of that config item.

    With multiple arguments, print names and values of all config
    items with matching section names or section.names.

    With --edit, start an editor on the user-level config file. With
    --global, edit the system-wide config file. With --local, edit the
    repository-level config file.

    With --debug, the source (filename and line number) is printed
    for each config item.

    See :hg:`help config` for more information about config files.

    .. container:: verbose

      Template:

      The following keywords are supported. See also :hg:`help templates`.

      :name: String. Config name.
      :source: String. Filename and line number where the item is defined.
      :value: String. Config value.

    Returns 0 on success, 1 if NAME does not exist.

    """

    opts = pycompat.byteskwargs(opts)
    if opts.get('edit') or opts.get('local') or opts.get('global'):
        if opts.get('local') and opts.get('global'):
            raise error.Abort(_("can't use --local and --global together"))

        if opts.get('local'):
            if not repo:
                raise error.Abort(_("can't use --local outside a repository"))
            paths = [repo.vfs.join('hgrc')]
        elif opts.get('global'):
            paths = rcutil.systemrcpath()
        else:
            paths = rcutil.userrcpath()

        for f in paths:
            if os.path.exists(f):
                break
        else:
            if opts.get('global'):
                samplehgrc = uimod.samplehgrcs['global']
            elif opts.get('local'):
                samplehgrc = uimod.samplehgrcs['local']
            else:
                samplehgrc = uimod.samplehgrcs['user']

            f = paths[0]
            fp = open(f, "wb")
            fp.write(util.tonativeeol(samplehgrc))
            fp.close()

        editor = ui.geteditor()
        ui.system("%s \"%s\"" % (editor, f),
                  onerr=error.Abort, errprefix=_("edit failed"),
                  blockedtag='config_edit')
        return
    ui.pager('config')
    fm = ui.formatter('config', opts)
    for t, f in rcutil.rccomponents():
        if t == 'path':
            ui.debug('read config from: %s\n' % f)
        elif t == 'items':
            for section, name, value, source in f:
                ui.debug('set config by: %s\n' % source)
        else:
            raise error.ProgrammingError('unknown rctype: %s' % t)
    untrusted = bool(opts.get('untrusted'))

    selsections = selentries = []
    if values:
        selsections = [v for v in values if '.' not in v]
        selentries = [v for v in values if '.' in v]
    uniquesel = (len(selentries) == 1 and not selsections)
    selsections = set(selsections)
    selentries = set(selentries)

    matched = False
    for section, name, value in ui.walkconfig(untrusted=untrusted):
        source = ui.configsource(section, name, untrusted)
        value = pycompat.bytestr(value)
        if fm.isplain():
            source = source or 'none'
            value = value.replace('\n', '\\n')
        entryname = section + '.' + name
        if values and not (section in selsections or entryname in selentries):
            continue
        fm.startitem()
        fm.condwrite(ui.debugflag, 'source', '%s: ', source)
        if uniquesel:
            fm.data(name=entryname)
            fm.write('value', '%s\n', value)
        else:
            fm.write('name value', '%s=%s\n', entryname, value)
        matched = True
    fm.end()
    if matched:
        return 0
    return 1

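# [editor's sketch] The NAME arguments to config are split into whole sections
# ('ui') and fully qualified entries ('ui.username'), and each config item is
# kept if it matches either set. A stand-alone restatement over a plain list
# of (section, name, value) tuples (illustration only):
def _select_config_sketch(values, items):
    selsections = {v for v in values if '.' not in v}
    selentries = {v for v in values if '.' in v}
    selected = []
    for section, name, value in items:
        entryname = section + '.' + name
        if values and not (section in selsections or entryname in selentries):
            continue
        selected.append((entryname, value))
    return selected

# _select_config_sketch(['ui', 'diff.git'],
#                       [('ui', 'username', 'x'), ('diff', 'git', 'yes'),
#                        ('web', 'port', '8000')])
# -> [('ui.username', 'x'), ('diff.git', 'yes')]
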
@command('copy|cp',
    [('A', 'after', None, _('record a copy that has already occurred')),
    ('f', 'force', None, _('forcibly copy over an existing managed file')),
    ] + walkopts + dryrunopts,
    _('[OPTION]... [SOURCE]... DEST'))
def copy(ui, repo, *pats, **opts):
    """mark files as copied for the next commit

    Mark dest as having copies of source files. If dest is a
    directory, copies are put in that directory. If dest is a file,
    the source must be a single file.

    By default, this command copies the contents of files as they
    exist in the working directory. If invoked with -A/--after, the
    operation is recorded, but no copying is performed.

    This command takes effect with the next commit. To undo a copy
    before that, see :hg:`revert`.

    Returns 0 on success, 1 if errors are encountered.
    """
    opts = pycompat.byteskwargs(opts)
    with repo.wlock(False):
        return cmdutil.copy(ui, repo, pats, opts)

@command('debugcommands', [], _('[COMMAND]'), norepo=True)
def debugcommands(ui, cmd='', *args):
    """list all available commands and options"""
    for cmd, vals in sorted(table.iteritems()):
        cmd = cmd.split('|')[0].strip('^')
        opts = ', '.join([i[1] for i in vals[1]])
        ui.write('%s: %s\n' % (cmd, opts))

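# [editor's sketch] Each key in the command table can carry aliases and a
# leading '^' marker (e.g. '^commit|ci' or 'config|showconfig|debugconfig');
# the listing above reduces that to the primary name. A one-line restatement
# (illustration only):
def _primary_name_sketch(tablekey):
    return tablekey.split('|')[0].strip('^')

# _primary_name_sketch('^commit|ci')                      -> 'commit'
# _primary_name_sketch('config|showconfig|debugconfig')   -> 'config'
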
@command('debugcomplete',
    [('o', 'options', None, _('show the command options'))],
    _('[-o] CMD'),
    norepo=True)
def debugcomplete(ui, cmd='', **opts):
    """returns the completion list associated with the given command"""

    if opts.get(r'options'):
        options = []
        otables = [globalopts]
        if cmd:
            aliases, entry = cmdutil.findcmd(cmd, table, False)
            otables.append(entry[1])
        for t in otables:
            for o in t:
                if "(DEPRECATED)" in o[3]:
                    continue
                if o[0]:
                    options.append('-%s' % o[0])
                options.append('--%s' % o[1])
        ui.write("%s\n" % "\n".join(options))
        return

    cmdlist, unused_allcmds = cmdutil.findpossible(cmd, table)
    if ui.verbose:
        cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
    ui.write("%s\n" % "\n".join(sorted(cmdlist)))

1885 @command('^diff',
1884 @command('^diff',
1886 [('r', 'rev', [], _('revision'), _('REV')),
1885 [('r', 'rev', [], _('revision'), _('REV')),
1887 ('c', 'change', '', _('change made by revision'), _('REV'))
1886 ('c', 'change', '', _('change made by revision'), _('REV'))
1888 ] + diffopts + diffopts2 + walkopts + subrepoopts,
1887 ] + diffopts + diffopts2 + walkopts + subrepoopts,
1889 _('[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...'),
1888 _('[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...'),
1890 inferrepo=True,
1889 inferrepo=True,
1891 intents={INTENT_READONLY})
1890 intents={INTENT_READONLY})
1892 def diff(ui, repo, *pats, **opts):
1891 def diff(ui, repo, *pats, **opts):
1893 """diff repository (or selected files)
1892 """diff repository (or selected files)
1894
1893
1895 Show differences between revisions for the specified files.
1894 Show differences between revisions for the specified files.
1896
1895
1897 Differences between files are shown using the unified diff format.
1896 Differences between files are shown using the unified diff format.
1898
1897
1899 .. note::
1898 .. note::
1900
1899
1901 :hg:`diff` may generate unexpected results for merges, as it will
1900 :hg:`diff` may generate unexpected results for merges, as it will
1902 default to comparing against the working directory's first
1901 default to comparing against the working directory's first
1903 parent changeset if no revisions are specified.
1902 parent changeset if no revisions are specified.
1904
1903
1905 When two revision arguments are given, then changes are shown
1904 When two revision arguments are given, then changes are shown
1906 between those revisions. If only one revision is specified then
1905 between those revisions. If only one revision is specified then
1907 that revision is compared to the working directory, and, when no
1906 that revision is compared to the working directory, and, when no
1908 revisions are specified, the working directory files are compared
1907 revisions are specified, the working directory files are compared
1909 to its first parent.
1908 to its first parent.
1910
1909
1911 Alternatively you can specify -c/--change with a revision to see
1910 Alternatively you can specify -c/--change with a revision to see
1912 the changes in that changeset relative to its first parent.
1911 the changes in that changeset relative to its first parent.
1913
1912
1914 Without the -a/--text option, diff will avoid generating diffs of
1913 Without the -a/--text option, diff will avoid generating diffs of
1915 files it detects as binary. With -a, diff will generate a diff
1914 files it detects as binary. With -a, diff will generate a diff
1916 anyway, probably with undesirable results.
1915 anyway, probably with undesirable results.
1917
1916
1918 Use the -g/--git option to generate diffs in the git extended diff
1917 Use the -g/--git option to generate diffs in the git extended diff
1919 format. For more information, read :hg:`help diffs`.
1918 format. For more information, read :hg:`help diffs`.
1920
1919
1921 .. container:: verbose
1920 .. container:: verbose
1922
1921
1923 Examples:
1922 Examples:
1924
1923
1925 - compare a file in the current working directory to its parent::
1924 - compare a file in the current working directory to its parent::
1926
1925
1927 hg diff foo.c
1926 hg diff foo.c
1928
1927
1929 - compare two historical versions of a directory, with rename info::
1928 - compare two historical versions of a directory, with rename info::
1930
1929
1931 hg diff --git -r 1.0:1.2 lib/
1930 hg diff --git -r 1.0:1.2 lib/
1932
1931
1933 - get change stats relative to the last change on some date::
1932 - get change stats relative to the last change on some date::
1934
1933
1935 hg diff --stat -r "date('may 2')"
1934 hg diff --stat -r "date('may 2')"
1936
1935
1937 - diff all newly-added files that contain a keyword::
1936 - diff all newly-added files that contain a keyword::
1938
1937
1939 hg diff "set:added() and grep(GNU)"
1938 hg diff "set:added() and grep(GNU)"
1940
1939
1941 - compare a revision and its parents::
1940 - compare a revision and its parents::
1942
1941
1943 hg diff -c 9353 # compare against first parent
1942 hg diff -c 9353 # compare against first parent
1944 hg diff -r 9353^:9353 # same using revset syntax
1943 hg diff -r 9353^:9353 # same using revset syntax
1945 hg diff -r 9353^2:9353 # compare against the second parent
1944 hg diff -r 9353^2:9353 # compare against the second parent
1946
1945
1947 Returns 0 on success.
1946 Returns 0 on success.
1948 """
1947 """
1949
1948
1950 opts = pycompat.byteskwargs(opts)
1949 opts = pycompat.byteskwargs(opts)
1951 revs = opts.get('rev')
1950 revs = opts.get('rev')
1952 change = opts.get('change')
1951 change = opts.get('change')
1953 stat = opts.get('stat')
1952 stat = opts.get('stat')
1954 reverse = opts.get('reverse')
1953 reverse = opts.get('reverse')
1955
1954
1956 if revs and change:
1955 if revs and change:
1957 msg = _('cannot specify --rev and --change at the same time')
1956 msg = _('cannot specify --rev and --change at the same time')
1958 raise error.Abort(msg)
1957 raise error.Abort(msg)
1959 elif change:
1958 elif change:
1960 repo = scmutil.unhidehashlikerevs(repo, [change], 'nowarn')
1959 repo = scmutil.unhidehashlikerevs(repo, [change], 'nowarn')
1961 ctx2 = scmutil.revsingle(repo, change, None)
1960 ctx2 = scmutil.revsingle(repo, change, None)
1962 ctx1 = ctx2.p1()
1961 ctx1 = ctx2.p1()
1963 else:
1962 else:
1964 repo = scmutil.unhidehashlikerevs(repo, revs, 'nowarn')
1963 repo = scmutil.unhidehashlikerevs(repo, revs, 'nowarn')
1965 ctx1, ctx2 = scmutil.revpair(repo, revs)
1964 ctx1, ctx2 = scmutil.revpair(repo, revs)
1966 node1, node2 = ctx1.node(), ctx2.node()
1965 node1, node2 = ctx1.node(), ctx2.node()
1967
1966
1968 if reverse:
1967 if reverse:
1969 node1, node2 = node2, node1
1968 node1, node2 = node2, node1
1970
1969
1971 diffopts = patch.diffallopts(ui, opts)
1970 diffopts = patch.diffallopts(ui, opts)
1972 m = scmutil.match(ctx2, pats, opts)
1971 m = scmutil.match(ctx2, pats, opts)
1973 m = matchmod.intersectmatchers(m, repo.narrowmatch())
1972 m = repo.narrowmatch(m)
1974 ui.pager('diff')
1973 ui.pager('diff')
1975 logcmdutil.diffordiffstat(ui, repo, diffopts, node1, node2, m, stat=stat,
1974 logcmdutil.diffordiffstat(ui, repo, diffopts, node1, node2, m, stat=stat,
1976 listsubrepos=opts.get('subrepos'),
1975 listsubrepos=opts.get('subrepos'),
1977 root=opts.get('root'))
1976 root=opts.get('root'))
1978
1977
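# Illustrative sketch (not Mercurial's API): the hunk above replaces the explicit
# matchmod.intersectmatchers(m, repo.narrowmatch()) call with repo.narrowmatch(m),
# i.e. narrowmatch() now accepts a matcher to intersect with. The stdlib-only
# predicates below only demonstrate the intersection idea; narrow_match() and
# intersect() are hypothetical names, not the real implementation.

def intersect(match_a, match_b):
    """Accept a path only if both matchers accept it."""
    return lambda path: match_a(path) and match_b(path)

def narrow_match(narrow_includes, extra_match=None):
    """Match paths under the narrow includes, optionally intersected with an
    extra matcher -- mirroring the repo.narrowmatch(m) call in the hunk above."""
    base = lambda path: any(path.startswith(prefix) for prefix in narrow_includes)
    return intersect(base, extra_match) if extra_match else base

nm = narrow_match(['lib/'], extra_match=lambda path: path.endswith('.c'))
assert nm('lib/foo.c') and not nm('lib/foo.h') and not nm('src/foo.c')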
1979 @command('^export',
1978 @command('^export',
1980 [('B', 'bookmark', '',
1979 [('B', 'bookmark', '',
1981 _('export changes only reachable by given bookmark')),
1980 _('export changes only reachable by given bookmark')),
1982 ('o', 'output', '',
1981 ('o', 'output', '',
1983 _('print output to file with formatted name'), _('FORMAT')),
1982 _('print output to file with formatted name'), _('FORMAT')),
1984 ('', 'switch-parent', None, _('diff against the second parent')),
1983 ('', 'switch-parent', None, _('diff against the second parent')),
1985 ('r', 'rev', [], _('revisions to export'), _('REV')),
1984 ('r', 'rev', [], _('revisions to export'), _('REV')),
1986 ] + diffopts + formatteropts,
1985 ] + diffopts + formatteropts,
1987 _('[OPTION]... [-o OUTFILESPEC] [-r] [REV]...'),
1986 _('[OPTION]... [-o OUTFILESPEC] [-r] [REV]...'),
1988 intents={INTENT_READONLY})
1987 intents={INTENT_READONLY})
1989 def export(ui, repo, *changesets, **opts):
1988 def export(ui, repo, *changesets, **opts):
1990 """dump the header and diffs for one or more changesets
1989 """dump the header and diffs for one or more changesets
1991
1990
1992 Print the changeset header and diffs for one or more revisions.
1991 Print the changeset header and diffs for one or more revisions.
1993 If no revision is given, the parent of the working directory is used.
1992 If no revision is given, the parent of the working directory is used.
1994
1993
1995 The information shown in the changeset header is: author, date,
1994 The information shown in the changeset header is: author, date,
1996 branch name (if non-default), changeset hash, parent(s) and commit
1995 branch name (if non-default), changeset hash, parent(s) and commit
1997 comment.
1996 comment.
1998
1997
1999 .. note::
1998 .. note::
2000
1999
2001 :hg:`export` may generate unexpected diff output for merge
2000 :hg:`export` may generate unexpected diff output for merge
2002 changesets, as it will compare the merge changeset against its
2001 changesets, as it will compare the merge changeset against its
2003 first parent only.
2002 first parent only.
2004
2003
2005 Output may be to a file, in which case the name of the file is
2004 Output may be to a file, in which case the name of the file is
2006 given using a template string. See :hg:`help templates`. In addition
2005 given using a template string. See :hg:`help templates`. In addition
2007 to the common template keywords, the following formatting rules are
2006 to the common template keywords, the following formatting rules are
2008 supported:
2007 supported:
2009
2008
2010 :``%%``: literal "%" character
2009 :``%%``: literal "%" character
2011 :``%H``: changeset hash (40 hexadecimal digits)
2010 :``%H``: changeset hash (40 hexadecimal digits)
2012 :``%N``: number of patches being generated
2011 :``%N``: number of patches being generated
2013 :``%R``: changeset revision number
2012 :``%R``: changeset revision number
2014 :``%b``: basename of the exporting repository
2013 :``%b``: basename of the exporting repository
2015 :``%h``: short-form changeset hash (12 hexadecimal digits)
2014 :``%h``: short-form changeset hash (12 hexadecimal digits)
2016 :``%m``: first line of the commit message (only alphanumeric characters)
2015 :``%m``: first line of the commit message (only alphanumeric characters)
2017 :``%n``: zero-padded sequence number, starting at 1
2016 :``%n``: zero-padded sequence number, starting at 1
2018 :``%r``: zero-padded changeset revision number
2017 :``%r``: zero-padded changeset revision number
2019 :``\\``: literal "\\" character
2018 :``\\``: literal "\\" character
2020
2019
2021 Without the -a/--text option, export will avoid generating diffs
2020 Without the -a/--text option, export will avoid generating diffs
2022 of files it detects as binary. With -a, export will generate a
2021 of files it detects as binary. With -a, export will generate a
2023 diff anyway, probably with undesirable results.
2022 diff anyway, probably with undesirable results.
2024
2023
2025 With -B/--bookmark, changesets reachable by the given bookmark are
2024 With -B/--bookmark, changesets reachable by the given bookmark are
2026 selected.
2025 selected.
2027
2026
2028 Use the -g/--git option to generate diffs in the git extended diff
2027 Use the -g/--git option to generate diffs in the git extended diff
2029 format. See :hg:`help diffs` for more information.
2028 format. See :hg:`help diffs` for more information.
2030
2029
2031 With the --switch-parent option, the diff will be against the
2030 With the --switch-parent option, the diff will be against the
2032 second parent. This can be useful for reviewing a merge.
2031 second parent. This can be useful for reviewing a merge.
2033
2032
2034 .. container:: verbose
2033 .. container:: verbose
2035
2034
2036 Template:
2035 Template:
2037
2036
2038 The following keywords are supported in addition to the common template
2037 The following keywords are supported in addition to the common template
2039 keywords and functions. See also :hg:`help templates`.
2038 keywords and functions. See also :hg:`help templates`.
2040
2039
2041 :diff: String. Diff content.
2040 :diff: String. Diff content.
2042 :parents: List of strings. Parent nodes of the changeset.
2041 :parents: List of strings. Parent nodes of the changeset.
2043
2042
2044 Examples:
2043 Examples:
2045
2044
2046 - use export and import to transplant a bugfix to the current
2045 - use export and import to transplant a bugfix to the current
2047 branch::
2046 branch::
2048
2047
2049 hg export -r 9353 | hg import -
2048 hg export -r 9353 | hg import -
2050
2049
2051 - export all the changesets between two revisions to a file with
2050 - export all the changesets between two revisions to a file with
2052 rename information::
2051 rename information::
2053
2052
2054 hg export --git -r 123:150 > changes.txt
2053 hg export --git -r 123:150 > changes.txt
2055
2054
2056 - split outgoing changes into a series of patches with
2055 - split outgoing changes into a series of patches with
2057 descriptive names::
2056 descriptive names::
2058
2057
2059 hg export -r "outgoing()" -o "%n-%m.patch"
2058 hg export -r "outgoing()" -o "%n-%m.patch"
2060
2059
2061 Returns 0 on success.
2060 Returns 0 on success.
2062 """
2061 """
2063 opts = pycompat.byteskwargs(opts)
2062 opts = pycompat.byteskwargs(opts)
2064 bookmark = opts.get('bookmark')
2063 bookmark = opts.get('bookmark')
2065 changesets += tuple(opts.get('rev', []))
2064 changesets += tuple(opts.get('rev', []))
2066
2065
2067 if bookmark and changesets:
2066 if bookmark and changesets:
2068 raise error.Abort(_("-r and -B are mutually exclusive"))
2067 raise error.Abort(_("-r and -B are mutually exclusive"))
2069
2068
2070 if bookmark:
2069 if bookmark:
2071 if bookmark not in repo._bookmarks:
2070 if bookmark not in repo._bookmarks:
2072 raise error.Abort(_("bookmark '%s' not found") % bookmark)
2071 raise error.Abort(_("bookmark '%s' not found") % bookmark)
2073
2072
2074 revs = scmutil.bookmarkrevs(repo, bookmark)
2073 revs = scmutil.bookmarkrevs(repo, bookmark)
2075 else:
2074 else:
2076 if not changesets:
2075 if not changesets:
2077 changesets = ['.']
2076 changesets = ['.']
2078
2077
2079 repo = scmutil.unhidehashlikerevs(repo, changesets, 'nowarn')
2078 repo = scmutil.unhidehashlikerevs(repo, changesets, 'nowarn')
2080 revs = scmutil.revrange(repo, changesets)
2079 revs = scmutil.revrange(repo, changesets)
2081
2080
2082 if not revs:
2081 if not revs:
2083 raise error.Abort(_("export requires at least one changeset"))
2082 raise error.Abort(_("export requires at least one changeset"))
2084 if len(revs) > 1:
2083 if len(revs) > 1:
2085 ui.note(_('exporting patches:\n'))
2084 ui.note(_('exporting patches:\n'))
2086 else:
2085 else:
2087 ui.note(_('exporting patch:\n'))
2086 ui.note(_('exporting patch:\n'))
2088
2087
2089 fntemplate = opts.get('output')
2088 fntemplate = opts.get('output')
2090 if cmdutil.isstdiofilename(fntemplate):
2089 if cmdutil.isstdiofilename(fntemplate):
2091 fntemplate = ''
2090 fntemplate = ''
2092
2091
2093 if fntemplate:
2092 if fntemplate:
2094 fm = formatter.nullformatter(ui, 'export', opts)
2093 fm = formatter.nullformatter(ui, 'export', opts)
2095 else:
2094 else:
2096 ui.pager('export')
2095 ui.pager('export')
2097 fm = ui.formatter('export', opts)
2096 fm = ui.formatter('export', opts)
2098 with fm:
2097 with fm:
2099 cmdutil.export(repo, revs, fm, fntemplate=fntemplate,
2098 cmdutil.export(repo, revs, fm, fntemplate=fntemplate,
2100 switch_parent=opts.get('switch_parent'),
2099 switch_parent=opts.get('switch_parent'),
2101 opts=patch.diffallopts(ui, opts))
2100 opts=patch.diffallopts(ui, opts))
2102
2101
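# Illustrative sketch only: a rough stand-in for the "%"-rules listed in the export
# help text above. expand_filename_template() is a hypothetical helper, not
# Mercurial's implementation; the padding width and the %m cleanup are assumptions
# made for this example, and only a subset of the documented keys is handled.

import re

def expand_filename_template(template, seqno, total, firstline, shorthash):
    mapping = {
        '%': '%',                                       # %% -> literal percent
        'N': '%d' % total,                              # number of patches
        'n': '%0*d' % (len('%d' % total), seqno),       # zero-padded sequence number
        'm': re.sub(r'[^A-Za-z0-9]', '', firstline),    # first line, alphanumerics only
        'h': shorthash,                                 # short changeset hash
    }
    # single pass so an expanded value is never re-expanded
    return re.sub(r'%(.)', lambda m: mapping.get(m.group(1), m.group(0)), template)

assert expand_filename_template('%n-%m.patch', 3, 12, 'fix lock race!', 'abcdef012345') \
    == '03-fixlockrace.patch'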
2103 @command('files',
2102 @command('files',
2104 [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
2103 [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
2105 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
2104 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
2106 ] + walkopts + formatteropts + subrepoopts,
2105 ] + walkopts + formatteropts + subrepoopts,
2107 _('[OPTION]... [FILE]...'),
2106 _('[OPTION]... [FILE]...'),
2108 intents={INTENT_READONLY})
2107 intents={INTENT_READONLY})
2109 def files(ui, repo, *pats, **opts):
2108 def files(ui, repo, *pats, **opts):
2110 """list tracked files
2109 """list tracked files
2111
2110
2112 Print files under Mercurial control in the working directory or
2111 Print files under Mercurial control in the working directory or
2113 specified revision for given files (excluding removed files).
2112 specified revision for given files (excluding removed files).
2114 Files can be specified as filenames or filesets.
2113 Files can be specified as filenames or filesets.
2115
2114
2116 If no files are given to match, this command prints the names
2115 If no files are given to match, this command prints the names
2117 of all files under Mercurial control.
2116 of all files under Mercurial control.
2118
2117
2119 .. container:: verbose
2118 .. container:: verbose
2120
2119
2121 Template:
2120 Template:
2122
2121
2123 The following keywords are supported in addition to the common template
2122 The following keywords are supported in addition to the common template
2124 keywords and functions. See also :hg:`help templates`.
2123 keywords and functions. See also :hg:`help templates`.
2125
2124
2126 :flags: String. Character denoting file's symlink and executable bits.
2125 :flags: String. Character denoting file's symlink and executable bits.
2127 :path: String. Repository-absolute path of the file.
2126 :path: String. Repository-absolute path of the file.
2128 :size: Integer. Size of the file in bytes.
2127 :size: Integer. Size of the file in bytes.
2129
2128
2130 Examples:
2129 Examples:
2131
2130
2132 - list all files under the current directory::
2131 - list all files under the current directory::
2133
2132
2134 hg files .
2133 hg files .
2135
2134
2136 - show sizes and flags for the current revision::
2135 - show sizes and flags for the current revision::
2137
2136
2138 hg files -vr .
2137 hg files -vr .
2139
2138
2140 - list all files named README::
2139 - list all files named README::
2141
2140
2142 hg files -I "**/README"
2141 hg files -I "**/README"
2143
2142
2144 - list all binary files::
2143 - list all binary files::
2145
2144
2146 hg files "set:binary()"
2145 hg files "set:binary()"
2147
2146
2148 - find files containing a regular expression::
2147 - find files containing a regular expression::
2149
2148
2150 hg files "set:grep('bob')"
2149 hg files "set:grep('bob')"
2151
2150
2152 - search tracked file contents with xargs and grep::
2151 - search tracked file contents with xargs and grep::
2153
2152
2154 hg files -0 | xargs -0 grep foo
2153 hg files -0 | xargs -0 grep foo
2155
2154
2156 See :hg:`help patterns` and :hg:`help filesets` for more information
2155 See :hg:`help patterns` and :hg:`help filesets` for more information
2157 on specifying file patterns.
2156 on specifying file patterns.
2158
2157
2159 Returns 0 if a match is found, 1 otherwise.
2158 Returns 0 if a match is found, 1 otherwise.
2160
2159
2161 """
2160 """
2162
2161
2163 opts = pycompat.byteskwargs(opts)
2162 opts = pycompat.byteskwargs(opts)
2164 rev = opts.get('rev')
2163 rev = opts.get('rev')
2165 if rev:
2164 if rev:
2166 repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
2165 repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
2167 ctx = scmutil.revsingle(repo, rev, None)
2166 ctx = scmutil.revsingle(repo, rev, None)
2168
2167
2169 end = '\n'
2168 end = '\n'
2170 if opts.get('print0'):
2169 if opts.get('print0'):
2171 end = '\0'
2170 end = '\0'
2172 fmt = '%s' + end
2171 fmt = '%s' + end
2173
2172
2174 m = scmutil.match(ctx, pats, opts)
2173 m = scmutil.match(ctx, pats, opts)
2175 ui.pager('files')
2174 ui.pager('files')
2176 with ui.formatter('files', opts) as fm:
2175 with ui.formatter('files', opts) as fm:
2177 return cmdutil.files(ui, ctx, m, fm, fmt, opts.get('subrepos'))
2176 return cmdutil.files(ui, ctx, m, fm, fmt, opts.get('subrepos'))
2178
2177
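# Illustrative sketch: the print0 handling above only swaps the record terminator
# from "\n" to "\0" before each path is written with fmt = '%s' + end, so consumers
# such as "xargs -0" can split on NUL. format_paths() below is a hypothetical,
# stdlib-only restatement of that idea, not the files command itself.

def format_paths(paths, print0=False):
    end = '\0' if print0 else '\n'
    fmt = '%s' + end
    return ''.join(fmt % p for p in paths)

out = format_paths(['README', 'lib/util.py', 'name with spaces'], print0=True)
assert out.rstrip('\0').split('\0') == ['README', 'lib/util.py', 'name with spaces']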
2179 @command(
2178 @command(
2180 '^forget',
2179 '^forget',
2181 [('i', 'interactive', None, _('use interactive mode')),
2180 [('i', 'interactive', None, _('use interactive mode')),
2182 ] + walkopts + dryrunopts,
2181 ] + walkopts + dryrunopts,
2183 _('[OPTION]... FILE...'), inferrepo=True)
2182 _('[OPTION]... FILE...'), inferrepo=True)
2184 def forget(ui, repo, *pats, **opts):
2183 def forget(ui, repo, *pats, **opts):
2185 """forget the specified files on the next commit
2184 """forget the specified files on the next commit
2186
2185
2187 Mark the specified files so they will no longer be tracked
2186 Mark the specified files so they will no longer be tracked
2188 after the next commit.
2187 after the next commit.
2189
2188
2190 This only removes files from the current branch, not from the
2189 This only removes files from the current branch, not from the
2191 entire project history, and it does not delete them from the
2190 entire project history, and it does not delete them from the
2192 working directory.
2191 working directory.
2193
2192
2194 To delete the file from the working directory, see :hg:`remove`.
2193 To delete the file from the working directory, see :hg:`remove`.
2195
2194
2196 To undo a forget before the next commit, see :hg:`add`.
2195 To undo a forget before the next commit, see :hg:`add`.
2197
2196
2198 .. container:: verbose
2197 .. container:: verbose
2199
2198
2200 Examples:
2199 Examples:
2201
2200
2202 - forget newly-added binary files::
2201 - forget newly-added binary files::
2203
2202
2204 hg forget "set:added() and binary()"
2203 hg forget "set:added() and binary()"
2205
2204
2206 - forget files that would be excluded by .hgignore::
2205 - forget files that would be excluded by .hgignore::
2207
2206
2208 hg forget "set:hgignore()"
2207 hg forget "set:hgignore()"
2209
2208
2210 Returns 0 on success.
2209 Returns 0 on success.
2211 """
2210 """
2212
2211
2213 opts = pycompat.byteskwargs(opts)
2212 opts = pycompat.byteskwargs(opts)
2214 if not pats:
2213 if not pats:
2215 raise error.Abort(_('no files specified'))
2214 raise error.Abort(_('no files specified'))
2216
2215
2217 m = scmutil.match(repo[None], pats, opts)
2216 m = scmutil.match(repo[None], pats, opts)
2218 dryrun, interactive = opts.get('dry_run'), opts.get('interactive')
2217 dryrun, interactive = opts.get('dry_run'), opts.get('interactive')
2219 rejected = cmdutil.forget(ui, repo, m, prefix="",
2218 rejected = cmdutil.forget(ui, repo, m, prefix="",
2220 explicitonly=False, dryrun=dryrun,
2219 explicitonly=False, dryrun=dryrun,
2221 interactive=interactive)[0]
2220 interactive=interactive)[0]
2222 return rejected and 1 or 0
2221 return rejected and 1 or 0
2223
2222
2224 @command(
2223 @command(
2225 'graft',
2224 'graft',
2226 [('r', 'rev', [], _('revisions to graft'), _('REV')),
2225 [('r', 'rev', [], _('revisions to graft'), _('REV')),
2227 ('c', 'continue', False, _('resume interrupted graft')),
2226 ('c', 'continue', False, _('resume interrupted graft')),
2228 ('', 'stop', False, _('stop interrupted graft')),
2227 ('', 'stop', False, _('stop interrupted graft')),
2229 ('', 'abort', False, _('abort interrupted graft')),
2228 ('', 'abort', False, _('abort interrupted graft')),
2230 ('e', 'edit', False, _('invoke editor on commit messages')),
2229 ('e', 'edit', False, _('invoke editor on commit messages')),
2231 ('', 'log', None, _('append graft info to log message')),
2230 ('', 'log', None, _('append graft info to log message')),
2232 ('', 'no-commit', None,
2231 ('', 'no-commit', None,
2233 _("don't commit, just apply the changes in working directory")),
2232 _("don't commit, just apply the changes in working directory")),
2234 ('f', 'force', False, _('force graft')),
2233 ('f', 'force', False, _('force graft')),
2235 ('D', 'currentdate', False,
2234 ('D', 'currentdate', False,
2236 _('record the current date as commit date')),
2235 _('record the current date as commit date')),
2237 ('U', 'currentuser', False,
2236 ('U', 'currentuser', False,
2238 _('record the current user as committer'))]
2237 _('record the current user as committer'))]
2239 + commitopts2 + mergetoolopts + dryrunopts,
2238 + commitopts2 + mergetoolopts + dryrunopts,
2240 _('[OPTION]... [-r REV]... REV...'))
2239 _('[OPTION]... [-r REV]... REV...'))
2241 def graft(ui, repo, *revs, **opts):
2240 def graft(ui, repo, *revs, **opts):
2242 '''copy changes from other branches onto the current branch
2241 '''copy changes from other branches onto the current branch
2243
2242
2244 This command uses Mercurial's merge logic to copy individual
2243 This command uses Mercurial's merge logic to copy individual
2245 changes from other branches without merging branches in the
2244 changes from other branches without merging branches in the
2246 history graph. This is sometimes known as 'backporting' or
2245 history graph. This is sometimes known as 'backporting' or
2247 'cherry-picking'. By default, graft will copy user, date, and
2246 'cherry-picking'. By default, graft will copy user, date, and
2248 description from the source changesets.
2247 description from the source changesets.
2249
2248
2250 Changesets that are ancestors of the current revision, that have
2249 Changesets that are ancestors of the current revision, that have
2251 already been grafted, or that are merges will be skipped.
2250 already been grafted, or that are merges will be skipped.
2252
2251
2253 If --log is specified, log messages will have a comment appended
2252 If --log is specified, log messages will have a comment appended
2254 of the form::
2253 of the form::
2255
2254
2256 (grafted from CHANGESETHASH)
2255 (grafted from CHANGESETHASH)
2257
2256
2258 If --force is specified, revisions will be grafted even if they
2257 If --force is specified, revisions will be grafted even if they
2259 are already ancestors of, or have been grafted to, the destination.
2258 are already ancestors of, or have been grafted to, the destination.
2260 This is useful when the revisions have since been backed out.
2259 This is useful when the revisions have since been backed out.
2261
2260
2262 If a graft merge results in conflicts, the graft process is
2261 If a graft merge results in conflicts, the graft process is
2263 interrupted so that the current merge can be manually resolved.
2262 interrupted so that the current merge can be manually resolved.
2264 Once all conflicts are addressed, the graft process can be
2263 Once all conflicts are addressed, the graft process can be
2265 continued with the -c/--continue option.
2264 continued with the -c/--continue option.
2266
2265
2267 The -c/--continue option reapplies all the earlier options.
2266 The -c/--continue option reapplies all the earlier options.
2268
2267
2269 .. container:: verbose
2268 .. container:: verbose
2270
2269
2271 Examples:
2270 Examples:
2272
2271
2273 - copy a single change to the stable branch and edit its description::
2272 - copy a single change to the stable branch and edit its description::
2274
2273
2275 hg update stable
2274 hg update stable
2276 hg graft --edit 9393
2275 hg graft --edit 9393
2277
2276
2278 - graft a range of changesets with one exception, updating dates::
2277 - graft a range of changesets with one exception, updating dates::
2279
2278
2280 hg graft -D "2085::2093 and not 2091"
2279 hg graft -D "2085::2093 and not 2091"
2281
2280
2282 - continue a graft after resolving conflicts::
2281 - continue a graft after resolving conflicts::
2283
2282
2284 hg graft -c
2283 hg graft -c
2285
2284
2286 - show the source of a grafted changeset::
2285 - show the source of a grafted changeset::
2287
2286
2288 hg log --debug -r .
2287 hg log --debug -r .
2289
2288
2290 - show revisions sorted by date::
2289 - show revisions sorted by date::
2291
2290
2292 hg log -r "sort(all(), date)"
2291 hg log -r "sort(all(), date)"
2293
2292
2294 See :hg:`help revisions` for more about specifying revisions.
2293 See :hg:`help revisions` for more about specifying revisions.
2295
2294
2296 Returns 0 on successful completion.
2295 Returns 0 on successful completion.
2297 '''
2296 '''
2298 with repo.wlock():
2297 with repo.wlock():
2299 return _dograft(ui, repo, *revs, **opts)
2298 return _dograft(ui, repo, *revs, **opts)
2300
2299
2301 def _dograft(ui, repo, *revs, **opts):
2300 def _dograft(ui, repo, *revs, **opts):
2302 opts = pycompat.byteskwargs(opts)
2301 opts = pycompat.byteskwargs(opts)
2303 if revs and opts.get('rev'):
2302 if revs and opts.get('rev'):
2304 ui.warn(_('warning: inconsistent use of --rev might give unexpected '
2303 ui.warn(_('warning: inconsistent use of --rev might give unexpected '
2305 'revision ordering!\n'))
2304 'revision ordering!\n'))
2306
2305
2307 revs = list(revs)
2306 revs = list(revs)
2308 revs.extend(opts.get('rev'))
2307 revs.extend(opts.get('rev'))
2309 # a dict of data to be stored in state file
2308 # a dict of data to be stored in state file
2310 statedata = {}
2309 statedata = {}
2311 # list of new nodes created by ongoing graft
2310 # list of new nodes created by ongoing graft
2312 statedata['newnodes'] = []
2311 statedata['newnodes'] = []
2313
2312
2314 if not opts.get('user') and opts.get('currentuser'):
2313 if not opts.get('user') and opts.get('currentuser'):
2315 opts['user'] = ui.username()
2314 opts['user'] = ui.username()
2316 if not opts.get('date') and opts.get('currentdate'):
2315 if not opts.get('date') and opts.get('currentdate'):
2317 opts['date'] = "%d %d" % dateutil.makedate()
2316 opts['date'] = "%d %d" % dateutil.makedate()
2318
2317
2319 editor = cmdutil.getcommiteditor(editform='graft',
2318 editor = cmdutil.getcommiteditor(editform='graft',
2320 **pycompat.strkwargs(opts))
2319 **pycompat.strkwargs(opts))
2321
2320
2322 cont = False
2321 cont = False
2323 if opts.get('no_commit'):
2322 if opts.get('no_commit'):
2324 if opts.get('edit'):
2323 if opts.get('edit'):
2325 raise error.Abort(_("cannot specify --no-commit and "
2324 raise error.Abort(_("cannot specify --no-commit and "
2326 "--edit together"))
2325 "--edit together"))
2327 if opts.get('currentuser'):
2326 if opts.get('currentuser'):
2328 raise error.Abort(_("cannot specify --no-commit and "
2327 raise error.Abort(_("cannot specify --no-commit and "
2329 "--currentuser together"))
2328 "--currentuser together"))
2330 if opts.get('currentdate'):
2329 if opts.get('currentdate'):
2331 raise error.Abort(_("cannot specify --no-commit and "
2330 raise error.Abort(_("cannot specify --no-commit and "
2332 "--currentdate together"))
2331 "--currentdate together"))
2333 if opts.get('log'):
2332 if opts.get('log'):
2334 raise error.Abort(_("cannot specify --no-commit and "
2333 raise error.Abort(_("cannot specify --no-commit and "
2335 "--log together"))
2334 "--log together"))
2336
2335
2337 graftstate = statemod.cmdstate(repo, 'graftstate')
2336 graftstate = statemod.cmdstate(repo, 'graftstate')
2338
2337
2339 if opts.get('stop'):
2338 if opts.get('stop'):
2340 if opts.get('continue'):
2339 if opts.get('continue'):
2341 raise error.Abort(_("cannot use '--continue' and "
2340 raise error.Abort(_("cannot use '--continue' and "
2342 "'--stop' together"))
2341 "'--stop' together"))
2343 if opts.get('abort'):
2342 if opts.get('abort'):
2344 raise error.Abort(_("cannot use '--abort' and '--stop' together"))
2343 raise error.Abort(_("cannot use '--abort' and '--stop' together"))
2345
2344
2346 if any((opts.get('edit'), opts.get('log'), opts.get('user'),
2345 if any((opts.get('edit'), opts.get('log'), opts.get('user'),
2347 opts.get('date'), opts.get('currentdate'),
2346 opts.get('date'), opts.get('currentdate'),
2348 opts.get('currentuser'), opts.get('rev'))):
2347 opts.get('currentuser'), opts.get('rev'))):
2349 raise error.Abort(_("cannot specify any other flag with '--stop'"))
2348 raise error.Abort(_("cannot specify any other flag with '--stop'"))
2350 return _stopgraft(ui, repo, graftstate)
2349 return _stopgraft(ui, repo, graftstate)
2351 elif opts.get('abort'):
2350 elif opts.get('abort'):
2352 if opts.get('continue'):
2351 if opts.get('continue'):
2353 raise error.Abort(_("cannot use '--continue' and "
2352 raise error.Abort(_("cannot use '--continue' and "
2354 "'--abort' together"))
2353 "'--abort' together"))
2355 if any((opts.get('edit'), opts.get('log'), opts.get('user'),
2354 if any((opts.get('edit'), opts.get('log'), opts.get('user'),
2356 opts.get('date'), opts.get('currentdate'),
2355 opts.get('date'), opts.get('currentdate'),
2357 opts.get('currentuser'), opts.get('rev'))):
2356 opts.get('currentuser'), opts.get('rev'))):
2358 raise error.Abort(_("cannot specify any other flag with '--abort'"))
2357 raise error.Abort(_("cannot specify any other flag with '--abort'"))
2359
2358
2360 return _abortgraft(ui, repo, graftstate)
2359 return _abortgraft(ui, repo, graftstate)
2361 elif opts.get('continue'):
2360 elif opts.get('continue'):
2362 cont = True
2361 cont = True
2363 if revs:
2362 if revs:
2364 raise error.Abort(_("can't specify --continue and revisions"))
2363 raise error.Abort(_("can't specify --continue and revisions"))
2365 # read in unfinished revisions
2364 # read in unfinished revisions
2366 if graftstate.exists():
2365 if graftstate.exists():
2367 statedata = _readgraftstate(repo, graftstate)
2366 statedata = _readgraftstate(repo, graftstate)
2368 if statedata.get('date'):
2367 if statedata.get('date'):
2369 opts['date'] = statedata['date']
2368 opts['date'] = statedata['date']
2370 if statedata.get('user'):
2369 if statedata.get('user'):
2371 opts['user'] = statedata['user']
2370 opts['user'] = statedata['user']
2372 if statedata.get('log'):
2371 if statedata.get('log'):
2373 opts['log'] = True
2372 opts['log'] = True
2374 if statedata.get('no_commit'):
2373 if statedata.get('no_commit'):
2375 opts['no_commit'] = statedata.get('no_commit')
2374 opts['no_commit'] = statedata.get('no_commit')
2376 nodes = statedata['nodes']
2375 nodes = statedata['nodes']
2377 revs = [repo[node].rev() for node in nodes]
2376 revs = [repo[node].rev() for node in nodes]
2378 else:
2377 else:
2379 cmdutil.wrongtooltocontinue(repo, _('graft'))
2378 cmdutil.wrongtooltocontinue(repo, _('graft'))
2380 else:
2379 else:
2381 if not revs:
2380 if not revs:
2382 raise error.Abort(_('no revisions specified'))
2381 raise error.Abort(_('no revisions specified'))
2383 cmdutil.checkunfinished(repo)
2382 cmdutil.checkunfinished(repo)
2384 cmdutil.bailifchanged(repo)
2383 cmdutil.bailifchanged(repo)
2385 revs = scmutil.revrange(repo, revs)
2384 revs = scmutil.revrange(repo, revs)
2386
2385
2387 skipped = set()
2386 skipped = set()
2388 # check for merges
2387 # check for merges
2389 for rev in repo.revs('%ld and merge()', revs):
2388 for rev in repo.revs('%ld and merge()', revs):
2390 ui.warn(_('skipping ungraftable merge revision %d\n') % rev)
2389 ui.warn(_('skipping ungraftable merge revision %d\n') % rev)
2391 skipped.add(rev)
2390 skipped.add(rev)
2392 revs = [r for r in revs if r not in skipped]
2391 revs = [r for r in revs if r not in skipped]
2393 if not revs:
2392 if not revs:
2394 return -1
2393 return -1
2395
2394
2396 # Don't check in the --continue case, in effect retaining --force across
2395 # Don't check in the --continue case, in effect retaining --force across
2397 # --continues. That's because without --force, any revisions we decided to
2396 # --continues. That's because without --force, any revisions we decided to
2398 # skip would have been filtered out here, so they wouldn't have made their
2397 # skip would have been filtered out here, so they wouldn't have made their
2399 # way to the graftstate. With --force, any revisions we would have otherwise
2398 # way to the graftstate. With --force, any revisions we would have otherwise
2400 # skipped would not have been filtered out, and if they hadn't been applied
2399 # skipped would not have been filtered out, and if they hadn't been applied
2401 # already, they'd have been in the graftstate.
2400 # already, they'd have been in the graftstate.
2402 if not (cont or opts.get('force')):
2401 if not (cont or opts.get('force')):
2403 # check for ancestors of dest branch
2402 # check for ancestors of dest branch
2404 crev = repo['.'].rev()
2403 crev = repo['.'].rev()
2405 ancestors = repo.changelog.ancestors([crev], inclusive=True)
2404 ancestors = repo.changelog.ancestors([crev], inclusive=True)
2406 # XXX make this lazy in the future
2405 # XXX make this lazy in the future
2407 # don't mutate while iterating, create a copy
2406 # don't mutate while iterating, create a copy
2408 for rev in list(revs):
2407 for rev in list(revs):
2409 if rev in ancestors:
2408 if rev in ancestors:
2410 ui.warn(_('skipping ancestor revision %d:%s\n') %
2409 ui.warn(_('skipping ancestor revision %d:%s\n') %
2411 (rev, repo[rev]))
2410 (rev, repo[rev]))
2412 # XXX remove on list is slow
2411 # XXX remove on list is slow
2413 revs.remove(rev)
2412 revs.remove(rev)
2414 if not revs:
2413 if not revs:
2415 return -1
2414 return -1
2416
2415
2417 # analyze revs for earlier grafts
2416 # analyze revs for earlier grafts
2418 ids = {}
2417 ids = {}
2419 for ctx in repo.set("%ld", revs):
2418 for ctx in repo.set("%ld", revs):
2420 ids[ctx.hex()] = ctx.rev()
2419 ids[ctx.hex()] = ctx.rev()
2421 n = ctx.extra().get('source')
2420 n = ctx.extra().get('source')
2422 if n:
2421 if n:
2423 ids[n] = ctx.rev()
2422 ids[n] = ctx.rev()
2424
2423
2425 # check ancestors for earlier grafts
2424 # check ancestors for earlier grafts
2426 ui.debug('scanning for duplicate grafts\n')
2425 ui.debug('scanning for duplicate grafts\n')
2427
2426
2428 # The only changesets we can be sure don't contain grafts of any of the
2427 # The only changesets we can be sure don't contain grafts of any of the
2429 # revs are the ones that are common ancestors of *all* revs:
2428 # revs are the ones that are common ancestors of *all* revs:
2430 for rev in repo.revs('only(%d,ancestor(%ld))', crev, revs):
2429 for rev in repo.revs('only(%d,ancestor(%ld))', crev, revs):
2431 ctx = repo[rev]
2430 ctx = repo[rev]
2432 n = ctx.extra().get('source')
2431 n = ctx.extra().get('source')
2433 if n in ids:
2432 if n in ids:
2434 try:
2433 try:
2435 r = repo[n].rev()
2434 r = repo[n].rev()
2436 except error.RepoLookupError:
2435 except error.RepoLookupError:
2437 r = None
2436 r = None
2438 if r in revs:
2437 if r in revs:
2439 ui.warn(_('skipping revision %d:%s '
2438 ui.warn(_('skipping revision %d:%s '
2440 '(already grafted to %d:%s)\n')
2439 '(already grafted to %d:%s)\n')
2441 % (r, repo[r], rev, ctx))
2440 % (r, repo[r], rev, ctx))
2442 revs.remove(r)
2441 revs.remove(r)
2443 elif ids[n] in revs:
2442 elif ids[n] in revs:
2444 if r is None:
2443 if r is None:
2445 ui.warn(_('skipping already grafted revision %d:%s '
2444 ui.warn(_('skipping already grafted revision %d:%s '
2446 '(%d:%s also has unknown origin %s)\n')
2445 '(%d:%s also has unknown origin %s)\n')
2447 % (ids[n], repo[ids[n]], rev, ctx, n[:12]))
2446 % (ids[n], repo[ids[n]], rev, ctx, n[:12]))
2448 else:
2447 else:
2449 ui.warn(_('skipping already grafted revision %d:%s '
2448 ui.warn(_('skipping already grafted revision %d:%s '
2450 '(%d:%s also has origin %d:%s)\n')
2449 '(%d:%s also has origin %d:%s)\n')
2451 % (ids[n], repo[ids[n]], rev, ctx, r, n[:12]))
2450 % (ids[n], repo[ids[n]], rev, ctx, r, n[:12]))
2452 revs.remove(ids[n])
2451 revs.remove(ids[n])
2453 elif ctx.hex() in ids:
2452 elif ctx.hex() in ids:
2454 r = ids[ctx.hex()]
2453 r = ids[ctx.hex()]
2455 ui.warn(_('skipping already grafted revision %d:%s '
2454 ui.warn(_('skipping already grafted revision %d:%s '
2456 '(was grafted from %d:%s)\n') %
2455 '(was grafted from %d:%s)\n') %
2457 (r, repo[r], rev, ctx))
2456 (r, repo[r], rev, ctx))
2458 revs.remove(r)
2457 revs.remove(r)
2459 if not revs:
2458 if not revs:
2460 return -1
2459 return -1
2461
2460
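# Simplified, hypothetical sketch of the duplicate-graft scan above, using plain
# tuples instead of a real repo: a candidate rev is dropped when some ancestor of
# the destination already records it -- by hash or via its 'source' extra -- as the
# origin of an earlier graft. Changeset and already_grafted() exist only for this
# example and gloss over the unknown-origin cases handled above.

from collections import namedtuple

Changeset = namedtuple('Changeset', 'hex source')      # source: extra.get('source')

def already_grafted(candidates, dest_ancestors):
    ids = {c.hex for c in candidates} | {c.source for c in candidates if c.source}
    grafted = set()
    for anc in dest_ancestors:
        if anc.source in ids or anc.hex in ids:
            grafted.add(anc.source or anc.hex)
    return grafted

src = Changeset(hex='abc123', source=None)
dest_history = [Changeset(hex='ffff00', source='abc123')]   # earlier graft of src
assert already_grafted([src], dest_history) == {'abc123'}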
2462 if opts.get('no_commit'):
2461 if opts.get('no_commit'):
2463 statedata['no_commit'] = True
2462 statedata['no_commit'] = True
2464 for pos, ctx in enumerate(repo.set("%ld", revs)):
2463 for pos, ctx in enumerate(repo.set("%ld", revs)):
2465 desc = '%d:%s "%s"' % (ctx.rev(), ctx,
2464 desc = '%d:%s "%s"' % (ctx.rev(), ctx,
2466 ctx.description().split('\n', 1)[0])
2465 ctx.description().split('\n', 1)[0])
2467 names = repo.nodetags(ctx.node()) + repo.nodebookmarks(ctx.node())
2466 names = repo.nodetags(ctx.node()) + repo.nodebookmarks(ctx.node())
2468 if names:
2467 if names:
2469 desc += ' (%s)' % ' '.join(names)
2468 desc += ' (%s)' % ' '.join(names)
2470 ui.status(_('grafting %s\n') % desc)
2469 ui.status(_('grafting %s\n') % desc)
2471 if opts.get('dry_run'):
2470 if opts.get('dry_run'):
2472 continue
2471 continue
2473
2472
2474 source = ctx.extra().get('source')
2473 source = ctx.extra().get('source')
2475 extra = {}
2474 extra = {}
2476 if source:
2475 if source:
2477 extra['source'] = source
2476 extra['source'] = source
2478 extra['intermediate-source'] = ctx.hex()
2477 extra['intermediate-source'] = ctx.hex()
2479 else:
2478 else:
2480 extra['source'] = ctx.hex()
2479 extra['source'] = ctx.hex()
2481 user = ctx.user()
2480 user = ctx.user()
2482 if opts.get('user'):
2481 if opts.get('user'):
2483 user = opts['user']
2482 user = opts['user']
2484 statedata['user'] = user
2483 statedata['user'] = user
2485 date = ctx.date()
2484 date = ctx.date()
2486 if opts.get('date'):
2485 if opts.get('date'):
2487 date = opts['date']
2486 date = opts['date']
2488 statedata['date'] = date
2487 statedata['date'] = date
2489 message = ctx.description()
2488 message = ctx.description()
2490 if opts.get('log'):
2489 if opts.get('log'):
2491 message += '\n(grafted from %s)' % ctx.hex()
2490 message += '\n(grafted from %s)' % ctx.hex()
2492 statedata['log'] = True
2491 statedata['log'] = True
2493
2492
2494 # we don't merge the first commit when continuing
2493 # we don't merge the first commit when continuing
2495 if not cont:
2494 if not cont:
2496 # perform the graft merge with p1(rev) as 'ancestor'
2495 # perform the graft merge with p1(rev) as 'ancestor'
2497 overrides = {('ui', 'forcemerge'): opts.get('tool', '')}
2496 overrides = {('ui', 'forcemerge'): opts.get('tool', '')}
2498 with ui.configoverride(overrides, 'graft'):
2497 with ui.configoverride(overrides, 'graft'):
2499 stats = mergemod.graft(repo, ctx, ctx.p1(), ['local', 'graft'])
2498 stats = mergemod.graft(repo, ctx, ctx.p1(), ['local', 'graft'])
2500 # report any conflicts
2499 # report any conflicts
2501 if stats.unresolvedcount > 0:
2500 if stats.unresolvedcount > 0:
2502 # write out state for --continue
2501 # write out state for --continue
2503 nodes = [repo[rev].hex() for rev in revs[pos:]]
2502 nodes = [repo[rev].hex() for rev in revs[pos:]]
2504 statedata['nodes'] = nodes
2503 statedata['nodes'] = nodes
2505 stateversion = 1
2504 stateversion = 1
2506 graftstate.save(stateversion, statedata)
2505 graftstate.save(stateversion, statedata)
2507 hint = _("use 'hg resolve' and 'hg graft --continue'")
2506 hint = _("use 'hg resolve' and 'hg graft --continue'")
2508 raise error.Abort(
2507 raise error.Abort(
2509 _("unresolved conflicts, can't continue"),
2508 _("unresolved conflicts, can't continue"),
2510 hint=hint)
2509 hint=hint)
2511 else:
2510 else:
2512 cont = False
2511 cont = False
2513
2512
2514 # commit if --no-commit is false
2513 # commit if --no-commit is false
2515 if not opts.get('no_commit'):
2514 if not opts.get('no_commit'):
2516 node = repo.commit(text=message, user=user, date=date, extra=extra,
2515 node = repo.commit(text=message, user=user, date=date, extra=extra,
2517 editor=editor)
2516 editor=editor)
2518 if node is None:
2517 if node is None:
2519 ui.warn(
2518 ui.warn(
2520 _('note: graft of %d:%s created no changes to commit\n') %
2519 _('note: graft of %d:%s created no changes to commit\n') %
2521 (ctx.rev(), ctx))
2520 (ctx.rev(), ctx))
2522 # checking that newnodes exist because old state files won't have it
2521 # checking that newnodes exist because old state files won't have it
2523 elif statedata.get('newnodes') is not None:
2522 elif statedata.get('newnodes') is not None:
2524 statedata['newnodes'].append(node)
2523 statedata['newnodes'].append(node)
2525
2524
2526 # remove state when we complete successfully
2525 # remove state when we complete successfully
2527 if not opts.get('dry_run'):
2526 if not opts.get('dry_run'):
2528 graftstate.delete()
2527 graftstate.delete()
2529
2528
2530 return 0
2529 return 0
2531
2530
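# Illustrative sketch of the data _dograft() carries across --continue: the keys
# mirror the statedata dict built above (nodes, newnodes, user, date, log,
# no_commit). The JSON file below is purely for illustration -- the real state is
# written through statemod.cmdstate's save()/read(), not in this format.

import json, os, tempfile

def save_state(path, statedata, version=1):
    with open(path, 'w') as fp:
        json.dump({'version': version, 'data': statedata}, fp)

def read_state(path):
    with open(path) as fp:
        return json.load(fp)['data']

path = os.path.join(tempfile.mkdtemp(), 'graftstate.json')
save_state(path, {'nodes': ['ffff00'], 'newnodes': [], 'user': 'alice', 'log': True})
assert read_state(path)['nodes'] == ['ffff00']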
2532 def _abortgraft(ui, repo, graftstate):
2531 def _abortgraft(ui, repo, graftstate):
2533 """abort the interrupted graft and rollbacks to the state before interrupted
2532 """abort the interrupted graft and rollbacks to the state before interrupted
2534 graft"""
2533 graft"""
2535 if not graftstate.exists():
2534 if not graftstate.exists():
2536 raise error.Abort(_("no interrupted graft to abort"))
2535 raise error.Abort(_("no interrupted graft to abort"))
2537 statedata = _readgraftstate(repo, graftstate)
2536 statedata = _readgraftstate(repo, graftstate)
2538 newnodes = statedata.get('newnodes')
2537 newnodes = statedata.get('newnodes')
2539 if newnodes is None:
2538 if newnodes is None:
2540 # an old graft state which does not have all the data required to abort
2539 # an old graft state which does not have all the data required to abort
2541 # the graft
2540 # the graft
2542 raise error.Abort(_("cannot abort using an old graftstate"))
2541 raise error.Abort(_("cannot abort using an old graftstate"))
2543
2542
2544 # changeset from which graft operation was started
2543 # changeset from which graft operation was started
2545 startctx = None
2544 startctx = None
2546 if len(newnodes) > 0:
2545 if len(newnodes) > 0:
2547 startctx = repo[newnodes[0]].p1()
2546 startctx = repo[newnodes[0]].p1()
2548 else:
2547 else:
2549 startctx = repo['.']
2548 startctx = repo['.']
2550 # whether to strip or not
2549 # whether to strip or not
2551 cleanup = False
2550 cleanup = False
2552 if newnodes:
2551 if newnodes:
2553 newnodes = [repo[r].rev() for r in newnodes]
2552 newnodes = [repo[r].rev() for r in newnodes]
2554 cleanup = True
2553 cleanup = True
2555 # checking that none of the newnodes turned public or is public
2554 # checking that none of the newnodes turned public or is public
2556 immutable = [c for c in newnodes if not repo[c].mutable()]
2555 immutable = [c for c in newnodes if not repo[c].mutable()]
2557 if immutable:
2556 if immutable:
2558 repo.ui.warn(_("cannot clean up public changesets %s\n")
2557 repo.ui.warn(_("cannot clean up public changesets %s\n")
2559 % ', '.join(bytes(repo[r]) for r in immutable),
2558 % ', '.join(bytes(repo[r]) for r in immutable),
2560 hint=_("see 'hg help phases' for details"))
2559 hint=_("see 'hg help phases' for details"))
2561 cleanup = False
2560 cleanup = False
2562
2561
2563 # checking that no new nodes were created on top of the grafted revs
2562 # checking that no new nodes were created on top of the grafted revs
2564 desc = set(repo.changelog.descendants(newnodes))
2563 desc = set(repo.changelog.descendants(newnodes))
2565 if desc - set(newnodes):
2564 if desc - set(newnodes):
2566 repo.ui.warn(_("new changesets detected on destination "
2565 repo.ui.warn(_("new changesets detected on destination "
2567 "branch, can't strip\n"))
2566 "branch, can't strip\n"))
2568 cleanup = False
2567 cleanup = False
2569
2568
2570 if cleanup:
2569 if cleanup:
2571 with repo.wlock(), repo.lock():
2570 with repo.wlock(), repo.lock():
2572 hg.updaterepo(repo, startctx.node(), overwrite=True)
2571 hg.updaterepo(repo, startctx.node(), overwrite=True)
2573 # stripping the new nodes created
2572 # stripping the new nodes created
2574 strippoints = [c.node() for c in repo.set("roots(%ld)",
2573 strippoints = [c.node() for c in repo.set("roots(%ld)",
2575 newnodes)]
2574 newnodes)]
2576 repair.strip(repo.ui, repo, strippoints, backup=False)
2575 repair.strip(repo.ui, repo, strippoints, backup=False)
2577
2576
2578 if not cleanup:
2577 if not cleanup:
2579 # we don't update to the startnode if we can't strip
2578 # we don't update to the startnode if we can't strip
2580 startctx = repo['.']
2579 startctx = repo['.']
2581 hg.updaterepo(repo, startctx.node(), overwrite=True)
2580 hg.updaterepo(repo, startctx.node(), overwrite=True)
2582
2581
2583 ui.status(_("graft aborted\n"))
2582 ui.status(_("graft aborted\n"))
2584 ui.status(_("working directory is now at %s\n") % startctx.hex()[:12])
2583 ui.status(_("working directory is now at %s\n") % startctx.hex()[:12])
2585 graftstate.delete()
2584 graftstate.delete()
2586 return 0
2585 return 0
2587
2586
2588 def _readgraftstate(repo, graftstate):
2587 def _readgraftstate(repo, graftstate):
2589 """read the graft state file and return a dict of the data stored in it"""
2588 """read the graft state file and return a dict of the data stored in it"""
2590 try:
2589 try:
2591 return graftstate.read()
2590 return graftstate.read()
2592 except error.CorruptedState:
2591 except error.CorruptedState:
2593 nodes = repo.vfs.read('graftstate').splitlines()
2592 nodes = repo.vfs.read('graftstate').splitlines()
2594 return {'nodes': nodes}
2593 return {'nodes': nodes}
2595
2594
2596 def _stopgraft(ui, repo, graftstate):
2595 def _stopgraft(ui, repo, graftstate):
2597 """stop the interrupted graft"""
2596 """stop the interrupted graft"""
2598 if not graftstate.exists():
2597 if not graftstate.exists():
2599 raise error.Abort(_("no interrupted graft found"))
2598 raise error.Abort(_("no interrupted graft found"))
2600 pctx = repo['.']
2599 pctx = repo['.']
2601 hg.updaterepo(repo, pctx.node(), overwrite=True)
2600 hg.updaterepo(repo, pctx.node(), overwrite=True)
2602 graftstate.delete()
2601 graftstate.delete()
2603 ui.status(_("stopped the interrupted graft\n"))
2602 ui.status(_("stopped the interrupted graft\n"))
2604 ui.status(_("working directory is now at %s\n") % pctx.hex()[:12])
2603 ui.status(_("working directory is now at %s\n") % pctx.hex()[:12])
2605 return 0
2604 return 0
2606
2605
2607 @command('grep',
2606 @command('grep',
2608 [('0', 'print0', None, _('end fields with NUL')),
2607 [('0', 'print0', None, _('end fields with NUL')),
2608 ('', 'all', None, _('print all revisions that match (DEPRECATED)')),
2607 ('', 'all', None, _('print all revisions that match (DEPRECATED)')),
2610 ('', 'diff', None, _('print all revisions when the term was introduced '
2609 ('', 'diff', None, _('print all revisions when the term was introduced '
2611 'or removed')),
2610 'or removed')),
2612 ('a', 'text', None, _('treat all files as text')),
2611 ('a', 'text', None, _('treat all files as text')),
2613 ('f', 'follow', None,
2612 ('f', 'follow', None,
2614 _('follow changeset history,'
2613 _('follow changeset history,'
2615 ' or file history across copies and renames')),
2614 ' or file history across copies and renames')),
2616 ('i', 'ignore-case', None, _('ignore case when matching')),
2615 ('i', 'ignore-case', None, _('ignore case when matching')),
2617 ('l', 'files-with-matches', None,
2616 ('l', 'files-with-matches', None,
2618 _('print only filenames and revisions that match')),
2617 _('print only filenames and revisions that match')),
2619 ('n', 'line-number', None, _('print matching line numbers')),
2618 ('n', 'line-number', None, _('print matching line numbers')),
2620 ('r', 'rev', [],
2619 ('r', 'rev', [],
2621 _('only search files changed within revision range'), _('REV')),
2620 _('only search files changed within revision range'), _('REV')),
2622 ('', 'all-files', None,
2621 ('', 'all-files', None,
2623 _('include all files in the changeset while grepping (EXPERIMENTAL)')),
2622 _('include all files in the changeset while grepping (EXPERIMENTAL)')),
2624 ('u', 'user', None, _('list the author (long with -v)')),
2623 ('u', 'user', None, _('list the author (long with -v)')),
2625 ('d', 'date', None, _('list the date (short with -q)')),
2624 ('d', 'date', None, _('list the date (short with -q)')),
2626 ] + formatteropts + walkopts,
2625 ] + formatteropts + walkopts,
2627 _('[OPTION]... PATTERN [FILE]...'),
2626 _('[OPTION]... PATTERN [FILE]...'),
2628 inferrepo=True,
2627 inferrepo=True,
2629 intents={INTENT_READONLY})
2628 intents={INTENT_READONLY})
2630 def grep(ui, repo, pattern, *pats, **opts):
2629 def grep(ui, repo, pattern, *pats, **opts):
2631 """search revision history for a pattern in specified files
2630 """search revision history for a pattern in specified files
2632
2631
2633 Search revision history for a regular expression in the specified
2632 Search revision history for a regular expression in the specified
2634 files or the entire project.
2633 files or the entire project.
2635
2634
2636 By default, grep prints the most recent revision number for each
2635 By default, grep prints the most recent revision number for each
2637 file in which it finds a match. To get it to print every revision
2636 file in which it finds a match. To get it to print every revision
2638 that contains a change in match status ("-" for a match that becomes
2637 that contains a change in match status ("-" for a match that becomes
2639 a non-match, or "+" for a non-match that becomes a match), use the
2638 a non-match, or "+" for a non-match that becomes a match), use the
2640 --diff flag.
2639 --diff flag.
2641
2640
2642 PATTERN can be any Python (roughly Perl-compatible) regular
2641 PATTERN can be any Python (roughly Perl-compatible) regular
2643 expression.
2642 expression.
2644
2643
2645 If no FILEs are specified (and -f/--follow isn't set), all files in
2644 If no FILEs are specified (and -f/--follow isn't set), all files in
2646 the repository are searched, including those that don't exist in the
2645 the repository are searched, including those that don't exist in the
2647 current branch or have been deleted in a prior changeset.
2646 current branch or have been deleted in a prior changeset.
2648
2647
2649 Returns 0 if a match is found, 1 otherwise.
2648 Returns 0 if a match is found, 1 otherwise.
2650 """
2649 """
2651 opts = pycompat.byteskwargs(opts)
2650 opts = pycompat.byteskwargs(opts)
2652 diff = opts.get('all') or opts.get('diff')
2651 diff = opts.get('all') or opts.get('diff')
2653 all_files = opts.get('all_files')
2652 all_files = opts.get('all_files')
2654 if diff and opts.get('all_files'):
2653 if diff and opts.get('all_files'):
2655 raise error.Abort(_('--diff and --all-files are mutually exclusive'))
2654 raise error.Abort(_('--diff and --all-files are mutually exclusive'))
2656 # TODO: remove "not opts.get('rev')" if --all-files -rMULTIREV gets working
2655 # TODO: remove "not opts.get('rev')" if --all-files -rMULTIREV gets working
2657 if opts.get('all_files') is None and not opts.get('rev') and not diff:
2656 if opts.get('all_files') is None and not opts.get('rev') and not diff:
2658 # experimental config: commands.grep.all-files
2657 # experimental config: commands.grep.all-files
2659 opts['all_files'] = ui.configbool('commands', 'grep.all-files')
2658 opts['all_files'] = ui.configbool('commands', 'grep.all-files')
2660 plaingrep = opts.get('all_files') and not opts.get('rev')
2659 plaingrep = opts.get('all_files') and not opts.get('rev')
2661 if plaingrep:
2660 if plaingrep:
2662 opts['rev'] = ['wdir()']
2661 opts['rev'] = ['wdir()']
2663
2662
2664 reflags = re.M
2663 reflags = re.M
2665 if opts.get('ignore_case'):
2664 if opts.get('ignore_case'):
2666 reflags |= re.I
2665 reflags |= re.I
2667 try:
2666 try:
2668 regexp = util.re.compile(pattern, reflags)
2667 regexp = util.re.compile(pattern, reflags)
2669 except re.error as inst:
2668 except re.error as inst:
2670 ui.warn(_("grep: invalid match pattern: %s\n") % pycompat.bytestr(inst))
2669 ui.warn(_("grep: invalid match pattern: %s\n") % pycompat.bytestr(inst))
2671 return 1
2670 return 1
2672 sep, eol = ':', '\n'
2671 sep, eol = ':', '\n'
2673 if opts.get('print0'):
2672 if opts.get('print0'):
2674 sep = eol = '\0'
2673 sep = eol = '\0'
2675
2674
2676 getfile = util.lrucachefunc(repo.file)
2675 getfile = util.lrucachefunc(repo.file)
2677
2676
2678 def matchlines(body):
2677 def matchlines(body):
2679 begin = 0
2678 begin = 0
2680 linenum = 0
2679 linenum = 0
2681 while begin < len(body):
2680 while begin < len(body):
2682 match = regexp.search(body, begin)
2681 match = regexp.search(body, begin)
2683 if not match:
2682 if not match:
2684 break
2683 break
2685 mstart, mend = match.span()
2684 mstart, mend = match.span()
2686 linenum += body.count('\n', begin, mstart) + 1
2685 linenum += body.count('\n', begin, mstart) + 1
2687 lstart = body.rfind('\n', begin, mstart) + 1 or begin
2686 lstart = body.rfind('\n', begin, mstart) + 1 or begin
2688 begin = body.find('\n', mend) + 1 or len(body) + 1
2687 begin = body.find('\n', mend) + 1 or len(body) + 1
2689 lend = begin - 1
2688 lend = begin - 1
2690 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
2689 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
2691
2690
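# Standalone restatement of the matchlines() generator above (str instead of bytes,
# regexp passed in explicitly) so its output can be inspected in isolation: it
# yields (linenum, colstart, colend, line) for every match, which is exactly what
# grepbody() wraps in a linestate.

import re

def matchlines(body, regexp):
    begin = linenum = 0
    while begin < len(body):
        match = regexp.search(body, begin)
        if not match:
            break
        mstart, mend = match.span()
        linenum += body.count('\n', begin, mstart) + 1
        lstart = body.rfind('\n', begin, mstart) + 1 or begin
        begin = body.find('\n', mend) + 1 or len(body) + 1
        lend = begin - 1
        yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]

body = 'import os\nimport re\nprint(os.sep)\n'
assert list(matchlines(body, re.compile('import'))) == \
    [(1, 0, 6, 'import os'), (2, 0, 6, 'import re')]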
2692 class linestate(object):
2691 class linestate(object):
2693 def __init__(self, line, linenum, colstart, colend):
2692 def __init__(self, line, linenum, colstart, colend):
2694 self.line = line
2693 self.line = line
2695 self.linenum = linenum
2694 self.linenum = linenum
2696 self.colstart = colstart
2695 self.colstart = colstart
2697 self.colend = colend
2696 self.colend = colend
2698
2697
2699 def __hash__(self):
2698 def __hash__(self):
2700 return hash((self.linenum, self.line))
2699 return hash((self.linenum, self.line))
2701
2700
2702 def __eq__(self, other):
2701 def __eq__(self, other):
2703 return self.line == other.line
2702 return self.line == other.line
2704
2703
2705 def findpos(self):
2704 def findpos(self):
2706 """Iterate all (start, end) indices of matches"""
2705 """Iterate all (start, end) indices of matches"""
2707 yield self.colstart, self.colend
2706 yield self.colstart, self.colend
2708 p = self.colend
2707 p = self.colend
2709 while p < len(self.line):
2708 while p < len(self.line):
2710 m = regexp.search(self.line, p)
2709 m = regexp.search(self.line, p)
2711 if not m:
2710 if not m:
2712 break
2711 break
2713 yield m.span()
2712 yield m.span()
2714 p = m.end()
2713 p = m.end()
2715
2714
2716 matches = {}
2715 matches = {}
2717 copies = {}
2716 copies = {}
2718 def grepbody(fn, rev, body):
2717 def grepbody(fn, rev, body):
2719 matches[rev].setdefault(fn, [])
2718 matches[rev].setdefault(fn, [])
2720 m = matches[rev][fn]
2719 m = matches[rev][fn]
2721 for lnum, cstart, cend, line in matchlines(body):
2720 for lnum, cstart, cend, line in matchlines(body):
2722 s = linestate(line, lnum, cstart, cend)
2721 s = linestate(line, lnum, cstart, cend)
2723 m.append(s)
2722 m.append(s)
2724
2723
2725 def difflinestates(a, b):
2724 def difflinestates(a, b):
2726 sm = difflib.SequenceMatcher(None, a, b)
2725 sm = difflib.SequenceMatcher(None, a, b)
2727 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2726 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2728 if tag == r'insert':
2727 if tag == r'insert':
2729 for i in pycompat.xrange(blo, bhi):
2728 for i in pycompat.xrange(blo, bhi):
2730 yield ('+', b[i])
2729 yield ('+', b[i])
2731 elif tag == r'delete':
2730 elif tag == r'delete':
2732 for i in pycompat.xrange(alo, ahi):
2731 for i in pycompat.xrange(alo, ahi):
2733 yield ('-', a[i])
2732 yield ('-', a[i])
2734 elif tag == r'replace':
2733 elif tag == r'replace':
2735 for i in pycompat.xrange(alo, ahi):
2734 for i in pycompat.xrange(alo, ahi):
2736 yield ('-', a[i])
2735 yield ('-', a[i])
2737 for i in pycompat.xrange(blo, bhi):
2736 for i in pycompat.xrange(blo, bhi):
2738 yield ('+', b[i])
2737 yield ('+', b[i])
2739
2738
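# Standalone restatement of difflinestates() above, runnable on plain strings: it
# turns a SequenceMatcher opcode stream over the parent's and the child's match
# lists into the "-"/"+" markers that --diff mode prints (str items stand in for
# the linestate objects used above).

import difflib

def difflinestates(a, b):
    sm = difflib.SequenceMatcher(None, a, b)
    for tag, alo, ahi, blo, bhi in sm.get_opcodes():
        if tag == 'insert':
            for i in range(blo, bhi):
                yield ('+', b[i])
        elif tag == 'delete':
            for i in range(alo, ahi):
                yield ('-', a[i])
        elif tag == 'replace':
            for i in range(alo, ahi):
                yield ('-', a[i])
            for i in range(blo, bhi):
                yield ('+', b[i])

old, new = ['foo = 1', 'bar = 2'], ['foo = 1', 'bar = 3']
assert list(difflinestates(old, new)) == [('-', 'bar = 2'), ('+', 'bar = 3')]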
2740 def display(fm, fn, ctx, pstates, states):
2739 def display(fm, fn, ctx, pstates, states):
2741 rev = scmutil.intrev(ctx)
2740 rev = scmutil.intrev(ctx)
2742 if fm.isplain():
2741 if fm.isplain():
2743 formatuser = ui.shortuser
2742 formatuser = ui.shortuser
2744 else:
2743 else:
2745 formatuser = pycompat.bytestr
2744 formatuser = pycompat.bytestr
2746 if ui.quiet:
2745 if ui.quiet:
2747 datefmt = '%Y-%m-%d'
2746 datefmt = '%Y-%m-%d'
2748 else:
2747 else:
2749 datefmt = '%a %b %d %H:%M:%S %Y %1%2'
2748 datefmt = '%a %b %d %H:%M:%S %Y %1%2'
2750 found = False
2749 found = False
2751 @util.cachefunc
2750 @util.cachefunc
2752 def binary():
2751 def binary():
2753 flog = getfile(fn)
2752 flog = getfile(fn)
2754 try:
2753 try:
2755 return stringutil.binary(flog.read(ctx.filenode(fn)))
2754 return stringutil.binary(flog.read(ctx.filenode(fn)))
2756 except error.WdirUnsupported:
2755 except error.WdirUnsupported:
2757 return ctx[fn].isbinary()
2756 return ctx[fn].isbinary()
2758
2757
2759 fieldnamemap = {'filename': 'path', 'linenumber': 'lineno'}
2758 fieldnamemap = {'filename': 'path', 'linenumber': 'lineno'}
2760 if diff:
2759 if diff:
2761 iter = difflinestates(pstates, states)
2760 iter = difflinestates(pstates, states)
2762 else:
2761 else:
2763 iter = [('', l) for l in states]
2762 iter = [('', l) for l in states]
2764 for change, l in iter:
2763 for change, l in iter:
2765 fm.startitem()
2764 fm.startitem()
2766 fm.context(ctx=ctx)
2765 fm.context(ctx=ctx)
2767 fm.data(node=fm.hexfunc(scmutil.binnode(ctx)))
2766 fm.data(node=fm.hexfunc(scmutil.binnode(ctx)))
2768
2767
2769 cols = [
2768 cols = [
2770 ('filename', '%s', fn, True),
2769 ('filename', '%s', fn, True),
2771 ('rev', '%d', rev, not plaingrep),
2770 ('rev', '%d', rev, not plaingrep),
2772 ('linenumber', '%d', l.linenum, opts.get('line_number')),
2771 ('linenumber', '%d', l.linenum, opts.get('line_number')),
2773 ]
2772 ]
2774 if diff:
2773 if diff:
2775 cols.append(('change', '%s', change, True))
2774 cols.append(('change', '%s', change, True))
2776 cols.extend([
2775 cols.extend([
2777 ('user', '%s', formatuser(ctx.user()), opts.get('user')),
2776 ('user', '%s', formatuser(ctx.user()), opts.get('user')),
2778 ('date', '%s', fm.formatdate(ctx.date(), datefmt),
2777 ('date', '%s', fm.formatdate(ctx.date(), datefmt),
2779 opts.get('date')),
2778 opts.get('date')),
2780 ])
2779 ])
2781 lastcol = next(
2780 lastcol = next(
2782 name for name, fmt, data, cond in reversed(cols) if cond)
2781 name for name, fmt, data, cond in reversed(cols) if cond)
2783 for name, fmt, data, cond in cols:
2782 for name, fmt, data, cond in cols:
2784 field = fieldnamemap.get(name, name)
2783 field = fieldnamemap.get(name, name)
2785 fm.condwrite(cond, field, fmt, data, label='grep.%s' % name)
2784 fm.condwrite(cond, field, fmt, data, label='grep.%s' % name)
2786 if cond and name != lastcol:
2785 if cond and name != lastcol:
2787 fm.plain(sep, label='grep.sep')
2786 fm.plain(sep, label='grep.sep')
2788 if not opts.get('files_with_matches'):
2787 if not opts.get('files_with_matches'):
2789 fm.plain(sep, label='grep.sep')
2788 fm.plain(sep, label='grep.sep')
2790 if not opts.get('text') and binary():
2789 if not opts.get('text') and binary():
2791 fm.plain(_(" Binary file matches"))
2790 fm.plain(_(" Binary file matches"))
2792 else:
2791 else:
2793 displaymatches(fm.nested('texts', tmpl='{text}'), l)
2792 displaymatches(fm.nested('texts', tmpl='{text}'), l)
2794 fm.plain(eol)
2793 fm.plain(eol)
2795 found = True
2794 found = True
2796 if opts.get('files_with_matches'):
2795 if opts.get('files_with_matches'):
2797 break
2796 break
2798 return found
2797 return found
2799
2798
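The cols/lastcol logic above writes only the enabled fields and emits the separator after every enabled column except the last. A rough standalone sketch of that pattern (render() and the sample values are made up for illustration):

def render(cols, sep=':'):
    # cols: (name, fmt, value, enabled) tuples like the list built above.
    # Joining only the enabled columns gives the same "no trailing
    # separator" behaviour that the lastcol lookup provides.
    return sep.join(fmt % value for name, fmt, value, enabled in cols if enabled)

print(render([
    ('filename', '%s', 'mercurial/commands.py', True),
    ('rev', '%d', 2771, True),
    ('linenumber', '%d', 42, False),   # stays off unless --line-number is set
]))
# mercurial/commands.py:2771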
2800 def displaymatches(fm, l):
2799 def displaymatches(fm, l):
2801 p = 0
2800 p = 0
2802 for s, e in l.findpos():
2801 for s, e in l.findpos():
2803 if p < s:
2802 if p < s:
2804 fm.startitem()
2803 fm.startitem()
2805 fm.write('text', '%s', l.line[p:s])
2804 fm.write('text', '%s', l.line[p:s])
2806 fm.data(matched=False)
2805 fm.data(matched=False)
2807 fm.startitem()
2806 fm.startitem()
2808 fm.write('text', '%s', l.line[s:e], label='grep.match')
2807 fm.write('text', '%s', l.line[s:e], label='grep.match')
2809 fm.data(matched=True)
2808 fm.data(matched=True)
2810 p = e
2809 p = e
2811 if p < len(l.line):
2810 if p < len(l.line):
2812 fm.startitem()
2811 fm.startitem()
2813 fm.write('text', '%s', l.line[p:])
2812 fm.write('text', '%s', l.line[p:])
2814 fm.data(matched=False)
2813 fm.data(matched=False)
2815 fm.end()
2814 fm.end()
2816
2815
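displaymatches() above walks each line by its match positions, alternating unmatched and matched segments. A standalone sketch of the same walk (hypothetical helper name; it returns (text, matched) pairs instead of writing through the formatter):

def segments(line, spans):
    # spans: sorted, non-overlapping (start, end) match positions.
    p = 0
    out = []
    for s, e in spans:
        if p < s:
            out.append((line[p:s], False))
        out.append((line[s:e], True))
        p = e
    if p < len(line):
        out.append((line[p:], False))
    return out

print(segments('import re, sys', [(7, 9), (11, 14)]))
# [('import ', False), ('re', True), (', ', False), ('sys', True)]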
2817 skip = {}
2816 skip = {}
2818 revfiles = {}
2817 revfiles = {}
2819 match = scmutil.match(repo[None], pats, opts)
2818 match = scmutil.match(repo[None], pats, opts)
2820 found = False
2819 found = False
2821 follow = opts.get('follow')
2820 follow = opts.get('follow')
2822
2821
2823 def prep(ctx, fns):
2822 def prep(ctx, fns):
2824 rev = ctx.rev()
2823 rev = ctx.rev()
2825 pctx = ctx.p1()
2824 pctx = ctx.p1()
2826 parent = pctx.rev()
2825 parent = pctx.rev()
2827 matches.setdefault(rev, {})
2826 matches.setdefault(rev, {})
2828 matches.setdefault(parent, {})
2827 matches.setdefault(parent, {})
2829 files = revfiles.setdefault(rev, [])
2828 files = revfiles.setdefault(rev, [])
2830 for fn in fns:
2829 for fn in fns:
2831 flog = getfile(fn)
2830 flog = getfile(fn)
2832 try:
2831 try:
2833 fnode = ctx.filenode(fn)
2832 fnode = ctx.filenode(fn)
2834 except error.LookupError:
2833 except error.LookupError:
2835 continue
2834 continue
2836 try:
2835 try:
2837 copied = flog.renamed(fnode)
2836 copied = flog.renamed(fnode)
2838 except error.WdirUnsupported:
2837 except error.WdirUnsupported:
2839 copied = ctx[fn].renamed()
2838 copied = ctx[fn].renamed()
2840 copy = follow and copied and copied[0]
2839 copy = follow and copied and copied[0]
2841 if copy:
2840 if copy:
2842 copies.setdefault(rev, {})[fn] = copy
2841 copies.setdefault(rev, {})[fn] = copy
2843 if fn in skip:
2842 if fn in skip:
2844 if copy:
2843 if copy:
2845 skip[copy] = True
2844 skip[copy] = True
2846 continue
2845 continue
2847 files.append(fn)
2846 files.append(fn)
2848
2847
2849 if fn not in matches[rev]:
2848 if fn not in matches[rev]:
2850 try:
2849 try:
2851 content = flog.read(fnode)
2850 content = flog.read(fnode)
2852 except error.WdirUnsupported:
2851 except error.WdirUnsupported:
2853 content = ctx[fn].data()
2852 content = ctx[fn].data()
2854 grepbody(fn, rev, content)
2853 grepbody(fn, rev, content)
2855
2854
2856 pfn = copy or fn
2855 pfn = copy or fn
2857 if pfn not in matches[parent]:
2856 if pfn not in matches[parent]:
2858 try:
2857 try:
2859 fnode = pctx.filenode(pfn)
2858 fnode = pctx.filenode(pfn)
2860 grepbody(pfn, parent, flog.read(fnode))
2859 grepbody(pfn, parent, flog.read(fnode))
2861 except error.LookupError:
2860 except error.LookupError:
2862 pass
2861 pass
2863
2862
2864 ui.pager('grep')
2863 ui.pager('grep')
2865 fm = ui.formatter('grep', opts)
2864 fm = ui.formatter('grep', opts)
2866 for ctx in cmdutil.walkchangerevs(repo, match, opts, prep):
2865 for ctx in cmdutil.walkchangerevs(repo, match, opts, prep):
2867 rev = ctx.rev()
2866 rev = ctx.rev()
2868 parent = ctx.p1().rev()
2867 parent = ctx.p1().rev()
2869 for fn in sorted(revfiles.get(rev, [])):
2868 for fn in sorted(revfiles.get(rev, [])):
2870 states = matches[rev][fn]
2869 states = matches[rev][fn]
2871 copy = copies.get(rev, {}).get(fn)
2870 copy = copies.get(rev, {}).get(fn)
2872 if fn in skip:
2871 if fn in skip:
2873 if copy:
2872 if copy:
2874 skip[copy] = True
2873 skip[copy] = True
2875 continue
2874 continue
2876 pstates = matches.get(parent, {}).get(copy or fn, [])
2875 pstates = matches.get(parent, {}).get(copy or fn, [])
2877 if pstates or states:
2876 if pstates or states:
2878 r = display(fm, fn, ctx, pstates, states)
2877 r = display(fm, fn, ctx, pstates, states)
2879 found = found or r
2878 found = found or r
2880 if r and not diff and not all_files:
2879 if r and not diff and not all_files:
2881 skip[fn] = True
2880 skip[fn] = True
2882 if copy:
2881 if copy:
2883 skip[copy] = True
2882 skip[copy] = True
2884 del revfiles[rev]
2883 del revfiles[rev]
2885 # We will keep the matches dict for the duration of the window
2884 # We will keep the matches dict for the duration of the window
2886 # clear the matches dict once the window is over
2885 # clear the matches dict once the window is over
2887 if not revfiles:
2886 if not revfiles:
2888 matches.clear()
2887 matches.clear()
2889 fm.end()
2888 fm.end()
2890
2889
2891 return not found
2890 return not found
2892
2891
2893 @command('heads',
2892 @command('heads',
2894 [('r', 'rev', '',
2893 [('r', 'rev', '',
2895 _('show only heads which are descendants of STARTREV'), _('STARTREV')),
2894 _('show only heads which are descendants of STARTREV'), _('STARTREV')),
2896 ('t', 'topo', False, _('show topological heads only')),
2895 ('t', 'topo', False, _('show topological heads only')),
2897 ('a', 'active', False, _('show active branchheads only (DEPRECATED)')),
2896 ('a', 'active', False, _('show active branchheads only (DEPRECATED)')),
2898 ('c', 'closed', False, _('show normal and closed branch heads')),
2897 ('c', 'closed', False, _('show normal and closed branch heads')),
2899 ] + templateopts,
2898 ] + templateopts,
2900 _('[-ct] [-r STARTREV] [REV]...'),
2899 _('[-ct] [-r STARTREV] [REV]...'),
2901 intents={INTENT_READONLY})
2900 intents={INTENT_READONLY})
2902 def heads(ui, repo, *branchrevs, **opts):
2901 def heads(ui, repo, *branchrevs, **opts):
2903 """show branch heads
2902 """show branch heads
2904
2903
2905 With no arguments, show all open branch heads in the repository.
2904 With no arguments, show all open branch heads in the repository.
2906 Branch heads are changesets that have no descendants on the
2905 Branch heads are changesets that have no descendants on the
2907 same branch. They are where development generally takes place and
2906 same branch. They are where development generally takes place and
2908 are the usual targets for update and merge operations.
2907 are the usual targets for update and merge operations.
2909
2908
2910 If one or more REVs are given, only open branch heads on the
2909 If one or more REVs are given, only open branch heads on the
2911 branches associated with the specified changesets are shown. This
2910 branches associated with the specified changesets are shown. This
2912 means that you can use :hg:`heads .` to see the heads on the
2911 means that you can use :hg:`heads .` to see the heads on the
2913 currently checked-out branch.
2912 currently checked-out branch.
2914
2913
2915 If -c/--closed is specified, also show branch heads marked closed
2914 If -c/--closed is specified, also show branch heads marked closed
2916 (see :hg:`commit --close-branch`).
2915 (see :hg:`commit --close-branch`).
2917
2916
2918 If STARTREV is specified, only those heads that are descendants of
2917 If STARTREV is specified, only those heads that are descendants of
2919 STARTREV will be displayed.
2918 STARTREV will be displayed.
2920
2919
2921 If -t/--topo is specified, named branch mechanics will be ignored and only
2920 If -t/--topo is specified, named branch mechanics will be ignored and only
2922 topological heads (changesets with no children) will be shown.
2921 topological heads (changesets with no children) will be shown.
2923
2922
2924 Returns 0 if matching heads are found, 1 if not.
2923 Returns 0 if matching heads are found, 1 if not.
2925 """
2924 """
2926
2925
2927 opts = pycompat.byteskwargs(opts)
2926 opts = pycompat.byteskwargs(opts)
2928 start = None
2927 start = None
2929 rev = opts.get('rev')
2928 rev = opts.get('rev')
2930 if rev:
2929 if rev:
2931 repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
2930 repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
2932 start = scmutil.revsingle(repo, rev, None).node()
2931 start = scmutil.revsingle(repo, rev, None).node()
2933
2932
2934 if opts.get('topo'):
2933 if opts.get('topo'):
2935 heads = [repo[h] for h in repo.heads(start)]
2934 heads = [repo[h] for h in repo.heads(start)]
2936 else:
2935 else:
2937 heads = []
2936 heads = []
2938 for branch in repo.branchmap():
2937 for branch in repo.branchmap():
2939 heads += repo.branchheads(branch, start, opts.get('closed'))
2938 heads += repo.branchheads(branch, start, opts.get('closed'))
2940 heads = [repo[h] for h in heads]
2939 heads = [repo[h] for h in heads]
2941
2940
2942 if branchrevs:
2941 if branchrevs:
2943 branches = set(repo[r].branch()
2942 branches = set(repo[r].branch()
2944 for r in scmutil.revrange(repo, branchrevs))
2943 for r in scmutil.revrange(repo, branchrevs))
2945 heads = [h for h in heads if h.branch() in branches]
2944 heads = [h for h in heads if h.branch() in branches]
2946
2945
2947 if opts.get('active') and branchrevs:
2946 if opts.get('active') and branchrevs:
2948 dagheads = repo.heads(start)
2947 dagheads = repo.heads(start)
2949 heads = [h for h in heads if h.node() in dagheads]
2948 heads = [h for h in heads if h.node() in dagheads]
2950
2949
2951 if branchrevs:
2950 if branchrevs:
2952 haveheads = set(h.branch() for h in heads)
2951 haveheads = set(h.branch() for h in heads)
2953 if branches - haveheads:
2952 if branches - haveheads:
2954 headless = ', '.join(b for b in branches - haveheads)
2953 headless = ', '.join(b for b in branches - haveheads)
2955 msg = _('no open branch heads found on branches %s')
2954 msg = _('no open branch heads found on branches %s')
2956 if opts.get('rev'):
2955 if opts.get('rev'):
2957 msg += _(' (started at %s)') % opts['rev']
2956 msg += _(' (started at %s)') % opts['rev']
2958 ui.warn((msg + '\n') % headless)
2957 ui.warn((msg + '\n') % headless)
2959
2958
2960 if not heads:
2959 if not heads:
2961 return 1
2960 return 1
2962
2961
2963 ui.pager('heads')
2962 ui.pager('heads')
2964 heads = sorted(heads, key=lambda x: -x.rev())
2963 heads = sorted(heads, key=lambda x: -x.rev())
2965 displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
2964 displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
2966 for ctx in heads:
2965 for ctx in heads:
2967 displayer.show(ctx)
2966 displayer.show(ctx)
2968 displayer.close()
2967 displayer.close()
2969
2968
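As a rough illustration of the head selection above (a toy model, not Mercurial's branchmap code): a branch head is a revision with no child on the same branch, and the result is shown newest-first.

revs = {
    # rev: (branch, parents)
    0: ('default', []),
    1: ('default', [0]),
    2: ('stable',  [1]),
    3: ('default', [1]),
    4: ('stable',  [2]),
}

def branchheads(revs, branch):
    children = {r: [] for r in revs}
    for r, (_b, parents) in revs.items():
        for p in parents:
            children[p].append(r)
    return [r for r, (b, _p) in revs.items()
            if b == branch and not any(revs[c][0] == branch
                                       for c in children[r])]

heads = sorted((r for b in ('default', 'stable')
                for r in branchheads(revs, b)), reverse=True)
print(heads)   # [4, 3] -- newest first, one head per branch in this toy DAG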
2970 @command('help',
2969 @command('help',
2971 [('e', 'extension', None, _('show only help for extensions')),
2970 [('e', 'extension', None, _('show only help for extensions')),
2972 ('c', 'command', None, _('show only help for commands')),
2971 ('c', 'command', None, _('show only help for commands')),
2973 ('k', 'keyword', None, _('show topics matching keyword')),
2972 ('k', 'keyword', None, _('show topics matching keyword')),
2974 ('s', 'system', [], _('show help for specific platform(s)')),
2973 ('s', 'system', [], _('show help for specific platform(s)')),
2975 ],
2974 ],
2976 _('[-ecks] [TOPIC]'),
2975 _('[-ecks] [TOPIC]'),
2977 norepo=True,
2976 norepo=True,
2978 intents={INTENT_READONLY})
2977 intents={INTENT_READONLY})
2979 def help_(ui, name=None, **opts):
2978 def help_(ui, name=None, **opts):
2980 """show help for a given topic or a help overview
2979 """show help for a given topic or a help overview
2981
2980
2982 With no arguments, print a list of commands with short help messages.
2981 With no arguments, print a list of commands with short help messages.
2983
2982
2984 Given a topic, extension, or command name, print help for that
2983 Given a topic, extension, or command name, print help for that
2985 topic.
2984 topic.
2986
2985
2987 Returns 0 if successful.
2986 Returns 0 if successful.
2988 """
2987 """
2989
2988
2990 keep = opts.get(r'system') or []
2989 keep = opts.get(r'system') or []
2991 if len(keep) == 0:
2990 if len(keep) == 0:
2992 if pycompat.sysplatform.startswith('win'):
2991 if pycompat.sysplatform.startswith('win'):
2993 keep.append('windows')
2992 keep.append('windows')
2994 elif pycompat.sysplatform == 'OpenVMS':
2993 elif pycompat.sysplatform == 'OpenVMS':
2995 keep.append('vms')
2994 keep.append('vms')
2996 elif pycompat.sysplatform == 'plan9':
2995 elif pycompat.sysplatform == 'plan9':
2997 keep.append('plan9')
2996 keep.append('plan9')
2998 else:
2997 else:
2999 keep.append('unix')
2998 keep.append('unix')
3000 keep.append(pycompat.sysplatform.lower())
2999 keep.append(pycompat.sysplatform.lower())
3001 if ui.verbose:
3000 if ui.verbose:
3002 keep.append('verbose')
3001 keep.append('verbose')
3003
3002
3004 commands = sys.modules[__name__]
3003 commands = sys.modules[__name__]
3005 formatted = help.formattedhelp(ui, commands, name, keep=keep, **opts)
3004 formatted = help.formattedhelp(ui, commands, name, keep=keep, **opts)
3006 ui.pager('help')
3005 ui.pager('help')
3007 ui.write(formatted)
3006 ui.write(formatted)
3008
3007
3009
3008
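The keyword selection above maps the platform to a help-topic filter. A simplified, hedged sketch (only the windows/unix branches; the code above also handles OpenVMS and plan9):

import sys

def helpkeywords(platform=sys.platform, verbose=False):
    keep = ['windows'] if platform.startswith('win') else ['unix']
    keep.append(platform.lower())
    if verbose:
        keep.append('verbose')
    return keep

print(helpkeywords('linux', verbose=True))   # ['unix', 'linux', 'verbose']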
3010 @command('identify|id',
3009 @command('identify|id',
3011 [('r', 'rev', '',
3010 [('r', 'rev', '',
3012 _('identify the specified revision'), _('REV')),
3011 _('identify the specified revision'), _('REV')),
3013 ('n', 'num', None, _('show local revision number')),
3012 ('n', 'num', None, _('show local revision number')),
3014 ('i', 'id', None, _('show global revision id')),
3013 ('i', 'id', None, _('show global revision id')),
3015 ('b', 'branch', None, _('show branch')),
3014 ('b', 'branch', None, _('show branch')),
3016 ('t', 'tags', None, _('show tags')),
3015 ('t', 'tags', None, _('show tags')),
3017 ('B', 'bookmarks', None, _('show bookmarks')),
3016 ('B', 'bookmarks', None, _('show bookmarks')),
3018 ] + remoteopts + formatteropts,
3017 ] + remoteopts + formatteropts,
3019 _('[-nibtB] [-r REV] [SOURCE]'),
3018 _('[-nibtB] [-r REV] [SOURCE]'),
3020 optionalrepo=True,
3019 optionalrepo=True,
3021 intents={INTENT_READONLY})
3020 intents={INTENT_READONLY})
3022 def identify(ui, repo, source=None, rev=None,
3021 def identify(ui, repo, source=None, rev=None,
3023 num=None, id=None, branch=None, tags=None, bookmarks=None, **opts):
3022 num=None, id=None, branch=None, tags=None, bookmarks=None, **opts):
3024 """identify the working directory or specified revision
3023 """identify the working directory or specified revision
3025
3024
3026 Print a summary identifying the repository state at REV using one or
3025 Print a summary identifying the repository state at REV using one or
3027 two parent hash identifiers, followed by a "+" if the working
3026 two parent hash identifiers, followed by a "+" if the working
3028 directory has uncommitted changes, the branch name (if not default),
3027 directory has uncommitted changes, the branch name (if not default),
3029 a list of tags, and a list of bookmarks.
3028 a list of tags, and a list of bookmarks.
3030
3029
3031 When REV is not given, print a summary of the current state of the
3030 When REV is not given, print a summary of the current state of the
3032 repository, including the working directory. Specify -r. to get information
3031 repository, including the working directory. Specify -r. to get information
3033 about the working directory parent without scanning uncommitted changes.
3032 about the working directory parent without scanning uncommitted changes.
3034
3033
3035 Specifying a path to a repository root or Mercurial bundle will
3034 Specifying a path to a repository root or Mercurial bundle will
3036 cause lookup to operate on that repository/bundle.
3035 cause lookup to operate on that repository/bundle.
3037
3036
3038 .. container:: verbose
3037 .. container:: verbose
3039
3038
3040 Examples:
3039 Examples:
3041
3040
3042 - generate a build identifier for the working directory::
3041 - generate a build identifier for the working directory::
3043
3042
3044 hg id --id > build-id.dat
3043 hg id --id > build-id.dat
3045
3044
3046 - find the revision corresponding to a tag::
3045 - find the revision corresponding to a tag::
3047
3046
3048 hg id -n -r 1.3
3047 hg id -n -r 1.3
3049
3048
3050 - check the most recent revision of a remote repository::
3049 - check the most recent revision of a remote repository::
3051
3050
3052 hg id -r tip https://www.mercurial-scm.org/repo/hg/
3051 hg id -r tip https://www.mercurial-scm.org/repo/hg/
3053
3052
3054 See :hg:`log` for generating more information about specific revisions,
3053 See :hg:`log` for generating more information about specific revisions,
3055 including full hash identifiers.
3054 including full hash identifiers.
3056
3055
3057 Returns 0 if successful.
3056 Returns 0 if successful.
3058 """
3057 """
3059
3058
3060 opts = pycompat.byteskwargs(opts)
3059 opts = pycompat.byteskwargs(opts)
3061 if not repo and not source:
3060 if not repo and not source:
3062 raise error.Abort(_("there is no Mercurial repository here "
3061 raise error.Abort(_("there is no Mercurial repository here "
3063 "(.hg not found)"))
3062 "(.hg not found)"))
3064
3063
3065 default = not (num or id or branch or tags or bookmarks)
3064 default = not (num or id or branch or tags or bookmarks)
3066 output = []
3065 output = []
3067 revs = []
3066 revs = []
3068
3067
3069 if source:
3068 if source:
3070 source, branches = hg.parseurl(ui.expandpath(source))
3069 source, branches = hg.parseurl(ui.expandpath(source))
3071 peer = hg.peer(repo or ui, opts, source) # only pass ui when no repo
3070 peer = hg.peer(repo or ui, opts, source) # only pass ui when no repo
3072 repo = peer.local()
3071 repo = peer.local()
3073 revs, checkout = hg.addbranchrevs(repo, peer, branches, None)
3072 revs, checkout = hg.addbranchrevs(repo, peer, branches, None)
3074
3073
3075 fm = ui.formatter('identify', opts)
3074 fm = ui.formatter('identify', opts)
3076 fm.startitem()
3075 fm.startitem()
3077
3076
3078 if not repo:
3077 if not repo:
3079 if num or branch or tags:
3078 if num or branch or tags:
3080 raise error.Abort(
3079 raise error.Abort(
3081 _("can't query remote revision number, branch, or tags"))
3080 _("can't query remote revision number, branch, or tags"))
3082 if not rev and revs:
3081 if not rev and revs:
3083 rev = revs[0]
3082 rev = revs[0]
3084 if not rev:
3083 if not rev:
3085 rev = "tip"
3084 rev = "tip"
3086
3085
3087 remoterev = peer.lookup(rev)
3086 remoterev = peer.lookup(rev)
3088 hexrev = fm.hexfunc(remoterev)
3087 hexrev = fm.hexfunc(remoterev)
3089 if default or id:
3088 if default or id:
3090 output = [hexrev]
3089 output = [hexrev]
3091 fm.data(id=hexrev)
3090 fm.data(id=hexrev)
3092
3091
3093 @util.cachefunc
3092 @util.cachefunc
3094 def getbms():
3093 def getbms():
3095 bms = []
3094 bms = []
3096
3095
3097 if 'bookmarks' in peer.listkeys('namespaces'):
3096 if 'bookmarks' in peer.listkeys('namespaces'):
3098 hexremoterev = hex(remoterev)
3097 hexremoterev = hex(remoterev)
3099 bms = [bm for bm, bmr in peer.listkeys('bookmarks').iteritems()
3098 bms = [bm for bm, bmr in peer.listkeys('bookmarks').iteritems()
3100 if bmr == hexremoterev]
3099 if bmr == hexremoterev]
3101
3100
3102 return sorted(bms)
3101 return sorted(bms)
3103
3102
3104 if fm.isplain():
3103 if fm.isplain():
3105 if bookmarks:
3104 if bookmarks:
3106 output.extend(getbms())
3105 output.extend(getbms())
3107 elif default and not ui.quiet:
3106 elif default and not ui.quiet:
3108 # multiple bookmarks for a single parent separated by '/'
3107 # multiple bookmarks for a single parent separated by '/'
3109 bm = '/'.join(getbms())
3108 bm = '/'.join(getbms())
3110 if bm:
3109 if bm:
3111 output.append(bm)
3110 output.append(bm)
3112 else:
3111 else:
3113 fm.data(node=hex(remoterev))
3112 fm.data(node=hex(remoterev))
3114 if bookmarks or 'bookmarks' in fm.datahint():
3113 if bookmarks or 'bookmarks' in fm.datahint():
3115 fm.data(bookmarks=fm.formatlist(getbms(), name='bookmark'))
3114 fm.data(bookmarks=fm.formatlist(getbms(), name='bookmark'))
3116 else:
3115 else:
3117 if rev:
3116 if rev:
3118 repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
3117 repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
3119 ctx = scmutil.revsingle(repo, rev, None)
3118 ctx = scmutil.revsingle(repo, rev, None)
3120
3119
3121 if ctx.rev() is None:
3120 if ctx.rev() is None:
3122 ctx = repo[None]
3121 ctx = repo[None]
3123 parents = ctx.parents()
3122 parents = ctx.parents()
3124 taglist = []
3123 taglist = []
3125 for p in parents:
3124 for p in parents:
3126 taglist.extend(p.tags())
3125 taglist.extend(p.tags())
3127
3126
3128 dirty = ""
3127 dirty = ""
3129 if ctx.dirty(missing=True, merge=False, branch=False):
3128 if ctx.dirty(missing=True, merge=False, branch=False):
3130 dirty = '+'
3129 dirty = '+'
3131 fm.data(dirty=dirty)
3130 fm.data(dirty=dirty)
3132
3131
3133 hexoutput = [fm.hexfunc(p.node()) for p in parents]
3132 hexoutput = [fm.hexfunc(p.node()) for p in parents]
3134 if default or id:
3133 if default or id:
3135 output = ["%s%s" % ('+'.join(hexoutput), dirty)]
3134 output = ["%s%s" % ('+'.join(hexoutput), dirty)]
3136 fm.data(id="%s%s" % ('+'.join(hexoutput), dirty))
3135 fm.data(id="%s%s" % ('+'.join(hexoutput), dirty))
3137
3136
3138 if num:
3137 if num:
3139 numoutput = ["%d" % p.rev() for p in parents]
3138 numoutput = ["%d" % p.rev() for p in parents]
3140 output.append("%s%s" % ('+'.join(numoutput), dirty))
3139 output.append("%s%s" % ('+'.join(numoutput), dirty))
3141
3140
3142 fm.data(parents=fm.formatlist([fm.hexfunc(p.node())
3141 fm.data(parents=fm.formatlist([fm.hexfunc(p.node())
3143 for p in parents], name='node'))
3142 for p in parents], name='node'))
3144 else:
3143 else:
3145 hexoutput = fm.hexfunc(ctx.node())
3144 hexoutput = fm.hexfunc(ctx.node())
3146 if default or id:
3145 if default or id:
3147 output = [hexoutput]
3146 output = [hexoutput]
3148 fm.data(id=hexoutput)
3147 fm.data(id=hexoutput)
3149
3148
3150 if num:
3149 if num:
3151 output.append(pycompat.bytestr(ctx.rev()))
3150 output.append(pycompat.bytestr(ctx.rev()))
3152 taglist = ctx.tags()
3151 taglist = ctx.tags()
3153
3152
3154 if default and not ui.quiet:
3153 if default and not ui.quiet:
3155 b = ctx.branch()
3154 b = ctx.branch()
3156 if b != 'default':
3155 if b != 'default':
3157 output.append("(%s)" % b)
3156 output.append("(%s)" % b)
3158
3157
3159 # multiple tags for a single parent separated by '/'
3158 # multiple tags for a single parent separated by '/'
3160 t = '/'.join(taglist)
3159 t = '/'.join(taglist)
3161 if t:
3160 if t:
3162 output.append(t)
3161 output.append(t)
3163
3162
3164 # multiple bookmarks for a single parent separated by '/'
3163 # multiple bookmarks for a single parent separated by '/'
3165 bm = '/'.join(ctx.bookmarks())
3164 bm = '/'.join(ctx.bookmarks())
3166 if bm:
3165 if bm:
3167 output.append(bm)
3166 output.append(bm)
3168 else:
3167 else:
3169 if branch:
3168 if branch:
3170 output.append(ctx.branch())
3169 output.append(ctx.branch())
3171
3170
3172 if tags:
3171 if tags:
3173 output.extend(taglist)
3172 output.extend(taglist)
3174
3173
3175 if bookmarks:
3174 if bookmarks:
3176 output.extend(ctx.bookmarks())
3175 output.extend(ctx.bookmarks())
3177
3176
3178 fm.data(node=ctx.hex())
3177 fm.data(node=ctx.hex())
3179 fm.data(branch=ctx.branch())
3178 fm.data(branch=ctx.branch())
3180 fm.data(tags=fm.formatlist(taglist, name='tag', sep=':'))
3179 fm.data(tags=fm.formatlist(taglist, name='tag', sep=':'))
3181 fm.data(bookmarks=fm.formatlist(ctx.bookmarks(), name='bookmark'))
3180 fm.data(bookmarks=fm.formatlist(ctx.bookmarks(), name='bookmark'))
3182 fm.context(ctx=ctx)
3181 fm.context(ctx=ctx)
3183
3182
3184 fm.plain("%s\n" % ' '.join(output))
3183 fm.plain("%s\n" % ' '.join(output))
3185 fm.end()
3184 fm.end()
3186
3185
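For the plain-output path above, the summary is the parent ids joined by '+', a trailing '+' when the working copy is dirty, then branch (if not default), tags and bookmarks. A small sketch under those assumptions (idsummary and the sample hash are made up for illustration):

def idsummary(parent_hashes, dirty, branch='default', tags=(), bookmarks=()):
    out = ['%s%s' % ('+'.join(h[:12] for h in parent_hashes),
                     '+' if dirty else '')]
    if branch != 'default':
        out.append('(%s)' % branch)
    if tags:
        out.append('/'.join(tags))
    if bookmarks:
        out.append('/'.join(bookmarks))
    return ' '.join(out)

print(idsummary(['a1b2c3d4e5f60718293a4b5c6d7e8f9012345678'],
                dirty=True, branch='stable', tags=['tip'], bookmarks=['work']))
# a1b2c3d4e5f6+ (stable) tip work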
3187 @command('import|patch',
3186 @command('import|patch',
3188 [('p', 'strip', 1,
3187 [('p', 'strip', 1,
3189 _('directory strip option for patch. This has the same '
3188 _('directory strip option for patch. This has the same '
3190 'meaning as the corresponding patch option'), _('NUM')),
3189 'meaning as the corresponding patch option'), _('NUM')),
3191 ('b', 'base', '', _('base path (DEPRECATED)'), _('PATH')),
3190 ('b', 'base', '', _('base path (DEPRECATED)'), _('PATH')),
3192 ('e', 'edit', False, _('invoke editor on commit messages')),
3191 ('e', 'edit', False, _('invoke editor on commit messages')),
3193 ('f', 'force', None,
3192 ('f', 'force', None,
3194 _('skip check for outstanding uncommitted changes (DEPRECATED)')),
3193 _('skip check for outstanding uncommitted changes (DEPRECATED)')),
3195 ('', 'no-commit', None,
3194 ('', 'no-commit', None,
3196 _("don't commit, just update the working directory")),
3195 _("don't commit, just update the working directory")),
3197 ('', 'bypass', None,
3196 ('', 'bypass', None,
3198 _("apply patch without touching the working directory")),
3197 _("apply patch without touching the working directory")),
3199 ('', 'partial', None,
3198 ('', 'partial', None,
3200 _('commit even if some hunks fail')),
3199 _('commit even if some hunks fail')),
3201 ('', 'exact', None,
3200 ('', 'exact', None,
3202 _('abort if patch would apply lossily')),
3201 _('abort if patch would apply lossily')),
3203 ('', 'prefix', '',
3202 ('', 'prefix', '',
3204 _('apply patch to subdirectory'), _('DIR')),
3203 _('apply patch to subdirectory'), _('DIR')),
3205 ('', 'import-branch', None,
3204 ('', 'import-branch', None,
3206 _('use any branch information in patch (implied by --exact)'))] +
3205 _('use any branch information in patch (implied by --exact)'))] +
3207 commitopts + commitopts2 + similarityopts,
3206 commitopts + commitopts2 + similarityopts,
3208 _('[OPTION]... PATCH...'))
3207 _('[OPTION]... PATCH...'))
3209 def import_(ui, repo, patch1=None, *patches, **opts):
3208 def import_(ui, repo, patch1=None, *patches, **opts):
3210 """import an ordered set of patches
3209 """import an ordered set of patches
3211
3210
3212 Import a list of patches and commit them individually (unless
3211 Import a list of patches and commit them individually (unless
3213 --no-commit is specified).
3212 --no-commit is specified).
3214
3213
3215 To read a patch from standard input (stdin), use "-" as the patch
3214 To read a patch from standard input (stdin), use "-" as the patch
3216 name. If a URL is specified, the patch will be downloaded from
3215 name. If a URL is specified, the patch will be downloaded from
3217 there.
3216 there.
3218
3217
3219 Import first applies changes to the working directory (unless
3218 Import first applies changes to the working directory (unless
3220 --bypass is specified); import will abort if there are outstanding
3219 --bypass is specified); import will abort if there are outstanding
3221 changes.
3220 changes.
3222
3221
3223 Use --bypass to apply and commit patches directly to the
3222 Use --bypass to apply and commit patches directly to the
3224 repository, without affecting the working directory. Without
3223 repository, without affecting the working directory. Without
3225 --exact, patches will be applied on top of the working directory
3224 --exact, patches will be applied on top of the working directory
3226 parent revision.
3225 parent revision.
3227
3226
3228 You can import a patch straight from a mail message. Even patches
3227 You can import a patch straight from a mail message. Even patches
3229 as attachments work (to use the body part, it must have type
3228 as attachments work (to use the body part, it must have type
3230 text/plain or text/x-patch). From and Subject headers of the email
3229 text/plain or text/x-patch). From and Subject headers of the email
3231 message are used as the default committer and commit message. All
3230 message are used as the default committer and commit message. All
3232 text/plain body parts before first diff are added to the commit
3231 text/plain body parts before first diff are added to the commit
3233 message.
3232 message.
3234
3233
3235 If the imported patch was generated by :hg:`export`, user and
3234 If the imported patch was generated by :hg:`export`, user and
3236 description from patch override values from message headers and
3235 description from patch override values from message headers and
3237 body. Values given on command line with -m/--message and -u/--user
3236 body. Values given on command line with -m/--message and -u/--user
3238 override these.
3237 override these.
3239
3238
3240 If --exact is specified, import will set the working directory to
3239 If --exact is specified, import will set the working directory to
3241 the parent of each patch before applying it, and will abort if the
3240 the parent of each patch before applying it, and will abort if the
3242 resulting changeset has a different ID than the one recorded in
3241 resulting changeset has a different ID than the one recorded in
3243 the patch. This will guard against various ways that portable
3242 the patch. This will guard against various ways that portable
3244 patch formats and mail systems might fail to transfer Mercurial
3243 patch formats and mail systems might fail to transfer Mercurial
3245 data or metadata. See :hg:`bundle` for lossless transmission.
3244 data or metadata. See :hg:`bundle` for lossless transmission.
3246
3245
3247 Use --partial to ensure a changeset will be created from the patch
3246 Use --partial to ensure a changeset will be created from the patch
3248 even if some hunks fail to apply. Hunks that fail to apply will be
3247 even if some hunks fail to apply. Hunks that fail to apply will be
3249 written to a <target-file>.rej file. Conflicts can then be resolved
3248 written to a <target-file>.rej file. Conflicts can then be resolved
3250 by hand before :hg:`commit --amend` is run to update the created
3249 by hand before :hg:`commit --amend` is run to update the created
3251 changeset. This flag exists to let people import patches that
3250 changeset. This flag exists to let people import patches that
3252 partially apply without losing the associated metadata (author,
3251 partially apply without losing the associated metadata (author,
3253 date, description, ...).
3252 date, description, ...).
3254
3253
3255 .. note::
3254 .. note::
3256
3255
3257 When no hunks apply cleanly, :hg:`import --partial` will create
3256 When no hunks apply cleanly, :hg:`import --partial` will create
3258 an empty changeset, importing only the patch metadata.
3257 an empty changeset, importing only the patch metadata.
3259
3258
3260 With -s/--similarity, hg will attempt to discover renames and
3259 With -s/--similarity, hg will attempt to discover renames and
3261 copies in the patch in the same way as :hg:`addremove`.
3260 copies in the patch in the same way as :hg:`addremove`.
3262
3261
3263 It is possible to use external patch programs to perform the patch
3262 It is possible to use external patch programs to perform the patch
3264 by setting the ``ui.patch`` configuration option. For the default
3263 by setting the ``ui.patch`` configuration option. For the default
3265 internal tool, the fuzz can also be configured via ``patch.fuzz``.
3264 internal tool, the fuzz can also be configured via ``patch.fuzz``.
3266 See :hg:`help config` for more information about configuration
3265 See :hg:`help config` for more information about configuration
3267 files and how to use these options.
3266 files and how to use these options.
3268
3267
3269 See :hg:`help dates` for a list of formats valid for -d/--date.
3268 See :hg:`help dates` for a list of formats valid for -d/--date.
3270
3269
3271 .. container:: verbose
3270 .. container:: verbose
3272
3271
3273 Examples:
3272 Examples:
3274
3273
3275 - import a traditional patch from a website and detect renames::
3274 - import a traditional patch from a website and detect renames::
3276
3275
3277 hg import -s 80 http://example.com/bugfix.patch
3276 hg import -s 80 http://example.com/bugfix.patch
3278
3277
3279 - import a changeset from an hgweb server::
3278 - import a changeset from an hgweb server::
3280
3279
3281 hg import https://www.mercurial-scm.org/repo/hg/rev/5ca8c111e9aa
3280 hg import https://www.mercurial-scm.org/repo/hg/rev/5ca8c111e9aa
3282
3281
3283 - import all the patches in a Unix-style mbox::
3282 - import all the patches in a Unix-style mbox::
3284
3283
3285 hg import incoming-patches.mbox
3284 hg import incoming-patches.mbox
3286
3285
3287 - import patches from stdin::
3286 - import patches from stdin::
3288
3287
3289 hg import -
3288 hg import -
3290
3289
3291 - attempt to exactly restore an exported changeset (not always
3290 - attempt to exactly restore an exported changeset (not always
3292 possible)::
3291 possible)::
3293
3292
3294 hg import --exact proposed-fix.patch
3293 hg import --exact proposed-fix.patch
3295
3294
3296 - use an external tool to apply a patch which is too fuzzy for
3295 - use an external tool to apply a patch which is too fuzzy for
3297 the default internal tool::
3296 the default internal tool::
3298
3297
3299 hg import --config ui.patch="patch --merge" fuzzy.patch
3298 hg import --config ui.patch="patch --merge" fuzzy.patch
3300
3299
3301 - change the default fuzzing from 2 to a less strict 7::
3300 - change the default fuzzing from 2 to a less strict 7::
3302
3301
3303 hg import --config patch.fuzz=7 fuzz.patch
3302 hg import --config patch.fuzz=7 fuzz.patch
3304
3303
3305 Returns 0 on success, 1 on partial success (see --partial).
3304 Returns 0 on success, 1 on partial success (see --partial).
3306 """
3305 """
3307
3306
3308 opts = pycompat.byteskwargs(opts)
3307 opts = pycompat.byteskwargs(opts)
3309 if not patch1:
3308 if not patch1:
3310 raise error.Abort(_('need at least one patch to import'))
3309 raise error.Abort(_('need at least one patch to import'))
3311
3310
3312 patches = (patch1,) + patches
3311 patches = (patch1,) + patches
3313
3312
3314 date = opts.get('date')
3313 date = opts.get('date')
3315 if date:
3314 if date:
3316 opts['date'] = dateutil.parsedate(date)
3315 opts['date'] = dateutil.parsedate(date)
3317
3316
3318 exact = opts.get('exact')
3317 exact = opts.get('exact')
3319 update = not opts.get('bypass')
3318 update = not opts.get('bypass')
3320 if not update and opts.get('no_commit'):
3319 if not update and opts.get('no_commit'):
3321 raise error.Abort(_('cannot use --no-commit with --bypass'))
3320 raise error.Abort(_('cannot use --no-commit with --bypass'))
3322 try:
3321 try:
3323 sim = float(opts.get('similarity') or 0)
3322 sim = float(opts.get('similarity') or 0)
3324 except ValueError:
3323 except ValueError:
3325 raise error.Abort(_('similarity must be a number'))
3324 raise error.Abort(_('similarity must be a number'))
3326 if sim < 0 or sim > 100:
3325 if sim < 0 or sim > 100:
3327 raise error.Abort(_('similarity must be between 0 and 100'))
3326 raise error.Abort(_('similarity must be between 0 and 100'))
3328 if sim and not update:
3327 if sim and not update:
3329 raise error.Abort(_('cannot use --similarity with --bypass'))
3328 raise error.Abort(_('cannot use --similarity with --bypass'))
3330 if exact:
3329 if exact:
3331 if opts.get('edit'):
3330 if opts.get('edit'):
3332 raise error.Abort(_('cannot use --exact with --edit'))
3331 raise error.Abort(_('cannot use --exact with --edit'))
3333 if opts.get('prefix'):
3332 if opts.get('prefix'):
3334 raise error.Abort(_('cannot use --exact with --prefix'))
3333 raise error.Abort(_('cannot use --exact with --prefix'))
3335
3334
3336 base = opts["base"]
3335 base = opts["base"]
3337 msgs = []
3336 msgs = []
3338 ret = 0
3337 ret = 0
3339
3338
3340 with repo.wlock():
3339 with repo.wlock():
3341 if update:
3340 if update:
3342 cmdutil.checkunfinished(repo)
3341 cmdutil.checkunfinished(repo)
3343 if (exact or not opts.get('force')):
3342 if (exact or not opts.get('force')):
3344 cmdutil.bailifchanged(repo)
3343 cmdutil.bailifchanged(repo)
3345
3344
3346 if not opts.get('no_commit'):
3345 if not opts.get('no_commit'):
3347 lock = repo.lock
3346 lock = repo.lock
3348 tr = lambda: repo.transaction('import')
3347 tr = lambda: repo.transaction('import')
3349 dsguard = util.nullcontextmanager
3348 dsguard = util.nullcontextmanager
3350 else:
3349 else:
3351 lock = util.nullcontextmanager
3350 lock = util.nullcontextmanager
3352 tr = util.nullcontextmanager
3351 tr = util.nullcontextmanager
3353 dsguard = lambda: dirstateguard.dirstateguard(repo, 'import')
3352 dsguard = lambda: dirstateguard.dirstateguard(repo, 'import')
3354 with lock(), tr(), dsguard():
3353 with lock(), tr(), dsguard():
3355 parents = repo[None].parents()
3354 parents = repo[None].parents()
3356 for patchurl in patches:
3355 for patchurl in patches:
3357 if patchurl == '-':
3356 if patchurl == '-':
3358 ui.status(_('applying patch from stdin\n'))
3357 ui.status(_('applying patch from stdin\n'))
3359 patchfile = ui.fin
3358 patchfile = ui.fin
3360 patchurl = 'stdin' # for error message
3359 patchurl = 'stdin' # for error message
3361 else:
3360 else:
3362 patchurl = os.path.join(base, patchurl)
3361 patchurl = os.path.join(base, patchurl)
3363 ui.status(_('applying %s\n') % patchurl)
3362 ui.status(_('applying %s\n') % patchurl)
3364 patchfile = hg.openpath(ui, patchurl)
3363 patchfile = hg.openpath(ui, patchurl)
3365
3364
3366 haspatch = False
3365 haspatch = False
3367 for hunk in patch.split(patchfile):
3366 for hunk in patch.split(patchfile):
3368 with patch.extract(ui, hunk) as patchdata:
3367 with patch.extract(ui, hunk) as patchdata:
3369 msg, node, rej = cmdutil.tryimportone(ui, repo,
3368 msg, node, rej = cmdutil.tryimportone(ui, repo,
3370 patchdata,
3369 patchdata,
3371 parents, opts,
3370 parents, opts,
3372 msgs, hg.clean)
3371 msgs, hg.clean)
3373 if msg:
3372 if msg:
3374 haspatch = True
3373 haspatch = True
3375 ui.note(msg + '\n')
3374 ui.note(msg + '\n')
3376 if update or exact:
3375 if update or exact:
3377 parents = repo[None].parents()
3376 parents = repo[None].parents()
3378 else:
3377 else:
3379 parents = [repo[node]]
3378 parents = [repo[node]]
3380 if rej:
3379 if rej:
3381 ui.write_err(_("patch applied partially\n"))
3380 ui.write_err(_("patch applied partially\n"))
3382 ui.write_err(_("(fix the .rej files and run "
3381 ui.write_err(_("(fix the .rej files and run "
3383 "`hg commit --amend`)\n"))
3382 "`hg commit --amend`)\n"))
3384 ret = 1
3383 ret = 1
3385 break
3384 break
3386
3385
3387 if not haspatch:
3386 if not haspatch:
3388 raise error.Abort(_('%s: no diffs found') % patchurl)
3387 raise error.Abort(_('%s: no diffs found') % patchurl)
3389
3388
3390 if msgs:
3389 if msgs:
3391 repo.savecommitmessage('\n* * *\n'.join(msgs))
3390 repo.savecommitmessage('\n* * *\n'.join(msgs))
3392 return ret
3391 return ret
3393
3392
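The lock/tr/dsguard selection above binds either real context-manager factories or no-op ones (util.nullcontextmanager) and then enters whichever was chosen. A Python 3 sketch of the same pattern (repolock is a stand-in, not the repository API):

import contextlib

@contextlib.contextmanager
def repolock():
    print('acquired')
    try:
        yield
    finally:
        print('released')

def run(no_commit):
    lock = contextlib.nullcontext if no_commit else repolock
    with lock():
        print('applying patches (no_commit=%s)' % no_commit)

run(no_commit=False)   # acquired / applying ... / released
run(no_commit=True)    # applying ... only, nothing is locked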
3394 @command('incoming|in',
3393 @command('incoming|in',
3395 [('f', 'force', None,
3394 [('f', 'force', None,
3396 _('run even if remote repository is unrelated')),
3395 _('run even if remote repository is unrelated')),
3397 ('n', 'newest-first', None, _('show newest record first')),
3396 ('n', 'newest-first', None, _('show newest record first')),
3398 ('', 'bundle', '',
3397 ('', 'bundle', '',
3399 _('file to store the bundles into'), _('FILE')),
3398 _('file to store the bundles into'), _('FILE')),
3400 ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
3399 ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
3401 ('B', 'bookmarks', False, _("compare bookmarks")),
3400 ('B', 'bookmarks', False, _("compare bookmarks")),
3402 ('b', 'branch', [],
3401 ('b', 'branch', [],
3403 _('a specific branch you would like to pull'), _('BRANCH')),
3402 _('a specific branch you would like to pull'), _('BRANCH')),
3404 ] + logopts + remoteopts + subrepoopts,
3403 ] + logopts + remoteopts + subrepoopts,
3405 _('[-p] [-n] [-M] [-f] [-r REV]... [--bundle FILENAME] [SOURCE]'))
3404 _('[-p] [-n] [-M] [-f] [-r REV]... [--bundle FILENAME] [SOURCE]'))
3406 def incoming(ui, repo, source="default", **opts):
3405 def incoming(ui, repo, source="default", **opts):
3407 """show new changesets found in source
3406 """show new changesets found in source
3408
3407
3409 Show new changesets found in the specified path/URL or the default
3408 Show new changesets found in the specified path/URL or the default
3410 pull location. These are the changesets that would have been pulled
3409 pull location. These are the changesets that would have been pulled
3411 by :hg:`pull` at the time you issued this command.
3410 by :hg:`pull` at the time you issued this command.
3412
3411
3413 See :hg:`pull` for valid source format details.
3412 See :hg:`pull` for valid source format details.
3414
3413
3415 .. container:: verbose
3414 .. container:: verbose
3416
3415
3417 With -B/--bookmarks, the result of bookmark comparison between
3416 With -B/--bookmarks, the result of bookmark comparison between
3418 local and remote repositories is displayed. With -v/--verbose,
3417 local and remote repositories is displayed. With -v/--verbose,
3419 status is also displayed for each bookmark like below::
3418 status is also displayed for each bookmark like below::
3420
3419
3421 BM1 01234567890a added
3420 BM1 01234567890a added
3422 BM2 1234567890ab advanced
3421 BM2 1234567890ab advanced
3423 BM3 234567890abc diverged
3422 BM3 234567890abc diverged
3424 BM4 34567890abcd changed
3423 BM4 34567890abcd changed
3425
3424
3426 The action taken locally when pulling depends on the
3425 The action taken locally when pulling depends on the
3427 status of each bookmark:
3426 status of each bookmark:
3428
3427
3429 :``added``: pull will create it
3428 :``added``: pull will create it
3430 :``advanced``: pull will update it
3429 :``advanced``: pull will update it
3431 :``diverged``: pull will create a divergent bookmark
3430 :``diverged``: pull will create a divergent bookmark
3432 :``changed``: result depends on remote changesets
3431 :``changed``: result depends on remote changesets
3433
3432
3434 From the point of view of pulling behavior, bookmarks
3433 From the point of view of pulling behavior, bookmarks
3435 existing only in the remote repository are treated as ``added``,
3434 existing only in the remote repository are treated as ``added``,
3436 even if they are in fact locally deleted.
3435 even if they are in fact locally deleted.
3437
3436
3438 .. container:: verbose
3437 .. container:: verbose
3439
3438
3440 For a remote repository, using --bundle avoids downloading the
3439 For a remote repository, using --bundle avoids downloading the
3441 changesets twice if the incoming is followed by a pull.
3440 changesets twice if the incoming is followed by a pull.
3442
3441
3443 Examples:
3442 Examples:
3444
3443
3445 - show incoming changes with patches and full description::
3444 - show incoming changes with patches and full description::
3446
3445
3447 hg incoming -vp
3446 hg incoming -vp
3448
3447
3449 - show incoming changes excluding merges, store a bundle::
3448 - show incoming changes excluding merges, store a bundle::
3450
3449
3451 hg in -vpM --bundle incoming.hg
3450 hg in -vpM --bundle incoming.hg
3452 hg pull incoming.hg
3451 hg pull incoming.hg
3453
3452
3454 - briefly list changes inside a bundle::
3453 - briefly list changes inside a bundle::
3455
3454
3456 hg in changes.hg -T "{desc|firstline}\\n"
3455 hg in changes.hg -T "{desc|firstline}\\n"
3457
3456
3458 Returns 0 if there are incoming changes, 1 otherwise.
3457 Returns 0 if there are incoming changes, 1 otherwise.
3459 """
3458 """
3460 opts = pycompat.byteskwargs(opts)
3459 opts = pycompat.byteskwargs(opts)
3461 if opts.get('graph'):
3460 if opts.get('graph'):
3462 logcmdutil.checkunsupportedgraphflags([], opts)
3461 logcmdutil.checkunsupportedgraphflags([], opts)
3463 def display(other, chlist, displayer):
3462 def display(other, chlist, displayer):
3464 revdag = logcmdutil.graphrevs(other, chlist, opts)
3463 revdag = logcmdutil.graphrevs(other, chlist, opts)
3465 logcmdutil.displaygraph(ui, repo, revdag, displayer,
3464 logcmdutil.displaygraph(ui, repo, revdag, displayer,
3466 graphmod.asciiedges)
3465 graphmod.asciiedges)
3467
3466
3468 hg._incoming(display, lambda: 1, ui, repo, source, opts, buffered=True)
3467 hg._incoming(display, lambda: 1, ui, repo, source, opts, buffered=True)
3469 return 0
3468 return 0
3470
3469
3471 if opts.get('bundle') and opts.get('subrepos'):
3470 if opts.get('bundle') and opts.get('subrepos'):
3472 raise error.Abort(_('cannot combine --bundle and --subrepos'))
3471 raise error.Abort(_('cannot combine --bundle and --subrepos'))
3473
3472
3474 if opts.get('bookmarks'):
3473 if opts.get('bookmarks'):
3475 source, branches = hg.parseurl(ui.expandpath(source),
3474 source, branches = hg.parseurl(ui.expandpath(source),
3476 opts.get('branch'))
3475 opts.get('branch'))
3477 other = hg.peer(repo, opts, source)
3476 other = hg.peer(repo, opts, source)
3478 if 'bookmarks' not in other.listkeys('namespaces'):
3477 if 'bookmarks' not in other.listkeys('namespaces'):
3479 ui.warn(_("remote doesn't support bookmarks\n"))
3478 ui.warn(_("remote doesn't support bookmarks\n"))
3480 return 0
3479 return 0
3481 ui.pager('incoming')
3480 ui.pager('incoming')
3482 ui.status(_('comparing with %s\n') % util.hidepassword(source))
3481 ui.status(_('comparing with %s\n') % util.hidepassword(source))
3483 return bookmarks.incoming(ui, repo, other)
3482 return bookmarks.incoming(ui, repo, other)
3484
3483
3485 repo._subtoppath = ui.expandpath(source)
3484 repo._subtoppath = ui.expandpath(source)
3486 try:
3485 try:
3487 return hg.incoming(ui, repo, source, opts)
3486 return hg.incoming(ui, repo, source, opts)
3488 finally:
3487 finally:
3489 del repo._subtoppath
3488 del repo._subtoppath
3490
3489
3491
3490
3492 @command('^init', remoteopts, _('[-e CMD] [--remotecmd CMD] [DEST]'),
3491 @command('^init', remoteopts, _('[-e CMD] [--remotecmd CMD] [DEST]'),
3493 norepo=True)
3492 norepo=True)
3494 def init(ui, dest=".", **opts):
3493 def init(ui, dest=".", **opts):
3495 """create a new repository in the given directory
3494 """create a new repository in the given directory
3496
3495
3497 Initialize a new repository in the given directory. If the given
3496 Initialize a new repository in the given directory. If the given
3498 directory does not exist, it will be created.
3497 directory does not exist, it will be created.
3499
3498
3500 If no directory is given, the current directory is used.
3499 If no directory is given, the current directory is used.
3501
3500
3502 It is possible to specify an ``ssh://`` URL as the destination.
3501 It is possible to specify an ``ssh://`` URL as the destination.
3503 See :hg:`help urls` for more information.
3502 See :hg:`help urls` for more information.
3504
3503
3505 Returns 0 on success.
3504 Returns 0 on success.
3506 """
3505 """
3507 opts = pycompat.byteskwargs(opts)
3506 opts = pycompat.byteskwargs(opts)
3508 hg.peer(ui, opts, ui.expandpath(dest), create=True)
3507 hg.peer(ui, opts, ui.expandpath(dest), create=True)
3509
3508
3510 @command('locate',
3509 @command('locate',
3511 [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
3510 [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
3512 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
3511 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
3513 ('f', 'fullpath', None, _('print complete paths from the filesystem root')),
3512 ('f', 'fullpath', None, _('print complete paths from the filesystem root')),
3514 ] + walkopts,
3513 ] + walkopts,
3515 _('[OPTION]... [PATTERN]...'))
3514 _('[OPTION]... [PATTERN]...'))
3516 def locate(ui, repo, *pats, **opts):
3515 def locate(ui, repo, *pats, **opts):
3517 """locate files matching specific patterns (DEPRECATED)
3516 """locate files matching specific patterns (DEPRECATED)
3518
3517
3519 Print files under Mercurial control in the working directory whose
3518 Print files under Mercurial control in the working directory whose
3520 names match the given patterns.
3519 names match the given patterns.
3521
3520
3522 By default, this command searches all directories in the working
3521 By default, this command searches all directories in the working
3523 directory. To search just the current directory and its
3522 directory. To search just the current directory and its
3524 subdirectories, use "--include .".
3523 subdirectories, use "--include .".
3525
3524
3526 If no patterns are given to match, this command prints the names
3525 If no patterns are given to match, this command prints the names
3527 of all files under Mercurial control in the working directory.
3526 of all files under Mercurial control in the working directory.
3528
3527
3529 If you want to feed the output of this command into the "xargs"
3528 If you want to feed the output of this command into the "xargs"
3530 command, use the -0 option with both this command and "xargs". This
3529 command, use the -0 option with both this command and "xargs". This
3531 will avoid the problem of "xargs" treating single filenames that
3530 will avoid the problem of "xargs" treating single filenames that
3532 contain whitespace as multiple filenames.
3531 contain whitespace as multiple filenames.
3533
3532
3534 See :hg:`help files` for a more versatile command.
3533 See :hg:`help files` for a more versatile command.
3535
3534
3536 Returns 0 if a match is found, 1 otherwise.
3535 Returns 0 if a match is found, 1 otherwise.
3537 """
3536 """
3538 opts = pycompat.byteskwargs(opts)
3537 opts = pycompat.byteskwargs(opts)
3539 if opts.get('print0'):
3538 if opts.get('print0'):
3540 end = '\0'
3539 end = '\0'
3541 else:
3540 else:
3542 end = '\n'
3541 end = '\n'
3543 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
3542 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
3544
3543
3545 ret = 1
3544 ret = 1
3546 m = scmutil.match(ctx, pats, opts, default='relglob',
3545 m = scmutil.match(ctx, pats, opts, default='relglob',
3547 badfn=lambda x, y: False)
3546 badfn=lambda x, y: False)
3548
3547
3549 ui.pager('locate')
3548 ui.pager('locate')
3550 if ctx.rev() is None:
3549 if ctx.rev() is None:
3551 # When run on the working copy, "locate" includes removed files, so
3550 # When run on the working copy, "locate" includes removed files, so
3552 # we get the list of files from the dirstate.
3551 # we get the list of files from the dirstate.
3553 filesgen = sorted(repo.dirstate.matches(m))
3552 filesgen = sorted(repo.dirstate.matches(m))
3554 else:
3553 else:
3555 filesgen = ctx.matches(m)
3554 filesgen = ctx.matches(m)
3556 for abs in filesgen:
3555 for abs in filesgen:
3557 if opts.get('fullpath'):
3556 if opts.get('fullpath'):
3558 ui.write(repo.wjoin(abs), end)
3557 ui.write(repo.wjoin(abs), end)
3559 else:
3558 else:
3560 ui.write(((pats and m.rel(abs)) or abs), end)
3559 ui.write(((pats and m.rel(abs)) or abs), end)
3561 ret = 0
3560 ret = 0
3562
3561
3563 return ret
3562 return ret
3564
3563
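The --print0 handling above switches the record terminator so that names containing whitespace survive a pipe into "xargs -0". A minimal sketch of that choice:

import sys

def emit(paths, print0=False):
    end = '\0' if print0 else '\n'
    for p in paths:
        sys.stdout.write(p + end)

emit(['README', 'notes with spaces.txt'], print0=True)
# two NUL-terminated names, safe to feed into: xargs -0 wc -l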
3565 @command('^log|history',
3564 @command('^log|history',
3566 [('f', 'follow', None,
3565 [('f', 'follow', None,
3567 _('follow changeset history, or file history across copies and renames')),
3566 _('follow changeset history, or file history across copies and renames')),
3568 ('', 'follow-first', None,
3567 ('', 'follow-first', None,
3569 _('only follow the first parent of merge changesets (DEPRECATED)')),
3568 _('only follow the first parent of merge changesets (DEPRECATED)')),
3570 ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
3569 ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
3571 ('C', 'copies', None, _('show copied files')),
3570 ('C', 'copies', None, _('show copied files')),
3572 ('k', 'keyword', [],
3571 ('k', 'keyword', [],
3573 _('do case-insensitive search for a given text'), _('TEXT')),
3572 _('do case-insensitive search for a given text'), _('TEXT')),
3574 ('r', 'rev', [], _('show the specified revision or revset'), _('REV')),
3573 ('r', 'rev', [], _('show the specified revision or revset'), _('REV')),
3575 ('L', 'line-range', [],
3574 ('L', 'line-range', [],
3576 _('follow line range of specified file (EXPERIMENTAL)'),
3575 _('follow line range of specified file (EXPERIMENTAL)'),
3577 _('FILE,RANGE')),
3576 _('FILE,RANGE')),
3578 ('', 'removed', None, _('include revisions where files were removed')),
3577 ('', 'removed', None, _('include revisions where files were removed')),
3579 ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
3578 ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
3580 ('u', 'user', [], _('revisions committed by user'), _('USER')),
3579 ('u', 'user', [], _('revisions committed by user'), _('USER')),
3581 ('', 'only-branch', [],
3580 ('', 'only-branch', [],
3582 _('show only changesets within the given named branch (DEPRECATED)'),
3581 _('show only changesets within the given named branch (DEPRECATED)'),
3583 _('BRANCH')),
3582 _('BRANCH')),
3584 ('b', 'branch', [],
3583 ('b', 'branch', [],
3585 _('show changesets within the given named branch'), _('BRANCH')),
3584 _('show changesets within the given named branch'), _('BRANCH')),
3586 ('P', 'prune', [],
3585 ('P', 'prune', [],
3587 _('do not display revision or any of its ancestors'), _('REV')),
3586 _('do not display revision or any of its ancestors'), _('REV')),
3588 ] + logopts + walkopts,
3587 ] + logopts + walkopts,
3589 _('[OPTION]... [FILE]'),
3588 _('[OPTION]... [FILE]'),
3590 inferrepo=True,
3589 inferrepo=True,
3591 intents={INTENT_READONLY})
3590 intents={INTENT_READONLY})
def log(ui, repo, *pats, **opts):
    """show revision history of entire repository or files

    Print the revision history of the specified files or the entire
    project.

    If no revision range is specified, the default is ``tip:0`` unless
    --follow is set, in which case the working directory parent is
    used as the starting revision.

    File history is shown without following rename or copy history of
    files. Use -f/--follow with a filename to follow history across
    renames and copies. --follow without a filename will only show
    ancestors of the starting revision.

    By default this command prints revision number and changeset id,
    tags, non-trivial parents, user, date and time, and a summary for
    each commit. When the -v/--verbose switch is used, the list of
    changed files and full commit message are shown.

    With --graph the revisions are shown as an ASCII art DAG with the most
    recent changeset at the top.
    'o' is a changeset, '@' is a working directory parent, '_' closes a branch,
    'x' is obsolete, '*' is unstable, and '+' represents a fork where the
    changeset from the lines below is a parent of the 'o' merge on the same
    line.
    Paths in the DAG are represented with '|', '/' and so forth. ':' in place
    of a '|' indicates one or more revisions in a path are omitted.

    .. container:: verbose

      Use -L/--line-range FILE,M:N options to follow the history of lines
      from M to N in FILE. With -p/--patch only diff hunks affecting
      specified line range will be shown. This option requires --follow;
      it can be specified multiple times. Currently, this option is not
      compatible with --graph. This option is experimental.

    .. note::

       :hg:`log --patch` may generate unexpected diff output for merge
       changesets, as it will only compare the merge changeset against
       its first parent. Also, only files different from BOTH parents
       will appear in files:.

    .. note::

       For performance reasons, :hg:`log FILE` may omit duplicate changes
       made on branches and will not show removals or mode changes. To
       see all such changes, use the --removed switch.

    .. container:: verbose

      .. note::

         The history resulting from -L/--line-range options depends on diff
         options; for instance, if whitespace changes are ignored, changes
         that only touch whitespace in the specified line range will not be
         listed.

    .. container:: verbose

      Some examples:

      - changesets with full descriptions and file lists::

          hg log -v

      - changesets ancestral to the working directory::

          hg log -f

      - last 10 commits on the current branch::

          hg log -l 10 -b .

      - changesets showing all modifications of a file, including removals::

          hg log --removed file.c

      - all changesets that touch a directory, with diffs, excluding merges::

          hg log -Mp lib/

      - all revision numbers that match a keyword::

          hg log -k bug --template "{rev}\\n"

      - the full hash identifier of the working directory parent::

          hg log -r . --template "{node}\\n"

      - list available log templates::

          hg log -T list

      - check if a given changeset is included in a tagged release::

          hg log -r "a21ccf and ancestor(1.9)"

      - find all changesets by some user in a date range::

          hg log -k alice -d "may 2008 to jul 2008"

      - summary of all changesets after the last tag::

          hg log -r "last(tagged())::" --template "{desc|firstline}\\n"

      - changesets touching lines 13 to 23 for file.c::

          hg log -L file.c,13:23

      - changesets touching lines 13 to 23 for file.c and lines 2 to 6 of
        main.c with patch::

          hg log -L file.c,13:23 -L main.c,2:6 -p

    See :hg:`help dates` for a list of formats valid for -d/--date.

    See :hg:`help revisions` for more about specifying and ordering
    revisions.

    See :hg:`help templates` for more about pre-packaged styles and
    specifying custom templates. The default template used by the log
    command can be customized via the ``ui.logtemplate`` configuration
    setting.

    Returns 0 on success.

    """
    opts = pycompat.byteskwargs(opts)
    linerange = opts.get('line_range')

    if linerange and not opts.get('follow'):
        raise error.Abort(_('--line-range requires --follow'))

    if linerange and pats:
        # TODO: take pats as patterns with no line-range filter
        raise error.Abort(
            _('FILE arguments are not compatible with --line-range option')
        )

    repo = scmutil.unhidehashlikerevs(repo, opts.get('rev'), 'nowarn')
    revs, differ = logcmdutil.getrevs(repo, pats, opts)
    if linerange:
        # TODO: should follow file history from logcmdutil._initialrevs(),
        # then filter the result by logcmdutil._makerevset() and --limit
        revs, differ = logcmdutil.getlinerangerevs(repo, revs, opts)

    getrenamed = None
    if opts.get('copies'):
        endrev = None
        if revs:
            endrev = revs.max() + 1
        getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)

    ui.pager('log')
    displayer = logcmdutil.changesetdisplayer(ui, repo, opts, differ,
                                              buffered=True)
    if opts.get('graph'):
        displayfn = logcmdutil.displaygraphrevs
    else:
        displayfn = logcmdutil.displayrevs
    displayfn(ui, repo, revs, displayer, getrenamed)

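# A minimal illustrative sketch (hypothetical helper, not part of the command
# table): the same display pipeline log() wires up above -- resolve the
# revisions, build a buffered changeset displayer, then hand both to a
# display function.
def _sketchshowlog(ui, repo, pats, opts):
    revs, differ = logcmdutil.getrevs(repo, pats, opts)
    displayer = logcmdutil.changesetdisplayer(ui, repo, opts, differ,
                                              buffered=True)
    if opts.get('graph'):
        displayfn = logcmdutil.displaygraphrevs
    else:
        displayfn = logcmdutil.displayrevs
    # the last argument (getrenamed) is only needed for --copies; None here
    displayfn(ui, repo, revs, displayer, None)
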
@command('manifest',
    [('r', 'rev', '', _('revision to display'), _('REV')),
     ('', 'all', False, _("list files from all revisions"))]
    + formatteropts,
    _('[-r REV]'),
    intents={INTENT_READONLY})
def manifest(ui, repo, node=None, rev=None, **opts):
    """output the current or given revision of the project manifest

    Print a list of version controlled files for the given revision.
    If no revision is given, the first parent of the working directory
    is used, or the null revision if no revision is checked out.

    With -v, print file permissions, symlink and executable bits.
    With --debug, print file revision hashes.

    If option --all is specified, the list of all files from all revisions
    is printed. This includes deleted and renamed files.

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)
    fm = ui.formatter('manifest', opts)

    if opts.get('all'):
        if rev or node:
            raise error.Abort(_("can't specify a revision with --all"))

        res = set()
        for rev in repo:
            ctx = repo[rev]
            res |= set(ctx.files())

        ui.pager('manifest')
        for f in sorted(res):
            fm.startitem()
            fm.write("path", '%s\n', f)
        fm.end()
        return

    if rev and node:
        raise error.Abort(_("please specify just one revision"))

    if not node:
        node = rev

    char = {'l': '@', 'x': '*', '': '', 't': 'd'}
    mode = {'l': '644', 'x': '755', '': '644', 't': '755'}
    if node:
        repo = scmutil.unhidehashlikerevs(repo, [node], 'nowarn')
    ctx = scmutil.revsingle(repo, node)
    mf = ctx.manifest()
    ui.pager('manifest')
    for f in ctx:
        fm.startitem()
        fm.context(ctx=ctx)
        fl = ctx[f].flags()
        fm.condwrite(ui.debugflag, 'hash', '%s ', hex(mf[f]))
        fm.condwrite(ui.verbose, 'mode type', '%s %1s ', mode[fl], char[fl])
        fm.write('path', '%s\n', f)
    fm.end()

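# A minimal illustrative sketch (hypothetical helper): the formatter protocol
# manifest() relies on above -- one startitem()/write() pair per emitted row,
# closed by end() so plain and templated output both flush correctly.
def _sketchwritepaths(ui, opts, files):
    fm = ui.formatter('manifest', opts)
    for f in sorted(files):
        fm.startitem()
        fm.write('path', '%s\n', f)
    fm.end()
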
@command('^merge',
    [('f', 'force', None,
     _('force a merge including outstanding changes (DEPRECATED)')),
    ('r', 'rev', '', _('revision to merge'), _('REV')),
    ('P', 'preview', None,
     _('review revisions to merge (no merge is performed)')),
    ('', 'abort', None, _('abort the ongoing merge')),
    ] + mergetoolopts,
    _('[-P] [[-r] REV]'))
def merge(ui, repo, node=None, **opts):
    """merge another revision into working directory

    The current working directory is updated with all changes made in
    the requested revision since the last common predecessor revision.

    Files that changed between either parent are marked as changed for
    the next commit and a commit must be performed before any further
    updates to the repository are allowed. The next commit will have
    two parents.

    ``--tool`` can be used to specify the merge tool used for file
    merges. It overrides the HGMERGE environment variable and your
    configuration files. See :hg:`help merge-tools` for options.

    If no revision is specified, the working directory's parent is a
    head revision, and the current branch contains exactly one other
    head, the other head is merged with by default. Otherwise, an
    explicit revision with which to merge must be provided.

    See :hg:`help resolve` for information on handling file conflicts.

    To undo an uncommitted merge, use :hg:`merge --abort` which
    will check out a clean copy of the original merge parent, losing
    all changes.

    Returns 0 on success, 1 if there are unresolved files.
    """

    opts = pycompat.byteskwargs(opts)
    abort = opts.get('abort')
    if abort and repo.dirstate.p2() == nullid:
        cmdutil.wrongtooltocontinue(repo, _('merge'))
    if abort:
        if node:
            raise error.Abort(_("cannot specify a node with --abort"))
        if opts.get('rev'):
            raise error.Abort(_("cannot specify both --rev and --abort"))
        if opts.get('preview'):
            raise error.Abort(_("cannot specify --preview with --abort"))
    if opts.get('rev') and node:
        raise error.Abort(_("please specify just one revision"))
    if not node:
        node = opts.get('rev')

    if node:
        node = scmutil.revsingle(repo, node).node()

    if not node and not abort:
        node = repo[destutil.destmerge(repo)].node()

    if opts.get('preview'):
        # find nodes that are ancestors of p2 but not of p1
        p1 = repo.lookup('.')
        p2 = node
        nodes = repo.changelog.findmissing(common=[p1], heads=[p2])

        displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
        for node in nodes:
            displayer.show(repo[node])
        displayer.close()
        return 0

    # ui.forcemerge is an internal variable, do not document
    overrides = {('ui', 'forcemerge'): opts.get('tool', '')}
    with ui.configoverride(overrides, 'merge'):
        force = opts.get('force')
        labels = ['working copy', 'merge rev']
        return hg.merge(repo, node, force=force, mergeforce=force,
                        labels=labels, abort=abort)

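# A minimal illustrative sketch (hypothetical helper): the --preview
# computation used above, i.e. the changesets that would become ancestors of
# the working directory parent if the merge with 'node' were committed.
def _sketchpreviewnodes(repo, node):
    # ancestors of node that are not ancestors of the working parent
    p1 = repo.lookup('.')
    return repo.changelog.findmissing(common=[p1], heads=[node])
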
@command('outgoing|out',
    [('f', 'force', None, _('run even when the destination is unrelated')),
    ('r', 'rev', [],
     _('a changeset intended to be included in the destination'), _('REV')),
    ('n', 'newest-first', None, _('show newest record first')),
    ('B', 'bookmarks', False, _('compare bookmarks')),
    ('b', 'branch', [], _('a specific branch you would like to push'),
     _('BRANCH')),
    ] + logopts + remoteopts + subrepoopts,
    _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]'))
def outgoing(ui, repo, dest=None, **opts):
    """show changesets not found in the destination

    Show changesets not found in the specified destination repository
    or the default push location. These are the changesets that would
    be pushed if a push was requested.

    See pull for details of valid destination formats.

    .. container:: verbose

      With -B/--bookmarks, the result of bookmark comparison between
      local and remote repositories is displayed. With -v/--verbose,
      status is also displayed for each bookmark like below::

        BM1               01234567890a added
        BM2                            deleted
        BM3               234567890abc advanced
        BM4               34567890abcd diverged
        BM5               4567890abcde changed

      The action taken when pushing depends on the
      status of each bookmark:

      :``added``: push with ``-B`` will create it
      :``deleted``: push with ``-B`` will delete it
      :``advanced``: push will update it
      :``diverged``: push with ``-B`` will update it
      :``changed``: push with ``-B`` will update it

      From the point of view of pushing behavior, bookmarks
      existing only in the remote repository are treated as
      ``deleted``, even if they were in fact added remotely.

    Returns 0 if there are outgoing changes, 1 otherwise.
    """
    # hg._outgoing() needs to re-resolve the path in order to handle #branch
    # style URLs, so don't overwrite dest.
    path = ui.paths.getpath(dest, default=('default-push', 'default'))
    if not path:
        raise error.Abort(_('default repository not configured!'),
                          hint=_("see 'hg help config.paths'"))

    opts = pycompat.byteskwargs(opts)
    if opts.get('graph'):
        logcmdutil.checkunsupportedgraphflags([], opts)
        o, other = hg._outgoing(ui, repo, dest, opts)
        if not o:
            cmdutil.outgoinghooks(ui, repo, other, opts, o)
            return

        revdag = logcmdutil.graphrevs(repo, o, opts)
        ui.pager('outgoing')
        displayer = logcmdutil.changesetdisplayer(ui, repo, opts, buffered=True)
        logcmdutil.displaygraph(ui, repo, revdag, displayer,
                                graphmod.asciiedges)
        cmdutil.outgoinghooks(ui, repo, other, opts, o)
        return 0

    if opts.get('bookmarks'):
        dest = path.pushloc or path.loc
        other = hg.peer(repo, opts, dest)
        if 'bookmarks' not in other.listkeys('namespaces'):
            ui.warn(_("remote doesn't support bookmarks\n"))
            return 0
        ui.status(_('comparing with %s\n') % util.hidepassword(dest))
        ui.pager('outgoing')
        return bookmarks.outgoing(ui, repo, other)

    repo._subtoppath = path.pushloc or path.loc
    try:
        return hg.outgoing(ui, repo, dest, opts)
    finally:
        del repo._subtoppath

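# A minimal illustrative sketch (hypothetical helper): the destination lookup
# used by outgoing() above and push() below -- an explicit DEST wins,
# otherwise 'default-push' is preferred over 'default'.
def _sketchresolvedest(ui, dest):
    path = ui.paths.getpath(dest, default=('default-push', 'default'))
    if not path:
        raise error.Abort(_('default repository not configured!'),
                          hint=_("see 'hg help config.paths'"))
    return path.pushloc or path.loc
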
@command('parents',
    [('r', 'rev', '', _('show parents of the specified revision'), _('REV')),
    ] + templateopts,
    _('[-r REV] [FILE]'),
    inferrepo=True)
def parents(ui, repo, file_=None, **opts):
    """show the parents of the working directory or revision (DEPRECATED)

    Print the working directory's parent revisions. If a revision is
    given via -r/--rev, the parent of that revision will be printed.
    If a file argument is given, the revision in which the file was
    last changed (before the working directory revision or the
    argument to --rev if given) is printed.

    This command is equivalent to::

        hg log -r "p1()+p2()" or
        hg log -r "p1(REV)+p2(REV)" or
        hg log -r "max(::p1() and file(FILE))+max(::p2() and file(FILE))" or
        hg log -r "max(::p1(REV) and file(FILE))+max(::p2(REV) and file(FILE))"

    See :hg:`summary` and :hg:`help revsets` for related information.

    Returns 0 on success.
    """

    opts = pycompat.byteskwargs(opts)
    rev = opts.get('rev')
    if rev:
        repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
    ctx = scmutil.revsingle(repo, rev, None)

    if file_:
        m = scmutil.match(ctx, (file_,), opts)
        if m.anypats() or len(m.files()) != 1:
            raise error.Abort(_('can only specify an explicit filename'))
        file_ = m.files()[0]
        filenodes = []
        for cp in ctx.parents():
            if not cp:
                continue
            try:
                filenodes.append(cp.filenode(file_))
            except error.LookupError:
                pass
        if not filenodes:
            raise error.Abort(_("'%s' not found in manifest!") % file_)
        p = []
        for fn in filenodes:
            fctx = repo.filectx(file_, fileid=fn)
            p.append(fctx.node())
    else:
        p = [cp.node() for cp in ctx.parents()]

    displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
    for n in p:
        if n != nullid:
            displayer.show(repo[n])
    displayer.close()

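# A minimal illustrative sketch (hypothetical helper): without a FILE
# argument, parents() above boils down to reading the parents of the resolved
# context and dropping the null revision.
def _sketchparentnodes(repo, rev):
    ctx = scmutil.revsingle(repo, rev, None)
    return [cp.node() for cp in ctx.parents() if cp.node() != nullid]
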
@command('paths', formatteropts, _('[NAME]'), optionalrepo=True,
    intents={INTENT_READONLY})
def paths(ui, repo, search=None, **opts):
    """show aliases for remote repositories

    Show definition of symbolic path name NAME. If no name is given,
    show definition of all available names.

    Option -q/--quiet suppresses all output when searching for NAME
    and shows only the path names when listing all definitions.

    Path names are defined in the [paths] section of your
    configuration file and in ``/etc/mercurial/hgrc``. If run inside a
    repository, ``.hg/hgrc`` is used, too.

    The path names ``default`` and ``default-push`` have a special
    meaning. When performing a push or pull operation, they are used
    as fallbacks if no location is specified on the command-line.
    When ``default-push`` is set, it will be used for push and
    ``default`` will be used for pull; otherwise ``default`` is used
    as the fallback for both. When cloning a repository, the clone
    source is written as ``default`` in ``.hg/hgrc``.

    .. note::

       ``default`` and ``default-push`` apply to all inbound (e.g.
       :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email`
       and :hg:`bundle`) operations.

    See :hg:`help urls` for more information.

    Returns 0 on success.
    """

    opts = pycompat.byteskwargs(opts)
    ui.pager('paths')
    if search:
        pathitems = [(name, path) for name, path in ui.paths.iteritems()
                     if name == search]
    else:
        pathitems = sorted(ui.paths.iteritems())

    fm = ui.formatter('paths', opts)
    if fm.isplain():
        hidepassword = util.hidepassword
    else:
        hidepassword = bytes
    if ui.quiet:
        namefmt = '%s\n'
    else:
        namefmt = '%s = '
    showsubopts = not search and not ui.quiet

    for name, path in pathitems:
        fm.startitem()
        fm.condwrite(not search, 'name', namefmt, name)
        fm.condwrite(not ui.quiet, 'url', '%s\n', hidepassword(path.rawloc))
        for subopt, value in sorted(path.suboptions.items()):
            assert subopt not in ('name', 'url')
            if showsubopts:
                fm.plain('%s:%s = ' % (name, subopt))
            fm.condwrite(showsubopts, subopt, '%s\n', value)

    fm.end()

    if search and not pathitems:
        if not ui.quiet:
            ui.warn(_("not found!\n"))
        return 1
    else:
        return 0

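# A minimal illustrative sketch (hypothetical helper): the same ui.paths
# mapping paths() iterates above, reduced to a name -> location dict with
# passwords hidden the way the plain formatter output hides them.
def _sketchpathmap(ui):
    return {name: util.hidepassword(path.rawloc)
            for name, path in ui.paths.iteritems()}
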
@command('phase',
    [('p', 'public', False, _('set changeset phase to public')),
     ('d', 'draft', False, _('set changeset phase to draft')),
     ('s', 'secret', False, _('set changeset phase to secret')),
     ('f', 'force', False, _('allow moving the boundary backward')),
     ('r', 'rev', [], _('target revision'), _('REV')),
    ],
    _('[-p|-d|-s] [-f] [-r] [REV...]'))
def phase(ui, repo, *revs, **opts):
    """set or show the current phase name

    With no argument, show the phase name of the current revision(s).

    With one of -p/--public, -d/--draft or -s/--secret, change the
    phase value of the specified revisions.

    Unless -f/--force is specified, :hg:`phase` won't move changesets from a
    lower phase to a higher phase. Phases are ordered as follows::

        public < draft < secret

    Returns 0 on success, 1 if some phases could not be changed.

    (For more information about the phases concept, see :hg:`help phases`.)
    """
    opts = pycompat.byteskwargs(opts)
    # search for a unique phase argument
    targetphase = None
    for idx, name in enumerate(phases.phasenames):
        if opts.get(name, False):
            if targetphase is not None:
                raise error.Abort(_('only one phase can be specified'))
            targetphase = idx

    # look for specified revision
    revs = list(revs)
    revs.extend(opts['rev'])
    if not revs:
        # display both parents as the second parent phase can influence
        # the phase of a merge commit
        revs = [c.rev() for c in repo[None].parents()]

    revs = scmutil.revrange(repo, revs)

    ret = 0
    if targetphase is None:
        # display
        for r in revs:
            ctx = repo[r]
            ui.write('%i: %s\n' % (ctx.rev(), ctx.phasestr()))
    else:
        with repo.lock(), repo.transaction("phase") as tr:
            # set phase
            if not revs:
                raise error.Abort(_('empty revision set'))
            nodes = [repo[r].node() for r in revs]
            # moving revisions from public to draft may hide them
            # We have to check the result on an unfiltered repository
            unfi = repo.unfiltered()
            getphase = unfi._phasecache.phase
            olddata = [getphase(unfi, r) for r in unfi]
            phases.advanceboundary(repo, tr, targetphase, nodes)
            if opts['force']:
                phases.retractboundary(repo, tr, targetphase, nodes)
            getphase = unfi._phasecache.phase
            newdata = [getphase(unfi, r) for r in unfi]
            changes = sum(newdata[r] != olddata[r] for r in unfi)
            cl = unfi.changelog
            rejected = [n for n in nodes
                        if newdata[cl.rev(n)] < targetphase]
            if rejected:
                ui.warn(_('cannot move %i changesets to a higher '
                          'phase, use --force\n') % len(rejected))
                ret = 1
            if changes:
                msg = _('phase changed for %i changesets\n') % changes
                if ret:
                    ui.status(msg)
                else:
                    ui.note(msg)
            else:
                ui.warn(_('no phases changed\n'))
    return ret

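# A minimal illustrative sketch (hypothetical helper): how the -p/-d/-s flags
# above are reduced to a single index into phases.phasenames, which is the
# integer that phases.advanceboundary()/retractboundary() expect.
def _sketchtargetphase(opts):
    targetphase = None
    for idx, name in enumerate(phases.phasenames):
        if opts.get(name, False):
            if targetphase is not None:
                raise error.Abort(_('only one phase can be specified'))
            targetphase = idx
    return targetphase
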
def postincoming(ui, repo, modheads, optupdate, checkout, brev):
    """Run after a changegroup has been added via pull/unbundle

    It takes the following arguments:

    :modheads: change of heads by pull/unbundle
    :optupdate: whether the working directory should be updated
    :checkout: update destination revision (or None for the default destination)
    :brev: a name, which might be a bookmark to be activated after updating
4207 """
4206 """
4208 if modheads == 0:
4207 if modheads == 0:
4209 return
4208 return
4210 if optupdate:
4209 if optupdate:
4211 try:
4210 try:
4212 return hg.updatetotally(ui, repo, checkout, brev)
4211 return hg.updatetotally(ui, repo, checkout, brev)
4213 except error.UpdateAbort as inst:
4212 except error.UpdateAbort as inst:
4214 msg = _("not updating: %s") % stringutil.forcebytestr(inst)
4213 msg = _("not updating: %s") % stringutil.forcebytestr(inst)
4215 hint = inst.hint
4214 hint = inst.hint
4216 raise error.UpdateAbort(msg, hint=hint)
4215 raise error.UpdateAbort(msg, hint=hint)
4217 if modheads > 1:
4216 if modheads > 1:
4218 currentbranchheads = len(repo.branchheads())
4217 currentbranchheads = len(repo.branchheads())
4219 if currentbranchheads == modheads:
4218 if currentbranchheads == modheads:
4220 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
4219 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
4221 elif currentbranchheads > 1:
4220 elif currentbranchheads > 1:
4222 ui.status(_("(run 'hg heads .' to see heads, 'hg merge' to "
4221 ui.status(_("(run 'hg heads .' to see heads, 'hg merge' to "
4223 "merge)\n"))
4222 "merge)\n"))
4224 else:
4223 else:
4225 ui.status(_("(run 'hg heads' to see heads)\n"))
4224 ui.status(_("(run 'hg heads' to see heads)\n"))
4226 elif not ui.configbool('commands', 'update.requiredest'):
4225 elif not ui.configbool('commands', 'update.requiredest'):
4227 ui.status(_("(run 'hg update' to get a working copy)\n"))
4226 ui.status(_("(run 'hg update' to get a working copy)\n"))
4228
4227
@command('^pull',
    [('u', 'update', None,
     _('update to new branch head if new descendants were pulled')),
    ('f', 'force', None, _('run even when remote repository is unrelated')),
    ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
    ('B', 'bookmark', [], _("bookmark to pull"), _('BOOKMARK')),
    ('b', 'branch', [], _('a specific branch you would like to pull'),
     _('BRANCH')),
    ] + remoteopts,
    _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]'))
def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source

    Pull changes from a remote repository to a local one.

    This finds all changes from the repository at the specified path
    or URL and adds them to a local repository (the current one unless
    -R is specified). By default, this does not update the copy of the
    project in the working directory.

    When cloning from servers that support it, Mercurial may fetch
    pre-generated data. When this is done, hooks operating on incoming
    changesets and changegroups may fire more than once, once for each
    pre-generated bundle as well as for any additional remaining
    data. See :hg:`help -e clonebundles` for more.

    Use :hg:`incoming` if you want to see what would have been added
    by a pull at the time you issued this command. If you then decide
    to add those changes to the repository, you should use :hg:`pull
    -r X` where ``X`` is the last changeset listed by :hg:`incoming`.

    If SOURCE is omitted, the 'default' path will be used.
    See :hg:`help urls` for more information.

    Specifying bookmark as ``.`` is equivalent to specifying the active
    bookmark's name.

    Returns 0 on success, 1 if an update had unresolved files.
    """

    opts = pycompat.byteskwargs(opts)
    if ui.configbool('commands', 'update.requiredest') and opts.get('update'):
        msg = _('update destination required by configuration')
        hint = _('use hg pull followed by hg update DEST')
        raise error.Abort(msg, hint=hint)

    source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
    ui.status(_('pulling from %s\n') % util.hidepassword(source))
    other = hg.peer(repo, opts, source)
    try:
        revs, checkout = hg.addbranchrevs(repo, other, branches,
                                          opts.get('rev'))


        pullopargs = {}
        if opts.get('bookmark'):
            if not revs:
                revs = []
            # The list of bookmarks used here is not the one used to actually
            # update the bookmark name. This can result in the revision pulled
            # not ending up with the name of the bookmark because of a race
            # condition on the server. (See issue 4689 for details)
            remotebookmarks = other.listkeys('bookmarks')
            remotebookmarks = bookmarks.unhexlifybookmarks(remotebookmarks)
            pullopargs['remotebookmarks'] = remotebookmarks
            for b in opts['bookmark']:
                b = repo._bookmarks.expandname(b)
                if b not in remotebookmarks:
                    raise error.Abort(_('remote bookmark %s not found!') % b)
                revs.append(hex(remotebookmarks[b]))

        if revs:
            try:
                # When 'rev' is a bookmark name, we cannot guarantee that it
                # will be updated with that name because of a race condition
                # server side. (See issue 4689 for details)
                oldrevs = revs
                revs = [] # actually, nodes
                for r in oldrevs:
                    with other.commandexecutor() as e:
                        node = e.callcommand('lookup', {'key': r}).result()

                    revs.append(node)
                    if r == checkout:
                        checkout = node
            except error.CapabilityError:
                err = _("other repository doesn't support revision lookup, "
                        "so a rev cannot be specified.")
                raise error.Abort(err)

        wlock = util.nullcontextmanager()
        if opts.get('update'):
            wlock = repo.wlock()
        with wlock:
            pullopargs.update(opts.get('opargs', {}))
            modheads = exchange.pull(repo, other, heads=revs,
                                     force=opts.get('force'),
                                     bookmarks=opts.get('bookmark', ()),
                                     opargs=pullopargs).cgresult

            # brev is a name, which might be a bookmark to be activated at
            # the end of the update. In other words, it is an explicit
            # destination of the update
            brev = None

            if checkout:
                checkout = repo.changelog.rev(checkout)

            # order below depends on implementation of
            # hg.addbranchrevs(). opts['bookmark'] is ignored,
            # because 'checkout' is determined without it.
            if opts.get('rev'):
                brev = opts['rev'][0]
            elif opts.get('branch'):
                brev = opts['branch'][0]
            else:
                brev = branches[0]
            repo._subtoppath = source
            try:
                ret = postincoming(ui, repo, modheads, opts.get('update'),
                                   checkout, brev)

            finally:
                del repo._subtoppath

    finally:
        other.close()
    return ret

@command('^push',
    [('f', 'force', None, _('force push')),
    ('r', 'rev', [],
     _('a changeset intended to be included in the destination'),
     _('REV')),
    ('B', 'bookmark', [], _("bookmark to push"), _('BOOKMARK')),
    ('b', 'branch', [],
     _('a specific branch you would like to push'), _('BRANCH')),
    ('', 'new-branch', False, _('allow pushing a new branch')),
    ('', 'pushvars', [], _('variables that can be sent to server (ADVANCED)')),
    ] + remoteopts,
    _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]'))
def push(ui, repo, dest=None, **opts):
    """push changes to the specified destination

    Push changesets from the local repository to the specified
    destination.

    This operation is symmetrical to pull: it is identical to a pull
    in the destination repository from the current one.

    By default, push will not allow creation of new heads at the
    destination, since multiple heads would make it unclear which head
    to use. In this situation, it is recommended to pull and merge
    before pushing.

    Use --new-branch if you want to allow push to create a new named
    branch that is not present at the destination. This allows you to
    only create a new branch without forcing other changes.

    .. note::

       Extra care should be taken with the -f/--force option,
       which will push all new heads on all branches, an action which will
       almost always cause confusion for collaborators.

    If -r/--rev is used, the specified revision and all its ancestors
    will be pushed to the remote repository.

    If -B/--bookmark is used, the specified bookmarked revision, its
    ancestors, and the bookmark will be pushed to the remote
    repository. Specifying ``.`` is equivalent to specifying the active
    bookmark's name.

    Please see :hg:`help urls` for important details about ``ssh://``
    URLs. If DESTINATION is omitted, a default path will be used.

    .. container:: verbose

      The --pushvars option sends strings to the server that become
      environment variables prepended with ``HG_USERVAR_``. For example,
      ``--pushvars ENABLE_FEATURE=true`` provides the server-side hooks with
      ``HG_USERVAR_ENABLE_FEATURE=true`` as part of their environment.
4411
4410
4412 pushvars can provide for user-overridable hooks as well as set debug
4411 pushvars can provide for user-overridable hooks as well as set debug
4413 levels. One example is having a hook that blocks commits containing
4412 levels. One example is having a hook that blocks commits containing
4414 conflict markers, but enables the user to override the hook if the file
4413 conflict markers, but enables the user to override the hook if the file
4415 is using conflict markers for testing purposes or the file format has
4414 is using conflict markers for testing purposes or the file format has
4416 strings that look like conflict markers.
4415 strings that look like conflict markers.
4417
4416
4418 By default, servers will ignore `--pushvars`. To enable it add the
4417 By default, servers will ignore `--pushvars`. To enable it add the
4419 following to your configuration file::
4418 following to your configuration file::
4420
4419
4421 [push]
4420 [push]
4422 pushvars.server = true
4421 pushvars.server = true
4423
4422
4424 Returns 0 if push was successful, 1 if nothing to push.
4423 Returns 0 if push was successful, 1 if nothing to push.
4425 """
4424 """
4426
4425
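    # A minimal sketch (not part of this module) of consuming a pushvar in a
    # server-side hook, per the docstring above. The hook name, script, and
    # the BYPASS_REVIEW variable are assumptions chosen for illustration.
    #
    #   # server-side .hg/hgrc
    #   [push]
    #   pushvars.server = true
    #   [hooks]
    #   pretxnchangegroup.review = test "$HG_USERVAR_BYPASS_REVIEW" = true || ./check-review.sh
    #
    #   # client
    #   $ hg push --pushvars BYPASS_REVIEW=true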
    opts = pycompat.byteskwargs(opts)
    if opts.get('bookmark'):
        ui.setconfig('bookmarks', 'pushing', opts['bookmark'], 'push')
        for b in opts['bookmark']:
            # translate -B options to -r so changesets get pushed
            b = repo._bookmarks.expandname(b)
            if b in repo._bookmarks:
                opts.setdefault('rev', []).append(b)
            else:
                # if we try to push a deleted bookmark, translate it to null
                # this lets simultaneous -r, -b options continue working
                opts.setdefault('rev', []).append("null")

    path = ui.paths.getpath(dest, default=('default-push', 'default'))
    if not path:
        raise error.Abort(_('default repository not configured!'),
                          hint=_("see 'hg help config.paths'"))
    dest = path.pushloc or path.loc
    branches = (path.branch, opts.get('branch') or [])
    ui.status(_('pushing to %s\n') % util.hidepassword(dest))
    revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
    other = hg.peer(repo, opts, dest)

    if revs:
        revs = [repo[r].node() for r in scmutil.revrange(repo, revs)]
        if not revs:
            raise error.Abort(_("specified revisions evaluate to an empty set"),
                              hint=_("use different revision arguments"))
    elif path.pushrev:
        # It doesn't make any sense to specify ancestor revisions. So limit
        # to DAG heads to make discovery simpler.
        expr = revsetlang.formatspec('heads(%r)', path.pushrev)
        revs = scmutil.revrange(repo, [expr])
        revs = [repo[rev].node() for rev in revs]
        if not revs:
            raise error.Abort(_('default push revset for path evaluates to an '
                                'empty set'))

    repo._subtoppath = dest
    try:
        # push subrepos depth-first for coherent ordering
        c = repo['.']
        subs = c.substate # only repos that are committed
        for s in sorted(subs):
            result = c.sub(s).push(opts)
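            # a falsy result from a subrepo push stops here; 'not result'
            # then yields 1, i.e. the command's failure/nothing-pushed code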
            if result == 0:
                return not result
    finally:
        del repo._subtoppath

    opargs = dict(opts.get('opargs', {})) # copy opargs since we may mutate it
    opargs.setdefault('pushvars', []).extend(opts.get('pushvars', []))

    pushop = exchange.push(repo, other, opts.get('force'), revs=revs,
                           newbranch=opts.get('new_branch'),
                           bookmarks=opts.get('bookmark', ()),
                           opargs=opargs)

    result = not pushop.cgresult

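    # the bookmark outcome, when present, can escalate the exit code to 2:
    # either directly (bkresult == 2) or when the changeset push reported
    # success but the bookmark result is non-zero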
    if pushop.bkresult is not None:
        if pushop.bkresult == 2:
            result = 2
        elif not result and pushop.bkresult:
            result = 2

    return result

@command('recover', [])
def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an
    interrupted operation. It should only be necessary when Mercurial
    suggests it.

    Returns 0 if successful, 1 if nothing to recover or verify fails.
    """
    if repo.recover():
        return hg.verify(repo)
    return 1

@command('^remove|rm',
    [('A', 'after', None, _('record delete for missing files')),
    ('f', 'force', None,
     _('forget added files, delete modified files')),
    ] + subrepoopts + walkopts + dryrunopts,
    _('[OPTION]... FILE...'),
    inferrepo=True)
def remove(ui, repo, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the current branch.

    This command schedules the files to be removed at the next commit.
    To undo a remove before that, see :hg:`revert`. To undo added
    files, see :hg:`forget`.

    .. container:: verbose

      -A/--after can be used to remove only files that have already
      been deleted, -f/--force can be used to force deletion, and -Af
      can be used to remove files from the next revision without
      deleting them from the working directory.

      The following table details the behavior of remove for different
      file states (columns) and option combinations (rows). The file
      states are Added [A], Clean [C], Modified [M] and Missing [!]
      (as reported by :hg:`status`). The actions are Warn, Remove
      (from branch) and Delete (from disk):

      ========= == == == ==
      opt/state A  C  M  !
      ========= == == == ==
      none      W  RD W  R
      -f        R  RD RD R
      -A        W  W  W  R
      -Af       R  R  R  R
      ========= == == == ==

      .. note::

         :hg:`remove` never deletes files in Added [A] state from the
         working directory, not even if ``--force`` is specified.

    Returns 0 on success, 1 if any warnings encountered.
    """

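    # Illustrative shell usage of the table above (file name is an example):
    #
    #   $ hg remove -A          # record deletions already made outside hg
    #   $ hg remove -Af foo.c   # stop tracking foo.c but keep it on disk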
    opts = pycompat.byteskwargs(opts)
    after, force = opts.get('after'), opts.get('force')
    dryrun = opts.get('dry_run')
    if not pats and not after:
        raise error.Abort(_('no files specified'))

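    # repo[None] is the working-directory context; the matcher built from it
    # resolves the command-line patterns relative to the current directory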
    m = scmutil.match(repo[None], pats, opts)
    subrepos = opts.get('subrepos')
    return cmdutil.remove(ui, repo, m, "", after, force, subrepos,
                          dryrun=dryrun)

@command('rename|move|mv',
    [('A', 'after', None, _('record a rename that has already occurred')),
    ('f', 'force', None, _('forcibly copy over an existing managed file')),
    ] + walkopts + dryrunopts,
    _('[OPTION]... SOURCE... DEST'))
def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion. If dest
    is a directory, copies are put in that directory. If dest is a
    file, there can only be one source.

    By default, this command copies the contents of files as they
    exist in the working directory. If invoked with -A/--after, the
    operation is recorded, but no copying is performed.

    This command takes effect at the next commit. To undo a rename
    before that, see :hg:`revert`.

    Returns 0 on success, 1 if errors are encountered.
    """
    opts = pycompat.byteskwargs(opts)
    with repo.wlock(False):
        return cmdutil.copy(ui, repo, pats, opts, rename=True)

@command('resolve',
    [('a', 'all', None, _('select all unresolved files')),
    ('l', 'list', None, _('list state of files needing merge')),
    ('m', 'mark', None, _('mark files as resolved')),
    ('u', 'unmark', None, _('mark files as unresolved')),
    ('n', 'no-status', None, _('hide status prefix')),
    ('', 're-merge', None, _('re-merge files'))]
    + mergetoolopts + walkopts + formatteropts,
    _('[OPTION]... [FILE]...'),
    inferrepo=True)
def resolve(ui, repo, *pats, **opts):
    """redo merges or set/view the merge status of files

    Merges with unresolved conflicts are often the result of
    non-interactive merging using the ``internal:merge`` configuration
    setting, or a command-line merge tool like ``diff3``. The resolve
    command is used to manage the files involved in a merge, after
    :hg:`merge` has been run, and before :hg:`commit` is run (i.e. the
    working directory must have two parents). See :hg:`help
    merge-tools` for information on configuring merge tools.

    The resolve command can be used in the following ways:

    - :hg:`resolve [--re-merge] [--tool TOOL] FILE...`: attempt to re-merge
      the specified files, discarding any previous merge attempts. Re-merging
      is not performed for files already marked as resolved. Use ``--all/-a``
      to select all unresolved files. ``--tool`` can be used to specify
      the merge tool used for the given files. It overrides the HGMERGE
      environment variable and your configuration files. Previous file
      contents are saved with a ``.orig`` suffix.

    - :hg:`resolve -m [FILE]`: mark a file as having been resolved
      (e.g. after having manually fixed-up the files). The default is
      to mark all unresolved files.

    - :hg:`resolve -u [FILE]...`: mark a file as unresolved. The
      default is to mark all resolved files.

    - :hg:`resolve -l`: list files which had or still have conflicts.
      In the printed list, ``U`` = unresolved and ``R`` = resolved.
      You can use ``set:unresolved()`` or ``set:resolved()`` to filter
      the list. See :hg:`help filesets` for details.

    .. note::

       Mercurial will not let you commit files with unresolved merge
       conflicts. You must use :hg:`resolve -m ...` before you can
       commit after a conflicting merge.

    Returns 0 on success, 1 if any files fail a resolve attempt.
    """

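    # Typical conflict workflow described by the docstring, sketched as shell
    # commands (illustrative only; the file name and tool are examples):
    #
    #   $ hg merge                          # leaves foo.c unresolved
    #   $ hg resolve --list                 # U foo.c
    #   $ hg resolve --tool vimdiff foo.c   # re-merge with a chosen tool
    #   $ hg resolve --mark foo.c
    #   $ hg commit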
    opts = pycompat.byteskwargs(opts)
    confirm = ui.configbool('commands', 'resolve.confirm')
    flaglist = 'all mark unmark list no_status re_merge'.split()
    all, mark, unmark, show, nostatus, remerge = \
        [opts.get(o) for o in flaglist]

    actioncount = len(list(filter(None, [show, mark, unmark, remerge])))
    if actioncount > 1:
        raise error.Abort(_("too many actions specified"))
    elif (actioncount == 0
          and ui.configbool('commands', 'resolve.explicit-re-merge')):
        hint = _('use --mark, --unmark, --list or --re-merge')
        raise error.Abort(_('no action specified'), hint=hint)
    if pats and all:
        raise error.Abort(_("can't specify --all and patterns"))
    if not (all or pats or show or mark or unmark):
        raise error.Abort(_('no files or directories specified'),
                          hint=('use --all to re-merge all unresolved files'))

    if confirm:
        if all:
            if ui.promptchoice(_(b're-merge all unresolved files (yn)?'
                                 b'$$ &Yes $$ &No')):
                raise error.Abort(_('user quit'))
        if mark and not pats:
            if ui.promptchoice(_(b'mark all unresolved files as resolved (yn)?'
                                 b'$$ &Yes $$ &No')):
                raise error.Abort(_('user quit'))
        if unmark and not pats:
            if ui.promptchoice(_(b'mark all resolved files as unresolved (yn)?'
                                 b'$$ &Yes $$ &No')):
                raise error.Abort(_('user quit'))

    if show:
        ui.pager('resolve')
        fm = ui.formatter('resolve', opts)
        ms = mergemod.mergestate.read(repo)
        wctx = repo[None]
        m = scmutil.match(wctx, pats, opts)

        # Labels and keys based on merge state. Unresolved path conflicts show
        # as 'P'. Resolved path conflicts show as 'R', the same as normal
        # resolved conflicts.
        mergestateinfo = {
            mergemod.MERGE_RECORD_UNRESOLVED: ('resolve.unresolved', 'U'),
            mergemod.MERGE_RECORD_RESOLVED: ('resolve.resolved', 'R'),
            mergemod.MERGE_RECORD_UNRESOLVED_PATH: ('resolve.unresolved', 'P'),
            mergemod.MERGE_RECORD_RESOLVED_PATH: ('resolve.resolved', 'R'),
            mergemod.MERGE_RECORD_DRIVER_RESOLVED: ('resolve.driverresolved',
                                                    'D'),
        }

        for f in ms:
            if not m(f):
                continue

            label, key = mergestateinfo[ms[f]]
            fm.startitem()
            fm.context(ctx=wctx)
            fm.condwrite(not nostatus, 'mergestatus', '%s ', key, label=label)
            fm.write('path', '%s\n', f, label=label)
        fm.end()
        return 0

    with repo.wlock():
        ms = mergemod.mergestate.read(repo)

        if not (ms.active() or repo.dirstate.p2() != nullid):
            raise error.Abort(
                _('resolve command not applicable when not merging'))

        wctx = repo[None]

        if (ms.mergedriver
            and ms.mdstate() == mergemod.MERGE_DRIVER_STATE_UNMARKED):
            proceed = mergemod.driverpreprocess(repo, ms, wctx)
            ms.commit()
            # allow mark and unmark to go through
            if not mark and not unmark and not proceed:
                return 1

        m = scmutil.match(wctx, pats, opts)
        ret = 0
        didwork = False
        runconclude = False

        tocomplete = []
        hasconflictmarkers = []
        if mark:
            markcheck = ui.config('commands', 'resolve.mark-check')
            if markcheck not in ['warn', 'abort']:
                # Treat all invalid / unrecognized values as 'none'.
                markcheck = False
        for f in ms:
            if not m(f):
                continue

            didwork = True

            # don't let driver-resolved files be marked, and run the conclude
            # step if asked to resolve
            if ms[f] == mergemod.MERGE_RECORD_DRIVER_RESOLVED:
                exact = m.exact(f)
                if mark:
                    if exact:
                        ui.warn(_('not marking %s as it is driver-resolved\n')
                                % f)
                elif unmark:
                    if exact:
                        ui.warn(_('not unmarking %s as it is driver-resolved\n')
                                % f)
                else:
                    runconclude = True
                continue

            # path conflicts must be resolved manually
            if ms[f] in (mergemod.MERGE_RECORD_UNRESOLVED_PATH,
                         mergemod.MERGE_RECORD_RESOLVED_PATH):
                if mark:
                    ms.mark(f, mergemod.MERGE_RECORD_RESOLVED_PATH)
                elif unmark:
                    ms.mark(f, mergemod.MERGE_RECORD_UNRESOLVED_PATH)
                elif ms[f] == mergemod.MERGE_RECORD_UNRESOLVED_PATH:
                    ui.warn(_('%s: path conflict must be resolved manually\n')
                            % f)
                continue

            if mark:
                if markcheck:
                    with repo.wvfs(f) as fobj:
                        fdata = fobj.read()
                    if filemerge.hasconflictmarkers(fdata) and \
                       ms[f] != mergemod.MERGE_RECORD_RESOLVED:
                        hasconflictmarkers.append(f)
                ms.mark(f, mergemod.MERGE_RECORD_RESOLVED)
            elif unmark:
                ms.mark(f, mergemod.MERGE_RECORD_UNRESOLVED)
            else:
                # backup pre-resolve (merge uses .orig for its own purposes)
                a = repo.wjoin(f)
                try:
                    util.copyfile(a, a + ".resolve")
                except (IOError, OSError) as inst:
                    if inst.errno != errno.ENOENT:
                        raise

                try:
                    # preresolve file
                    overrides = {('ui', 'forcemerge'): opts.get('tool', '')}
                    with ui.configoverride(overrides, 'resolve'):
                        complete, r = ms.preresolve(f, wctx)
                    if not complete:
                        tocomplete.append(f)
                    elif r:
                        ret = 1
                finally:
                    ms.commit()

                # replace filemerge's .orig file with our resolve file, but only
                # for merges that are complete
                if complete:
                    try:
                        util.rename(a + ".resolve",
                                    scmutil.origpath(ui, repo, a))
                    except OSError as inst:
                        if inst.errno != errno.ENOENT:
                            raise

        if hasconflictmarkers:
            ui.warn(_('warning: the following files still have conflict '
                      'markers:\n ') + '\n '.join(hasconflictmarkers) + '\n')
            if markcheck == 'abort' and not all:
                raise error.Abort(_('conflict markers detected'),
                                  hint=_('use --all to mark anyway'))

        for f in tocomplete:
            try:
                # resolve file
                overrides = {('ui', 'forcemerge'): opts.get('tool', '')}
                with ui.configoverride(overrides, 'resolve'):
                    r = ms.resolve(f, wctx)
                    if r:
                        ret = 1
            finally:
                ms.commit()

            # replace filemerge's .orig file with our resolve file
            a = repo.wjoin(f)
            try:
                util.rename(a + ".resolve", scmutil.origpath(ui, repo, a))
            except OSError as inst:
                if inst.errno != errno.ENOENT:
                    raise

        ms.commit()
        ms.recordactions()

        if not didwork and pats:
            hint = None
            if not any([p for p in pats if p.find(':') >= 0]):
                pats = ['path:%s' % p for p in pats]
                m = scmutil.match(wctx, pats, opts)
                for f in ms:
                    if not m(f):
                        continue
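                    # rebuild the option flags the user passed so the hint
                    # below suggests an equivalent 'hg resolve' invocation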
                    def flag(o):
                        if o == 're_merge':
                            return '--re-merge '
                        return '-%s ' % o[0:1]
                    flags = ''.join([flag(o) for o in flaglist if opts.get(o)])
                    hint = _("(try: hg resolve %s%s)\n") % (
                            flags,
                            ' '.join(pats))
                    break
            ui.warn(_("arguments do not match paths that need resolving\n"))
            if hint:
                ui.warn(hint)
        elif ms.mergedriver and ms.mdstate() != 's':
            # run conclude step when either a driver-resolved file is requested
            # or there are no driver-resolved files
            # we can't use 'ret' to determine whether any files are unresolved
            # because we might not have tried to resolve some
            if ((runconclude or not list(ms.driverresolved()))
                and not list(ms.unresolved())):
                proceed = mergemod.driverconclude(repo, ms, wctx)
                ms.commit()
                if not proceed:
                    return 1

        # Nudge users into finishing an unfinished operation
        unresolvedf = list(ms.unresolved())
        driverresolvedf = list(ms.driverresolved())
        if not unresolvedf and not driverresolvedf:
            ui.status(_('(no more unresolved files)\n'))
            cmdutil.checkafterresolved(repo)
        elif not unresolvedf:
            ui.status(_('(no more unresolved files -- '
                        'run "hg resolve --all" to conclude)\n'))

    return ret

@command('revert',
    [('a', 'all', None, _('revert all changes when no arguments given')),
    ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
    ('r', 'rev', '', _('revert to the specified revision'), _('REV')),
    ('C', 'no-backup', None, _('do not save backup copies of files')),
    ('i', 'interactive', None, _('interactively select the changes')),
    ] + walkopts + dryrunopts,
    _('[OPTION]... [-r REV] [NAME]...'))
def revert(ui, repo, *pats, **opts):
    """restore files to their checkout state

    .. note::

       To check out earlier revisions, you should use :hg:`update REV`.
       To cancel an uncommitted merge (and lose your changes),
       use :hg:`merge --abort`.

    With no revision specified, revert the specified files or directories
    to the contents they had in the parent of the working directory.
    This restores the contents of files to an unmodified
    state and unschedules adds, removes, copies, and renames. If the
    working directory has two parents, you must explicitly specify a
    revision.

    Using the -r/--rev or -d/--date options, revert the given files or
    directories to their states as of a specific revision. Because
    revert does not change the working directory parents, this will
    cause these files to appear modified. This can be helpful to "back
    out" some or all of an earlier change. See :hg:`backout` for a
    related method.

    Modified files are saved with a .orig suffix before reverting.
    To disable these backups, use --no-backup. It is possible to store
    the backup files in a custom directory relative to the root of the
    repository by setting the ``ui.origbackuppath`` configuration
    option.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    See :hg:`help backout` for a way to reverse the effect of an
    earlier changeset.

    Returns 0 on success.
    """

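    # Illustrative usage (revision and file name are examples): restore foo.c
    # to its state as of revision 42 without touching the working directory
    # parents:
    #
    #   $ hg revert -r 42 foo.c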
    opts = pycompat.byteskwargs(opts)
    if opts.get("date"):
        if opts.get("rev"):
            raise error.Abort(_("you can't specify a revision and a date"))
        opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])

    parent, p2 = repo.dirstate.parents()
    if not opts.get('rev') and p2 != nullid:
        # revert after merge is a trap for new users (issue2915)
        raise error.Abort(_('uncommitted merge with no revision specified'),
                          hint=_("use 'hg update' or see 'hg help revert'"))

    rev = opts.get('rev')
    if rev:
        repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
    ctx = scmutil.revsingle(repo, rev)

    if (not (pats or opts.get('include') or opts.get('exclude') or
             opts.get('all') or opts.get('interactive'))):
        msg = _("no files or directories specified")
        if p2 != nullid:
            hint = _("uncommitted merge, use --all to discard all changes,"
                     " or 'hg update -C .' to abort the merge")
            raise error.Abort(msg, hint=hint)
        dirty = any(repo.status())
        node = ctx.node()
        if node != parent:
            if dirty:
                hint = _("uncommitted changes, use --all to discard all"
                         " changes, or 'hg update %d' to update") % ctx.rev()
            else:
                hint = _("use --all to revert all files,"
                         " or 'hg update %d' to update") % ctx.rev()
        elif dirty:
            hint = _("uncommitted changes, use --all to discard all changes")
        else:
            hint = _("use --all to revert all files")
        raise error.Abort(msg, hint=hint)

    return cmdutil.revert(ui, repo, ctx, (parent, p2), *pats,
                          **pycompat.strkwargs(opts))

@command('rollback', dryrunopts +
         [('f', 'force', False, _('ignore safety measures'))])
def rollback(ui, repo, **opts):
    """roll back the last transaction (DANGEROUS) (DEPRECATED)

    Please use :hg:`commit --amend` instead of rollback to correct
    mistakes in the last commit.

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback. It will also
    restore the dirstate at the time of the last transaction, losing
    any dirstate changes since that time. This command does not alter
    the working directory.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository.

    .. container:: verbose

      For example, the following commands are transactional, and their
      effects can be rolled back:

      - commit
      - import
      - pull
      - push (with this repository as the destination)
      - unbundle

      To avoid permanent data loss, rollback will refuse to rollback a
      commit transaction if it isn't checked out. Use --force to
      override this protection.

      The rollback command can be entirely disabled by setting the
      ``ui.rollback`` configuration setting to false. If you're here
      because you want to use rollback and it's disabled, you can
      re-enable the command by setting ``ui.rollback`` to true.

    This command is not intended for use on public repositories. Once
    changes are visible for pull by other users, rolling a transaction
    back locally is ineffective (someone else may already have pulled
    the changes). Furthermore, a race is possible with readers of the
    repository; for example an in-progress pull from the repository
    may fail if a rollback is performed.

    Returns 0 on success, 1 if no rollback data is available.
    """
    if not ui.configbool('ui', 'rollback'):
        raise error.Abort(_('rollback is disabled because it is unsafe'),
                          hint=('see `hg help -v rollback` for information'))
    return repo.rollback(dryrun=opts.get(r'dry_run'),
                         force=opts.get(r'force'))

@command('root', [], intents={INTENT_READONLY})
def root(ui, repo):
    """print the root (top) of the current working directory

    Print the root directory of the current repository.

    Returns 0 on success.
    """
    ui.write(repo.root + "\n")

@command('^serve',
    [('A', 'accesslog', '', _('name of access log file to write to'),
      _('FILE')),
    ('d', 'daemon', None, _('run server in background')),
    ('', 'daemon-postexec', [], _('used internally by daemon mode')),
    ('E', 'errorlog', '', _('name of error log file to write to'), _('FILE')),
    # use string type, then we can check if something was passed
    ('p', 'port', '', _('port to listen on (default: 8000)'), _('PORT')),
    ('a', 'address', '', _('address to listen on (default: all interfaces)'),
      _('ADDR')),
    ('', 'prefix', '', _('prefix path to serve from (default: server root)'),
      _('PREFIX')),
    ('n', 'name', '',
      _('name to show in web pages (default: working directory)'), _('NAME')),
    ('', 'web-conf', '',
      _("name of the hgweb config file (see 'hg help hgweb')"), _('FILE')),
    ('', 'webdir-conf', '', _('name of the hgweb config file (DEPRECATED)'),
      _('FILE')),
    ('', 'pid-file', '', _('name of file to write process ID to'), _('FILE')),
    ('', 'stdio', None, _('for remote clients (ADVANCED)')),
    ('', 'cmdserver', '', _('for remote clients (ADVANCED)'), _('MODE')),
    ('t', 'templates', '', _('web templates to use'), _('TEMPLATE')),
    ('', 'style', '', _('template style to use'), _('STYLE')),
    ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
    ('', 'certificate', '', _('SSL certificate file'), _('FILE')),
    ('', 'print-url', None, _('start and print only the URL'))]
    + subrepoopts,
    _('[OPTION]...'),
    optionalrepo=True)
def serve(ui, repo, **opts):
    """start stand-alone webserver

    Start a local HTTP repository browser and pull server. You can use
    this for ad-hoc sharing and browsing of repositories. It is
    recommended to use a real web server to serve a repository for
    longer periods of time.

    Please note that the server does not implement access control.
    This means that, by default, anybody can read from the server and
    nobody can write to it. Set the ``web.allow-push``
    option to ``*`` to allow everybody to push to the server. You
    should use a real web server if you need to authenticate users.

    By default, the server logs accesses to stdout and errors to
    stderr. Use the -A/--accesslog and -E/--errorlog options to log to
    files.

    To have the server choose a free port number to listen on, specify
    a port number of 0; in this case, the server will print the port
    number it uses.

    Returns 0 on success.
    """

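    # Illustrative ad-hoc setup (assumed values; only safe on a trusted
    # network, since push_ssl=false disables HTTPS enforcement):
    #
    #   # .hg/hgrc of the served repository
    #   [web]
    #   allow-push = *
    #   push_ssl = false
    #
    #   $ hg serve --port 0 --print-url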
    opts = pycompat.byteskwargs(opts)
    if opts["stdio"] and opts["cmdserver"]:
        raise error.Abort(_("cannot use --stdio with --cmdserver"))
    if opts["print_url"] and ui.verbose:
        raise error.Abort(_("cannot use --print-url with --verbose"))

    if opts["stdio"]:
        if repo is None:
            raise error.RepoError(_("there is no Mercurial repository here"
                                    " (.hg not found)"))
        s = wireprotoserver.sshserver(ui, repo)
        s.serve_forever()

    service = server.createservice(ui, repo, opts)
    return server.runservice(opts, initfn=service.init, runfn=service.run)

_NOTTERSE = 'nothing'

@command('^status|st',
    [('A', 'all', None, _('show status of all files')),
    ('m', 'modified', None, _('show only modified files')),
    ('a', 'added', None, _('show only added files')),
    ('r', 'removed', None, _('show only removed files')),
    ('d', 'deleted', None, _('show only deleted (but tracked) files')),
    ('c', 'clean', None, _('show only files without changes')),
    ('u', 'unknown', None, _('show only unknown (not tracked) files')),
    ('i', 'ignored', None, _('show only ignored files')),
    ('n', 'no-status', None, _('hide status prefix')),
    ('t', 'terse', _NOTTERSE, _('show the terse output (EXPERIMENTAL)')),
    ('C', 'copies', None, _('show source of copied files')),
    ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
    ('', 'rev', [], _('show difference from revision'), _('REV')),
    ('', 'change', '', _('list the changed files of a revision'), _('REV')),
    ] + walkopts + subrepoopts + formatteropts,
    _('[OPTION]... [FILE]...'),
    inferrepo=True,
    intents={INTENT_READONLY})
def status(ui, repo, *pats, **opts):
    """show changed files in the working directory

    Show status of files in the repository. If names are given, only
    files that match are shown. Files that are clean or ignored or
    the source of a copy/move operation are not listed unless
5133 -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
5132 -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
5134 Unless options described with "show only ..." are given, the
5133 Unless options described with "show only ..." are given, the
5135 options -mardu are used.
5134 options -mardu are used.
5136
5135
5137 Option -q/--quiet hides untracked (unknown and ignored) files
5136 Option -q/--quiet hides untracked (unknown and ignored) files
5138 unless explicitly requested with -u/--unknown or -i/--ignored.
5137 unless explicitly requested with -u/--unknown or -i/--ignored.
5139
5138
5140 .. note::
5139 .. note::
5141
5140
5142 :hg:`status` may appear to disagree with diff if permissions have
5141 :hg:`status` may appear to disagree with diff if permissions have
5143 changed or a merge has occurred. The standard diff format does
5142 changed or a merge has occurred. The standard diff format does
5144 not report permission changes and diff only reports changes
5143 not report permission changes and diff only reports changes
5145 relative to one merge parent.
5144 relative to one merge parent.
5146
5145
5147 If one revision is given, it is used as the base revision.
5146 If one revision is given, it is used as the base revision.
5148 If two revisions are given, the differences between them are
5147 If two revisions are given, the differences between them are
5149 shown. The --change option can also be used as a shortcut to list
5148 shown. The --change option can also be used as a shortcut to list
5150 the changed files of a revision from its first parent.
5149 the changed files of a revision from its first parent.
5151
5150
5152 The codes used to show the status of files are::
5151 The codes used to show the status of files are::
5153
5152
5154 M = modified
5153 M = modified
5155 A = added
5154 A = added
5156 R = removed
5155 R = removed
5157 C = clean
5156 C = clean
5158 ! = missing (deleted by non-hg command, but still tracked)
5157 ! = missing (deleted by non-hg command, but still tracked)
5159 ? = not tracked
5158 ? = not tracked
5160 I = ignored
5159 I = ignored
5161 = origin of the previous file (with --copies)
5160 = origin of the previous file (with --copies)
5162
5161
5163 .. container:: verbose
5162 .. container:: verbose
5164
5163
5165 The -t/--terse option abbreviates the output by showing only the directory
5164 The -t/--terse option abbreviates the output by showing only the directory
5166 name if all the files in it share the same status. The option takes an
5165 name if all the files in it share the same status. The option takes an
5167 argument indicating the statuses to abbreviate: 'm' for 'modified', 'a'
5166 argument indicating the statuses to abbreviate: 'm' for 'modified', 'a'
5168 for 'added', 'r' for 'removed', 'd' for 'deleted', 'u' for 'unknown', 'i'
5167 for 'added', 'r' for 'removed', 'd' for 'deleted', 'u' for 'unknown', 'i'
5169 for 'ignored', and 'c' for 'clean'.
5168 for 'ignored', and 'c' for 'clean'.
5170
5169
5171 It abbreviates only those statuses which are passed. Note that clean and
5170 It abbreviates only those statuses which are passed. Note that clean and
5172 ignored files are not displayed with '--terse ic' unless the -c/--clean
5171 ignored files are not displayed with '--terse ic' unless the -c/--clean
5173 and -i/--ignored options are also used.
5172 and -i/--ignored options are also used.
5174
5173
5175 The -v/--verbose option shows information when the repository is in an
5174 The -v/--verbose option shows information when the repository is in an
5176 unfinished merge, shelve, or rebase state, etc. You can have this behavior
5175 unfinished merge, shelve, or rebase state, etc. You can have this behavior
5177 turned on by default by enabling the ``commands.status.verbose`` option.
5176 turned on by default by enabling the ``commands.status.verbose`` option.
5178
5177
5179 You can skip displaying some of these states by setting
5178 You can skip displaying some of these states by setting
5180 ``commands.status.skipstates`` to one or more of: 'bisect', 'graft',
5179 ``commands.status.skipstates`` to one or more of: 'bisect', 'graft',
5181 'histedit', 'merge', 'rebase', or 'unshelve'.
5180 'histedit', 'merge', 'rebase', or 'unshelve'.
5182
5181
5183 Examples:
5182 Examples:
5184
5183
5185 - show changes in the working directory relative to a
5184 - show changes in the working directory relative to a
5186 changeset::
5185 changeset::
5187
5186
5188 hg status --rev 9353
5187 hg status --rev 9353
5189
5188
5190 - show changes in the working directory relative to the
5189 - show changes in the working directory relative to the
5191 current directory (see :hg:`help patterns` for more information)::
5190 current directory (see :hg:`help patterns` for more information)::
5192
5191
5193 hg status re:
5192 hg status re:
5194
5193
5195 - show all changes including copies in an existing changeset::
5194 - show all changes including copies in an existing changeset::
5196
5195
5197 hg status --copies --change 9353
5196 hg status --copies --change 9353
5198
5197
5199 - get a NUL separated list of added files, suitable for xargs::
5198 - get a NUL separated list of added files, suitable for xargs::
5200
5199
5201 hg status -an0
5200 hg status -an0
5202
5201
5203 - show more information about the repository status, abbreviating
5202 - show more information about the repository status, abbreviating
5204 added, removed, modified, deleted, and untracked paths::
5203 added, removed, modified, deleted, and untracked paths::
5205
5204
5206 hg status -v -t mardu
5205 hg status -v -t mardu
5207
5206
5208 Returns 0 on success.
5207 Returns 0 on success.
5209
5208
5210 """
5209 """
5211
5210
5212 opts = pycompat.byteskwargs(opts)
5211 opts = pycompat.byteskwargs(opts)
5213 revs = opts.get('rev')
5212 revs = opts.get('rev')
5214 change = opts.get('change')
5213 change = opts.get('change')
5215 terse = opts.get('terse')
5214 terse = opts.get('terse')
5216 if terse is _NOTTERSE:
5215 if terse is _NOTTERSE:
5217 if revs:
5216 if revs:
5218 terse = ''
5217 terse = ''
5219 else:
5218 else:
5220 terse = ui.config('commands', 'status.terse')
5219 terse = ui.config('commands', 'status.terse')
5221
5220
5222 if revs and change:
5221 if revs and change:
5223 msg = _('cannot specify --rev and --change at the same time')
5222 msg = _('cannot specify --rev and --change at the same time')
5224 raise error.Abort(msg)
5223 raise error.Abort(msg)
5225 elif revs and terse:
5224 elif revs and terse:
5226 msg = _('cannot use --terse with --rev')
5225 msg = _('cannot use --terse with --rev')
5227 raise error.Abort(msg)
5226 raise error.Abort(msg)
5228 elif change:
5227 elif change:
5229 repo = scmutil.unhidehashlikerevs(repo, [change], 'nowarn')
5228 repo = scmutil.unhidehashlikerevs(repo, [change], 'nowarn')
5230 ctx2 = scmutil.revsingle(repo, change, None)
5229 ctx2 = scmutil.revsingle(repo, change, None)
5231 ctx1 = ctx2.p1()
5230 ctx1 = ctx2.p1()
5232 else:
5231 else:
5233 repo = scmutil.unhidehashlikerevs(repo, revs, 'nowarn')
5232 repo = scmutil.unhidehashlikerevs(repo, revs, 'nowarn')
5234 ctx1, ctx2 = scmutil.revpair(repo, revs)
5233 ctx1, ctx2 = scmutil.revpair(repo, revs)
5235
5234
5236 if pats or ui.configbool('commands', 'status.relative'):
5235 if pats or ui.configbool('commands', 'status.relative'):
5237 cwd = repo.getcwd()
5236 cwd = repo.getcwd()
5238 else:
5237 else:
5239 cwd = ''
5238 cwd = ''
5240
5239
5241 if opts.get('print0'):
5240 if opts.get('print0'):
5242 end = '\0'
5241 end = '\0'
5243 else:
5242 else:
5244 end = '\n'
5243 end = '\n'
5245 copy = {}
5244 copy = {}
5246 states = 'modified added removed deleted unknown ignored clean'.split()
5245 states = 'modified added removed deleted unknown ignored clean'.split()
5247 show = [k for k in states if opts.get(k)]
5246 show = [k for k in states if opts.get(k)]
5248 if opts.get('all'):
5247 if opts.get('all'):
5249 show += ui.quiet and (states[:4] + ['clean']) or states
5248 show += ui.quiet and (states[:4] + ['clean']) or states
5250
5249
5251 if not show:
5250 if not show:
5252 if ui.quiet:
5251 if ui.quiet:
5253 show = states[:4]
5252 show = states[:4]
5254 else:
5253 else:
5255 show = states[:5]
5254 show = states[:5]
5256
5255
5257 m = scmutil.match(ctx2, pats, opts)
5256 m = scmutil.match(ctx2, pats, opts)
5258 if terse:
5257 if terse:
5259 # clean and unknown need to be computed so --terse can use them
5258 # clean and unknown need to be computed so --terse can use them
5260 stat = repo.status(ctx1.node(), ctx2.node(), m,
5259 stat = repo.status(ctx1.node(), ctx2.node(), m,
5261 'ignored' in show or 'i' in terse,
5260 'ignored' in show or 'i' in terse,
5262 clean=True, unknown=True,
5261 clean=True, unknown=True,
5263 listsubrepos=opts.get('subrepos'))
5262 listsubrepos=opts.get('subrepos'))
5264
5263
5265 stat = cmdutil.tersedir(stat, terse)
5264 stat = cmdutil.tersedir(stat, terse)
5266 else:
5265 else:
5267 stat = repo.status(ctx1.node(), ctx2.node(), m,
5266 stat = repo.status(ctx1.node(), ctx2.node(), m,
5268 'ignored' in show, 'clean' in show,
5267 'ignored' in show, 'clean' in show,
5269 'unknown' in show, opts.get('subrepos'))
5268 'unknown' in show, opts.get('subrepos'))
5270
5269
5271 changestates = zip(states, pycompat.iterbytestr('MAR!?IC'), stat)
5270 changestates = zip(states, pycompat.iterbytestr('MAR!?IC'), stat)
5272
5271
5273 if (opts.get('all') or opts.get('copies')
5272 if (opts.get('all') or opts.get('copies')
5274 or ui.configbool('ui', 'statuscopies')) and not opts.get('no_status'):
5273 or ui.configbool('ui', 'statuscopies')) and not opts.get('no_status'):
5275 copy = copies.pathcopies(ctx1, ctx2, m)
5274 copy = copies.pathcopies(ctx1, ctx2, m)
5276
5275
5277 ui.pager('status')
5276 ui.pager('status')
5278 fm = ui.formatter('status', opts)
5277 fm = ui.formatter('status', opts)
5279 fmt = '%s' + end
5278 fmt = '%s' + end
5280 showchar = not opts.get('no_status')
5279 showchar = not opts.get('no_status')
5281
5280
5282 for state, char, files in changestates:
5281 for state, char, files in changestates:
5283 if state in show:
5282 if state in show:
5284 label = 'status.' + state
5283 label = 'status.' + state
5285 for f in files:
5284 for f in files:
5286 fm.startitem()
5285 fm.startitem()
5287 fm.context(ctx=ctx2)
5286 fm.context(ctx=ctx2)
5288 fm.data(path=f)
5287 fm.data(path=f)
5289 fm.condwrite(showchar, 'status', '%s ', char, label=label)
5288 fm.condwrite(showchar, 'status', '%s ', char, label=label)
5290 fm.plain(fmt % repo.pathto(f, cwd), label=label)
5289 fm.plain(fmt % repo.pathto(f, cwd), label=label)
5291 if f in copy:
5290 if f in copy:
5292 fm.data(source=copy[f])
5291 fm.data(source=copy[f])
5293 fm.plain((' %s' + end) % repo.pathto(copy[f], cwd),
5292 fm.plain((' %s' + end) % repo.pathto(copy[f], cwd),
5294 label='status.copied')
5293 label='status.copied')
5295
5294
5296 if ((ui.verbose or ui.configbool('commands', 'status.verbose'))
5295 if ((ui.verbose or ui.configbool('commands', 'status.verbose'))
5297 and not ui.plain()):
5296 and not ui.plain()):
5298 cmdutil.morestatus(repo, fm)
5297 cmdutil.morestatus(repo, fm)
5299 fm.end()
5298 fm.end()
5300
5299
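# Illustrative sketch, not taken from this changeset: the status command above
# reduces to repo.status() plus a fixed mapping of its result fields onto the
# M/A/R/!/?/I/C codes. A minimal, hedged example of driving that API directly;
# the repository path b'.' and the ui.load()/hg.repository() setup are
# assumptions for the example:
#
#   from mercurial import hg, pycompat, ui as uimod
#
#   myui = uimod.ui.load()
#   repo = hg.repository(myui, b'.')
#   st = repo.status(unknown=True, ignored=True, clean=True)
#   groups = [st.modified, st.added, st.removed, st.deleted,
#             st.unknown, st.ignored, st.clean]
#   for char, files in zip(pycompat.iterbytestr(b'MAR!?IC'), groups):
#       for f in files:
#           myui.write(b'%s %s\n' % (char, f))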
5301 @command('^summary|sum',
5300 @command('^summary|sum',
5302 [('', 'remote', None, _('check for push and pull'))],
5301 [('', 'remote', None, _('check for push and pull'))],
5303 '[--remote]',
5302 '[--remote]',
5304 intents={INTENT_READONLY})
5303 intents={INTENT_READONLY})
5305 def summary(ui, repo, **opts):
5304 def summary(ui, repo, **opts):
5306 """summarize working directory state
5305 """summarize working directory state
5307
5306
5308 This generates a brief summary of the working directory state,
5307 This generates a brief summary of the working directory state,
5309 including parents, branch, commit status, phase and available updates.
5308 including parents, branch, commit status, phase and available updates.
5310
5309
5311 With the --remote option, this will check the default paths for
5310 With the --remote option, this will check the default paths for
5312 incoming and outgoing changes. This can be time-consuming.
5311 incoming and outgoing changes. This can be time-consuming.
5313
5312
5314 Returns 0 on success.
5313 Returns 0 on success.
5315 """
5314 """
5316
5315
5317 opts = pycompat.byteskwargs(opts)
5316 opts = pycompat.byteskwargs(opts)
5318 ui.pager('summary')
5317 ui.pager('summary')
5319 ctx = repo[None]
5318 ctx = repo[None]
5320 parents = ctx.parents()
5319 parents = ctx.parents()
5321 pnode = parents[0].node()
5320 pnode = parents[0].node()
5322 marks = []
5321 marks = []
5323
5322
5324 ms = None
5323 ms = None
5325 try:
5324 try:
5326 ms = mergemod.mergestate.read(repo)
5325 ms = mergemod.mergestate.read(repo)
5327 except error.UnsupportedMergeRecords as e:
5326 except error.UnsupportedMergeRecords as e:
5328 s = ' '.join(e.recordtypes)
5327 s = ' '.join(e.recordtypes)
5329 ui.warn(
5328 ui.warn(
5330 _('warning: merge state has unsupported record types: %s\n') % s)
5329 _('warning: merge state has unsupported record types: %s\n') % s)
5331 unresolved = []
5330 unresolved = []
5332 else:
5331 else:
5333 unresolved = list(ms.unresolved())
5332 unresolved = list(ms.unresolved())
5334
5333
5335 for p in parents:
5334 for p in parents:
5336 # label with log.changeset (instead of log.parent) since this
5335 # label with log.changeset (instead of log.parent) since this
5337 # shows a working directory parent *changeset*:
5336 # shows a working directory parent *changeset*:
5338 # i18n: column positioning for "hg summary"
5337 # i18n: column positioning for "hg summary"
5339 ui.write(_('parent: %d:%s ') % (p.rev(), p),
5338 ui.write(_('parent: %d:%s ') % (p.rev(), p),
5340 label=logcmdutil.changesetlabels(p))
5339 label=logcmdutil.changesetlabels(p))
5341 ui.write(' '.join(p.tags()), label='log.tag')
5340 ui.write(' '.join(p.tags()), label='log.tag')
5342 if p.bookmarks():
5341 if p.bookmarks():
5343 marks.extend(p.bookmarks())
5342 marks.extend(p.bookmarks())
5344 if p.rev() == -1:
5343 if p.rev() == -1:
5345 if not len(repo):
5344 if not len(repo):
5346 ui.write(_(' (empty repository)'))
5345 ui.write(_(' (empty repository)'))
5347 else:
5346 else:
5348 ui.write(_(' (no revision checked out)'))
5347 ui.write(_(' (no revision checked out)'))
5349 if p.obsolete():
5348 if p.obsolete():
5350 ui.write(_(' (obsolete)'))
5349 ui.write(_(' (obsolete)'))
5351 if p.isunstable():
5350 if p.isunstable():
5352 instabilities = (ui.label(instability, 'trouble.%s' % instability)
5351 instabilities = (ui.label(instability, 'trouble.%s' % instability)
5353 for instability in p.instabilities())
5352 for instability in p.instabilities())
5354 ui.write(' ('
5353 ui.write(' ('
5355 + ', '.join(instabilities)
5354 + ', '.join(instabilities)
5356 + ')')
5355 + ')')
5357 ui.write('\n')
5356 ui.write('\n')
5358 if p.description():
5357 if p.description():
5359 ui.status(' ' + p.description().splitlines()[0].strip() + '\n',
5358 ui.status(' ' + p.description().splitlines()[0].strip() + '\n',
5360 label='log.summary')
5359 label='log.summary')
5361
5360
5362 branch = ctx.branch()
5361 branch = ctx.branch()
5363 bheads = repo.branchheads(branch)
5362 bheads = repo.branchheads(branch)
5364 # i18n: column positioning for "hg summary"
5363 # i18n: column positioning for "hg summary"
5365 m = _('branch: %s\n') % branch
5364 m = _('branch: %s\n') % branch
5366 if branch != 'default':
5365 if branch != 'default':
5367 ui.write(m, label='log.branch')
5366 ui.write(m, label='log.branch')
5368 else:
5367 else:
5369 ui.status(m, label='log.branch')
5368 ui.status(m, label='log.branch')
5370
5369
5371 if marks:
5370 if marks:
5372 active = repo._activebookmark
5371 active = repo._activebookmark
5373 # i18n: column positioning for "hg summary"
5372 # i18n: column positioning for "hg summary"
5374 ui.write(_('bookmarks:'), label='log.bookmark')
5373 ui.write(_('bookmarks:'), label='log.bookmark')
5375 if active is not None:
5374 if active is not None:
5376 if active in marks:
5375 if active in marks:
5377 ui.write(' *' + active, label=bookmarks.activebookmarklabel)
5376 ui.write(' *' + active, label=bookmarks.activebookmarklabel)
5378 marks.remove(active)
5377 marks.remove(active)
5379 else:
5378 else:
5380 ui.write(' [%s]' % active, label=bookmarks.activebookmarklabel)
5379 ui.write(' [%s]' % active, label=bookmarks.activebookmarklabel)
5381 for m in marks:
5380 for m in marks:
5382 ui.write(' ' + m, label='log.bookmark')
5381 ui.write(' ' + m, label='log.bookmark')
5383 ui.write('\n', label='log.bookmark')
5382 ui.write('\n', label='log.bookmark')
5384
5383
5385 status = repo.status(unknown=True)
5384 status = repo.status(unknown=True)
5386
5385
5387 c = repo.dirstate.copies()
5386 c = repo.dirstate.copies()
5388 copied, renamed = [], []
5387 copied, renamed = [], []
5389 for d, s in c.iteritems():
5388 for d, s in c.iteritems():
5390 if s in status.removed:
5389 if s in status.removed:
5391 status.removed.remove(s)
5390 status.removed.remove(s)
5392 renamed.append(d)
5391 renamed.append(d)
5393 else:
5392 else:
5394 copied.append(d)
5393 copied.append(d)
5395 if d in status.added:
5394 if d in status.added:
5396 status.added.remove(d)
5395 status.added.remove(d)
5397
5396
5398 subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
5397 subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
5399
5398
5400 labels = [(ui.label(_('%d modified'), 'status.modified'), status.modified),
5399 labels = [(ui.label(_('%d modified'), 'status.modified'), status.modified),
5401 (ui.label(_('%d added'), 'status.added'), status.added),
5400 (ui.label(_('%d added'), 'status.added'), status.added),
5402 (ui.label(_('%d removed'), 'status.removed'), status.removed),
5401 (ui.label(_('%d removed'), 'status.removed'), status.removed),
5403 (ui.label(_('%d renamed'), 'status.copied'), renamed),
5402 (ui.label(_('%d renamed'), 'status.copied'), renamed),
5404 (ui.label(_('%d copied'), 'status.copied'), copied),
5403 (ui.label(_('%d copied'), 'status.copied'), copied),
5405 (ui.label(_('%d deleted'), 'status.deleted'), status.deleted),
5404 (ui.label(_('%d deleted'), 'status.deleted'), status.deleted),
5406 (ui.label(_('%d unknown'), 'status.unknown'), status.unknown),
5405 (ui.label(_('%d unknown'), 'status.unknown'), status.unknown),
5407 (ui.label(_('%d unresolved'), 'resolve.unresolved'), unresolved),
5406 (ui.label(_('%d unresolved'), 'resolve.unresolved'), unresolved),
5408 (ui.label(_('%d subrepos'), 'status.modified'), subs)]
5407 (ui.label(_('%d subrepos'), 'status.modified'), subs)]
5409 t = []
5408 t = []
5410 for l, s in labels:
5409 for l, s in labels:
5411 if s:
5410 if s:
5412 t.append(l % len(s))
5411 t.append(l % len(s))
5413
5412
5414 t = ', '.join(t)
5413 t = ', '.join(t)
5415 cleanworkdir = False
5414 cleanworkdir = False
5416
5415
5417 if repo.vfs.exists('graftstate'):
5416 if repo.vfs.exists('graftstate'):
5418 t += _(' (graft in progress)')
5417 t += _(' (graft in progress)')
5419 if repo.vfs.exists('updatestate'):
5418 if repo.vfs.exists('updatestate'):
5420 t += _(' (interrupted update)')
5419 t += _(' (interrupted update)')
5421 elif len(parents) > 1:
5420 elif len(parents) > 1:
5422 t += _(' (merge)')
5421 t += _(' (merge)')
5423 elif branch != parents[0].branch():
5422 elif branch != parents[0].branch():
5424 t += _(' (new branch)')
5423 t += _(' (new branch)')
5425 elif (parents[0].closesbranch() and
5424 elif (parents[0].closesbranch() and
5426 pnode in repo.branchheads(branch, closed=True)):
5425 pnode in repo.branchheads(branch, closed=True)):
5427 t += _(' (head closed)')
5426 t += _(' (head closed)')
5428 elif not (status.modified or status.added or status.removed or renamed or
5427 elif not (status.modified or status.added or status.removed or renamed or
5429 copied or subs):
5428 copied or subs):
5430 t += _(' (clean)')
5429 t += _(' (clean)')
5431 cleanworkdir = True
5430 cleanworkdir = True
5432 elif pnode not in bheads:
5431 elif pnode not in bheads:
5433 t += _(' (new branch head)')
5432 t += _(' (new branch head)')
5434
5433
5435 if parents:
5434 if parents:
5436 pendingphase = max(p.phase() for p in parents)
5435 pendingphase = max(p.phase() for p in parents)
5437 else:
5436 else:
5438 pendingphase = phases.public
5437 pendingphase = phases.public
5439
5438
5440 if pendingphase > phases.newcommitphase(ui):
5439 if pendingphase > phases.newcommitphase(ui):
5441 t += ' (%s)' % phases.phasenames[pendingphase]
5440 t += ' (%s)' % phases.phasenames[pendingphase]
5442
5441
5443 if cleanworkdir:
5442 if cleanworkdir:
5444 # i18n: column positioning for "hg summary"
5443 # i18n: column positioning for "hg summary"
5445 ui.status(_('commit: %s\n') % t.strip())
5444 ui.status(_('commit: %s\n') % t.strip())
5446 else:
5445 else:
5447 # i18n: column positioning for "hg summary"
5446 # i18n: column positioning for "hg summary"
5448 ui.write(_('commit: %s\n') % t.strip())
5447 ui.write(_('commit: %s\n') % t.strip())
5449
5448
5450 # all ancestors of branch heads - all ancestors of parent = new csets
5449 # all ancestors of branch heads - all ancestors of parent = new csets
5451 new = len(repo.changelog.findmissing([pctx.node() for pctx in parents],
5450 new = len(repo.changelog.findmissing([pctx.node() for pctx in parents],
5452 bheads))
5451 bheads))
5453
5452
5454 if new == 0:
5453 if new == 0:
5455 # i18n: column positioning for "hg summary"
5454 # i18n: column positioning for "hg summary"
5456 ui.status(_('update: (current)\n'))
5455 ui.status(_('update: (current)\n'))
5457 elif pnode not in bheads:
5456 elif pnode not in bheads:
5458 # i18n: column positioning for "hg summary"
5457 # i18n: column positioning for "hg summary"
5459 ui.write(_('update: %d new changesets (update)\n') % new)
5458 ui.write(_('update: %d new changesets (update)\n') % new)
5460 else:
5459 else:
5461 # i18n: column positioning for "hg summary"
5460 # i18n: column positioning for "hg summary"
5462 ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
5461 ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
5463 (new, len(bheads)))
5462 (new, len(bheads)))
5464
5463
5465 t = []
5464 t = []
5466 draft = len(repo.revs('draft()'))
5465 draft = len(repo.revs('draft()'))
5467 if draft:
5466 if draft:
5468 t.append(_('%d draft') % draft)
5467 t.append(_('%d draft') % draft)
5469 secret = len(repo.revs('secret()'))
5468 secret = len(repo.revs('secret()'))
5470 if secret:
5469 if secret:
5471 t.append(_('%d secret') % secret)
5470 t.append(_('%d secret') % secret)
5472
5471
5473 if draft or secret:
5472 if draft or secret:
5474 ui.status(_('phases: %s\n') % ', '.join(t))
5473 ui.status(_('phases: %s\n') % ', '.join(t))
5475
5474
5476 if obsolete.isenabled(repo, obsolete.createmarkersopt):
5475 if obsolete.isenabled(repo, obsolete.createmarkersopt):
5477 for trouble in ("orphan", "contentdivergent", "phasedivergent"):
5476 for trouble in ("orphan", "contentdivergent", "phasedivergent"):
5478 numtrouble = len(repo.revs(trouble + "()"))
5477 numtrouble = len(repo.revs(trouble + "()"))
5479 # We write all the possibilities to ease translation
5478 # We write all the possibilities to ease translation
5480 troublemsg = {
5479 troublemsg = {
5481 "orphan": _("orphan: %d changesets"),
5480 "orphan": _("orphan: %d changesets"),
5482 "contentdivergent": _("content-divergent: %d changesets"),
5481 "contentdivergent": _("content-divergent: %d changesets"),
5483 "phasedivergent": _("phase-divergent: %d changesets"),
5482 "phasedivergent": _("phase-divergent: %d changesets"),
5484 }
5483 }
5485 if numtrouble > 0:
5484 if numtrouble > 0:
5486 ui.status(troublemsg[trouble] % numtrouble + "\n")
5485 ui.status(troublemsg[trouble] % numtrouble + "\n")
5487
5486
5488 cmdutil.summaryhooks(ui, repo)
5487 cmdutil.summaryhooks(ui, repo)
5489
5488
5490 if opts.get('remote'):
5489 if opts.get('remote'):
5491 needsincoming, needsoutgoing = True, True
5490 needsincoming, needsoutgoing = True, True
5492 else:
5491 else:
5493 needsincoming, needsoutgoing = False, False
5492 needsincoming, needsoutgoing = False, False
5494 for i, o in cmdutil.summaryremotehooks(ui, repo, opts, None):
5493 for i, o in cmdutil.summaryremotehooks(ui, repo, opts, None):
5495 if i:
5494 if i:
5496 needsincoming = True
5495 needsincoming = True
5497 if o:
5496 if o:
5498 needsoutgoing = True
5497 needsoutgoing = True
5499 if not needsincoming and not needsoutgoing:
5498 if not needsincoming and not needsoutgoing:
5500 return
5499 return
5501
5500
5502 def getincoming():
5501 def getincoming():
5503 source, branches = hg.parseurl(ui.expandpath('default'))
5502 source, branches = hg.parseurl(ui.expandpath('default'))
5504 sbranch = branches[0]
5503 sbranch = branches[0]
5505 try:
5504 try:
5506 other = hg.peer(repo, {}, source)
5505 other = hg.peer(repo, {}, source)
5507 except error.RepoError:
5506 except error.RepoError:
5508 if opts.get('remote'):
5507 if opts.get('remote'):
5509 raise
5508 raise
5510 return source, sbranch, None, None, None
5509 return source, sbranch, None, None, None
5511 revs, checkout = hg.addbranchrevs(repo, other, branches, None)
5510 revs, checkout = hg.addbranchrevs(repo, other, branches, None)
5512 if revs:
5511 if revs:
5513 revs = [other.lookup(rev) for rev in revs]
5512 revs = [other.lookup(rev) for rev in revs]
5514 ui.debug('comparing with %s\n' % util.hidepassword(source))
5513 ui.debug('comparing with %s\n' % util.hidepassword(source))
5515 repo.ui.pushbuffer()
5514 repo.ui.pushbuffer()
5516 commoninc = discovery.findcommonincoming(repo, other, heads=revs)
5515 commoninc = discovery.findcommonincoming(repo, other, heads=revs)
5517 repo.ui.popbuffer()
5516 repo.ui.popbuffer()
5518 return source, sbranch, other, commoninc, commoninc[1]
5517 return source, sbranch, other, commoninc, commoninc[1]
5519
5518
5520 if needsincoming:
5519 if needsincoming:
5521 source, sbranch, sother, commoninc, incoming = getincoming()
5520 source, sbranch, sother, commoninc, incoming = getincoming()
5522 else:
5521 else:
5523 source = sbranch = sother = commoninc = incoming = None
5522 source = sbranch = sother = commoninc = incoming = None
5524
5523
5525 def getoutgoing():
5524 def getoutgoing():
5526 dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
5525 dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
5527 dbranch = branches[0]
5526 dbranch = branches[0]
5528 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
5527 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
5529 if source != dest:
5528 if source != dest:
5530 try:
5529 try:
5531 dother = hg.peer(repo, {}, dest)
5530 dother = hg.peer(repo, {}, dest)
5532 except error.RepoError:
5531 except error.RepoError:
5533 if opts.get('remote'):
5532 if opts.get('remote'):
5534 raise
5533 raise
5535 return dest, dbranch, None, None
5534 return dest, dbranch, None, None
5536 ui.debug('comparing with %s\n' % util.hidepassword(dest))
5535 ui.debug('comparing with %s\n' % util.hidepassword(dest))
5537 elif sother is None:
5536 elif sother is None:
5538 # there is no explicit destination peer, but the source one is invalid
5537 # there is no explicit destination peer, but the source one is invalid
5539 return dest, dbranch, None, None
5538 return dest, dbranch, None, None
5540 else:
5539 else:
5541 dother = sother
5540 dother = sother
5542 if (source != dest or (sbranch is not None and sbranch != dbranch)):
5541 if (source != dest or (sbranch is not None and sbranch != dbranch)):
5543 common = None
5542 common = None
5544 else:
5543 else:
5545 common = commoninc
5544 common = commoninc
5546 if revs:
5545 if revs:
5547 revs = [repo.lookup(rev) for rev in revs]
5546 revs = [repo.lookup(rev) for rev in revs]
5548 repo.ui.pushbuffer()
5547 repo.ui.pushbuffer()
5549 outgoing = discovery.findcommonoutgoing(repo, dother, onlyheads=revs,
5548 outgoing = discovery.findcommonoutgoing(repo, dother, onlyheads=revs,
5550 commoninc=common)
5549 commoninc=common)
5551 repo.ui.popbuffer()
5550 repo.ui.popbuffer()
5552 return dest, dbranch, dother, outgoing
5551 return dest, dbranch, dother, outgoing
5553
5552
5554 if needsoutgoing:
5553 if needsoutgoing:
5555 dest, dbranch, dother, outgoing = getoutgoing()
5554 dest, dbranch, dother, outgoing = getoutgoing()
5556 else:
5555 else:
5557 dest = dbranch = dother = outgoing = None
5556 dest = dbranch = dother = outgoing = None
5558
5557
5559 if opts.get('remote'):
5558 if opts.get('remote'):
5560 t = []
5559 t = []
5561 if incoming:
5560 if incoming:
5562 t.append(_('1 or more incoming'))
5561 t.append(_('1 or more incoming'))
5563 o = outgoing.missing
5562 o = outgoing.missing
5564 if o:
5563 if o:
5565 t.append(_('%d outgoing') % len(o))
5564 t.append(_('%d outgoing') % len(o))
5566 other = dother or sother
5565 other = dother or sother
5567 if 'bookmarks' in other.listkeys('namespaces'):
5566 if 'bookmarks' in other.listkeys('namespaces'):
5568 counts = bookmarks.summary(repo, other)
5567 counts = bookmarks.summary(repo, other)
5569 if counts[0] > 0:
5568 if counts[0] > 0:
5570 t.append(_('%d incoming bookmarks') % counts[0])
5569 t.append(_('%d incoming bookmarks') % counts[0])
5571 if counts[1] > 0:
5570 if counts[1] > 0:
5572 t.append(_('%d outgoing bookmarks') % counts[1])
5571 t.append(_('%d outgoing bookmarks') % counts[1])
5573
5572
5574 if t:
5573 if t:
5575 # i18n: column positioning for "hg summary"
5574 # i18n: column positioning for "hg summary"
5576 ui.write(_('remote: %s\n') % (', '.join(t)))
5575 ui.write(_('remote: %s\n') % (', '.join(t)))
5577 else:
5576 else:
5578 # i18n: column positioning for "hg summary"
5577 # i18n: column positioning for "hg summary"
5579 ui.status(_('remote: (synced)\n'))
5578 ui.status(_('remote: (synced)\n'))
5580
5579
5581 cmdutil.summaryremotehooks(ui, repo, opts,
5580 cmdutil.summaryremotehooks(ui, repo, opts,
5582 ((source, sbranch, sother, commoninc),
5581 ((source, sbranch, sother, commoninc),
5583 (dest, dbranch, dother, outgoing)))
5582 (dest, dbranch, dother, outgoing)))
5584
5583
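# Illustrative sketch, not taken from this changeset: the "update: N new
# changesets" line above is computed as all ancestors of the current branch
# heads minus all ancestors of the working directory parents. A hedged
# standalone version of that calculation (repository path and ui construction
# are assumptions for the example):
#
#   from mercurial import hg, ui as uimod
#
#   myui = uimod.ui.load()
#   repo = hg.repository(myui, b'.')
#   wctx = repo[None]
#   parents = wctx.parents()
#   bheads = repo.branchheads(wctx.branch())
#   new = len(repo.changelog.findmissing([p.node() for p in parents], bheads))
#   myui.write(b'%d new changeset(s) reachable via "hg update"\n' % new)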
5585 @command('tag',
5584 @command('tag',
5586 [('f', 'force', None, _('force tag')),
5585 [('f', 'force', None, _('force tag')),
5587 ('l', 'local', None, _('make the tag local')),
5586 ('l', 'local', None, _('make the tag local')),
5588 ('r', 'rev', '', _('revision to tag'), _('REV')),
5587 ('r', 'rev', '', _('revision to tag'), _('REV')),
5589 ('', 'remove', None, _('remove a tag')),
5588 ('', 'remove', None, _('remove a tag')),
5590 # -l/--local is already there, commitopts cannot be used
5589 # -l/--local is already there, commitopts cannot be used
5591 ('e', 'edit', None, _('invoke editor on commit messages')),
5590 ('e', 'edit', None, _('invoke editor on commit messages')),
5592 ('m', 'message', '', _('use text as commit message'), _('TEXT')),
5591 ('m', 'message', '', _('use text as commit message'), _('TEXT')),
5593 ] + commitopts2,
5592 ] + commitopts2,
5594 _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...'))
5593 _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...'))
5595 def tag(ui, repo, name1, *names, **opts):
5594 def tag(ui, repo, name1, *names, **opts):
5596 """add one or more tags for the current or given revision
5595 """add one or more tags for the current or given revision
5597
5596
5598 Name a particular revision using <name>.
5597 Name a particular revision using <name>.
5599
5598
5600 Tags are used to name particular revisions of the repository and are
5599 Tags are used to name particular revisions of the repository and are
5601 very useful to compare different revisions, to go back to significant
5600 very useful to compare different revisions, to go back to significant
5602 earlier versions or to mark branch points as releases, etc. Changing
5601 earlier versions or to mark branch points as releases, etc. Changing
5603 an existing tag is normally disallowed; use -f/--force to override.
5602 an existing tag is normally disallowed; use -f/--force to override.
5604
5603
5605 If no revision is given, the parent of the working directory is
5604 If no revision is given, the parent of the working directory is
5606 used.
5605 used.
5607
5606
5608 To facilitate version control, distribution, and merging of tags,
5607 To facilitate version control, distribution, and merging of tags,
5609 they are stored as a file named ".hgtags" which is managed similarly
5608 they are stored as a file named ".hgtags" which is managed similarly
5610 to other project files and can be hand-edited if necessary. This
5609 to other project files and can be hand-edited if necessary. This
5611 also means that tagging creates a new commit. The file
5610 also means that tagging creates a new commit. The file
5612 ".hg/localtags" is used for local tags (not shared among
5611 ".hg/localtags" is used for local tags (not shared among
5613 repositories).
5612 repositories).
5614
5613
5615 Tag commits are usually made at the head of a branch. If the parent
5614 Tag commits are usually made at the head of a branch. If the parent
5616 of the working directory is not a branch head, :hg:`tag` aborts; use
5615 of the working directory is not a branch head, :hg:`tag` aborts; use
5617 -f/--force to force the tag commit to be based on a non-head
5616 -f/--force to force the tag commit to be based on a non-head
5618 changeset.
5617 changeset.
5619
5618
5620 See :hg:`help dates` for a list of formats valid for -d/--date.
5619 See :hg:`help dates` for a list of formats valid for -d/--date.
5621
5620
5622 Since tag names have priority over branch names during revision
5621 Since tag names have priority over branch names during revision
5623 lookup, using an existing branch name as a tag name is discouraged.
5622 lookup, using an existing branch name as a tag name is discouraged.
5624
5623
5625 Returns 0 on success.
5624 Returns 0 on success.
5626 """
5625 """
5627 opts = pycompat.byteskwargs(opts)
5626 opts = pycompat.byteskwargs(opts)
5628 with repo.wlock(), repo.lock():
5627 with repo.wlock(), repo.lock():
5629 rev_ = "."
5628 rev_ = "."
5630 names = [t.strip() for t in (name1,) + names]
5629 names = [t.strip() for t in (name1,) + names]
5631 if len(names) != len(set(names)):
5630 if len(names) != len(set(names)):
5632 raise error.Abort(_('tag names must be unique'))
5631 raise error.Abort(_('tag names must be unique'))
5633 for n in names:
5632 for n in names:
5634 scmutil.checknewlabel(repo, n, 'tag')
5633 scmutil.checknewlabel(repo, n, 'tag')
5635 if not n:
5634 if not n:
5636 raise error.Abort(_('tag names cannot consist entirely of '
5635 raise error.Abort(_('tag names cannot consist entirely of '
5637 'whitespace'))
5636 'whitespace'))
5638 if opts.get('rev') and opts.get('remove'):
5637 if opts.get('rev') and opts.get('remove'):
5639 raise error.Abort(_("--rev and --remove are incompatible"))
5638 raise error.Abort(_("--rev and --remove are incompatible"))
5640 if opts.get('rev'):
5639 if opts.get('rev'):
5641 rev_ = opts['rev']
5640 rev_ = opts['rev']
5642 message = opts.get('message')
5641 message = opts.get('message')
5643 if opts.get('remove'):
5642 if opts.get('remove'):
5644 if opts.get('local'):
5643 if opts.get('local'):
5645 expectedtype = 'local'
5644 expectedtype = 'local'
5646 else:
5645 else:
5647 expectedtype = 'global'
5646 expectedtype = 'global'
5648
5647
5649 for n in names:
5648 for n in names:
5650 if not repo.tagtype(n):
5649 if not repo.tagtype(n):
5651 raise error.Abort(_("tag '%s' does not exist") % n)
5650 raise error.Abort(_("tag '%s' does not exist") % n)
5652 if repo.tagtype(n) != expectedtype:
5651 if repo.tagtype(n) != expectedtype:
5653 if expectedtype == 'global':
5652 if expectedtype == 'global':
5654 raise error.Abort(_("tag '%s' is not a global tag") % n)
5653 raise error.Abort(_("tag '%s' is not a global tag") % n)
5655 else:
5654 else:
5656 raise error.Abort(_("tag '%s' is not a local tag") % n)
5655 raise error.Abort(_("tag '%s' is not a local tag") % n)
5657 rev_ = 'null'
5656 rev_ = 'null'
5658 if not message:
5657 if not message:
5659 # we don't translate commit messages
5658 # we don't translate commit messages
5660 message = 'Removed tag %s' % ', '.join(names)
5659 message = 'Removed tag %s' % ', '.join(names)
5661 elif not opts.get('force'):
5660 elif not opts.get('force'):
5662 for n in names:
5661 for n in names:
5663 if n in repo.tags():
5662 if n in repo.tags():
5664 raise error.Abort(_("tag '%s' already exists "
5663 raise error.Abort(_("tag '%s' already exists "
5665 "(use -f to force)") % n)
5664 "(use -f to force)") % n)
5666 if not opts.get('local'):
5665 if not opts.get('local'):
5667 p1, p2 = repo.dirstate.parents()
5666 p1, p2 = repo.dirstate.parents()
5668 if p2 != nullid:
5667 if p2 != nullid:
5669 raise error.Abort(_('uncommitted merge'))
5668 raise error.Abort(_('uncommitted merge'))
5670 bheads = repo.branchheads()
5669 bheads = repo.branchheads()
5671 if not opts.get('force') and bheads and p1 not in bheads:
5670 if not opts.get('force') and bheads and p1 not in bheads:
5672 raise error.Abort(_('working directory is not at a branch head '
5671 raise error.Abort(_('working directory is not at a branch head '
5673 '(use -f to force)'))
5672 '(use -f to force)'))
5674 node = scmutil.revsingle(repo, rev_).node()
5673 node = scmutil.revsingle(repo, rev_).node()
5675
5674
5676 if not message:
5675 if not message:
5677 # we don't translate commit messages
5676 # we don't translate commit messages
5678 message = ('Added tag %s for changeset %s' %
5677 message = ('Added tag %s for changeset %s' %
5679 (', '.join(names), short(node)))
5678 (', '.join(names), short(node)))
5680
5679
5681 date = opts.get('date')
5680 date = opts.get('date')
5682 if date:
5681 if date:
5683 date = dateutil.parsedate(date)
5682 date = dateutil.parsedate(date)
5684
5683
5685 if opts.get('remove'):
5684 if opts.get('remove'):
5686 editform = 'tag.remove'
5685 editform = 'tag.remove'
5687 else:
5686 else:
5688 editform = 'tag.add'
5687 editform = 'tag.add'
5689 editor = cmdutil.getcommiteditor(editform=editform,
5688 editor = cmdutil.getcommiteditor(editform=editform,
5690 **pycompat.strkwargs(opts))
5689 **pycompat.strkwargs(opts))
5691
5690
5692 # don't allow tagging the null rev
5691 # don't allow tagging the null rev
5693 if (not opts.get('remove') and
5692 if (not opts.get('remove') and
5694 scmutil.revsingle(repo, rev_).rev() == nullrev):
5693 scmutil.revsingle(repo, rev_).rev() == nullrev):
5695 raise error.Abort(_("cannot tag null revision"))
5694 raise error.Abort(_("cannot tag null revision"))
5696
5695
5697 tagsmod.tag(repo, names, node, message, opts.get('local'),
5696 tagsmod.tag(repo, names, node, message, opts.get('local'),
5698 opts.get('user'), date, editor=editor)
5697 opts.get('user'), date, editor=editor)
5699
5698
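# Illustrative sketch, not taken from this changeset: the core of "hg tag" is
# resolving a revision and calling tagsmod.tag() under the usual locks. The
# tag name, message and repository path below are invented placeholders:
#
#   from mercurial import hg, scmutil, tags as tagsmod, ui as uimod
#
#   myui = uimod.ui.load()
#   repo = hg.repository(myui, b'.')
#   with repo.wlock(), repo.lock():
#       node = scmutil.revsingle(repo, b'.').node()
#       tagsmod.tag(repo, [b'example-tag'], node,
#                   b'Added tag example-tag', False,  # local=False -> .hgtags
#                   None, None)                       # default user and date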
5700 @command('tags', formatteropts, '', intents={INTENT_READONLY})
5699 @command('tags', formatteropts, '', intents={INTENT_READONLY})
5701 def tags(ui, repo, **opts):
5700 def tags(ui, repo, **opts):
5702 """list repository tags
5701 """list repository tags
5703
5702
5704 This lists both regular and local tags. When the -v/--verbose
5703 This lists both regular and local tags. When the -v/--verbose
5705 switch is used, a third column "local" is printed for local tags.
5704 switch is used, a third column "local" is printed for local tags.
5706 When the -q/--quiet switch is used, only the tag name is printed.
5705 When the -q/--quiet switch is used, only the tag name is printed.
5707
5706
5708 Returns 0 on success.
5707 Returns 0 on success.
5709 """
5708 """
5710
5709
5711 opts = pycompat.byteskwargs(opts)
5710 opts = pycompat.byteskwargs(opts)
5712 ui.pager('tags')
5711 ui.pager('tags')
5713 fm = ui.formatter('tags', opts)
5712 fm = ui.formatter('tags', opts)
5714 hexfunc = fm.hexfunc
5713 hexfunc = fm.hexfunc
5715 tagtype = ""
5714 tagtype = ""
5716
5715
5717 for t, n in reversed(repo.tagslist()):
5716 for t, n in reversed(repo.tagslist()):
5718 hn = hexfunc(n)
5717 hn = hexfunc(n)
5719 label = 'tags.normal'
5718 label = 'tags.normal'
5720 tagtype = ''
5719 tagtype = ''
5721 if repo.tagtype(t) == 'local':
5720 if repo.tagtype(t) == 'local':
5722 label = 'tags.local'
5721 label = 'tags.local'
5723 tagtype = 'local'
5722 tagtype = 'local'
5724
5723
5725 fm.startitem()
5724 fm.startitem()
5726 fm.context(repo=repo)
5725 fm.context(repo=repo)
5727 fm.write('tag', '%s', t, label=label)
5726 fm.write('tag', '%s', t, label=label)
5728 fmt = " " * (30 - encoding.colwidth(t)) + ' %5d:%s'
5727 fmt = " " * (30 - encoding.colwidth(t)) + ' %5d:%s'
5729 fm.condwrite(not ui.quiet, 'rev node', fmt,
5728 fm.condwrite(not ui.quiet, 'rev node', fmt,
5730 repo.changelog.rev(n), hn, label=label)
5729 repo.changelog.rev(n), hn, label=label)
5731 fm.condwrite(ui.verbose and tagtype, 'type', ' %s',
5730 fm.condwrite(ui.verbose and tagtype, 'type', ' %s',
5732 tagtype, label=label)
5731 tagtype, label=label)
5733 fm.plain('\n')
5732 fm.plain('\n')
5734 fm.end()
5733 fm.end()
5735
5734
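# Illustrative sketch, not taken from this changeset: the tags listing above is
# driven by repo.tagslist() and repo.tagtype(). A hedged example that prints
# each tag's name, revision and kind (ui/repo setup is an assumption):
#
#   from mercurial import hg, node as nodemod, ui as uimod
#
#   myui = uimod.ui.load()
#   repo = hg.repository(myui, b'.')
#   for name, n in reversed(repo.tagslist()):
#       kind = repo.tagtype(name) or b'global'
#       myui.write(b'%s %d:%s (%s)\n'
#                  % (name, repo.changelog.rev(n), nodemod.short(n), kind))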
5736 @command('tip',
5735 @command('tip',
5737 [('p', 'patch', None, _('show patch')),
5736 [('p', 'patch', None, _('show patch')),
5738 ('g', 'git', None, _('use git extended diff format')),
5737 ('g', 'git', None, _('use git extended diff format')),
5739 ] + templateopts,
5738 ] + templateopts,
5740 _('[-p] [-g]'))
5739 _('[-p] [-g]'))
5741 def tip(ui, repo, **opts):
5740 def tip(ui, repo, **opts):
5742 """show the tip revision (DEPRECATED)
5741 """show the tip revision (DEPRECATED)
5743
5742
5744 The tip revision (usually just called the tip) is the changeset
5743 The tip revision (usually just called the tip) is the changeset
5745 most recently added to the repository (and therefore the most
5744 most recently added to the repository (and therefore the most
5746 recently changed head).
5745 recently changed head).
5747
5746
5748 If you have just made a commit, that commit will be the tip. If
5747 If you have just made a commit, that commit will be the tip. If
5749 you have just pulled changes from another repository, the tip of
5748 you have just pulled changes from another repository, the tip of
5750 that repository becomes the current tip. The "tip" tag is special
5749 that repository becomes the current tip. The "tip" tag is special
5751 and cannot be renamed or assigned to a different changeset.
5750 and cannot be renamed or assigned to a different changeset.
5752
5751
5753 This command is deprecated, please use :hg:`heads` instead.
5752 This command is deprecated, please use :hg:`heads` instead.
5754
5753
5755 Returns 0 on success.
5754 Returns 0 on success.
5756 """
5755 """
5757 opts = pycompat.byteskwargs(opts)
5756 opts = pycompat.byteskwargs(opts)
5758 displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
5757 displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
5759 displayer.show(repo['tip'])
5758 displayer.show(repo['tip'])
5760 displayer.close()
5759 displayer.close()
5761
5760
5762 @command('unbundle',
5761 @command('unbundle',
5763 [('u', 'update', None,
5762 [('u', 'update', None,
5764 _('update to new branch head if changesets were unbundled'))],
5763 _('update to new branch head if changesets were unbundled'))],
5765 _('[-u] FILE...'))
5764 _('[-u] FILE...'))
5766 def unbundle(ui, repo, fname1, *fnames, **opts):
5765 def unbundle(ui, repo, fname1, *fnames, **opts):
5767 """apply one or more bundle files
5766 """apply one or more bundle files
5768
5767
5769 Apply one or more bundle files generated by :hg:`bundle`.
5768 Apply one or more bundle files generated by :hg:`bundle`.
5770
5769
5771 Returns 0 on success, 1 if an update has unresolved files.
5770 Returns 0 on success, 1 if an update has unresolved files.
5772 """
5771 """
5773 fnames = (fname1,) + fnames
5772 fnames = (fname1,) + fnames
5774
5773
5775 with repo.lock():
5774 with repo.lock():
5776 for fname in fnames:
5775 for fname in fnames:
5777 f = hg.openpath(ui, fname)
5776 f = hg.openpath(ui, fname)
5778 gen = exchange.readbundle(ui, f, fname)
5777 gen = exchange.readbundle(ui, f, fname)
5779 if isinstance(gen, streamclone.streamcloneapplier):
5778 if isinstance(gen, streamclone.streamcloneapplier):
5780 raise error.Abort(
5779 raise error.Abort(
5781 _('packed bundles cannot be applied with '
5780 _('packed bundles cannot be applied with '
5782 '"hg unbundle"'),
5781 '"hg unbundle"'),
5783 hint=_('use "hg debugapplystreamclonebundle"'))
5782 hint=_('use "hg debugapplystreamclonebundle"'))
5784 url = 'bundle:' + fname
5783 url = 'bundle:' + fname
5785 try:
5784 try:
5786 txnname = 'unbundle'
5785 txnname = 'unbundle'
5787 if not isinstance(gen, bundle2.unbundle20):
5786 if not isinstance(gen, bundle2.unbundle20):
5788 txnname = 'unbundle\n%s' % util.hidepassword(url)
5787 txnname = 'unbundle\n%s' % util.hidepassword(url)
5789 with repo.transaction(txnname) as tr:
5788 with repo.transaction(txnname) as tr:
5790 op = bundle2.applybundle(repo, gen, tr, source='unbundle',
5789 op = bundle2.applybundle(repo, gen, tr, source='unbundle',
5791 url=url)
5790 url=url)
5792 except error.BundleUnknownFeatureError as exc:
5791 except error.BundleUnknownFeatureError as exc:
5793 raise error.Abort(
5792 raise error.Abort(
5794 _('%s: unknown bundle feature, %s') % (fname, exc),
5793 _('%s: unknown bundle feature, %s') % (fname, exc),
5795 hint=_("see https://mercurial-scm.org/"
5794 hint=_("see https://mercurial-scm.org/"
5796 "wiki/BundleFeature for more "
5795 "wiki/BundleFeature for more "
5797 "information"))
5796 "information"))
5798 modheads = bundle2.combinechangegroupresults(op)
5797 modheads = bundle2.combinechangegroupresults(op)
5799
5798
5800 return postincoming(ui, repo, modheads, opts.get(r'update'), None, None)
5799 return postincoming(ui, repo, modheads, opts.get(r'update'), None, None)
5801
5800
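# Illustrative sketch, not taken from this changeset: applying a bundle
# programmatically follows the same steps as unbundle() above: open the file,
# read it with exchange.readbundle(), then apply it inside a transaction. The
# bundle file name below is a placeholder:
#
#   from mercurial import bundle2, exchange, hg, ui as uimod
#
#   myui = uimod.ui.load()
#   repo = hg.repository(myui, b'.')
#   fname = b'changes.hg'                      # assumed bundle file
#   f = hg.openpath(myui, fname)
#   gen = exchange.readbundle(myui, f, fname)
#   with repo.lock(), repo.transaction(b'unbundle') as tr:
#       bundle2.applybundle(repo, gen, tr, source=b'unbundle',
#                           url=b'bundle:' + fname)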
5802 @command('^update|up|checkout|co',
5801 @command('^update|up|checkout|co',
5803 [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
5802 [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
5804 ('c', 'check', None, _('require clean working directory')),
5803 ('c', 'check', None, _('require clean working directory')),
5805 ('m', 'merge', None, _('merge uncommitted changes')),
5804 ('m', 'merge', None, _('merge uncommitted changes')),
5806 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
5805 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
5807 ('r', 'rev', '', _('revision'), _('REV'))
5806 ('r', 'rev', '', _('revision'), _('REV'))
5808 ] + mergetoolopts,
5807 ] + mergetoolopts,
5809 _('[-C|-c|-m] [-d DATE] [[-r] REV]'))
5808 _('[-C|-c|-m] [-d DATE] [[-r] REV]'))
5810 def update(ui, repo, node=None, **opts):
5809 def update(ui, repo, node=None, **opts):
5811 """update working directory (or switch revisions)
5810 """update working directory (or switch revisions)
5812
5811
5813 Update the repository's working directory to the specified
5812 Update the repository's working directory to the specified
5814 changeset. If no changeset is specified, update to the tip of the
5813 changeset. If no changeset is specified, update to the tip of the
5815 current named branch and move the active bookmark (see :hg:`help
5814 current named branch and move the active bookmark (see :hg:`help
5816 bookmarks`).
5815 bookmarks`).
5817
5816
5818 Update sets the working directory's parent revision to the specified
5817 Update sets the working directory's parent revision to the specified
5819 changeset (see :hg:`help parents`).
5818 changeset (see :hg:`help parents`).
5820
5819
5821 If the changeset is not a descendant or ancestor of the working
5820 If the changeset is not a descendant or ancestor of the working
5822 directory's parent and there are uncommitted changes, the update is
5821 directory's parent and there are uncommitted changes, the update is
5823 aborted. With the -c/--check option, the working directory is checked
5822 aborted. With the -c/--check option, the working directory is checked
5824 for uncommitted changes; if none are found, the working directory is
5823 for uncommitted changes; if none are found, the working directory is
5825 updated to the specified changeset.
5824 updated to the specified changeset.
5826
5825
5827 .. container:: verbose
5826 .. container:: verbose
5828
5827
5829 The -C/--clean, -c/--check, and -m/--merge options control what
5828 The -C/--clean, -c/--check, and -m/--merge options control what
5830 happens if the working directory contains uncommitted changes.
5829 happens if the working directory contains uncommitted changes.
5831 At most one of them can be specified.
5830 At most one of them can be specified.
5832
5831
5833 1. If no option is specified, and if
5832 1. If no option is specified, and if
5834 the requested changeset is an ancestor or descendant of
5833 the requested changeset is an ancestor or descendant of
5835 the working directory's parent, the uncommitted changes
5834 the working directory's parent, the uncommitted changes
5836 are merged into the requested changeset and the merged
5835 are merged into the requested changeset and the merged
5837 result is left uncommitted. If the requested changeset is
5836 result is left uncommitted. If the requested changeset is
5838 not an ancestor or descendant (that is, it is on another
5837 not an ancestor or descendant (that is, it is on another
5839 branch), the update is aborted and the uncommitted changes
5838 branch), the update is aborted and the uncommitted changes
5840 are preserved.
5839 are preserved.
5841
5840
5842 2. With the -m/--merge option, the update is allowed even if the
5841 2. With the -m/--merge option, the update is allowed even if the
5843 requested changeset is not an ancestor or descendant of
5842 requested changeset is not an ancestor or descendant of
5844 the working directory's parent.
5843 the working directory's parent.
5845
5844
5846 3. With the -c/--check option, the update is aborted and the
5845 3. With the -c/--check option, the update is aborted and the
5847 uncommitted changes are preserved.
5846 uncommitted changes are preserved.
5848
5847
5849 4. With the -C/--clean option, uncommitted changes are discarded and
5848 4. With the -C/--clean option, uncommitted changes are discarded and
5850 the working directory is updated to the requested changeset.
5849 the working directory is updated to the requested changeset.
5851
5850
5852 To cancel an uncommitted merge (and lose your changes), use
5851 To cancel an uncommitted merge (and lose your changes), use
5853 :hg:`merge --abort`.
5852 :hg:`merge --abort`.
5854
5853
5855 Use null as the changeset to remove the working directory (like
5854 Use null as the changeset to remove the working directory (like
5856 :hg:`clone -U`).
5855 :hg:`clone -U`).
5857
5856
5858 If you want to revert just one file to an older revision, use
5857 If you want to revert just one file to an older revision, use
5859 :hg:`revert [-r REV] NAME`.
5858 :hg:`revert [-r REV] NAME`.
5860
5859
5861 See :hg:`help dates` for a list of formats valid for -d/--date.
5860 See :hg:`help dates` for a list of formats valid for -d/--date.
5862
5861
5863 Returns 0 on success, 1 if there are unresolved files.
5862 Returns 0 on success, 1 if there are unresolved files.
5864 """
5863 """
5865 rev = opts.get(r'rev')
5864 rev = opts.get(r'rev')
5866 date = opts.get(r'date')
5865 date = opts.get(r'date')
5867 clean = opts.get(r'clean')
5866 clean = opts.get(r'clean')
5868 check = opts.get(r'check')
5867 check = opts.get(r'check')
5869 merge = opts.get(r'merge')
5868 merge = opts.get(r'merge')
5870 if rev and node:
5869 if rev and node:
5871 raise error.Abort(_("please specify just one revision"))
5870 raise error.Abort(_("please specify just one revision"))
5872
5871
5873 if ui.configbool('commands', 'update.requiredest'):
5872 if ui.configbool('commands', 'update.requiredest'):
5874 if not node and not rev and not date:
5873 if not node and not rev and not date:
5875 raise error.Abort(_('you must specify a destination'),
5874 raise error.Abort(_('you must specify a destination'),
5876 hint=_('for example: hg update ".::"'))
5875 hint=_('for example: hg update ".::"'))
5877
5876
5878 if rev is None or rev == '':
5877 if rev is None or rev == '':
5879 rev = node
5878 rev = node
5880
5879
5881 if date and rev is not None:
5880 if date and rev is not None:
5882 raise error.Abort(_("you can't specify a revision and a date"))
5881 raise error.Abort(_("you can't specify a revision and a date"))
5883
5882
5884 if len([x for x in (clean, check, merge) if x]) > 1:
5883 if len([x for x in (clean, check, merge) if x]) > 1:
5885 raise error.Abort(_("can only specify one of -C/--clean, -c/--check, "
5884 raise error.Abort(_("can only specify one of -C/--clean, -c/--check, "
5886 "or -m/--merge"))
5885 "or -m/--merge"))
5887
5886
5888 updatecheck = None
5887 updatecheck = None
5889 if check:
5888 if check:
5890 updatecheck = 'abort'
5889 updatecheck = 'abort'
5891 elif merge:
5890 elif merge:
5892 updatecheck = 'none'
5891 updatecheck = 'none'
5893
5892
5894 with repo.wlock():
5893 with repo.wlock():
5895 cmdutil.clearunfinished(repo)
5894 cmdutil.clearunfinished(repo)
5896
5895
5897 if date:
5896 if date:
5898 rev = cmdutil.finddate(ui, repo, date)
5897 rev = cmdutil.finddate(ui, repo, date)
5899
5898
5900 # if we defined a bookmark, we have to remember the original name
5899 # if we defined a bookmark, we have to remember the original name
5901 brev = rev
5900 brev = rev
5902 if rev:
5901 if rev:
5903 repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
5902 repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
5904 ctx = scmutil.revsingle(repo, rev, rev)
5903 ctx = scmutil.revsingle(repo, rev, rev)
5905 rev = ctx.rev()
5904 rev = ctx.rev()
5906 hidden = ctx.hidden()
5905 hidden = ctx.hidden()
5907 overrides = {('ui', 'forcemerge'): opts.get(r'tool', '')}
5906 overrides = {('ui', 'forcemerge'): opts.get(r'tool', '')}
5908 with ui.configoverride(overrides, 'update'):
5907 with ui.configoverride(overrides, 'update'):
5909 ret = hg.updatetotally(ui, repo, rev, brev, clean=clean,
5908 ret = hg.updatetotally(ui, repo, rev, brev, clean=clean,
5910 updatecheck=updatecheck)
5909 updatecheck=updatecheck)
5911 if hidden:
5910 if hidden:
5912 ctxstr = ctx.hex()[:12]
5911 ctxstr = ctx.hex()[:12]
5913 ui.warn(_("updated to hidden changeset %s\n") % ctxstr)
5912 ui.warn(_("updated to hidden changeset %s\n") % ctxstr)
5914
5913
5915 if ctx.obsolete():
5914 if ctx.obsolete():
5916 obsfatemsg = obsutil._getfilteredreason(repo, ctxstr, ctx)
5915 obsfatemsg = obsutil._getfilteredreason(repo, ctxstr, ctx)
5917 ui.warn("(%s)\n" % obsfatemsg)
5916 ui.warn("(%s)\n" % obsfatemsg)
5918 return ret
5917 return ret
5919
5918
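# Illustrative sketch, not taken from this changeset: the heart of update()
# above is hg.updatetotally(). A hedged minimal call that moves the working
# directory to the revision named by the placeholder b'default' (the ui/repo
# setup and the revision name are assumptions for the example):
#
#   from mercurial import hg, scmutil, ui as uimod
#
#   myui = uimod.ui.load()
#   repo = hg.repository(myui, b'.')
#   with repo.wlock():
#       rev = scmutil.revsingle(repo, b'default').rev()
#       # updatecheck may be None, 'abort' or 'none', mirroring the command
#       hg.updatetotally(myui, repo, rev, b'default', clean=False,
#                        updatecheck=None)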
5920 @command('verify', [])
5919 @command('verify', [])
5921 def verify(ui, repo):
5920 def verify(ui, repo):
5922 """verify the integrity of the repository
5921 """verify the integrity of the repository
5923
5922
5924 Verify the integrity of the current repository.
5923 Verify the integrity of the current repository.
5925
5924
5926 This will perform an extensive check of the repository's
5925 This will perform an extensive check of the repository's
5927 integrity, validating the hashes and checksums of each entry in
5926 integrity, validating the hashes and checksums of each entry in
5928 the changelog, manifest, and tracked files, as well as the
5927 the changelog, manifest, and tracked files, as well as the
5929 integrity of their crosslinks and indices.
5928 integrity of their crosslinks and indices.
5930
5929
5931 Please see https://mercurial-scm.org/wiki/RepositoryCorruption
5930 Please see https://mercurial-scm.org/wiki/RepositoryCorruption
5932 for more information about recovery from corruption of the
5931 for more information about recovery from corruption of the
5933 repository.
5932 repository.
5934
5933
5935 Returns 0 on success, 1 if errors are encountered.
5934 Returns 0 on success, 1 if errors are encountered.
5936 """
5935 """
5937 return hg.verify(repo)
5936 return hg.verify(repo)
5938
5937
5939 @command('version', [] + formatteropts, norepo=True,
5938 @command('version', [] + formatteropts, norepo=True,
5940 intents={INTENT_READONLY})
5939 intents={INTENT_READONLY})
5941 def version_(ui, **opts):
5940 def version_(ui, **opts):
5942 """output version and copyright information"""
5941 """output version and copyright information"""
5943 opts = pycompat.byteskwargs(opts)
5942 opts = pycompat.byteskwargs(opts)
5944 if ui.verbose:
5943 if ui.verbose:
5945 ui.pager('version')
5944 ui.pager('version')
5946 fm = ui.formatter("version", opts)
5945 fm = ui.formatter("version", opts)
5947 fm.startitem()
5946 fm.startitem()
5948 fm.write("ver", _("Mercurial Distributed SCM (version %s)\n"),
5947 fm.write("ver", _("Mercurial Distributed SCM (version %s)\n"),
5949 util.version())
5948 util.version())
5950 license = _(
5949 license = _(
5951 "(see https://mercurial-scm.org for more information)\n"
5950 "(see https://mercurial-scm.org for more information)\n"
5952 "\nCopyright (C) 2005-2018 Matt Mackall and others\n"
5951 "\nCopyright (C) 2005-2018 Matt Mackall and others\n"
5953 "This is free software; see the source for copying conditions. "
5952 "This is free software; see the source for copying conditions. "
5954 "There is NO\nwarranty; "
5953 "There is NO\nwarranty; "
5955 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
5954 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
5956 )
5955 )
5957 if not ui.quiet:
5956 if not ui.quiet:
5958 fm.plain(license)
5957 fm.plain(license)
5959
5958
5960 if ui.verbose:
5959 if ui.verbose:
5961 fm.plain(_("\nEnabled extensions:\n\n"))
5960 fm.plain(_("\nEnabled extensions:\n\n"))
5962 # format names and versions into columns
5961 # format names and versions into columns
5963 names = []
5962 names = []
5964 vers = []
5963 vers = []
5965 isinternals = []
5964 isinternals = []
5966 for name, module in extensions.extensions():
5965 for name, module in extensions.extensions():
5967 names.append(name)
5966 names.append(name)
5968 vers.append(extensions.moduleversion(module) or None)
5967 vers.append(extensions.moduleversion(module) or None)
5969 isinternals.append(extensions.ismoduleinternal(module))
5968 isinternals.append(extensions.ismoduleinternal(module))
5970 fn = fm.nested("extensions", tmpl='{name}\n')
5969 fn = fm.nested("extensions", tmpl='{name}\n')
5971 if names:
5970 if names:
5972 namefmt = " %%-%ds " % max(len(n) for n in names)
5971 namefmt = " %%-%ds " % max(len(n) for n in names)
5973 places = [_("external"), _("internal")]
5972 places = [_("external"), _("internal")]
5974 for n, v, p in zip(names, vers, isinternals):
5973 for n, v, p in zip(names, vers, isinternals):
5975 fn.startitem()
5974 fn.startitem()
5976 fn.condwrite(ui.verbose, "name", namefmt, n)
5975 fn.condwrite(ui.verbose, "name", namefmt, n)
5977 if ui.verbose:
5976 if ui.verbose:
5978 fn.plain("%s " % places[p])
5977 fn.plain("%s " % places[p])
5979 fn.data(bundled=p)
5978 fn.data(bundled=p)
5980 fn.condwrite(ui.verbose and v, "ver", "%s", v)
5979 fn.condwrite(ui.verbose and v, "ver", "%s", v)
5981 if ui.verbose:
5980 if ui.verbose:
5982 fn.plain("\n")
5981 fn.plain("\n")
5983 fn.end()
5982 fn.end()
5984 fm.end()
5983 fm.end()
5985
5984
5986 def loadcmdtable(ui, name, cmdtable):
5985 def loadcmdtable(ui, name, cmdtable):
5987 """Load command functions from specified cmdtable
5986 """Load command functions from specified cmdtable
5988 """
5987 """
5989 overrides = [cmd for cmd in cmdtable if cmd in table]
5988 overrides = [cmd for cmd in cmdtable if cmd in table]
5990 if overrides:
5989 if overrides:
5991 ui.warn(_("extension '%s' overrides commands: %s\n")
5990 ui.warn(_("extension '%s' overrides commands: %s\n")
5992 % (name, " ".join(overrides)))
5991 % (name, " ".join(overrides)))
5993 table.update(cmdtable)
5992 table.update(cmdtable)
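The loadcmdtable() helper above merges an extension's command table into Mercurial's global table and warns when built-in commands are overridden. A minimal, hedged sketch of a cmdtable it could merge, using the registrar API; the 'hello' command and the extension name are made up for illustration:

from mercurial import registrar

cmdtable = {}
command = registrar.command(cmdtable)

@command(b'hello', [], b'hg hello')
def hello(ui, repo, **opts):
    """print a greeting (illustrative only)"""
    ui.write(b'hello from an example extension\n')

# loadcmdtable(ui, 'hello-ext', cmdtable) would then add 'hello' to the
# global command table, emitting the override warning only if the name
# clashed with an existing command.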
@@ -1,3016 +1,3022 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import hashlib
11 import hashlib
12 import os
12 import os
13 import random
13 import random
14 import sys
14 import sys
15 import time
15 import time
16 import weakref
16 import weakref
17
17
18 from .i18n import _
18 from .i18n import _
19 from .node import (
19 from .node import (
20 bin,
20 bin,
21 hex,
21 hex,
22 nullid,
22 nullid,
23 nullrev,
23 nullrev,
24 short,
24 short,
25 )
25 )
26 from . import (
26 from . import (
27 bookmarks,
27 bookmarks,
28 branchmap,
28 branchmap,
29 bundle2,
29 bundle2,
30 changegroup,
30 changegroup,
31 changelog,
31 changelog,
32 color,
32 color,
33 context,
33 context,
34 dirstate,
34 dirstate,
35 dirstateguard,
35 dirstateguard,
36 discovery,
36 discovery,
37 encoding,
37 encoding,
38 error,
38 error,
39 exchange,
39 exchange,
40 extensions,
40 extensions,
41 filelog,
41 filelog,
42 hook,
42 hook,
43 lock as lockmod,
43 lock as lockmod,
44 manifest,
44 manifest,
45 match as matchmod,
45 match as matchmod,
46 merge as mergemod,
46 merge as mergemod,
47 mergeutil,
47 mergeutil,
48 namespaces,
48 namespaces,
49 narrowspec,
49 narrowspec,
50 obsolete,
50 obsolete,
51 pathutil,
51 pathutil,
52 phases,
52 phases,
53 pushkey,
53 pushkey,
54 pycompat,
54 pycompat,
55 repository,
55 repository,
56 repoview,
56 repoview,
57 revset,
57 revset,
58 revsetlang,
58 revsetlang,
59 scmutil,
59 scmutil,
60 sparse,
60 sparse,
61 store as storemod,
61 store as storemod,
62 subrepoutil,
62 subrepoutil,
63 tags as tagsmod,
63 tags as tagsmod,
64 transaction,
64 transaction,
65 txnutil,
65 txnutil,
66 util,
66 util,
67 vfs as vfsmod,
67 vfs as vfsmod,
68 )
68 )
69 from .utils import (
69 from .utils import (
70 interfaceutil,
70 interfaceutil,
71 procutil,
71 procutil,
72 stringutil,
72 stringutil,
73 )
73 )
74
74
75 from .revlogutils import (
75 from .revlogutils import (
76 constants as revlogconst,
76 constants as revlogconst,
77 )
77 )
78
78
79 release = lockmod.release
79 release = lockmod.release
80 urlerr = util.urlerr
80 urlerr = util.urlerr
81 urlreq = util.urlreq
81 urlreq = util.urlreq
82
82
83 # set of (path, vfs-location) tuples. vfs-location is:
83 # set of (path, vfs-location) tuples. vfs-location is:
84 # - 'plain' for vfs relative paths
84 # - 'plain' for vfs relative paths
85 # - '' for svfs relative paths
85 # - '' for svfs relative paths
86 _cachedfiles = set()
86 _cachedfiles = set()
87
87
88 class _basefilecache(scmutil.filecache):
88 class _basefilecache(scmutil.filecache):
89 """All filecache usage on repo are done for logic that should be unfiltered
89 """All filecache usage on repo are done for logic that should be unfiltered
90 """
90 """
91 def __get__(self, repo, type=None):
91 def __get__(self, repo, type=None):
92 if repo is None:
92 if repo is None:
93 return self
93 return self
94 return super(_basefilecache, self).__get__(repo.unfiltered(), type)
94 return super(_basefilecache, self).__get__(repo.unfiltered(), type)
95 def __set__(self, repo, value):
95 def __set__(self, repo, value):
96 return super(_basefilecache, self).__set__(repo.unfiltered(), value)
96 return super(_basefilecache, self).__set__(repo.unfiltered(), value)
97 def __delete__(self, repo):
97 def __delete__(self, repo):
98 return super(_basefilecache, self).__delete__(repo.unfiltered())
98 return super(_basefilecache, self).__delete__(repo.unfiltered())
99
99
100 class repofilecache(_basefilecache):
100 class repofilecache(_basefilecache):
101 """filecache for files in .hg but outside of .hg/store"""
101 """filecache for files in .hg but outside of .hg/store"""
102 def __init__(self, *paths):
102 def __init__(self, *paths):
103 super(repofilecache, self).__init__(*paths)
103 super(repofilecache, self).__init__(*paths)
104 for path in paths:
104 for path in paths:
105 _cachedfiles.add((path, 'plain'))
105 _cachedfiles.add((path, 'plain'))
106
106
107 def join(self, obj, fname):
107 def join(self, obj, fname):
108 return obj.vfs.join(fname)
108 return obj.vfs.join(fname)
109
109
110 class storecache(_basefilecache):
110 class storecache(_basefilecache):
111 """filecache for files in the store"""
111 """filecache for files in the store"""
112 def __init__(self, *paths):
112 def __init__(self, *paths):
113 super(storecache, self).__init__(*paths)
113 super(storecache, self).__init__(*paths)
114 for path in paths:
114 for path in paths:
115 _cachedfiles.add((path, ''))
115 _cachedfiles.add((path, ''))
116
116
117 def join(self, obj, fname):
117 def join(self, obj, fname):
118 return obj.sjoin(fname)
118 return obj.sjoin(fname)
119
119
120 def isfilecached(repo, name):
120 def isfilecached(repo, name):
121 """check if a repo has already cached "name" filecache-ed property
121 """check if a repo has already cached "name" filecache-ed property
122
122
123 This returns (cachedobj-or-None, iscached) tuple.
123 This returns (cachedobj-or-None, iscached) tuple.
124 """
124 """
125 cacheentry = repo.unfiltered()._filecache.get(name, None)
125 cacheentry = repo.unfiltered()._filecache.get(name, None)
126 if not cacheentry:
126 if not cacheentry:
127 return None, False
127 return None, False
128 return cacheentry.obj, True
128 return cacheentry.obj, True
129
129
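As the docstring notes, isfilecached() only reports whether a filecache-ed property has already been materialized; it never populates the cache itself. A hedged usage sketch, assuming an existing repository in the current directory ('changelog' is one of the cached property names defined further down in this module):

from mercurial import hg, localrepo, ui as uimod

repo = hg.repository(uimod.ui.load(), b'.')
obj, cached = localrepo.isfilecached(repo, 'changelog')
if not cached:
    obj = repo.changelog  # first attribute access fills the cache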
130 class unfilteredpropertycache(util.propertycache):
130 class unfilteredpropertycache(util.propertycache):
131 """propertycache that apply to unfiltered repo only"""
131 """propertycache that apply to unfiltered repo only"""
132
132
133 def __get__(self, repo, type=None):
133 def __get__(self, repo, type=None):
134 unfi = repo.unfiltered()
134 unfi = repo.unfiltered()
135 if unfi is repo:
135 if unfi is repo:
136 return super(unfilteredpropertycache, self).__get__(unfi)
136 return super(unfilteredpropertycache, self).__get__(unfi)
137 return getattr(unfi, self.name)
137 return getattr(unfi, self.name)
138
138
139 class filteredpropertycache(util.propertycache):
139 class filteredpropertycache(util.propertycache):
140 """propertycache that must take filtering in account"""
140 """propertycache that must take filtering in account"""
141
141
142 def cachevalue(self, obj, value):
142 def cachevalue(self, obj, value):
143 object.__setattr__(obj, self.name, value)
143 object.__setattr__(obj, self.name, value)
144
144
145
145
146 def hasunfilteredcache(repo, name):
146 def hasunfilteredcache(repo, name):
147 """check if a repo has an unfilteredpropertycache value for <name>"""
147 """check if a repo has an unfilteredpropertycache value for <name>"""
148 return name in vars(repo.unfiltered())
148 return name in vars(repo.unfiltered())
149
149
150 def unfilteredmethod(orig):
150 def unfilteredmethod(orig):
151 """decorate method that always need to be run on unfiltered version"""
151 """decorate method that always need to be run on unfiltered version"""
152 def wrapper(repo, *args, **kwargs):
152 def wrapper(repo, *args, **kwargs):
153 return orig(repo.unfiltered(), *args, **kwargs)
153 return orig(repo.unfiltered(), *args, **kwargs)
154 return wrapper
154 return wrapper
155
155
156 moderncaps = {'lookup', 'branchmap', 'pushkey', 'known', 'getbundle',
156 moderncaps = {'lookup', 'branchmap', 'pushkey', 'known', 'getbundle',
157 'unbundle'}
157 'unbundle'}
158 legacycaps = moderncaps.union({'changegroupsubset'})
158 legacycaps = moderncaps.union({'changegroupsubset'})
159
159
160 @interfaceutil.implementer(repository.ipeercommandexecutor)
160 @interfaceutil.implementer(repository.ipeercommandexecutor)
161 class localcommandexecutor(object):
161 class localcommandexecutor(object):
162 def __init__(self, peer):
162 def __init__(self, peer):
163 self._peer = peer
163 self._peer = peer
164 self._sent = False
164 self._sent = False
165 self._closed = False
165 self._closed = False
166
166
167 def __enter__(self):
167 def __enter__(self):
168 return self
168 return self
169
169
170 def __exit__(self, exctype, excvalue, exctb):
170 def __exit__(self, exctype, excvalue, exctb):
171 self.close()
171 self.close()
172
172
173 def callcommand(self, command, args):
173 def callcommand(self, command, args):
174 if self._sent:
174 if self._sent:
175 raise error.ProgrammingError('callcommand() cannot be used after '
175 raise error.ProgrammingError('callcommand() cannot be used after '
176 'sendcommands()')
176 'sendcommands()')
177
177
178 if self._closed:
178 if self._closed:
179 raise error.ProgrammingError('callcommand() cannot be used after '
179 raise error.ProgrammingError('callcommand() cannot be used after '
180 'close()')
180 'close()')
181
181
182 # We don't need to support anything fancy. Just call the named
182 # We don't need to support anything fancy. Just call the named
183 # method on the peer and return a resolved future.
183 # method on the peer and return a resolved future.
184 fn = getattr(self._peer, pycompat.sysstr(command))
184 fn = getattr(self._peer, pycompat.sysstr(command))
185
185
186 f = pycompat.futures.Future()
186 f = pycompat.futures.Future()
187
187
188 try:
188 try:
189 result = fn(**pycompat.strkwargs(args))
189 result = fn(**pycompat.strkwargs(args))
190 except Exception:
190 except Exception:
191 pycompat.future_set_exception_info(f, sys.exc_info()[1:])
191 pycompat.future_set_exception_info(f, sys.exc_info()[1:])
192 else:
192 else:
193 f.set_result(result)
193 f.set_result(result)
194
194
195 return f
195 return f
196
196
197 def sendcommands(self):
197 def sendcommands(self):
198 self._sent = True
198 self._sent = True
199
199
200 def close(self):
200 def close(self):
201 self._closed = True
201 self._closed = True
202
202
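Because localcommandexecutor resolves every command inline, callers get an already-completed future back from callcommand(). A sketch of driving it through the peer interface defined below, assuming a local repository in the current directory:

from mercurial import hg, ui as uimod

peer = hg.peer(uimod.ui.load(), {}, b'.')
with peer.commandexecutor() as e:
    fut = e.callcommand(b'heads', {})
    heads = fut.result()  # resolved immediately for a local peer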
203 @interfaceutil.implementer(repository.ipeercommands)
203 @interfaceutil.implementer(repository.ipeercommands)
204 class localpeer(repository.peer):
204 class localpeer(repository.peer):
205 '''peer for a local repo; reflects only the most recent API'''
205 '''peer for a local repo; reflects only the most recent API'''
206
206
207 def __init__(self, repo, caps=None):
207 def __init__(self, repo, caps=None):
208 super(localpeer, self).__init__()
208 super(localpeer, self).__init__()
209
209
210 if caps is None:
210 if caps is None:
211 caps = moderncaps.copy()
211 caps = moderncaps.copy()
212 self._repo = repo.filtered('served')
212 self._repo = repo.filtered('served')
213 self.ui = repo.ui
213 self.ui = repo.ui
214 self._caps = repo._restrictcapabilities(caps)
214 self._caps = repo._restrictcapabilities(caps)
215
215
216 # Begin of _basepeer interface.
216 # Begin of _basepeer interface.
217
217
218 def url(self):
218 def url(self):
219 return self._repo.url()
219 return self._repo.url()
220
220
221 def local(self):
221 def local(self):
222 return self._repo
222 return self._repo
223
223
224 def peer(self):
224 def peer(self):
225 return self
225 return self
226
226
227 def canpush(self):
227 def canpush(self):
228 return True
228 return True
229
229
230 def close(self):
230 def close(self):
231 self._repo.close()
231 self._repo.close()
232
232
233 # End of _basepeer interface.
233 # End of _basepeer interface.
234
234
235 # Begin of _basewirecommands interface.
235 # Begin of _basewirecommands interface.
236
236
237 def branchmap(self):
237 def branchmap(self):
238 return self._repo.branchmap()
238 return self._repo.branchmap()
239
239
240 def capabilities(self):
240 def capabilities(self):
241 return self._caps
241 return self._caps
242
242
243 def clonebundles(self):
243 def clonebundles(self):
244 return self._repo.tryread('clonebundles.manifest')
244 return self._repo.tryread('clonebundles.manifest')
245
245
246 def debugwireargs(self, one, two, three=None, four=None, five=None):
246 def debugwireargs(self, one, two, three=None, four=None, five=None):
247 """Used to test argument passing over the wire"""
247 """Used to test argument passing over the wire"""
248 return "%s %s %s %s %s" % (one, two, pycompat.bytestr(three),
248 return "%s %s %s %s %s" % (one, two, pycompat.bytestr(three),
249 pycompat.bytestr(four),
249 pycompat.bytestr(four),
250 pycompat.bytestr(five))
250 pycompat.bytestr(five))
251
251
252 def getbundle(self, source, heads=None, common=None, bundlecaps=None,
252 def getbundle(self, source, heads=None, common=None, bundlecaps=None,
253 **kwargs):
253 **kwargs):
254 chunks = exchange.getbundlechunks(self._repo, source, heads=heads,
254 chunks = exchange.getbundlechunks(self._repo, source, heads=heads,
255 common=common, bundlecaps=bundlecaps,
255 common=common, bundlecaps=bundlecaps,
256 **kwargs)[1]
256 **kwargs)[1]
257 cb = util.chunkbuffer(chunks)
257 cb = util.chunkbuffer(chunks)
258
258
259 if exchange.bundle2requested(bundlecaps):
259 if exchange.bundle2requested(bundlecaps):
260 # When requesting a bundle2, getbundle returns a stream to make the
260 # When requesting a bundle2, getbundle returns a stream to make the
261 # wire level function happier. We need to build a proper object
261 # wire level function happier. We need to build a proper object
262 # from it in local peer.
262 # from it in local peer.
263 return bundle2.getunbundler(self.ui, cb)
263 return bundle2.getunbundler(self.ui, cb)
264 else:
264 else:
265 return changegroup.getunbundler('01', cb, None)
265 return changegroup.getunbundler('01', cb, None)
266
266
267 def heads(self):
267 def heads(self):
268 return self._repo.heads()
268 return self._repo.heads()
269
269
270 def known(self, nodes):
270 def known(self, nodes):
271 return self._repo.known(nodes)
271 return self._repo.known(nodes)
272
272
273 def listkeys(self, namespace):
273 def listkeys(self, namespace):
274 return self._repo.listkeys(namespace)
274 return self._repo.listkeys(namespace)
275
275
276 def lookup(self, key):
276 def lookup(self, key):
277 return self._repo.lookup(key)
277 return self._repo.lookup(key)
278
278
279 def pushkey(self, namespace, key, old, new):
279 def pushkey(self, namespace, key, old, new):
280 return self._repo.pushkey(namespace, key, old, new)
280 return self._repo.pushkey(namespace, key, old, new)
281
281
282 def stream_out(self):
282 def stream_out(self):
283 raise error.Abort(_('cannot perform stream clone against local '
283 raise error.Abort(_('cannot perform stream clone against local '
284 'peer'))
284 'peer'))
285
285
286 def unbundle(self, bundle, heads, url):
286 def unbundle(self, bundle, heads, url):
287 """apply a bundle on a repo
287 """apply a bundle on a repo
288
288
289 This function handles the repo locking itself."""
289 This function handles the repo locking itself."""
290 try:
290 try:
291 try:
291 try:
292 bundle = exchange.readbundle(self.ui, bundle, None)
292 bundle = exchange.readbundle(self.ui, bundle, None)
293 ret = exchange.unbundle(self._repo, bundle, heads, 'push', url)
293 ret = exchange.unbundle(self._repo, bundle, heads, 'push', url)
294 if util.safehasattr(ret, 'getchunks'):
294 if util.safehasattr(ret, 'getchunks'):
295 # This is a bundle20 object, turn it into an unbundler.
295 # This is a bundle20 object, turn it into an unbundler.
296 # This little dance should be dropped eventually when the
296 # This little dance should be dropped eventually when the
297 # API is finally improved.
297 # API is finally improved.
298 stream = util.chunkbuffer(ret.getchunks())
298 stream = util.chunkbuffer(ret.getchunks())
299 ret = bundle2.getunbundler(self.ui, stream)
299 ret = bundle2.getunbundler(self.ui, stream)
300 return ret
300 return ret
301 except Exception as exc:
301 except Exception as exc:
302 # If the exception contains output salvaged from a bundle2
302 # If the exception contains output salvaged from a bundle2
303 # reply, we need to make sure it is printed before continuing
303 # reply, we need to make sure it is printed before continuing
304 # to fail. So we build a bundle2 with such output and consume
304 # to fail. So we build a bundle2 with such output and consume
305 # it directly.
305 # it directly.
306 #
306 #
307 # This is not very elegant but allows a "simple" solution for
307 # This is not very elegant but allows a "simple" solution for
308 # issue4594
308 # issue4594
309 output = getattr(exc, '_bundle2salvagedoutput', ())
309 output = getattr(exc, '_bundle2salvagedoutput', ())
310 if output:
310 if output:
311 bundler = bundle2.bundle20(self._repo.ui)
311 bundler = bundle2.bundle20(self._repo.ui)
312 for out in output:
312 for out in output:
313 bundler.addpart(out)
313 bundler.addpart(out)
314 stream = util.chunkbuffer(bundler.getchunks())
314 stream = util.chunkbuffer(bundler.getchunks())
315 b = bundle2.getunbundler(self.ui, stream)
315 b = bundle2.getunbundler(self.ui, stream)
316 bundle2.processbundle(self._repo, b)
316 bundle2.processbundle(self._repo, b)
317 raise
317 raise
318 except error.PushRaced as exc:
318 except error.PushRaced as exc:
319 raise error.ResponseError(_('push failed:'),
319 raise error.ResponseError(_('push failed:'),
320 stringutil.forcebytestr(exc))
320 stringutil.forcebytestr(exc))
321
321
322 # End of _basewirecommands interface.
322 # End of _basewirecommands interface.
323
323
324 # Begin of peer interface.
324 # Begin of peer interface.
325
325
326 def commandexecutor(self):
326 def commandexecutor(self):
327 return localcommandexecutor(self)
327 return localcommandexecutor(self)
328
328
329 # End of peer interface.
329 # End of peer interface.
330
330
331 @interfaceutil.implementer(repository.ipeerlegacycommands)
331 @interfaceutil.implementer(repository.ipeerlegacycommands)
332 class locallegacypeer(localpeer):
332 class locallegacypeer(localpeer):
333 '''peer extension which implements legacy methods too; used for tests with
333 '''peer extension which implements legacy methods too; used for tests with
334 restricted capabilities'''
334 restricted capabilities'''
335
335
336 def __init__(self, repo):
336 def __init__(self, repo):
337 super(locallegacypeer, self).__init__(repo, caps=legacycaps)
337 super(locallegacypeer, self).__init__(repo, caps=legacycaps)
338
338
339 # Begin of baselegacywirecommands interface.
339 # Begin of baselegacywirecommands interface.
340
340
341 def between(self, pairs):
341 def between(self, pairs):
342 return self._repo.between(pairs)
342 return self._repo.between(pairs)
343
343
344 def branches(self, nodes):
344 def branches(self, nodes):
345 return self._repo.branches(nodes)
345 return self._repo.branches(nodes)
346
346
347 def changegroup(self, nodes, source):
347 def changegroup(self, nodes, source):
348 outgoing = discovery.outgoing(self._repo, missingroots=nodes,
348 outgoing = discovery.outgoing(self._repo, missingroots=nodes,
349 missingheads=self._repo.heads())
349 missingheads=self._repo.heads())
350 return changegroup.makechangegroup(self._repo, outgoing, '01', source)
350 return changegroup.makechangegroup(self._repo, outgoing, '01', source)
351
351
352 def changegroupsubset(self, bases, heads, source):
352 def changegroupsubset(self, bases, heads, source):
353 outgoing = discovery.outgoing(self._repo, missingroots=bases,
353 outgoing = discovery.outgoing(self._repo, missingroots=bases,
354 missingheads=heads)
354 missingheads=heads)
355 return changegroup.makechangegroup(self._repo, outgoing, '01', source)
355 return changegroup.makechangegroup(self._repo, outgoing, '01', source)
356
356
357 # End of baselegacywirecommands interface.
357 # End of baselegacywirecommands interface.
358
358
359 # Increment the sub-version when the revlog v2 format changes to lock out old
359 # Increment the sub-version when the revlog v2 format changes to lock out old
360 # clients.
360 # clients.
361 REVLOGV2_REQUIREMENT = 'exp-revlogv2.0'
361 REVLOGV2_REQUIREMENT = 'exp-revlogv2.0'
362
362
363 # A repository with the sparserevlog feature will have delta chains that
363 # A repository with the sparserevlog feature will have delta chains that
364 # can spread over a larger span. Sparse reading cuts these large spans into
364 # can spread over a larger span. Sparse reading cuts these large spans into
365 # pieces, so that each piece isn't too big.
365 # pieces, so that each piece isn't too big.
366 # Without the sparserevlog capability, reading from the repository could use
366 # Without the sparserevlog capability, reading from the repository could use
367 # huge amounts of memory, because the whole span would be read at once,
367 # huge amounts of memory, because the whole span would be read at once,
368 # including all the intermediate revisions that aren't pertinent for the chain.
368 # including all the intermediate revisions that aren't pertinent for the chain.
369 # This is why once a repository has enabled sparse-read, it becomes required.
369 # This is why once a repository has enabled sparse-read, it becomes required.
370 SPARSEREVLOG_REQUIREMENT = 'sparserevlog'
370 SPARSEREVLOG_REQUIREMENT = 'sparserevlog'
371
371
372 # Functions receiving (ui, features) that extensions can register to impact
372 # Functions receiving (ui, features) that extensions can register to impact
373 # the ability to load repositories with custom requirements. Only
373 # the ability to load repositories with custom requirements. Only
374 # functions defined in loaded extensions are called.
374 # functions defined in loaded extensions are called.
375 #
375 #
376 # The function receives a set of requirement strings that the repository
376 # The function receives a set of requirement strings that the repository
377 # is capable of opening. Functions will typically add elements to the
377 # is capable of opening. Functions will typically add elements to the
378 # set to reflect that the extension knows how to handle those requirements.
378 # set to reflect that the extension knows how to handle those requirements.
379 featuresetupfuncs = set()
379 featuresetupfuncs = set()
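A hedged sketch of how an extension might register one of these callbacks from its uisetup(); the requirement name 'exp-myfeature' is invented for illustration:

from mercurial import localrepo

def featuresetup(ui, supported):
    # advertise that this extension knows how to open repositories
    # carrying the (hypothetical) requirement below
    supported.add(b'exp-myfeature')

def uisetup(ui):
    localrepo.featuresetupfuncs.add(featuresetup)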
380
380
381 def makelocalrepository(baseui, path, intents=None):
381 def makelocalrepository(baseui, path, intents=None):
382 """Create a local repository object.
382 """Create a local repository object.
383
383
384 Given arguments needed to construct a local repository, this function
384 Given arguments needed to construct a local repository, this function
385 performs various early repository loading steps (such as
385 performs various early repository loading steps (such as
386 reading the ``.hg/requires`` and ``.hg/hgrc`` files), validates that
386 reading the ``.hg/requires`` and ``.hg/hgrc`` files), validates that
387 the repository can be opened, derives a type suitable for representing
387 the repository can be opened, derives a type suitable for representing
388 that repository, and returns an instance of it.
388 that repository, and returns an instance of it.
389
389
390 The returned object conforms to the ``repository.completelocalrepository``
390 The returned object conforms to the ``repository.completelocalrepository``
391 interface.
391 interface.
392
392
393 The repository type is derived by calling a series of factory functions
393 The repository type is derived by calling a series of factory functions
394 for each aspect/interface of the final repository. These are defined by
394 for each aspect/interface of the final repository. These are defined by
395 ``REPO_INTERFACES``.
395 ``REPO_INTERFACES``.
396
396
397 Each factory function is called to produce a type implementing a specific
397 Each factory function is called to produce a type implementing a specific
398 interface. The cumulative list of returned types will be combined into a
398 interface. The cumulative list of returned types will be combined into a
399 new type and that type will be instantiated to represent the local
399 new type and that type will be instantiated to represent the local
400 repository.
400 repository.
401
401
402 The factory functions each receive various state that may be consulted
402 The factory functions each receive various state that may be consulted
403 as part of deriving a type.
403 as part of deriving a type.
404
404
405 Extensions should wrap these factory functions to customize repository type
405 Extensions should wrap these factory functions to customize repository type
406 creation. Note that an extension's wrapped function may be called even if
406 creation. Note that an extension's wrapped function may be called even if
407 that extension is not loaded for the repo being constructed. Extensions
407 that extension is not loaded for the repo being constructed. Extensions
408 should check if their ``__name__`` appears in the
408 should check if their ``__name__`` appears in the
409 ``extensionmodulenames`` set passed to the factory function and no-op if
409 ``extensionmodulenames`` set passed to the factory function and no-op if
410 not.
410 not.
411 """
411 """
412 ui = baseui.copy()
412 ui = baseui.copy()
413 # Prevent copying repo configuration.
413 # Prevent copying repo configuration.
414 ui.copy = baseui.copy
414 ui.copy = baseui.copy
415
415
416 # Working directory VFS rooted at repository root.
416 # Working directory VFS rooted at repository root.
417 wdirvfs = vfsmod.vfs(path, expandpath=True, realpath=True)
417 wdirvfs = vfsmod.vfs(path, expandpath=True, realpath=True)
418
418
419 # Main VFS for .hg/ directory.
419 # Main VFS for .hg/ directory.
420 hgpath = wdirvfs.join(b'.hg')
420 hgpath = wdirvfs.join(b'.hg')
421 hgvfs = vfsmod.vfs(hgpath, cacheaudited=True)
421 hgvfs = vfsmod.vfs(hgpath, cacheaudited=True)
422
422
423 # The .hg/ path should exist and should be a directory. All other
423 # The .hg/ path should exist and should be a directory. All other
424 # cases are errors.
424 # cases are errors.
425 if not hgvfs.isdir():
425 if not hgvfs.isdir():
426 try:
426 try:
427 hgvfs.stat()
427 hgvfs.stat()
428 except OSError as e:
428 except OSError as e:
429 if e.errno != errno.ENOENT:
429 if e.errno != errno.ENOENT:
430 raise
430 raise
431
431
432 raise error.RepoError(_(b'repository %s not found') % path)
432 raise error.RepoError(_(b'repository %s not found') % path)
433
433
434 # .hg/requires file contains a newline-delimited list of
434 # .hg/requires file contains a newline-delimited list of
435 # features/capabilities the opener (us) must have in order to use
435 # features/capabilities the opener (us) must have in order to use
436 # the repository. This file was introduced in Mercurial 0.9.2,
436 # the repository. This file was introduced in Mercurial 0.9.2,
437 # which means very old repositories may not have one. We assume
437 # which means very old repositories may not have one. We assume
438 # a missing file translates to no requirements.
438 # a missing file translates to no requirements.
439 try:
439 try:
440 requirements = set(hgvfs.read(b'requires').splitlines())
440 requirements = set(hgvfs.read(b'requires').splitlines())
441 except IOError as e:
441 except IOError as e:
442 if e.errno != errno.ENOENT:
442 if e.errno != errno.ENOENT:
443 raise
443 raise
444 requirements = set()
444 requirements = set()
445
445
446 # The .hg/hgrc file may load extensions or contain config options
446 # The .hg/hgrc file may load extensions or contain config options
447 # that influence repository construction. Attempt to load it and
447 # that influence repository construction. Attempt to load it and
448 # process any new extensions that it may have pulled in.
448 # process any new extensions that it may have pulled in.
449 try:
449 try:
450 ui.readconfig(hgvfs.join(b'hgrc'), root=wdirvfs.base)
450 ui.readconfig(hgvfs.join(b'hgrc'), root=wdirvfs.base)
451 # Run this before extensions.loadall() so extensions can be
451 # Run this before extensions.loadall() so extensions can be
452 # automatically enabled.
452 # automatically enabled.
453 afterhgrcload(ui, wdirvfs, hgvfs, requirements)
453 afterhgrcload(ui, wdirvfs, hgvfs, requirements)
454 except IOError:
454 except IOError:
455 pass
455 pass
456 else:
456 else:
457 extensions.loadall(ui)
457 extensions.loadall(ui)
458
458
459 # Set of module names of extensions loaded for this repository.
459 # Set of module names of extensions loaded for this repository.
460 extensionmodulenames = {m.__name__ for n, m in extensions.extensions(ui)}
460 extensionmodulenames = {m.__name__ for n, m in extensions.extensions(ui)}
461
461
462 supportedrequirements = gathersupportedrequirements(ui)
462 supportedrequirements = gathersupportedrequirements(ui)
463
463
464 # We first validate the requirements are known.
464 # We first validate the requirements are known.
465 ensurerequirementsrecognized(requirements, supportedrequirements)
465 ensurerequirementsrecognized(requirements, supportedrequirements)
466
466
467 # Then we validate that the known set is reasonable to use together.
467 # Then we validate that the known set is reasonable to use together.
468 ensurerequirementscompatible(ui, requirements)
468 ensurerequirementscompatible(ui, requirements)
469
469
470 # TODO there are unhandled edge cases related to opening repositories with
470 # TODO there are unhandled edge cases related to opening repositories with
471 # shared storage. If storage is shared, we should also test for requirements
471 # shared storage. If storage is shared, we should also test for requirements
472 # compatibility in the pointed-to repo. This entails loading the .hg/hgrc in
472 # compatibility in the pointed-to repo. This entails loading the .hg/hgrc in
473 # that repo, as that repo may load extensions needed to open it. This is a
473 # that repo, as that repo may load extensions needed to open it. This is a
474 # bit complicated because we don't want the other hgrc to overwrite settings
474 # bit complicated because we don't want the other hgrc to overwrite settings
475 # in this hgrc.
475 # in this hgrc.
476 #
476 #
477 # This bug is somewhat mitigated by the fact that we copy the .hg/requires
477 # This bug is somewhat mitigated by the fact that we copy the .hg/requires
478 # file when sharing repos. But if a requirement is added after the share is
478 # file when sharing repos. But if a requirement is added after the share is
479 # performed, thereby introducing a new requirement for the opener, we may
479 # performed, thereby introducing a new requirement for the opener, we may
480 # not see that and could encounter a run-time error interacting with
480 # not see that and could encounter a run-time error interacting with
481 # that shared store since it has an unknown-to-us requirement.
481 # that shared store since it has an unknown-to-us requirement.
482
482
483 # At this point, we know we should be capable of opening the repository.
483 # At this point, we know we should be capable of opening the repository.
484 # Now get on with doing that.
484 # Now get on with doing that.
485
485
486 features = set()
486 features = set()
487
487
488 # The "store" part of the repository holds versioned data. How it is
488 # The "store" part of the repository holds versioned data. How it is
489 # accessed is determined by various requirements. The ``shared`` or
489 # accessed is determined by various requirements. The ``shared`` or
490 # ``relshared`` requirements indicate the store lives in the path contained
490 # ``relshared`` requirements indicate the store lives in the path contained
491 # in the ``.hg/sharedpath`` file. This is an absolute path for
491 # in the ``.hg/sharedpath`` file. This is an absolute path for
492 # ``shared`` and relative to ``.hg/`` for ``relshared``.
492 # ``shared`` and relative to ``.hg/`` for ``relshared``.
493 if b'shared' in requirements or b'relshared' in requirements:
493 if b'shared' in requirements or b'relshared' in requirements:
494 sharedpath = hgvfs.read(b'sharedpath').rstrip(b'\n')
494 sharedpath = hgvfs.read(b'sharedpath').rstrip(b'\n')
495 if b'relshared' in requirements:
495 if b'relshared' in requirements:
496 sharedpath = hgvfs.join(sharedpath)
496 sharedpath = hgvfs.join(sharedpath)
497
497
498 sharedvfs = vfsmod.vfs(sharedpath, realpath=True)
498 sharedvfs = vfsmod.vfs(sharedpath, realpath=True)
499
499
500 if not sharedvfs.exists():
500 if not sharedvfs.exists():
501 raise error.RepoError(_(b'.hg/sharedpath points to nonexistent '
501 raise error.RepoError(_(b'.hg/sharedpath points to nonexistent '
502 b'directory %s') % sharedvfs.base)
502 b'directory %s') % sharedvfs.base)
503
503
504 features.add(repository.REPO_FEATURE_SHARED_STORAGE)
504 features.add(repository.REPO_FEATURE_SHARED_STORAGE)
505
505
506 storebasepath = sharedvfs.base
506 storebasepath = sharedvfs.base
507 cachepath = sharedvfs.join(b'cache')
507 cachepath = sharedvfs.join(b'cache')
508 else:
508 else:
509 storebasepath = hgvfs.base
509 storebasepath = hgvfs.base
510 cachepath = hgvfs.join(b'cache')
510 cachepath = hgvfs.join(b'cache')
511
511
512 # The store has changed over time and the exact layout is dictated by
512 # The store has changed over time and the exact layout is dictated by
513 # requirements. The store interface abstracts differences across all
513 # requirements. The store interface abstracts differences across all
514 # of them.
514 # of them.
515 store = makestore(requirements, storebasepath,
515 store = makestore(requirements, storebasepath,
516 lambda base: vfsmod.vfs(base, cacheaudited=True))
516 lambda base: vfsmod.vfs(base, cacheaudited=True))
517 hgvfs.createmode = store.createmode
517 hgvfs.createmode = store.createmode
518
518
519 storevfs = store.vfs
519 storevfs = store.vfs
520 storevfs.options = resolvestorevfsoptions(ui, requirements, features)
520 storevfs.options = resolvestorevfsoptions(ui, requirements, features)
521
521
522 # The cache vfs is used to manage cache files.
522 # The cache vfs is used to manage cache files.
523 cachevfs = vfsmod.vfs(cachepath, cacheaudited=True)
523 cachevfs = vfsmod.vfs(cachepath, cacheaudited=True)
524 cachevfs.createmode = store.createmode
524 cachevfs.createmode = store.createmode
525
525
526 # Now resolve the type for the repository object. We do this by repeatedly
526 # Now resolve the type for the repository object. We do this by repeatedly
527 # calling a factory function to produce types for specific aspects of the
527 # calling a factory function to produce types for specific aspects of the
528 # repo's operation. The aggregate returned types are used as base classes
528 # repo's operation. The aggregate returned types are used as base classes
529 # for a dynamically-derived type, which will represent our new repository.
529 # for a dynamically-derived type, which will represent our new repository.
530
530
531 bases = []
531 bases = []
532 extrastate = {}
532 extrastate = {}
533
533
534 for iface, fn in REPO_INTERFACES:
534 for iface, fn in REPO_INTERFACES:
535 # We pass all potentially useful state to give extensions tons of
535 # We pass all potentially useful state to give extensions tons of
536 # flexibility.
536 # flexibility.
537 typ = fn()(ui=ui,
537 typ = fn()(ui=ui,
538 intents=intents,
538 intents=intents,
539 requirements=requirements,
539 requirements=requirements,
540 features=features,
540 features=features,
541 wdirvfs=wdirvfs,
541 wdirvfs=wdirvfs,
542 hgvfs=hgvfs,
542 hgvfs=hgvfs,
543 store=store,
543 store=store,
544 storevfs=storevfs,
544 storevfs=storevfs,
545 storeoptions=storevfs.options,
545 storeoptions=storevfs.options,
546 cachevfs=cachevfs,
546 cachevfs=cachevfs,
547 extensionmodulenames=extensionmodulenames,
547 extensionmodulenames=extensionmodulenames,
548 extrastate=extrastate,
548 extrastate=extrastate,
549 baseclasses=bases)
549 baseclasses=bases)
550
550
551 if not isinstance(typ, type):
551 if not isinstance(typ, type):
552 raise error.ProgrammingError('unable to construct type for %s' %
552 raise error.ProgrammingError('unable to construct type for %s' %
553 iface)
553 iface)
554
554
555 bases.append(typ)
555 bases.append(typ)
556
556
557 # type() allows you to use characters in type names that wouldn't be
557 # type() allows you to use characters in type names that wouldn't be
558 # recognized as Python symbols in source code. We abuse that to add
558 # recognized as Python symbols in source code. We abuse that to add
559 # rich information about our constructed repo.
559 # rich information about our constructed repo.
560 name = pycompat.sysstr(b'derivedrepo:%s<%s>' % (
560 name = pycompat.sysstr(b'derivedrepo:%s<%s>' % (
561 wdirvfs.base,
561 wdirvfs.base,
562 b','.join(sorted(requirements))))
562 b','.join(sorted(requirements))))
563
563
564 cls = type(name, tuple(bases), {})
564 cls = type(name, tuple(bases), {})
565
565
566 return cls(
566 return cls(
567 baseui=baseui,
567 baseui=baseui,
568 ui=ui,
568 ui=ui,
569 origroot=path,
569 origroot=path,
570 wdirvfs=wdirvfs,
570 wdirvfs=wdirvfs,
571 hgvfs=hgvfs,
571 hgvfs=hgvfs,
572 requirements=requirements,
572 requirements=requirements,
573 supportedrequirements=supportedrequirements,
573 supportedrequirements=supportedrequirements,
574 sharedpath=storebasepath,
574 sharedpath=storebasepath,
575 store=store,
575 store=store,
576 cachevfs=cachevfs,
576 cachevfs=cachevfs,
577 features=features,
577 features=features,
578 intents=intents)
578 intents=intents)
579
579
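A minimal usage sketch for the factory above; in practice most callers reach it indirectly through hg.repository(), and the repository path here is hypothetical:

from mercurial import hg, ui as uimod

ui = uimod.ui.load()
repo = hg.repository(ui, b'/path/to/existing/repo')
ui.status(b'requirements: %s\n' % b', '.join(sorted(repo.requirements)))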
580 def afterhgrcload(ui, wdirvfs, hgvfs, requirements):
580 def afterhgrcload(ui, wdirvfs, hgvfs, requirements):
581 """Perform additional actions after .hg/hgrc is loaded.
581 """Perform additional actions after .hg/hgrc is loaded.
582
582
583 This function is called during repository loading immediately after
583 This function is called during repository loading immediately after
584 the .hg/hgrc file is loaded and before per-repo extensions are loaded.
584 the .hg/hgrc file is loaded and before per-repo extensions are loaded.
585
585
586 The function can be used to validate configs, automatically add
586 The function can be used to validate configs, automatically add
587 options (including extensions) based on requirements, etc.
587 options (including extensions) based on requirements, etc.
588 """
588 """
589
589
590 # Map of requirements to list of extensions to load automatically when
590 # Map of requirements to list of extensions to load automatically when
591 # requirement is present.
591 # requirement is present.
592 autoextensions = {
592 autoextensions = {
593 b'largefiles': [b'largefiles'],
593 b'largefiles': [b'largefiles'],
594 b'lfs': [b'lfs'],
594 b'lfs': [b'lfs'],
595 }
595 }
596
596
597 for requirement, names in sorted(autoextensions.items()):
597 for requirement, names in sorted(autoextensions.items()):
598 if requirement not in requirements:
598 if requirement not in requirements:
599 continue
599 continue
600
600
601 for name in names:
601 for name in names:
602 if not ui.hasconfig(b'extensions', name):
602 if not ui.hasconfig(b'extensions', name):
603 ui.setconfig(b'extensions', name, b'', source='autoload')
603 ui.setconfig(b'extensions', name, b'', source='autoload')
604
604
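A hedged sketch of what the auto-loading above amounts to for a repository whose requirements include 'lfs'; the paths are placeholders, and the vfs objects are only needed to satisfy the signature:

from mercurial import localrepo, ui as uimod, vfs as vfsmod

ui = uimod.ui.load()
wdirvfs = vfsmod.vfs(b'/path/to/repo')
hgvfs = vfsmod.vfs(b'/path/to/repo/.hg')
localrepo.afterhgrcload(ui, wdirvfs, hgvfs, {b'lfs'})
# unless 'extensions.lfs' was configured explicitly, the lfs extension
# is now scheduled to be loaded for this repository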
605 def gathersupportedrequirements(ui):
605 def gathersupportedrequirements(ui):
606 """Determine the complete set of recognized requirements."""
606 """Determine the complete set of recognized requirements."""
607 # Start with all requirements supported by this file.
607 # Start with all requirements supported by this file.
608 supported = set(localrepository._basesupported)
608 supported = set(localrepository._basesupported)
609
609
610 # Execute ``featuresetupfuncs`` entries if they belong to an extension
610 # Execute ``featuresetupfuncs`` entries if they belong to an extension
611 # relevant to this ui instance.
611 # relevant to this ui instance.
612 modules = {m.__name__ for n, m in extensions.extensions(ui)}
612 modules = {m.__name__ for n, m in extensions.extensions(ui)}
613
613
614 for fn in featuresetupfuncs:
614 for fn in featuresetupfuncs:
615 if fn.__module__ in modules:
615 if fn.__module__ in modules:
616 fn(ui, supported)
616 fn(ui, supported)
617
617
618 # Add derived requirements from registered compression engines.
618 # Add derived requirements from registered compression engines.
619 for name in util.compengines:
619 for name in util.compengines:
620 engine = util.compengines[name]
620 engine = util.compengines[name]
621 if engine.revlogheader():
621 if engine.revlogheader():
622 supported.add(b'exp-compression-%s' % name)
622 supported.add(b'exp-compression-%s' % name)
623
623
624 return supported
624 return supported
625
625
626 def ensurerequirementsrecognized(requirements, supported):
626 def ensurerequirementsrecognized(requirements, supported):
627 """Validate that a set of local requirements is recognized.
627 """Validate that a set of local requirements is recognized.
628
628
629 Receives a set of requirements. Raises an ``error.RepoError`` if there
629 Receives a set of requirements. Raises an ``error.RepoError`` if there
630 exists any requirement in that set that currently loaded code doesn't
630 exists any requirement in that set that currently loaded code doesn't
631 recognize.
631 recognize.
632
632
633 Returns a set of supported requirements.
633 Returns a set of supported requirements.
634 """
634 """
635 missing = set()
635 missing = set()
636
636
637 for requirement in requirements:
637 for requirement in requirements:
638 if requirement in supported:
638 if requirement in supported:
639 continue
639 continue
640
640
641 if not requirement or not requirement[0:1].isalnum():
641 if not requirement or not requirement[0:1].isalnum():
642 raise error.RequirementError(_(b'.hg/requires file is corrupt'))
642 raise error.RequirementError(_(b'.hg/requires file is corrupt'))
643
643
644 missing.add(requirement)
644 missing.add(requirement)
645
645
646 if missing:
646 if missing:
647 raise error.RequirementError(
647 raise error.RequirementError(
648 _(b'repository requires features unknown to this Mercurial: %s') %
648 _(b'repository requires features unknown to this Mercurial: %s') %
649 b' '.join(sorted(missing)),
649 b' '.join(sorted(missing)),
650 hint=_(b'see https://mercurial-scm.org/wiki/MissingRequirement '
650 hint=_(b'see https://mercurial-scm.org/wiki/MissingRequirement '
651 b'for more information'))
651 b'for more information'))
652
652
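A hedged sketch of exercising the check above against the supported set gathered by gathersupportedrequirements(); the second requirement string is deliberately bogus so the error path is taken:

from mercurial import error, localrepo, ui as uimod

ui = uimod.ui.load()
supported = localrepo.gathersupportedrequirements(ui)
try:
    localrepo.ensurerequirementsrecognized(
        {b'revlogv1', b'not-a-real-feature'}, supported)
except error.RequirementError:
    ui.warn(b'repository needs features this Mercurial does not support\n')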
653 def ensurerequirementscompatible(ui, requirements):
653 def ensurerequirementscompatible(ui, requirements):
654 """Validates that a set of recognized requirements is mutually compatible.
654 """Validates that a set of recognized requirements is mutually compatible.
655
655
656 Some requirements may not be compatible with others or require
656 Some requirements may not be compatible with others or require
657 config options that aren't enabled. This function is called during
657 config options that aren't enabled. This function is called during
658 repository opening to ensure that the set of requirements needed
658 repository opening to ensure that the set of requirements needed
659 to open a repository is sane and compatible with config options.
659 to open a repository is sane and compatible with config options.
660
660
661 Extensions can monkeypatch this function to perform additional
661 Extensions can monkeypatch this function to perform additional
662 checking.
662 checking.
663
663
664 ``error.RepoError`` should be raised on failure.
664 ``error.RepoError`` should be raised on failure.
665 """
665 """
666 if b'exp-sparse' in requirements and not sparse.enabled:
666 if b'exp-sparse' in requirements and not sparse.enabled:
667 raise error.RepoError(_(b'repository is using sparse feature but '
667 raise error.RepoError(_(b'repository is using sparse feature but '
668 b'sparse is not enabled; enable the '
668 b'sparse is not enabled; enable the '
669 b'"sparse" extensions to access'))
669 b'"sparse" extensions to access'))
670
670
671 def makestore(requirements, path, vfstype):
671 def makestore(requirements, path, vfstype):
672 """Construct a storage object for a repository."""
672 """Construct a storage object for a repository."""
673 if b'store' in requirements:
673 if b'store' in requirements:
674 if b'fncache' in requirements:
674 if b'fncache' in requirements:
675 return storemod.fncachestore(path, vfstype,
675 return storemod.fncachestore(path, vfstype,
676 b'dotencode' in requirements)
676 b'dotencode' in requirements)
677
677
678 return storemod.encodedstore(path, vfstype)
678 return storemod.encodedstore(path, vfstype)
679
679
680 return storemod.basicstore(path, vfstype)
680 return storemod.basicstore(path, vfstype)
681
681
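A sketch of calling the store factory directly, assuming the vfs module imported at the top of this file; the path is a placeholder:

from mercurial import localrepo, vfs as vfsmod

store = localrepo.makestore(
    {b'store', b'fncache', b'dotencode'},
    b'/path/to/repo/.hg',
    lambda base: vfsmod.vfs(base, cacheaudited=True))
# with 'store' and 'fncache' present this yields a fncachestore;
# dropping 'store' would fall back to the legacy basicstore layout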
682 def resolvestorevfsoptions(ui, requirements, features):
682 def resolvestorevfsoptions(ui, requirements, features):
683 """Resolve the options to pass to the store vfs opener.
683 """Resolve the options to pass to the store vfs opener.
684
684
685 The returned dict is used to influence behavior of the storage layer.
685 The returned dict is used to influence behavior of the storage layer.
686 """
686 """
687 options = {}
687 options = {}
688
688
689 if b'treemanifest' in requirements:
689 if b'treemanifest' in requirements:
690 options[b'treemanifest'] = True
690 options[b'treemanifest'] = True
691
691
692 # experimental config: format.manifestcachesize
692 # experimental config: format.manifestcachesize
693 manifestcachesize = ui.configint(b'format', b'manifestcachesize')
693 manifestcachesize = ui.configint(b'format', b'manifestcachesize')
694 if manifestcachesize is not None:
694 if manifestcachesize is not None:
695 options[b'manifestcachesize'] = manifestcachesize
695 options[b'manifestcachesize'] = manifestcachesize
696
696
697 # In the absence of another requirement superseding a revlog-related
697 # In the absence of another requirement superseding a revlog-related
698 # requirement, we have to assume the repo is using revlog version 0.
698 # requirement, we have to assume the repo is using revlog version 0.
699 # This revlog format is super old and we don't bother trying to parse
699 # This revlog format is super old and we don't bother trying to parse
700 # opener options for it because those options wouldn't do anything
700 # opener options for it because those options wouldn't do anything
701 # meaningful on such old repos.
701 # meaningful on such old repos.
702 if b'revlogv1' in requirements or REVLOGV2_REQUIREMENT in requirements:
702 if b'revlogv1' in requirements or REVLOGV2_REQUIREMENT in requirements:
703 options.update(resolverevlogstorevfsoptions(ui, requirements, features))
703 options.update(resolverevlogstorevfsoptions(ui, requirements, features))
704
704
705 return options
705 return options
706
706
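A hedged sketch of resolving options for a typical revlogv1 repository using only a freshly loaded ui, so all config values are defaults:

from mercurial import localrepo, ui as uimod

ui = uimod.ui.load()
opts = localrepo.resolvestorevfsoptions(
    ui, {b'revlogv1', b'generaldelta', b'store'}, set())
# both flags come straight from the requirements passed in
assert opts[b'revlogv1'] and opts[b'generaldelta']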
707 def resolverevlogstorevfsoptions(ui, requirements, features):
707 def resolverevlogstorevfsoptions(ui, requirements, features):
708 """Resolve opener options specific to revlogs."""
708 """Resolve opener options specific to revlogs."""
709
709
710 options = {}
710 options = {}
711
711
712 if b'revlogv1' in requirements:
712 if b'revlogv1' in requirements:
713 options[b'revlogv1'] = True
713 options[b'revlogv1'] = True
714 if REVLOGV2_REQUIREMENT in requirements:
714 if REVLOGV2_REQUIREMENT in requirements:
715 options[b'revlogv2'] = True
715 options[b'revlogv2'] = True
716
716
717 if b'generaldelta' in requirements:
717 if b'generaldelta' in requirements:
718 options[b'generaldelta'] = True
718 options[b'generaldelta'] = True
719
719
720 # experimental config: format.chunkcachesize
720 # experimental config: format.chunkcachesize
721 chunkcachesize = ui.configint(b'format', b'chunkcachesize')
721 chunkcachesize = ui.configint(b'format', b'chunkcachesize')
722 if chunkcachesize is not None:
722 if chunkcachesize is not None:
723 options[b'chunkcachesize'] = chunkcachesize
723 options[b'chunkcachesize'] = chunkcachesize
724
724
725 deltabothparents = ui.configbool(b'storage',
725 deltabothparents = ui.configbool(b'storage',
726 b'revlog.optimize-delta-parent-choice')
726 b'revlog.optimize-delta-parent-choice')
727 options[b'deltabothparents'] = deltabothparents
727 options[b'deltabothparents'] = deltabothparents
728
728
729 options[b'lazydeltabase'] = not scmutil.gddeltaconfig(ui)
729 options[b'lazydeltabase'] = not scmutil.gddeltaconfig(ui)
730
730
731 chainspan = ui.configbytes(b'experimental', b'maxdeltachainspan')
731 chainspan = ui.configbytes(b'experimental', b'maxdeltachainspan')
732 if 0 <= chainspan:
732 if 0 <= chainspan:
733 options[b'maxdeltachainspan'] = chainspan
733 options[b'maxdeltachainspan'] = chainspan
734
734
735 mmapindexthreshold = ui.configbytes(b'experimental',
735 mmapindexthreshold = ui.configbytes(b'experimental',
736 b'mmapindexthreshold')
736 b'mmapindexthreshold')
737 if mmapindexthreshold is not None:
737 if mmapindexthreshold is not None:
738 options[b'mmapindexthreshold'] = mmapindexthreshold
738 options[b'mmapindexthreshold'] = mmapindexthreshold
739
739
740 withsparseread = ui.configbool(b'experimental', b'sparse-read')
740 withsparseread = ui.configbool(b'experimental', b'sparse-read')
741 srdensitythres = float(ui.config(b'experimental',
741 srdensitythres = float(ui.config(b'experimental',
742 b'sparse-read.density-threshold'))
742 b'sparse-read.density-threshold'))
743 srmingapsize = ui.configbytes(b'experimental',
743 srmingapsize = ui.configbytes(b'experimental',
744 b'sparse-read.min-gap-size')
744 b'sparse-read.min-gap-size')
745 options[b'with-sparse-read'] = withsparseread
745 options[b'with-sparse-read'] = withsparseread
746 options[b'sparse-read-density-threshold'] = srdensitythres
746 options[b'sparse-read-density-threshold'] = srdensitythres
747 options[b'sparse-read-min-gap-size'] = srmingapsize
747 options[b'sparse-read-min-gap-size'] = srmingapsize
748
748
749 sparserevlog = SPARSEREVLOG_REQUIREMENT in requirements
749 sparserevlog = SPARSEREVLOG_REQUIREMENT in requirements
750 options[b'sparse-revlog'] = sparserevlog
750 options[b'sparse-revlog'] = sparserevlog
751 if sparserevlog:
751 if sparserevlog:
752 options[b'generaldelta'] = True
752 options[b'generaldelta'] = True
753
753
754 maxchainlen = None
754 maxchainlen = None
755 if sparserevlog:
755 if sparserevlog:
756 maxchainlen = revlogconst.SPARSE_REVLOG_MAX_CHAIN_LENGTH
756 maxchainlen = revlogconst.SPARSE_REVLOG_MAX_CHAIN_LENGTH
757 # experimental config: format.maxchainlen
757 # experimental config: format.maxchainlen
758 maxchainlen = ui.configint(b'format', b'maxchainlen', maxchainlen)
758 maxchainlen = ui.configint(b'format', b'maxchainlen', maxchainlen)
759 if maxchainlen is not None:
759 if maxchainlen is not None:
760 options[b'maxchainlen'] = maxchainlen
760 options[b'maxchainlen'] = maxchainlen
761
761
762 for r in requirements:
762 for r in requirements:
763 if r.startswith(b'exp-compression-'):
763 if r.startswith(b'exp-compression-'):
764 options[b'compengine'] = r[len(b'exp-compression-'):]
764 options[b'compengine'] = r[len(b'exp-compression-'):]
765
765
766 if repository.NARROW_REQUIREMENT in requirements:
766 if repository.NARROW_REQUIREMENT in requirements:
767 options[b'enableellipsis'] = True
767 options[b'enableellipsis'] = True
768
768
769 return options
769 return options
770
770
771 def makemain(**kwargs):
771 def makemain(**kwargs):
772 """Produce a type conforming to ``ilocalrepositorymain``."""
772 """Produce a type conforming to ``ilocalrepositorymain``."""
773 return localrepository
773 return localrepository
774
774
775 @interfaceutil.implementer(repository.ilocalrepositoryfilestorage)
775 @interfaceutil.implementer(repository.ilocalrepositoryfilestorage)
776 class revlogfilestorage(object):
776 class revlogfilestorage(object):
777 """File storage when using revlogs."""
777 """File storage when using revlogs."""
778
778
779 def file(self, path):
779 def file(self, path):
780 if path[0] == b'/':
780 if path[0] == b'/':
781 path = path[1:]
781 path = path[1:]
782
782
783 return filelog.filelog(self.svfs, path)
783 return filelog.filelog(self.svfs, path)
784
784
785 @interfaceutil.implementer(repository.ilocalrepositoryfilestorage)
785 @interfaceutil.implementer(repository.ilocalrepositoryfilestorage)
786 class revlognarrowfilestorage(object):
786 class revlognarrowfilestorage(object):
787 """File storage when using revlogs and narrow files."""
787 """File storage when using revlogs and narrow files."""
788
788
789 def file(self, path):
789 def file(self, path):
790 if path[0] == b'/':
790 if path[0] == b'/':
791 path = path[1:]
791 path = path[1:]
792
792
793 return filelog.narrowfilelog(self.svfs, path, self.narrowmatch())
793 return filelog.narrowfilelog(self.svfs, path, self.narrowmatch())
794
794
795 def makefilestorage(requirements, features, **kwargs):
795 def makefilestorage(requirements, features, **kwargs):
796 """Produce a type conforming to ``ilocalrepositoryfilestorage``."""
796 """Produce a type conforming to ``ilocalrepositoryfilestorage``."""
797 features.add(repository.REPO_FEATURE_REVLOG_FILE_STORAGE)
797 features.add(repository.REPO_FEATURE_REVLOG_FILE_STORAGE)
798 features.add(repository.REPO_FEATURE_STREAM_CLONE)
798 features.add(repository.REPO_FEATURE_STREAM_CLONE)
799
799
800 if repository.NARROW_REQUIREMENT in requirements:
800 if repository.NARROW_REQUIREMENT in requirements:
801 return revlognarrowfilestorage
801 return revlognarrowfilestorage
802 else:
802 else:
803 return revlogfilestorage
803 return revlogfilestorage
804
804
805 # List of repository interfaces and factory functions for them. Each
805 # List of repository interfaces and factory functions for them. Each
806 # will be called in order during ``makelocalrepository()`` to iteratively
806 # will be called in order during ``makelocalrepository()`` to iteratively
807 # derive the final type for a local repository instance. We capture the
807 # derive the final type for a local repository instance. We capture the
808 # function as a lambda so we don't hold a reference and the module-level
808 # function as a lambda so we don't hold a reference and the module-level
809 # functions can be wrapped.
809 # functions can be wrapped.
810 REPO_INTERFACES = [
810 REPO_INTERFACES = [
811 (repository.ilocalrepositorymain, lambda: makemain),
811 (repository.ilocalrepositorymain, lambda: makemain),
812 (repository.ilocalrepositoryfilestorage, lambda: makefilestorage),
812 (repository.ilocalrepositoryfilestorage, lambda: makefilestorage),
813 ]
813 ]
814
814
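Because the entries above are resolved through lambdas at call time, an extension can wrap the module-level factory functions with extensions.wrapfunction(). A hedged sketch; the mixin class is purely illustrative:

from mercurial import extensions, localrepo

class _extrastorage(object):
    """illustrative mixin layered on top of whichever storage type is chosen"""

def wrappedmakefilestorage(orig, requirements, features, **kwargs):
    typ = orig(requirements, features, **kwargs)
    return type('extrafilestorage', (_extrastorage, typ), {})

def uisetup(ui):
    extensions.wrapfunction(localrepo, 'makefilestorage',
                            wrappedmakefilestorage)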
815 @interfaceutil.implementer(repository.ilocalrepositorymain)
815 @interfaceutil.implementer(repository.ilocalrepositorymain)
816 class localrepository(object):
816 class localrepository(object):
817 """Main class for representing local repositories.
817 """Main class for representing local repositories.
818
818
819 All local repositories are instances of this class.
819 All local repositories are instances of this class.
820
820
821 Constructed on its own, instances of this class are not usable as
821 Constructed on its own, instances of this class are not usable as
822 repository objects. To obtain a usable repository object, call
822 repository objects. To obtain a usable repository object, call
823 ``hg.repository()``, ``localrepo.instance()``, or
823 ``hg.repository()``, ``localrepo.instance()``, or
824 ``localrepo.makelocalrepository()``. The latter is the lowest-level.
824 ``localrepo.makelocalrepository()``. The latter is the lowest-level.
825 ``instance()`` adds support for creating new repositories.
825 ``instance()`` adds support for creating new repositories.
826 ``hg.repository()`` adds more extension integration, including calling
826 ``hg.repository()`` adds more extension integration, including calling
827 ``reposetup()``. Generally speaking, ``hg.repository()`` should be
827 ``reposetup()``. Generally speaking, ``hg.repository()`` should be
828 used.
828 used.
829 """
829 """
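# Editorial example (the path is a placeholder, not from the original source):
# the recommended way to obtain a usable repository object, as described in
# the docstring above.

from mercurial import hg, ui as uimod

ui = uimod.ui.load()
repo = hg.repository(ui, b'/path/to/repo')  # runs extension reposetup() hooks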
830
830
831 # obsolete experimental requirements:
831 # obsolete experimental requirements:
832 # - manifestv2: An experimental new manifest format that allowed
832 # - manifestv2: An experimental new manifest format that allowed
833 # for stem compression of long paths. Experiment ended up not
833 # for stem compression of long paths. Experiment ended up not
834 # being successful (repository sizes went up due to worse delta
834 # being successful (repository sizes went up due to worse delta
835 # chains), and the code was deleted in 4.6.
835 # chains), and the code was deleted in 4.6.
836 supportedformats = {
836 supportedformats = {
837 'revlogv1',
837 'revlogv1',
838 'generaldelta',
838 'generaldelta',
839 'treemanifest',
839 'treemanifest',
840 REVLOGV2_REQUIREMENT,
840 REVLOGV2_REQUIREMENT,
841 SPARSEREVLOG_REQUIREMENT,
841 SPARSEREVLOG_REQUIREMENT,
842 }
842 }
843 _basesupported = supportedformats | {
843 _basesupported = supportedformats | {
844 'store',
844 'store',
845 'fncache',
845 'fncache',
846 'shared',
846 'shared',
847 'relshared',
847 'relshared',
848 'dotencode',
848 'dotencode',
849 'exp-sparse',
849 'exp-sparse',
850 'internal-phase'
850 'internal-phase'
851 }
851 }
852
852
853 # list of prefixes for files which can be written without 'wlock'
853 # list of prefixes for files which can be written without 'wlock'
854 # Extensions should extend this list when needed
854 # Extensions should extend this list when needed
855 _wlockfreeprefix = {
855 _wlockfreeprefix = {
856 # We might consider requiring 'wlock' for the next
856 # We might consider requiring 'wlock' for the next
857 # two, but pretty much all the existing code assumes
857 # two, but pretty much all the existing code assumes
858 # wlock is not needed so we keep them excluded for
858 # wlock is not needed so we keep them excluded for
859 # now.
859 # now.
860 'hgrc',
860 'hgrc',
861 'requires',
861 'requires',
862 # XXX cache is a complicated business; someone
862 # XXX cache is a complicated business; someone
863 # should investigate this in depth at some point
863 # should investigate this in depth at some point
864 'cache/',
864 'cache/',
865 # XXX shouldn't the dirstate be covered by the wlock?
865 # XXX shouldn't the dirstate be covered by the wlock?
866 'dirstate',
866 'dirstate',
867 # XXX bisect was still a bit too messy at the time
867 # XXX bisect was still a bit too messy at the time
868 # this changeset was introduced. Someone should fix
868 # this changeset was introduced. Someone should fix
869 # the remaining bit and drop this line
869 # the remaining bit and drop this line
870 'bisect.state',
870 'bisect.state',
871 }
871 }
872
872
873 def __init__(self, baseui, ui, origroot, wdirvfs, hgvfs, requirements,
873 def __init__(self, baseui, ui, origroot, wdirvfs, hgvfs, requirements,
874 supportedrequirements, sharedpath, store, cachevfs,
874 supportedrequirements, sharedpath, store, cachevfs,
875 features, intents=None):
875 features, intents=None):
876 """Create a new local repository instance.
876 """Create a new local repository instance.
877
877
878 Most callers should use ``hg.repository()``, ``localrepo.instance()``,
878 Most callers should use ``hg.repository()``, ``localrepo.instance()``,
879 or ``localrepo.makelocalrepository()`` for obtaining a new repository
879 or ``localrepo.makelocalrepository()`` for obtaining a new repository
880 object.
880 object.
881
881
882 Arguments:
882 Arguments:
883
883
884 baseui
884 baseui
885 ``ui.ui`` instance that ``ui`` argument was based off of.
885 ``ui.ui`` instance that ``ui`` argument was based off of.
886
886
887 ui
887 ui
888 ``ui.ui`` instance for use by the repository.
888 ``ui.ui`` instance for use by the repository.
889
889
890 origroot
890 origroot
891 ``bytes`` path to working directory root of this repository.
891 ``bytes`` path to working directory root of this repository.
892
892
893 wdirvfs
893 wdirvfs
894 ``vfs.vfs`` rooted at the working directory.
894 ``vfs.vfs`` rooted at the working directory.
895
895
896 hgvfs
896 hgvfs
897 ``vfs.vfs`` rooted at .hg/
897 ``vfs.vfs`` rooted at .hg/
898
898
899 requirements
899 requirements
900 ``set`` of bytestrings representing repository opening requirements.
900 ``set`` of bytestrings representing repository opening requirements.
901
901
902 supportedrequirements
902 supportedrequirements
903 ``set`` of bytestrings representing repository requirements that we
903 ``set`` of bytestrings representing repository requirements that we
904 know how to open. May be a superset of ``requirements``.
904 know how to open. May be a superset of ``requirements``.
905
905
906 sharedpath
906 sharedpath
907 ``bytes`` defining the path to the storage base directory. Points to a
907 ``bytes`` defining the path to the storage base directory. Points to a
908 ``.hg/`` directory somewhere.
908 ``.hg/`` directory somewhere.
909
909
910 store
910 store
911 ``store.basicstore`` (or derived) instance providing access to
911 ``store.basicstore`` (or derived) instance providing access to
912 versioned storage.
912 versioned storage.
913
913
914 cachevfs
914 cachevfs
915 ``vfs.vfs`` used for cache files.
915 ``vfs.vfs`` used for cache files.
916
916
917 features
917 features
918 ``set`` of bytestrings defining features/capabilities of this
918 ``set`` of bytestrings defining features/capabilities of this
919 instance.
919 instance.
920
920
921 intents
921 intents
922 ``set`` of system strings indicating what this repo will be used
922 ``set`` of system strings indicating what this repo will be used
923 for.
923 for.
924 """
924 """
925 self.baseui = baseui
925 self.baseui = baseui
926 self.ui = ui
926 self.ui = ui
927 self.origroot = origroot
927 self.origroot = origroot
928 # vfs rooted at working directory.
928 # vfs rooted at working directory.
929 self.wvfs = wdirvfs
929 self.wvfs = wdirvfs
930 self.root = wdirvfs.base
930 self.root = wdirvfs.base
931 # vfs rooted at .hg/. Used to access most non-store paths.
931 # vfs rooted at .hg/. Used to access most non-store paths.
932 self.vfs = hgvfs
932 self.vfs = hgvfs
933 self.path = hgvfs.base
933 self.path = hgvfs.base
934 self.requirements = requirements
934 self.requirements = requirements
935 self.supported = supportedrequirements
935 self.supported = supportedrequirements
936 self.sharedpath = sharedpath
936 self.sharedpath = sharedpath
937 self.store = store
937 self.store = store
938 self.cachevfs = cachevfs
938 self.cachevfs = cachevfs
939 self.features = features
939 self.features = features
940
940
941 self.filtername = None
941 self.filtername = None
942
942
943 if (self.ui.configbool('devel', 'all-warnings') or
943 if (self.ui.configbool('devel', 'all-warnings') or
944 self.ui.configbool('devel', 'check-locks')):
944 self.ui.configbool('devel', 'check-locks')):
945 self.vfs.audit = self._getvfsward(self.vfs.audit)
945 self.vfs.audit = self._getvfsward(self.vfs.audit)
946 # A list of callbacks to shape the phase if no data were found.
946 # A list of callbacks to shape the phase if no data were found.
947 # Callbacks are in the form: func(repo, roots) --> processed root.
947 # Callbacks are in the form: func(repo, roots) --> processed root.
948 # This list is to be filled by extensions during repo setup.
948 # This list is to be filled by extensions during repo setup.
949 self._phasedefaults = []
949 self._phasedefaults = []
950
950
951 color.setup(self.ui)
951 color.setup(self.ui)
952
952
953 self.spath = self.store.path
953 self.spath = self.store.path
954 self.svfs = self.store.vfs
954 self.svfs = self.store.vfs
955 self.sjoin = self.store.join
955 self.sjoin = self.store.join
956 if (self.ui.configbool('devel', 'all-warnings') or
956 if (self.ui.configbool('devel', 'all-warnings') or
957 self.ui.configbool('devel', 'check-locks')):
957 self.ui.configbool('devel', 'check-locks')):
958 if util.safehasattr(self.svfs, 'vfs'): # this is filtervfs
958 if util.safehasattr(self.svfs, 'vfs'): # this is filtervfs
959 self.svfs.vfs.audit = self._getsvfsward(self.svfs.vfs.audit)
959 self.svfs.vfs.audit = self._getsvfsward(self.svfs.vfs.audit)
960 else: # standard vfs
960 else: # standard vfs
961 self.svfs.audit = self._getsvfsward(self.svfs.audit)
961 self.svfs.audit = self._getsvfsward(self.svfs.audit)
962
962
963 self._dirstatevalidatewarned = False
963 self._dirstatevalidatewarned = False
964
964
965 self._branchcaches = {}
965 self._branchcaches = {}
966 self._revbranchcache = None
966 self._revbranchcache = None
967 self._filterpats = {}
967 self._filterpats = {}
968 self._datafilters = {}
968 self._datafilters = {}
969 self._transref = self._lockref = self._wlockref = None
969 self._transref = self._lockref = self._wlockref = None
970
970
971 # A cache for various files under .hg/ that tracks file changes,
971 # A cache for various files under .hg/ that tracks file changes,
972 # (used by the filecache decorator)
972 # (used by the filecache decorator)
973 #
973 #
974 # Maps a property name to its util.filecacheentry
974 # Maps a property name to its util.filecacheentry
975 self._filecache = {}
975 self._filecache = {}
976
976
977 # holds sets of revisions to be filtered
977 # holds sets of revisions to be filtered
978 # should be cleared when something might have changed the filter value:
978 # should be cleared when something might have changed the filter value:
979 # - new changesets,
979 # - new changesets,
980 # - phase change,
980 # - phase change,
981 # - new obsolescence marker,
981 # - new obsolescence marker,
982 # - working directory parent change,
982 # - working directory parent change,
983 # - bookmark changes
983 # - bookmark changes
984 self.filteredrevcache = {}
984 self.filteredrevcache = {}
985
985
986 # post-dirstate-status hooks
986 # post-dirstate-status hooks
987 self._postdsstatus = []
987 self._postdsstatus = []
988
988
989 # generic mapping between names and nodes
989 # generic mapping between names and nodes
990 self.names = namespaces.namespaces()
990 self.names = namespaces.namespaces()
991
991
992 # Key to signature value.
992 # Key to signature value.
993 self._sparsesignaturecache = {}
993 self._sparsesignaturecache = {}
994 # Signature to cached matcher instance.
994 # Signature to cached matcher instance.
995 self._sparsematchercache = {}
995 self._sparsematchercache = {}
996
996
997 def _getvfsward(self, origfunc):
997 def _getvfsward(self, origfunc):
998 """build a ward for self.vfs"""
998 """build a ward for self.vfs"""
999 rref = weakref.ref(self)
999 rref = weakref.ref(self)
1000 def checkvfs(path, mode=None):
1000 def checkvfs(path, mode=None):
1001 ret = origfunc(path, mode=mode)
1001 ret = origfunc(path, mode=mode)
1002 repo = rref()
1002 repo = rref()
1003 if (repo is None
1003 if (repo is None
1004 or not util.safehasattr(repo, '_wlockref')
1004 or not util.safehasattr(repo, '_wlockref')
1005 or not util.safehasattr(repo, '_lockref')):
1005 or not util.safehasattr(repo, '_lockref')):
1006 return
1006 return
1007 if mode in (None, 'r', 'rb'):
1007 if mode in (None, 'r', 'rb'):
1008 return
1008 return
1009 if path.startswith(repo.path):
1009 if path.startswith(repo.path):
1010 # truncate name relative to the repository (.hg)
1010 # truncate name relative to the repository (.hg)
1011 path = path[len(repo.path) + 1:]
1011 path = path[len(repo.path) + 1:]
1012 if path.startswith('cache/'):
1012 if path.startswith('cache/'):
1013 msg = 'accessing cache with vfs instead of cachevfs: "%s"'
1013 msg = 'accessing cache with vfs instead of cachevfs: "%s"'
1014 repo.ui.develwarn(msg % path, stacklevel=2, config="cache-vfs")
1014 repo.ui.develwarn(msg % path, stacklevel=2, config="cache-vfs")
1015 if path.startswith('journal.'):
1015 if path.startswith('journal.'):
1016 # journal is covered by 'lock'
1016 # journal is covered by 'lock'
1017 if repo._currentlock(repo._lockref) is None:
1017 if repo._currentlock(repo._lockref) is None:
1018 repo.ui.develwarn('write with no lock: "%s"' % path,
1018 repo.ui.develwarn('write with no lock: "%s"' % path,
1019 stacklevel=2, config='check-locks')
1019 stacklevel=2, config='check-locks')
1020 elif repo._currentlock(repo._wlockref) is None:
1020 elif repo._currentlock(repo._wlockref) is None:
1021 # rest of vfs files are covered by 'wlock'
1021 # rest of vfs files are covered by 'wlock'
1022 #
1022 #
1023 # exclude special files
1023 # exclude special files
1024 for prefix in self._wlockfreeprefix:
1024 for prefix in self._wlockfreeprefix:
1025 if path.startswith(prefix):
1025 if path.startswith(prefix):
1026 return
1026 return
1027 repo.ui.develwarn('write with no wlock: "%s"' % path,
1027 repo.ui.develwarn('write with no wlock: "%s"' % path,
1028 stacklevel=2, config='check-locks')
1028 stacklevel=2, config='check-locks')
1029 return ret
1029 return ret
1030 return checkvfs
1030 return checkvfs
1031
1031
1032 def _getsvfsward(self, origfunc):
1032 def _getsvfsward(self, origfunc):
1033 """build a ward for self.svfs"""
1033 """build a ward for self.svfs"""
1034 rref = weakref.ref(self)
1034 rref = weakref.ref(self)
1035 def checksvfs(path, mode=None):
1035 def checksvfs(path, mode=None):
1036 ret = origfunc(path, mode=mode)
1036 ret = origfunc(path, mode=mode)
1037 repo = rref()
1037 repo = rref()
1038 if repo is None or not util.safehasattr(repo, '_lockref'):
1038 if repo is None or not util.safehasattr(repo, '_lockref'):
1039 return
1039 return
1040 if mode in (None, 'r', 'rb'):
1040 if mode in (None, 'r', 'rb'):
1041 return
1041 return
1042 if path.startswith(repo.sharedpath):
1042 if path.startswith(repo.sharedpath):
1043 # truncate name relative to the repository (.hg)
1043 # truncate name relative to the repository (.hg)
1044 path = path[len(repo.sharedpath) + 1:]
1044 path = path[len(repo.sharedpath) + 1:]
1045 if repo._currentlock(repo._lockref) is None:
1045 if repo._currentlock(repo._lockref) is None:
1046 repo.ui.develwarn('write with no lock: "%s"' % path,
1046 repo.ui.develwarn('write with no lock: "%s"' % path,
1047 stacklevel=3)
1047 stacklevel=3)
1048 return ret
1048 return ret
1049 return checksvfs
1049 return checksvfs
1050
1050
1051 def close(self):
1051 def close(self):
1052 self._writecaches()
1052 self._writecaches()
1053
1053
1054 def _writecaches(self):
1054 def _writecaches(self):
1055 if self._revbranchcache:
1055 if self._revbranchcache:
1056 self._revbranchcache.write()
1056 self._revbranchcache.write()
1057
1057
1058 def _restrictcapabilities(self, caps):
1058 def _restrictcapabilities(self, caps):
1059 if self.ui.configbool('experimental', 'bundle2-advertise'):
1059 if self.ui.configbool('experimental', 'bundle2-advertise'):
1060 caps = set(caps)
1060 caps = set(caps)
1061 capsblob = bundle2.encodecaps(bundle2.getrepocaps(self,
1061 capsblob = bundle2.encodecaps(bundle2.getrepocaps(self,
1062 role='client'))
1062 role='client'))
1063 caps.add('bundle2=' + urlreq.quote(capsblob))
1063 caps.add('bundle2=' + urlreq.quote(capsblob))
1064 return caps
1064 return caps
1065
1065
1066 def _writerequirements(self):
1066 def _writerequirements(self):
1067 scmutil.writerequires(self.vfs, self.requirements)
1067 scmutil.writerequires(self.vfs, self.requirements)
1068
1068
1069 # Don't cache auditor/nofsauditor, or you'll end up with reference cycle:
1069 # Don't cache auditor/nofsauditor, or you'll end up with reference cycle:
1070 # self -> auditor -> self._checknested -> self
1070 # self -> auditor -> self._checknested -> self
1071
1071
1072 @property
1072 @property
1073 def auditor(self):
1073 def auditor(self):
1074 # This is only used by context.workingctx.match in order to
1074 # This is only used by context.workingctx.match in order to
1075 # detect files in subrepos.
1075 # detect files in subrepos.
1076 return pathutil.pathauditor(self.root, callback=self._checknested)
1076 return pathutil.pathauditor(self.root, callback=self._checknested)
1077
1077
1078 @property
1078 @property
1079 def nofsauditor(self):
1079 def nofsauditor(self):
1080 # This is only used by context.basectx.match in order to detect
1080 # This is only used by context.basectx.match in order to detect
1081 # files in subrepos.
1081 # files in subrepos.
1082 return pathutil.pathauditor(self.root, callback=self._checknested,
1082 return pathutil.pathauditor(self.root, callback=self._checknested,
1083 realfs=False, cached=True)
1083 realfs=False, cached=True)
1084
1084
1085 def _checknested(self, path):
1085 def _checknested(self, path):
1086 """Determine if path is a legal nested repository."""
1086 """Determine if path is a legal nested repository."""
1087 if not path.startswith(self.root):
1087 if not path.startswith(self.root):
1088 return False
1088 return False
1089 subpath = path[len(self.root) + 1:]
1089 subpath = path[len(self.root) + 1:]
1090 normsubpath = util.pconvert(subpath)
1090 normsubpath = util.pconvert(subpath)
1091
1091
1092 # XXX: Checking against the current working copy is wrong in
1092 # XXX: Checking against the current working copy is wrong in
1093 # the sense that it can reject things like
1093 # the sense that it can reject things like
1094 #
1094 #
1095 # $ hg cat -r 10 sub/x.txt
1095 # $ hg cat -r 10 sub/x.txt
1096 #
1096 #
1097 # if sub/ is no longer a subrepository in the working copy
1097 # if sub/ is no longer a subrepository in the working copy
1098 # parent revision.
1098 # parent revision.
1099 #
1099 #
1100 # However, it can of course also allow things that would have
1100 # However, it can of course also allow things that would have
1101 # been rejected before, such as the above cat command if sub/
1101 # been rejected before, such as the above cat command if sub/
1102 # is a subrepository now, but was a normal directory before.
1102 # is a subrepository now, but was a normal directory before.
1103 # The old path auditor would have rejected by mistake since it
1103 # The old path auditor would have rejected by mistake since it
1104 # panics when it sees sub/.hg/.
1104 # panics when it sees sub/.hg/.
1105 #
1105 #
1106 # All in all, checking against the working copy seems sensible
1106 # All in all, checking against the working copy seems sensible
1107 # since we want to prevent access to nested repositories on
1107 # since we want to prevent access to nested repositories on
1108 # the filesystem *now*.
1108 # the filesystem *now*.
1109 ctx = self[None]
1109 ctx = self[None]
1110 parts = util.splitpath(subpath)
1110 parts = util.splitpath(subpath)
1111 while parts:
1111 while parts:
1112 prefix = '/'.join(parts)
1112 prefix = '/'.join(parts)
1113 if prefix in ctx.substate:
1113 if prefix in ctx.substate:
1114 if prefix == normsubpath:
1114 if prefix == normsubpath:
1115 return True
1115 return True
1116 else:
1116 else:
1117 sub = ctx.sub(prefix)
1117 sub = ctx.sub(prefix)
1118 return sub.checknested(subpath[len(prefix) + 1:])
1118 return sub.checknested(subpath[len(prefix) + 1:])
1119 else:
1119 else:
1120 parts.pop()
1120 parts.pop()
1121 return False
1121 return False
1122
1122
1123 def peer(self):
1123 def peer(self):
1124 return localpeer(self) # not cached to avoid reference cycle
1124 return localpeer(self) # not cached to avoid reference cycle
1125
1125
1126 def unfiltered(self):
1126 def unfiltered(self):
1127 """Return unfiltered version of the repository
1127 """Return unfiltered version of the repository
1128
1128
1129 Intended to be overwritten by filtered repo."""
1129 Intended to be overwritten by filtered repo."""
1130 return self
1130 return self
1131
1131
1132 def filtered(self, name, visibilityexceptions=None):
1132 def filtered(self, name, visibilityexceptions=None):
1133 """Return a filtered version of a repository"""
1133 """Return a filtered version of a repository"""
1134 cls = repoview.newtype(self.unfiltered().__class__)
1134 cls = repoview.newtype(self.unfiltered().__class__)
1135 return cls(self, name, visibilityexceptions)
1135 return cls(self, name, visibilityexceptions)
1136
1136
1137 @repofilecache('bookmarks', 'bookmarks.current')
1137 @repofilecache('bookmarks', 'bookmarks.current')
1138 def _bookmarks(self):
1138 def _bookmarks(self):
1139 return bookmarks.bmstore(self)
1139 return bookmarks.bmstore(self)
1140
1140
1141 @property
1141 @property
1142 def _activebookmark(self):
1142 def _activebookmark(self):
1143 return self._bookmarks.active
1143 return self._bookmarks.active
1144
1144
1145 # _phasesets depend on changelog. what we need is to call
1145 # _phasesets depend on changelog. what we need is to call
1146 # _phasecache.invalidate() if '00changelog.i' was changed, but it
1146 # _phasecache.invalidate() if '00changelog.i' was changed, but it
1147 # can't be easily expressed in filecache mechanism.
1147 # can't be easily expressed in filecache mechanism.
1148 @storecache('phaseroots', '00changelog.i')
1148 @storecache('phaseroots', '00changelog.i')
1149 def _phasecache(self):
1149 def _phasecache(self):
1150 return phases.phasecache(self, self._phasedefaults)
1150 return phases.phasecache(self, self._phasedefaults)
1151
1151
1152 @storecache('obsstore')
1152 @storecache('obsstore')
1153 def obsstore(self):
1153 def obsstore(self):
1154 return obsolete.makestore(self.ui, self)
1154 return obsolete.makestore(self.ui, self)
1155
1155
1156 @storecache('00changelog.i')
1156 @storecache('00changelog.i')
1157 def changelog(self):
1157 def changelog(self):
1158 return changelog.changelog(self.svfs,
1158 return changelog.changelog(self.svfs,
1159 trypending=txnutil.mayhavepending(self.root))
1159 trypending=txnutil.mayhavepending(self.root))
1160
1160
1161 @storecache('00manifest.i')
1161 @storecache('00manifest.i')
1162 def manifestlog(self):
1162 def manifestlog(self):
1163 rootstore = manifest.manifestrevlog(self.svfs)
1163 rootstore = manifest.manifestrevlog(self.svfs)
1164 return manifest.manifestlog(self.svfs, self, rootstore)
1164 return manifest.manifestlog(self.svfs, self, rootstore)
1165
1165
1166 @repofilecache('dirstate')
1166 @repofilecache('dirstate')
1167 def dirstate(self):
1167 def dirstate(self):
1168 return self._makedirstate()
1168 return self._makedirstate()
1169
1169
1170 def _makedirstate(self):
1170 def _makedirstate(self):
1171 """Extension point for wrapping the dirstate per-repo."""
1171 """Extension point for wrapping the dirstate per-repo."""
1172 sparsematchfn = lambda: sparse.matcher(self)
1172 sparsematchfn = lambda: sparse.matcher(self)
1173
1173
1174 return dirstate.dirstate(self.vfs, self.ui, self.root,
1174 return dirstate.dirstate(self.vfs, self.ui, self.root,
1175 self._dirstatevalidate, sparsematchfn)
1175 self._dirstatevalidate, sparsematchfn)
1176
1176
1177 def _dirstatevalidate(self, node):
1177 def _dirstatevalidate(self, node):
1178 try:
1178 try:
1179 self.changelog.rev(node)
1179 self.changelog.rev(node)
1180 return node
1180 return node
1181 except error.LookupError:
1181 except error.LookupError:
1182 if not self._dirstatevalidatewarned:
1182 if not self._dirstatevalidatewarned:
1183 self._dirstatevalidatewarned = True
1183 self._dirstatevalidatewarned = True
1184 self.ui.warn(_("warning: ignoring unknown"
1184 self.ui.warn(_("warning: ignoring unknown"
1185 " working parent %s!\n") % short(node))
1185 " working parent %s!\n") % short(node))
1186 return nullid
1186 return nullid
1187
1187
1188 @storecache(narrowspec.FILENAME)
1188 @storecache(narrowspec.FILENAME)
1189 def narrowpats(self):
1189 def narrowpats(self):
1190 """matcher patterns for this repository's narrowspec
1190 """matcher patterns for this repository's narrowspec
1191
1191
1192 A tuple of (includes, excludes).
1192 A tuple of (includes, excludes).
1193 """
1193 """
1194 return narrowspec.load(self)
1194 return narrowspec.load(self)
1195
1195
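# Editorial sketch (assumes `repo` is an open narrow clone; the values shown
# are hypothetical): narrowpats exposes the raw pattern tuple backing the
# matchers below.

includes, excludes = repo.narrowpats
# includes might be {'path:src'}, excludes might be {'path:src/tests'}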
1196 @storecache(narrowspec.FILENAME)
1196 @storecache(narrowspec.FILENAME)
1197 def _narrowmatch(self):
1197 def _narrowmatch(self):
1198 if repository.NARROW_REQUIREMENT not in self.requirements:
1198 if repository.NARROW_REQUIREMENT not in self.requirements:
1199 return matchmod.always(self.root, '')
1199 return matchmod.always(self.root, '')
1200 include, exclude = self.narrowpats
1200 include, exclude = self.narrowpats
1201 return narrowspec.match(self.root, include=include, exclude=exclude)
1201 return narrowspec.match(self.root, include=include, exclude=exclude)
1202
1202
1203 # TODO(martinvonz): make this property-like instead?
1203 def narrowmatch(self, match=None):
1204 def narrowmatch(self):
1204 """matcher corresponding the the repo's narrowspec
1205
1206 If `match` is given, then that will be intersected with the narrow
1207 matcher.
1208 """
1209 if match:
1210 return matchmod.intersectmatchers(match, self._narrowmatch)
1205 return self._narrowmatch
1211 return self._narrowmatch
1206
1212
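# Editorial sketch of the new `match` argument (assumes `repo` is an open
# narrow clone and `matchmod` is mercurial.match; paths are placeholders):
# the caller's matcher is intersected with the narrow matcher, so only files
# accepted by both are matched.

m = matchmod.match(repo.root, b'', [b'glob:src/**'])
narrowed = repo.narrowmatch(m)
narrowed(b'src/main.py')     # True only if also inside the narrowspec
narrowed(b'docs/index.txt')  # False: rejected by the caller's matcher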
1207 def setnarrowpats(self, newincludes, newexcludes):
1213 def setnarrowpats(self, newincludes, newexcludes):
1208 narrowspec.save(self, newincludes, newexcludes)
1214 narrowspec.save(self, newincludes, newexcludes)
1209 self.invalidate(clearfilecache=True)
1215 self.invalidate(clearfilecache=True)
1210
1216
1211 def __getitem__(self, changeid):
1217 def __getitem__(self, changeid):
1212 if changeid is None:
1218 if changeid is None:
1213 return context.workingctx(self)
1219 return context.workingctx(self)
1214 if isinstance(changeid, context.basectx):
1220 if isinstance(changeid, context.basectx):
1215 return changeid
1221 return changeid
1216 if isinstance(changeid, slice):
1222 if isinstance(changeid, slice):
1217 # wdirrev isn't contiguous so the slice shouldn't include it
1223 # wdirrev isn't contiguous so the slice shouldn't include it
1218 return [self[i]
1224 return [self[i]
1219 for i in pycompat.xrange(*changeid.indices(len(self)))
1225 for i in pycompat.xrange(*changeid.indices(len(self)))
1220 if i not in self.changelog.filteredrevs]
1226 if i not in self.changelog.filteredrevs]
1221 try:
1227 try:
1222 if isinstance(changeid, int):
1228 if isinstance(changeid, int):
1223 node = self.changelog.node(changeid)
1229 node = self.changelog.node(changeid)
1224 rev = changeid
1230 rev = changeid
1225 elif changeid == 'null':
1231 elif changeid == 'null':
1226 node = nullid
1232 node = nullid
1227 rev = nullrev
1233 rev = nullrev
1228 elif changeid == 'tip':
1234 elif changeid == 'tip':
1229 node = self.changelog.tip()
1235 node = self.changelog.tip()
1230 rev = self.changelog.rev(node)
1236 rev = self.changelog.rev(node)
1231 elif changeid == '.':
1237 elif changeid == '.':
1232 # this is a hack to delay/avoid loading obsmarkers
1238 # this is a hack to delay/avoid loading obsmarkers
1233 # when we know that '.' won't be hidden
1239 # when we know that '.' won't be hidden
1234 node = self.dirstate.p1()
1240 node = self.dirstate.p1()
1235 rev = self.unfiltered().changelog.rev(node)
1241 rev = self.unfiltered().changelog.rev(node)
1236 elif len(changeid) == 20:
1242 elif len(changeid) == 20:
1237 try:
1243 try:
1238 node = changeid
1244 node = changeid
1239 rev = self.changelog.rev(changeid)
1245 rev = self.changelog.rev(changeid)
1240 except error.FilteredLookupError:
1246 except error.FilteredLookupError:
1241 changeid = hex(changeid) # for the error message
1247 changeid = hex(changeid) # for the error message
1242 raise
1248 raise
1243 except LookupError:
1249 except LookupError:
1244 # check if it might have come from damaged dirstate
1250 # check if it might have come from damaged dirstate
1245 #
1251 #
1246 # XXX we could avoid the unfiltered if we had a recognizable
1252 # XXX we could avoid the unfiltered if we had a recognizable
1247 # exception for filtered changeset access
1253 # exception for filtered changeset access
1248 if (self.local()
1254 if (self.local()
1249 and changeid in self.unfiltered().dirstate.parents()):
1255 and changeid in self.unfiltered().dirstate.parents()):
1250 msg = _("working directory has unknown parent '%s'!")
1256 msg = _("working directory has unknown parent '%s'!")
1251 raise error.Abort(msg % short(changeid))
1257 raise error.Abort(msg % short(changeid))
1252 changeid = hex(changeid) # for the error message
1258 changeid = hex(changeid) # for the error message
1253 raise
1259 raise
1254
1260
1255 elif len(changeid) == 40:
1261 elif len(changeid) == 40:
1256 node = bin(changeid)
1262 node = bin(changeid)
1257 rev = self.changelog.rev(node)
1263 rev = self.changelog.rev(node)
1258 else:
1264 else:
1259 raise error.ProgrammingError(
1265 raise error.ProgrammingError(
1260 "unsupported changeid '%s' of type %s" %
1266 "unsupported changeid '%s' of type %s" %
1261 (changeid, type(changeid)))
1267 (changeid, type(changeid)))
1262
1268
1263 return context.changectx(self, rev, node)
1269 return context.changectx(self, rev, node)
1264
1270
1265 except (error.FilteredIndexError, error.FilteredLookupError):
1271 except (error.FilteredIndexError, error.FilteredLookupError):
1266 raise error.FilteredRepoLookupError(_("filtered revision '%s'")
1272 raise error.FilteredRepoLookupError(_("filtered revision '%s'")
1267 % pycompat.bytestr(changeid))
1273 % pycompat.bytestr(changeid))
1268 except (IndexError, LookupError):
1274 except (IndexError, LookupError):
1269 raise error.RepoLookupError(_("unknown revision '%s'") % changeid)
1275 raise error.RepoLookupError(_("unknown revision '%s'") % changeid)
1270 except error.WdirUnsupported:
1276 except error.WdirUnsupported:
1271 return context.workingctx(self)
1277 return context.workingctx(self)
1272
1278
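# Editorial example of the changeid forms handled above (assumes `repo` is an
# open repository with at least one commit):

repo[0]        # integer revision number
repo['tip']    # symbolic identifier resolved through the changelog
repo['.']      # first parent of the working directory (dirstate p1)
repo[None]     # workingctx for the working directory itself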
1273 def __contains__(self, changeid):
1279 def __contains__(self, changeid):
1274 """True if the given changeid exists
1280 """True if the given changeid exists
1275
1281
1276 error.AmbiguousPrefixLookupError is raised if an ambiguous node is
1282 error.AmbiguousPrefixLookupError is raised if an ambiguous node is
1277 specified.
1283 specified.
1278 """
1284 """
1279 try:
1285 try:
1280 self[changeid]
1286 self[changeid]
1281 return True
1287 return True
1282 except error.RepoLookupError:
1288 except error.RepoLookupError:
1283 return False
1289 return False
1284
1290
1285 def __nonzero__(self):
1291 def __nonzero__(self):
1286 return True
1292 return True
1287
1293
1288 __bool__ = __nonzero__
1294 __bool__ = __nonzero__
1289
1295
1290 def __len__(self):
1296 def __len__(self):
1291 # no need to pay the cost of repoview.changelog
1297 # no need to pay the cost of repoview.changelog
1292 unfi = self.unfiltered()
1298 unfi = self.unfiltered()
1293 return len(unfi.changelog)
1299 return len(unfi.changelog)
1294
1300
1295 def __iter__(self):
1301 def __iter__(self):
1296 return iter(self.changelog)
1302 return iter(self.changelog)
1297
1303
1298 def revs(self, expr, *args):
1304 def revs(self, expr, *args):
1299 '''Find revisions matching a revset.
1305 '''Find revisions matching a revset.
1300
1306
1301 The revset is specified as a string ``expr`` that may contain
1307 The revset is specified as a string ``expr`` that may contain
1302 %-formatting to escape certain types. See ``revsetlang.formatspec``.
1308 %-formatting to escape certain types. See ``revsetlang.formatspec``.
1303
1309
1304 Revset aliases from the configuration are not expanded. To expand
1310 Revset aliases from the configuration are not expanded. To expand
1305 user aliases, consider calling ``scmutil.revrange()`` or
1311 user aliases, consider calling ``scmutil.revrange()`` or
1306 ``repo.anyrevs([expr], user=True)``.
1312 ``repo.anyrevs([expr], user=True)``.
1307
1313
1308 Returns a revset.abstractsmartset, which is a list-like interface
1314 Returns a revset.abstractsmartset, which is a list-like interface
1309 that contains integer revisions.
1315 that contains integer revisions.
1310 '''
1316 '''
1311 expr = revsetlang.formatspec(expr, *args)
1317 expr = revsetlang.formatspec(expr, *args)
1312 m = revset.match(None, expr)
1318 m = revset.match(None, expr)
1313 return m(self)
1319 return m(self)
1314
1320
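# Editorial example of the %-formatting escapes mentioned above (assumes
# `repo` is an open repository; the branch name is a placeholder):

headrevs = repo.revs(b'heads(%s)', b'default')    # %s escapes a string
merges = repo.revs(b'%ld and merge()', headrevs)  # %ld escapes a list of revs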
1315 def set(self, expr, *args):
1321 def set(self, expr, *args):
1316 '''Find revisions matching a revset and emit changectx instances.
1322 '''Find revisions matching a revset and emit changectx instances.
1317
1323
1318 This is a convenience wrapper around ``revs()`` that iterates the
1324 This is a convenience wrapper around ``revs()`` that iterates the
1319 result and is a generator of changectx instances.
1325 result and is a generator of changectx instances.
1320
1326
1321 Revset aliases from the configuration are not expanded. To expand
1327 Revset aliases from the configuration are not expanded. To expand
1322 user aliases, consider calling ``scmutil.revrange()``.
1328 user aliases, consider calling ``scmutil.revrange()``.
1323 '''
1329 '''
1324 for r in self.revs(expr, *args):
1330 for r in self.revs(expr, *args):
1325 yield self[r]
1331 yield self[r]
1326
1332
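# Editorial example (branch name is a placeholder): iterating changectx
# instances instead of raw revision numbers.

for ctx in repo.set(b'branch(%s) and public()', b'default'):
    repo.ui.write(b'%s\n' % ctx.hex())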
1327 def anyrevs(self, specs, user=False, localalias=None):
1333 def anyrevs(self, specs, user=False, localalias=None):
1328 '''Find revisions matching one of the given revsets.
1334 '''Find revisions matching one of the given revsets.
1329
1335
1330 Revset aliases from the configuration are not expanded by default. To
1336 Revset aliases from the configuration are not expanded by default. To
1331 expand user aliases, specify ``user=True``. To provide some local
1337 expand user aliases, specify ``user=True``. To provide some local
1332 definitions overriding user aliases, set ``localalias`` to
1338 definitions overriding user aliases, set ``localalias`` to
1333 ``{name: definitionstring}``.
1339 ``{name: definitionstring}``.
1334 '''
1340 '''
1335 if user:
1341 if user:
1336 m = revset.matchany(self.ui, specs,
1342 m = revset.matchany(self.ui, specs,
1337 lookup=revset.lookupfn(self),
1343 lookup=revset.lookupfn(self),
1338 localalias=localalias)
1344 localalias=localalias)
1339 else:
1345 else:
1340 m = revset.matchany(None, specs, localalias=localalias)
1346 m = revset.matchany(None, specs, localalias=localalias)
1341 return m(self)
1347 return m(self)
1342
1348
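# Editorial sketch (the alias name and definition are hypothetical): expanding
# user revset aliases, with a local definition overriding anything configured.

revs = repo.anyrevs([b'releases'], user=True,
                    localalias={b'releases': b'tag() and public()'})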
1343 def url(self):
1349 def url(self):
1344 return 'file:' + self.root
1350 return 'file:' + self.root
1345
1351
1346 def hook(self, name, throw=False, **args):
1352 def hook(self, name, throw=False, **args):
1347 """Call a hook, passing this repo instance.
1353 """Call a hook, passing this repo instance.
1348
1354
1349 This is a convenience method to aid invoking hooks. Extensions likely
1355 This is a convenience method to aid invoking hooks. Extensions likely
1350 won't call this unless they have registered a custom hook or are
1356 won't call this unless they have registered a custom hook or are
1351 replacing code that is expected to call a hook.
1357 replacing code that is expected to call a hook.
1352 """
1358 """
1353 return hook.hook(self.ui, self, name, throw, **args)
1359 return hook.hook(self.ui, self, name, throw, **args)
1354
1360
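# Editorial sketch (hook name and keyword arguments are hypothetical): firing
# a custom hook so any configured [hooks] entries for it run with this repo;
# keyword arguments are exposed to shell hooks as HG_* environment variables.

repo.hook('myext-postsync', throw=False, source='pull')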
1355 @filteredpropertycache
1361 @filteredpropertycache
1356 def _tagscache(self):
1362 def _tagscache(self):
1357 '''Returns a tagscache object that contains various tags related
1363 '''Returns a tagscache object that contains various tags related
1358 caches.'''
1364 caches.'''
1359
1365
1360 # This simplifies its cache management by having one decorated
1366 # This simplifies its cache management by having one decorated
1361 # function (this one) and the rest simply fetch things from it.
1367 # function (this one) and the rest simply fetch things from it.
1362 class tagscache(object):
1368 class tagscache(object):
1363 def __init__(self):
1369 def __init__(self):
1364 # These two define the set of tags for this repository. tags
1370 # These two define the set of tags for this repository. tags
1365 # maps tag name to node; tagtypes maps tag name to 'global' or
1371 # maps tag name to node; tagtypes maps tag name to 'global' or
1366 # 'local'. (Global tags are defined by .hgtags across all
1372 # 'local'. (Global tags are defined by .hgtags across all
1367 # heads, and local tags are defined in .hg/localtags.)
1373 # heads, and local tags are defined in .hg/localtags.)
1368 # They constitute the in-memory cache of tags.
1374 # They constitute the in-memory cache of tags.
1369 self.tags = self.tagtypes = None
1375 self.tags = self.tagtypes = None
1370
1376
1371 self.nodetagscache = self.tagslist = None
1377 self.nodetagscache = self.tagslist = None
1372
1378
1373 cache = tagscache()
1379 cache = tagscache()
1374 cache.tags, cache.tagtypes = self._findtags()
1380 cache.tags, cache.tagtypes = self._findtags()
1375
1381
1376 return cache
1382 return cache
1377
1383
1378 def tags(self):
1384 def tags(self):
1379 '''return a mapping of tag to node'''
1385 '''return a mapping of tag to node'''
1380 t = {}
1386 t = {}
1381 if self.changelog.filteredrevs:
1387 if self.changelog.filteredrevs:
1382 tags, tt = self._findtags()
1388 tags, tt = self._findtags()
1383 else:
1389 else:
1384 tags = self._tagscache.tags
1390 tags = self._tagscache.tags
1385 for k, v in tags.iteritems():
1391 for k, v in tags.iteritems():
1386 try:
1392 try:
1387 # ignore tags to unknown nodes
1393 # ignore tags to unknown nodes
1388 self.changelog.rev(v)
1394 self.changelog.rev(v)
1389 t[k] = v
1395 t[k] = v
1390 except (error.LookupError, ValueError):
1396 except (error.LookupError, ValueError):
1391 pass
1397 pass
1392 return t
1398 return t
1393
1399
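# Editorial example (tag names and values are illustrative): the forward and
# reverse tag lookups provided above.

t = repo.tags()           # maps tag name to binary node, always includes b'tip'
repo.nodetags(t[b'tip'])  # list of tags on that node, e.g. [b'tip']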
1394 def _findtags(self):
1400 def _findtags(self):
1395 '''Do the hard work of finding tags. Return a pair of dicts
1401 '''Do the hard work of finding tags. Return a pair of dicts
1396 (tags, tagtypes) where tags maps tag name to node, and tagtypes
1402 (tags, tagtypes) where tags maps tag name to node, and tagtypes
1397 maps tag name to a string like \'global\' or \'local\'.
1403 maps tag name to a string like \'global\' or \'local\'.
1398 Subclasses or extensions are free to add their own tags, but
1404 Subclasses or extensions are free to add their own tags, but
1399 should be aware that the returned dicts will be retained for the
1405 should be aware that the returned dicts will be retained for the
1400 duration of the localrepo object.'''
1406 duration of the localrepo object.'''
1401
1407
1402 # XXX what tagtype should subclasses/extensions use? Currently
1408 # XXX what tagtype should subclasses/extensions use? Currently
1403 # mq and bookmarks add tags, but do not set the tagtype at all.
1409 # mq and bookmarks add tags, but do not set the tagtype at all.
1404 # Should each extension invent its own tag type? Should there
1410 # Should each extension invent its own tag type? Should there
1405 # be one tagtype for all such "virtual" tags? Or is the status
1411 # be one tagtype for all such "virtual" tags? Or is the status
1406 # quo fine?
1412 # quo fine?
1407
1413
1408
1414
1409 # map tag name to (node, hist)
1415 # map tag name to (node, hist)
1410 alltags = tagsmod.findglobaltags(self.ui, self)
1416 alltags = tagsmod.findglobaltags(self.ui, self)
1411 # map tag name to tag type
1417 # map tag name to tag type
1412 tagtypes = dict((tag, 'global') for tag in alltags)
1418 tagtypes = dict((tag, 'global') for tag in alltags)
1413
1419
1414 tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)
1420 tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)
1415
1421
1416 # Build the return dicts. Have to re-encode tag names because
1422 # Build the return dicts. Have to re-encode tag names because
1417 # the tags module always uses UTF-8 (in order not to lose info
1423 # the tags module always uses UTF-8 (in order not to lose info
1418 # writing to the cache), but the rest of Mercurial wants them in
1424 # writing to the cache), but the rest of Mercurial wants them in
1419 # local encoding.
1425 # local encoding.
1420 tags = {}
1426 tags = {}
1421 for (name, (node, hist)) in alltags.iteritems():
1427 for (name, (node, hist)) in alltags.iteritems():
1422 if node != nullid:
1428 if node != nullid:
1423 tags[encoding.tolocal(name)] = node
1429 tags[encoding.tolocal(name)] = node
1424 tags['tip'] = self.changelog.tip()
1430 tags['tip'] = self.changelog.tip()
1425 tagtypes = dict([(encoding.tolocal(name), value)
1431 tagtypes = dict([(encoding.tolocal(name), value)
1426 for (name, value) in tagtypes.iteritems()])
1432 for (name, value) in tagtypes.iteritems()])
1427 return (tags, tagtypes)
1433 return (tags, tagtypes)
1428
1434
1429 def tagtype(self, tagname):
1435 def tagtype(self, tagname):
1430 '''
1436 '''
1431 return the type of the given tag. result can be:
1437 return the type of the given tag. result can be:
1432
1438
1433 'local' : a local tag
1439 'local' : a local tag
1434 'global' : a global tag
1440 'global' : a global tag
1435 None : tag does not exist
1441 None : tag does not exist
1436 '''
1442 '''
1437
1443
1438 return self._tagscache.tagtypes.get(tagname)
1444 return self._tagscache.tagtypes.get(tagname)
1439
1445
1440 def tagslist(self):
1446 def tagslist(self):
1441 '''return a list of tags ordered by revision'''
1447 '''return a list of tags ordered by revision'''
1442 if not self._tagscache.tagslist:
1448 if not self._tagscache.tagslist:
1443 l = []
1449 l = []
1444 for t, n in self.tags().iteritems():
1450 for t, n in self.tags().iteritems():
1445 l.append((self.changelog.rev(n), t, n))
1451 l.append((self.changelog.rev(n), t, n))
1446 self._tagscache.tagslist = [(t, n) for r, t, n in sorted(l)]
1452 self._tagscache.tagslist = [(t, n) for r, t, n in sorted(l)]
1447
1453
1448 return self._tagscache.tagslist
1454 return self._tagscache.tagslist
1449
1455
1450 def nodetags(self, node):
1456 def nodetags(self, node):
1451 '''return the tags associated with a node'''
1457 '''return the tags associated with a node'''
1452 if not self._tagscache.nodetagscache:
1458 if not self._tagscache.nodetagscache:
1453 nodetagscache = {}
1459 nodetagscache = {}
1454 for t, n in self._tagscache.tags.iteritems():
1460 for t, n in self._tagscache.tags.iteritems():
1455 nodetagscache.setdefault(n, []).append(t)
1461 nodetagscache.setdefault(n, []).append(t)
1456 for tags in nodetagscache.itervalues():
1462 for tags in nodetagscache.itervalues():
1457 tags.sort()
1463 tags.sort()
1458 self._tagscache.nodetagscache = nodetagscache
1464 self._tagscache.nodetagscache = nodetagscache
1459 return self._tagscache.nodetagscache.get(node, [])
1465 return self._tagscache.nodetagscache.get(node, [])
1460
1466
1461 def nodebookmarks(self, node):
1467 def nodebookmarks(self, node):
1462 """return the list of bookmarks pointing to the specified node"""
1468 """return the list of bookmarks pointing to the specified node"""
1463 return self._bookmarks.names(node)
1469 return self._bookmarks.names(node)
1464
1470
1465 def branchmap(self):
1471 def branchmap(self):
1466 '''returns a dictionary {branch: [branchheads]} with branchheads
1472 '''returns a dictionary {branch: [branchheads]} with branchheads
1467 ordered by increasing revision number'''
1473 ordered by increasing revision number'''
1468 branchmap.updatecache(self)
1474 branchmap.updatecache(self)
1469 return self._branchcaches[self.filtername]
1475 return self._branchcaches[self.filtername]
1470
1476
1471 @unfilteredmethod
1477 @unfilteredmethod
1472 def revbranchcache(self):
1478 def revbranchcache(self):
1473 if not self._revbranchcache:
1479 if not self._revbranchcache:
1474 self._revbranchcache = branchmap.revbranchcache(self.unfiltered())
1480 self._revbranchcache = branchmap.revbranchcache(self.unfiltered())
1475 return self._revbranchcache
1481 return self._revbranchcache
1476
1482
1477 def branchtip(self, branch, ignoremissing=False):
1483 def branchtip(self, branch, ignoremissing=False):
1478 '''return the tip node for a given branch
1484 '''return the tip node for a given branch
1479
1485
1480 If ignoremissing is True, then this method will not raise an error.
1486 If ignoremissing is True, then this method will not raise an error.
1481 This is helpful for callers that only expect None for a missing branch
1487 This is helpful for callers that only expect None for a missing branch
1482 (e.g. namespace).
1488 (e.g. namespace).
1483
1489
1484 '''
1490 '''
1485 try:
1491 try:
1486 return self.branchmap().branchtip(branch)
1492 return self.branchmap().branchtip(branch)
1487 except KeyError:
1493 except KeyError:
1488 if not ignoremissing:
1494 if not ignoremissing:
1489 raise error.RepoLookupError(_("unknown branch '%s'") % branch)
1495 raise error.RepoLookupError(_("unknown branch '%s'") % branch)
1490 else:
1496 else:
1491 pass
1497 pass
1492
1498
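# Editorial sketch (branch names are placeholders): ignoremissing turns an
# unknown branch into a None result instead of a RepoLookupError.

node = repo.branchtip(b'default')
missing = repo.branchtip(b'no-such-branch', ignoremissing=True)  # returns None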
1493 def lookup(self, key):
1499 def lookup(self, key):
1494 return scmutil.revsymbol(self, key).node()
1500 return scmutil.revsymbol(self, key).node()
1495
1501
1496 def lookupbranch(self, key):
1502 def lookupbranch(self, key):
1497 if key in self.branchmap():
1503 if key in self.branchmap():
1498 return key
1504 return key
1499
1505
1500 return scmutil.revsymbol(self, key).branch()
1506 return scmutil.revsymbol(self, key).branch()
1501
1507
1502 def known(self, nodes):
1508 def known(self, nodes):
1503 cl = self.changelog
1509 cl = self.changelog
1504 nm = cl.nodemap
1510 nm = cl.nodemap
1505 filtered = cl.filteredrevs
1511 filtered = cl.filteredrevs
1506 result = []
1512 result = []
1507 for n in nodes:
1513 for n in nodes:
1508 r = nm.get(n)
1514 r = nm.get(n)
1509 resp = not (r is None or r in filtered)
1515 resp = not (r is None or r in filtered)
1510 result.append(resp)
1516 result.append(resp)
1511 return result
1517 return result
1512
1518
1513 def local(self):
1519 def local(self):
1514 return self
1520 return self
1515
1521
1516 def publishing(self):
1522 def publishing(self):
1517 # it's safe (and desirable) to trust the publish flag unconditionally
1523 # it's safe (and desirable) to trust the publish flag unconditionally
1518 # so that we don't finalize changes shared between users via ssh or nfs
1524 # so that we don't finalize changes shared between users via ssh or nfs
1519 return self.ui.configbool('phases', 'publish', untrusted=True)
1525 return self.ui.configbool('phases', 'publish', untrusted=True)
1520
1526
1521 def cancopy(self):
1527 def cancopy(self):
1522 # so statichttprepo's override of local() works
1528 # so statichttprepo's override of local() works
1523 if not self.local():
1529 if not self.local():
1524 return False
1530 return False
1525 if not self.publishing():
1531 if not self.publishing():
1526 return True
1532 return True
1527 # if publishing we can't copy if there is filtered content
1533 # if publishing we can't copy if there is filtered content
1528 return not self.filtered('visible').changelog.filteredrevs
1534 return not self.filtered('visible').changelog.filteredrevs
1529
1535
1530 def shared(self):
1536 def shared(self):
1531 '''the type of shared repository (None if not shared)'''
1537 '''the type of shared repository (None if not shared)'''
1532 if self.sharedpath != self.path:
1538 if self.sharedpath != self.path:
1533 return 'store'
1539 return 'store'
1534 return None
1540 return None
1535
1541
1536 def wjoin(self, f, *insidef):
1542 def wjoin(self, f, *insidef):
1537 return self.vfs.reljoin(self.root, f, *insidef)
1543 return self.vfs.reljoin(self.root, f, *insidef)
1538
1544
1539 def setparents(self, p1, p2=nullid):
1545 def setparents(self, p1, p2=nullid):
1540 with self.dirstate.parentchange():
1546 with self.dirstate.parentchange():
1541 copies = self.dirstate.setparents(p1, p2)
1547 copies = self.dirstate.setparents(p1, p2)
1542 pctx = self[p1]
1548 pctx = self[p1]
1543 if copies:
1549 if copies:
1544 # Adjust copy records, the dirstate cannot do it, it
1550 # Adjust copy records, the dirstate cannot do it, it
1545 # requires access to parents manifests. Preserve them
1551 # requires access to parents manifests. Preserve them
1546 # only for entries added to first parent.
1552 # only for entries added to first parent.
1547 for f in copies:
1553 for f in copies:
1548 if f not in pctx and copies[f] in pctx:
1554 if f not in pctx and copies[f] in pctx:
1549 self.dirstate.copy(copies[f], f)
1555 self.dirstate.copy(copies[f], f)
1550 if p2 == nullid:
1556 if p2 == nullid:
1551 for f, s in sorted(self.dirstate.copies().items()):
1557 for f, s in sorted(self.dirstate.copies().items()):
1552 if f not in pctx and s not in pctx:
1558 if f not in pctx and s not in pctx:
1553 self.dirstate.copy(None, f)
1559 self.dirstate.copy(None, f)
1554
1560
1555 def filectx(self, path, changeid=None, fileid=None, changectx=None):
1561 def filectx(self, path, changeid=None, fileid=None, changectx=None):
1556 """changeid can be a changeset revision, node, or tag.
1562 """changeid can be a changeset revision, node, or tag.
1557 fileid can be a file revision or node."""
1563 fileid can be a file revision or node."""
1558 return context.filectx(self, path, changeid, fileid,
1564 return context.filectx(self, path, changeid, fileid,
1559 changectx=changectx)
1565 changectx=changectx)
1560
1566
1561 def getcwd(self):
1567 def getcwd(self):
1562 return self.dirstate.getcwd()
1568 return self.dirstate.getcwd()
1563
1569
1564 def pathto(self, f, cwd=None):
1570 def pathto(self, f, cwd=None):
1565 return self.dirstate.pathto(f, cwd)
1571 return self.dirstate.pathto(f, cwd)
1566
1572
1567 def _loadfilter(self, filter):
1573 def _loadfilter(self, filter):
1568 if filter not in self._filterpats:
1574 if filter not in self._filterpats:
1569 l = []
1575 l = []
1570 for pat, cmd in self.ui.configitems(filter):
1576 for pat, cmd in self.ui.configitems(filter):
1571 if cmd == '!':
1577 if cmd == '!':
1572 continue
1578 continue
1573 mf = matchmod.match(self.root, '', [pat])
1579 mf = matchmod.match(self.root, '', [pat])
1574 fn = None
1580 fn = None
1575 params = cmd
1581 params = cmd
1576 for name, filterfn in self._datafilters.iteritems():
1582 for name, filterfn in self._datafilters.iteritems():
1577 if cmd.startswith(name):
1583 if cmd.startswith(name):
1578 fn = filterfn
1584 fn = filterfn
1579 params = cmd[len(name):].lstrip()
1585 params = cmd[len(name):].lstrip()
1580 break
1586 break
1581 if not fn:
1587 if not fn:
1582 fn = lambda s, c, **kwargs: procutil.filter(s, c)
1588 fn = lambda s, c, **kwargs: procutil.filter(s, c)
1583 # Wrap old filters not supporting keyword arguments
1589 # Wrap old filters not supporting keyword arguments
1584 if not pycompat.getargspec(fn)[2]:
1590 if not pycompat.getargspec(fn)[2]:
1585 oldfn = fn
1591 oldfn = fn
1586 fn = lambda s, c, **kwargs: oldfn(s, c)
1592 fn = lambda s, c, **kwargs: oldfn(s, c)
1587 l.append((mf, fn, params))
1593 l.append((mf, fn, params))
1588 self._filterpats[filter] = l
1594 self._filterpats[filter] = l
1589 return self._filterpats[filter]
1595 return self._filterpats[filter]
1590
1596
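# Editorial example of the hgrc configuration consumed by _loadfilter()
# (the filter commands are illustrative; the tempfile: driver syntax is the
# one documented in 'hg help filters'):
#
# [encode]
# **.txt = tempfile: unix2dos -n INFILE OUTFILE
#
# [decode]
# **.txt = tempfile: dos2unix -n INFILE OUTFILE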
1591 def _filter(self, filterpats, filename, data):
1597 def _filter(self, filterpats, filename, data):
1592 for mf, fn, cmd in filterpats:
1598 for mf, fn, cmd in filterpats:
1593 if mf(filename):
1599 if mf(filename):
1594 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
1600 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
1595 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
1601 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
1596 break
1602 break
1597
1603
1598 return data
1604 return data
1599
1605
1600 @unfilteredpropertycache
1606 @unfilteredpropertycache
1601 def _encodefilterpats(self):
1607 def _encodefilterpats(self):
1602 return self._loadfilter('encode')
1608 return self._loadfilter('encode')
1603
1609
1604 @unfilteredpropertycache
1610 @unfilteredpropertycache
1605 def _decodefilterpats(self):
1611 def _decodefilterpats(self):
1606 return self._loadfilter('decode')
1612 return self._loadfilter('decode')
1607
1613
1608 def adddatafilter(self, name, filter):
1614 def adddatafilter(self, name, filter):
1609 self._datafilters[name] = filter
1615 self._datafilters[name] = filter
1610
1616
1611 def wread(self, filename):
1617 def wread(self, filename):
1612 if self.wvfs.islink(filename):
1618 if self.wvfs.islink(filename):
1613 data = self.wvfs.readlink(filename)
1619 data = self.wvfs.readlink(filename)
1614 else:
1620 else:
1615 data = self.wvfs.read(filename)
1621 data = self.wvfs.read(filename)
1616 return self._filter(self._encodefilterpats, filename, data)
1622 return self._filter(self._encodefilterpats, filename, data)
1617
1623
1618 def wwrite(self, filename, data, flags, backgroundclose=False, **kwargs):
1624 def wwrite(self, filename, data, flags, backgroundclose=False, **kwargs):
1619 """write ``data`` into ``filename`` in the working directory
1625 """write ``data`` into ``filename`` in the working directory
1620
1626
1621 This returns the length of the written (possibly decoded) data.
1627 This returns the length of the written (possibly decoded) data.
1622 """
1628 """
1623 data = self._filter(self._decodefilterpats, filename, data)
1629 data = self._filter(self._decodefilterpats, filename, data)
1624 if 'l' in flags:
1630 if 'l' in flags:
1625 self.wvfs.symlink(data, filename)
1631 self.wvfs.symlink(data, filename)
1626 else:
1632 else:
1627 self.wvfs.write(filename, data, backgroundclose=backgroundclose,
1633 self.wvfs.write(filename, data, backgroundclose=backgroundclose,
1628 **kwargs)
1634 **kwargs)
1629 if 'x' in flags:
1635 if 'x' in flags:
1630 self.wvfs.setflags(filename, False, True)
1636 self.wvfs.setflags(filename, False, True)
1631 else:
1637 else:
1632 self.wvfs.setflags(filename, False, False)
1638 self.wvfs.setflags(filename, False, False)
1633 return len(data)
1639 return len(data)
1634
1640
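# Editorial sketch (file names and data are placeholders): the flags argument
# selects symlink ('l') or executable ('x') handling when writing.

repo.wwrite(b'hello.txt', b'hello\n', b'')     # regular file
repo.wwrite(b'run.sh', b'#!/bin/sh\n', b'x')   # executable bit set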
1635 def wwritedata(self, filename, data):
1641 def wwritedata(self, filename, data):
1636 return self._filter(self._decodefilterpats, filename, data)
1642 return self._filter(self._decodefilterpats, filename, data)
1637
1643
1638 def currenttransaction(self):
1644 def currenttransaction(self):
1639 """return the current transaction or None if non exists"""
1645 """return the current transaction or None if non exists"""
1640 if self._transref:
1646 if self._transref:
1641 tr = self._transref()
1647 tr = self._transref()
1642 else:
1648 else:
1643 tr = None
1649 tr = None
1644
1650
1645 if tr and tr.running():
1651 if tr and tr.running():
1646 return tr
1652 return tr
1647 return None
1653 return None
1648
1654
1649 def transaction(self, desc, report=None):
1655 def transaction(self, desc, report=None):
1650 if (self.ui.configbool('devel', 'all-warnings')
1656 if (self.ui.configbool('devel', 'all-warnings')
1651 or self.ui.configbool('devel', 'check-locks')):
1657 or self.ui.configbool('devel', 'check-locks')):
1652 if self._currentlock(self._lockref) is None:
1658 if self._currentlock(self._lockref) is None:
1653 raise error.ProgrammingError('transaction requires locking')
1659 raise error.ProgrammingError('transaction requires locking')
1654 tr = self.currenttransaction()
1660 tr = self.currenttransaction()
1655 if tr is not None:
1661 if tr is not None:
1656 return tr.nest(name=desc)
1662 return tr.nest(name=desc)
1657
1663
1658 # abort here if the journal already exists
1664 # abort here if the journal already exists
1659 if self.svfs.exists("journal"):
1665 if self.svfs.exists("journal"):
1660 raise error.RepoError(
1666 raise error.RepoError(
1661 _("abandoned transaction found"),
1667 _("abandoned transaction found"),
1662 hint=_("run 'hg recover' to clean up transaction"))
1668 hint=_("run 'hg recover' to clean up transaction"))
1663
1669
1664 idbase = "%.40f#%f" % (random.random(), time.time())
1670 idbase = "%.40f#%f" % (random.random(), time.time())
1665 ha = hex(hashlib.sha1(idbase).digest())
1671 ha = hex(hashlib.sha1(idbase).digest())
1666 txnid = 'TXN:' + ha
1672 txnid = 'TXN:' + ha
1667 self.hook('pretxnopen', throw=True, txnname=desc, txnid=txnid)
1673 self.hook('pretxnopen', throw=True, txnname=desc, txnid=txnid)
1668
1674
1669 self._writejournal(desc)
1675 self._writejournal(desc)
1670 renames = [(vfs, x, undoname(x)) for vfs, x in self._journalfiles()]
1676 renames = [(vfs, x, undoname(x)) for vfs, x in self._journalfiles()]
1671 if report:
1677 if report:
1672 rp = report
1678 rp = report
1673 else:
1679 else:
1674 rp = self.ui.warn
1680 rp = self.ui.warn
1675 vfsmap = {'plain': self.vfs, 'store': self.svfs} # root of .hg/
1681 vfsmap = {'plain': self.vfs, 'store': self.svfs} # root of .hg/
1676 # we must avoid cyclic reference between repo and transaction.
1682 # we must avoid cyclic reference between repo and transaction.
1677 reporef = weakref.ref(self)
1683 reporef = weakref.ref(self)
1678 # Code to track tag movement
1684 # Code to track tag movement
1679 #
1685 #
1680 # Since tags are all handled as file content, it is actually quite hard
1686 # Since tags are all handled as file content, it is actually quite hard
1681 # to track these movements from a code perspective. So we fall back to
1687 # to track these movements from a code perspective. So we fall back to
1682 # tracking at the repository level. One could envision tracking changes
1688 # tracking at the repository level. One could envision tracking changes
1683 # to the '.hgtags' file through changegroup apply but that fails to
1689 # to the '.hgtags' file through changegroup apply but that fails to
1684 # cope with cases where a transaction exposes new heads without a changegroup
1690 # cope with cases where a transaction exposes new heads without a changegroup
1685 # being involved (eg: phase movement).
1691 # being involved (eg: phase movement).
1686 #
1692 #
1687 # For now, we gate the feature behind a flag since this likely comes
1693 # For now, we gate the feature behind a flag since this likely comes
1688 # with performance impacts. The current code runs more often than needed
1694 # with performance impacts. The current code runs more often than needed
1689 # and does not use caches as much as it could. The current focus is on
1695 # and does not use caches as much as it could. The current focus is on
1690 # the behavior of the feature so we disable it by default. The flag
1696 # the behavior of the feature so we disable it by default. The flag
1691 # will be removed when we are happy with the performance impact.
1697 # will be removed when we are happy with the performance impact.
1692 #
1698 #
1693 # Once this feature is no longer experimental, move the following
1699 # Once this feature is no longer experimental, move the following
1694 # documentation to the appropriate help section:
1700 # documentation to the appropriate help section:
1695 #
1701 #
1696 # The ``HG_TAG_MOVED`` variable will be set if the transaction touched
1702 # The ``HG_TAG_MOVED`` variable will be set if the transaction touched
1697 # tags (new or changed or deleted tags). In addition the details of
1703 # tags (new or changed or deleted tags). In addition the details of
1698 # these changes are made available in a file at:
1704 # these changes are made available in a file at:
1699 # ``REPOROOT/.hg/changes/tags.changes``.
1705 # ``REPOROOT/.hg/changes/tags.changes``.
1700 # Make sure you check for HG_TAG_MOVED before reading that file as it
1706 # Make sure you check for HG_TAG_MOVED before reading that file as it
1701 # might exist from a previous transaction even if no tags were touched
1707 # might exist from a previous transaction even if no tags were touched
1702 # in this one. Changes are recorded in a line-based format::
1708 # in this one. Changes are recorded in a line-based format::
1703 #
1709 #
1704 # <action> <hex-node> <tag-name>\n
1710 # <action> <hex-node> <tag-name>\n
1705 #
1711 #
1706 # Actions are defined as follows:
1712 # Actions are defined as follows:
1707 # "-R": tag is removed,
1713 # "-R": tag is removed,
1708 # "+A": tag is added,
1714 # "+A": tag is added,
1709 # "-M": tag is moved (old value),
1715 # "-M": tag is moved (old value),
1710 # "+M": tag is moved (new value),
1716 # "+M": tag is moved (new value),
1711 tracktags = lambda x: None
1717 tracktags = lambda x: None
1712 # experimental config: experimental.hook-track-tags
1718 # experimental config: experimental.hook-track-tags
1713 shouldtracktags = self.ui.configbool('experimental', 'hook-track-tags')
1719 shouldtracktags = self.ui.configbool('experimental', 'hook-track-tags')
1714 if desc != 'strip' and shouldtracktags:
1720 if desc != 'strip' and shouldtracktags:
1715 oldheads = self.changelog.headrevs()
1721 oldheads = self.changelog.headrevs()
1716 def tracktags(tr2):
1722 def tracktags(tr2):
1717 repo = reporef()
1723 repo = reporef()
1718 oldfnodes = tagsmod.fnoderevs(repo.ui, repo, oldheads)
1724 oldfnodes = tagsmod.fnoderevs(repo.ui, repo, oldheads)
1719 newheads = repo.changelog.headrevs()
1725 newheads = repo.changelog.headrevs()
1720 newfnodes = tagsmod.fnoderevs(repo.ui, repo, newheads)
1726 newfnodes = tagsmod.fnoderevs(repo.ui, repo, newheads)
1721 # note: we compare lists here.
1727 # note: we compare lists here.
1722 # As we do it only once, building a set would not be cheaper
1728 # As we do it only once, building a set would not be cheaper
1723 changes = tagsmod.difftags(repo.ui, repo, oldfnodes, newfnodes)
1729 changes = tagsmod.difftags(repo.ui, repo, oldfnodes, newfnodes)
1724 if changes:
1730 if changes:
1725 tr2.hookargs['tag_moved'] = '1'
1731 tr2.hookargs['tag_moved'] = '1'
1726 with repo.vfs('changes/tags.changes', 'w',
1732 with repo.vfs('changes/tags.changes', 'w',
1727 atomictemp=True) as changesfile:
1733 atomictemp=True) as changesfile:
1728 # note: we do not register the file to the transaction
1734 # note: we do not register the file to the transaction
1729 # because we need it to still exist when the transaction
1735 # because we need it to still exist when the transaction
1730 # is closed (for txnclose hooks)
1736 # is closed (for txnclose hooks)
1731 tagsmod.writediff(changesfile, changes)
1737 tagsmod.writediff(changesfile, changes)
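For reference, the ``tags.changes`` file written here follows the line-based format documented in the comment above ("<action> <hex-node> <tag-name>"). A hypothetical standalone reader for that format, not part of this module:

def parsetagschanges(data):
    # Parse "<action> <hex-node> <tag-name>" lines; the actions are the
    # -R/+A/-M/+M verbs documented above.
    changes = []
    for line in data.splitlines():
        if not line:
            continue
        action, hexnode, tagname = line.split(' ', 2)
        changes.append((action, hexnode, tagname))
    return changes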
1732 def validate(tr2):
1738 def validate(tr2):
1733 """will run pre-closing hooks"""
1739 """will run pre-closing hooks"""
1734 # XXX the transaction API is a bit lacking here so we take a hacky
1740 # XXX the transaction API is a bit lacking here so we take a hacky
1735 # path for now
1741 # path for now
1736 #
1742 #
1737 # We cannot add this as a "pending" hook since the 'tr.hookargs'
1743 # We cannot add this as a "pending" hook since the 'tr.hookargs'
1738 # dict is copied before these run. In addition we need the data
1744 # dict is copied before these run. In addition we need the data
1739 # available to in-memory hooks too.
1745 # available to in-memory hooks too.
1740 #
1746 #
1741 # Moreover, we also need to make sure this runs before txnclose
1747 # Moreover, we also need to make sure this runs before txnclose
1742 # hooks and there is no "pending" mechanism that would execute
1748 # hooks and there is no "pending" mechanism that would execute
1743 # logic only if hooks are about to run.
1749 # logic only if hooks are about to run.
1744 #
1750 #
1745 # Fixing this limitation of the transaction is also needed to track
1751 # Fixing this limitation of the transaction is also needed to track
1746 # other families of changes (bookmarks, phases, obsolescence).
1752 # other families of changes (bookmarks, phases, obsolescence).
1747 #
1753 #
1748 # This will have to be fixed before we remove the experimental
1754 # This will have to be fixed before we remove the experimental
1749 # gating.
1755 # gating.
1750 tracktags(tr2)
1756 tracktags(tr2)
1751 repo = reporef()
1757 repo = reporef()
1752 if repo.ui.configbool('experimental', 'single-head-per-branch'):
1758 if repo.ui.configbool('experimental', 'single-head-per-branch'):
1753 scmutil.enforcesinglehead(repo, tr2, desc)
1759 scmutil.enforcesinglehead(repo, tr2, desc)
1754 if hook.hashook(repo.ui, 'pretxnclose-bookmark'):
1760 if hook.hashook(repo.ui, 'pretxnclose-bookmark'):
1755 for name, (old, new) in sorted(tr.changes['bookmarks'].items()):
1761 for name, (old, new) in sorted(tr.changes['bookmarks'].items()):
1756 args = tr.hookargs.copy()
1762 args = tr.hookargs.copy()
1757 args.update(bookmarks.preparehookargs(name, old, new))
1763 args.update(bookmarks.preparehookargs(name, old, new))
1758 repo.hook('pretxnclose-bookmark', throw=True,
1764 repo.hook('pretxnclose-bookmark', throw=True,
1759 txnname=desc,
1765 txnname=desc,
1760 **pycompat.strkwargs(args))
1766 **pycompat.strkwargs(args))
1761 if hook.hashook(repo.ui, 'pretxnclose-phase'):
1767 if hook.hashook(repo.ui, 'pretxnclose-phase'):
1762 cl = repo.unfiltered().changelog
1768 cl = repo.unfiltered().changelog
1763 for rev, (old, new) in tr.changes['phases'].items():
1769 for rev, (old, new) in tr.changes['phases'].items():
1764 args = tr.hookargs.copy()
1770 args = tr.hookargs.copy()
1765 node = hex(cl.node(rev))
1771 node = hex(cl.node(rev))
1766 args.update(phases.preparehookargs(node, old, new))
1772 args.update(phases.preparehookargs(node, old, new))
1767 repo.hook('pretxnclose-phase', throw=True, txnname=desc,
1773 repo.hook('pretxnclose-phase', throw=True, txnname=desc,
1768 **pycompat.strkwargs(args))
1774 **pycompat.strkwargs(args))
1769
1775
1770 repo.hook('pretxnclose', throw=True,
1776 repo.hook('pretxnclose', throw=True,
1771 txnname=desc, **pycompat.strkwargs(tr.hookargs))
1777 txnname=desc, **pycompat.strkwargs(tr.hookargs))
1772 def releasefn(tr, success):
1778 def releasefn(tr, success):
1773 repo = reporef()
1779 repo = reporef()
1774 if success:
1780 if success:
1775 # this should be explicitly invoked here, because
1781 # this should be explicitly invoked here, because
1776 # in-memory changes aren't written out when closing the
1782 # in-memory changes aren't written out when closing the
1777 # transaction, if tr.addfilegenerator (via
1783 # transaction, if tr.addfilegenerator (via
1778 # dirstate.write or so) isn't invoked while the
1784 # dirstate.write or so) isn't invoked while the
1779 # transaction is running
1785 # transaction is running
1780 repo.dirstate.write(None)
1786 repo.dirstate.write(None)
1781 else:
1787 else:
1782 # discard all changes (including ones already written
1788 # discard all changes (including ones already written
1783 # out) in this transaction
1789 # out) in this transaction
1784 narrowspec.restorebackup(self, 'journal.narrowspec')
1790 narrowspec.restorebackup(self, 'journal.narrowspec')
1785 repo.dirstate.restorebackup(None, 'journal.dirstate')
1791 repo.dirstate.restorebackup(None, 'journal.dirstate')
1786
1792
1787 repo.invalidate(clearfilecache=True)
1793 repo.invalidate(clearfilecache=True)
1788
1794
1789 tr = transaction.transaction(rp, self.svfs, vfsmap,
1795 tr = transaction.transaction(rp, self.svfs, vfsmap,
1790 "journal",
1796 "journal",
1791 "undo",
1797 "undo",
1792 aftertrans(renames),
1798 aftertrans(renames),
1793 self.store.createmode,
1799 self.store.createmode,
1794 validator=validate,
1800 validator=validate,
1795 releasefn=releasefn,
1801 releasefn=releasefn,
1796 checkambigfiles=_cachedfiles,
1802 checkambigfiles=_cachedfiles,
1797 name=desc)
1803 name=desc)
1798 tr.changes['origrepolen'] = len(self)
1804 tr.changes['origrepolen'] = len(self)
1799 tr.changes['obsmarkers'] = set()
1805 tr.changes['obsmarkers'] = set()
1800 tr.changes['phases'] = {}
1806 tr.changes['phases'] = {}
1801 tr.changes['bookmarks'] = {}
1807 tr.changes['bookmarks'] = {}
1802
1808
1803 tr.hookargs['txnid'] = txnid
1809 tr.hookargs['txnid'] = txnid
1804 # note: writing the fncache only during finalize means that the file is
1810 # note: writing the fncache only during finalize means that the file is
1805 # outdated when running hooks. As fncache is used for streaming clone,
1811 # outdated when running hooks. As fncache is used for streaming clone,
1806 # this is not expected to break anything that happens during the hooks.
1812 # this is not expected to break anything that happens during the hooks.
1807 tr.addfinalize('flush-fncache', self.store.write)
1813 tr.addfinalize('flush-fncache', self.store.write)
1808 def txnclosehook(tr2):
1814 def txnclosehook(tr2):
1809 """To be run if transaction is successful, will schedule a hook run
1815 """To be run if transaction is successful, will schedule a hook run
1810 """
1816 """
1811 # Don't reference tr2 in hook() so we don't hold a reference.
1817 # Don't reference tr2 in hook() so we don't hold a reference.
1812 # This reduces memory consumption when there are multiple
1818 # This reduces memory consumption when there are multiple
1813 # transactions per lock. This can likely go away if issue5045
1819 # transactions per lock. This can likely go away if issue5045
1814 # fixes the function accumulation.
1820 # fixes the function accumulation.
1815 hookargs = tr2.hookargs
1821 hookargs = tr2.hookargs
1816
1822
1817 def hookfunc():
1823 def hookfunc():
1818 repo = reporef()
1824 repo = reporef()
1819 if hook.hashook(repo.ui, 'txnclose-bookmark'):
1825 if hook.hashook(repo.ui, 'txnclose-bookmark'):
1820 bmchanges = sorted(tr.changes['bookmarks'].items())
1826 bmchanges = sorted(tr.changes['bookmarks'].items())
1821 for name, (old, new) in bmchanges:
1827 for name, (old, new) in bmchanges:
1822 args = tr.hookargs.copy()
1828 args = tr.hookargs.copy()
1823 args.update(bookmarks.preparehookargs(name, old, new))
1829 args.update(bookmarks.preparehookargs(name, old, new))
1824 repo.hook('txnclose-bookmark', throw=False,
1830 repo.hook('txnclose-bookmark', throw=False,
1825 txnname=desc, **pycompat.strkwargs(args))
1831 txnname=desc, **pycompat.strkwargs(args))
1826
1832
1827 if hook.hashook(repo.ui, 'txnclose-phase'):
1833 if hook.hashook(repo.ui, 'txnclose-phase'):
1828 cl = repo.unfiltered().changelog
1834 cl = repo.unfiltered().changelog
1829 phasemv = sorted(tr.changes['phases'].items())
1835 phasemv = sorted(tr.changes['phases'].items())
1830 for rev, (old, new) in phasemv:
1836 for rev, (old, new) in phasemv:
1831 args = tr.hookargs.copy()
1837 args = tr.hookargs.copy()
1832 node = hex(cl.node(rev))
1838 node = hex(cl.node(rev))
1833 args.update(phases.preparehookargs(node, old, new))
1839 args.update(phases.preparehookargs(node, old, new))
1834 repo.hook('txnclose-phase', throw=False, txnname=desc,
1840 repo.hook('txnclose-phase', throw=False, txnname=desc,
1835 **pycompat.strkwargs(args))
1841 **pycompat.strkwargs(args))
1836
1842
1837 repo.hook('txnclose', throw=False, txnname=desc,
1843 repo.hook('txnclose', throw=False, txnname=desc,
1838 **pycompat.strkwargs(hookargs))
1844 **pycompat.strkwargs(hookargs))
1839 reporef()._afterlock(hookfunc)
1845 reporef()._afterlock(hookfunc)
1840 tr.addfinalize('txnclose-hook', txnclosehook)
1846 tr.addfinalize('txnclose-hook', txnclosehook)
1841 # Include a leading "-" to make it happen before the transaction summary
1847 # Include a leading "-" to make it happen before the transaction summary
1842 # reports registered via scmutil.registersummarycallback() whose names
1848 # reports registered via scmutil.registersummarycallback() whose names
1843 # are 00-txnreport etc. That way, the caches will be warm when the
1849 # are 00-txnreport etc. That way, the caches will be warm when the
1844 # callbacks run.
1850 # callbacks run.
1845 tr.addpostclose('-warm-cache', self._buildcacheupdater(tr))
1851 tr.addpostclose('-warm-cache', self._buildcacheupdater(tr))
1846 def txnaborthook(tr2):
1852 def txnaborthook(tr2):
1847 """To be run if transaction is aborted
1853 """To be run if transaction is aborted
1848 """
1854 """
1849 reporef().hook('txnabort', throw=False, txnname=desc,
1855 reporef().hook('txnabort', throw=False, txnname=desc,
1850 **pycompat.strkwargs(tr2.hookargs))
1856 **pycompat.strkwargs(tr2.hookargs))
1851 tr.addabort('txnabort-hook', txnaborthook)
1857 tr.addabort('txnabort-hook', txnaborthook)
1852 # avoid eager cache invalidation. in-memory data should be identical
1858 # avoid eager cache invalidation. in-memory data should be identical
1853 # to stored data if transaction has no error.
1859 # to stored data if transaction has no error.
1854 tr.addpostclose('refresh-filecachestats', self._refreshfilecachestats)
1860 tr.addpostclose('refresh-filecachestats', self._refreshfilecachestats)
1855 self._transref = weakref.ref(tr)
1861 self._transref = weakref.ref(tr)
1856 scmutil.registersummarycallback(self, tr, desc)
1862 scmutil.registersummarycallback(self, tr, desc)
1857 return tr
1863 return tr
1858
1864
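Caller-side, the transaction opened here is normally paired with the lock ordering documented in wlock()/lock() below and released in a finally block; commit() further down follows exactly this shape. A condensed sketch (the operation body is hypothetical):

from mercurial import lock as lockmod

def dostorechange(repo):
    wlock = lock = tr = None
    try:
        wlock = repo.wlock()       # always acquire wlock before lock
        lock = repo.lock()
        tr = repo.transaction('dostorechange')
        # ... modify the store here ...
        tr.close()                 # success: commit the transaction
    finally:
        # release() tolerates None entries, so this also covers early failures
        lockmod.release(tr, lock, wlock)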
1859 def _journalfiles(self):
1865 def _journalfiles(self):
1860 return ((self.svfs, 'journal'),
1866 return ((self.svfs, 'journal'),
1861 (self.vfs, 'journal.dirstate'),
1867 (self.vfs, 'journal.dirstate'),
1862 (self.vfs, 'journal.branch'),
1868 (self.vfs, 'journal.branch'),
1863 (self.vfs, 'journal.desc'),
1869 (self.vfs, 'journal.desc'),
1864 (self.vfs, 'journal.bookmarks'),
1870 (self.vfs, 'journal.bookmarks'),
1865 (self.svfs, 'journal.phaseroots'))
1871 (self.svfs, 'journal.phaseroots'))
1866
1872
1867 def undofiles(self):
1873 def undofiles(self):
1868 return [(vfs, undoname(x)) for vfs, x in self._journalfiles()]
1874 return [(vfs, undoname(x)) for vfs, x in self._journalfiles()]
1869
1875
1870 @unfilteredmethod
1876 @unfilteredmethod
1871 def _writejournal(self, desc):
1877 def _writejournal(self, desc):
1872 self.dirstate.savebackup(None, 'journal.dirstate')
1878 self.dirstate.savebackup(None, 'journal.dirstate')
1873 narrowspec.savebackup(self, 'journal.narrowspec')
1879 narrowspec.savebackup(self, 'journal.narrowspec')
1874 self.vfs.write("journal.branch",
1880 self.vfs.write("journal.branch",
1875 encoding.fromlocal(self.dirstate.branch()))
1881 encoding.fromlocal(self.dirstate.branch()))
1876 self.vfs.write("journal.desc",
1882 self.vfs.write("journal.desc",
1877 "%d\n%s\n" % (len(self), desc))
1883 "%d\n%s\n" % (len(self), desc))
1878 self.vfs.write("journal.bookmarks",
1884 self.vfs.write("journal.bookmarks",
1879 self.vfs.tryread("bookmarks"))
1885 self.vfs.tryread("bookmarks"))
1880 self.svfs.write("journal.phaseroots",
1886 self.svfs.write("journal.phaseroots",
1881 self.svfs.tryread("phaseroots"))
1887 self.svfs.tryread("phaseroots"))
1882
1888
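The ``journal.desc`` file written above uses the "%d\n%s\n" layout (pre-transaction repository length, then the transaction description); _rollback() below parses the renamed ``undo.desc`` the same way, with an optional third detail line. A hypothetical standalone reader, assuming that layout:

def parsejournaldesc(data):
    # "%d\n%s\n" as written by _writejournal(), optionally followed by a
    # detail line (see _rollback() below).
    lines = data.splitlines()
    oldlen = int(lines[0])
    desc = lines[1]
    detail = lines[2] if len(lines) >= 3 else None
    return oldlen, desc, detail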
1883 def recover(self):
1889 def recover(self):
1884 with self.lock():
1890 with self.lock():
1885 if self.svfs.exists("journal"):
1891 if self.svfs.exists("journal"):
1886 self.ui.status(_("rolling back interrupted transaction\n"))
1892 self.ui.status(_("rolling back interrupted transaction\n"))
1887 vfsmap = {'': self.svfs,
1893 vfsmap = {'': self.svfs,
1888 'plain': self.vfs,}
1894 'plain': self.vfs,}
1889 transaction.rollback(self.svfs, vfsmap, "journal",
1895 transaction.rollback(self.svfs, vfsmap, "journal",
1890 self.ui.warn,
1896 self.ui.warn,
1891 checkambigfiles=_cachedfiles)
1897 checkambigfiles=_cachedfiles)
1892 self.invalidate()
1898 self.invalidate()
1893 return True
1899 return True
1894 else:
1900 else:
1895 self.ui.warn(_("no interrupted transaction available\n"))
1901 self.ui.warn(_("no interrupted transaction available\n"))
1896 return False
1902 return False
1897
1903
1898 def rollback(self, dryrun=False, force=False):
1904 def rollback(self, dryrun=False, force=False):
1899 wlock = lock = dsguard = None
1905 wlock = lock = dsguard = None
1900 try:
1906 try:
1901 wlock = self.wlock()
1907 wlock = self.wlock()
1902 lock = self.lock()
1908 lock = self.lock()
1903 if self.svfs.exists("undo"):
1909 if self.svfs.exists("undo"):
1904 dsguard = dirstateguard.dirstateguard(self, 'rollback')
1910 dsguard = dirstateguard.dirstateguard(self, 'rollback')
1905
1911
1906 return self._rollback(dryrun, force, dsguard)
1912 return self._rollback(dryrun, force, dsguard)
1907 else:
1913 else:
1908 self.ui.warn(_("no rollback information available\n"))
1914 self.ui.warn(_("no rollback information available\n"))
1909 return 1
1915 return 1
1910 finally:
1916 finally:
1911 release(dsguard, lock, wlock)
1917 release(dsguard, lock, wlock)
1912
1918
1913 @unfilteredmethod # Until we get smarter cache management
1919 @unfilteredmethod # Until we get smarter cache management
1914 def _rollback(self, dryrun, force, dsguard):
1920 def _rollback(self, dryrun, force, dsguard):
1915 ui = self.ui
1921 ui = self.ui
1916 try:
1922 try:
1917 args = self.vfs.read('undo.desc').splitlines()
1923 args = self.vfs.read('undo.desc').splitlines()
1918 (oldlen, desc, detail) = (int(args[0]), args[1], None)
1924 (oldlen, desc, detail) = (int(args[0]), args[1], None)
1919 if len(args) >= 3:
1925 if len(args) >= 3:
1920 detail = args[2]
1926 detail = args[2]
1921 oldtip = oldlen - 1
1927 oldtip = oldlen - 1
1922
1928
1923 if detail and ui.verbose:
1929 if detail and ui.verbose:
1924 msg = (_('repository tip rolled back to revision %d'
1930 msg = (_('repository tip rolled back to revision %d'
1925 ' (undo %s: %s)\n')
1931 ' (undo %s: %s)\n')
1926 % (oldtip, desc, detail))
1932 % (oldtip, desc, detail))
1927 else:
1933 else:
1928 msg = (_('repository tip rolled back to revision %d'
1934 msg = (_('repository tip rolled back to revision %d'
1929 ' (undo %s)\n')
1935 ' (undo %s)\n')
1930 % (oldtip, desc))
1936 % (oldtip, desc))
1931 except IOError:
1937 except IOError:
1932 msg = _('rolling back unknown transaction\n')
1938 msg = _('rolling back unknown transaction\n')
1933 desc = None
1939 desc = None
1934
1940
1935 if not force and self['.'] != self['tip'] and desc == 'commit':
1941 if not force and self['.'] != self['tip'] and desc == 'commit':
1936 raise error.Abort(
1942 raise error.Abort(
1937 _('rollback of last commit while not checked out '
1943 _('rollback of last commit while not checked out '
1938 'may lose data'), hint=_('use -f to force'))
1944 'may lose data'), hint=_('use -f to force'))
1939
1945
1940 ui.status(msg)
1946 ui.status(msg)
1941 if dryrun:
1947 if dryrun:
1942 return 0
1948 return 0
1943
1949
1944 parents = self.dirstate.parents()
1950 parents = self.dirstate.parents()
1945 self.destroying()
1951 self.destroying()
1946 vfsmap = {'plain': self.vfs, '': self.svfs}
1952 vfsmap = {'plain': self.vfs, '': self.svfs}
1947 transaction.rollback(self.svfs, vfsmap, 'undo', ui.warn,
1953 transaction.rollback(self.svfs, vfsmap, 'undo', ui.warn,
1948 checkambigfiles=_cachedfiles)
1954 checkambigfiles=_cachedfiles)
1949 if self.vfs.exists('undo.bookmarks'):
1955 if self.vfs.exists('undo.bookmarks'):
1950 self.vfs.rename('undo.bookmarks', 'bookmarks', checkambig=True)
1956 self.vfs.rename('undo.bookmarks', 'bookmarks', checkambig=True)
1951 if self.svfs.exists('undo.phaseroots'):
1957 if self.svfs.exists('undo.phaseroots'):
1952 self.svfs.rename('undo.phaseroots', 'phaseroots', checkambig=True)
1958 self.svfs.rename('undo.phaseroots', 'phaseroots', checkambig=True)
1953 self.invalidate()
1959 self.invalidate()
1954
1960
1955 parentgone = (parents[0] not in self.changelog.nodemap or
1961 parentgone = (parents[0] not in self.changelog.nodemap or
1956 parents[1] not in self.changelog.nodemap)
1962 parents[1] not in self.changelog.nodemap)
1957 if parentgone:
1963 if parentgone:
1958 # prevent dirstateguard from overwriting already restored one
1964 # prevent dirstateguard from overwriting already restored one
1959 dsguard.close()
1965 dsguard.close()
1960
1966
1961 narrowspec.restorebackup(self, 'undo.narrowspec')
1967 narrowspec.restorebackup(self, 'undo.narrowspec')
1962 self.dirstate.restorebackup(None, 'undo.dirstate')
1968 self.dirstate.restorebackup(None, 'undo.dirstate')
1963 try:
1969 try:
1964 branch = self.vfs.read('undo.branch')
1970 branch = self.vfs.read('undo.branch')
1965 self.dirstate.setbranch(encoding.tolocal(branch))
1971 self.dirstate.setbranch(encoding.tolocal(branch))
1966 except IOError:
1972 except IOError:
1967 ui.warn(_('named branch could not be reset: '
1973 ui.warn(_('named branch could not be reset: '
1968 'current branch is still \'%s\'\n')
1974 'current branch is still \'%s\'\n')
1969 % self.dirstate.branch())
1975 % self.dirstate.branch())
1970
1976
1971 parents = tuple([p.rev() for p in self[None].parents()])
1977 parents = tuple([p.rev() for p in self[None].parents()])
1972 if len(parents) > 1:
1978 if len(parents) > 1:
1973 ui.status(_('working directory now based on '
1979 ui.status(_('working directory now based on '
1974 'revisions %d and %d\n') % parents)
1980 'revisions %d and %d\n') % parents)
1975 else:
1981 else:
1976 ui.status(_('working directory now based on '
1982 ui.status(_('working directory now based on '
1977 'revision %d\n') % parents)
1983 'revision %d\n') % parents)
1978 mergemod.mergestate.clean(self, self['.'].node())
1984 mergemod.mergestate.clean(self, self['.'].node())
1979
1985
1980 # TODO: if we know which new heads may result from this rollback, pass
1986 # TODO: if we know which new heads may result from this rollback, pass
1981 # them to destroy(), which will prevent the branchhead cache from being
1987 # them to destroy(), which will prevent the branchhead cache from being
1982 # invalidated.
1988 # invalidated.
1983 self.destroyed()
1989 self.destroyed()
1984 return 0
1990 return 0
1985
1991
1986 def _buildcacheupdater(self, newtransaction):
1992 def _buildcacheupdater(self, newtransaction):
1987 """called during transaction to build the callback updating cache
1993 """called during transaction to build the callback updating cache
1988
1994
1989 Lives on the repository to help extensions that might want to augment
1995 Lives on the repository to help extensions that might want to augment
1990 this logic. For this purpose, the created transaction is passed to the
1996 this logic. For this purpose, the created transaction is passed to the
1991 method.
1997 method.
1992 """
1998 """
1993 # we must avoid cyclic reference between repo and transaction.
1999 # we must avoid cyclic reference between repo and transaction.
1994 reporef = weakref.ref(self)
2000 reporef = weakref.ref(self)
1995 def updater(tr):
2001 def updater(tr):
1996 repo = reporef()
2002 repo = reporef()
1997 repo.updatecaches(tr)
2003 repo.updatecaches(tr)
1998 return updater
2004 return updater
1999
2005
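An extension can register its own cache warmer on the transaction the same way '-warm-cache' is registered in transaction() above, using the same weakref indirection to avoid a repo <-> transaction cycle. A hypothetical sketch (callback name and body are illustrative):

import weakref

def setupextensioncachewarming(repo, tr):
    reporef = weakref.ref(repo)
    def warmextensioncache(tr2):
        r = reporef()
        if r is not None:
            r.ui.debug('warming hypothetical extension cache\n')
    tr.addpostclose('zz-hypothetical-extension-cache', warmextensioncache)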
2000 @unfilteredmethod
2006 @unfilteredmethod
2001 def updatecaches(self, tr=None, full=False):
2007 def updatecaches(self, tr=None, full=False):
2002 """warm appropriate caches
2008 """warm appropriate caches
2003
2009
2004 If this function is called after a transaction has closed, the transaction
2010 If this function is called after a transaction has closed, the transaction
2005 will be available in the 'tr' argument. This can be used to selectively
2011 will be available in the 'tr' argument. This can be used to selectively
2006 update caches relevant to the changes in that transaction.
2012 update caches relevant to the changes in that transaction.
2007
2013
2008 If 'full' is set, make sure all caches the function knows about have
2014 If 'full' is set, make sure all caches the function knows about have
2009 up-to-date data, even the ones usually loaded more lazily.
2015 up-to-date data, even the ones usually loaded more lazily.
2010 """
2016 """
2011 if tr is not None and tr.hookargs.get('source') == 'strip':
2017 if tr is not None and tr.hookargs.get('source') == 'strip':
2012 # During strip, many caches are invalid but
2018 # During strip, many caches are invalid but
2013 # later call to `destroyed` will refresh them.
2019 # later call to `destroyed` will refresh them.
2014 return
2020 return
2015
2021
2016 if tr is None or tr.changes['origrepolen'] < len(self):
2022 if tr is None or tr.changes['origrepolen'] < len(self):
2017 # updating the unfiltered branchmap should refresh all the others,
2023 # updating the unfiltered branchmap should refresh all the others,
2018 self.ui.debug('updating the branch cache\n')
2024 self.ui.debug('updating the branch cache\n')
2019 branchmap.updatecache(self.filtered('served'))
2025 branchmap.updatecache(self.filtered('served'))
2020
2026
2021 if full:
2027 if full:
2022 rbc = self.revbranchcache()
2028 rbc = self.revbranchcache()
2023 for r in self.changelog:
2029 for r in self.changelog:
2024 rbc.branchinfo(r)
2030 rbc.branchinfo(r)
2025 rbc.write()
2031 rbc.write()
2026
2032
2027 # ensure the working copy parents are in the manifestfulltextcache
2033 # ensure the working copy parents are in the manifestfulltextcache
2028 for ctx in self['.'].parents():
2034 for ctx in self['.'].parents():
2029 ctx.manifest() # accessing the manifest is enough
2035 ctx.manifest() # accessing the manifest is enough
2030
2036
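Outside of a transaction, the full warm-up path can also be driven explicitly. A sketch of doing so under the documented lock ordering (whether both locks are strictly required depends on which caches get written, so this errs on the safe side):

from mercurial import lock as lockmod

def warmallcaches(repo):
    wlock = lock = None
    try:
        wlock = repo.wlock()   # wlock before lock, per the docstrings below
        lock = repo.lock()
        repo.updatecaches(full=True)
    finally:
        lockmod.release(lock, wlock)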
2031 def invalidatecaches(self):
2037 def invalidatecaches(self):
2032
2038
2033 if '_tagscache' in vars(self):
2039 if '_tagscache' in vars(self):
2034 # can't use delattr on proxy
2040 # can't use delattr on proxy
2035 del self.__dict__['_tagscache']
2041 del self.__dict__['_tagscache']
2036
2042
2037 self.unfiltered()._branchcaches.clear()
2043 self.unfiltered()._branchcaches.clear()
2038 self.invalidatevolatilesets()
2044 self.invalidatevolatilesets()
2039 self._sparsesignaturecache.clear()
2045 self._sparsesignaturecache.clear()
2040
2046
2041 def invalidatevolatilesets(self):
2047 def invalidatevolatilesets(self):
2042 self.filteredrevcache.clear()
2048 self.filteredrevcache.clear()
2043 obsolete.clearobscaches(self)
2049 obsolete.clearobscaches(self)
2044
2050
2045 def invalidatedirstate(self):
2051 def invalidatedirstate(self):
2046 '''Invalidates the dirstate, causing the next call to dirstate
2052 '''Invalidates the dirstate, causing the next call to dirstate
2047 to check if it was modified since the last time it was read,
2053 to check if it was modified since the last time it was read,
2048 rereading it if it has.
2054 rereading it if it has.
2049
2055
2050 This is different from dirstate.invalidate() in that it doesn't always
2056 This is different from dirstate.invalidate() in that it doesn't always
2051 reread the dirstate. Use dirstate.invalidate() if you want to
2057 reread the dirstate. Use dirstate.invalidate() if you want to
2052 explicitly read the dirstate again (i.e. restoring it to a previous
2058 explicitly read the dirstate again (i.e. restoring it to a previous
2053 known good state).'''
2059 known good state).'''
2054 if hasunfilteredcache(self, 'dirstate'):
2060 if hasunfilteredcache(self, 'dirstate'):
2055 for k in self.dirstate._filecache:
2061 for k in self.dirstate._filecache:
2056 try:
2062 try:
2057 delattr(self.dirstate, k)
2063 delattr(self.dirstate, k)
2058 except AttributeError:
2064 except AttributeError:
2059 pass
2065 pass
2060 delattr(self.unfiltered(), 'dirstate')
2066 delattr(self.unfiltered(), 'dirstate')
2061
2067
2062 def invalidate(self, clearfilecache=False):
2068 def invalidate(self, clearfilecache=False):
2063 '''Invalidates both store and non-store parts other than dirstate
2069 '''Invalidates both store and non-store parts other than dirstate
2064
2070
2065 If a transaction is running, invalidation of store is omitted,
2071 If a transaction is running, invalidation of store is omitted,
2066 because discarding in-memory changes might cause inconsistency
2072 because discarding in-memory changes might cause inconsistency
2067 (e.g. an incomplete fncache causes an unintentional failure, but
2073 (e.g. an incomplete fncache causes an unintentional failure, but
2068 a redundant one doesn't).
2074 a redundant one doesn't).
2069 '''
2075 '''
2070 unfiltered = self.unfiltered() # all file caches are stored unfiltered
2076 unfiltered = self.unfiltered() # all file caches are stored unfiltered
2071 for k in list(self._filecache.keys()):
2077 for k in list(self._filecache.keys()):
2072 # dirstate is invalidated separately in invalidatedirstate()
2078 # dirstate is invalidated separately in invalidatedirstate()
2073 if k == 'dirstate':
2079 if k == 'dirstate':
2074 continue
2080 continue
2075 if (k == 'changelog' and
2081 if (k == 'changelog' and
2076 self.currenttransaction() and
2082 self.currenttransaction() and
2077 self.changelog._delayed):
2083 self.changelog._delayed):
2078 # The changelog object may store unwritten revisions. We don't
2084 # The changelog object may store unwritten revisions. We don't
2079 # want to lose them.
2085 # want to lose them.
2080 # TODO: Solve the problem instead of working around it.
2086 # TODO: Solve the problem instead of working around it.
2081 continue
2087 continue
2082
2088
2083 if clearfilecache:
2089 if clearfilecache:
2084 del self._filecache[k]
2090 del self._filecache[k]
2085 try:
2091 try:
2086 delattr(unfiltered, k)
2092 delattr(unfiltered, k)
2087 except AttributeError:
2093 except AttributeError:
2088 pass
2094 pass
2089 self.invalidatecaches()
2095 self.invalidatecaches()
2090 if not self.currenttransaction():
2096 if not self.currenttransaction():
2091 # TODO: Changing contents of store outside transaction
2097 # TODO: Changing contents of store outside transaction
2092 # causes inconsistency. We should make in-memory store
2098 # causes inconsistency. We should make in-memory store
2093 # changes detectable, and abort if changed.
2099 # changes detectable, and abort if changed.
2094 self.store.invalidatecaches()
2100 self.store.invalidatecaches()
2095
2101
2096 def invalidateall(self):
2102 def invalidateall(self):
2097 '''Fully invalidates both store and non-store parts, causing the
2103 '''Fully invalidates both store and non-store parts, causing the
2098 subsequent operation to reread any outside changes.'''
2104 subsequent operation to reread any outside changes.'''
2099 # extensions should hook this to invalidate their caches
2105 # extensions should hook this to invalidate their caches
2100 self.invalidate()
2106 self.invalidate()
2101 self.invalidatedirstate()
2107 self.invalidatedirstate()
2102
2108
2103 @unfilteredmethod
2109 @unfilteredmethod
2104 def _refreshfilecachestats(self, tr):
2110 def _refreshfilecachestats(self, tr):
2105 """Reload stats of cached files so that they are flagged as valid"""
2111 """Reload stats of cached files so that they are flagged as valid"""
2106 for k, ce in self._filecache.items():
2112 for k, ce in self._filecache.items():
2107 k = pycompat.sysstr(k)
2113 k = pycompat.sysstr(k)
2108 if k == r'dirstate' or k not in self.__dict__:
2114 if k == r'dirstate' or k not in self.__dict__:
2109 continue
2115 continue
2110 ce.refresh()
2116 ce.refresh()
2111
2117
2112 def _lock(self, vfs, lockname, wait, releasefn, acquirefn, desc,
2118 def _lock(self, vfs, lockname, wait, releasefn, acquirefn, desc,
2113 inheritchecker=None, parentenvvar=None):
2119 inheritchecker=None, parentenvvar=None):
2114 parentlock = None
2120 parentlock = None
2115 # the contents of parentenvvar are used by the underlying lock to
2121 # the contents of parentenvvar are used by the underlying lock to
2116 # determine whether it can be inherited
2122 # determine whether it can be inherited
2117 if parentenvvar is not None:
2123 if parentenvvar is not None:
2118 parentlock = encoding.environ.get(parentenvvar)
2124 parentlock = encoding.environ.get(parentenvvar)
2119
2125
2120 timeout = 0
2126 timeout = 0
2121 warntimeout = 0
2127 warntimeout = 0
2122 if wait:
2128 if wait:
2123 timeout = self.ui.configint("ui", "timeout")
2129 timeout = self.ui.configint("ui", "timeout")
2124 warntimeout = self.ui.configint("ui", "timeout.warn")
2130 warntimeout = self.ui.configint("ui", "timeout.warn")
2125 # internal config: ui.signal-safe-lock
2131 # internal config: ui.signal-safe-lock
2126 signalsafe = self.ui.configbool('ui', 'signal-safe-lock')
2132 signalsafe = self.ui.configbool('ui', 'signal-safe-lock')
2127
2133
2128 l = lockmod.trylock(self.ui, vfs, lockname, timeout, warntimeout,
2134 l = lockmod.trylock(self.ui, vfs, lockname, timeout, warntimeout,
2129 releasefn=releasefn,
2135 releasefn=releasefn,
2130 acquirefn=acquirefn, desc=desc,
2136 acquirefn=acquirefn, desc=desc,
2131 inheritchecker=inheritchecker,
2137 inheritchecker=inheritchecker,
2132 parentlock=parentlock,
2138 parentlock=parentlock,
2133 signalsafe=signalsafe)
2139 signalsafe=signalsafe)
2134 return l
2140 return l
2135
2141
2136 def _afterlock(self, callback):
2142 def _afterlock(self, callback):
2137 """add a callback to be run when the repository is fully unlocked
2143 """add a callback to be run when the repository is fully unlocked
2138
2144
2139 The callback will be executed when the outermost lock is released
2145 The callback will be executed when the outermost lock is released
2140 (with wlock being higher level than 'lock')."""
2146 (with wlock being higher level than 'lock')."""
2141 for ref in (self._wlockref, self._lockref):
2147 for ref in (self._wlockref, self._lockref):
2142 l = ref and ref()
2148 l = ref and ref()
2143 if l and l.held:
2149 if l and l.held:
2144 l.postrelease.append(callback)
2150 l.postrelease.append(callback)
2145 break
2151 break
2147 else: # no lock has been found.
2153 else: # no lock has been found.
2147 callback()
2153 callback()
2148
2154
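_afterlock() is the mechanism that defers the txnclose hooks above until the outermost lock is released (or runs them immediately when nothing is locked). An extension-style sketch of the same pattern (helper name and message are hypothetical):

def notifywhenunlocked(repo, message):
    def callback():
        # Runs once the outermost lock is released, or right away if the
        # repository is not locked at all (see _afterlock() above).
        repo.ui.status(message)
    repo._afterlock(callback)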
2149 def lock(self, wait=True):
2155 def lock(self, wait=True):
2150 '''Lock the repository store (.hg/store) and return a weak reference
2156 '''Lock the repository store (.hg/store) and return a weak reference
2151 to the lock. Use this before modifying the store (e.g. committing or
2157 to the lock. Use this before modifying the store (e.g. committing or
2152 stripping). If you are opening a transaction, get a lock as well.
2158 stripping). If you are opening a transaction, get a lock as well.
2153
2159
2154 If both 'lock' and 'wlock' must be acquired, ensure you always acquire
2160 If both 'lock' and 'wlock' must be acquired, ensure you always acquire
2155 'wlock' first to avoid a dead-lock hazard.'''
2161 'wlock' first to avoid a dead-lock hazard.'''
2156 l = self._currentlock(self._lockref)
2162 l = self._currentlock(self._lockref)
2157 if l is not None:
2163 if l is not None:
2158 l.lock()
2164 l.lock()
2159 return l
2165 return l
2160
2166
2161 l = self._lock(self.svfs, "lock", wait, None,
2167 l = self._lock(self.svfs, "lock", wait, None,
2162 self.invalidate, _('repository %s') % self.origroot)
2168 self.invalidate, _('repository %s') % self.origroot)
2163 self._lockref = weakref.ref(l)
2169 self._lockref = weakref.ref(l)
2164 return l
2170 return l
2165
2171
2166 def _wlockchecktransaction(self):
2172 def _wlockchecktransaction(self):
2167 if self.currenttransaction() is not None:
2173 if self.currenttransaction() is not None:
2168 raise error.LockInheritanceContractViolation(
2174 raise error.LockInheritanceContractViolation(
2169 'wlock cannot be inherited in the middle of a transaction')
2175 'wlock cannot be inherited in the middle of a transaction')
2170
2176
2171 def wlock(self, wait=True):
2177 def wlock(self, wait=True):
2172 '''Lock the non-store parts of the repository (everything under
2178 '''Lock the non-store parts of the repository (everything under
2173 .hg except .hg/store) and return a weak reference to the lock.
2179 .hg except .hg/store) and return a weak reference to the lock.
2174
2180
2175 Use this before modifying files in .hg.
2181 Use this before modifying files in .hg.
2176
2182
2177 If both 'lock' and 'wlock' must be acquired, ensure you always acquire
2183 If both 'lock' and 'wlock' must be acquired, ensure you always acquire
2178 'wlock' first to avoid a dead-lock hazard.'''
2184 'wlock' first to avoid a dead-lock hazard.'''
2179 l = self._wlockref and self._wlockref()
2185 l = self._wlockref and self._wlockref()
2180 if l is not None and l.held:
2186 if l is not None and l.held:
2181 l.lock()
2187 l.lock()
2182 return l
2188 return l
2183
2189
2184 # We do not need to check for non-waiting lock acquisition. Such
2190 # We do not need to check for non-waiting lock acquisition. Such
2185 # acquisitions would not cause a dead-lock as they would just fail.
2191 # acquisitions would not cause a dead-lock as they would just fail.
2186 if wait and (self.ui.configbool('devel', 'all-warnings')
2192 if wait and (self.ui.configbool('devel', 'all-warnings')
2187 or self.ui.configbool('devel', 'check-locks')):
2193 or self.ui.configbool('devel', 'check-locks')):
2188 if self._currentlock(self._lockref) is not None:
2194 if self._currentlock(self._lockref) is not None:
2189 self.ui.develwarn('"wlock" acquired after "lock"')
2195 self.ui.develwarn('"wlock" acquired after "lock"')
2190
2196
2191 def unlock():
2197 def unlock():
2192 if self.dirstate.pendingparentchange():
2198 if self.dirstate.pendingparentchange():
2193 self.dirstate.invalidate()
2199 self.dirstate.invalidate()
2194 else:
2200 else:
2195 self.dirstate.write(None)
2201 self.dirstate.write(None)
2196
2202
2197 self._filecache['dirstate'].refresh()
2203 self._filecache['dirstate'].refresh()
2198
2204
2199 l = self._lock(self.vfs, "wlock", wait, unlock,
2205 l = self._lock(self.vfs, "wlock", wait, unlock,
2200 self.invalidatedirstate, _('working directory of %s') %
2206 self.invalidatedirstate, _('working directory of %s') %
2201 self.origroot,
2207 self.origroot,
2202 inheritchecker=self._wlockchecktransaction,
2208 inheritchecker=self._wlockchecktransaction,
2203 parentenvvar='HG_WLOCK_LOCKER')
2209 parentenvvar='HG_WLOCK_LOCKER')
2204 self._wlockref = weakref.ref(l)
2210 self._wlockref = weakref.ref(l)
2205 return l
2211 return l
2206
2212
2207 def _currentlock(self, lockref):
2213 def _currentlock(self, lockref):
2208 """Returns the lock if it's held, or None if it's not."""
2214 """Returns the lock if it's held, or None if it's not."""
2209 if lockref is None:
2215 if lockref is None:
2210 return None
2216 return None
2211 l = lockref()
2217 l = lockref()
2212 if l is None or not l.held:
2218 if l is None or not l.held:
2213 return None
2219 return None
2214 return l
2220 return l
2215
2221
2216 def currentwlock(self):
2222 def currentwlock(self):
2217 """Returns the wlock if it's held, or None if it's not."""
2223 """Returns the wlock if it's held, or None if it's not."""
2218 return self._currentlock(self._wlockref)
2224 return self._currentlock(self._wlockref)
2219
2225
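currentwlock()/_currentlock() let callers assert a locking precondition without re-acquiring the lock, mirroring the devel check-locks logic in transaction() above. A minimal sketch (the helper is hypothetical):

from mercurial import error

def requirewlock(repo):
    # Raise the same ProgrammingError used by transaction() when a caller
    # that must hold the wlock does not.
    if repo.currentwlock() is None:
        raise error.ProgrammingError('caller must hold the wlock')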
2220 def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
2226 def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
2221 """
2227 """
2222 commit an individual file as part of a larger transaction
2228 commit an individual file as part of a larger transaction
2223 """
2229 """
2224
2230
2225 fname = fctx.path()
2231 fname = fctx.path()
2226 fparent1 = manifest1.get(fname, nullid)
2232 fparent1 = manifest1.get(fname, nullid)
2227 fparent2 = manifest2.get(fname, nullid)
2233 fparent2 = manifest2.get(fname, nullid)
2228 if isinstance(fctx, context.filectx):
2234 if isinstance(fctx, context.filectx):
2229 node = fctx.filenode()
2235 node = fctx.filenode()
2230 if node in [fparent1, fparent2]:
2236 if node in [fparent1, fparent2]:
2231 self.ui.debug('reusing %s filelog entry\n' % fname)
2237 self.ui.debug('reusing %s filelog entry\n' % fname)
2232 if manifest1.flags(fname) != fctx.flags():
2238 if manifest1.flags(fname) != fctx.flags():
2233 changelist.append(fname)
2239 changelist.append(fname)
2234 return node
2240 return node
2235
2241
2236 flog = self.file(fname)
2242 flog = self.file(fname)
2237 meta = {}
2243 meta = {}
2238 copy = fctx.renamed()
2244 copy = fctx.renamed()
2239 if copy and copy[0] != fname:
2245 if copy and copy[0] != fname:
2240 # Mark the new revision of this file as a copy of another
2246 # Mark the new revision of this file as a copy of another
2241 # file. This copy data will effectively act as a parent
2247 # file. This copy data will effectively act as a parent
2242 # of this new revision. If this is a merge, the first
2248 # of this new revision. If this is a merge, the first
2243 # parent will be the nullid (meaning "look up the copy data")
2249 # parent will be the nullid (meaning "look up the copy data")
2244 # and the second one will be the other parent. For example:
2250 # and the second one will be the other parent. For example:
2245 #
2251 #
2246 # 0 --- 1 --- 3 rev1 changes file foo
2252 # 0 --- 1 --- 3 rev1 changes file foo
2247 # \ / rev2 renames foo to bar and changes it
2253 # \ / rev2 renames foo to bar and changes it
2248 # \- 2 -/ rev3 should have bar with all changes and
2254 # \- 2 -/ rev3 should have bar with all changes and
2249 # should record that bar descends from
2255 # should record that bar descends from
2250 # bar in rev2 and foo in rev1
2256 # bar in rev2 and foo in rev1
2251 #
2257 #
2252 # this allows this merge to succeed:
2258 # this allows this merge to succeed:
2253 #
2259 #
2254 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
2260 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
2255 # \ / merging rev3 and rev4 should use bar@rev2
2261 # \ / merging rev3 and rev4 should use bar@rev2
2256 # \- 2 --- 4 as the merge base
2262 # \- 2 --- 4 as the merge base
2257 #
2263 #
2258
2264
2259 cfname = copy[0]
2265 cfname = copy[0]
2260 crev = manifest1.get(cfname)
2266 crev = manifest1.get(cfname)
2261 newfparent = fparent2
2267 newfparent = fparent2
2262
2268
2263 if manifest2: # branch merge
2269 if manifest2: # branch merge
2264 if fparent2 == nullid or crev is None: # copied on remote side
2270 if fparent2 == nullid or crev is None: # copied on remote side
2265 if cfname in manifest2:
2271 if cfname in manifest2:
2266 crev = manifest2[cfname]
2272 crev = manifest2[cfname]
2267 newfparent = fparent1
2273 newfparent = fparent1
2268
2274
2269 # Here, we used to search backwards through history to try to find
2275 # Here, we used to search backwards through history to try to find
2270 # where the file copy came from if the source of a copy was not in
2276 # where the file copy came from if the source of a copy was not in
2271 # the parent directory. However, this doesn't actually make sense to
2277 # the parent directory. However, this doesn't actually make sense to
2272 # do (what does a copy from something not in your working copy even
2278 # do (what does a copy from something not in your working copy even
2273 # mean?) and it causes bugs (eg, issue4476). Instead, we will warn
2279 # mean?) and it causes bugs (eg, issue4476). Instead, we will warn
2274 # the user that copy information was dropped, so if they didn't
2280 # the user that copy information was dropped, so if they didn't
2275 # expect this outcome it can be fixed, but this is the correct
2281 # expect this outcome it can be fixed, but this is the correct
2276 # behavior in this circumstance.
2282 # behavior in this circumstance.
2277
2283
2278 if crev:
2284 if crev:
2279 self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
2285 self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
2280 meta["copy"] = cfname
2286 meta["copy"] = cfname
2281 meta["copyrev"] = hex(crev)
2287 meta["copyrev"] = hex(crev)
2282 fparent1, fparent2 = nullid, newfparent
2288 fparent1, fparent2 = nullid, newfparent
2283 else:
2289 else:
2284 self.ui.warn(_("warning: can't find ancestor for '%s' "
2290 self.ui.warn(_("warning: can't find ancestor for '%s' "
2285 "copied from '%s'!\n") % (fname, cfname))
2291 "copied from '%s'!\n") % (fname, cfname))
2286
2292
2287 elif fparent1 == nullid:
2293 elif fparent1 == nullid:
2288 fparent1, fparent2 = fparent2, nullid
2294 fparent1, fparent2 = fparent2, nullid
2289 elif fparent2 != nullid:
2295 elif fparent2 != nullid:
2290 # is one parent an ancestor of the other?
2296 # is one parent an ancestor of the other?
2291 fparentancestors = flog.commonancestorsheads(fparent1, fparent2)
2297 fparentancestors = flog.commonancestorsheads(fparent1, fparent2)
2292 if fparent1 in fparentancestors:
2298 if fparent1 in fparentancestors:
2293 fparent1, fparent2 = fparent2, nullid
2299 fparent1, fparent2 = fparent2, nullid
2294 elif fparent2 in fparentancestors:
2300 elif fparent2 in fparentancestors:
2295 fparent2 = nullid
2301 fparent2 = nullid
2296
2302
2297 # is the file changed?
2303 # is the file changed?
2298 text = fctx.data()
2304 text = fctx.data()
2299 if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
2305 if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
2300 changelist.append(fname)
2306 changelist.append(fname)
2301 return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
2307 return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
2302 # are just the flags changed during merge?
2308 # are just the flags changed during merge?
2303 elif fname in manifest1 and manifest1.flags(fname) != fctx.flags():
2309 elif fname in manifest1 and manifest1.flags(fname) != fctx.flags():
2304 changelist.append(fname)
2310 changelist.append(fname)
2305
2311
2306 return fparent1
2312 return fparent1
2307
2313
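The copy branch above records the rename source in the filelog metadata via meta["copy"] / meta["copyrev"]; reading it back from a file context is the inverse operation. A small sketch using the same renamed() check _filecommit() starts from (the helper name is hypothetical):

def copysource(fctx):
    # Returns the path this file was copied/renamed from, or None,
    # mirroring the "copy and copy[0] != fname" test in _filecommit().
    copy = fctx.renamed()
    if copy and copy[0] != fctx.path():
        return copy[0]
    return None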
2308 def checkcommitpatterns(self, wctx, vdirs, match, status, fail):
2314 def checkcommitpatterns(self, wctx, vdirs, match, status, fail):
2309 """check for commit arguments that aren't committable"""
2315 """check for commit arguments that aren't committable"""
2310 if match.isexact() or match.prefix():
2316 if match.isexact() or match.prefix():
2311 matched = set(status.modified + status.added + status.removed)
2317 matched = set(status.modified + status.added + status.removed)
2312
2318
2313 for f in match.files():
2319 for f in match.files():
2314 f = self.dirstate.normalize(f)
2320 f = self.dirstate.normalize(f)
2315 if f == '.' or f in matched or f in wctx.substate:
2321 if f == '.' or f in matched or f in wctx.substate:
2316 continue
2322 continue
2317 if f in status.deleted:
2323 if f in status.deleted:
2318 fail(f, _('file not found!'))
2324 fail(f, _('file not found!'))
2319 if f in vdirs: # visited directory
2325 if f in vdirs: # visited directory
2320 d = f + '/'
2326 d = f + '/'
2321 for mf in matched:
2327 for mf in matched:
2322 if mf.startswith(d):
2328 if mf.startswith(d):
2323 break
2329 break
2324 else:
2330 else:
2325 fail(f, _("no match under directory!"))
2331 fail(f, _("no match under directory!"))
2326 elif f not in self.dirstate:
2332 elif f not in self.dirstate:
2327 fail(f, _("file not tracked!"))
2333 fail(f, _("file not tracked!"))
2328
2334
2329 @unfilteredmethod
2335 @unfilteredmethod
2330 def commit(self, text="", user=None, date=None, match=None, force=False,
2336 def commit(self, text="", user=None, date=None, match=None, force=False,
2331 editor=False, extra=None):
2337 editor=False, extra=None):
2332 """Add a new revision to current repository.
2338 """Add a new revision to current repository.
2333
2339
2334 Revision information is gathered from the working directory;
2340 Revision information is gathered from the working directory;
2335 match can be used to filter the committed files. If editor is
2341 match can be used to filter the committed files. If editor is
2336 supplied, it is called to get a commit message.
2342 supplied, it is called to get a commit message.
2337 """
2343 """
2338 if extra is None:
2344 if extra is None:
2339 extra = {}
2345 extra = {}
2340
2346
2341 def fail(f, msg):
2347 def fail(f, msg):
2342 raise error.Abort('%s: %s' % (f, msg))
2348 raise error.Abort('%s: %s' % (f, msg))
2343
2349
2344 if not match:
2350 if not match:
2345 match = matchmod.always(self.root, '')
2351 match = matchmod.always(self.root, '')
2346
2352
2347 if not force:
2353 if not force:
2348 vdirs = []
2354 vdirs = []
2349 match.explicitdir = vdirs.append
2355 match.explicitdir = vdirs.append
2350 match.bad = fail
2356 match.bad = fail
2351
2357
2352 wlock = lock = tr = None
2358 wlock = lock = tr = None
2353 try:
2359 try:
2354 wlock = self.wlock()
2360 wlock = self.wlock()
2355 lock = self.lock() # for recent changelog (see issue4368)
2361 lock = self.lock() # for recent changelog (see issue4368)
2356
2362
2357 wctx = self[None]
2363 wctx = self[None]
2358 merge = len(wctx.parents()) > 1
2364 merge = len(wctx.parents()) > 1
2359
2365
2360 if not force and merge and not match.always():
2366 if not force and merge and not match.always():
2361 raise error.Abort(_('cannot partially commit a merge '
2367 raise error.Abort(_('cannot partially commit a merge '
2362 '(do not specify files or patterns)'))
2368 '(do not specify files or patterns)'))
2363
2369
2364 status = self.status(match=match, clean=force)
2370 status = self.status(match=match, clean=force)
2365 if force:
2371 if force:
2366 status.modified.extend(status.clean) # mq may commit clean files
2372 status.modified.extend(status.clean) # mq may commit clean files
2367
2373
2368 # check subrepos
2374 # check subrepos
2369 subs, commitsubs, newstate = subrepoutil.precommit(
2375 subs, commitsubs, newstate = subrepoutil.precommit(
2370 self.ui, wctx, status, match, force=force)
2376 self.ui, wctx, status, match, force=force)
2371
2377
2372 # make sure all explicit patterns are matched
2378 # make sure all explicit patterns are matched
2373 if not force:
2379 if not force:
2374 self.checkcommitpatterns(wctx, vdirs, match, status, fail)
2380 self.checkcommitpatterns(wctx, vdirs, match, status, fail)
2375
2381
2376 cctx = context.workingcommitctx(self, status,
2382 cctx = context.workingcommitctx(self, status,
2377 text, user, date, extra)
2383 text, user, date, extra)
2378
2384
2379 # internal config: ui.allowemptycommit
2385 # internal config: ui.allowemptycommit
2380 allowemptycommit = (wctx.branch() != wctx.p1().branch()
2386 allowemptycommit = (wctx.branch() != wctx.p1().branch()
2381 or extra.get('close') or merge or cctx.files()
2387 or extra.get('close') or merge or cctx.files()
2382 or self.ui.configbool('ui', 'allowemptycommit'))
2388 or self.ui.configbool('ui', 'allowemptycommit'))
2383 if not allowemptycommit:
2389 if not allowemptycommit:
2384 return None
2390 return None
2385
2391
2386 if merge and cctx.deleted():
2392 if merge and cctx.deleted():
2387 raise error.Abort(_("cannot commit merge with missing files"))
2393 raise error.Abort(_("cannot commit merge with missing files"))
2388
2394
2389 ms = mergemod.mergestate.read(self)
2395 ms = mergemod.mergestate.read(self)
2390 mergeutil.checkunresolved(ms)
2396 mergeutil.checkunresolved(ms)
2391
2397
2392 if editor:
2398 if editor:
2393 cctx._text = editor(self, cctx, subs)
2399 cctx._text = editor(self, cctx, subs)
2394 edited = (text != cctx._text)
2400 edited = (text != cctx._text)
2395
2401
2396 # Save commit message in case this transaction gets rolled back
2402 # Save commit message in case this transaction gets rolled back
2397 # (e.g. by a pretxncommit hook). Leave the content alone on
2403 # (e.g. by a pretxncommit hook). Leave the content alone on
2398 # the assumption that the user will use the same editor again.
2404 # the assumption that the user will use the same editor again.
2399 msgfn = self.savecommitmessage(cctx._text)
2405 msgfn = self.savecommitmessage(cctx._text)
2400
2406
2401 # commit subs and write new state
2407 # commit subs and write new state
2402 if subs:
2408 if subs:
2403 for s in sorted(commitsubs):
2409 for s in sorted(commitsubs):
2404 sub = wctx.sub(s)
2410 sub = wctx.sub(s)
2405 self.ui.status(_('committing subrepository %s\n') %
2411 self.ui.status(_('committing subrepository %s\n') %
2406 subrepoutil.subrelpath(sub))
2412 subrepoutil.subrelpath(sub))
2407 sr = sub.commit(cctx._text, user, date)
2413 sr = sub.commit(cctx._text, user, date)
2408 newstate[s] = (newstate[s][0], sr)
2414 newstate[s] = (newstate[s][0], sr)
2409 subrepoutil.writestate(self, newstate)
2415 subrepoutil.writestate(self, newstate)
2410
2416
2411 p1, p2 = self.dirstate.parents()
2417 p1, p2 = self.dirstate.parents()
2412 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
2418 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
2413 try:
2419 try:
2414 self.hook("precommit", throw=True, parent1=hookp1,
2420 self.hook("precommit", throw=True, parent1=hookp1,
2415 parent2=hookp2)
2421 parent2=hookp2)
2416 tr = self.transaction('commit')
2422 tr = self.transaction('commit')
2417 ret = self.commitctx(cctx, True)
2423 ret = self.commitctx(cctx, True)
2418 except: # re-raises
2424 except: # re-raises
2419 if edited:
2425 if edited:
2420 self.ui.write(
2426 self.ui.write(
2421 _('note: commit message saved in %s\n') % msgfn)
2427 _('note: commit message saved in %s\n') % msgfn)
2422 raise
2428 raise
2423 # update bookmarks, dirstate and mergestate
2429 # update bookmarks, dirstate and mergestate
2424 bookmarks.update(self, [p1, p2], ret)
2430 bookmarks.update(self, [p1, p2], ret)
2425 cctx.markcommitted(ret)
2431 cctx.markcommitted(ret)
2426 ms.reset()
2432 ms.reset()
2427 tr.close()
2433 tr.close()
2428
2434
2429 finally:
2435 finally:
2430 lockmod.release(tr, lock, wlock)
2436 lockmod.release(tr, lock, wlock)
2431
2437
2432 def commithook(node=hex(ret), parent1=hookp1, parent2=hookp2):
2438 def commithook(node=hex(ret), parent1=hookp1, parent2=hookp2):
2433 # hack for commands that use a temporary commit (eg: histedit):
2439 # hack for commands that use a temporary commit (eg: histedit):
2434 # the temporary commit may have been stripped before the hook is run
2440 # the temporary commit may have been stripped before the hook is run
2435 if self.changelog.hasnode(ret):
2441 if self.changelog.hasnode(ret):
2436 self.hook("commit", node=node, parent1=parent1,
2442 self.hook("commit", node=node, parent1=parent1,
2437 parent2=parent2)
2443 parent2=parent2)
2438 self._afterlock(commithook)
2444 self._afterlock(commithook)
2439 return ret
2445 return ret
2440
2446
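Putting the pieces of commit() together from the caller's side, a condensed usage sketch (repository path, file name, message and user are hypothetical; error handling omitted):

from mercurial import hg, match as matchmod, ui as uimod

def commitonefile(path, filename, message, username):
    ui = uimod.ui.load()
    repo = hg.repository(ui, path)
    # restrict the commit to a single file, as commit()'s 'match' allows
    m = matchmod.match(repo.root, repo.root, [filename])
    return repo.commit(text=message, user=username, match=m)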
2441 @unfilteredmethod
2447 @unfilteredmethod
2442 def commitctx(self, ctx, error=False):
2448 def commitctx(self, ctx, error=False):
2443 """Add a new revision to current repository.
2449 """Add a new revision to current repository.
2444 Revision information is passed via the context argument.
2450 Revision information is passed via the context argument.
2445
2451
2446 ctx.files() should list all files involved in this commit, i.e.
2452 ctx.files() should list all files involved in this commit, i.e.
2447 modified/added/removed files. On merge, it may be wider than the
2453 modified/added/removed files. On merge, it may be wider than the
2448 ctx.files() to be committed, since any file nodes derived directly
2454 ctx.files() to be committed, since any file nodes derived directly
2449 from p1 or p2 are excluded from the committed ctx.files().
2455 from p1 or p2 are excluded from the committed ctx.files().
2450 """
2456 """
2451
2457
2452 tr = None
2458 tr = None
2453 p1, p2 = ctx.p1(), ctx.p2()
2459 p1, p2 = ctx.p1(), ctx.p2()
2454 user = ctx.user()
2460 user = ctx.user()
2455
2461
2456 lock = self.lock()
2462 lock = self.lock()
2457 try:
2463 try:
2458 tr = self.transaction("commit")
2464 tr = self.transaction("commit")
2459 trp = weakref.proxy(tr)
2465 trp = weakref.proxy(tr)
2460
2466
2461 if ctx.manifestnode():
2467 if ctx.manifestnode():
2462 # reuse an existing manifest revision
2468 # reuse an existing manifest revision
2463 self.ui.debug('reusing known manifest\n')
2469 self.ui.debug('reusing known manifest\n')
2464 mn = ctx.manifestnode()
2470 mn = ctx.manifestnode()
2465 files = ctx.files()
2471 files = ctx.files()
2466 elif ctx.files():
2472 elif ctx.files():
2467 m1ctx = p1.manifestctx()
2473 m1ctx = p1.manifestctx()
2468 m2ctx = p2.manifestctx()
2474 m2ctx = p2.manifestctx()
2469 mctx = m1ctx.copy()
2475 mctx = m1ctx.copy()
2470
2476
2471 m = mctx.read()
2477 m = mctx.read()
2472 m1 = m1ctx.read()
2478 m1 = m1ctx.read()
2473 m2 = m2ctx.read()
2479 m2 = m2ctx.read()
2474
2480
2475 # check in files
2481 # check in files
2476 added = []
2482 added = []
2477 changed = []
2483 changed = []
2478 removed = list(ctx.removed())
2484 removed = list(ctx.removed())
2479 linkrev = len(self)
2485 linkrev = len(self)
2480 self.ui.note(_("committing files:\n"))
2486 self.ui.note(_("committing files:\n"))
2481 for f in sorted(ctx.modified() + ctx.added()):
2487 for f in sorted(ctx.modified() + ctx.added()):
2482 self.ui.note(f + "\n")
2488 self.ui.note(f + "\n")
2483 try:
2489 try:
2484 fctx = ctx[f]
2490 fctx = ctx[f]
2485 if fctx is None:
2491 if fctx is None:
2486 removed.append(f)
2492 removed.append(f)
2487 else:
2493 else:
2488 added.append(f)
2494 added.append(f)
2489 m[f] = self._filecommit(fctx, m1, m2, linkrev,
2495 m[f] = self._filecommit(fctx, m1, m2, linkrev,
2490 trp, changed)
2496 trp, changed)
2491 m.setflag(f, fctx.flags())
2497 m.setflag(f, fctx.flags())
2492 except OSError as inst:
2498 except OSError as inst:
2493 self.ui.warn(_("trouble committing %s!\n") % f)
2499 self.ui.warn(_("trouble committing %s!\n") % f)
2494 raise
2500 raise
2495 except IOError as inst:
2501 except IOError as inst:
2496 errcode = getattr(inst, 'errno', errno.ENOENT)
2502 errcode = getattr(inst, 'errno', errno.ENOENT)
2497 if error or errcode and errcode != errno.ENOENT:
2503 if error or errcode and errcode != errno.ENOENT:
2498 self.ui.warn(_("trouble committing %s!\n") % f)
2504 self.ui.warn(_("trouble committing %s!\n") % f)
2499 raise
2505 raise
2500
2506
2501 # update manifest
2507 # update manifest
2502 removed = [f for f in sorted(removed) if f in m1 or f in m2]
2508 removed = [f for f in sorted(removed) if f in m1 or f in m2]
2503 drop = [f for f in removed if f in m]
2509 drop = [f for f in removed if f in m]
2504 for f in drop:
2510 for f in drop:
2505 del m[f]
2511 del m[f]
2506 files = changed + removed
2512 files = changed + removed
2507 md = None
2513 md = None
2508 if not files:
2514 if not files:
2509 # if no "files" actually changed in terms of the changelog,
2515 # if no "files" actually changed in terms of the changelog,
2510 # try hard to detect unmodified manifest entry so that the
2516 # try hard to detect unmodified manifest entry so that the
2511 # exact same commit can be reproduced later on convert.
2517 # exact same commit can be reproduced later on convert.
2512 md = m1.diff(m, scmutil.matchfiles(self, ctx.files()))
2518 md = m1.diff(m, scmutil.matchfiles(self, ctx.files()))
2513 if not files and md:
2519 if not files and md:
2514 self.ui.debug('not reusing manifest (no file change in '
2520 self.ui.debug('not reusing manifest (no file change in '
2515 'changelog, but manifest differs)\n')
2521 'changelog, but manifest differs)\n')
2516 if files or md:
2522 if files or md:
2517 self.ui.note(_("committing manifest\n"))
2523 self.ui.note(_("committing manifest\n"))
2518 # we're using narrowmatch here since it's already applied at
2524 # we're using narrowmatch here since it's already applied at
2519 # other stages (such as dirstate.walk), so we're already
2525 # other stages (such as dirstate.walk), so we're already
2520 # ignoring things outside of narrowspec in most cases. The
2526 # ignoring things outside of narrowspec in most cases. The
2521 # one case where we might have files outside the narrowspec
2527 # one case where we might have files outside the narrowspec
2522 # at this point is merges, and we already error out in the
2528 # at this point is merges, and we already error out in the
2523 # case where the merge has files outside of the narrowspec,
2529 # case where the merge has files outside of the narrowspec,
2524 # so this is safe.
2530 # so this is safe.
2525 mn = mctx.write(trp, linkrev,
2531 mn = mctx.write(trp, linkrev,
2526 p1.manifestnode(), p2.manifestnode(),
2532 p1.manifestnode(), p2.manifestnode(),
2527 added, drop, match=self.narrowmatch())
2533 added, drop, match=self.narrowmatch())
2528 else:
2534 else:
2529 self.ui.debug('reusing manifest from p1 (listed files '
2535 self.ui.debug('reusing manifest from p1 (listed files '
2530 'actually unchanged)\n')
2536 'actually unchanged)\n')
2531 mn = p1.manifestnode()
2537 mn = p1.manifestnode()
2532 else:
2538 else:
2533 self.ui.debug('reusing manifest from p1 (no file change)\n')
2539 self.ui.debug('reusing manifest from p1 (no file change)\n')
2534 mn = p1.manifestnode()
2540 mn = p1.manifestnode()
2535 files = []
2541 files = []
2536
2542
2537 # update changelog
2543 # update changelog
2538 self.ui.note(_("committing changelog\n"))
2544 self.ui.note(_("committing changelog\n"))
2539 self.changelog.delayupdate(tr)
2545 self.changelog.delayupdate(tr)
2540 n = self.changelog.add(mn, files, ctx.description(),
2546 n = self.changelog.add(mn, files, ctx.description(),
2541 trp, p1.node(), p2.node(),
2547 trp, p1.node(), p2.node(),
2542 user, ctx.date(), ctx.extra().copy())
2548 user, ctx.date(), ctx.extra().copy())
2543 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
2549 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
2544 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
2550 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
2545 parent2=xp2)
2551 parent2=xp2)
2546 # set the new commit in its proper phase
2552 # set the new commit in its proper phase
2547 targetphase = subrepoutil.newcommitphase(self.ui, ctx)
2553 targetphase = subrepoutil.newcommitphase(self.ui, ctx)
2548 if targetphase:
2554 if targetphase:
2549 # retracting the boundary does not alter parent changesets.
2555 # retracting the boundary does not alter parent changesets.
2550 # if a parent has a higher phase, the resulting phase will
2556 # if a parent has a higher phase, the resulting phase will
2551 # be compliant anyway
2557 # be compliant anyway
2552 #
2558 #
2553 # if minimal phase was 0 we don't need to retract anything
2559 # if minimal phase was 0 we don't need to retract anything
2554 phases.registernew(self, tr, targetphase, [n])
2560 phases.registernew(self, tr, targetphase, [n])
2555 tr.close()
2561 tr.close()
2556 return n
2562 return n
2557 finally:
2563 finally:
2558 if tr:
2564 if tr:
2559 tr.release()
2565 tr.release()
2560 lock.release()
2566 lock.release()
2561
2567
2562 @unfilteredmethod
2568 @unfilteredmethod
2563 def destroying(self):
2569 def destroying(self):
2564 '''Inform the repository that nodes are about to be destroyed.
2570 '''Inform the repository that nodes are about to be destroyed.
2565 Intended for use by strip and rollback, so there's a common
2571 Intended for use by strip and rollback, so there's a common
2566 place for anything that has to be done before destroying history.
2572 place for anything that has to be done before destroying history.
2567
2573
2568 This is mostly useful for saving state that is in memory and waiting
2574 This is mostly useful for saving state that is in memory and waiting
2569 to be flushed when the current lock is released. Because a call to
2575 to be flushed when the current lock is released. Because a call to
2570 destroyed is imminent, the repo will be invalidated causing those
2576 destroyed is imminent, the repo will be invalidated causing those
2571 changes to stay in memory (waiting for the next unlock), or vanish
2577 changes to stay in memory (waiting for the next unlock), or vanish
2572 completely.
2578 completely.
2573 '''
2579 '''
2574 # When using the same lock to commit and strip, the phasecache is left
2580 # When using the same lock to commit and strip, the phasecache is left
2575 # dirty after committing. Then when we strip, the repo is invalidated,
2581 # dirty after committing. Then when we strip, the repo is invalidated,
2576 # causing those changes to disappear.
2582 # causing those changes to disappear.
2577 if '_phasecache' in vars(self):
2583 if '_phasecache' in vars(self):
2578 self._phasecache.write()
2584 self._phasecache.write()
2579
2585
2580 @unfilteredmethod
2586 @unfilteredmethod
2581 def destroyed(self):
2587 def destroyed(self):
2582 '''Inform the repository that nodes have been destroyed.
2588 '''Inform the repository that nodes have been destroyed.
2583 Intended for use by strip and rollback, so there's a common
2589 Intended for use by strip and rollback, so there's a common
2584 place for anything that has to be done after destroying history.
2590 place for anything that has to be done after destroying history.
2585 '''
2591 '''
2586 # When one tries to:
2592 # When one tries to:
2587 # 1) destroy nodes thus calling this method (e.g. strip)
2593 # 1) destroy nodes thus calling this method (e.g. strip)
2588 # 2) use phasecache somewhere (e.g. commit)
2594 # 2) use phasecache somewhere (e.g. commit)
2589 #
2595 #
2590 # then 2) will fail because the phasecache contains nodes that were
2596 # then 2) will fail because the phasecache contains nodes that were
2591 # removed. We can either remove phasecache from the filecache,
2597 # removed. We can either remove phasecache from the filecache,
2592 # causing it to reload next time it is accessed, or simply filter
2598 # causing it to reload next time it is accessed, or simply filter
2593 # the removed nodes now and write the updated cache.
2599 # the removed nodes now and write the updated cache.
2594 self._phasecache.filterunknown(self)
2600 self._phasecache.filterunknown(self)
2595 self._phasecache.write()
2601 self._phasecache.write()
2596
2602
2597 # refresh all repository caches
2603 # refresh all repository caches
2598 self.updatecaches()
2604 self.updatecaches()
2599
2605
2600 # Ensure the persistent tag cache is updated. Doing it now
2606 # Ensure the persistent tag cache is updated. Doing it now
2601 # means that the tag cache only has to worry about destroyed
2607 # means that the tag cache only has to worry about destroyed
2602 # heads immediately after a strip/rollback. That in turn
2608 # heads immediately after a strip/rollback. That in turn
2603 # guarantees that "cachetip == currenttip" (comparing both rev
2609 # guarantees that "cachetip == currenttip" (comparing both rev
2604 # and node) always means no nodes have been added or destroyed.
2610 # and node) always means no nodes have been added or destroyed.
2605
2611
2606 # XXX this is suboptimal when qrefresh'ing: we strip the current
2612 # XXX this is suboptimal when qrefresh'ing: we strip the current
2607 # head, refresh the tag cache, then immediately add a new head.
2613 # head, refresh the tag cache, then immediately add a new head.
2608 # But I think doing it this way is necessary for the "instant
2614 # But I think doing it this way is necessary for the "instant
2609 # tag cache retrieval" case to work.
2615 # tag cache retrieval" case to work.
2610 self.invalidate()
2616 self.invalidate()
2611
2617
2612 def status(self, node1='.', node2=None, match=None,
2618 def status(self, node1='.', node2=None, match=None,
2613 ignored=False, clean=False, unknown=False,
2619 ignored=False, clean=False, unknown=False,
2614 listsubrepos=False):
2620 listsubrepos=False):
2615 '''a convenience method that calls node1.status(node2)'''
2621 '''a convenience method that calls node1.status(node2)'''
2616 return self[node1].status(node2, match, ignored, clean, unknown,
2622 return self[node1].status(node2, match, ignored, clean, unknown,
2617 listsubrepos)
2623 listsubrepos)
2618
2624
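# Illustrative sketch (not part of localrepo.py): consuming the status()
# convenience method above from an extension or script. The helper name is
# made up; attribute names come from scmutil.status, and filenames are bytes.
def _writedirtyfiles(repo):
    st = repo.status(unknown=True)
    for f in st.modified:
        repo.ui.write(b'M %s\n' % f)
    for f in st.added:
        repo.ui.write(b'A %s\n' % f)
    for f in st.removed:
        repo.ui.write(b'R %s\n' % f)
    for f in st.unknown:
        repo.ui.write(b'? %s\n' % f)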
2619 def addpostdsstatus(self, ps):
2625 def addpostdsstatus(self, ps):
2620 """Add a callback to run within the wlock, at the point at which status
2626 """Add a callback to run within the wlock, at the point at which status
2621 fixups happen.
2627 fixups happen.
2622
2628
2623 On status completion, callback(wctx, status) will be called with the
2629 On status completion, callback(wctx, status) will be called with the
2624 wlock held, unless the dirstate has changed from underneath or the wlock
2630 wlock held, unless the dirstate has changed from underneath or the wlock
2625 couldn't be grabbed.
2631 couldn't be grabbed.
2626
2632
2627 Callbacks should not capture and use a cached copy of the dirstate --
2633 Callbacks should not capture and use a cached copy of the dirstate --
2628 it might change in the meanwhile. Instead, they should access the
2634 it might change in the meanwhile. Instead, they should access the
2629 dirstate via wctx.repo().dirstate.
2635 dirstate via wctx.repo().dirstate.
2630
2636
2631 This list is emptied out after each status run -- extensions should
2637 This list is emptied out after each status run -- extensions should
2632 make sure they add to this list each time dirstate.status is called.
2638 make sure they add to this list each time dirstate.status is called.
2633 Extensions should also make sure they don't call this for statuses
2639 Extensions should also make sure they don't call this for statuses
2634 that don't involve the dirstate.
2640 that don't involve the dirstate.
2635 """
2641 """
2636
2642
2637 # The list is located here for uniqueness reasons -- it is actually
2643 # The list is located here for uniqueness reasons -- it is actually
2638 # managed by the workingctx, but that isn't unique per-repo.
2644 # managed by the workingctx, but that isn't unique per-repo.
2639 self._postdsstatus.append(ps)
2645 self._postdsstatus.append(ps)
2640
2646
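# Illustrative sketch (not part of localrepo.py): an extension registering a
# post-dirstate-status callback via addpostdsstatus() above. Names are made
# up; the callback receives (wctx, status) with the wlock held and has to be
# re-registered before every dirstate.status call, as the docstring notes.
def _reportdirty(wctx, status):
    wctx.repo().ui.debug(b'%d files modified after status fixups\n'
                         % len(status.modified))

def _registerstatushook(repo):
    repo.addpostdsstatus(_reportdirty)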
2641 def postdsstatus(self):
2647 def postdsstatus(self):
2642 """Used by workingctx to get the list of post-dirstate-status hooks."""
2648 """Used by workingctx to get the list of post-dirstate-status hooks."""
2643 return self._postdsstatus
2649 return self._postdsstatus
2644
2650
2645 def clearpostdsstatus(self):
2651 def clearpostdsstatus(self):
2646 """Used by workingctx to clear post-dirstate-status hooks."""
2652 """Used by workingctx to clear post-dirstate-status hooks."""
2647 del self._postdsstatus[:]
2653 del self._postdsstatus[:]
2648
2654
2649 def heads(self, start=None):
2655 def heads(self, start=None):
2650 if start is None:
2656 if start is None:
2651 cl = self.changelog
2657 cl = self.changelog
2652 headrevs = reversed(cl.headrevs())
2658 headrevs = reversed(cl.headrevs())
2653 return [cl.node(rev) for rev in headrevs]
2659 return [cl.node(rev) for rev in headrevs]
2654
2660
2655 heads = self.changelog.heads(start)
2661 heads = self.changelog.heads(start)
2656 # sort the output in rev descending order
2662 # sort the output in rev descending order
2657 return sorted(heads, key=self.changelog.rev, reverse=True)
2663 return sorted(heads, key=self.changelog.rev, reverse=True)
2658
2664
2659 def branchheads(self, branch=None, start=None, closed=False):
2665 def branchheads(self, branch=None, start=None, closed=False):
2660 '''return a (possibly filtered) list of heads for the given branch
2666 '''return a (possibly filtered) list of heads for the given branch
2661
2667
2662 Heads are returned in topological order, from newest to oldest.
2668 Heads are returned in topological order, from newest to oldest.
2663 If branch is None, use the dirstate branch.
2669 If branch is None, use the dirstate branch.
2664 If start is not None, return only heads reachable from start.
2670 If start is not None, return only heads reachable from start.
2665 If closed is True, return heads that are marked as closed as well.
2671 If closed is True, return heads that are marked as closed as well.
2666 '''
2672 '''
2667 if branch is None:
2673 if branch is None:
2668 branch = self[None].branch()
2674 branch = self[None].branch()
2669 branches = self.branchmap()
2675 branches = self.branchmap()
2670 if branch not in branches:
2676 if branch not in branches:
2671 return []
2677 return []
2672 # the cache returns heads ordered lowest to highest
2678 # the cache returns heads ordered lowest to highest
2673 bheads = list(reversed(branches.branchheads(branch, closed=closed)))
2679 bheads = list(reversed(branches.branchheads(branch, closed=closed)))
2674 if start is not None:
2680 if start is not None:
2675 # filter out the heads that cannot be reached from startrev
2681 # filter out the heads that cannot be reached from startrev
2676 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
2682 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
2677 bheads = [h for h in bheads if h in fbheads]
2683 bheads = [h for h in bheads if h in fbheads]
2678 return bheads
2684 return bheads
2679
2685
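# Illustrative sketch (not part of localrepo.py): listing the heads of a named
# branch through branchheads() above. Heads come back as binary nodes, newest
# first; closed heads are skipped unless closed=True is passed. The helper and
# the 'default' branch name are just examples.
from mercurial.node import short as _short

def _showbranchtips(ui, repo, branch=b'default'):
    for node in repo.branchheads(branch):
        ui.write(b'%s\n' % _short(node))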
2680 def branches(self, nodes):
2686 def branches(self, nodes):
2681 if not nodes:
2687 if not nodes:
2682 nodes = [self.changelog.tip()]
2688 nodes = [self.changelog.tip()]
2683 b = []
2689 b = []
2684 for n in nodes:
2690 for n in nodes:
2685 t = n
2691 t = n
2686 while True:
2692 while True:
2687 p = self.changelog.parents(n)
2693 p = self.changelog.parents(n)
2688 if p[1] != nullid or p[0] == nullid:
2694 if p[1] != nullid or p[0] == nullid:
2689 b.append((t, n, p[0], p[1]))
2695 b.append((t, n, p[0], p[1]))
2690 break
2696 break
2691 n = p[0]
2697 n = p[0]
2692 return b
2698 return b
2693
2699
2694 def between(self, pairs):
2700 def between(self, pairs):
2695 r = []
2701 r = []
2696
2702
2697 for top, bottom in pairs:
2703 for top, bottom in pairs:
2698 n, l, i = top, [], 0
2704 n, l, i = top, [], 0
2699 f = 1
2705 f = 1
2700
2706
2701 while n != bottom and n != nullid:
2707 while n != bottom and n != nullid:
2702 p = self.changelog.parents(n)[0]
2708 p = self.changelog.parents(n)[0]
2703 if i == f:
2709 if i == f:
2704 l.append(n)
2710 l.append(n)
2705 f = f * 2
2711 f = f * 2
2706 n = p
2712 n = p
2707 i += 1
2713 i += 1
2708
2714
2709 r.append(l)
2715 r.append(l)
2710
2716
2711 return r
2717 return r
2712
2718
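# Note on between() above (a sketch, not part of localrepo.py): walking first
# parents from ``top`` towards ``bottom``, a node is kept whenever the number
# of steps taken equals 1, 2, 4, 8, ... (``i == f`` with ``f`` doubling), so
# each returned list samples ancestors at exponentially growing distances.
# The same index pattern, independent of any repository:
def _sampledsteps(depth):
    picked, f = [], 1
    for i in range(depth):
        if i == f:
            picked.append(i)
            f *= 2
    return picked   # _sampledsteps(20) -> [1, 2, 4, 8, 16]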
2713 def checkpush(self, pushop):
2719 def checkpush(self, pushop):
2714 """Extensions can override this function if additional checks have
2720 """Extensions can override this function if additional checks have
2715 to be performed before pushing, or call it if they override push
2721 to be performed before pushing, or call it if they override push
2716 command.
2722 command.
2717 """
2723 """
2718
2724
2719 @unfilteredpropertycache
2725 @unfilteredpropertycache
2720 def prepushoutgoinghooks(self):
2726 def prepushoutgoinghooks(self):
2721 """Return util.hooks consists of a pushop with repo, remote, outgoing
2727 """Return util.hooks consists of a pushop with repo, remote, outgoing
2722 methods, which are called before pushing changesets.
2728 methods, which are called before pushing changesets.
2723 """
2729 """
2724 return util.hooks()
2730 return util.hooks()
2725
2731
2726 def pushkey(self, namespace, key, old, new):
2732 def pushkey(self, namespace, key, old, new):
2727 try:
2733 try:
2728 tr = self.currenttransaction()
2734 tr = self.currenttransaction()
2729 hookargs = {}
2735 hookargs = {}
2730 if tr is not None:
2736 if tr is not None:
2731 hookargs.update(tr.hookargs)
2737 hookargs.update(tr.hookargs)
2732 hookargs = pycompat.strkwargs(hookargs)
2738 hookargs = pycompat.strkwargs(hookargs)
2733 hookargs[r'namespace'] = namespace
2739 hookargs[r'namespace'] = namespace
2734 hookargs[r'key'] = key
2740 hookargs[r'key'] = key
2735 hookargs[r'old'] = old
2741 hookargs[r'old'] = old
2736 hookargs[r'new'] = new
2742 hookargs[r'new'] = new
2737 self.hook('prepushkey', throw=True, **hookargs)
2743 self.hook('prepushkey', throw=True, **hookargs)
2738 except error.HookAbort as exc:
2744 except error.HookAbort as exc:
2739 self.ui.write_err(_("pushkey-abort: %s\n") % exc)
2745 self.ui.write_err(_("pushkey-abort: %s\n") % exc)
2740 if exc.hint:
2746 if exc.hint:
2741 self.ui.write_err(_("(%s)\n") % exc.hint)
2747 self.ui.write_err(_("(%s)\n") % exc.hint)
2742 return False
2748 return False
2743 self.ui.debug('pushing key for "%s:%s"\n' % (namespace, key))
2749 self.ui.debug('pushing key for "%s:%s"\n' % (namespace, key))
2744 ret = pushkey.push(self, namespace, key, old, new)
2750 ret = pushkey.push(self, namespace, key, old, new)
2745 def runhook():
2751 def runhook():
2746 self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
2752 self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
2747 ret=ret)
2753 ret=ret)
2748 self._afterlock(runhook)
2754 self._afterlock(runhook)
2749 return ret
2755 return ret
2750
2756
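# Illustrative sketch (not part of localrepo.py): a pushkey() call for the
# 'bookmarks' namespace, roughly what exchange code issues when pushing a
# bookmark. Values are hex node strings; the empty old value assumes the
# bookmark does not yet exist on the receiving repository.
from mercurial.node import hex as _hex

def _pushbookmark(repo, name, newnode):
    return repo.pushkey(b'bookmarks', name, b'', _hex(newnode))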
2751 def listkeys(self, namespace):
2757 def listkeys(self, namespace):
2752 self.hook('prelistkeys', throw=True, namespace=namespace)
2758 self.hook('prelistkeys', throw=True, namespace=namespace)
2753 self.ui.debug('listing keys for "%s"\n' % namespace)
2759 self.ui.debug('listing keys for "%s"\n' % namespace)
2754 values = pushkey.list(self, namespace)
2760 values = pushkey.list(self, namespace)
2755 self.hook('listkeys', namespace=namespace, values=values)
2761 self.hook('listkeys', namespace=namespace, values=values)
2756 return values
2762 return values
2757
2763
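# Example of listkeys() above (illustrative comment, not part of the original
# source): the 'bookmarks' namespace yields a dict of bookmark name -> hex
# node, and the 'namespaces' namespace lists which pushkey namespaces the
# repository supports.
#
#     repo.listkeys(b'bookmarks')   # e.g. {b'stable': b'1f2e...', ...}
#     repo.listkeys(b'namespaces')  # e.g. {b'bookmarks': b'', b'phases': b''}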
2758 def debugwireargs(self, one, two, three=None, four=None, five=None):
2764 def debugwireargs(self, one, two, three=None, four=None, five=None):
2759 '''used to test argument passing over the wire'''
2765 '''used to test argument passing over the wire'''
2760 return "%s %s %s %s %s" % (one, two, pycompat.bytestr(three),
2766 return "%s %s %s %s %s" % (one, two, pycompat.bytestr(three),
2761 pycompat.bytestr(four),
2767 pycompat.bytestr(four),
2762 pycompat.bytestr(five))
2768 pycompat.bytestr(five))
2763
2769
2764 def savecommitmessage(self, text):
2770 def savecommitmessage(self, text):
2765 fp = self.vfs('last-message.txt', 'wb')
2771 fp = self.vfs('last-message.txt', 'wb')
2766 try:
2772 try:
2767 fp.write(text)
2773 fp.write(text)
2768 finally:
2774 finally:
2769 fp.close()
2775 fp.close()
2770 return self.pathto(fp.name[len(self.root) + 1:])
2776 return self.pathto(fp.name[len(self.root) + 1:])
2771
2777
2772 # used to avoid circular references so destructors work
2778 # used to avoid circular references so destructors work
2773 def aftertrans(files):
2779 def aftertrans(files):
2774 renamefiles = [tuple(t) for t in files]
2780 renamefiles = [tuple(t) for t in files]
2775 def a():
2781 def a():
2776 for vfs, src, dest in renamefiles:
2782 for vfs, src, dest in renamefiles:
2777 # if src and dest refer to the same file, vfs.rename is a no-op,
2783 # if src and dest refer to the same file, vfs.rename is a no-op,
2778 # leaving both src and dest on disk. delete dest to make sure
2784 # leaving both src and dest on disk. delete dest to make sure
2779 # the rename couldn't be such a no-op.
2785 # the rename couldn't be such a no-op.
2780 vfs.tryunlink(dest)
2786 vfs.tryunlink(dest)
2781 try:
2787 try:
2782 vfs.rename(src, dest)
2788 vfs.rename(src, dest)
2783 except OSError: # journal file does not yet exist
2789 except OSError: # journal file does not yet exist
2784 pass
2790 pass
2785 return a
2791 return a
2786
2792
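# Note on aftertrans() above (illustrative comment, not part of the original
# source): the closure it returns is typically handed to the transaction
# machinery by localrepo.transaction() so that, once a transaction closes,
# each journal file is renamed to its undo counterpart -- which is what a
# later 'hg rollback' consumes.
#
#     a = aftertrans([(vfs, 'journal', 'undo'),
#                     (vfs, 'journal.dirstate', 'undo.dirstate')])
#     a()   # renames journal -> undo, journal.dirstate -> undo.dirstate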
2787 def undoname(fn):
2793 def undoname(fn):
2788 base, name = os.path.split(fn)
2794 base, name = os.path.split(fn)
2789 assert name.startswith('journal')
2795 assert name.startswith('journal')
2790 return os.path.join(base, name.replace('journal', 'undo', 1))
2796 return os.path.join(base, name.replace('journal', 'undo', 1))
2791
2797
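# Examples of the journal -> undo mapping performed by undoname() above
# (illustrative comment, not part of the original source):
#
#     undoname('.hg/store/journal')            -> '.hg/store/undo'
#     undoname('.hg/store/journal.phaseroots') -> '.hg/store/undo.phaseroots'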
2792 def instance(ui, path, create, intents=None, createopts=None):
2798 def instance(ui, path, create, intents=None, createopts=None):
2793 localpath = util.urllocalpath(path)
2799 localpath = util.urllocalpath(path)
2794 if create:
2800 if create:
2795 createrepository(ui, localpath, createopts=createopts)
2801 createrepository(ui, localpath, createopts=createopts)
2796
2802
2797 return makelocalrepository(ui, localpath, intents=intents)
2803 return makelocalrepository(ui, localpath, intents=intents)
2798
2804
2799 def islocal(path):
2805 def islocal(path):
2800 return True
2806 return True
2801
2807
2802 def defaultcreateopts(ui, createopts=None):
2808 def defaultcreateopts(ui, createopts=None):
2803 """Populate the default creation options for a repository.
2809 """Populate the default creation options for a repository.
2804
2810
2805 A dictionary of explicitly requested creation options can be passed
2811 A dictionary of explicitly requested creation options can be passed
2806 in. Missing keys will be populated.
2812 in. Missing keys will be populated.
2807 """
2813 """
2808 createopts = dict(createopts or {})
2814 createopts = dict(createopts or {})
2809
2815
2810 if 'backend' not in createopts:
2816 if 'backend' not in createopts:
2811 # experimental config: storage.new-repo-backend
2817 # experimental config: storage.new-repo-backend
2812 createopts['backend'] = ui.config('storage', 'new-repo-backend')
2818 createopts['backend'] = ui.config('storage', 'new-repo-backend')
2813
2819
2814 return createopts
2820 return createopts
2815
2821
2816 def newreporequirements(ui, createopts):
2822 def newreporequirements(ui, createopts):
2817 """Determine the set of requirements for a new local repository.
2823 """Determine the set of requirements for a new local repository.
2818
2824
2819 Extensions can wrap this function to specify custom requirements for
2825 Extensions can wrap this function to specify custom requirements for
2820 new repositories.
2826 new repositories.
2821 """
2827 """
2822 # If the repo is being created from a shared repository, we copy
2828 # If the repo is being created from a shared repository, we copy
2823 # its requirements.
2829 # its requirements.
2824 if 'sharedrepo' in createopts:
2830 if 'sharedrepo' in createopts:
2825 requirements = set(createopts['sharedrepo'].requirements)
2831 requirements = set(createopts['sharedrepo'].requirements)
2826 if createopts.get('sharedrelative'):
2832 if createopts.get('sharedrelative'):
2827 requirements.add('relshared')
2833 requirements.add('relshared')
2828 else:
2834 else:
2829 requirements.add('shared')
2835 requirements.add('shared')
2830
2836
2831 return requirements
2837 return requirements
2832
2838
2833 if 'backend' not in createopts:
2839 if 'backend' not in createopts:
2834 raise error.ProgrammingError('backend key not present in createopts; '
2840 raise error.ProgrammingError('backend key not present in createopts; '
2835 'was defaultcreateopts() called?')
2841 'was defaultcreateopts() called?')
2836
2842
2837 if createopts['backend'] != 'revlogv1':
2843 if createopts['backend'] != 'revlogv1':
2838 raise error.Abort(_('unable to determine repository requirements for '
2844 raise error.Abort(_('unable to determine repository requirements for '
2839 'storage backend: %s') % createopts['backend'])
2845 'storage backend: %s') % createopts['backend'])
2840
2846
2841 requirements = {'revlogv1'}
2847 requirements = {'revlogv1'}
2842 if ui.configbool('format', 'usestore'):
2848 if ui.configbool('format', 'usestore'):
2843 requirements.add('store')
2849 requirements.add('store')
2844 if ui.configbool('format', 'usefncache'):
2850 if ui.configbool('format', 'usefncache'):
2845 requirements.add('fncache')
2851 requirements.add('fncache')
2846 if ui.configbool('format', 'dotencode'):
2852 if ui.configbool('format', 'dotencode'):
2847 requirements.add('dotencode')
2853 requirements.add('dotencode')
2848
2854
2849 compengine = ui.config('experimental', 'format.compression')
2855 compengine = ui.config('experimental', 'format.compression')
2850 if compengine not in util.compengines:
2856 if compengine not in util.compengines:
2851 raise error.Abort(_('compression engine %s defined by '
2857 raise error.Abort(_('compression engine %s defined by '
2852 'experimental.format.compression not available') %
2858 'experimental.format.compression not available') %
2853 compengine,
2859 compengine,
2854 hint=_('run "hg debuginstall" to list available '
2860 hint=_('run "hg debuginstall" to list available '
2855 'compression engines'))
2861 'compression engines'))
2856
2862
2857 # zlib is the historical default and doesn't need an explicit requirement.
2863 # zlib is the historical default and doesn't need an explicit requirement.
2858 if compengine != 'zlib':
2864 if compengine != 'zlib':
2859 requirements.add('exp-compression-%s' % compengine)
2865 requirements.add('exp-compression-%s' % compengine)
2860
2866
2861 if scmutil.gdinitconfig(ui):
2867 if scmutil.gdinitconfig(ui):
2862 requirements.add('generaldelta')
2868 requirements.add('generaldelta')
2863 if ui.configbool('experimental', 'treemanifest'):
2869 if ui.configbool('experimental', 'treemanifest'):
2864 requirements.add('treemanifest')
2870 requirements.add('treemanifest')
2865 # experimental config: format.sparse-revlog
2871 # experimental config: format.sparse-revlog
2866 if ui.configbool('format', 'sparse-revlog'):
2872 if ui.configbool('format', 'sparse-revlog'):
2867 requirements.add(SPARSEREVLOG_REQUIREMENT)
2873 requirements.add(SPARSEREVLOG_REQUIREMENT)
2868
2874
2869 revlogv2 = ui.config('experimental', 'revlogv2')
2875 revlogv2 = ui.config('experimental', 'revlogv2')
2870 if revlogv2 == 'enable-unstable-format-and-corrupt-my-data':
2876 if revlogv2 == 'enable-unstable-format-and-corrupt-my-data':
2871 requirements.remove('revlogv1')
2877 requirements.remove('revlogv1')
2872 # generaldelta is implied by revlogv2.
2878 # generaldelta is implied by revlogv2.
2873 requirements.discard('generaldelta')
2879 requirements.discard('generaldelta')
2874 requirements.add(REVLOGV2_REQUIREMENT)
2880 requirements.add(REVLOGV2_REQUIREMENT)
2875 # experimental config: format.internal-phase
2881 # experimental config: format.internal-phase
2876 if ui.configbool('format', 'internal-phase'):
2882 if ui.configbool('format', 'internal-phase'):
2877 requirements.add('internal-phase')
2883 requirements.add('internal-phase')
2878
2884
2879 if createopts.get('narrowfiles'):
2885 if createopts.get('narrowfiles'):
2880 requirements.add(repository.NARROW_REQUIREMENT)
2886 requirements.add(repository.NARROW_REQUIREMENT)
2881
2887
2882 return requirements
2888 return requirements
2883
2889
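# Illustrative sketch (not part of localrepo.py): how a third-party extension
# might wrap newreporequirements() to add its own requirement, as the
# docstring above suggests. The extension hook and requirement names are made
# up for the example.
from mercurial import extensions, localrepo

def _newreporequirements(orig, ui, createopts):
    requirements = orig(ui, createopts)
    requirements.add(b'exp-myext-storage')   # hypothetical requirement
    return requirements

def uisetup(ui):
    extensions.wrapfunction(localrepo, 'newreporequirements',
                            _newreporequirements)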
2884 def filterknowncreateopts(ui, createopts):
2890 def filterknowncreateopts(ui, createopts):
2885 """Filters a dict of repo creation options against options that are known.
2891 """Filters a dict of repo creation options against options that are known.
2886
2892
2887 Receives a dict of repo creation options and returns a dict of those
2893 Receives a dict of repo creation options and returns a dict of those
2888 options that we don't know how to handle.
2894 options that we don't know how to handle.
2889
2895
2890 This function is called as part of repository creation. If the
2896 This function is called as part of repository creation. If the
2891 returned dict contains any items, repository creation will not
2897 returned dict contains any items, repository creation will not
2892 be allowed, as it means there was a request to create a repository
2898 be allowed, as it means there was a request to create a repository
2893 with options not recognized by loaded code.
2899 with options not recognized by loaded code.
2894
2900
2895 Extensions can wrap this function to filter out creation options
2901 Extensions can wrap this function to filter out creation options
2896 they know how to handle.
2902 they know how to handle.
2897 """
2903 """
2898 known = {
2904 known = {
2899 'backend',
2905 'backend',
2900 'narrowfiles',
2906 'narrowfiles',
2901 'sharedrepo',
2907 'sharedrepo',
2902 'sharedrelative',
2908 'sharedrelative',
2903 'shareditems',
2909 'shareditems',
2904 }
2910 }
2905
2911
2906 return {k: v for k, v in createopts.items() if k not in known}
2912 return {k: v for k, v in createopts.items() if k not in known}
2907
2913
2908 def createrepository(ui, path, createopts=None):
2914 def createrepository(ui, path, createopts=None):
2909 """Create a new repository in a vfs.
2915 """Create a new repository in a vfs.
2910
2916
2911 ``path`` path to the new repo's working directory.
2917 ``path`` path to the new repo's working directory.
2912 ``createopts`` options for the new repository.
2918 ``createopts`` options for the new repository.
2913
2919
2914 The following keys for ``createopts`` are recognized:
2920 The following keys for ``createopts`` are recognized:
2915
2921
2916 backend
2922 backend
2917 The storage backend to use.
2923 The storage backend to use.
2918 narrowfiles
2924 narrowfiles
2919 Set up repository to support narrow file storage.
2925 Set up repository to support narrow file storage.
2920 sharedrepo
2926 sharedrepo
2921 Repository object from which storage should be shared.
2927 Repository object from which storage should be shared.
2922 sharedrelative
2928 sharedrelative
2923 Boolean indicating if the path to the shared repo should be
2929 Boolean indicating if the path to the shared repo should be
2924 stored as relative. By default, the pointer to the "parent" repo
2930 stored as relative. By default, the pointer to the "parent" repo
2925 is stored as an absolute path.
2931 is stored as an absolute path.
2926 shareditems
2932 shareditems
2927 Set of items to share to the new repository (in addition to storage).
2933 Set of items to share to the new repository (in addition to storage).
2928 """
2934 """
2929 createopts = defaultcreateopts(ui, createopts=createopts)
2935 createopts = defaultcreateopts(ui, createopts=createopts)
2930
2936
2931 unknownopts = filterknowncreateopts(ui, createopts)
2937 unknownopts = filterknowncreateopts(ui, createopts)
2932
2938
2933 if not isinstance(unknownopts, dict):
2939 if not isinstance(unknownopts, dict):
2934 raise error.ProgrammingError('filterknowncreateopts() did not return '
2940 raise error.ProgrammingError('filterknowncreateopts() did not return '
2935 'a dict')
2941 'a dict')
2936
2942
2937 if unknownopts:
2943 if unknownopts:
2938 raise error.Abort(_('unable to create repository because of unknown '
2944 raise error.Abort(_('unable to create repository because of unknown '
2939 'creation option: %s') %
2945 'creation option: %s') %
2940 ', '.join(sorted(unknownopts)),
2946 ', '.join(sorted(unknownopts)),
2941 hint=_('is a required extension not loaded?'))
2947 hint=_('is a required extension not loaded?'))
2942
2948
2943 requirements = newreporequirements(ui, createopts=createopts)
2949 requirements = newreporequirements(ui, createopts=createopts)
2944
2950
2945 wdirvfs = vfsmod.vfs(path, expandpath=True, realpath=True)
2951 wdirvfs = vfsmod.vfs(path, expandpath=True, realpath=True)
2946
2952
2947 hgvfs = vfsmod.vfs(wdirvfs.join(b'.hg'))
2953 hgvfs = vfsmod.vfs(wdirvfs.join(b'.hg'))
2948 if hgvfs.exists():
2954 if hgvfs.exists():
2949 raise error.RepoError(_('repository %s already exists') % path)
2955 raise error.RepoError(_('repository %s already exists') % path)
2950
2956
2951 if 'sharedrepo' in createopts:
2957 if 'sharedrepo' in createopts:
2952 sharedpath = createopts['sharedrepo'].sharedpath
2958 sharedpath = createopts['sharedrepo'].sharedpath
2953
2959
2954 if createopts.get('sharedrelative'):
2960 if createopts.get('sharedrelative'):
2955 try:
2961 try:
2956 sharedpath = os.path.relpath(sharedpath, hgvfs.base)
2962 sharedpath = os.path.relpath(sharedpath, hgvfs.base)
2957 except (IOError, ValueError) as e:
2963 except (IOError, ValueError) as e:
2958 # ValueError is raised on Windows if the drive letters differ
2964 # ValueError is raised on Windows if the drive letters differ
2959 # on each path.
2965 # on each path.
2960 raise error.Abort(_('cannot calculate relative path'),
2966 raise error.Abort(_('cannot calculate relative path'),
2961 hint=stringutil.forcebytestr(e))
2967 hint=stringutil.forcebytestr(e))
2962
2968
2963 if not wdirvfs.exists():
2969 if not wdirvfs.exists():
2964 wdirvfs.makedirs()
2970 wdirvfs.makedirs()
2965
2971
2966 hgvfs.makedir(notindexed=True)
2972 hgvfs.makedir(notindexed=True)
2967
2973
2968 if b'store' in requirements and 'sharedrepo' not in createopts:
2974 if b'store' in requirements and 'sharedrepo' not in createopts:
2969 hgvfs.mkdir(b'store')
2975 hgvfs.mkdir(b'store')
2970
2976
2971 # We create an invalid changelog outside the store so very old
2977 # We create an invalid changelog outside the store so very old
2972 # Mercurial versions (which didn't know about the requirements
2978 # Mercurial versions (which didn't know about the requirements
2973 # file) encounter an error on reading the changelog. This
2979 # file) encounter an error on reading the changelog. This
2974 # effectively locks out old clients and prevents them from
2980 # effectively locks out old clients and prevents them from
2975 # mucking with a repo in an unknown format.
2981 # mucking with a repo in an unknown format.
2976 #
2982 #
2977 # The revlog header has version 2, which won't be recognized by
2983 # The revlog header has version 2, which won't be recognized by
2978 # such old clients.
2984 # such old clients.
2979 hgvfs.append(b'00changelog.i',
2985 hgvfs.append(b'00changelog.i',
2980 b'\0\0\0\2 dummy changelog to prevent using the old repo '
2986 b'\0\0\0\2 dummy changelog to prevent using the old repo '
2981 b'layout')
2987 b'layout')
2982
2988
2983 scmutil.writerequires(hgvfs, requirements)
2989 scmutil.writerequires(hgvfs, requirements)
2984
2990
2985 # Write out file telling readers where to find the shared store.
2991 # Write out file telling readers where to find the shared store.
2986 if 'sharedrepo' in createopts:
2992 if 'sharedrepo' in createopts:
2987 hgvfs.write(b'sharedpath', sharedpath)
2993 hgvfs.write(b'sharedpath', sharedpath)
2988
2994
2989 if createopts.get('shareditems'):
2995 if createopts.get('shareditems'):
2990 shared = b'\n'.join(sorted(createopts['shareditems'])) + b'\n'
2996 shared = b'\n'.join(sorted(createopts['shareditems'])) + b'\n'
2991 hgvfs.write(b'shared', shared)
2997 hgvfs.write(b'shared', shared)
2992
2998
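# Illustrative sketch (not part of localrepo.py): creating a narrow-enabled
# repository with the helpers above and then opening it, mirroring what
# instance() does. The helper name is made up and a local filesystem path is
# assumed.
def _createnarrowrepo(ui, path):
    localpath = util.urllocalpath(path)
    createrepository(ui, localpath, createopts={'narrowfiles': True})
    return makelocalrepository(ui, localpath)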
2993 def poisonrepository(repo):
2999 def poisonrepository(repo):
2994 """Poison a repository instance so it can no longer be used."""
3000 """Poison a repository instance so it can no longer be used."""
2995 # Perform any cleanup on the instance.
3001 # Perform any cleanup on the instance.
2996 repo.close()
3002 repo.close()
2997
3003
2998 # Our strategy is to replace the type of the object with one that
3004 # Our strategy is to replace the type of the object with one that
2999 # has all attribute lookups result in error.
3005 # has all attribute lookups result in error.
3000 #
3006 #
3001 # But we have to allow the close() method because some constructors
3007 # But we have to allow the close() method because some constructors
3002 # of repos call close() on repo references.
3008 # of repos call close() on repo references.
3003 class poisonedrepository(object):
3009 class poisonedrepository(object):
3004 def __getattribute__(self, item):
3010 def __getattribute__(self, item):
3005 if item == r'close':
3011 if item == r'close':
3006 return object.__getattribute__(self, item)
3012 return object.__getattribute__(self, item)
3007
3013
3008 raise error.ProgrammingError('repo instances should not be used '
3014 raise error.ProgrammingError('repo instances should not be used '
3009 'after unshare')
3015 'after unshare')
3010
3016
3011 def close(self):
3017 def close(self):
3012 pass
3018 pass
3013
3019
3014 # We may have a repoview, which intercepts __setattr__. So be sure
3020 # We may have a repoview, which intercepts __setattr__. So be sure
3015 # we operate at the lowest level possible.
3021 # we operate at the lowest level possible.
3016 object.__setattr__(repo, r'__class__', poisonedrepository)
3022 object.__setattr__(repo, r'__class__', poisonedrepository)
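# Effect of poisonrepository() above, in brief (illustrative comment, not part
# of the original source): after the call, any attribute access on the old
# reference raises error.ProgrammingError, while close() remains callable so
# lingering references can still be torn down safely.
#
#     poisonrepository(repo)
#     repo.close()      # still allowed (no-op)
#     repo.changelog    # raises error.ProgrammingError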