localrepo: move the getlocalbundle method in changegroup module...
Pierre-Yves David
r20928:91b47139 default
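This changeset turns localrepo's getlocalbundle method into a module-level function in the changegroup module. A rough before/after sketch of a call site follows; the 'push' source string, the repo/outgoing names, and the old method signature are illustrative assumptions, not lines taken from this diff.

# Hypothetical call site, for illustration only (not part of this changeset).
from mercurial import changegroup

# before: the functionality was reached as a method on the local repository
#     cg = repo.getlocalbundle('push', outgoing)

# after: callers reach the same logic through the changegroup module
cg = changegroup.getlocalbundle(repo, 'push', outgoing)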
@@ -1,482 +1,492 @@
# changegroup.py - Mercurial changegroup manipulation functions
#
# Copyright 2006 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from i18n import _
from node import nullrev, nullid, hex
import mdiff, util, dagutil
import struct, os, bz2, zlib, tempfile
import discovery

_BUNDLE10_DELTA_HEADER = "20s20s20s20s"

def readexactly(stream, n):
    '''read n bytes from stream.read and abort if less was available'''
    s = stream.read(n)
    if len(s) < n:
        raise util.Abort(_("stream ended unexpectedly"
                           " (got %d bytes, expected %d)")
                         % (len(s), n))
    return s

def getchunk(stream):
    """return the next chunk from stream as a string"""
    d = readexactly(stream, 4)
    l = struct.unpack(">l", d)[0]
    if l <= 4:
        if l:
            raise util.Abort(_("invalid chunk length %d") % l)
        return ""
    return readexactly(stream, l - 4)

def chunkheader(length):
    """return a changegroup chunk header (string)"""
    return struct.pack(">l", length + 4)

def closechunk():
    """return a changegroup chunk header (string) for a zero-length chunk"""
    return struct.pack(">l", 0)

class nocompress(object):
    def compress(self, x):
        return x
    def flush(self):
        return ""

bundletypes = {
    "": ("", nocompress), # only when using unbundle on ssh and old http servers
                          # since the unification ssh accepts a header but there
                          # is no capability signaling it.
    "HG10UN": ("HG10UN", nocompress),
    "HG10BZ": ("HG10", lambda: bz2.BZ2Compressor()),
    "HG10GZ": ("HG10GZ", lambda: zlib.compressobj()),
}

# hgweb uses this list to communicate its preferred type
bundlepriority = ['HG10GZ', 'HG10BZ', 'HG10UN']

def writebundle(cg, filename, bundletype):
    """Write a bundle file and return its filename.

    Existing files will not be overwritten.
    If no filename is specified, a temporary file is created.
    bz2 compression can be turned off.
    The bundle file will be deleted in case of errors.
    """

    fh = None
    cleanup = None
    try:
        if filename:
            fh = open(filename, "wb")
        else:
            fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg")
            fh = os.fdopen(fd, "wb")
        cleanup = filename

        header, compressor = bundletypes[bundletype]
        fh.write(header)
        z = compressor()

        # parse the changegroup data, otherwise we will block
        # in case of sshrepo because we don't know the end of the stream

        # an empty chunkgroup is the end of the changegroup
        # a changegroup has at least 2 chunkgroups (changelog and manifest).
        # after that, an empty chunkgroup is the end of the changegroup
        empty = False
        count = 0
        while not empty or count <= 2:
            empty = True
            count += 1
            while True:
                chunk = getchunk(cg)
                if not chunk:
                    break
                empty = False
                fh.write(z.compress(chunkheader(len(chunk))))
                pos = 0
                while pos < len(chunk):
                    next = pos + 2**20
                    fh.write(z.compress(chunk[pos:next]))
                    pos = next
        fh.write(z.compress(closechunk()))
        fh.write(z.flush())
        cleanup = None
        return filename
    finally:
        if fh is not None:
            fh.close()
        if cleanup is not None:
            os.unlink(cleanup)

def decompressor(fh, alg):
    if alg == 'UN':
        return fh
    elif alg == 'GZ':
        def generator(f):
            zd = zlib.decompressobj()
            for chunk in util.filechunkiter(f):
                yield zd.decompress(chunk)
    elif alg == 'BZ':
        def generator(f):
            zd = bz2.BZ2Decompressor()
            zd.decompress("BZ")
            for chunk in util.filechunkiter(f, 4096):
                yield zd.decompress(chunk)
    else:
        raise util.Abort("unknown bundle compression '%s'" % alg)
    return util.chunkbuffer(generator(fh))

class unbundle10(object):
    deltaheader = _BUNDLE10_DELTA_HEADER
    deltaheadersize = struct.calcsize(deltaheader)
    def __init__(self, fh, alg):
        self._stream = decompressor(fh, alg)
        self._type = alg
        self.callback = None
    def compressed(self):
        return self._type != 'UN'
    def read(self, l):
        return self._stream.read(l)
    def seek(self, pos):
        return self._stream.seek(pos)
    def tell(self):
        return self._stream.tell()
    def close(self):
        return self._stream.close()

    def chunklength(self):
        d = readexactly(self._stream, 4)
        l = struct.unpack(">l", d)[0]
        if l <= 4:
            if l:
                raise util.Abort(_("invalid chunk length %d") % l)
            return 0
        if self.callback:
            self.callback()
        return l - 4

    def changelogheader(self):
        """v10 does not have a changelog header chunk"""
        return {}

    def manifestheader(self):
        """v10 does not have a manifest header chunk"""
        return {}

    def filelogheader(self):
        """return the header of the filelogs chunk, v10 only has the filename"""
        l = self.chunklength()
        if not l:
            return {}
        fname = readexactly(self._stream, l)
        return {'filename': fname}

    def _deltaheader(self, headertuple, prevnode):
        node, p1, p2, cs = headertuple
        if prevnode is None:
            deltabase = p1
        else:
            deltabase = prevnode
        return node, p1, p2, deltabase, cs

    def deltachunk(self, prevnode):
        l = self.chunklength()
        if not l:
            return {}
        headerdata = readexactly(self._stream, self.deltaheadersize)
        header = struct.unpack(self.deltaheader, headerdata)
        delta = readexactly(self._stream, l - self.deltaheadersize)
        node, p1, p2, deltabase, cs = self._deltaheader(header, prevnode)
        return {'node': node, 'p1': p1, 'p2': p2, 'cs': cs,
                'deltabase': deltabase, 'delta': delta}

class headerlessfixup(object):
    def __init__(self, fh, h):
        self._h = h
        self._fh = fh
    def read(self, n):
        if self._h:
            d, self._h = self._h[:n], self._h[n:]
            if len(d) < n:
                d += readexactly(self._fh, n - len(d))
            return d
        return readexactly(self._fh, n)

def readbundle(fh, fname):
    header = readexactly(fh, 6)

    if not fname:
        fname = "stream"
        if not header.startswith('HG') and header.startswith('\0'):
            fh = headerlessfixup(fh, header)
            header = "HG10UN"

    magic, version, alg = header[0:2], header[2:4], header[4:6]

    if magic != 'HG':
        raise util.Abort(_('%s: not a Mercurial bundle') % fname)
    if version != '10':
        raise util.Abort(_('%s: unknown bundle version %s') % (fname, version))
    return unbundle10(fh, alg)

class bundle10(object):
    deltaheader = _BUNDLE10_DELTA_HEADER
    def __init__(self, repo, bundlecaps=None):
        """Given a source repo, construct a bundler.

        bundlecaps is optional and can be used to specify the set of
        capabilities which can be used to build the bundle.
        """
        # Set of capabilities we can use to build the bundle.
        if bundlecaps is None:
            bundlecaps = set()
        self._bundlecaps = bundlecaps
        self._changelog = repo.changelog
        self._manifest = repo.manifest
        reorder = repo.ui.config('bundle', 'reorder', 'auto')
        if reorder == 'auto':
            reorder = None
        else:
            reorder = util.parsebool(reorder)
        self._repo = repo
        self._reorder = reorder
        self._progress = repo.ui.progress
    def close(self):
        return closechunk()

    def fileheader(self, fname):
        return chunkheader(len(fname)) + fname

    def group(self, nodelist, revlog, lookup, units=None, reorder=None):
        """Calculate a delta group, yielding a sequence of changegroup chunks
        (strings).

        Given a list of changeset revs, return a set of deltas and
        metadata corresponding to nodes. The first delta is
        first parent(nodelist[0]) -> nodelist[0], the receiver is
        guaranteed to have this parent as it has all history before
        these changesets. In the case firstparent is nullrev the
        changegroup starts with a full revision.

        If units is not None, progress detail will be generated, units specifies
        the type of revlog that is touched (changelog, manifest, etc.).
        """
        # if we don't have any revisions touched by these changesets, bail
        if len(nodelist) == 0:
            yield self.close()
            return

        # for generaldelta revlogs, we linearize the revs; this will both be
        # much quicker and generate a much smaller bundle
        if (revlog._generaldelta and reorder is not False) or reorder:
            dag = dagutil.revlogdag(revlog)
            revs = set(revlog.rev(n) for n in nodelist)
            revs = dag.linearize(revs)
        else:
            revs = sorted([revlog.rev(n) for n in nodelist])

        # add the parent of the first rev
        p = revlog.parentrevs(revs[0])[0]
        revs.insert(0, p)

        # build deltas
        total = len(revs) - 1
        msgbundling = _('bundling')
        for r in xrange(len(revs) - 1):
            if units is not None:
                self._progress(msgbundling, r + 1, unit=units, total=total)
            prev, curr = revs[r], revs[r + 1]
            linknode = lookup(revlog.node(curr))
            for c in self.revchunk(revlog, curr, prev, linknode):
                yield c

        yield self.close()

    # filter any nodes that claim to be part of the known set
    def prune(self, revlog, missing, commonrevs, source):
        rr, rl = revlog.rev, revlog.linkrev
        return [n for n in missing if rl(rr(n)) not in commonrevs]

    def generate(self, commonrevs, clnodes, fastpathlinkrev, source):
        '''yield a sequence of changegroup chunks (strings)'''
        repo = self._repo
        cl = self._changelog
        mf = self._manifest
        reorder = self._reorder
        progress = self._progress

        # for progress output
        msgbundling = _('bundling')

        mfs = {} # needed manifests
        fnodes = {} # needed file nodes
        changedfiles = set()

        # Callback for the changelog, used to collect changed files and manifest
        # nodes.
        # Returns the linkrev node (identity in the changelog case).
        def lookupcl(x):
            c = cl.read(x)
            changedfiles.update(c[3])
            # record the first changeset introducing this manifest version
            mfs.setdefault(c[0], x)
            return x

        # Callback for the manifest, used to collect linkrevs for filelog
        # revisions.
        # Returns the linkrev node (collected in lookupcl).
        def lookupmf(x):
            clnode = mfs[x]
            if not fastpathlinkrev:
                mdata = mf.readfast(x)
                for f, n in mdata.iteritems():
                    if f in changedfiles:
                        # record the first changeset introducing this filelog
                        # version
                        fnodes[f].setdefault(n, clnode)
            return clnode

        for chunk in self.group(clnodes, cl, lookupcl, units=_('changesets'),
                                reorder=reorder):
            yield chunk
        progress(msgbundling, None)

        for f in changedfiles:
            fnodes[f] = {}
        mfnodes = self.prune(mf, mfs, commonrevs, source)
        for chunk in self.group(mfnodes, mf, lookupmf, units=_('manifests'),
                                reorder=reorder):
            yield chunk
        progress(msgbundling, None)

        mfs.clear()
        needed = set(cl.rev(x) for x in clnodes)

        def linknodes(filerevlog, fname):
            if fastpathlinkrev:
                ln, llr = filerevlog.node, filerevlog.linkrev
                def genfilenodes():
                    for r in filerevlog:
                        linkrev = llr(r)
                        if linkrev in needed:
                            yield filerevlog.node(r), cl.node(linkrev)
                fnodes[fname] = dict(genfilenodes())
            return fnodes.get(fname, {})

        for chunk in self.generatefiles(changedfiles, linknodes, commonrevs,
                                        source):
            yield chunk

        yield self.close()
        progress(msgbundling, None)

        if clnodes:
            repo.hook('outgoing', node=hex(clnodes[0]), source=source)

    def generatefiles(self, changedfiles, linknodes, commonrevs, source):
        repo = self._repo
        progress = self._progress
        reorder = self._reorder
        msgbundling = _('bundling')

        total = len(changedfiles)
        # for progress output
        msgfiles = _('files')
        for i, fname in enumerate(sorted(changedfiles)):
            filerevlog = repo.file(fname)
            if not filerevlog:
                raise util.Abort(_("empty or missing revlog for %s") % fname)

            linkrevnodes = linknodes(filerevlog, fname)
            # Lookup for filenodes, we collected the linkrev nodes above in the
            # fastpath case and with lookupmf in the slowpath case.
            def lookupfilelog(x):
                return linkrevnodes[x]

            filenodes = self.prune(filerevlog, linkrevnodes, commonrevs, source)
            if filenodes:
                progress(msgbundling, i + 1, item=fname, unit=msgfiles,
                         total=total)
                yield self.fileheader(fname)
                for chunk in self.group(filenodes, filerevlog, lookupfilelog,
                                        reorder=reorder):
                    yield chunk

    def revchunk(self, revlog, rev, prev, linknode):
        node = revlog.node(rev)
        p1, p2 = revlog.parentrevs(rev)
        base = prev

        prefix = ''
        if base == nullrev:
            delta = revlog.revision(node)
            prefix = mdiff.trivialdiffheader(len(delta))
        else:
            delta = revlog.revdiff(base, rev)
        p1n, p2n = revlog.parents(node)
        basenode = revlog.node(base)
        meta = self.builddeltaheader(node, p1n, p2n, basenode, linknode)
        meta += prefix
        l = len(meta) + len(delta)
        yield chunkheader(l)
        yield meta
        yield delta
    def builddeltaheader(self, node, p1n, p2n, basenode, linknode):
        # do nothing with basenode, it is implicitly the previous one in HG10
        return struct.pack(self.deltaheader, node, p1n, p2n, linknode)

def _changegroupinfo(repo, nodes, source):
    if repo.ui.verbose or source == 'bundle':
        repo.ui.status(_("%d changesets found\n") % len(nodes))
    if repo.ui.debugflag:
        repo.ui.debug("list of changesets:\n")
        for node in nodes:
            repo.ui.debug("%s\n" % hex(node))

def getsubset(repo, outgoing, bundler, source, fastpath=False):
    repo = repo.unfiltered()
    commonrevs = outgoing.common
    csets = outgoing.missing
    heads = outgoing.missingheads
    # We go through the fast path if we get told to, or if all (unfiltered)
    # heads have been requested (since we then know that all linkrevs will
    # be pulled by the client).
    heads.sort()
    fastpathlinkrev = fastpath or (
        repo.filtername is None and heads == sorted(repo.heads()))

    repo.hook('preoutgoing', throw=True, source=source)
    _changegroupinfo(repo, csets, source)
    gengroup = bundler.generate(commonrevs, csets, fastpathlinkrev, source)
    return unbundle10(util.chunkbuffer(gengroup), 'UN')

def changegroupsubset(repo, roots, heads, source):
    """Compute a changegroup consisting of all the nodes that are
    descendants of any of the roots and ancestors of any of the heads.
    Return a chunkbuffer object whose read() method will return
    successive changegroup chunks.

    It is fairly complex as determining which filenodes and which
    manifest nodes need to be included for the changeset to be complete
    is non-trivial.

    Another wrinkle is doing the reverse, figuring out which changeset in
    the changegroup a particular filenode or manifestnode belongs to.
    """
    cl = repo.changelog
    if not roots:
        roots = [nullid]
    # TODO: remove call to nodesbetween.
    csets, roots, heads = cl.nodesbetween(roots, heads)
    discbases = []
    for n in roots:
        discbases.extend([p for p in cl.parents(n) if p != nullid])
    outgoing = discovery.outgoing(cl, discbases, heads)
    bundler = bundle10(repo)
    return getsubset(repo, outgoing, bundler, source)

def getlocalbundle(repo, source, outgoing, bundlecaps=None):
    """Like getbundle, but taking a discovery.outgoing as an argument.

    This is only implemented for local repos and reuses potentially
    precomputed sets in outgoing."""
    if not outgoing.missing:
        return None
    bundler = bundle10(repo, bundlecaps)
    return getsubset(repo, outgoing, bundler, source)

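The getlocalbundle function added above is the point of this changeset: it now lives in the changegroup module rather than on the local repository object. A minimal sketch of driving it, reusing the discovery.outgoing and writebundle APIs shown earlier in this file; the repository path, the 'bundle' source string, and the output filename are hypothetical, and common heads would normally come from discovery against a peer rather than being None.

# Sketch only: build an outgoing set, ask changegroup.getlocalbundle for a
# changegroup, and serialize it as a bundle file.
from mercurial import ui as uimod, hg, discovery, changegroup

u = uimod.ui()
repo = hg.repository(u, '/path/to/repo')   # hypothetical local repository

# commonheads=None means "nothing known to be common", so every head and
# its history ends up in outgoing.missing
out = discovery.outgoing(repo.changelog, None, repo.heads())

cg = changegroup.getlocalbundle(repo, 'bundle', out)
if cg is not None:                         # None means nothing to bundle
    # cg is an unbundle10 object wrapping the generated chunk stream;
    # writebundle (defined above) writes it out with the chosen compression
    changegroup.writebundle(cg, 'out.hg', 'HG10BZ')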
@@ -1,5930 +1,5930 @@
# commands.py - command processing for mercurial
#
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from node import hex, bin, nullid, nullrev, short
from lock import release
from i18n import _
import os, re, difflib, time, tempfile, errno
import sys
import hg, scmutil, util, revlog, copies, error, bookmarks
import patch, help, encoding, templatekw, discovery
import archival, changegroup, cmdutil, hbisect
import sshserver, hgweb, commandserver
from hgweb import server as hgweb_server
import merge as mergemod
import minirst, revset, fileset
import dagparser, context, simplemerge, graphmod
import random
import setdiscovery, treediscovery, dagutil, pvec, localrepo
import phases, obsolete

table = {}

command = cmdutil.command(table)

# common command options

globalopts = [
    ('R', 'repository', '',
     _('repository root directory or name of overlay bundle file'),
     _('REPO')),
    ('', 'cwd', '',
     _('change working directory'), _('DIR')),
    ('y', 'noninteractive', None,
     _('do not prompt, automatically pick the first choice for all prompts')),
    ('q', 'quiet', None, _('suppress output')),
    ('v', 'verbose', None, _('enable additional output')),
    ('', 'config', [],
     _('set/override config option (use \'section.name=value\')'),
     _('CONFIG')),
    ('', 'debug', None, _('enable debugging output')),
    ('', 'debugger', None, _('start debugger')),
    ('', 'encoding', encoding.encoding, _('set the charset encoding'),
     _('ENCODE')),
    ('', 'encodingmode', encoding.encodingmode,
     _('set the charset encoding mode'), _('MODE')),
    ('', 'traceback', None, _('always print a traceback on exception')),
    ('', 'time', None, _('time how long the command takes')),
    ('', 'profile', None, _('print command execution profile')),
    ('', 'version', None, _('output version information and exit')),
    ('h', 'help', None, _('display help and exit')),
    ('', 'hidden', False, _('consider hidden changesets')),
]

dryrunopts = [('n', 'dry-run', None,
               _('do not perform actions, just print output'))]

remoteopts = [
    ('e', 'ssh', '',
     _('specify ssh command to use'), _('CMD')),
    ('', 'remotecmd', '',
     _('specify hg command to run on the remote side'), _('CMD')),
    ('', 'insecure', None,
     _('do not verify server certificate (ignoring web.cacerts config)')),
]

walkopts = [
    ('I', 'include', [],
     _('include names matching the given patterns'), _('PATTERN')),
    ('X', 'exclude', [],
     _('exclude names matching the given patterns'), _('PATTERN')),
]

commitopts = [
    ('m', 'message', '',
     _('use text as commit message'), _('TEXT')),
    ('l', 'logfile', '',
     _('read commit message from file'), _('FILE')),
]

commitopts2 = [
    ('d', 'date', '',
     _('record the specified date as commit date'), _('DATE')),
    ('u', 'user', '',
     _('record the specified user as committer'), _('USER')),
]

templateopts = [
    ('', 'style', '',
     _('display using template map file (DEPRECATED)'), _('STYLE')),
    ('T', 'template', '',
     _('display with template'), _('TEMPLATE')),
]

logopts = [
    ('p', 'patch', None, _('show patch')),
    ('g', 'git', None, _('use git extended diff format')),
    ('l', 'limit', '',
     _('limit number of changes displayed'), _('NUM')),
    ('M', 'no-merges', None, _('do not show merges')),
    ('', 'stat', None, _('output diffstat-style summary of changes')),
    ('G', 'graph', None, _("show the revision DAG")),
] + templateopts

diffopts = [
    ('a', 'text', None, _('treat all files as text')),
    ('g', 'git', None, _('use git extended diff format')),
    ('', 'nodates', None, _('omit dates from diff headers'))
]

diffwsopts = [
    ('w', 'ignore-all-space', None,
     _('ignore white space when comparing lines')),
    ('b', 'ignore-space-change', None,
     _('ignore changes in the amount of white space')),
    ('B', 'ignore-blank-lines', None,
     _('ignore changes whose lines are all blank')),
]

diffopts2 = [
    ('p', 'show-function', None, _('show which function each change is in')),
    ('', 'reverse', None, _('produce a diff that undoes the changes')),
] + diffwsopts + [
    ('U', 'unified', '',
     _('number of lines of context to show'), _('NUM')),
    ('', 'stat', None, _('output diffstat-style summary of changes')),
]

mergetoolopts = [
    ('t', 'tool', '', _('specify merge tool')),
]

similarityopts = [
    ('s', 'similarity', '',
     _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
]

subrepoopts = [
    ('S', 'subrepos', None,
     _('recurse into subrepositories'))
]

# Commands start here, listed alphabetically

@command('^add',
    walkopts + subrepoopts + dryrunopts,
    _('[OPTION]... [FILE]...'))
def add(ui, repo, *pats, **opts):
    """add the specified files on the next commit

    Schedule files to be version controlled and added to the
    repository.

    The files will be added to the repository at the next commit. To
    undo an add before that, see :hg:`forget`.

    If no names are given, add all files to the repository.

    .. container:: verbose

       An example showing how new (unknown) files are added
       automatically by :hg:`add`::

         $ ls
         foo.c
         $ hg status
         ? foo.c
         $ hg add
         adding foo.c
         $ hg status
         A foo.c

    Returns 0 if all files are successfully added.
    """

    m = scmutil.match(repo[None], pats, opts)
    rejected = cmdutil.add(ui, repo, m, opts.get('dry_run'),
                           opts.get('subrepos'), prefix="", explicitonly=False)
    return rejected and 1 or 0

@command('addremove',
    similarityopts + walkopts + dryrunopts,
    _('[OPTION]... [FILE]...'))
def addremove(ui, repo, *pats, **opts):
    """add all new files, delete all missing files

    Add all new files and remove all missing files from the
    repository.

    New files are ignored if they match any of the patterns in
    ``.hgignore``. As with add, these changes take effect at the next
    commit.

    Use the -s/--similarity option to detect renamed files. This
    option takes a percentage between 0 (disabled) and 100 (files must
    be identical) as its parameter. With a parameter greater than 0,
    this compares every removed file with every added file and records
    those similar enough as renames. Detecting renamed files this way
    can be expensive. After using this option, :hg:`status -C` can be
    used to check which files were identified as moved or renamed. If
    not specified, -s/--similarity defaults to 100 and only renames of
    identical files are detected.

    Returns 0 if all files are successfully added.
    """
    try:
        sim = float(opts.get('similarity') or 100)
    except ValueError:
        raise util.Abort(_('similarity must be a number'))
    if sim < 0 or sim > 100:
        raise util.Abort(_('similarity must be between 0 and 100'))
    return scmutil.addremove(repo, pats, opts, similarity=sim / 100.0)

@command('^annotate|blame',
    [('r', 'rev', '', _('annotate the specified revision'), _('REV')),
    ('', 'follow', None,
     _('follow copies/renames and list the filename (DEPRECATED)')),
    ('', 'no-follow', None, _("don't follow copies and renames")),
    ('a', 'text', None, _('treat all files as text')),
    ('u', 'user', None, _('list the author (long with -v)')),
    ('f', 'file', None, _('list the filename')),
    ('d', 'date', None, _('list the date (short with -q)')),
    ('n', 'number', None, _('list the revision number (default)')),
    ('c', 'changeset', None, _('list the changeset')),
    ('l', 'line-number', None, _('show line number at the first appearance'))
    ] + diffwsopts + walkopts,
    _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'))
def annotate(ui, repo, *pats, **opts):
    """show changeset information by line for each file

    List changes in files, showing the revision id responsible for
    each line

    This command is useful for discovering when a change was made and
    by whom.

    Without the -a/--text option, annotate will avoid processing files
    it detects as binary. With -a, annotate will annotate the file
    anyway, although the results will probably be neither useful
    nor desirable.

    Returns 0 on success.
    """
    if opts.get('follow'):
        # --follow is deprecated and now just an alias for -f/--file
        # to mimic the behavior of Mercurial before version 1.5
        opts['file'] = True

    datefunc = ui.quiet and util.shortdate or util.datestr
    getdate = util.cachefunc(lambda x: datefunc(x[0].date()))

    if not pats:
        raise util.Abort(_('at least one filename or pattern is required'))

    hexfn = ui.debugflag and hex or short

    opmap = [('user', ' ', lambda x: ui.shortuser(x[0].user())),
             ('number', ' ', lambda x: str(x[0].rev())),
             ('changeset', ' ', lambda x: hexfn(x[0].node())),
             ('date', ' ', getdate),
             ('file', ' ', lambda x: x[0].path()),
             ('line_number', ':', lambda x: str(x[1])),
            ]

    if (not opts.get('user') and not opts.get('changeset')
        and not opts.get('date') and not opts.get('file')):
        opts['number'] = True

    linenumber = opts.get('line_number') is not None
    if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
        raise util.Abort(_('at least one of -n/-c is required for -l'))

    funcmap = [(func, sep) for op, sep, func in opmap if opts.get(op)]
    funcmap[0] = (funcmap[0][0], '') # no separator in front of first column

    def bad(x, y):
        raise util.Abort("%s: %s" % (x, y))

    ctx = scmutil.revsingle(repo, opts.get('rev'))
    m = scmutil.match(ctx, pats, opts)
    m.bad = bad
    follow = not opts.get('no_follow')
    diffopts = patch.diffopts(ui, opts, section='annotate')
    for abs in ctx.walk(m):
        fctx = ctx[abs]
        if not opts.get('text') and util.binary(fctx.data()):
            ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
            continue

        lines = fctx.annotate(follow=follow, linenumber=linenumber,
                              diffopts=diffopts)
        pieces = []

        for f, sep in funcmap:
            l = [f(n) for n, dummy in lines]
            if l:
                sized = [(x, encoding.colwidth(x)) for x in l]
                ml = max([w for x, w in sized])
                pieces.append(["%s%s%s" % (sep, ' ' * (ml - w), x)
                               for x, w in sized])

        if pieces:
            for p, l in zip(zip(*pieces), lines):
                ui.write("%s: %s" % ("".join(p), l[1]))

            if lines and not lines[-1][1].endswith('\n'):
                ui.write('\n')

312 @command('archive',
312 @command('archive',
313 [('', 'no-decode', None, _('do not pass files through decoders')),
313 [('', 'no-decode', None, _('do not pass files through decoders')),
314 ('p', 'prefix', '', _('directory prefix for files in archive'),
314 ('p', 'prefix', '', _('directory prefix for files in archive'),
315 _('PREFIX')),
315 _('PREFIX')),
316 ('r', 'rev', '', _('revision to distribute'), _('REV')),
316 ('r', 'rev', '', _('revision to distribute'), _('REV')),
317 ('t', 'type', '', _('type of distribution to create'), _('TYPE')),
317 ('t', 'type', '', _('type of distribution to create'), _('TYPE')),
318 ] + subrepoopts + walkopts,
318 ] + subrepoopts + walkopts,
319 _('[OPTION]... DEST'))
319 _('[OPTION]... DEST'))
320 def archive(ui, repo, dest, **opts):
320 def archive(ui, repo, dest, **opts):
321 '''create an unversioned archive of a repository revision
321 '''create an unversioned archive of a repository revision
322
322
323 By default, the revision used is the parent of the working
323 By default, the revision used is the parent of the working
324 directory; use -r/--rev to specify a different revision.
324 directory; use -r/--rev to specify a different revision.
325
325
326 The archive type is automatically detected based on file
326 The archive type is automatically detected based on file
327 extension (or override using -t/--type).
327 extension (or override using -t/--type).
328
328
329 .. container:: verbose
329 .. container:: verbose
330
330
331 Examples:
331 Examples:
332
332
333 - create a zip file containing the 1.0 release::
333 - create a zip file containing the 1.0 release::
334
334
335 hg archive -r 1.0 project-1.0.zip
335 hg archive -r 1.0 project-1.0.zip
336
336
337 - create a tarball excluding .hg files::
337 - create a tarball excluding .hg files::
338
338
339 hg archive project.tar.gz -X ".hg*"
339 hg archive project.tar.gz -X ".hg*"
340
340
341 Valid types are:
341 Valid types are:
342
342
343 :``files``: a directory full of files (default)
343 :``files``: a directory full of files (default)
344 :``tar``: tar archive, uncompressed
344 :``tar``: tar archive, uncompressed
345 :``tbz2``: tar archive, compressed using bzip2
345 :``tbz2``: tar archive, compressed using bzip2
346 :``tgz``: tar archive, compressed using gzip
346 :``tgz``: tar archive, compressed using gzip
347 :``uzip``: zip archive, uncompressed
347 :``uzip``: zip archive, uncompressed
348 :``zip``: zip archive, compressed using deflate
348 :``zip``: zip archive, compressed using deflate
349
349
350 The exact name of the destination archive or directory is given
350 The exact name of the destination archive or directory is given
351 using a format string; see :hg:`help export` for details.
351 using a format string; see :hg:`help export` for details.
352
352
353 Each member added to an archive file has a directory prefix
353 Each member added to an archive file has a directory prefix
354 prepended. Use -p/--prefix to specify a format string for the
354 prepended. Use -p/--prefix to specify a format string for the
355 prefix. The default is the basename of the archive, with suffixes
355 prefix. The default is the basename of the archive, with suffixes
356 removed.
356 removed.
357
357
358 Returns 0 on success.
358 Returns 0 on success.
359 '''
359 '''
360
360
361 ctx = scmutil.revsingle(repo, opts.get('rev'))
361 ctx = scmutil.revsingle(repo, opts.get('rev'))
362 if not ctx:
362 if not ctx:
363 raise util.Abort(_('no working directory: please specify a revision'))
363 raise util.Abort(_('no working directory: please specify a revision'))
364 node = ctx.node()
364 node = ctx.node()
365 dest = cmdutil.makefilename(repo, dest, node)
365 dest = cmdutil.makefilename(repo, dest, node)
366 if os.path.realpath(dest) == repo.root:
366 if os.path.realpath(dest) == repo.root:
367 raise util.Abort(_('repository root cannot be destination'))
367 raise util.Abort(_('repository root cannot be destination'))
368
368
369 kind = opts.get('type') or archival.guesskind(dest) or 'files'
369 kind = opts.get('type') or archival.guesskind(dest) or 'files'
370 prefix = opts.get('prefix')
370 prefix = opts.get('prefix')
371
371
372 if dest == '-':
372 if dest == '-':
373 if kind == 'files':
373 if kind == 'files':
374 raise util.Abort(_('cannot archive plain files to stdout'))
374 raise util.Abort(_('cannot archive plain files to stdout'))
375 dest = cmdutil.makefileobj(repo, dest)
375 dest = cmdutil.makefileobj(repo, dest)
376 if not prefix:
376 if not prefix:
377 prefix = os.path.basename(repo.root) + '-%h'
377 prefix = os.path.basename(repo.root) + '-%h'
378
378
379 prefix = cmdutil.makefilename(repo, prefix, node)
379 prefix = cmdutil.makefilename(repo, prefix, node)
380 matchfn = scmutil.match(ctx, [], opts)
380 matchfn = scmutil.match(ctx, [], opts)
381 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
381 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
382 matchfn, prefix, subrepos=opts.get('subrepos'))
382 matchfn, prefix, subrepos=opts.get('subrepos'))
383
383
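A minimal sketch (not part of commands.py) of driving the archive command described above from Python, assuming the hg executable is on PATH and the script runs inside a repository; the revision and file names are placeholders.

import subprocess

# Hypothetical helper; the flags (-r, -t, -X, -p) are the ones documented above.
def make_release_archive(rev="1.0", kind="tgz"):
    dest = "project-%s.tar.gz" % rev
    subprocess.check_call([
        "hg", "archive",
        "-r", rev,           # revision to archive
        "-t", kind,          # files, tar, tbz2, tgz, uzip or zip
        "-X", ".hg*",        # exclude .hg files, as in the docstring example
        "-p", "project-%h",  # directory prefix; %h expands per 'hg help export'
        dest,
    ])
    return dest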
384 @command('backout',
384 @command('backout',
385 [('', 'merge', None, _('merge with old dirstate parent after backout')),
385 [('', 'merge', None, _('merge with old dirstate parent after backout')),
386 ('', 'parent', '',
386 ('', 'parent', '',
387 _('parent to choose when backing out merge (DEPRECATED)'), _('REV')),
387 _('parent to choose when backing out merge (DEPRECATED)'), _('REV')),
388 ('r', 'rev', '', _('revision to backout'), _('REV')),
388 ('r', 'rev', '', _('revision to backout'), _('REV')),
389 ] + mergetoolopts + walkopts + commitopts + commitopts2,
389 ] + mergetoolopts + walkopts + commitopts + commitopts2,
390 _('[OPTION]... [-r] REV'))
390 _('[OPTION]... [-r] REV'))
391 def backout(ui, repo, node=None, rev=None, **opts):
391 def backout(ui, repo, node=None, rev=None, **opts):
392 '''reverse effect of earlier changeset
392 '''reverse effect of earlier changeset
393
393
394 Prepare a new changeset with the effect of REV undone in the
394 Prepare a new changeset with the effect of REV undone in the
395 current working directory.
395 current working directory.
396
396
397 If REV is the parent of the working directory, then this new changeset
397 If REV is the parent of the working directory, then this new changeset
398 is committed automatically. Otherwise, hg needs to merge the
398 is committed automatically. Otherwise, hg needs to merge the
399 changes and the merged result is left uncommitted.
399 changes and the merged result is left uncommitted.
400
400
401 .. note::
401 .. note::
402
402
403 backout cannot be used to fix either an unwanted or
403 backout cannot be used to fix either an unwanted or
404 incorrect merge.
404 incorrect merge.
405
405
406 .. container:: verbose
406 .. container:: verbose
407
407
408 By default, the pending changeset will have one parent,
408 By default, the pending changeset will have one parent,
409 maintaining a linear history. With --merge, the pending
409 maintaining a linear history. With --merge, the pending
410 changeset will instead have two parents: the old parent of the
410 changeset will instead have two parents: the old parent of the
411 working directory and a new child of REV that simply undoes REV.
411 working directory and a new child of REV that simply undoes REV.
412
412
413 Before version 1.7, the behavior without --merge was equivalent
413 Before version 1.7, the behavior without --merge was equivalent
414 to specifying --merge followed by :hg:`update --clean .` to
414 to specifying --merge followed by :hg:`update --clean .` to
415 cancel the merge and leave the child of REV as a head to be
415 cancel the merge and leave the child of REV as a head to be
416 merged separately.
416 merged separately.
417
417
418 See :hg:`help dates` for a list of formats valid for -d/--date.
418 See :hg:`help dates` for a list of formats valid for -d/--date.
419
419
420 Returns 0 on success, 1 if nothing to backout or there are unresolved
420 Returns 0 on success, 1 if nothing to backout or there are unresolved
421 files.
421 files.
422 '''
422 '''
423 if rev and node:
423 if rev and node:
424 raise util.Abort(_("please specify just one revision"))
424 raise util.Abort(_("please specify just one revision"))
425
425
426 if not rev:
426 if not rev:
427 rev = node
427 rev = node
428
428
429 if not rev:
429 if not rev:
430 raise util.Abort(_("please specify a revision to backout"))
430 raise util.Abort(_("please specify a revision to backout"))
431
431
432 date = opts.get('date')
432 date = opts.get('date')
433 if date:
433 if date:
434 opts['date'] = util.parsedate(date)
434 opts['date'] = util.parsedate(date)
435
435
436 cmdutil.checkunfinished(repo)
436 cmdutil.checkunfinished(repo)
437 cmdutil.bailifchanged(repo)
437 cmdutil.bailifchanged(repo)
438 node = scmutil.revsingle(repo, rev).node()
438 node = scmutil.revsingle(repo, rev).node()
439
439
440 op1, op2 = repo.dirstate.parents()
440 op1, op2 = repo.dirstate.parents()
441 a = repo.changelog.ancestor(op1, node)
441 a = repo.changelog.ancestor(op1, node)
442 if a != node:
442 if a != node:
443 raise util.Abort(_('cannot backout change that is not an ancestor'))
443 raise util.Abort(_('cannot backout change that is not an ancestor'))
444
444
445 p1, p2 = repo.changelog.parents(node)
445 p1, p2 = repo.changelog.parents(node)
446 if p1 == nullid:
446 if p1 == nullid:
447 raise util.Abort(_('cannot backout a change with no parents'))
447 raise util.Abort(_('cannot backout a change with no parents'))
448 if p2 != nullid:
448 if p2 != nullid:
449 if not opts.get('parent'):
449 if not opts.get('parent'):
450 raise util.Abort(_('cannot backout a merge changeset'))
450 raise util.Abort(_('cannot backout a merge changeset'))
451 p = repo.lookup(opts['parent'])
451 p = repo.lookup(opts['parent'])
452 if p not in (p1, p2):
452 if p not in (p1, p2):
453 raise util.Abort(_('%s is not a parent of %s') %
453 raise util.Abort(_('%s is not a parent of %s') %
454 (short(p), short(node)))
454 (short(p), short(node)))
455 parent = p
455 parent = p
456 else:
456 else:
457 if opts.get('parent'):
457 if opts.get('parent'):
458 raise util.Abort(_('cannot use --parent on non-merge changeset'))
458 raise util.Abort(_('cannot use --parent on non-merge changeset'))
459 parent = p1
459 parent = p1
460
460
461 # the backout should appear on the same branch
461 # the backout should appear on the same branch
462 wlock = repo.wlock()
462 wlock = repo.wlock()
463 try:
463 try:
464 branch = repo.dirstate.branch()
464 branch = repo.dirstate.branch()
465 bheads = repo.branchheads(branch)
465 bheads = repo.branchheads(branch)
466 rctx = scmutil.revsingle(repo, hex(parent))
466 rctx = scmutil.revsingle(repo, hex(parent))
467 if not opts.get('merge') and op1 != node:
467 if not opts.get('merge') and op1 != node:
468 try:
468 try:
469 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
469 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
470 'backout')
470 'backout')
471 stats = mergemod.update(repo, parent, True, True, False,
471 stats = mergemod.update(repo, parent, True, True, False,
472 node, False)
472 node, False)
473 repo.setparents(op1, op2)
473 repo.setparents(op1, op2)
474 hg._showstats(repo, stats)
474 hg._showstats(repo, stats)
475 if stats[3]:
475 if stats[3]:
476 repo.ui.status(_("use 'hg resolve' to retry unresolved "
476 repo.ui.status(_("use 'hg resolve' to retry unresolved "
477 "file merges\n"))
477 "file merges\n"))
478 else:
478 else:
479 msg = _("changeset %s backed out, "
479 msg = _("changeset %s backed out, "
480 "don't forget to commit.\n")
480 "don't forget to commit.\n")
481 ui.status(msg % short(node))
481 ui.status(msg % short(node))
482 return stats[3] > 0
482 return stats[3] > 0
483 finally:
483 finally:
484 ui.setconfig('ui', 'forcemerge', '', '')
484 ui.setconfig('ui', 'forcemerge', '', '')
485 else:
485 else:
486 hg.clean(repo, node, show_stats=False)
486 hg.clean(repo, node, show_stats=False)
487 repo.dirstate.setbranch(branch)
487 repo.dirstate.setbranch(branch)
488 cmdutil.revert(ui, repo, rctx, repo.dirstate.parents())
488 cmdutil.revert(ui, repo, rctx, repo.dirstate.parents())
489
489
490
490
491 e = cmdutil.commiteditor
491 e = cmdutil.commiteditor
492 if not opts['message'] and not opts['logfile']:
492 if not opts['message'] and not opts['logfile']:
493 # we don't translate commit messages
493 # we don't translate commit messages
494 opts['message'] = "Backed out changeset %s" % short(node)
494 opts['message'] = "Backed out changeset %s" % short(node)
495 e = cmdutil.commitforceeditor
495 e = cmdutil.commitforceeditor
496
496
497 def commitfunc(ui, repo, message, match, opts):
497 def commitfunc(ui, repo, message, match, opts):
498 return repo.commit(message, opts.get('user'), opts.get('date'),
498 return repo.commit(message, opts.get('user'), opts.get('date'),
499 match, editor=e)
499 match, editor=e)
500 newnode = cmdutil.commit(ui, repo, commitfunc, [], opts)
500 newnode = cmdutil.commit(ui, repo, commitfunc, [], opts)
501 if not newnode:
501 if not newnode:
502 ui.status(_("nothing changed\n"))
502 ui.status(_("nothing changed\n"))
503 return 1
503 return 1
504 cmdutil.commitstatus(repo, newnode, branch, bheads)
504 cmdutil.commitstatus(repo, newnode, branch, bheads)
505
505
506 def nice(node):
506 def nice(node):
507 return '%d:%s' % (repo.changelog.rev(node), short(node))
507 return '%d:%s' % (repo.changelog.rev(node), short(node))
508 ui.status(_('changeset %s backs out changeset %s\n') %
508 ui.status(_('changeset %s backs out changeset %s\n') %
509 (nice(repo.changelog.tip()), nice(node)))
509 (nice(repo.changelog.tip()), nice(node)))
510 if opts.get('merge') and op1 != node:
510 if opts.get('merge') and op1 != node:
511 hg.clean(repo, op1, show_stats=False)
511 hg.clean(repo, op1, show_stats=False)
512 ui.status(_('merging with changeset %s\n')
512 ui.status(_('merging with changeset %s\n')
513 % nice(repo.changelog.tip()))
513 % nice(repo.changelog.tip()))
514 try:
514 try:
515 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
515 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
516 'backout')
516 'backout')
517 return hg.merge(repo, hex(repo.changelog.tip()))
517 return hg.merge(repo, hex(repo.changelog.tip()))
518 finally:
518 finally:
519 ui.setconfig('ui', 'forcemerge', '', '')
519 ui.setconfig('ui', 'forcemerge', '', '')
520 finally:
520 finally:
521 wlock.release()
521 wlock.release()
522 return 0
522 return 0
523
523
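A minimal sketch (not part of commands.py) of the two-step flow described above for backing out a changeset that is not the working directory parent; the revision number is a placeholder and hg is assumed to be on PATH.

import subprocess

def backout(rev):
    # Exit status 0 on success, 1 if there is nothing to back out or if
    # unresolved files remain, as documented above.
    return subprocess.call(["hg", "backout", "-r", rev])

# Typical use for a non-parent revision: the merged result is left
# uncommitted, so commit it afterwards.
if backout("1105") == 0:
    subprocess.call(["hg", "commit", "-m", "Backed out changeset 1105"])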
524 @command('bisect',
524 @command('bisect',
525 [('r', 'reset', False, _('reset bisect state')),
525 [('r', 'reset', False, _('reset bisect state')),
526 ('g', 'good', False, _('mark changeset good')),
526 ('g', 'good', False, _('mark changeset good')),
527 ('b', 'bad', False, _('mark changeset bad')),
527 ('b', 'bad', False, _('mark changeset bad')),
528 ('s', 'skip', False, _('skip testing changeset')),
528 ('s', 'skip', False, _('skip testing changeset')),
529 ('e', 'extend', False, _('extend the bisect range')),
529 ('e', 'extend', False, _('extend the bisect range')),
530 ('c', 'command', '', _('use command to check changeset state'), _('CMD')),
530 ('c', 'command', '', _('use command to check changeset state'), _('CMD')),
531 ('U', 'noupdate', False, _('do not update to target'))],
531 ('U', 'noupdate', False, _('do not update to target'))],
532 _("[-gbsr] [-U] [-c CMD] [REV]"))
532 _("[-gbsr] [-U] [-c CMD] [REV]"))
533 def bisect(ui, repo, rev=None, extra=None, command=None,
533 def bisect(ui, repo, rev=None, extra=None, command=None,
534 reset=None, good=None, bad=None, skip=None, extend=None,
534 reset=None, good=None, bad=None, skip=None, extend=None,
535 noupdate=None):
535 noupdate=None):
536 """subdivision search of changesets
536 """subdivision search of changesets
537
537
538 This command helps to find changesets which introduce problems. To
538 This command helps to find changesets which introduce problems. To
539 use, mark the earliest changeset you know exhibits the problem as
539 use, mark the earliest changeset you know exhibits the problem as
540 bad, then mark the latest changeset which is free from the problem
540 bad, then mark the latest changeset which is free from the problem
541 as good. Bisect will update your working directory to a revision
541 as good. Bisect will update your working directory to a revision
542 for testing (unless the -U/--noupdate option is specified). Once
542 for testing (unless the -U/--noupdate option is specified). Once
543 you have performed tests, mark the working directory as good or
543 you have performed tests, mark the working directory as good or
544 bad, and bisect will either update to another candidate changeset
544 bad, and bisect will either update to another candidate changeset
545 or announce that it has found the bad revision.
545 or announce that it has found the bad revision.
546
546
547 As a shortcut, you can also use the revision argument to mark a
547 As a shortcut, you can also use the revision argument to mark a
548 revision as good or bad without checking it out first.
548 revision as good or bad without checking it out first.
549
549
550 If you supply a command, it will be used for automatic bisection.
550 If you supply a command, it will be used for automatic bisection.
551 The environment variable HG_NODE will contain the ID of the
551 The environment variable HG_NODE will contain the ID of the
552 changeset being tested. The exit status of the command will be
552 changeset being tested. The exit status of the command will be
553 used to mark revisions as good or bad: status 0 means good, 125
553 used to mark revisions as good or bad: status 0 means good, 125
554 means to skip the revision, 127 (command not found) will abort the
554 means to skip the revision, 127 (command not found) will abort the
555 bisection, and any other non-zero exit status means the revision
555 bisection, and any other non-zero exit status means the revision
556 is bad.
556 is bad.
557
557
558 .. container:: verbose
558 .. container:: verbose
559
559
560 Some examples:
560 Some examples:
561
561
562 - start a bisection with known bad revision 34, and good revision 12::
562 - start a bisection with known bad revision 34, and good revision 12::
563
563
564 hg bisect --bad 34
564 hg bisect --bad 34
565 hg bisect --good 12
565 hg bisect --good 12
566
566
567 - advance the current bisection by marking current revision as good or
567 - advance the current bisection by marking current revision as good or
568 bad::
568 bad::
569
569
570 hg bisect --good
570 hg bisect --good
571 hg bisect --bad
571 hg bisect --bad
572
572
573 - mark the current revision, or a known revision, to be skipped (e.g. if
573 - mark the current revision, or a known revision, to be skipped (e.g. if
574 that revision is not usable because of another issue)::
574 that revision is not usable because of another issue)::
575
575
576 hg bisect --skip
576 hg bisect --skip
577 hg bisect --skip 23
577 hg bisect --skip 23
578
578
579 - skip all revisions that do not touch directories ``foo`` or ``bar``::
579 - skip all revisions that do not touch directories ``foo`` or ``bar``::
580
580
581 hg bisect --skip "!( file('path:foo') & file('path:bar') )"
581 hg bisect --skip "!( file('path:foo') & file('path:bar') )"
582
582
583 - forget the current bisection::
583 - forget the current bisection::
584
584
585 hg bisect --reset
585 hg bisect --reset
586
586
587 - use 'make && make tests' to automatically find the first broken
587 - use 'make && make tests' to automatically find the first broken
588 revision::
588 revision::
589
589
590 hg bisect --reset
590 hg bisect --reset
591 hg bisect --bad 34
591 hg bisect --bad 34
592 hg bisect --good 12
592 hg bisect --good 12
593 hg bisect --command "make && make tests"
593 hg bisect --command "make && make tests"
594
594
595 - see all changesets whose states are already known in the current
595 - see all changesets whose states are already known in the current
596 bisection::
596 bisection::
597
597
598 hg log -r "bisect(pruned)"
598 hg log -r "bisect(pruned)"
599
599
600 - see the changeset currently being bisected (especially useful
600 - see the changeset currently being bisected (especially useful
601 if running with -U/--noupdate)::
601 if running with -U/--noupdate)::
602
602
603 hg log -r "bisect(current)"
603 hg log -r "bisect(current)"
604
604
605 - see all changesets that took part in the current bisection::
605 - see all changesets that took part in the current bisection::
606
606
607 hg log -r "bisect(range)"
607 hg log -r "bisect(range)"
608
608
609 - you can even get a nice graph::
609 - you can even get a nice graph::
610
610
611 hg log --graph -r "bisect(range)"
611 hg log --graph -r "bisect(range)"
612
612
613 See :hg:`help revsets` for more about the `bisect()` keyword.
613 See :hg:`help revsets` for more about the `bisect()` keyword.
614
614
615 Returns 0 on success.
615 Returns 0 on success.
616 """
616 """
617 def extendbisectrange(nodes, good):
617 def extendbisectrange(nodes, good):
618 # bisect is incomplete when it ends on a merge node and
618 # bisect is incomplete when it ends on a merge node and
619 # one of the parent was not checked.
619 # one of the parent was not checked.
620 parents = repo[nodes[0]].parents()
620 parents = repo[nodes[0]].parents()
621 if len(parents) > 1:
621 if len(parents) > 1:
622 side = good and state['bad'] or state['good']
622 side = good and state['bad'] or state['good']
623 num = len(set(i.node() for i in parents) & set(side))
623 num = len(set(i.node() for i in parents) & set(side))
624 if num == 1:
624 if num == 1:
625 return parents[0].ancestor(parents[1])
625 return parents[0].ancestor(parents[1])
626 return None
626 return None
627
627
628 def print_result(nodes, good):
628 def print_result(nodes, good):
629 displayer = cmdutil.show_changeset(ui, repo, {})
629 displayer = cmdutil.show_changeset(ui, repo, {})
630 if len(nodes) == 1:
630 if len(nodes) == 1:
631 # narrowed it down to a single revision
631 # narrowed it down to a single revision
632 if good:
632 if good:
633 ui.write(_("The first good revision is:\n"))
633 ui.write(_("The first good revision is:\n"))
634 else:
634 else:
635 ui.write(_("The first bad revision is:\n"))
635 ui.write(_("The first bad revision is:\n"))
636 displayer.show(repo[nodes[0]])
636 displayer.show(repo[nodes[0]])
637 extendnode = extendbisectrange(nodes, good)
637 extendnode = extendbisectrange(nodes, good)
638 if extendnode is not None:
638 if extendnode is not None:
639 ui.write(_('Not all ancestors of this changeset have been'
639 ui.write(_('Not all ancestors of this changeset have been'
640 ' checked.\nUse bisect --extend to continue the '
640 ' checked.\nUse bisect --extend to continue the '
641 'bisection from\nthe common ancestor, %s.\n')
641 'bisection from\nthe common ancestor, %s.\n')
642 % extendnode)
642 % extendnode)
643 else:
643 else:
644 # multiple possible revisions
644 # multiple possible revisions
645 if good:
645 if good:
646 ui.write(_("Due to skipped revisions, the first "
646 ui.write(_("Due to skipped revisions, the first "
647 "good revision could be any of:\n"))
647 "good revision could be any of:\n"))
648 else:
648 else:
649 ui.write(_("Due to skipped revisions, the first "
649 ui.write(_("Due to skipped revisions, the first "
650 "bad revision could be any of:\n"))
650 "bad revision could be any of:\n"))
651 for n in nodes:
651 for n in nodes:
652 displayer.show(repo[n])
652 displayer.show(repo[n])
653 displayer.close()
653 displayer.close()
654
654
655 def check_state(state, interactive=True):
655 def check_state(state, interactive=True):
656 if not state['good'] or not state['bad']:
656 if not state['good'] or not state['bad']:
657 if (good or bad or skip or reset) and interactive:
657 if (good or bad or skip or reset) and interactive:
658 return
658 return
659 if not state['good']:
659 if not state['good']:
660 raise util.Abort(_('cannot bisect (no known good revisions)'))
660 raise util.Abort(_('cannot bisect (no known good revisions)'))
661 else:
661 else:
662 raise util.Abort(_('cannot bisect (no known bad revisions)'))
662 raise util.Abort(_('cannot bisect (no known bad revisions)'))
663 return True
663 return True
664
664
665 # backward compatibility
665 # backward compatibility
666 if rev in "good bad reset init".split():
666 if rev in "good bad reset init".split():
667 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
667 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
668 cmd, rev, extra = rev, extra, None
668 cmd, rev, extra = rev, extra, None
669 if cmd == "good":
669 if cmd == "good":
670 good = True
670 good = True
671 elif cmd == "bad":
671 elif cmd == "bad":
672 bad = True
672 bad = True
673 else:
673 else:
674 reset = True
674 reset = True
675 elif extra or good + bad + skip + reset + extend + bool(command) > 1:
675 elif extra or good + bad + skip + reset + extend + bool(command) > 1:
676 raise util.Abort(_('incompatible arguments'))
676 raise util.Abort(_('incompatible arguments'))
677
677
678 cmdutil.checkunfinished(repo)
678 cmdutil.checkunfinished(repo)
679
679
680 if reset:
680 if reset:
681 p = repo.join("bisect.state")
681 p = repo.join("bisect.state")
682 if os.path.exists(p):
682 if os.path.exists(p):
683 os.unlink(p)
683 os.unlink(p)
684 return
684 return
685
685
686 state = hbisect.load_state(repo)
686 state = hbisect.load_state(repo)
687
687
688 if command:
688 if command:
689 changesets = 1
689 changesets = 1
690 if noupdate:
690 if noupdate:
691 try:
691 try:
692 node = state['current'][0]
692 node = state['current'][0]
693 except LookupError:
693 except LookupError:
694 raise util.Abort(_('current bisect revision is unknown - '
694 raise util.Abort(_('current bisect revision is unknown - '
695 'start a new bisect to fix'))
695 'start a new bisect to fix'))
696 else:
696 else:
697 node, p2 = repo.dirstate.parents()
697 node, p2 = repo.dirstate.parents()
698 if p2 != nullid:
698 if p2 != nullid:
699 raise util.Abort(_('current bisect revision is a merge'))
699 raise util.Abort(_('current bisect revision is a merge'))
700 try:
700 try:
701 while changesets:
701 while changesets:
702 # update state
702 # update state
703 state['current'] = [node]
703 state['current'] = [node]
704 hbisect.save_state(repo, state)
704 hbisect.save_state(repo, state)
705 status = util.system(command,
705 status = util.system(command,
706 environ={'HG_NODE': hex(node)},
706 environ={'HG_NODE': hex(node)},
707 out=ui.fout)
707 out=ui.fout)
708 if status == 125:
708 if status == 125:
709 transition = "skip"
709 transition = "skip"
710 elif status == 0:
710 elif status == 0:
711 transition = "good"
711 transition = "good"
712 # status < 0 means process was killed
712 # status < 0 means process was killed
713 elif status == 127:
713 elif status == 127:
714 raise util.Abort(_("failed to execute %s") % command)
714 raise util.Abort(_("failed to execute %s") % command)
715 elif status < 0:
715 elif status < 0:
716 raise util.Abort(_("%s killed") % command)
716 raise util.Abort(_("%s killed") % command)
717 else:
717 else:
718 transition = "bad"
718 transition = "bad"
719 ctx = scmutil.revsingle(repo, rev, node)
719 ctx = scmutil.revsingle(repo, rev, node)
720 rev = None # clear for future iterations
720 rev = None # clear for future iterations
721 state[transition].append(ctx.node())
721 state[transition].append(ctx.node())
722 ui.status(_('changeset %d:%s: %s\n') % (ctx, ctx, transition))
722 ui.status(_('changeset %d:%s: %s\n') % (ctx, ctx, transition))
723 check_state(state, interactive=False)
723 check_state(state, interactive=False)
724 # bisect
724 # bisect
725 nodes, changesets, bgood = hbisect.bisect(repo.changelog, state)
725 nodes, changesets, bgood = hbisect.bisect(repo.changelog, state)
726 # update to next check
726 # update to next check
727 node = nodes[0]
727 node = nodes[0]
728 if not noupdate:
728 if not noupdate:
729 cmdutil.bailifchanged(repo)
729 cmdutil.bailifchanged(repo)
730 hg.clean(repo, node, show_stats=False)
730 hg.clean(repo, node, show_stats=False)
731 finally:
731 finally:
732 state['current'] = [node]
732 state['current'] = [node]
733 hbisect.save_state(repo, state)
733 hbisect.save_state(repo, state)
734 print_result(nodes, bgood)
734 print_result(nodes, bgood)
735 return
735 return
736
736
737 # update state
737 # update state
738
738
739 if rev:
739 if rev:
740 nodes = [repo.lookup(i) for i in scmutil.revrange(repo, [rev])]
740 nodes = [repo.lookup(i) for i in scmutil.revrange(repo, [rev])]
741 else:
741 else:
742 nodes = [repo.lookup('.')]
742 nodes = [repo.lookup('.')]
743
743
744 if good or bad or skip:
744 if good or bad or skip:
745 if good:
745 if good:
746 state['good'] += nodes
746 state['good'] += nodes
747 elif bad:
747 elif bad:
748 state['bad'] += nodes
748 state['bad'] += nodes
749 elif skip:
749 elif skip:
750 state['skip'] += nodes
750 state['skip'] += nodes
751 hbisect.save_state(repo, state)
751 hbisect.save_state(repo, state)
752
752
753 if not check_state(state):
753 if not check_state(state):
754 return
754 return
755
755
756 # actually bisect
756 # actually bisect
757 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
757 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
758 if extend:
758 if extend:
759 if not changesets:
759 if not changesets:
760 extendnode = extendbisectrange(nodes, good)
760 extendnode = extendbisectrange(nodes, good)
761 if extendnode is not None:
761 if extendnode is not None:
762 ui.write(_("Extending search to changeset %d:%s\n")
762 ui.write(_("Extending search to changeset %d:%s\n")
763 % (extendnode.rev(), extendnode))
763 % (extendnode.rev(), extendnode))
764 state['current'] = [extendnode.node()]
764 state['current'] = [extendnode.node()]
765 hbisect.save_state(repo, state)
765 hbisect.save_state(repo, state)
766 if noupdate:
766 if noupdate:
767 return
767 return
768 cmdutil.bailifchanged(repo)
768 cmdutil.bailifchanged(repo)
769 return hg.clean(repo, extendnode.node())
769 return hg.clean(repo, extendnode.node())
770 raise util.Abort(_("nothing to extend"))
770 raise util.Abort(_("nothing to extend"))
771
771
772 if changesets == 0:
772 if changesets == 0:
773 print_result(nodes, good)
773 print_result(nodes, good)
774 else:
774 else:
775 assert len(nodes) == 1 # only a single node can be tested next
775 assert len(nodes) == 1 # only a single node can be tested next
776 node = nodes[0]
776 node = nodes[0]
777 # compute the approximate number of remaining tests
777 # compute the approximate number of remaining tests
778 tests, size = 0, 2
778 tests, size = 0, 2
779 while size <= changesets:
779 while size <= changesets:
780 tests, size = tests + 1, size * 2
780 tests, size = tests + 1, size * 2
781 rev = repo.changelog.rev(node)
781 rev = repo.changelog.rev(node)
782 ui.write(_("Testing changeset %d:%s "
782 ui.write(_("Testing changeset %d:%s "
783 "(%d changesets remaining, ~%d tests)\n")
783 "(%d changesets remaining, ~%d tests)\n")
784 % (rev, short(node), changesets, tests))
784 % (rev, short(node), changesets, tests))
785 state['current'] = [node]
785 state['current'] = [node]
786 hbisect.save_state(repo, state)
786 hbisect.save_state(repo, state)
787 if not noupdate:
787 if not noupdate:
788 cmdutil.bailifchanged(repo)
788 cmdutil.bailifchanged(repo)
789 return hg.clean(repo, node)
789 return hg.clean(repo, node)
790
790
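The docstring above fixes the exit-status protocol for 'hg bisect --command': 0 marks the tested revision good, 125 skips it, 127 aborts the bisection, and any other status marks it bad. A minimal sketch (not part of commands.py) of such a test script; the make targets are placeholders.

#!/usr/bin/env python
# Hypothetical script for: hg bisect --command "python check.py"
import os
import subprocess
import sys

node = os.environ.get("HG_NODE", "")      # changeset being tested (see above)
sys.stderr.write("bisecting %s\n" % node)

if subprocess.call(["make"]) != 0:
    sys.exit(125)                         # build failure: skip this revision
sys.exit(0 if subprocess.call(["make", "tests"]) == 0 else 1)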
791 @command('bookmarks|bookmark',
791 @command('bookmarks|bookmark',
792 [('f', 'force', False, _('force')),
792 [('f', 'force', False, _('force')),
793 ('r', 'rev', '', _('revision'), _('REV')),
793 ('r', 'rev', '', _('revision'), _('REV')),
794 ('d', 'delete', False, _('delete a given bookmark')),
794 ('d', 'delete', False, _('delete a given bookmark')),
795 ('m', 'rename', '', _('rename a given bookmark'), _('NAME')),
795 ('m', 'rename', '', _('rename a given bookmark'), _('NAME')),
796 ('i', 'inactive', False, _('mark a bookmark inactive'))],
796 ('i', 'inactive', False, _('mark a bookmark inactive'))],
797 _('hg bookmarks [OPTIONS]... [NAME]...'))
797 _('hg bookmarks [OPTIONS]... [NAME]...'))
798 def bookmark(ui, repo, *names, **opts):
798 def bookmark(ui, repo, *names, **opts):
799 '''track a line of development with movable markers
799 '''track a line of development with movable markers
800
800
801 Bookmarks are pointers to certain commits that move when committing.
801 Bookmarks are pointers to certain commits that move when committing.
802 Bookmarks are local. They can be renamed, copied and deleted. It is
802 Bookmarks are local. They can be renamed, copied and deleted. It is
803 possible to use :hg:`merge NAME` to merge from a given bookmark, and
803 possible to use :hg:`merge NAME` to merge from a given bookmark, and
804 :hg:`update NAME` to update to a given bookmark.
804 :hg:`update NAME` to update to a given bookmark.
805
805
806 You can use :hg:`bookmark NAME` to set a bookmark on the working
806 You can use :hg:`bookmark NAME` to set a bookmark on the working
807 directory's parent revision with the given name. If you specify
807 directory's parent revision with the given name. If you specify
808 a revision using -r REV (where REV may be an existing bookmark),
808 a revision using -r REV (where REV may be an existing bookmark),
809 the bookmark is assigned to that revision.
809 the bookmark is assigned to that revision.
810
810
811 Bookmarks can be pushed and pulled between repositories (see :hg:`help
811 Bookmarks can be pushed and pulled between repositories (see :hg:`help
812 push` and :hg:`help pull`). This requires both the local and remote
812 push` and :hg:`help pull`). This requires both the local and remote
813 repositories to support bookmarks. For versions prior to 1.8, this means
813 repositories to support bookmarks. For versions prior to 1.8, this means
814 the bookmarks extension must be enabled.
814 the bookmarks extension must be enabled.
815
815
816 If you set a bookmark called '@', new clones of the repository will
816 If you set a bookmark called '@', new clones of the repository will
817 have that revision checked out (and the bookmark made active) by
817 have that revision checked out (and the bookmark made active) by
818 default.
818 default.
819
819
820 With -i/--inactive, the new bookmark will not be made the active
820 With -i/--inactive, the new bookmark will not be made the active
821 bookmark. If -r/--rev is given, the new bookmark will not be made
821 bookmark. If -r/--rev is given, the new bookmark will not be made
822 active even if -i/--inactive is not given. If no NAME is given, the
822 active even if -i/--inactive is not given. If no NAME is given, the
823 current active bookmark will be marked inactive.
823 current active bookmark will be marked inactive.
824 '''
824 '''
825 force = opts.get('force')
825 force = opts.get('force')
826 rev = opts.get('rev')
826 rev = opts.get('rev')
827 delete = opts.get('delete')
827 delete = opts.get('delete')
828 rename = opts.get('rename')
828 rename = opts.get('rename')
829 inactive = opts.get('inactive')
829 inactive = opts.get('inactive')
830
830
831 def checkformat(mark):
831 def checkformat(mark):
832 mark = mark.strip()
832 mark = mark.strip()
833 if not mark:
833 if not mark:
834 raise util.Abort(_("bookmark names cannot consist entirely of "
834 raise util.Abort(_("bookmark names cannot consist entirely of "
835 "whitespace"))
835 "whitespace"))
836 scmutil.checknewlabel(repo, mark, 'bookmark')
836 scmutil.checknewlabel(repo, mark, 'bookmark')
837 return mark
837 return mark
838
838
839 def checkconflict(repo, mark, cur, force=False, target=None):
839 def checkconflict(repo, mark, cur, force=False, target=None):
840 if mark in marks and not force:
840 if mark in marks and not force:
841 if target:
841 if target:
842 if marks[mark] == target and target == cur:
842 if marks[mark] == target and target == cur:
843 # re-activating a bookmark
843 # re-activating a bookmark
844 return
844 return
845 anc = repo.changelog.ancestors([repo[target].rev()])
845 anc = repo.changelog.ancestors([repo[target].rev()])
846 bmctx = repo[marks[mark]]
846 bmctx = repo[marks[mark]]
847 divs = [repo[b].node() for b in marks
847 divs = [repo[b].node() for b in marks
848 if b.split('@', 1)[0] == mark.split('@', 1)[0]]
848 if b.split('@', 1)[0] == mark.split('@', 1)[0]]
849
849
850 # allow resolving a single divergent bookmark even if moving
850 # allow resolving a single divergent bookmark even if moving
851 # the bookmark across branches when a revision is specified
851 # the bookmark across branches when a revision is specified
852 # that contains a divergent bookmark
852 # that contains a divergent bookmark
853 if bmctx.rev() not in anc and target in divs:
853 if bmctx.rev() not in anc and target in divs:
854 bookmarks.deletedivergent(repo, [target], mark)
854 bookmarks.deletedivergent(repo, [target], mark)
855 return
855 return
856
856
857 deletefrom = [b for b in divs
857 deletefrom = [b for b in divs
858 if repo[b].rev() in anc or b == target]
858 if repo[b].rev() in anc or b == target]
859 bookmarks.deletedivergent(repo, deletefrom, mark)
859 bookmarks.deletedivergent(repo, deletefrom, mark)
860 if bookmarks.validdest(repo, bmctx, repo[target]):
860 if bookmarks.validdest(repo, bmctx, repo[target]):
861 ui.status(_("moving bookmark '%s' forward from %s\n") %
861 ui.status(_("moving bookmark '%s' forward from %s\n") %
862 (mark, short(bmctx.node())))
862 (mark, short(bmctx.node())))
863 return
863 return
864 raise util.Abort(_("bookmark '%s' already exists "
864 raise util.Abort(_("bookmark '%s' already exists "
865 "(use -f to force)") % mark)
865 "(use -f to force)") % mark)
866 if ((mark in repo.branchmap() or mark == repo.dirstate.branch())
866 if ((mark in repo.branchmap() or mark == repo.dirstate.branch())
867 and not force):
867 and not force):
868 raise util.Abort(
868 raise util.Abort(
869 _("a bookmark cannot have the name of an existing branch"))
869 _("a bookmark cannot have the name of an existing branch"))
870
870
871 if delete and rename:
871 if delete and rename:
872 raise util.Abort(_("--delete and --rename are incompatible"))
872 raise util.Abort(_("--delete and --rename are incompatible"))
873 if delete and rev:
873 if delete and rev:
874 raise util.Abort(_("--rev is incompatible with --delete"))
874 raise util.Abort(_("--rev is incompatible with --delete"))
875 if rename and rev:
875 if rename and rev:
876 raise util.Abort(_("--rev is incompatible with --rename"))
876 raise util.Abort(_("--rev is incompatible with --rename"))
877 if not names and (delete or rev):
877 if not names and (delete or rev):
878 raise util.Abort(_("bookmark name required"))
878 raise util.Abort(_("bookmark name required"))
879
879
880 if delete or rename or names or inactive:
880 if delete or rename or names or inactive:
881 wlock = repo.wlock()
881 wlock = repo.wlock()
882 try:
882 try:
883 cur = repo.changectx('.').node()
883 cur = repo.changectx('.').node()
884 marks = repo._bookmarks
884 marks = repo._bookmarks
885 if delete:
885 if delete:
886 for mark in names:
886 for mark in names:
887 if mark not in marks:
887 if mark not in marks:
888 raise util.Abort(_("bookmark '%s' does not exist") %
888 raise util.Abort(_("bookmark '%s' does not exist") %
889 mark)
889 mark)
890 if mark == repo._bookmarkcurrent:
890 if mark == repo._bookmarkcurrent:
891 bookmarks.unsetcurrent(repo)
891 bookmarks.unsetcurrent(repo)
892 del marks[mark]
892 del marks[mark]
893 marks.write()
893 marks.write()
894
894
895 elif rename:
895 elif rename:
896 if not names:
896 if not names:
897 raise util.Abort(_("new bookmark name required"))
897 raise util.Abort(_("new bookmark name required"))
898 elif len(names) > 1:
898 elif len(names) > 1:
899 raise util.Abort(_("only one new bookmark name allowed"))
899 raise util.Abort(_("only one new bookmark name allowed"))
900 mark = checkformat(names[0])
900 mark = checkformat(names[0])
901 if rename not in marks:
901 if rename not in marks:
902 raise util.Abort(_("bookmark '%s' does not exist") % rename)
902 raise util.Abort(_("bookmark '%s' does not exist") % rename)
903 checkconflict(repo, mark, cur, force)
903 checkconflict(repo, mark, cur, force)
904 marks[mark] = marks[rename]
904 marks[mark] = marks[rename]
905 if repo._bookmarkcurrent == rename and not inactive:
905 if repo._bookmarkcurrent == rename and not inactive:
906 bookmarks.setcurrent(repo, mark)
906 bookmarks.setcurrent(repo, mark)
907 del marks[rename]
907 del marks[rename]
908 marks.write()
908 marks.write()
909
909
910 elif names:
910 elif names:
911 newact = None
911 newact = None
912 for mark in names:
912 for mark in names:
913 mark = checkformat(mark)
913 mark = checkformat(mark)
914 if newact is None:
914 if newact is None:
915 newact = mark
915 newact = mark
916 if inactive and mark == repo._bookmarkcurrent:
916 if inactive and mark == repo._bookmarkcurrent:
917 bookmarks.unsetcurrent(repo)
917 bookmarks.unsetcurrent(repo)
918 return
918 return
919 tgt = cur
919 tgt = cur
920 if rev:
920 if rev:
921 tgt = scmutil.revsingle(repo, rev).node()
921 tgt = scmutil.revsingle(repo, rev).node()
922 checkconflict(repo, mark, cur, force, tgt)
922 checkconflict(repo, mark, cur, force, tgt)
923 marks[mark] = tgt
923 marks[mark] = tgt
924 if not inactive and cur == marks[newact] and not rev:
924 if not inactive and cur == marks[newact] and not rev:
925 bookmarks.setcurrent(repo, newact)
925 bookmarks.setcurrent(repo, newact)
926 elif cur != tgt and newact == repo._bookmarkcurrent:
926 elif cur != tgt and newact == repo._bookmarkcurrent:
927 bookmarks.unsetcurrent(repo)
927 bookmarks.unsetcurrent(repo)
928 marks.write()
928 marks.write()
929
929
930 elif inactive:
930 elif inactive:
931 if len(marks) == 0:
931 if len(marks) == 0:
932 ui.status(_("no bookmarks set\n"))
932 ui.status(_("no bookmarks set\n"))
933 elif not repo._bookmarkcurrent:
933 elif not repo._bookmarkcurrent:
934 ui.status(_("no active bookmark\n"))
934 ui.status(_("no active bookmark\n"))
935 else:
935 else:
936 bookmarks.unsetcurrent(repo)
936 bookmarks.unsetcurrent(repo)
937 finally:
937 finally:
938 wlock.release()
938 wlock.release()
939 else: # show bookmarks
939 else: # show bookmarks
940 hexfn = ui.debugflag and hex or short
940 hexfn = ui.debugflag and hex or short
941 marks = repo._bookmarks
941 marks = repo._bookmarks
942 if len(marks) == 0:
942 if len(marks) == 0:
943 ui.status(_("no bookmarks set\n"))
943 ui.status(_("no bookmarks set\n"))
944 else:
944 else:
945 for bmark, n in sorted(marks.iteritems()):
945 for bmark, n in sorted(marks.iteritems()):
946 current = repo._bookmarkcurrent
946 current = repo._bookmarkcurrent
947 if bmark == current:
947 if bmark == current:
948 prefix, label = '*', 'bookmarks.current'
948 prefix, label = '*', 'bookmarks.current'
949 else:
949 else:
950 prefix, label = ' ', ''
950 prefix, label = ' ', ''
951
951
952 if ui.quiet:
952 if ui.quiet:
953 ui.write("%s\n" % bmark, label=label)
953 ui.write("%s\n" % bmark, label=label)
954 else:
954 else:
955 ui.write(" %s %-25s %d:%s\n" % (
955 ui.write(" %s %-25s %d:%s\n" % (
956 prefix, bmark, repo.changelog.rev(n), hexfn(n)),
956 prefix, bmark, repo.changelog.rev(n), hexfn(n)),
957 label=label)
957 label=label)
958
958
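A minimal sketch (not part of commands.py) of the bookmark workflow documented above; the bookmark names and revision are placeholders and hg is assumed to be on PATH.

import subprocess

def hg(*args):
    subprocess.check_call(["hg"] + list(args))

hg("bookmark", "feature-x")              # bookmark the working dir parent
hg("bookmark", "-r", "1105", "hotfix")   # pin to a revision (stays inactive)
hg("bookmark", "-i", "feature-x")        # deactivate the current bookmark
hg("bookmarks")                          # list; '*' marks the active bookmark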
959 @command('branch',
959 @command('branch',
960 [('f', 'force', None,
960 [('f', 'force', None,
961 _('set branch name even if it shadows an existing branch')),
961 _('set branch name even if it shadows an existing branch')),
962 ('C', 'clean', None, _('reset branch name to parent branch name'))],
962 ('C', 'clean', None, _('reset branch name to parent branch name'))],
963 _('[-fC] [NAME]'))
963 _('[-fC] [NAME]'))
964 def branch(ui, repo, label=None, **opts):
964 def branch(ui, repo, label=None, **opts):
965 """set or show the current branch name
965 """set or show the current branch name
966
966
967 .. note::
967 .. note::
968
968
969 Branch names are permanent and global. Use :hg:`bookmark` to create a
969 Branch names are permanent and global. Use :hg:`bookmark` to create a
970 light-weight bookmark instead. See :hg:`help glossary` for more
970 light-weight bookmark instead. See :hg:`help glossary` for more
971 information about named branches and bookmarks.
971 information about named branches and bookmarks.
972
972
973 With no argument, show the current branch name. With one argument,
973 With no argument, show the current branch name. With one argument,
974 set the working directory branch name (the branch will not exist
974 set the working directory branch name (the branch will not exist
975 in the repository until the next commit). Standard practice
975 in the repository until the next commit). Standard practice
976 recommends that primary development take place on the 'default'
976 recommends that primary development take place on the 'default'
977 branch.
977 branch.
978
978
979 Unless -f/--force is specified, branch will not let you set a
979 Unless -f/--force is specified, branch will not let you set a
980 branch name that already exists, even if it's inactive.
980 branch name that already exists, even if it's inactive.
981
981
982 Use -C/--clean to reset the working directory branch to that of
982 Use -C/--clean to reset the working directory branch to that of
983 the parent of the working directory, negating a previous branch
983 the parent of the working directory, negating a previous branch
984 change.
984 change.
985
985
986 Use the command :hg:`update` to switch to an existing branch. Use
986 Use the command :hg:`update` to switch to an existing branch. Use
987 :hg:`commit --close-branch` to mark this branch as closed.
987 :hg:`commit --close-branch` to mark this branch as closed.
988
988
989 Returns 0 on success.
989 Returns 0 on success.
990 """
990 """
991 if label:
991 if label:
992 label = label.strip()
992 label = label.strip()
993
993
994 if not opts.get('clean') and not label:
994 if not opts.get('clean') and not label:
995 ui.write("%s\n" % repo.dirstate.branch())
995 ui.write("%s\n" % repo.dirstate.branch())
996 return
996 return
997
997
998 wlock = repo.wlock()
998 wlock = repo.wlock()
999 try:
999 try:
1000 if opts.get('clean'):
1000 if opts.get('clean'):
1001 label = repo[None].p1().branch()
1001 label = repo[None].p1().branch()
1002 repo.dirstate.setbranch(label)
1002 repo.dirstate.setbranch(label)
1003 ui.status(_('reset working directory to branch %s\n') % label)
1003 ui.status(_('reset working directory to branch %s\n') % label)
1004 elif label:
1004 elif label:
1005 if not opts.get('force') and label in repo.branchmap():
1005 if not opts.get('force') and label in repo.branchmap():
1006 if label not in [p.branch() for p in repo.parents()]:
1006 if label not in [p.branch() for p in repo.parents()]:
1007 raise util.Abort(_('a branch of the same name already'
1007 raise util.Abort(_('a branch of the same name already'
1008 ' exists'),
1008 ' exists'),
1009 # i18n: "it" refers to an existing branch
1009 # i18n: "it" refers to an existing branch
1010 hint=_("use 'hg update' to switch to it"))
1010 hint=_("use 'hg update' to switch to it"))
1011 scmutil.checknewlabel(repo, label, 'branch')
1011 scmutil.checknewlabel(repo, label, 'branch')
1012 repo.dirstate.setbranch(label)
1012 repo.dirstate.setbranch(label)
1013 ui.status(_('marked working directory as branch %s\n') % label)
1013 ui.status(_('marked working directory as branch %s\n') % label)
1014 ui.status(_('(branches are permanent and global, '
1014 ui.status(_('(branches are permanent and global, '
1015 'did you want a bookmark?)\n'))
1015 'did you want a bookmark?)\n'))
1016 finally:
1016 finally:
1017 wlock.release()
1017 wlock.release()
1018
1018
1019 @command('branches',
1019 @command('branches',
1020 [('a', 'active', False, _('show only branches that have unmerged heads')),
1020 [('a', 'active', False, _('show only branches that have unmerged heads')),
1021 ('c', 'closed', False, _('show normal and closed branches'))],
1021 ('c', 'closed', False, _('show normal and closed branches'))],
1022 _('[-ac]'))
1022 _('[-ac]'))
1023 def branches(ui, repo, active=False, closed=False):
1023 def branches(ui, repo, active=False, closed=False):
1024 """list repository named branches
1024 """list repository named branches
1025
1025
1026 List the repository's named branches, indicating which ones are
1026 List the repository's named branches, indicating which ones are
1027 inactive. If -c/--closed is specified, also list branches which have
1027 inactive. If -c/--closed is specified, also list branches which have
1028 been marked closed (see :hg:`commit --close-branch`).
1028 been marked closed (see :hg:`commit --close-branch`).
1029
1029
1030 If -a/--active is specified, only show active branches. A branch
1030 If -a/--active is specified, only show active branches. A branch
1031 is considered active if it contains repository heads.
1031 is considered active if it contains repository heads.
1032
1032
1033 Use the command :hg:`update` to switch to an existing branch.
1033 Use the command :hg:`update` to switch to an existing branch.
1034
1034
1035 Returns 0.
1035 Returns 0.
1036 """
1036 """
1037
1037
1038 hexfunc = ui.debugflag and hex or short
1038 hexfunc = ui.debugflag and hex or short
1039
1039
1040 allheads = set(repo.heads())
1040 allheads = set(repo.heads())
1041 branches = []
1041 branches = []
1042 for tag, heads, tip, isclosed in repo.branchmap().iterbranches():
1042 for tag, heads, tip, isclosed in repo.branchmap().iterbranches():
1043 isactive = not isclosed and bool(set(heads) & allheads)
1043 isactive = not isclosed and bool(set(heads) & allheads)
1044 branches.append((tag, repo[tip], isactive, not isclosed))
1044 branches.append((tag, repo[tip], isactive, not isclosed))
1045 branches.sort(key=lambda i: (i[2], i[1].rev(), i[0], i[3]),
1045 branches.sort(key=lambda i: (i[2], i[1].rev(), i[0], i[3]),
1046 reverse=True)
1046 reverse=True)
1047
1047
1048 for tag, ctx, isactive, isopen in branches:
1048 for tag, ctx, isactive, isopen in branches:
1049 if (not active) or isactive:
1049 if (not active) or isactive:
1050 if isactive:
1050 if isactive:
1051 label = 'branches.active'
1051 label = 'branches.active'
1052 notice = ''
1052 notice = ''
1053 elif not isopen:
1053 elif not isopen:
1054 if not closed:
1054 if not closed:
1055 continue
1055 continue
1056 label = 'branches.closed'
1056 label = 'branches.closed'
1057 notice = _(' (closed)')
1057 notice = _(' (closed)')
1058 else:
1058 else:
1059 label = 'branches.inactive'
1059 label = 'branches.inactive'
1060 notice = _(' (inactive)')
1060 notice = _(' (inactive)')
1061 if tag == repo.dirstate.branch():
1061 if tag == repo.dirstate.branch():
1062 label = 'branches.current'
1062 label = 'branches.current'
1063 rev = str(ctx.rev()).rjust(31 - encoding.colwidth(tag))
1063 rev = str(ctx.rev()).rjust(31 - encoding.colwidth(tag))
1064 rev = ui.label('%s:%s' % (rev, hexfunc(ctx.node())),
1064 rev = ui.label('%s:%s' % (rev, hexfunc(ctx.node())),
1065 'log.changeset changeset.%s' % ctx.phasestr())
1065 'log.changeset changeset.%s' % ctx.phasestr())
1066 labeledtag = ui.label(tag, label)
1066 labeledtag = ui.label(tag, label)
1067 if ui.quiet:
1067 if ui.quiet:
1068 ui.write("%s\n" % labeledtag)
1068 ui.write("%s\n" % labeledtag)
1069 else:
1069 else:
1070 ui.write("%s %s%s\n" % (labeledtag, rev, notice))
1070 ui.write("%s %s%s\n" % (labeledtag, rev, notice))
1071
1071
1072 @command('bundle',
1072 @command('bundle',
1073 [('f', 'force', None, _('run even when the destination is unrelated')),
1073 [('f', 'force', None, _('run even when the destination is unrelated')),
1074 ('r', 'rev', [], _('a changeset intended to be added to the destination'),
1074 ('r', 'rev', [], _('a changeset intended to be added to the destination'),
1075 _('REV')),
1075 _('REV')),
1076 ('b', 'branch', [], _('a specific branch you would like to bundle'),
1076 ('b', 'branch', [], _('a specific branch you would like to bundle'),
1077 _('BRANCH')),
1077 _('BRANCH')),
1078 ('', 'base', [],
1078 ('', 'base', [],
1079 _('a base changeset assumed to be available at the destination'),
1079 _('a base changeset assumed to be available at the destination'),
1080 _('REV')),
1080 _('REV')),
1081 ('a', 'all', None, _('bundle all changesets in the repository')),
1081 ('a', 'all', None, _('bundle all changesets in the repository')),
1082 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE')),
1082 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE')),
1083 ] + remoteopts,
1083 ] + remoteopts,
1084 _('[-f] [-t TYPE] [-a] [-r REV]... [--base REV]... FILE [DEST]'))
1084 _('[-f] [-t TYPE] [-a] [-r REV]... [--base REV]... FILE [DEST]'))
1085 def bundle(ui, repo, fname, dest=None, **opts):
1085 def bundle(ui, repo, fname, dest=None, **opts):
1086 """create a changegroup file
1086 """create a changegroup file
1087
1087
1088 Generate a compressed changegroup file collecting changesets not
1088 Generate a compressed changegroup file collecting changesets not
1089 known to be in another repository.
1089 known to be in another repository.
1090
1090
1091 If you omit the destination repository, then hg assumes the
1091 If you omit the destination repository, then hg assumes the
1092 destination will have all the nodes you specify with --base
1092 destination will have all the nodes you specify with --base
1093 parameters. To create a bundle containing all changesets, use
1093 parameters. To create a bundle containing all changesets, use
1094 -a/--all (or --base null).
1094 -a/--all (or --base null).
1095
1095
1096 You can change compression method with the -t/--type option.
1096 You can change compression method with the -t/--type option.
1097 The available compression methods are: none, bzip2, and
1097 The available compression methods are: none, bzip2, and
1098 gzip (by default, bundles are compressed using bzip2).
1098 gzip (by default, bundles are compressed using bzip2).
1099
1099
1100 The bundle file can then be transferred using conventional means
1100 The bundle file can then be transferred using conventional means
1101 and applied to another repository with the unbundle or pull
1101 and applied to another repository with the unbundle or pull
1102 command. This is useful when direct push and pull are not
1102 command. This is useful when direct push and pull are not
1103 available or when exporting an entire repository is undesirable.
1103 available or when exporting an entire repository is undesirable.
1104
1104
1105 Applying bundles preserves all changeset contents including
1105 Applying bundles preserves all changeset contents including
1106 permissions, copy/rename information, and revision history.
1106 permissions, copy/rename information, and revision history.
1107
1107
1108 Returns 0 on success, 1 if no changes found.
1108 Returns 0 on success, 1 if no changes found.
1109 """
1109 """
1110 revs = None
1110 revs = None
1111 if 'rev' in opts:
1111 if 'rev' in opts:
1112 revs = scmutil.revrange(repo, opts['rev'])
1112 revs = scmutil.revrange(repo, opts['rev'])
1113
1113
1114 bundletype = opts.get('type', 'bzip2').lower()
1114 bundletype = opts.get('type', 'bzip2').lower()
1115 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
1115 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
1116 bundletype = btypes.get(bundletype)
1116 bundletype = btypes.get(bundletype)
1117 if bundletype not in changegroup.bundletypes:
1117 if bundletype not in changegroup.bundletypes:
1118 raise util.Abort(_('unknown bundle type specified with --type'))
1118 raise util.Abort(_('unknown bundle type specified with --type'))
1119
1119
1120 if opts.get('all'):
1121 base = ['null']
1122 else:
1123 base = scmutil.revrange(repo, opts.get('base'))
1124 # TODO: get desired bundlecaps from command line.
1125 bundlecaps = None
1126 if base:
1127 if dest:
1128 raise util.Abort(_("--base is incompatible with specifying "
1129 "a destination"))
1130 common = [repo.lookup(rev) for rev in base]
1131 heads = revs and map(repo.lookup, revs) or revs
1132 cg = repo.getbundle('bundle', heads=heads, common=common,
1133 bundlecaps=bundlecaps)
1134 outgoing = None
1135 else:
1136 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1137 dest, branches = hg.parseurl(dest, opts.get('branch'))
1138 other = hg.peer(repo, opts, dest)
1139 revs, checkout = hg.addbranchrevs(repo, repo, branches, revs)
1140 heads = revs and map(repo.lookup, revs) or revs
1141 outgoing = discovery.findcommonoutgoing(repo, other,
1142 onlyheads=heads,
1143 force=opts.get('force'),
1144 portable=True)
1145 cg = repo.getlocalbundle('bundle', outgoing, bundlecaps)
1145 cg = changegroup.getlocalbundle(repo, 'bundle', outgoing, bundlecaps)
1146 if not cg:
1147 scmutil.nochangesfound(ui, repo, outgoing and outgoing.excluded)
1148 return 1
1149
1150 changegroup.writebundle(cg, fname, bundletype)
1151
1151
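The hunk above carries the actual change of this commit: the local-bundle generator moves from a localrepo method into the changegroup module, so line 1145 now calls changegroup.getlocalbundle(repo, ...) instead of repo.getlocalbundle(...). A minimal sketch (not part of commands.py) of the calling convention before and after; repo, other and heads are assumed to exist as in the function above, and only the calls shown in the diff are taken from the source.

from mercurial import changegroup, discovery

# Assumes an existing localrepo 'repo', a peer 'other' and optional 'heads'.
outgoing = discovery.findcommonoutgoing(repo, other, onlyheads=heads,
                                        force=False, portable=True)
bundlecaps = None

# Before this changeset (method on the local repository):
#   cg = repo.getlocalbundle('bundle', outgoing, bundlecaps)
# After this changeset (module-level function, repo passed explicitly):
cg = changegroup.getlocalbundle(repo, 'bundle', outgoing, bundlecaps)
changegroup.writebundle(cg, 'changes.hg', 'HG10BZ')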
1152 @command('cat',
1152 @command('cat',
1153 [('o', 'output', '',
1153 [('o', 'output', '',
1154 _('print output to file with formatted name'), _('FORMAT')),
1154 _('print output to file with formatted name'), _('FORMAT')),
1155 ('r', 'rev', '', _('print the given revision'), _('REV')),
1155 ('r', 'rev', '', _('print the given revision'), _('REV')),
1156 ('', 'decode', None, _('apply any matching decode filter')),
1156 ('', 'decode', None, _('apply any matching decode filter')),
1157 ] + walkopts,
1157 ] + walkopts,
1158 _('[OPTION]... FILE...'))
1158 _('[OPTION]... FILE...'))
1159 def cat(ui, repo, file1, *pats, **opts):
1159 def cat(ui, repo, file1, *pats, **opts):
1160 """output the current or given revision of files
1160 """output the current or given revision of files
1161
1161
1162 Print the specified files as they were at the given revision. If
1162 Print the specified files as they were at the given revision. If
1163 no revision is given, the parent of the working directory is used.
1163 no revision is given, the parent of the working directory is used.
1164
1164
1165 Output may be to a file, in which case the name of the file is
1165 Output may be to a file, in which case the name of the file is
1166 given using a format string. The formatting rules are the same as
1166 given using a format string. The formatting rules are the same as
1167 for the export command, with the following additions:
1167 for the export command, with the following additions:
1168
1168
1169 :``%s``: basename of file being printed
1169 :``%s``: basename of file being printed
1170 :``%d``: dirname of file being printed, or '.' if in repository root
1170 :``%d``: dirname of file being printed, or '.' if in repository root
1171 :``%p``: root-relative path name of file being printed
1171 :``%p``: root-relative path name of file being printed
1172
1172
1173 Returns 0 on success.
1173 Returns 0 on success.
1174 """
1174 """
1175 ctx = scmutil.revsingle(repo, opts.get('rev'))
1175 ctx = scmutil.revsingle(repo, opts.get('rev'))
1176 err = 1
1176 err = 1
1177 m = scmutil.match(ctx, (file1,) + pats, opts)
1177 m = scmutil.match(ctx, (file1,) + pats, opts)
1178
1178
1179 def write(path):
1179 def write(path):
1180 fp = cmdutil.makefileobj(repo, opts.get('output'), ctx.node(),
1180 fp = cmdutil.makefileobj(repo, opts.get('output'), ctx.node(),
1181 pathname=path)
1181 pathname=path)
1182 data = ctx[path].data()
1182 data = ctx[path].data()
1183 if opts.get('decode'):
1183 if opts.get('decode'):
1184 data = repo.wwritedata(path, data)
1184 data = repo.wwritedata(path, data)
1185 fp.write(data)
1185 fp.write(data)
1186 fp.close()
1186 fp.close()
1187
1187
1188 # Automation often uses hg cat on single files, so special case it
1188 # Automation often uses hg cat on single files, so special case it
1189 # for performance to avoid the cost of parsing the manifest.
1189 # for performance to avoid the cost of parsing the manifest.
1190 if len(m.files()) == 1 and not m.anypats():
1190 if len(m.files()) == 1 and not m.anypats():
1191 file = m.files()[0]
1191 file = m.files()[0]
1192 mf = repo.manifest
1192 mf = repo.manifest
1193 mfnode = ctx._changeset[0]
1193 mfnode = ctx._changeset[0]
1194 if mf.find(mfnode, file)[0]:
1194 if mf.find(mfnode, file)[0]:
1195 write(file)
1195 write(file)
1196 return 0
1196 return 0
1197
1197
1198 for abs in ctx.walk(m):
1198 for abs in ctx.walk(m):
1199 write(abs)
1199 write(abs)
1200 err = 0
1200 err = 0
1201 return err
1201 return err
1202
1202
1203 @command('^clone',
1203 @command('^clone',
1204 [('U', 'noupdate', None,
1204 [('U', 'noupdate', None,
1205 _('the clone will include an empty working copy (only a repository)')),
1205 _('the clone will include an empty working copy (only a repository)')),
1206 ('u', 'updaterev', '', _('revision, tag or branch to check out'), _('REV')),
1206 ('u', 'updaterev', '', _('revision, tag or branch to check out'), _('REV')),
1207 ('r', 'rev', [], _('include the specified changeset'), _('REV')),
1207 ('r', 'rev', [], _('include the specified changeset'), _('REV')),
1208 ('b', 'branch', [], _('clone only the specified branch'), _('BRANCH')),
1208 ('b', 'branch', [], _('clone only the specified branch'), _('BRANCH')),
1209 ('', 'pull', None, _('use pull protocol to copy metadata')),
1209 ('', 'pull', None, _('use pull protocol to copy metadata')),
1210 ('', 'uncompressed', None, _('use uncompressed transfer (fast over LAN)')),
1210 ('', 'uncompressed', None, _('use uncompressed transfer (fast over LAN)')),
1211 ] + remoteopts,
1211 ] + remoteopts,
1212 _('[OPTION]... SOURCE [DEST]'))
1212 _('[OPTION]... SOURCE [DEST]'))
1213 def clone(ui, source, dest=None, **opts):
1213 def clone(ui, source, dest=None, **opts):
1214 """make a copy of an existing repository
1214 """make a copy of an existing repository
1215
1215
1216 Create a copy of an existing repository in a new directory.
1216 Create a copy of an existing repository in a new directory.
1217
1217
1218 If no destination directory name is specified, it defaults to the
1218 If no destination directory name is specified, it defaults to the
1219 basename of the source.
1219 basename of the source.
1220
1220
1221 The location of the source is added to the new repository's
1221 The location of the source is added to the new repository's
1222 ``.hg/hgrc`` file, as the default to be used for future pulls.
1222 ``.hg/hgrc`` file, as the default to be used for future pulls.
1223
1223
1224 Only local paths and ``ssh://`` URLs are supported as
1224 Only local paths and ``ssh://`` URLs are supported as
1225 destinations. For ``ssh://`` destinations, no working directory or
1225 destinations. For ``ssh://`` destinations, no working directory or
1226 ``.hg/hgrc`` will be created on the remote side.
1226 ``.hg/hgrc`` will be created on the remote side.
1227
1227
1228 To pull only a subset of changesets, specify one or more revision
1228 To pull only a subset of changesets, specify one or more revision
1229 identifiers with -r/--rev or branches with -b/--branch. The
1229 identifiers with -r/--rev or branches with -b/--branch. The
1230 resulting clone will contain only the specified changesets and
1230 resulting clone will contain only the specified changesets and
1231 their ancestors. These options (or 'clone src#rev dest') imply
1231 their ancestors. These options (or 'clone src#rev dest') imply
1232 --pull, even for local source repositories. Note that specifying a
1232 --pull, even for local source repositories. Note that specifying a
1233 tag will include the tagged changeset but not the changeset
1233 tag will include the tagged changeset but not the changeset
1234 containing the tag.
1234 containing the tag.
1235
1235
1236 If the source repository has a bookmark called '@' set, that
1236 If the source repository has a bookmark called '@' set, that
1237 revision will be checked out in the new repository by default.
1237 revision will be checked out in the new repository by default.
1238
1238
1239 To check out a particular version, use -u/--update, or
1239 To check out a particular version, use -u/--update, or
1240 -U/--noupdate to create a clone with no working directory.
1240 -U/--noupdate to create a clone with no working directory.
1241
1241
1242 .. container:: verbose
1242 .. container:: verbose
1243
1243
1244 For efficiency, hardlinks are used for cloning whenever the
1244 For efficiency, hardlinks are used for cloning whenever the
1245 source and destination are on the same filesystem (note this
1245 source and destination are on the same filesystem (note this
1246 applies only to the repository data, not to the working
1246 applies only to the repository data, not to the working
1247 directory). Some filesystems, such as AFS, implement hardlinking
1247 directory). Some filesystems, such as AFS, implement hardlinking
1248 incorrectly, but do not report errors. In these cases, use the
1248 incorrectly, but do not report errors. In these cases, use the
1249 --pull option to avoid hardlinking.
1249 --pull option to avoid hardlinking.
1250
1250
1251 In some cases, you can clone repositories and the working
1251 In some cases, you can clone repositories and the working
1252 directory using full hardlinks with ::
1252 directory using full hardlinks with ::
1253
1253
1254 $ cp -al REPO REPOCLONE
1254 $ cp -al REPO REPOCLONE
1255
1255
1256 This is the fastest way to clone, but it is not always safe. The
1256 This is the fastest way to clone, but it is not always safe. The
1257 operation is not atomic (making sure REPO is not modified during
1257 operation is not atomic (making sure REPO is not modified during
1258 the operation is up to you) and you have to make sure your
1258 the operation is up to you) and you have to make sure your
1259 editor breaks hardlinks (Emacs and most Linux Kernel tools do
1259 editor breaks hardlinks (Emacs and most Linux Kernel tools do
1260 so). Also, this is not compatible with certain extensions that
1260 so). Also, this is not compatible with certain extensions that
1261 place their metadata under the .hg directory, such as mq.
1261 place their metadata under the .hg directory, such as mq.
1262
1262
1263 Mercurial will update the working directory to the first applicable
1263 Mercurial will update the working directory to the first applicable
1264 revision from this list:
1264 revision from this list:
1265
1265
1266 a) null if -U or the source repository has no changesets
1266 a) null if -U or the source repository has no changesets
1267 b) if -u . and the source repository is local, the first parent of
1267 b) if -u . and the source repository is local, the first parent of
1268 the source repository's working directory
1268 the source repository's working directory
1269 c) the changeset specified with -u (if a branch name, this means the
1269 c) the changeset specified with -u (if a branch name, this means the
1270 latest head of that branch)
1270 latest head of that branch)
1271 d) the changeset specified with -r
1271 d) the changeset specified with -r
1272 e) the tipmost head specified with -b
1272 e) the tipmost head specified with -b
1273 f) the tipmost head specified with the url#branch source syntax
1273 f) the tipmost head specified with the url#branch source syntax
1274 g) the revision marked with the '@' bookmark, if present
1274 g) the revision marked with the '@' bookmark, if present
1275 h) the tipmost head of the default branch
1275 h) the tipmost head of the default branch
1276 i) tip
1276 i) tip
1277
1277
1278 Examples:
1278 Examples:
1279
1279
1280 - clone a remote repository to a new directory named hg/::
1280 - clone a remote repository to a new directory named hg/::
1281
1281
1282 hg clone http://selenic.com/hg
1282 hg clone http://selenic.com/hg
1283
1283
1284 - create a lightweight local clone::
1284 - create a lightweight local clone::
1285
1285
1286 hg clone project/ project-feature/
1286 hg clone project/ project-feature/
1287
1287
1288 - clone from an absolute path on an ssh server (note double-slash)::
1288 - clone from an absolute path on an ssh server (note double-slash)::
1289
1289
1290 hg clone ssh://user@server//home/projects/alpha/
1290 hg clone ssh://user@server//home/projects/alpha/
1291
1291
1292 - do a high-speed clone over a LAN while checking out a
1292 - do a high-speed clone over a LAN while checking out a
1293 specified version::
1293 specified version::
1294
1294
1295 hg clone --uncompressed http://server/repo -u 1.5
1295 hg clone --uncompressed http://server/repo -u 1.5
1296
1296
1297 - create a repository without changesets after a particular revision::
1297 - create a repository without changesets after a particular revision::
1298
1298
1299 hg clone -r 04e544 experimental/ good/
1299 hg clone -r 04e544 experimental/ good/
1300
1300
1301 - clone (and track) a particular named branch::
1301 - clone (and track) a particular named branch::
1302
1302
1303 hg clone http://selenic.com/hg#stable
1303 hg clone http://selenic.com/hg#stable
1304
1304
1305 See :hg:`help urls` for details on specifying URLs.
1305 See :hg:`help urls` for details on specifying URLs.
1306
1306
1307 Returns 0 on success.
1307 Returns 0 on success.
1308 """
1308 """
1309 if opts.get('noupdate') and opts.get('updaterev'):
1309 if opts.get('noupdate') and opts.get('updaterev'):
1310 raise util.Abort(_("cannot specify both --noupdate and --updaterev"))
1310 raise util.Abort(_("cannot specify both --noupdate and --updaterev"))
1311
1311
1312 r = hg.clone(ui, opts, source, dest,
1312 r = hg.clone(ui, opts, source, dest,
1313 pull=opts.get('pull'),
1313 pull=opts.get('pull'),
1314 stream=opts.get('uncompressed'),
1314 stream=opts.get('uncompressed'),
1315 rev=opts.get('rev'),
1315 rev=opts.get('rev'),
1316 update=opts.get('updaterev') or not opts.get('noupdate'),
1316 update=opts.get('updaterev') or not opts.get('noupdate'),
1317 branch=opts.get('branch'))
1317 branch=opts.get('branch'))
1318
1318
1319 return r is None
1319 return r is None
1320
1320
1321 @command('^commit|ci',
1321 @command('^commit|ci',
1322 [('A', 'addremove', None,
1322 [('A', 'addremove', None,
1323 _('mark new/missing files as added/removed before committing')),
1323 _('mark new/missing files as added/removed before committing')),
1324 ('', 'close-branch', None,
1324 ('', 'close-branch', None,
1325 _('mark a branch as closed, hiding it from the branch list')),
1325 _('mark a branch as closed, hiding it from the branch list')),
1326 ('', 'amend', None, _('amend the parent of the working dir')),
1326 ('', 'amend', None, _('amend the parent of the working dir')),
1327 ('s', 'secret', None, _('use the secret phase for committing')),
1327 ('s', 'secret', None, _('use the secret phase for committing')),
1328 ] + walkopts + commitopts + commitopts2 + subrepoopts,
1328 ] + walkopts + commitopts + commitopts2 + subrepoopts,
1329 _('[OPTION]... [FILE]...'))
1329 _('[OPTION]... [FILE]...'))
1330 def commit(ui, repo, *pats, **opts):
1330 def commit(ui, repo, *pats, **opts):
1331 """commit the specified files or all outstanding changes
1331 """commit the specified files or all outstanding changes
1332
1332
1333 Commit changes to the given files into the repository. Unlike a
1333 Commit changes to the given files into the repository. Unlike a
1334 centralized SCM, this operation is a local operation. See
1334 centralized SCM, this operation is a local operation. See
1335 :hg:`push` for a way to actively distribute your changes.
1335 :hg:`push` for a way to actively distribute your changes.
1336
1336
1337 If a list of files is omitted, all changes reported by :hg:`status`
1337 If a list of files is omitted, all changes reported by :hg:`status`
1338 will be committed.
1338 will be committed.
1339
1339
1340 If you are committing the result of a merge, do not provide any
1340 If you are committing the result of a merge, do not provide any
1341 filenames or -I/-X filters.
1341 filenames or -I/-X filters.
1342
1342
1343 If no commit message is specified, Mercurial starts your
1343 If no commit message is specified, Mercurial starts your
1344 configured editor where you can enter a message. In case your
1344 configured editor where you can enter a message. In case your
1345 commit fails, you will find a backup of your message in
1345 commit fails, you will find a backup of your message in
1346 ``.hg/last-message.txt``.
1346 ``.hg/last-message.txt``.
1347
1347
1348 The --amend flag can be used to amend the parent of the
1348 The --amend flag can be used to amend the parent of the
1349 working directory with a new commit that contains the changes
1349 working directory with a new commit that contains the changes
1350 in the parent in addition to those currently reported by :hg:`status`,
1350 in the parent in addition to those currently reported by :hg:`status`,
1351 if there are any. The old commit is stored in a backup bundle in
1351 if there are any. The old commit is stored in a backup bundle in
1352 ``.hg/strip-backup`` (see :hg:`help bundle` and :hg:`help unbundle`
1352 ``.hg/strip-backup`` (see :hg:`help bundle` and :hg:`help unbundle`
1353 on how to restore it).
1353 on how to restore it).
1354
1354
1355 Message, user and date are taken from the amended commit unless
1355 Message, user and date are taken from the amended commit unless
1356 specified. When a message isn't specified on the command line,
1356 specified. When a message isn't specified on the command line,
1357 the editor will open with the message of the amended commit.
1357 the editor will open with the message of the amended commit.
1358
1358
1359 It is not possible to amend public changesets (see :hg:`help phases`)
1359 It is not possible to amend public changesets (see :hg:`help phases`)
1360 or changesets that have children.
1360 or changesets that have children.
1361
1361
1362 See :hg:`help dates` for a list of formats valid for -d/--date.
1362 See :hg:`help dates` for a list of formats valid for -d/--date.
1363
1363
1364 Returns 0 on success, 1 if nothing changed.
1364 Returns 0 on success, 1 if nothing changed.
1365 """
1365 """
1366 if opts.get('subrepos'):
1366 if opts.get('subrepos'):
1367 if opts.get('amend'):
1367 if opts.get('amend'):
1368 raise util.Abort(_('cannot amend with --subrepos'))
1368 raise util.Abort(_('cannot amend with --subrepos'))
1369 # Let --subrepos on the command line override config setting.
1369 # Let --subrepos on the command line override config setting.
1370 ui.setconfig('ui', 'commitsubrepos', True, 'commit')
1370 ui.setconfig('ui', 'commitsubrepos', True, 'commit')
1371
1371
1372 # Save this for restoring it later
1372 # Save this for restoring it later
1373 oldcommitphase = ui.config('phases', 'new-commit')
1373 oldcommitphase = ui.config('phases', 'new-commit')
1374
1374
1375 cmdutil.checkunfinished(repo, commit=True)
1375 cmdutil.checkunfinished(repo, commit=True)
1376
1376
1377 branch = repo[None].branch()
1377 branch = repo[None].branch()
1378 bheads = repo.branchheads(branch)
1378 bheads = repo.branchheads(branch)
1379
1379
1380 extra = {}
1380 extra = {}
1381 if opts.get('close_branch'):
1381 if opts.get('close_branch'):
1382 extra['close'] = 1
1382 extra['close'] = 1
1383
1383
1384 if not bheads:
1384 if not bheads:
1385 raise util.Abort(_('can only close branch heads'))
1385 raise util.Abort(_('can only close branch heads'))
1386 elif opts.get('amend'):
1386 elif opts.get('amend'):
1387 if repo.parents()[0].p1().branch() != branch and \
1387 if repo.parents()[0].p1().branch() != branch and \
1388 repo.parents()[0].p2().branch() != branch:
1388 repo.parents()[0].p2().branch() != branch:
1389 raise util.Abort(_('can only close branch heads'))
1389 raise util.Abort(_('can only close branch heads'))
1390
1390
1391 if opts.get('amend'):
1391 if opts.get('amend'):
1392 if ui.configbool('ui', 'commitsubrepos'):
1392 if ui.configbool('ui', 'commitsubrepos'):
1393 raise util.Abort(_('cannot amend with ui.commitsubrepos enabled'))
1393 raise util.Abort(_('cannot amend with ui.commitsubrepos enabled'))
1394
1394
1395 old = repo['.']
1395 old = repo['.']
1396 if old.phase() == phases.public:
1396 if old.phase() == phases.public:
1397 raise util.Abort(_('cannot amend public changesets'))
1397 raise util.Abort(_('cannot amend public changesets'))
1398 if len(repo[None].parents()) > 1:
1398 if len(repo[None].parents()) > 1:
1399 raise util.Abort(_('cannot amend while merging'))
1399 raise util.Abort(_('cannot amend while merging'))
1400 if (not obsolete._enabled) and old.children():
1400 if (not obsolete._enabled) and old.children():
1401 raise util.Abort(_('cannot amend changeset with children'))
1401 raise util.Abort(_('cannot amend changeset with children'))
1402
1402
1403 e = cmdutil.commiteditor
1403 e = cmdutil.commiteditor
1404 if opts.get('force_editor'):
1404 if opts.get('force_editor'):
1405 e = cmdutil.commitforceeditor
1405 e = cmdutil.commitforceeditor
1406
1406
1407 # commitfunc is used only for temporary amend commit by cmdutil.amend
1407 # commitfunc is used only for temporary amend commit by cmdutil.amend
1408 def commitfunc(ui, repo, message, match, opts):
1408 def commitfunc(ui, repo, message, match, opts):
1409 editor = e
1409 editor = e
1410 # message contains text from -m or -l; if it's empty,
1410 # message contains text from -m or -l; if it's empty,
1411 # open the editor with the old message
1411 # open the editor with the old message
1412 if not message:
1412 if not message:
1413 message = old.description()
1413 message = old.description()
1414 editor = cmdutil.commitforceeditor
1414 editor = cmdutil.commitforceeditor
1415 return repo.commit(message,
1415 return repo.commit(message,
1416 opts.get('user') or old.user(),
1416 opts.get('user') or old.user(),
1417 opts.get('date') or old.date(),
1417 opts.get('date') or old.date(),
1418 match,
1418 match,
1419 editor=editor,
1419 editor=editor,
1420 extra=extra)
1420 extra=extra)
1421
1421
1422 current = repo._bookmarkcurrent
1422 current = repo._bookmarkcurrent
1423 marks = old.bookmarks()
1423 marks = old.bookmarks()
1424 node = cmdutil.amend(ui, repo, commitfunc, old, extra, pats, opts)
1424 node = cmdutil.amend(ui, repo, commitfunc, old, extra, pats, opts)
1425 if node == old.node():
1425 if node == old.node():
1426 ui.status(_("nothing changed\n"))
1426 ui.status(_("nothing changed\n"))
1427 return 1
1427 return 1
1428 elif marks:
1428 elif marks:
1429 ui.debug('moving bookmarks %r from %s to %s\n' %
1429 ui.debug('moving bookmarks %r from %s to %s\n' %
1430 (marks, old.hex(), hex(node)))
1430 (marks, old.hex(), hex(node)))
1431 newmarks = repo._bookmarks
1431 newmarks = repo._bookmarks
1432 for bm in marks:
1432 for bm in marks:
1433 newmarks[bm] = node
1433 newmarks[bm] = node
1434 if bm == current:
1434 if bm == current:
1435 bookmarks.setcurrent(repo, bm)
1435 bookmarks.setcurrent(repo, bm)
1436 newmarks.write()
1436 newmarks.write()
1437 else:
1437 else:
1438 e = cmdutil.commiteditor
1438 e = cmdutil.commiteditor
1439 if opts.get('force_editor'):
1439 if opts.get('force_editor'):
1440 e = cmdutil.commitforceeditor
1440 e = cmdutil.commitforceeditor
1441
1441
1442 def commitfunc(ui, repo, message, match, opts):
1442 def commitfunc(ui, repo, message, match, opts):
1443 try:
1443 try:
1444 if opts.get('secret'):
1444 if opts.get('secret'):
1445 ui.setconfig('phases', 'new-commit', 'secret', 'commit')
1445 ui.setconfig('phases', 'new-commit', 'secret', 'commit')
1446 # Propagate to subrepos
1446 # Propagate to subrepos
1447 repo.baseui.setconfig('phases', 'new-commit', 'secret',
1447 repo.baseui.setconfig('phases', 'new-commit', 'secret',
1448 'commit')
1448 'commit')
1449
1449
1450 return repo.commit(message, opts.get('user'), opts.get('date'),
1450 return repo.commit(message, opts.get('user'), opts.get('date'),
1451 match, editor=e, extra=extra)
1451 match, editor=e, extra=extra)
1452 finally:
1452 finally:
1453 ui.setconfig('phases', 'new-commit', oldcommitphase, 'commit')
1453 ui.setconfig('phases', 'new-commit', oldcommitphase, 'commit')
1454 repo.baseui.setconfig('phases', 'new-commit', oldcommitphase,
1454 repo.baseui.setconfig('phases', 'new-commit', oldcommitphase,
1455 'commit')
1455 'commit')
1456
1456
1457
1457
1458 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
1458 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
1459
1459
1460 if not node:
1460 if not node:
1461 stat = repo.status(match=scmutil.match(repo[None], pats, opts))
1461 stat = repo.status(match=scmutil.match(repo[None], pats, opts))
1462 if stat[3]:
1462 if stat[3]:
1463 ui.status(_("nothing changed (%d missing files, see "
1463 ui.status(_("nothing changed (%d missing files, see "
1464 "'hg status')\n") % len(stat[3]))
1464 "'hg status')\n") % len(stat[3]))
1465 else:
1465 else:
1466 ui.status(_("nothing changed\n"))
1466 ui.status(_("nothing changed\n"))
1467 return 1
1467 return 1
1468
1468
1469 cmdutil.commitstatus(repo, node, branch, bheads, opts)
1469 cmdutil.commitstatus(repo, node, branch, bheads, opts)
1470
1470
1471 @command('config|showconfig|debugconfig',
1471 @command('config|showconfig|debugconfig',
1472 [('u', 'untrusted', None, _('show untrusted configuration options')),
1472 [('u', 'untrusted', None, _('show untrusted configuration options')),
1473 ('e', 'edit', None, _('edit user config')),
1473 ('e', 'edit', None, _('edit user config')),
1474 ('l', 'local', None, _('edit repository config')),
1474 ('l', 'local', None, _('edit repository config')),
1475 ('g', 'global', None, _('edit global config'))],
1475 ('g', 'global', None, _('edit global config'))],
1476 _('[-u] [NAME]...'))
1476 _('[-u] [NAME]...'))
1477 def config(ui, repo, *values, **opts):
1477 def config(ui, repo, *values, **opts):
1478 """show combined config settings from all hgrc files
1478 """show combined config settings from all hgrc files
1479
1479
1480 With no arguments, print names and values of all config items.
1480 With no arguments, print names and values of all config items.
1481
1481
1482 With one argument of the form section.name, print just the value
1482 With one argument of the form section.name, print just the value
1483 of that config item.
1483 of that config item.
1484
1484
1485 With multiple arguments, print names and values of all config
1485 With multiple arguments, print names and values of all config
1486 items with matching section names.
1486 items with matching section names.
1487
1487
1488 With --edit, start an editor on the user-level config file. With
1488 With --edit, start an editor on the user-level config file. With
1489 --global, edit the system-wide config file. With --local, edit the
1489 --global, edit the system-wide config file. With --local, edit the
1490 repository-level config file.
1490 repository-level config file.
1491
1491
1492 With --debug, the source (filename and line number) is printed
1492 With --debug, the source (filename and line number) is printed
1493 for each config item.
1493 for each config item.
1494
1494
1495 See :hg:`help config` for more information about config files.
1495 See :hg:`help config` for more information about config files.
1496
1496
1497 Returns 0 on success.
1497 Returns 0 on success.
1498
1498
1499 """
1499 """
1500
1500
1501 if opts.get('edit') or opts.get('local') or opts.get('global'):
1501 if opts.get('edit') or opts.get('local') or opts.get('global'):
1502 if opts.get('local') and opts.get('global'):
1502 if opts.get('local') and opts.get('global'):
1503 raise util.Abort(_("can't use --local and --global together"))
1503 raise util.Abort(_("can't use --local and --global together"))
1504
1504
1505 if opts.get('local'):
1505 if opts.get('local'):
1506 if not repo:
1506 if not repo:
1507 raise util.Abort(_("can't use --local outside a repository"))
1507 raise util.Abort(_("can't use --local outside a repository"))
1508 paths = [repo.join('hgrc')]
1508 paths = [repo.join('hgrc')]
1509 elif opts.get('global'):
1509 elif opts.get('global'):
1510 paths = scmutil.systemrcpath()
1510 paths = scmutil.systemrcpath()
1511 else:
1511 else:
1512 paths = scmutil.userrcpath()
1512 paths = scmutil.userrcpath()
1513
1513
1514 for f in paths:
1514 for f in paths:
1515 if os.path.exists(f):
1515 if os.path.exists(f):
1516 break
1516 break
1517 else:
1517 else:
1518 f = paths[0]
1518 f = paths[0]
1519 fp = open(f, "w")
1519 fp = open(f, "w")
1520 fp.write(
1520 fp.write(
1521 '# example config (see "hg help config" for more info)\n'
1521 '# example config (see "hg help config" for more info)\n'
1522 '\n'
1522 '\n'
1523 '[ui]\n'
1523 '[ui]\n'
1524 '# name and email, e.g.\n'
1524 '# name and email, e.g.\n'
1525 '# username = Jane Doe <jdoe@example.com>\n'
1525 '# username = Jane Doe <jdoe@example.com>\n'
1526 'username =\n'
1526 'username =\n'
1527 '\n'
1527 '\n'
1528 '[extensions]\n'
1528 '[extensions]\n'
1529 '# uncomment these lines to enable some popular extensions\n'
1529 '# uncomment these lines to enable some popular extensions\n'
1530 '# (see "hg help extensions" for more info)\n'
1530 '# (see "hg help extensions" for more info)\n'
1531 '# pager =\n'
1531 '# pager =\n'
1532 '# progress =\n'
1532 '# progress =\n'
1533 '# color =\n')
1533 '# color =\n')
1534 fp.close()
1534 fp.close()
1535
1535
1536 editor = ui.geteditor()
1536 editor = ui.geteditor()
1537 util.system("%s \"%s\"" % (editor, f),
1537 util.system("%s \"%s\"" % (editor, f),
1538 onerr=util.Abort, errprefix=_("edit failed"),
1538 onerr=util.Abort, errprefix=_("edit failed"),
1539 out=ui.fout)
1539 out=ui.fout)
1540 return
1540 return
1541
1541
1542 for f in scmutil.rcpath():
1542 for f in scmutil.rcpath():
1543 ui.debug('read config from: %s\n' % f)
1543 ui.debug('read config from: %s\n' % f)
1544 untrusted = bool(opts.get('untrusted'))
1544 untrusted = bool(opts.get('untrusted'))
1545 if values:
1545 if values:
1546 sections = [v for v in values if '.' not in v]
1546 sections = [v for v in values if '.' not in v]
1547 items = [v for v in values if '.' in v]
1547 items = [v for v in values if '.' in v]
1548 if len(items) > 1 or items and sections:
1548 if len(items) > 1 or items and sections:
1549 raise util.Abort(_('only one config item permitted'))
1549 raise util.Abort(_('only one config item permitted'))
1550 for section, name, value in ui.walkconfig(untrusted=untrusted):
1550 for section, name, value in ui.walkconfig(untrusted=untrusted):
1551 value = str(value).replace('\n', '\\n')
1551 value = str(value).replace('\n', '\\n')
1552 sectname = section + '.' + name
1552 sectname = section + '.' + name
1553 if values:
1553 if values:
1554 for v in values:
1554 for v in values:
1555 if v == section:
1555 if v == section:
1556 ui.debug('%s: ' %
1556 ui.debug('%s: ' %
1557 ui.configsource(section, name, untrusted))
1557 ui.configsource(section, name, untrusted))
1558 ui.write('%s=%s\n' % (sectname, value))
1558 ui.write('%s=%s\n' % (sectname, value))
1559 elif v == sectname:
1559 elif v == sectname:
1560 ui.debug('%s: ' %
1560 ui.debug('%s: ' %
1561 ui.configsource(section, name, untrusted))
1561 ui.configsource(section, name, untrusted))
1562 ui.write(value, '\n')
1562 ui.write(value, '\n')
1563 else:
1563 else:
1564 ui.debug('%s: ' %
1564 ui.debug('%s: ' %
1565 ui.configsource(section, name, untrusted))
1565 ui.configsource(section, name, untrusted))
1566 ui.write('%s=%s\n' % (sectname, value))
1566 ui.write('%s=%s\n' % (sectname, value))
1567
1567
1568 @command('copy|cp',
1568 @command('copy|cp',
1569 [('A', 'after', None, _('record a copy that has already occurred')),
1569 [('A', 'after', None, _('record a copy that has already occurred')),
1570 ('f', 'force', None, _('forcibly copy over an existing managed file')),
1570 ('f', 'force', None, _('forcibly copy over an existing managed file')),
1571 ] + walkopts + dryrunopts,
1571 ] + walkopts + dryrunopts,
1572 _('[OPTION]... [SOURCE]... DEST'))
1572 _('[OPTION]... [SOURCE]... DEST'))
1573 def copy(ui, repo, *pats, **opts):
1573 def copy(ui, repo, *pats, **opts):
1574 """mark files as copied for the next commit
1574 """mark files as copied for the next commit
1575
1575
1576 Mark dest as having copies of source files. If dest is a
1576 Mark dest as having copies of source files. If dest is a
1577 directory, copies are put in that directory. If dest is a file,
1577 directory, copies are put in that directory. If dest is a file,
1578 the source must be a single file.
1578 the source must be a single file.
1579
1579
1580 By default, this command copies the contents of files as they
1580 By default, this command copies the contents of files as they
1581 exist in the working directory. If invoked with -A/--after, the
1581 exist in the working directory. If invoked with -A/--after, the
1582 operation is recorded, but no copying is performed.
1582 operation is recorded, but no copying is performed.
1583
1583
1584 This command takes effect with the next commit. To undo a copy
1584 This command takes effect with the next commit. To undo a copy
1585 before that, see :hg:`revert`.
1585 before that, see :hg:`revert`.
1586
1586
1587 Returns 0 on success, 1 if errors are encountered.
1587 Returns 0 on success, 1 if errors are encountered.
1588 """
1588 """
1589 wlock = repo.wlock(False)
1589 wlock = repo.wlock(False)
1590 try:
1590 try:
1591 return cmdutil.copy(ui, repo, pats, opts)
1591 return cmdutil.copy(ui, repo, pats, opts)
1592 finally:
1592 finally:
1593 wlock.release()
1593 wlock.release()
1594
1594
1595 @command('debugancestor', [], _('[INDEX] REV1 REV2'))
1595 @command('debugancestor', [], _('[INDEX] REV1 REV2'))
1596 def debugancestor(ui, repo, *args):
1596 def debugancestor(ui, repo, *args):
1597 """find the ancestor revision of two revisions in a given index"""
1597 """find the ancestor revision of two revisions in a given index"""
1598 if len(args) == 3:
1598 if len(args) == 3:
1599 index, rev1, rev2 = args
1599 index, rev1, rev2 = args
1600 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), index)
1600 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), index)
1601 lookup = r.lookup
1601 lookup = r.lookup
1602 elif len(args) == 2:
1602 elif len(args) == 2:
1603 if not repo:
1603 if not repo:
1604 raise util.Abort(_("there is no Mercurial repository here "
1604 raise util.Abort(_("there is no Mercurial repository here "
1605 "(.hg not found)"))
1605 "(.hg not found)"))
1606 rev1, rev2 = args
1606 rev1, rev2 = args
1607 r = repo.changelog
1607 r = repo.changelog
1608 lookup = repo.lookup
1608 lookup = repo.lookup
1609 else:
1609 else:
1610 raise util.Abort(_('either two or three arguments required'))
1610 raise util.Abort(_('either two or three arguments required'))
1611 a = r.ancestor(lookup(rev1), lookup(rev2))
1611 a = r.ancestor(lookup(rev1), lookup(rev2))
1612 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1612 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1613
1613
1614 @command('debugbuilddag',
1614 @command('debugbuilddag',
1615 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
1615 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
1616 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
1616 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
1617 ('n', 'new-file', None, _('add new file at each rev'))],
1617 ('n', 'new-file', None, _('add new file at each rev'))],
1618 _('[OPTION]... [TEXT]'))
1618 _('[OPTION]... [TEXT]'))
1619 def debugbuilddag(ui, repo, text=None,
1619 def debugbuilddag(ui, repo, text=None,
1620 mergeable_file=False,
1620 mergeable_file=False,
1621 overwritten_file=False,
1621 overwritten_file=False,
1622 new_file=False):
1622 new_file=False):
1623 """builds a repo with a given DAG from scratch in the current empty repo
1623 """builds a repo with a given DAG from scratch in the current empty repo
1624
1624
1625 The description of the DAG is read from stdin if not given on the
1625 The description of the DAG is read from stdin if not given on the
1626 command line.
1626 command line.
1627
1627
1628 Elements:
1628 Elements:
1629
1629
1630 - "+n" is a linear run of n nodes based on the current default parent
1630 - "+n" is a linear run of n nodes based on the current default parent
1631 - "." is a single node based on the current default parent
1631 - "." is a single node based on the current default parent
1632 - "$" resets the default parent to null (implied at the start);
1632 - "$" resets the default parent to null (implied at the start);
1633 otherwise the default parent is always the last node created
1633 otherwise the default parent is always the last node created
1634 - "<p" sets the default parent to the backref p
1634 - "<p" sets the default parent to the backref p
1635 - "*p" is a fork at parent p, which is a backref
1635 - "*p" is a fork at parent p, which is a backref
1636 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
1636 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
1637 - "/p2" is a merge of the preceding node and p2
1637 - "/p2" is a merge of the preceding node and p2
1638 - ":tag" defines a local tag for the preceding node
1638 - ":tag" defines a local tag for the preceding node
1639 - "@branch" sets the named branch for subsequent nodes
1639 - "@branch" sets the named branch for subsequent nodes
1640 - "#...\\n" is a comment up to the end of the line
1640 - "#...\\n" is a comment up to the end of the line
1641
1641
1642 Whitespace between the above elements is ignored.
1642 Whitespace between the above elements is ignored.
1643
1643
1644 A backref is either
1644 A backref is either
1645
1645
1646 - a number n, which references the node curr-n, where curr is the current
1646 - a number n, which references the node curr-n, where curr is the current
1647 node, or
1647 node, or
1648 - the name of a local tag you placed earlier using ":tag", or
1648 - the name of a local tag you placed earlier using ":tag", or
1649 - empty to denote the default parent.
1649 - empty to denote the default parent.
1650
1650
1651 All string-valued elements are either strictly alphanumeric, or must
1651 All string-valued elements are either strictly alphanumeric, or must
1652 be enclosed in double quotes ("..."), with "\\" as escape character.
1652 be enclosed in double quotes ("..."), with "\\" as escape character.
1653 """
1653 """
1654
1654
1655 if text is None:
1655 if text is None:
1656 ui.status(_("reading DAG from stdin\n"))
1656 ui.status(_("reading DAG from stdin\n"))
1657 text = ui.fin.read()
1657 text = ui.fin.read()
1658
1658
1659 cl = repo.changelog
1659 cl = repo.changelog
1660 if len(cl) > 0:
1660 if len(cl) > 0:
1661 raise util.Abort(_('repository is not empty'))
1661 raise util.Abort(_('repository is not empty'))
1662
1662
1663 # determine number of revs in DAG
1663 # determine number of revs in DAG
1664 total = 0
1664 total = 0
1665 for type, data in dagparser.parsedag(text):
1665 for type, data in dagparser.parsedag(text):
1666 if type == 'n':
1666 if type == 'n':
1667 total += 1
1667 total += 1
1668
1668
1669 if mergeable_file:
1669 if mergeable_file:
1670 linesperrev = 2
1670 linesperrev = 2
1671 # make a file with k lines per rev
1671 # make a file with k lines per rev
1672 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
1672 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
1673 initialmergedlines.append("")
1673 initialmergedlines.append("")
1674
1674
1675 tags = []
1675 tags = []
1676
1676
1677 lock = tr = None
1677 lock = tr = None
1678 try:
1678 try:
1679 lock = repo.lock()
1679 lock = repo.lock()
1680 tr = repo.transaction("builddag")
1680 tr = repo.transaction("builddag")
1681
1681
1682 at = -1
1682 at = -1
1683 atbranch = 'default'
1683 atbranch = 'default'
1684 nodeids = []
1684 nodeids = []
1685 id = 0
1685 id = 0
1686 ui.progress(_('building'), id, unit=_('revisions'), total=total)
1686 ui.progress(_('building'), id, unit=_('revisions'), total=total)
1687 for type, data in dagparser.parsedag(text):
1687 for type, data in dagparser.parsedag(text):
1688 if type == 'n':
1688 if type == 'n':
1689 ui.note(('node %s\n' % str(data)))
1689 ui.note(('node %s\n' % str(data)))
1690 id, ps = data
1690 id, ps = data
1691
1691
1692 files = []
1692 files = []
1693 fctxs = {}
1693 fctxs = {}
1694
1694
1695 p2 = None
1695 p2 = None
1696 if mergeable_file:
1696 if mergeable_file:
1697 fn = "mf"
1697 fn = "mf"
1698 p1 = repo[ps[0]]
1698 p1 = repo[ps[0]]
1699 if len(ps) > 1:
1699 if len(ps) > 1:
1700 p2 = repo[ps[1]]
1700 p2 = repo[ps[1]]
1701 pa = p1.ancestor(p2)
1701 pa = p1.ancestor(p2)
1702 base, local, other = [x[fn].data() for x in (pa, p1,
1702 base, local, other = [x[fn].data() for x in (pa, p1,
1703 p2)]
1703 p2)]
1704 m3 = simplemerge.Merge3Text(base, local, other)
1704 m3 = simplemerge.Merge3Text(base, local, other)
1705 ml = [l.strip() for l in m3.merge_lines()]
1705 ml = [l.strip() for l in m3.merge_lines()]
1706 ml.append("")
1706 ml.append("")
1707 elif at > 0:
1707 elif at > 0:
1708 ml = p1[fn].data().split("\n")
1708 ml = p1[fn].data().split("\n")
1709 else:
1709 else:
1710 ml = initialmergedlines
1710 ml = initialmergedlines
1711 ml[id * linesperrev] += " r%i" % id
1711 ml[id * linesperrev] += " r%i" % id
1712 mergedtext = "\n".join(ml)
1712 mergedtext = "\n".join(ml)
1713 files.append(fn)
1713 files.append(fn)
1714 fctxs[fn] = context.memfilectx(fn, mergedtext)
1714 fctxs[fn] = context.memfilectx(fn, mergedtext)
1715
1715
1716 if overwritten_file:
1716 if overwritten_file:
1717 fn = "of"
1717 fn = "of"
1718 files.append(fn)
1718 files.append(fn)
1719 fctxs[fn] = context.memfilectx(fn, "r%i\n" % id)
1719 fctxs[fn] = context.memfilectx(fn, "r%i\n" % id)
1720
1720
1721 if new_file:
1721 if new_file:
1722 fn = "nf%i" % id
1722 fn = "nf%i" % id
1723 files.append(fn)
1723 files.append(fn)
1724 fctxs[fn] = context.memfilectx(fn, "r%i\n" % id)
1724 fctxs[fn] = context.memfilectx(fn, "r%i\n" % id)
1725 if len(ps) > 1:
1725 if len(ps) > 1:
1726 if not p2:
1726 if not p2:
1727 p2 = repo[ps[1]]
1727 p2 = repo[ps[1]]
1728 for fn in p2:
1728 for fn in p2:
1729 if fn.startswith("nf"):
1729 if fn.startswith("nf"):
1730 files.append(fn)
1730 files.append(fn)
1731 fctxs[fn] = p2[fn]
1731 fctxs[fn] = p2[fn]
1732
1732
1733 def fctxfn(repo, cx, path):
1733 def fctxfn(repo, cx, path):
1734 return fctxs.get(path)
1734 return fctxs.get(path)
1735
1735
1736 if len(ps) == 0 or ps[0] < 0:
1736 if len(ps) == 0 or ps[0] < 0:
1737 pars = [None, None]
1737 pars = [None, None]
1738 elif len(ps) == 1:
1738 elif len(ps) == 1:
1739 pars = [nodeids[ps[0]], None]
1739 pars = [nodeids[ps[0]], None]
1740 else:
1740 else:
1741 pars = [nodeids[p] for p in ps]
1741 pars = [nodeids[p] for p in ps]
1742 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
1742 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
1743 date=(id, 0),
1743 date=(id, 0),
1744 user="debugbuilddag",
1744 user="debugbuilddag",
1745 extra={'branch': atbranch})
1745 extra={'branch': atbranch})
1746 nodeid = repo.commitctx(cx)
1746 nodeid = repo.commitctx(cx)
1747 nodeids.append(nodeid)
1747 nodeids.append(nodeid)
1748 at = id
1748 at = id
1749 elif type == 'l':
1749 elif type == 'l':
1750 id, name = data
1750 id, name = data
1751 ui.note(('tag %s\n' % name))
1751 ui.note(('tag %s\n' % name))
1752 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
1752 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
1753 elif type == 'a':
1753 elif type == 'a':
1754 ui.note(('branch %s\n' % data))
1754 ui.note(('branch %s\n' % data))
1755 atbranch = data
1755 atbranch = data
1756 ui.progress(_('building'), id, unit=_('revisions'), total=total)
1756 ui.progress(_('building'), id, unit=_('revisions'), total=total)
1757 tr.close()
1757 tr.close()
1758
1758
1759 if tags:
1759 if tags:
1760 repo.opener.write("localtags", "".join(tags))
1760 repo.opener.write("localtags", "".join(tags))
1761 finally:
1761 finally:
1762 ui.progress(_('building'), None)
1762 ui.progress(_('building'), None)
1763 release(tr, lock)
1763 release(tr, lock)
1764
1764
1765 @command('debugbundle', [('a', 'all', None, _('show all details'))], _('FILE'))
1765 @command('debugbundle', [('a', 'all', None, _('show all details'))], _('FILE'))
1766 def debugbundle(ui, bundlepath, all=None, **opts):
1766 def debugbundle(ui, bundlepath, all=None, **opts):
1767 """lists the contents of a bundle"""
1767 """lists the contents of a bundle"""
1768 f = hg.openpath(ui, bundlepath)
1768 f = hg.openpath(ui, bundlepath)
1769 try:
1769 try:
1770 gen = changegroup.readbundle(f, bundlepath)
1770 gen = changegroup.readbundle(f, bundlepath)
1771 if all:
1771 if all:
1772 ui.write(("format: id, p1, p2, cset, delta base, len(delta)\n"))
1772 ui.write(("format: id, p1, p2, cset, delta base, len(delta)\n"))
1773
1773
1774 def showchunks(named):
1774 def showchunks(named):
1775 ui.write("\n%s\n" % named)
1775 ui.write("\n%s\n" % named)
1776 chain = None
1776 chain = None
1777 while True:
1777 while True:
1778 chunkdata = gen.deltachunk(chain)
1778 chunkdata = gen.deltachunk(chain)
1779 if not chunkdata:
1779 if not chunkdata:
1780 break
1780 break
1781 node = chunkdata['node']
1781 node = chunkdata['node']
1782 p1 = chunkdata['p1']
1782 p1 = chunkdata['p1']
1783 p2 = chunkdata['p2']
1783 p2 = chunkdata['p2']
1784 cs = chunkdata['cs']
1784 cs = chunkdata['cs']
1785 deltabase = chunkdata['deltabase']
1785 deltabase = chunkdata['deltabase']
1786 delta = chunkdata['delta']
1786 delta = chunkdata['delta']
1787 ui.write("%s %s %s %s %s %s\n" %
1787 ui.write("%s %s %s %s %s %s\n" %
1788 (hex(node), hex(p1), hex(p2),
1788 (hex(node), hex(p1), hex(p2),
1789 hex(cs), hex(deltabase), len(delta)))
1789 hex(cs), hex(deltabase), len(delta)))
1790 chain = node
1790 chain = node
1791
1791
1792 chunkdata = gen.changelogheader()
1792 chunkdata = gen.changelogheader()
1793 showchunks("changelog")
1793 showchunks("changelog")
1794 chunkdata = gen.manifestheader()
1794 chunkdata = gen.manifestheader()
1795 showchunks("manifest")
1795 showchunks("manifest")
1796 while True:
1796 while True:
1797 chunkdata = gen.filelogheader()
1797 chunkdata = gen.filelogheader()
1798 if not chunkdata:
1798 if not chunkdata:
1799 break
1799 break
1800 fname = chunkdata['filename']
1800 fname = chunkdata['filename']
1801 showchunks(fname)
1801 showchunks(fname)
1802 else:
1802 else:
1803 chunkdata = gen.changelogheader()
1803 chunkdata = gen.changelogheader()
1804 chain = None
1804 chain = None
1805 while True:
1805 while True:
1806 chunkdata = gen.deltachunk(chain)
1806 chunkdata = gen.deltachunk(chain)
1807 if not chunkdata:
1807 if not chunkdata:
1808 break
1808 break
1809 node = chunkdata['node']
1809 node = chunkdata['node']
1810 ui.write("%s\n" % hex(node))
1810 ui.write("%s\n" % hex(node))
1811 chain = node
1811 chain = node
1812 finally:
1812 finally:
1813 f.close()
1813 f.close()
1814
1814
1815 @command('debugcheckstate', [], '')
1815 @command('debugcheckstate', [], '')
1816 def debugcheckstate(ui, repo):
1816 def debugcheckstate(ui, repo):
1817 """validate the correctness of the current dirstate"""
1817 """validate the correctness of the current dirstate"""
1818 parent1, parent2 = repo.dirstate.parents()
1818 parent1, parent2 = repo.dirstate.parents()
1819 m1 = repo[parent1].manifest()
1819 m1 = repo[parent1].manifest()
1820 m2 = repo[parent2].manifest()
1820 m2 = repo[parent2].manifest()
1821 errors = 0
1821 errors = 0
1822 for f in repo.dirstate:
1822 for f in repo.dirstate:
1823 state = repo.dirstate[f]
1823 state = repo.dirstate[f]
1824 if state in "nr" and f not in m1:
1824 if state in "nr" and f not in m1:
1825 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1825 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1826 errors += 1
1826 errors += 1
1827 if state in "a" and f in m1:
1827 if state in "a" and f in m1:
1828 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1828 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1829 errors += 1
1829 errors += 1
1830 if state in "m" and f not in m1 and f not in m2:
1830 if state in "m" and f not in m1 and f not in m2:
1831 ui.warn(_("%s in state %s, but not in either manifest\n") %
1831 ui.warn(_("%s in state %s, but not in either manifest\n") %
1832 (f, state))
1832 (f, state))
1833 errors += 1
1833 errors += 1
1834 for f in m1:
1834 for f in m1:
1835 state = repo.dirstate[f]
1835 state = repo.dirstate[f]
1836 if state not in "nrm":
1836 if state not in "nrm":
1837 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
1837 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
1838 errors += 1
1838 errors += 1
1839 if errors:
1839 if errors:
1840 error = _(".hg/dirstate inconsistent with current parent's manifest")
1840 error = _(".hg/dirstate inconsistent with current parent's manifest")
1841 raise util.Abort(error)
1841 raise util.Abort(error)
1842
1842
1843 @command('debugcommands', [], _('[COMMAND]'))
1843 @command('debugcommands', [], _('[COMMAND]'))
1844 def debugcommands(ui, cmd='', *args):
1844 def debugcommands(ui, cmd='', *args):
1845 """list all available commands and options"""
1845 """list all available commands and options"""
1846 for cmd, vals in sorted(table.iteritems()):
1846 for cmd, vals in sorted(table.iteritems()):
1847 cmd = cmd.split('|')[0].strip('^')
1847 cmd = cmd.split('|')[0].strip('^')
1848 opts = ', '.join([i[1] for i in vals[1]])
1848 opts = ', '.join([i[1] for i in vals[1]])
1849 ui.write('%s: %s\n' % (cmd, opts))
1849 ui.write('%s: %s\n' % (cmd, opts))
1850
1850
1851 @command('debugcomplete',
1851 @command('debugcomplete',
1852 [('o', 'options', None, _('show the command options'))],
1852 [('o', 'options', None, _('show the command options'))],
1853 _('[-o] CMD'))
1853 _('[-o] CMD'))
1854 def debugcomplete(ui, cmd='', **opts):
1854 def debugcomplete(ui, cmd='', **opts):
1855 """returns the completion list associated with the given command"""
1855 """returns the completion list associated with the given command"""
1856
1856
1857 if opts.get('options'):
1857 if opts.get('options'):
1858 options = []
1858 options = []
1859 otables = [globalopts]
1859 otables = [globalopts]
1860 if cmd:
1860 if cmd:
1861 aliases, entry = cmdutil.findcmd(cmd, table, False)
1861 aliases, entry = cmdutil.findcmd(cmd, table, False)
1862 otables.append(entry[1])
1862 otables.append(entry[1])
1863 for t in otables:
1863 for t in otables:
1864 for o in t:
1864 for o in t:
1865 if "(DEPRECATED)" in o[3]:
1865 if "(DEPRECATED)" in o[3]:
1866 continue
1866 continue
1867 if o[0]:
1867 if o[0]:
1868 options.append('-%s' % o[0])
1868 options.append('-%s' % o[0])
1869 options.append('--%s' % o[1])
1869 options.append('--%s' % o[1])
1870 ui.write("%s\n" % "\n".join(options))
1870 ui.write("%s\n" % "\n".join(options))
1871 return
1871 return
1872
1872
1873 cmdlist = cmdutil.findpossible(cmd, table)
1873 cmdlist = cmdutil.findpossible(cmd, table)
1874 if ui.verbose:
1874 if ui.verbose:
1875 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
1875 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
1876 ui.write("%s\n" % "\n".join(sorted(cmdlist)))
1876 ui.write("%s\n" % "\n".join(sorted(cmdlist)))
1877
1877
1878 @command('debugdag',
1878 @command('debugdag',
1879 [('t', 'tags', None, _('use tags as labels')),
1879 [('t', 'tags', None, _('use tags as labels')),
1880 ('b', 'branches', None, _('annotate with branch names')),
1880 ('b', 'branches', None, _('annotate with branch names')),
1881 ('', 'dots', None, _('use dots for runs')),
1881 ('', 'dots', None, _('use dots for runs')),
1882 ('s', 'spaces', None, _('separate elements by spaces'))],
1882 ('s', 'spaces', None, _('separate elements by spaces'))],
1883 _('[OPTION]... [FILE [REV]...]'))
1883 _('[OPTION]... [FILE [REV]...]'))
1884 def debugdag(ui, repo, file_=None, *revs, **opts):
1884 def debugdag(ui, repo, file_=None, *revs, **opts):
1885 """format the changelog or an index DAG as a concise textual description
1885 """format the changelog or an index DAG as a concise textual description
1886
1886
1887 If you pass a revlog index, the revlog's DAG is emitted. If you list
1887 If you pass a revlog index, the revlog's DAG is emitted. If you list
1888 revision numbers, they get labeled in the output as rN.
1888 revision numbers, they get labeled in the output as rN.
1889
1889
1890 Otherwise, the changelog DAG of the current repo is emitted.
1890 Otherwise, the changelog DAG of the current repo is emitted.
1891 """
1891 """
1892 spaces = opts.get('spaces')
1892 spaces = opts.get('spaces')
1893 dots = opts.get('dots')
1893 dots = opts.get('dots')
1894 if file_:
1894 if file_:
1895 rlog = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_)
1895 rlog = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_)
1896 revs = set((int(r) for r in revs))
1896 revs = set((int(r) for r in revs))
1897 def events():
1897 def events():
1898 for r in rlog:
1898 for r in rlog:
1899 yield 'n', (r, list(set(p for p in rlog.parentrevs(r)
1899 yield 'n', (r, list(set(p for p in rlog.parentrevs(r)
1900 if p != -1)))
1900 if p != -1)))
1901 if r in revs:
1901 if r in revs:
1902 yield 'l', (r, "r%i" % r)
1902 yield 'l', (r, "r%i" % r)
1903 elif repo:
1903 elif repo:
1904 cl = repo.changelog
1904 cl = repo.changelog
1905 tags = opts.get('tags')
1905 tags = opts.get('tags')
1906 branches = opts.get('branches')
1906 branches = opts.get('branches')
1907 if tags:
1907 if tags:
1908 labels = {}
1908 labels = {}
1909 for l, n in repo.tags().items():
1909 for l, n in repo.tags().items():
1910 labels.setdefault(cl.rev(n), []).append(l)
1910 labels.setdefault(cl.rev(n), []).append(l)
1911 def events():
1911 def events():
1912 b = "default"
1912 b = "default"
1913 for r in cl:
1913 for r in cl:
1914 if branches:
1914 if branches:
1915 newb = cl.read(cl.node(r))[5]['branch']
1915 newb = cl.read(cl.node(r))[5]['branch']
1916 if newb != b:
1916 if newb != b:
1917 yield 'a', newb
1917 yield 'a', newb
1918 b = newb
1918 b = newb
1919 yield 'n', (r, list(set(p for p in cl.parentrevs(r)
1919 yield 'n', (r, list(set(p for p in cl.parentrevs(r)
1920 if p != -1)))
1920 if p != -1)))
1921 if tags:
1921 if tags:
1922 ls = labels.get(r)
1922 ls = labels.get(r)
1923 if ls:
1923 if ls:
1924 for l in ls:
1924 for l in ls:
1925 yield 'l', (r, l)
1925 yield 'l', (r, l)
1926 else:
1926 else:
1927 raise util.Abort(_('need repo for changelog dag'))
1927 raise util.Abort(_('need repo for changelog dag'))
1928
1928
1929 for line in dagparser.dagtextlines(events(),
1929 for line in dagparser.dagtextlines(events(),
1930 addspaces=spaces,
1930 addspaces=spaces,
1931 wraplabels=True,
1931 wraplabels=True,
1932 wrapannotations=True,
1932 wrapannotations=True,
1933 wrapnonlinear=dots,
1933 wrapnonlinear=dots,
1934 usedots=dots,
1934 usedots=dots,
1935 maxlinewidth=70):
1935 maxlinewidth=70):
1936 ui.write(line)
1936 ui.write(line)
1937 ui.write("\n")
1937 ui.write("\n")
1938
1938
1939 @command('debugdata',
1939 @command('debugdata',
1940 [('c', 'changelog', False, _('open changelog')),
1940 [('c', 'changelog', False, _('open changelog')),
1941 ('m', 'manifest', False, _('open manifest'))],
1941 ('m', 'manifest', False, _('open manifest'))],
1942 _('-c|-m|FILE REV'))
1942 _('-c|-m|FILE REV'))
1943 def debugdata(ui, repo, file_, rev=None, **opts):
1943 def debugdata(ui, repo, file_, rev=None, **opts):
1944 """dump the contents of a data file revision"""
1944 """dump the contents of a data file revision"""
1945 if opts.get('changelog') or opts.get('manifest'):
1945 if opts.get('changelog') or opts.get('manifest'):
1946 file_, rev = None, file_
1946 file_, rev = None, file_
1947 elif rev is None:
1947 elif rev is None:
1948 raise error.CommandError('debugdata', _('invalid arguments'))
1948 raise error.CommandError('debugdata', _('invalid arguments'))
1949 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
1949 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
1950 try:
1950 try:
1951 ui.write(r.revision(r.lookup(rev)))
1951 ui.write(r.revision(r.lookup(rev)))
1952 except KeyError:
1952 except KeyError:
1953 raise util.Abort(_('invalid revision identifier %s') % rev)
1953 raise util.Abort(_('invalid revision identifier %s') % rev)
1954
1954
1955 @command('debugdate',
1955 @command('debugdate',
1956 [('e', 'extended', None, _('try extended date formats'))],
1956 [('e', 'extended', None, _('try extended date formats'))],
1957 _('[-e] DATE [RANGE]'))
1957 _('[-e] DATE [RANGE]'))
1958 def debugdate(ui, date, range=None, **opts):
1958 def debugdate(ui, date, range=None, **opts):
1959 """parse and display a date"""
1959 """parse and display a date"""
1960 if opts["extended"]:
1960 if opts["extended"]:
1961 d = util.parsedate(date, util.extendeddateformats)
1961 d = util.parsedate(date, util.extendeddateformats)
1962 else:
1962 else:
1963 d = util.parsedate(date)
1963 d = util.parsedate(date)
1964 ui.write(("internal: %s %s\n") % d)
1964 ui.write(("internal: %s %s\n") % d)
1965 ui.write(("standard: %s\n") % util.datestr(d))
1965 ui.write(("standard: %s\n") % util.datestr(d))
1966 if range:
1966 if range:
1967 m = util.matchdate(range)
1967 m = util.matchdate(range)
1968 ui.write(("match: %s\n") % m(d[0]))
1968 ui.write(("match: %s\n") % m(d[0]))
1969
1969
1970 @command('debugdiscovery',
@command('debugdiscovery',
    [('', 'old', None, _('use old-style discovery')),
    ('', 'nonheads', None,
     _('use old-style discovery with non-heads included')),
    ] + remoteopts,
    _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
                                      opts.get('branch'))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(12323)

    def doit(localheads, remoteheads, remote=remote):
        if opts.get('old'):
            if localheads:
                raise util.Abort('cannot use localheads with old style '
                                 'discovery')
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                ui.write(("unpruned common: %s\n") %
                         " ".join(sorted(short(n) for n in common)))
                dag = dagutil.revlogdag(repo.changelog)
                all = dag.ancestorset(dag.internalizeall(common))
                common = dag.externalizeall(dag.headsetofconnecteds(all))
        else:
            common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
        common = set(common)
        rheads = set(hds)
        lheads = set(repo.heads())
        ui.write(("common heads: %s\n") %
                 " ".join(sorted(short(n) for n in common)))
        if lheads <= common:
            ui.write(("local is subset\n"))
        elif rheads <= common:
            ui.write(("remote is subset\n"))

    serverlogs = opts.get('serverlog')
    if serverlogs:
        for filename in serverlogs:
            logfile = open(filename, 'r')
            try:
                line = logfile.readline()
                while line:
                    parts = line.strip().split(';')
                    op = parts[1]
                    if op == 'cg':
                        pass
                    elif op == 'cgss':
                        doit(parts[2].split(' '), parts[3].split(' '))
                    elif op == 'unb':
                        doit(parts[3].split(' '), parts[2].split(' '))
                    line = logfile.readline()
            finally:
                logfile.close()

    else:
        remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
                                                 opts.get('remote_head'))
        localrevs = opts.get('local_head')
        doit(localrevs, remoterevs)

@command('debugfileset',
    [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
    _('[-r REV] FILESPEC'))
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    ctx = scmutil.revsingle(repo, opts.get('rev'), None)
    if ui.verbose:
        tree = fileset.parse(expr)[0]
        ui.note(tree, "\n")

    for f in ctx.getfileset(expr):
        ui.write("%s\n" % f)

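# Illustrative sketch (not part of the original file): how the fileset
# matching above can be exercised from the command line. The filenames
# shown are made up; the only claim is that debugfileset prints one
# matching path per line, as the ui.write() loop above does.
#
#     $ hg debugfileset "added()"
#     newfile.py
#     docs/notes.txt
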
@command('debugfsinfo', [], _('[PATH]'))
def debugfsinfo(ui, path="."):
    """show information detected about current filesystem"""
    util.writefile('.debugfsinfo', '')
    ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
    ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
    ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
    ui.write(('case-sensitive: %s\n') % (util.checkcase('.debugfsinfo')
                                         and 'yes' or 'no'))
    os.unlink('.debugfsinfo')

@command('debuggetbundle',
    [('H', 'head', [], _('id of head node'), _('ID')),
    ('C', 'common', [], _('id of common node'), _('ID')),
    ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
    _('REPO FILE [-H|-C ID]...'))
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable('getbundle'):
        raise util.Abort("getbundle() not supported by target repository")
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args['bundlecaps'] = None
    bundle = repo.getbundle('debug', **args)

    bundletype = opts.get('type', 'bzip2').lower()
    btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
    bundletype = btypes.get(bundletype)
    if bundletype not in changegroup.bundletypes:
        raise util.Abort(_('unknown bundle type specified with --type'))
    changegroup.writebundle(bundle, bundlepath, bundletype)

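# Illustrative sketch (assumption, not from the original file): the --type
# values accepted above map onto the bundle headers in the btypes dict, so a
# gzip-compressed bundle could be requested roughly like this. The repository
# path and output file name are placeholders.
#
#     $ hg debuggetbundle ../other-repo bundle.hg --type gzip
#     # writes an HG10GZ bundle to bundle.hg via changegroup.writebundle()
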
@command('debugignore', [], '')
def debugignore(ui, repo, *values, **opts):
    """display the combined ignore pattern"""
    ignore = repo.dirstate._ignore
    includepat = getattr(ignore, 'includepat', None)
    if includepat is not None:
        ui.write("%s\n" % includepat)
    else:
        raise util.Abort(_("no ignore patterns found"))

@command('debugindex',
    [('c', 'changelog', False, _('open changelog')),
    ('m', 'manifest', False, _('open manifest')),
    ('f', 'format', 0, _('revlog format'), _('FORMAT'))],
    _('[-f FORMAT] -c|-m|FILE'))
def debugindex(ui, repo, file_=None, **opts):
    """dump the contents of an index file"""
    r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
    format = opts.get('format', 0)
    if format not in (0, 1):
        raise util.Abort(_("unknown format %d") % format)

    generaldelta = r.version & revlog.REVLOGGENERALDELTA
    if generaldelta:
        basehdr = ' delta'
    else:
        basehdr = ' base'

    if format == 0:
        ui.write(" rev offset length " + basehdr + " linkrev"
                 " nodeid p1 p2\n")
    elif format == 1:
        ui.write(" rev flag offset length"
                 " size " + basehdr + " link p1 p2"
                 " nodeid\n")

    for i in r:
        node = r.node(i)
        if generaldelta:
            base = r.deltaparent(i)
        else:
            base = r.chainbase(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                pp = [nullid, nullid]
            ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                    i, r.start(i), r.length(i), base, r.linkrev(i),
                    short(node), short(pp[0]), short(pp[1])))
        elif format == 1:
            pr = r.parentrevs(i)
            ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
                    base, r.linkrev(i), pr[0], pr[1], short(node)))

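# Illustrative sketch (values invented): with the default format 0, each
# revision is printed on one line using the columns named in the header
# written above (rev, offset, length, base/delta, linkrev, nodeid, p1, p2).
#
#     $ hg debugindex -c
#     0 0 64 0 0 <nodeid> <p1> <p2>
#     1 64 68 1 1 <nodeid> <p1> <p2>
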
@command('debugindexdot', [], _('FILE'))
def debugindexdot(ui, repo, file_):
    """dump an index DAG as a graphviz dot file"""
    r = None
    if repo:
        filelog = repo.file(file_)
        if len(filelog):
            r = filelog
    if not r:
        r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_)
    ui.write(("digraph G {\n"))
    for i in r:
        node = r.node(i)
        pp = r.parents(node)
        ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
        if pp[1] != nullid:
            ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
    ui.write("}\n")

@command('debuginstall', [], '')
def debuginstall(ui):
    '''test Mercurial installation

    Returns 0 on success.
    '''

    def writetemp(contents):
        (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
        f = os.fdopen(fd, "wb")
        f.write(contents)
        f.close()
        return name

    problems = 0

    # encoding
    ui.status(_("checking encoding (%s)...\n") % encoding.encoding)
    try:
        encoding.fromlocal("test")
    except util.Abort, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" (check that your locale is properly set)\n"))
        problems += 1

    # Python
    ui.status(_("checking Python executable (%s)\n") % sys.executable)
    ui.status(_("checking Python version (%s)\n")
              % ("%s.%s.%s" % sys.version_info[:3]))
    ui.status(_("checking Python lib (%s)...\n")
              % os.path.dirname(os.__file__))

    # compiled modules
    ui.status(_("checking installed modules (%s)...\n")
              % os.path.dirname(__file__))
    try:
        import bdiff, mpatch, base85, osutil
        dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
    except Exception, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" One or more extensions could not be found"))
        ui.write(_(" (check that you compiled the extensions)\n"))
        problems += 1

    # templates
    import templater
    p = templater.templatepath()
    ui.status(_("checking templates (%s)...\n") % ' '.join(p))
    if p:
        m = templater.templatepath("map-cmdline.default")
        if m:
            # template found, check if it is working
            try:
                templater.templater(m)
            except Exception, inst:
                ui.write(" %s\n" % inst)
                p = None
        else:
            ui.write(_(" template 'default' not found\n"))
            p = None
    else:
        ui.write(_(" no template directories found\n"))
    if not p:
        ui.write(_(" (templates seem to have been installed incorrectly)\n"))
        problems += 1

    # editor
    ui.status(_("checking commit editor...\n"))
    editor = ui.geteditor()
    cmdpath = util.findexe(editor) or util.findexe(editor.split()[0])
    if not cmdpath:
        if editor == 'vi':
            ui.write(_(" No commit editor set and can't find vi in PATH\n"))
            ui.write(_(" (specify a commit editor in your configuration"
                       " file)\n"))
        else:
            ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
            ui.write(_(" (specify a commit editor in your configuration"
                       " file)\n"))
        problems += 1

    # check username
    ui.status(_("checking username...\n"))
    try:
        ui.username()
    except util.Abort, e:
        ui.write(" %s\n" % e)
        ui.write(_(" (specify a username in your configuration file)\n"))
        problems += 1

    if not problems:
        ui.status(_("no problems detected\n"))
    else:
        ui.write(_("%s problems detected,"
                   " please check your install!\n") % problems)

    return problems

@command('debugknown', [], _('REPO ID...'))
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable('known'):
        raise util.Abort("known() not supported by target repository")
    flags = repo.known([bin(s) for s in ids])
    ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))

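# Illustrative sketch (assumption): given two full-length hex ids where the
# first is present in the target repo and the second is not, the join above
# would emit the string "10" followed by a newline.
#
#     $ hg debugknown REPO <known-id> <unknown-id>
#     10
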
@command('debuglabelcomplete', [], _('LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''complete "labels" - tags, open branch names, bookmark names'''

    labels = set()
    labels.update(t[0] for t in repo.tagslist())
    labels.update(repo._bookmarks.keys())
    labels.update(tag for (tag, heads, tip, closed)
                  in repo.branchmap().iterbranches() if not closed)
    completions = set()
    if not args:
        args = ['']
    for a in args:
        completions.update(l for l in labels if l.startswith(a))
    ui.write('\n'.join(sorted(completions)))
    ui.write('\n')

@command('debugobsolete',
    [('', 'flags', 0, _('markers flag')),
    ] + commitopts2,
    _('[OBSOLETED [REPLACEMENT] [REPL... ]'))
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""
    def parsenodeid(s):
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise util.Abort('changeset references must be full hexadecimal '
                             'node identifiers')

    if precursor is not None:
        metadata = {}
        if 'date' in opts:
            metadata['date'] = opts['date']
        metadata['user'] = opts['user'] or ui.username()
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction('debugobsolete')
            try:
                repo.obsstore.create(tr, parsenodeid(precursor), succs,
                                     opts['flags'], metadata)
                tr.close()
            finally:
                tr.release()
        finally:
            l.release()
    else:
        for m in obsolete.allmarkers(repo):
            cmdutil.showmarker(ui, m)

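# Illustrative sketch (node ids are placeholders): with arguments the command
# records a marker via repo.obsstore.create(); without arguments it falls
# through to the listing branch and prints every existing marker with
# cmdutil.showmarker().
#
#     $ hg debugobsolete <old-node> <new-node> --user test
#     $ hg debugobsolete        # list all markers
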
@command('debugpathcomplete',
    [('f', 'full', None, _('complete an entire path')),
    ('n', 'normal', None, _('show only normal files')),
    ('a', 'added', None, _('show only added files')),
    ('r', 'removed', None, _('show only removed files'))],
    _('FILESPEC...'))
def debugpathcomplete(ui, repo, *specs, **opts):
    '''complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used.'''

    def complete(path, acceptable):
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(os.getcwd(), path))
        rootdir = repo.root + os.sep
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += '/'
        spec = spec[len(rootdir):]
        fixpaths = os.sep != '/'
        if fixpaths:
            spec = spec.replace(os.sep, '/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.iteritems():
            if f.startswith(spec) and st[0] in acceptable:
                if fixpaths:
                    f = f.replace('/', os.sep)
                if fullpaths:
                    addfile(f)
                    continue
                s = f.find(os.sep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    acceptable = ''
    if opts['normal']:
        acceptable += 'nm'
    if opts['added']:
        acceptable += 'a'
    if opts['removed']:
        acceptable += 'r'
    cwd = repo.getcwd()
    if not specs:
        specs = ['.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or 'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write('\n')

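# Illustrative sketch (filenames invented): a shell completion hook can feed
# the word being completed to debugpathcomplete and read one candidate per
# line, since the command prints repo.pathto() for every match. Without
# --full, matches stop at the next path segment.
#
#     $ hg debugpathcomplete mercurial/com
#     mercurial/commands.py
#     mercurial/config.py
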
@command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'))
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    target = hg.peer(ui, {}, repopath)
    if keyinfo:
        key, old, new = keyinfo
        r = target.pushkey(namespace, key, old, new)
        ui.status(str(r) + '\n')
        return not r
    else:
        for k, v in sorted(target.listkeys(namespace).iteritems()):
            ui.write("%s\t%s\n" % (k.encode('string-escape'),
                                   v.encode('string-escape')))

@command('debugpvec', [], _('A B'))
def debugpvec(ui, repo, a, b=None):
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = "="
    elif pa > pb:
        rel = ">"
    elif pa < pb:
        rel = "<"
    elif pa | pb:
        rel = "|"
    ui.write(_("a: %s\n") % pa)
    ui.write(_("b: %s\n") % pb)
    ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
             (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
              pa.distance(pb), rel))

@command('debugrebuilddirstate|debugrebuildstate',
    [('r', 'rev', '', _('revision to rebuild to'), _('REV'))],
    _('[-r REV]'))
def debugrebuilddirstate(ui, repo, rev):
    """rebuild the dirstate as it would look for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    wlock = repo.wlock()
    try:
        repo.dirstate.rebuild(ctx.node(), ctx.manifest())
    finally:
        wlock.release()

@command('debugrename',
    [('r', 'rev', '', _('revision to debug'), _('REV'))],
    _('[-r REV] FILE'))
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    ctx = scmutil.revsingle(repo, opts.get('rev'))
    m = scmutil.match(ctx, (file1,) + pats, opts)
    for abs in ctx.walk(m):
        fctx = ctx[abs]
        o = fctx.filelog().renamed(fctx.filenode())
        rel = m.rel(abs)
        if o:
            ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
        else:
            ui.write(_("%s not renamed\n") % rel)

@command('debugrevlog',
    [('c', 'changelog', False, _('open changelog')),
    ('m', 'manifest', False, _('open manifest')),
    ('d', 'dump', False, _('dump index data'))],
    _('-c|-m|FILE'))
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)

    if opts.get("dump"):
        numrevs = len(r)
        ui.write("# rev p1rev p2rev start end deltastart base p1 p2"
                 " rawsize totalsize compression heads\n")
        ts = 0
        heads = set()
        for rev in xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                dbase = rev
            cbase = r.chainbase(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            ui.write("%d %d %d %d %d %d %d %d %d %d %d %d %d\n" %
                     (rev, p1, p2, r.start(rev), r.end(rev),
                      r.start(dbase), r.start(cbase),
                      r.start(p1), r.start(p2),
                      rs, ts, ts / r.end(rev), len(heads)))
        return 0

    v = r.version
    format = v & 0xFFFF
    flags = []
    gdelta = False
    if v & revlog.REVLOGNGINLINEDATA:
        flags.append('inline')
    if v & revlog.REVLOGGENERALDELTA:
        gdelta = True
        flags.append('generaldelta')
    if not flags:
        flags = ['(none)']

    nummerges = 0
    numfull = 0
    numprev = 0
    nump1 = 0
    nump2 = 0
    numother = 0
    nump1prev = 0
    nump2prev = 0
    chainlengths = []

    datasize = [None, 0, 0L]
    fullsize = [None, 0, 0L]
    deltasize = [None, 0, 0L]

    def addsize(size, l):
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            chainlengths.append(0)
            numfull += 1
            addsize(size, fullsize)
        else:
            chainlengths.append(chainlengths[delta] + 1)
            addsize(size, deltasize)
            if delta == rev - 1:
                numprev += 1
                if delta == p1:
                    nump1prev += 1
                elif delta == p2:
                    nump2prev += 1
            elif delta == p1:
                nump1 += 1
            elif delta == p2:
                nump2 += 1
            elif delta != nullrev:
                numother += 1

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    fullsize[2] /= numfull
    deltatotal = deltasize[2]
    if numrevs - numfull > 0:
        deltasize[2] /= numrevs - numfull
    totalsize = fulltotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    compratio = totalrawsize / totalsize

    basedfmtstr = '%%%dd\n'
    basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        return basedfmtstr % len(str(max))
    def pcfmtstr(max, padding=0):
        return basepcfmtstr % (len(str(max)), ' ' * padding)

    def pcfmt(value, total):
        return (value, 100 * float(value) / total)

    ui.write(('format : %d\n') % format)
    ui.write(('flags : %s\n') % ', '.join(flags))

    ui.write('\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
    ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
    ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
    ui.write(('revision size : ') + fmt2 % totalsize)
    ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
    ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))

    ui.write('\n')
    fmt = dfmtstr(max(avgchainlen, compratio))
    ui.write(('avg chain length : ') + fmt % avgchainlen)
    ui.write(('compression ratio : ') + fmt % compratio)

    if format > 0:
        ui.write('\n')
        ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
                 % tuple(datasize))
        ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
                 % tuple(fullsize))
        ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
                 % tuple(deltasize))

    if numdeltas > 0:
        ui.write('\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
        if numprev > 0:
            ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
                                                            numprev))
            ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
                                                            numprev))
            ui.write((' other : ') + fmt2 % pcfmt(numoprev,
                                                  numprev))
        if gdelta:
            ui.write(('deltas against p1 : ')
                     + fmt % pcfmt(nump1, numdeltas))
            ui.write(('deltas against p2 : ')
                     + fmt % pcfmt(nump2, numdeltas))
            ui.write(('deltas against other : ') + fmt % pcfmt(numother,
                                                               numdeltas))

@command('debugrevspec',
    [('', 'optimize', None, _('print parsed tree after optimizing'))],
    ('REVSPEC'))
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use --verbose to print the parsed tree before and after aliases
    expansion.
    """
    if ui.verbose:
        tree = revset.parse(expr)[0]
        ui.note(revset.prettyformat(tree), "\n")
        newtree = revset.findaliases(ui, tree)
        if newtree != tree:
            ui.note(revset.prettyformat(newtree), "\n")
        if opts["optimize"]:
            weight, optimizedtree = revset.optimize(newtree, True)
            ui.note("* optimized:\n", revset.prettyformat(optimizedtree), "\n")
    func = revset.match(ui, expr)
    for c in func(repo, revset.spanset(repo)):
        ui.write("%s\n" % c)

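# Illustrative sketch: the final loop above prints one revision number per
# line for whatever the revset evaluates to, so a run might look like this
# (revision numbers are made up; --verbose additionally dumps the parsed
# tree via ui.note()).
#
#     $ hg debugrevspec "ancestors(tip) and merge()"
#     14
#     27
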
@command('debugsetparents', [], _('REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care.

    Returns 0 on success.
    """

    r1 = scmutil.revsingle(repo, rev1).node()
    r2 = scmutil.revsingle(repo, rev2, 'null').node()

    wlock = repo.wlock()
    try:
        repo.setparents(r1, r2)
    finally:
        wlock.release()

@command('debugdirstate|debugstate',
    [('', 'nodates', None, _('do not display the saved mtime')),
    ('', 'datesort', None, _('sort by saved mtime'))],
    _('[OPTION]...'))
def debugstate(ui, repo, nodates=None, datesort=None):
    """show the contents of the current dirstate"""
    timestr = ""
    showdate = not nodates
    if datesort:
        keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
    else:
        keyfunc = None # sort by filename
    for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
        if showdate:
            if ent[3] == -1:
                # Pad or slice to locale representation
                locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ",
                                               time.localtime(0)))
                timestr = 'unset'
                timestr = (timestr[:locale_len] +
                           ' ' * (locale_len - len(timestr)))
            else:
                timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
                                        time.localtime(ent[3]))
        if ent[1] & 020000:
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0777 & ~util.umask)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))

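# Illustrative sketch (entries invented): each dirstate entry is printed by
# the ui.write() call above as state, mode, size and (unless --nodates)
# mtime, followed by the filename, e.g.
#
#     $ hg debugdirstate --nodates
#     n 644       1375 setup.py
#     a 644         -1 newfile.py
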
@command('debugsub',
    [('r', 'rev', '',
     _('revision to check'), _('REV'))],
    _('[-r REV] [REV]'))
def debugsub(ui, repo, rev=None):
    ctx = scmutil.revsingle(repo, rev, None)
    for k, v in sorted(ctx.substate.items()):
        ui.write(('path %s\n') % k)
        ui.write((' source %s\n') % v[0])
        ui.write((' revision %s\n') % v[1])

@command('debugsuccessorssets',
    [],
    _('[REV]'))
def debugsuccessorssets(ui, repo, *revs):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors is called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    ctx2str = str
    node2str = short
    if ui.debug():
        def ctx2str(ctx):
            return ctx.hex()
        node2str = hex
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write('%s\n'% ctx2str(ctx))
        for succsset in obsolete.successorssets(repo, ctx.node(), cache):
            if succsset:
                ui.write(' ')
                ui.write(node2str(succsset[0]))
                for node in succsset[1:]:
                    ui.write(' ')
                    ui.write(node2str(node))
            ui.write('\n')

@command('debugwalk', walkopts, _('[OPTION]... [FILE]...'))
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    m = scmutil.match(repo[None], pats, opts)
    items = list(repo.walk(m))
    if not items:
        return
    f = lambda fn: fn
    if ui.configbool('ui', 'slash') and os.sep != '/':
        f = lambda fn: util.normpath(fn)
    fmt = 'f %%-%ds %%-%ds %%s' % (
        max([len(abs) for abs in items]),
        max([len(m.rel(abs)) for abs in items]))
    for abs in items:
        line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
        ui.write("%s\n" % line.rstrip())

@command('debugwireargs',
    [('', 'three', '', 'three'),
    ('', 'four', '', 'four'),
    ('', 'five', '', 'five'),
    ] + remoteopts,
    _('REPO [OPTIONS]... [ONE [TWO]]'))
def debugwireargs(ui, repopath, *vals, **opts):
    repo = hg.peer(ui, opts, repopath)
    for opt in remoteopts:
        del opts[opt[1]]
    args = {}
    for k, v in opts.iteritems():
        if v:
            args[k] = v
    # run twice to check that we don't mess up the stream for the next command
    res1 = repo.debugwireargs(*vals, **args)
    res2 = repo.debugwireargs(*vals, **args)
    ui.write("%s\n" % res1)
    if res1 != res2:
        ui.warn("%s\n" % res2)

@command('^diff',
    [('r', 'rev', [], _('revision'), _('REV')),
    ('c', 'change', '', _('change made by revision'), _('REV'))
    ] + diffopts + diffopts2 + walkopts + subrepoopts,
    _('[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...'))
def diff(ui, repo, *pats, **opts):
    """diff repository (or selected files)

    Show differences between revisions for the specified files.

    Differences between files are shown using the unified diff format.

    .. note::

       diff may generate unexpected results for merges, as it will
       default to comparing against the working directory's first
       parent changeset if no revisions are specified.

    When two revision arguments are given, then changes are shown
    between those revisions. If only one revision is specified then
    that revision is compared to the working directory, and, when no
    revisions are specified, the working directory files are compared
    to its parent.

    Alternatively you can specify -c/--change with a revision to see
    the changes in that changeset relative to its first parent.

    Without the -a/--text option, diff will avoid generating diffs of
    files it detects as binary. With -a, diff will generate a diff
    anyway, probably with undesirable results.

    Use the -g/--git option to generate diffs in the git extended diff
    format. For more information, read :hg:`help diffs`.

    .. container:: verbose

      Examples:

      - compare a file in the current working directory to its parent::

          hg diff foo.c

      - compare two historical versions of a directory, with rename info::

          hg diff --git -r 1.0:1.2 lib/

      - get change stats relative to the last change on some date::

          hg diff --stat -r "date('may 2')"

      - diff all newly-added files that contain a keyword::

          hg diff "set:added() and grep(GNU)"

      - compare a revision and its parents::

          hg diff -c 9353         # compare against first parent
          hg diff -r 9353^:9353   # same using revset syntax
          hg diff -r 9353^2:9353  # compare against the second parent

    Returns 0 on success.
    """

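    # Pick the two comparison endpoints: -c/--change diffs a revision against
    # its first parent, -r/--rev accepts one or two revisions, and with no
    # revision at all the working directory is diffed against its parent.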
    revs = opts.get('rev')
    change = opts.get('change')
    stat = opts.get('stat')
    reverse = opts.get('reverse')

    if revs and change:
        msg = _('cannot specify --rev and --change at the same time')
        raise util.Abort(msg)
    elif change:
        node2 = scmutil.revsingle(repo, change, None).node()
        node1 = repo[node2].p1().node()
    else:
        node1, node2 = scmutil.revpair(repo, revs)

    if reverse:
        node1, node2 = node2, node1

    diffopts = patch.diffopts(ui, opts)
    m = scmutil.match(repo[node2], pats, opts)
    cmdutil.diffordiffstat(ui, repo, diffopts, node1, node2, m, stat=stat,
                           listsubrepos=opts.get('subrepos'))

@command('^export',
    [('o', 'output', '',
     _('print output to file with formatted name'), _('FORMAT')),
    ('', 'switch-parent', None, _('diff against the second parent')),
    ('r', 'rev', [], _('revisions to export'), _('REV')),
    ] + diffopts,
    _('[OPTION]... [-o OUTFILESPEC] [-r] [REV]...'))
def export(ui, repo, *changesets, **opts):
    """dump the header and diffs for one or more changesets

    Print the changeset header and diffs for one or more revisions.
    If no revision is given, the parent of the working directory is used.

    The information shown in the changeset header is: author, date,
    branch name (if non-default), changeset hash, parent(s) and commit
    comment.

    .. note::

       export may generate unexpected diff output for merge
       changesets, as it will compare the merge changeset against its
       first parent only.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are as follows:

    :``%%``: literal "%" character
    :``%H``: changeset hash (40 hexadecimal digits)
    :``%N``: number of patches being generated
    :``%R``: changeset revision number
    :``%b``: basename of the exporting repository
    :``%h``: short-form changeset hash (12 hexadecimal digits)
    :``%m``: first line of the commit message (only alphanumeric characters)
    :``%n``: zero-padded sequence number, starting at 1
    :``%r``: zero-padded changeset revision number

    Without the -a/--text option, export will avoid generating diffs
    of files it detects as binary. With -a, export will generate a
    diff anyway, probably with undesirable results.

    Use the -g/--git option to generate diffs in the git extended diff
    format. See :hg:`help diffs` for more information.

    With the --switch-parent option, the diff will be against the
    second parent. This can be useful for reviewing a merge.

    .. container:: verbose

      Examples:

      - use export and import to transplant a bugfix to the current
        branch::

          hg export -r 9353 | hg import -

      - export all the changesets between two revisions to a file with
        rename information::

          hg export --git -r 123:150 > changes.txt

      - split outgoing changes into a series of patches with
        descriptive names::

          hg export -r "outgoing()" -o "%n-%m.patch"

    Returns 0 on success.
    """
    changesets += tuple(opts.get('rev', []))
    if not changesets:
        changesets = ['.']
    revs = scmutil.revrange(repo, changesets)
    if not revs:
        raise util.Abort(_("export requires at least one changeset"))
    if len(revs) > 1:
        ui.note(_('exporting patches:\n'))
    else:
        ui.note(_('exporting patch:\n'))
    cmdutil.export(repo, revs, template=opts.get('output'),
                   switch_parent=opts.get('switch_parent'),
                   opts=patch.diffopts(ui, opts))

@command('^forget', walkopts, _('[OPTION]... FILE...'))
def forget(ui, repo, *pats, **opts):
    """forget the specified files on the next commit

    Mark the specified files so they will no longer be tracked
    after the next commit.

    This only removes files from the current branch, not from the
    entire project history, and it does not delete them from the
    working directory.

    To undo a forget before the next commit, see :hg:`add`.

    .. container:: verbose

      Examples:

      - forget newly-added binary files::

          hg forget "set:added() and binary()"

      - forget files that would be excluded by .hgignore::

          hg forget "set:hgignore()"

    Returns 0 on success.
    """

    if not pats:
        raise util.Abort(_('no files specified'))

    m = scmutil.match(repo[None], pats, opts)
    rejected = cmdutil.forget(ui, repo, m, prefix="", explicitonly=False)[0]
    return rejected and 1 or 0

@command(
    'graft',
    [('r', 'rev', [], _('revisions to graft'), _('REV')),
     ('c', 'continue', False, _('resume interrupted graft')),
     ('e', 'edit', False, _('invoke editor on commit messages')),
     ('', 'log', None, _('append graft info to log message')),
     ('D', 'currentdate', False,
      _('record the current date as commit date')),
     ('U', 'currentuser', False,
      _('record the current user as committer'))]
    + commitopts2 + mergetoolopts + dryrunopts,
    _('[OPTION]... [-r] REV...'))
def graft(ui, repo, *revs, **opts):
    '''copy changes from other branches onto the current branch

    This command uses Mercurial's merge logic to copy individual
    changes from other branches without merging branches in the
    history graph. This is sometimes known as 'backporting' or
    'cherry-picking'. By default, graft will copy user, date, and
    description from the source changesets.

    Changesets that are ancestors of the current revision, that have
    already been grafted, or that are merges will be skipped.

    If --log is specified, log messages will have a comment appended
    of the form::

      (grafted from CHANGESETHASH)

    If a graft merge results in conflicts, the graft process is
    interrupted so that the current merge can be manually resolved.
    Once all conflicts are addressed, the graft process can be
    continued with the -c/--continue option.

    .. note::

       The -c/--continue option does not reapply earlier options.

    .. container:: verbose

      Examples:

      - copy a single change to the stable branch and edit its description::

          hg update stable
          hg graft --edit 9393

      - graft a range of changesets with one exception, updating dates::

          hg graft -D "2085::2093 and not 2091"

      - continue a graft after resolving conflicts::

          hg graft -c

      - show the source of a grafted changeset::

          hg log --debug -r .

    Returns 0 on successful completion.
    '''

    revs = list(revs)
    revs.extend(opts['rev'])

    if not opts.get('user') and opts.get('currentuser'):
        opts['user'] = ui.username()
    if not opts.get('date') and opts.get('currentdate'):
        opts['date'] = "%d %d" % util.makedate()

    editor = None
    if opts.get('edit'):
        editor = cmdutil.commitforceeditor

    cont = False
    if opts['continue']:
        cont = True
        if revs:
            raise util.Abort(_("can't specify --continue and revisions"))
        # read in unfinished revisions
        try:
            nodes = repo.opener.read('graftstate').splitlines()
            revs = [repo[node].rev() for node in nodes]
        except IOError, inst:
            if inst.errno != errno.ENOENT:
                raise
            raise util.Abort(_("no graft state found, can't continue"))
    else:
        cmdutil.checkunfinished(repo)
        cmdutil.bailifchanged(repo)
        if not revs:
            raise util.Abort(_('no revisions specified'))
        revs = scmutil.revrange(repo, revs)

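    # Narrow down the requested revisions before touching the working copy:
    # merges, ancestors of the current revision, and changesets that were
    # already grafted are warned about and dropped below.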
    # check for merges
    for rev in repo.revs('%ld and merge()', revs):
        ui.warn(_('skipping ungraftable merge revision %s\n') % rev)
        revs.remove(rev)
    if not revs:
        return -1

    # check for ancestors of dest branch
    crev = repo['.'].rev()
    ancestors = repo.changelog.ancestors([crev], inclusive=True)
    # don't mutate while iterating, create a copy
    for rev in list(revs):
        if rev in ancestors:
            ui.warn(_('skipping ancestor revision %s\n') % rev)
            revs.remove(rev)
    if not revs:
        return -1

    # analyze revs for earlier grafts
    ids = {}
    for ctx in repo.set("%ld", revs):
        ids[ctx.hex()] = ctx.rev()
        n = ctx.extra().get('source')
        if n:
            ids[n] = ctx.rev()

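    # 'ids' maps both each candidate's hash and its recorded graft source
    # (extra['source']) to its local revision number, so the scan below can
    # recognise changesets that already exist on the destination, whether
    # directly or as the origin of an earlier graft.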
    # check ancestors for earlier grafts
    ui.debug('scanning for duplicate grafts\n')

    for rev in repo.changelog.findmissingrevs(revs, [crev]):
        ctx = repo[rev]
        n = ctx.extra().get('source')
        if n in ids:
            r = repo[n].rev()
            if r in revs:
                ui.warn(_('skipping revision %s (already grafted to %s)\n')
                        % (r, rev))
                revs.remove(r)
            elif ids[n] in revs:
                ui.warn(_('skipping already grafted revision %s '
                          '(%s also has origin %d)\n') % (ids[n], rev, r))
                revs.remove(ids[n])
        elif ctx.hex() in ids:
            r = ids[ctx.hex()]
            ui.warn(_('skipping already grafted revision %s '
                      '(was grafted from %d)\n') % (r, rev))
            revs.remove(r)
    if not revs:
        return -1

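    # Graft the remaining revisions one by one under the working-copy lock.
    # If a merge hits conflicts, the not-yet-grafted nodes are written to
    # 'graftstate' so that 'hg graft --continue' can resume from that point.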
    wlock = repo.wlock()
    try:
        current = repo['.']
        for pos, ctx in enumerate(repo.set("%ld", revs)):

            ui.status(_('grafting revision %s\n') % ctx.rev())
            if opts.get('dry_run'):
                continue

            source = ctx.extra().get('source')
            if not source:
                source = ctx.hex()
            extra = {'source': source}
            user = ctx.user()
            if opts.get('user'):
                user = opts['user']
            date = ctx.date()
            if opts.get('date'):
                date = opts['date']
            message = ctx.description()
            if opts.get('log'):
                message += '\n(grafted from %s)' % ctx.hex()

            # we don't merge the first commit when continuing
            if not cont:
                # perform the graft merge with p1(rev) as 'ancestor'
                try:
                    # ui.forcemerge is an internal variable, do not document
                    repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                                      'graft')
                    stats = mergemod.update(repo, ctx.node(), True, True, False,
                                            ctx.p1().node())
                finally:
                    repo.ui.setconfig('ui', 'forcemerge', '', 'graft')
                # report any conflicts
                if stats and stats[3] > 0:
                    # write out state for --continue
                    nodelines = [repo[rev].hex() + "\n" for rev in revs[pos:]]
                    repo.opener.write('graftstate', ''.join(nodelines))
                    raise util.Abort(
                        _("unresolved conflicts, can't continue"),
                        hint=_('use hg resolve and hg graft --continue'))
            else:
                cont = False

            # drop the second merge parent
            repo.setparents(current.node(), nullid)
            repo.dirstate.write()
            # fix up dirstate for copies and renames
            cmdutil.duplicatecopies(repo, ctx.rev(), ctx.p1().rev())

            # commit
            node = repo.commit(text=message, user=user,
                               date=date, extra=extra, editor=editor)
            if node is None:
                ui.status(_('graft for revision %s is empty\n') % ctx.rev())
            else:
                current = repo[node]
    finally:
        wlock.release()

    # remove state when we complete successfully
    if not opts.get('dry_run'):
        util.unlinkpath(repo.join('graftstate'), ignoremissing=True)

    return 0

@command('grep',
    [('0', 'print0', None, _('end fields with NUL')),
    ('', 'all', None, _('print all revisions that match')),
    ('a', 'text', None, _('treat all files as text')),
    ('f', 'follow', None,
     _('follow changeset history,'
       ' or file history across copies and renames')),
    ('i', 'ignore-case', None, _('ignore case when matching')),
    ('l', 'files-with-matches', None,
     _('print only filenames and revisions that match')),
    ('n', 'line-number', None, _('print matching line numbers')),
    ('r', 'rev', [],
     _('only search files changed within revision range'), _('REV')),
    ('u', 'user', None, _('list the author (long with -v)')),
    ('d', 'date', None, _('list the date (short with -q)')),
    ] + walkopts,
    _('[OPTION]... PATTERN [FILE]...'))
def grep(ui, repo, pattern, *pats, **opts):
    """search for a pattern in specified files and revisions

    Search revisions of files for a regular expression.

    This command behaves differently than Unix grep. It only accepts
    Python/Perl regexps. It searches repository history, not the
    working directory. It always prints the revision number in which a
    match appears.

    By default, grep only prints output for the first revision of a
    file in which it finds a match. To get it to print every revision
    that contains a change in match status ("-" for a match that
    becomes a non-match, or "+" for a non-match that becomes a match),
    use the --all flag.

    Returns 0 if a match is found, 1 otherwise.
    """
    reflags = re.M
    if opts.get('ignore_case'):
        reflags |= re.I
    try:
        regexp = util.compilere(pattern, reflags)
    except re.error, inst:
        ui.warn(_("grep: invalid match pattern: %s\n") % inst)
        return 1
    sep, eol = ':', '\n'
    if opts.get('print0'):
        sep = eol = '\0'

    getfile = util.lrucachefunc(repo.file)

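    # matchlines() scans one file revision and yields, for every regexp hit,
    # the 1-based line number, the match's column span, and the line text.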
    def matchlines(body):
        begin = 0
        linenum = 0
        while begin < len(body):
            match = regexp.search(body, begin)
            if not match:
                break
            mstart, mend = match.span()
            linenum += body.count('\n', begin, mstart) + 1
            lstart = body.rfind('\n', begin, mstart) + 1 or begin
            begin = body.find('\n', mend) + 1 or len(body) + 1
            lend = begin - 1
            yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]

    class linestate(object):
        def __init__(self, line, linenum, colstart, colend):
            self.line = line
            self.linenum = linenum
            self.colstart = colstart
            self.colend = colend

        def __hash__(self):
            return hash((self.linenum, self.line))

        def __eq__(self, other):
            return self.line == other.line

    matches = {}
    copies = {}
    def grepbody(fn, rev, body):
        matches[rev].setdefault(fn, [])
        m = matches[rev][fn]
        for lnum, cstart, cend, line in matchlines(body):
            s = linestate(line, lnum, cstart, cend)
            m.append(s)

    def difflinestates(a, b):
        sm = difflib.SequenceMatcher(None, a, b)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag == 'insert':
                for i in xrange(blo, bhi):
                    yield ('+', b[i])
            elif tag == 'delete':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
            elif tag == 'replace':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
                for i in xrange(blo, bhi):
                    yield ('+', b[i])

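    # display() prints one output line per match (or per match-status change
    # with --all): filename and revision first, then the optional line number,
    # change marker, user and date columns, and finally the matching line.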
    def display(fn, ctx, pstates, states):
        rev = ctx.rev()
        datefunc = ui.quiet and util.shortdate or util.datestr
        found = False
        @util.cachefunc
        def binary():
            flog = getfile(fn)
            return util.binary(flog.read(ctx.filenode(fn)))

        if opts.get('all'):
            iter = difflinestates(pstates, states)
        else:
            iter = [('', l) for l in states]
        for change, l in iter:
            cols = [(fn, 'grep.filename'), (str(rev), 'grep.rev')]
            before, match, after = None, None, None

            if opts.get('line_number'):
                cols.append((str(l.linenum), 'grep.linenumber'))
            if opts.get('all'):
                cols.append((change, 'grep.change'))
            if opts.get('user'):
                cols.append((ui.shortuser(ctx.user()), 'grep.user'))
            if opts.get('date'):
                cols.append((datefunc(ctx.date()), 'grep.date'))
            if not opts.get('files_with_matches'):
                before = l.line[:l.colstart]
                match = l.line[l.colstart:l.colend]
                after = l.line[l.colend:]
            for col, label in cols[:-1]:
                ui.write(col, label=label)
                ui.write(sep, label='grep.sep')
            ui.write(cols[-1][0], label=cols[-1][1])
            if before is not None:
                ui.write(sep, label='grep.sep')
                if not opts.get('text') and binary():
                    ui.write(" Binary file matches")
                else:
                    ui.write(before)
                    ui.write(match, label='grep.match')
                    ui.write(after)
                ui.write(eol)
            found = True
            if before is None:
                break
        return found

    skip = {}
    revfiles = {}
    matchfn = scmutil.match(repo[None], pats, opts)
    found = False
    follow = opts.get('follow')

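    # prep() is called by walkchangerevs() for every revision it visits; it
    # collects matches for each touched file and for the corresponding file in
    # the parent revision, so display() can report "+"/"-" transitions when
    # --all is used.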
    def prep(ctx, fns):
        rev = ctx.rev()
        pctx = ctx.p1()
        parent = pctx.rev()
        matches.setdefault(rev, {})
        matches.setdefault(parent, {})
        files = revfiles.setdefault(rev, [])
        for fn in fns:
            flog = getfile(fn)
            try:
                fnode = ctx.filenode(fn)
            except error.LookupError:
                continue

            copied = flog.renamed(fnode)
            copy = follow and copied and copied[0]
            if copy:
                copies.setdefault(rev, {})[fn] = copy
            if fn in skip:
                if copy:
                    skip[copy] = True
                continue
            files.append(fn)

            if fn not in matches[rev]:
                grepbody(fn, rev, flog.read(fnode))

            pfn = copy or fn
            if pfn not in matches[parent]:
                try:
                    fnode = pctx.filenode(pfn)
                    grepbody(pfn, parent, flog.read(fnode))
                except error.LookupError:
                    pass

    for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
        rev = ctx.rev()
        parent = ctx.p1().rev()
        for fn in sorted(revfiles.get(rev, [])):
            states = matches[rev][fn]
            copy = copies.get(rev, {}).get(fn)
            if fn in skip:
                if copy:
                    skip[copy] = True
                continue
            pstates = matches.get(parent, {}).get(copy or fn, [])
            if pstates or states:
                r = display(fn, ctx, pstates, states)
                found = found or r
                if r and not opts.get('all'):
                    skip[fn] = True
                    if copy:
                        skip[copy] = True
        del matches[rev]
        del revfiles[rev]

    return not found

@command('heads',
    [('r', 'rev', '',
     _('show only heads which are descendants of STARTREV'), _('STARTREV')),
    ('t', 'topo', False, _('show topological heads only')),
    ('a', 'active', False, _('show active branchheads only (DEPRECATED)')),
    ('c', 'closed', False, _('show normal and closed branch heads')),
    ] + templateopts,
    _('[-ct] [-r STARTREV] [REV]...'))
def heads(ui, repo, *branchrevs, **opts):
    """show branch heads

    With no arguments, show all open branch heads in the repository.
    Branch heads are changesets that have no descendants on the
    same branch. They are where development generally takes place and
    are the usual targets for update and merge operations.

    If one or more REVs are given, only open branch heads on the
    branches associated with the specified changesets are shown. This
    means that you can use :hg:`heads .` to see the heads on the
    currently checked-out branch.

    If -c/--closed is specified, also show branch heads marked closed
    (see :hg:`commit --close-branch`).

    If STARTREV is specified, only those heads that are descendants of
    STARTREV will be displayed.

    If -t/--topo is specified, named branch mechanics will be ignored and only
    topological heads (changesets with no children) will be shown.

    Returns 0 if matching heads are found, 1 if not.
    """

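    # With -t/--topo only DAG heads are considered; otherwise the heads of
    # every named branch are collected and then filtered down to the branches
    # of the given revisions, if any were supplied.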
    start = None
    if 'rev' in opts:
        start = scmutil.revsingle(repo, opts['rev'], None).node()

    if opts.get('topo'):
        heads = [repo[h] for h in repo.heads(start)]
    else:
        heads = []
        for branch in repo.branchmap():
            heads += repo.branchheads(branch, start, opts.get('closed'))
        heads = [repo[h] for h in heads]

    if branchrevs:
        branches = set(repo[br].branch() for br in branchrevs)
        heads = [h for h in heads if h.branch() in branches]

    if opts.get('active') and branchrevs:
        dagheads = repo.heads(start)
        heads = [h for h in heads if h.node() in dagheads]

    if branchrevs:
        haveheads = set(h.branch() for h in heads)
        if branches - haveheads:
            headless = ', '.join(b for b in branches - haveheads)
            msg = _('no open branch heads found on branches %s')
            if opts.get('rev'):
                msg += _(' (started at %s)') % opts['rev']
            ui.warn((msg + '\n') % headless)

    if not heads:
        return 1

    heads = sorted(heads, key=lambda x: -x.rev())
    displayer = cmdutil.show_changeset(ui, repo, opts)
    for ctx in heads:
        displayer.show(ctx)
    displayer.close()

@command('help',
    [('e', 'extension', None, _('show only help for extensions')),
     ('c', 'command', None, _('show only help for commands')),
     ('k', 'keyword', '', _('show topics matching keyword')),
     ],
    _('[-ec] [TOPIC]'))
def help_(ui, name=None, **opts):
    """show help for a given topic or a help overview

    With no arguments, print a list of commands with short help messages.

    Given a topic, extension, or command name, print help for that
    topic.

    Returns 0 if successful.
    """

    textwidth = min(ui.termwidth(), 80) - 2

    keep = ui.verbose and ['verbose'] or []
    text = help.help_(ui, name, **opts)

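    # Format once to learn whether verbose-only sections were pruned, then
    # format again with the matching 'omitted'/'notomitted' keyword so the
    # output can mention that more help is available with -v.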
    formatted, pruned = minirst.format(text, textwidth, keep=keep)
    if 'verbose' in pruned:
        keep.append('omitted')
    else:
        keep.append('notomitted')
    formatted, pruned = minirst.format(text, textwidth, keep=keep)
    ui.write(formatted)

@command('identify|id',
    [('r', 'rev', '',
     _('identify the specified revision'), _('REV')),
    ('n', 'num', None, _('show local revision number')),
    ('i', 'id', None, _('show global revision id')),
    ('b', 'branch', None, _('show branch')),
    ('t', 'tags', None, _('show tags')),
    ('B', 'bookmarks', None, _('show bookmarks')),
    ] + remoteopts,
    _('[-nibtB] [-r REV] [SOURCE]'))
def identify(ui, repo, source=None, rev=None,
             num=None, id=None, branch=None, tags=None, bookmarks=None, **opts):
    """identify the working copy or specified revision

    Print a summary identifying the repository state at REV using one or
    two parent hash identifiers, followed by a "+" if the working
    directory has uncommitted changes, the branch name (if not default),
    a list of tags, and a list of bookmarks.

    When REV is not given, print a summary of the current state of the
    repository.

    Specifying a path to a repository root or Mercurial bundle will
    cause lookup to operate on that repository/bundle.

    .. container:: verbose

      Examples:

      - generate a build identifier for the working directory::

          hg id --id > build-id.dat

      - find the revision corresponding to a tag::

          hg id -n -r 1.3

      - check the most recent revision of a remote repository::

          hg id -r tip http://selenic.com/hg/

    Returns 0 if successful.
    """

    if not repo and not source:
        raise util.Abort(_("there is no Mercurial repository here "
                           "(.hg not found)"))

    hexfunc = ui.debugflag and hex or short
    default = not (num or id or branch or tags or bookmarks)
    output = []
    revs = []

    if source:
        source, branches = hg.parseurl(ui.expandpath(source))
        peer = hg.peer(repo or ui, opts, source) # only pass ui when no repo
        repo = peer.local()
        revs, checkout = hg.addbranchrevs(repo, peer, branches, None)

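    # Without a local repository (e.g. when SOURCE is a remote URL), only the
    # remote hash and bookmarks can be reported; revision numbers, branches,
    # and tags would need local metadata, so asking for them is an error.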
    if not repo:
        if num or branch or tags:
            raise util.Abort(
                _("can't query remote revision number, branch, or tags"))
        if not rev and revs:
            rev = revs[0]
        if not rev:
            rev = "tip"

        remoterev = peer.lookup(rev)
        if default or id:
            output = [hexfunc(remoterev)]

        def getbms():
            bms = []

            if 'bookmarks' in peer.listkeys('namespaces'):
                hexremoterev = hex(remoterev)
                bms = [bm for bm, bmr in peer.listkeys('bookmarks').iteritems()
                       if bmr == hexremoterev]

            return sorted(bms)

        if bookmarks:
            output.extend(getbms())
        elif default and not ui.quiet:
            # multiple bookmarks for a single parent separated by '/'
            bm = '/'.join(getbms())
            if bm:
                output.append(bm)
    else:
        if not rev:
            ctx = repo[None]
            parents = ctx.parents()
            changed = ""
            if default or id or num:
                if (util.any(repo.status())
                    or util.any(ctx.sub(s).dirty() for s in ctx.substate)):
                    changed = '+'
            if default or id:
                output = ["%s%s" %
                  ('+'.join([hexfunc(p.node()) for p in parents]), changed)]
            if num:
                output.append("%s%s" %
                  ('+'.join([str(p.rev()) for p in parents]), changed))
        else:
            ctx = scmutil.revsingle(repo, rev)
            if default or id:
                output = [hexfunc(ctx.node())]
            if num:
                output.append(str(ctx.rev()))

        if default and not ui.quiet:
            b = ctx.branch()
            if b != 'default':
                output.append("(%s)" % b)

            # multiple tags for a single parent separated by '/'
            t = '/'.join(ctx.tags())
            if t:
                output.append(t)

            # multiple bookmarks for a single parent separated by '/'
            bm = '/'.join(ctx.bookmarks())
            if bm:
                output.append(bm)
        else:
            if branch:
                output.append(ctx.branch())

            if tags:
                output.extend(ctx.tags())

            if bookmarks:
                output.extend(ctx.bookmarks())

    ui.write("%s\n" % ' '.join(output))

3696 @command('import|patch',
3696 @command('import|patch',
3697 [('p', 'strip', 1,
3697 [('p', 'strip', 1,
3698 _('directory strip option for patch. This has the same '
3698 _('directory strip option for patch. This has the same '
3699 'meaning as the corresponding patch option'), _('NUM')),
3699 'meaning as the corresponding patch option'), _('NUM')),
3700 ('b', 'base', '', _('base path (DEPRECATED)'), _('PATH')),
3700 ('b', 'base', '', _('base path (DEPRECATED)'), _('PATH')),
3701 ('e', 'edit', False, _('invoke editor on commit messages')),
3701 ('e', 'edit', False, _('invoke editor on commit messages')),
3702 ('f', 'force', None,
3702 ('f', 'force', None,
3703 _('skip check for outstanding uncommitted changes (DEPRECATED)')),
3703 _('skip check for outstanding uncommitted changes (DEPRECATED)')),
3704 ('', 'no-commit', None,
3704 ('', 'no-commit', None,
3705 _("don't commit, just update the working directory")),
3705 _("don't commit, just update the working directory")),
3706 ('', 'bypass', None,
3706 ('', 'bypass', None,
3707 _("apply patch without touching the working directory")),
3707 _("apply patch without touching the working directory")),
3708 ('', 'exact', None,
3708 ('', 'exact', None,
3709 _('apply patch to the nodes from which it was generated')),
3709 _('apply patch to the nodes from which it was generated')),
3710 ('', 'import-branch', None,
3710 ('', 'import-branch', None,
3711 _('use any branch information in patch (implied by --exact)'))] +
3711 _('use any branch information in patch (implied by --exact)'))] +
3712 commitopts + commitopts2 + similarityopts,
3712 commitopts + commitopts2 + similarityopts,
3713 _('[OPTION]... PATCH...'))
3713 _('[OPTION]... PATCH...'))
3714 def import_(ui, repo, patch1=None, *patches, **opts):
3714 def import_(ui, repo, patch1=None, *patches, **opts):
3715 """import an ordered set of patches
3715 """import an ordered set of patches
3716
3716
3717 Import a list of patches and commit them individually (unless
3717 Import a list of patches and commit them individually (unless
3718 --no-commit is specified).
3718 --no-commit is specified).
3719
3719
3720 Because import first applies changes to the working directory,
3720 Because import first applies changes to the working directory,
3721 import will abort if there are outstanding changes.
3721 import will abort if there are outstanding changes.
3722
3722
3723 You can import a patch straight from a mail message. Even patches
3723 You can import a patch straight from a mail message. Even patches
3724 as attachments work (to use the body part, it must have type
3724 as attachments work (to use the body part, it must have type
3725 text/plain or text/x-patch). From and Subject headers of email
3725 text/plain or text/x-patch). From and Subject headers of email
3726 message are used as default committer and commit message. All
3726 message are used as default committer and commit message. All
3727 text/plain body parts before first diff are added to commit
3727 text/plain body parts before first diff are added to commit
3728 message.
3728 message.
3729
3729
3730 If the imported patch was generated by :hg:`export`, user and
3730 If the imported patch was generated by :hg:`export`, user and
3731 description from patch override values from message headers and
3731 description from patch override values from message headers and
3732 body. Values given on command line with -m/--message and -u/--user
3732 body. Values given on command line with -m/--message and -u/--user
3733 override these.
3733 override these.
3734
3734
3735 If --exact is specified, import will set the working directory to
3735 If --exact is specified, import will set the working directory to
3736 the parent of each patch before applying it, and will abort if the
3736 the parent of each patch before applying it, and will abort if the
3737 resulting changeset has a different ID than the one recorded in
3737 resulting changeset has a different ID than the one recorded in
3738 the patch. This may happen due to character set problems or other
3738 the patch. This may happen due to character set problems or other
3739 deficiencies in the text patch format.
3739 deficiencies in the text patch format.
3740
3740
3741 Use --bypass to apply and commit patches directly to the
3741 Use --bypass to apply and commit patches directly to the
3742 repository, not touching the working directory. Without --exact,
3742 repository, not touching the working directory. Without --exact,
3743 patches will be applied on top of the working directory parent
3743 patches will be applied on top of the working directory parent
3744 revision.
3744 revision.
3745
3745
3746 With -s/--similarity, hg will attempt to discover renames and
3746 With -s/--similarity, hg will attempt to discover renames and
3747 copies in the patch in the same way as :hg:`addremove`.
3747 copies in the patch in the same way as :hg:`addremove`.
3748
3748
3749 To read a patch from standard input, use "-" as the patch name. If
3749 To read a patch from standard input, use "-" as the patch name. If
3750 a URL is specified, the patch will be downloaded from it.
3750 a URL is specified, the patch will be downloaded from it.
3751 See :hg:`help dates` for a list of formats valid for -d/--date.
3751 See :hg:`help dates` for a list of formats valid for -d/--date.
3752
3752
3753 .. container:: verbose
3753 .. container:: verbose
3754
3754
3755 Examples:
3755 Examples:
3756
3756
3757 - import a traditional patch from a website and detect renames::
3757 - import a traditional patch from a website and detect renames::
3758
3758
3759 hg import -s 80 http://example.com/bugfix.patch
3759 hg import -s 80 http://example.com/bugfix.patch
3760
3760
3761 - import a changeset from an hgweb server::
3761 - import a changeset from an hgweb server::
3762
3762
3763 hg import http://www.selenic.com/hg/rev/5ca8c111e9aa
3763 hg import http://www.selenic.com/hg/rev/5ca8c111e9aa
3764
3764
3765 - import all the patches in a Unix-style mbox::
3765 - import all the patches in a Unix-style mbox::
3766
3766
3767 hg import incoming-patches.mbox
3767 hg import incoming-patches.mbox
3768
3768
3769 - attempt to exactly restore an exported changeset (not always
3769 - attempt to exactly restore an exported changeset (not always
3770 possible)::
3770 possible)::
3771
3771
3772 hg import --exact proposed-fix.patch
3772 hg import --exact proposed-fix.patch
3773
3773
3774 Returns 0 on success.
3774 Returns 0 on success.
3775 """
3775 """
3776
3776
3777 if not patch1:
3777 if not patch1:
3778 raise util.Abort(_('need at least one patch to import'))
3778 raise util.Abort(_('need at least one patch to import'))
3779
3779
3780 patches = (patch1,) + patches
3780 patches = (patch1,) + patches
3781
3781
3782 date = opts.get('date')
3782 date = opts.get('date')
3783 if date:
3783 if date:
3784 opts['date'] = util.parsedate(date)
3784 opts['date'] = util.parsedate(date)
3785
3785
3786 update = not opts.get('bypass')
3786 update = not opts.get('bypass')
3787 if not update and opts.get('no_commit'):
3787 if not update and opts.get('no_commit'):
3788 raise util.Abort(_('cannot use --no-commit with --bypass'))
3788 raise util.Abort(_('cannot use --no-commit with --bypass'))
3789 try:
3789 try:
3790 sim = float(opts.get('similarity') or 0)
3790 sim = float(opts.get('similarity') or 0)
3791 except ValueError:
3791 except ValueError:
3792 raise util.Abort(_('similarity must be a number'))
3792 raise util.Abort(_('similarity must be a number'))
3793 if sim < 0 or sim > 100:
3793 if sim < 0 or sim > 100:
3794 raise util.Abort(_('similarity must be between 0 and 100'))
3794 raise util.Abort(_('similarity must be between 0 and 100'))
3795 if sim and not update:
3795 if sim and not update:
3796 raise util.Abort(_('cannot use --similarity with --bypass'))
3796 raise util.Abort(_('cannot use --similarity with --bypass'))
3797
3797
3798 if update:
3798 if update:
3799 cmdutil.checkunfinished(repo)
3799 cmdutil.checkunfinished(repo)
3800 if (opts.get('exact') or not opts.get('force')) and update:
3800 if (opts.get('exact') or not opts.get('force')) and update:
3801 cmdutil.bailifchanged(repo)
3801 cmdutil.bailifchanged(repo)
3802
3802
3803 base = opts["base"]
3803 base = opts["base"]
3804 wlock = lock = tr = None
3804 wlock = lock = tr = None
3805 msgs = []
3805 msgs = []
3806
3806
3807
3807
3808 try:
3808 try:
3809 try:
3809 try:
3810 wlock = repo.wlock()
3810 wlock = repo.wlock()
3811 if not opts.get('no_commit'):
3811 if not opts.get('no_commit'):
3812 lock = repo.lock()
3812 lock = repo.lock()
3813 tr = repo.transaction('import')
3813 tr = repo.transaction('import')
3814 parents = repo.parents()
3814 parents = repo.parents()
3815 for patchurl in patches:
3815 for patchurl in patches:
3816 if patchurl == '-':
3816 if patchurl == '-':
3817 ui.status(_('applying patch from stdin\n'))
3817 ui.status(_('applying patch from stdin\n'))
3818 patchfile = ui.fin
3818 patchfile = ui.fin
3819 patchurl = 'stdin' # for error message
3819 patchurl = 'stdin' # for error message
3820 else:
3820 else:
3821 patchurl = os.path.join(base, patchurl)
3821 patchurl = os.path.join(base, patchurl)
3822 ui.status(_('applying %s\n') % patchurl)
3822 ui.status(_('applying %s\n') % patchurl)
3823 patchfile = hg.openpath(ui, patchurl)
3823 patchfile = hg.openpath(ui, patchurl)
3824
3824
3825 haspatch = False
3825 haspatch = False
3826 for hunk in patch.split(patchfile):
3826 for hunk in patch.split(patchfile):
3827 (msg, node) = cmdutil.tryimportone(ui, repo, hunk, parents,
3827 (msg, node) = cmdutil.tryimportone(ui, repo, hunk, parents,
3828 opts, msgs, hg.clean)
3828 opts, msgs, hg.clean)
3829 if msg:
3829 if msg:
3830 haspatch = True
3830 haspatch = True
3831 ui.note(msg + '\n')
3831 ui.note(msg + '\n')
3832 if update or opts.get('exact'):
3832 if update or opts.get('exact'):
3833 parents = repo.parents()
3833 parents = repo.parents()
3834 else:
3834 else:
3835 parents = [repo[node]]
3835 parents = [repo[node]]
3836
3836
3837 if not haspatch:
3837 if not haspatch:
3838 raise util.Abort(_('%s: no diffs found') % patchurl)
3838 raise util.Abort(_('%s: no diffs found') % patchurl)
3839
3839
3840 if tr:
3840 if tr:
3841 tr.close()
3841 tr.close()
3842 if msgs:
3842 if msgs:
3843 repo.savecommitmessage('\n* * *\n'.join(msgs))
3843 repo.savecommitmessage('\n* * *\n'.join(msgs))
3844 except: # re-raises
3844 except: # re-raises
3845 # wlock.release() indirectly calls dirstate.write(): since
3845 # wlock.release() indirectly calls dirstate.write(): since
3846 # we're crashing, we do not want to change the working dir
3846 # we're crashing, we do not want to change the working dir
3847 # parent after all, so make sure it writes nothing
3847 # parent after all, so make sure it writes nothing
3848 repo.dirstate.invalidate()
3848 repo.dirstate.invalidate()
3849 raise
3849 raise
3850 finally:
3850 finally:
3851 if tr:
3851 if tr:
3852 tr.release()
3852 tr.release()
3853 release(lock, wlock)
3853 release(lock, wlock)
3854
3854
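# Editor's note: a minimal, hypothetical sketch of the locking discipline used
# by import_() above -- wlock, then lock, then the transaction, released in
# reverse order in the finally block. The callables passed in are illustrative
# stand-ins, not part of Mercurial's API.
def _acquire_release_sketch(acquire_wlock, acquire_lock, open_transaction, work):
    wlock = acquire_wlock()          # working directory lock first
    lock = tr = None
    try:
        lock = acquire_lock()        # then the repository (store) lock
        tr = open_transaction()      # then the transaction
        work()
        tr.close()                   # commit the transaction on success
    finally:
        if tr:
            tr.release()             # rolls back unless close() was reached
        if lock:
            lock.release()
        wlock.release()              # release in reverse acquisition order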
3855 @command('incoming|in',
3855 @command('incoming|in',
3856 [('f', 'force', None,
3856 [('f', 'force', None,
3857 _('run even if remote repository is unrelated')),
3857 _('run even if remote repository is unrelated')),
3858 ('n', 'newest-first', None, _('show newest record first')),
3858 ('n', 'newest-first', None, _('show newest record first')),
3859 ('', 'bundle', '',
3859 ('', 'bundle', '',
3860 _('file to store the bundles into'), _('FILE')),
3860 _('file to store the bundles into'), _('FILE')),
3861 ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
3861 ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
3862 ('B', 'bookmarks', False, _("compare bookmarks")),
3862 ('B', 'bookmarks', False, _("compare bookmarks")),
3863 ('b', 'branch', [],
3863 ('b', 'branch', [],
3864 _('a specific branch you would like to pull'), _('BRANCH')),
3864 _('a specific branch you would like to pull'), _('BRANCH')),
3865 ] + logopts + remoteopts + subrepoopts,
3865 ] + logopts + remoteopts + subrepoopts,
3866 _('[-p] [-n] [-M] [-f] [-r REV]... [--bundle FILENAME] [SOURCE]'))
3866 _('[-p] [-n] [-M] [-f] [-r REV]... [--bundle FILENAME] [SOURCE]'))
3867 def incoming(ui, repo, source="default", **opts):
3867 def incoming(ui, repo, source="default", **opts):
3868 """show new changesets found in source
3868 """show new changesets found in source
3869
3869
3870 Show new changesets found in the specified path/URL or the default
3870 Show new changesets found in the specified path/URL or the default
3871 pull location. These are the changesets that would have been pulled
3871 pull location. These are the changesets that would have been pulled
3872 if a pull had been requested at the time you issued this command.
3872 if a pull had been requested at the time you issued this command.
3873
3873
3874 For a remote repository, using --bundle avoids downloading the
3874 For a remote repository, using --bundle avoids downloading the
3875 changesets twice if the incoming command is followed by a pull.
3875 changesets twice if the incoming command is followed by a pull.
3876
3876
3877 See pull for valid source format details.
3877 See pull for valid source format details.
3878
3878
3879 .. container:: verbose
3879 .. container:: verbose
3880
3880
3881 Examples:
3881 Examples:
3882
3882
3883 - show incoming changes with patches and full description::
3883 - show incoming changes with patches and full description::
3884
3884
3885 hg incoming -vp
3885 hg incoming -vp
3886
3886
3887 - show incoming changes excluding merges, store a bundle::
3887 - show incoming changes excluding merges, store a bundle::
3888
3888
3889 hg in -vpM --bundle incoming.hg
3889 hg in -vpM --bundle incoming.hg
3890 hg pull incoming.hg
3890 hg pull incoming.hg
3891
3891
3892 - briefly list changes inside a bundle::
3892 - briefly list changes inside a bundle::
3893
3893
3894 hg in changes.hg -T "{desc|firstline}\\n"
3894 hg in changes.hg -T "{desc|firstline}\\n"
3895
3895
3896 Returns 0 if there are incoming changes, 1 otherwise.
3896 Returns 0 if there are incoming changes, 1 otherwise.
3897 """
3897 """
3898 if opts.get('graph'):
3898 if opts.get('graph'):
3899 cmdutil.checkunsupportedgraphflags([], opts)
3899 cmdutil.checkunsupportedgraphflags([], opts)
3900 def display(other, chlist, displayer):
3900 def display(other, chlist, displayer):
3901 revdag = cmdutil.graphrevs(other, chlist, opts)
3901 revdag = cmdutil.graphrevs(other, chlist, opts)
3902 showparents = [ctx.node() for ctx in repo[None].parents()]
3902 showparents = [ctx.node() for ctx in repo[None].parents()]
3903 cmdutil.displaygraph(ui, revdag, displayer, showparents,
3903 cmdutil.displaygraph(ui, revdag, displayer, showparents,
3904 graphmod.asciiedges)
3904 graphmod.asciiedges)
3905
3905
3906 hg._incoming(display, lambda: 1, ui, repo, source, opts, buffered=True)
3906 hg._incoming(display, lambda: 1, ui, repo, source, opts, buffered=True)
3907 return 0
3907 return 0
3908
3908
3909 if opts.get('bundle') and opts.get('subrepos'):
3909 if opts.get('bundle') and opts.get('subrepos'):
3910 raise util.Abort(_('cannot combine --bundle and --subrepos'))
3910 raise util.Abort(_('cannot combine --bundle and --subrepos'))
3911
3911
3912 if opts.get('bookmarks'):
3912 if opts.get('bookmarks'):
3913 source, branches = hg.parseurl(ui.expandpath(source),
3913 source, branches = hg.parseurl(ui.expandpath(source),
3914 opts.get('branch'))
3914 opts.get('branch'))
3915 other = hg.peer(repo, opts, source)
3915 other = hg.peer(repo, opts, source)
3916 if 'bookmarks' not in other.listkeys('namespaces'):
3916 if 'bookmarks' not in other.listkeys('namespaces'):
3917 ui.warn(_("remote doesn't support bookmarks\n"))
3917 ui.warn(_("remote doesn't support bookmarks\n"))
3918 return 0
3918 return 0
3919 ui.status(_('comparing with %s\n') % util.hidepassword(source))
3919 ui.status(_('comparing with %s\n') % util.hidepassword(source))
3920 return bookmarks.diff(ui, repo, other)
3920 return bookmarks.diff(ui, repo, other)
3921
3921
3922 repo._subtoppath = ui.expandpath(source)
3922 repo._subtoppath = ui.expandpath(source)
3923 try:
3923 try:
3924 return hg.incoming(ui, repo, source, opts)
3924 return hg.incoming(ui, repo, source, opts)
3925 finally:
3925 finally:
3926 del repo._subtoppath
3926 del repo._subtoppath
3927
3927
3928
3928
3929 @command('^init', remoteopts, _('[-e CMD] [--remotecmd CMD] [DEST]'))
3929 @command('^init', remoteopts, _('[-e CMD] [--remotecmd CMD] [DEST]'))
3930 def init(ui, dest=".", **opts):
3930 def init(ui, dest=".", **opts):
3931 """create a new repository in the given directory
3931 """create a new repository in the given directory
3932
3932
3933 Initialize a new repository in the given directory. If the given
3933 Initialize a new repository in the given directory. If the given
3934 directory does not exist, it will be created.
3934 directory does not exist, it will be created.
3935
3935
3936 If no directory is given, the current directory is used.
3936 If no directory is given, the current directory is used.
3937
3937
3938 It is possible to specify an ``ssh://`` URL as the destination.
3938 It is possible to specify an ``ssh://`` URL as the destination.
3939 See :hg:`help urls` for more information.
3939 See :hg:`help urls` for more information.
3940
3940
3941 Returns 0 on success.
3941 Returns 0 on success.
3942 """
3942 """
3943 hg.peer(ui, opts, ui.expandpath(dest), create=True)
3943 hg.peer(ui, opts, ui.expandpath(dest), create=True)
3944
3944
3945 @command('locate',
3945 @command('locate',
3946 [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
3946 [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
3947 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
3947 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
3948 ('f', 'fullpath', None, _('print complete paths from the filesystem root')),
3948 ('f', 'fullpath', None, _('print complete paths from the filesystem root')),
3949 ] + walkopts,
3949 ] + walkopts,
3950 _('[OPTION]... [PATTERN]...'))
3950 _('[OPTION]... [PATTERN]...'))
3951 def locate(ui, repo, *pats, **opts):
3951 def locate(ui, repo, *pats, **opts):
3952 """locate files matching specific patterns
3952 """locate files matching specific patterns
3953
3953
3954 Print files under Mercurial control in the working directory whose
3954 Print files under Mercurial control in the working directory whose
3955 names match the given patterns.
3955 names match the given patterns.
3956
3956
3957 By default, this command searches all directories in the working
3957 By default, this command searches all directories in the working
3958 directory. To search just the current directory and its
3958 directory. To search just the current directory and its
3959 subdirectories, use "--include .".
3959 subdirectories, use "--include .".
3960
3960
3961 If no patterns are given to match, this command prints the names
3961 If no patterns are given to match, this command prints the names
3962 of all files under Mercurial control in the working directory.
3962 of all files under Mercurial control in the working directory.
3963
3963
3964 If you want to feed the output of this command into the "xargs"
3964 If you want to feed the output of this command into the "xargs"
3965 command, use the -0 option to both this command and "xargs". This
3965 command, use the -0 option to both this command and "xargs". This
3966 will avoid the problem of "xargs" treating single filenames that
3966 will avoid the problem of "xargs" treating single filenames that
3967 contain whitespace as multiple filenames.
3967 contain whitespace as multiple filenames.
3968
3968
3969 Returns 0 if a match is found, 1 otherwise.
3969 Returns 0 if a match is found, 1 otherwise.
3970 """
3970 """
3971 end = opts.get('print0') and '\0' or '\n'
3971 end = opts.get('print0') and '\0' or '\n'
3972 rev = scmutil.revsingle(repo, opts.get('rev'), None).node()
3972 rev = scmutil.revsingle(repo, opts.get('rev'), None).node()
3973
3973
3974 ret = 1
3974 ret = 1
3975 m = scmutil.match(repo[rev], pats, opts, default='relglob')
3975 m = scmutil.match(repo[rev], pats, opts, default='relglob')
3976 m.bad = lambda x, y: False
3976 m.bad = lambda x, y: False
3977 for abs in repo[rev].walk(m):
3977 for abs in repo[rev].walk(m):
3978 if not rev and abs not in repo.dirstate:
3978 if not rev and abs not in repo.dirstate:
3979 continue
3979 continue
3980 if opts.get('fullpath'):
3980 if opts.get('fullpath'):
3981 ui.write(repo.wjoin(abs), end)
3981 ui.write(repo.wjoin(abs), end)
3982 else:
3982 else:
3983 ui.write(((pats and m.rel(abs)) or abs), end)
3983 ui.write(((pats and m.rel(abs)) or abs), end)
3984 ret = 0
3984 ret = 0
3985
3985
3986 return ret
3986 return ret
3987
3987
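# Editor's note: locate() above picks its record separator with the old
# "cond and a or b" idiom. A small sketch of the equivalent conditional
# expression, shown only for clarity; 'print0' here is an illustrative flag.
def _record_separator_sketch(print0):
    # safe here because '\0' is a non-empty (truthy) string either way
    return '\0' if print0 else '\n'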
3988 @command('^log|history',
3988 @command('^log|history',
3989 [('f', 'follow', None,
3989 [('f', 'follow', None,
3990 _('follow changeset history, or file history across copies and renames')),
3990 _('follow changeset history, or file history across copies and renames')),
3991 ('', 'follow-first', None,
3991 ('', 'follow-first', None,
3992 _('only follow the first parent of merge changesets (DEPRECATED)')),
3992 _('only follow the first parent of merge changesets (DEPRECATED)')),
3993 ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
3993 ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
3994 ('C', 'copies', None, _('show copied files')),
3994 ('C', 'copies', None, _('show copied files')),
3995 ('k', 'keyword', [],
3995 ('k', 'keyword', [],
3996 _('do case-insensitive search for a given text'), _('TEXT')),
3996 _('do case-insensitive search for a given text'), _('TEXT')),
3997 ('r', 'rev', [], _('show the specified revision or range'), _('REV')),
3997 ('r', 'rev', [], _('show the specified revision or range'), _('REV')),
3998 ('', 'removed', None, _('include revisions where files were removed')),
3998 ('', 'removed', None, _('include revisions where files were removed')),
3999 ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
3999 ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
4000 ('u', 'user', [], _('revisions committed by user'), _('USER')),
4000 ('u', 'user', [], _('revisions committed by user'), _('USER')),
4001 ('', 'only-branch', [],
4001 ('', 'only-branch', [],
4002 _('show only changesets within the given named branch (DEPRECATED)'),
4002 _('show only changesets within the given named branch (DEPRECATED)'),
4003 _('BRANCH')),
4003 _('BRANCH')),
4004 ('b', 'branch', [],
4004 ('b', 'branch', [],
4005 _('show changesets within the given named branch'), _('BRANCH')),
4005 _('show changesets within the given named branch'), _('BRANCH')),
4006 ('P', 'prune', [],
4006 ('P', 'prune', [],
4007 _('do not display revision or any of its ancestors'), _('REV')),
4007 _('do not display revision or any of its ancestors'), _('REV')),
4008 ] + logopts + walkopts,
4008 ] + logopts + walkopts,
4009 _('[OPTION]... [FILE]'))
4009 _('[OPTION]... [FILE]'))
4010 def log(ui, repo, *pats, **opts):
4010 def log(ui, repo, *pats, **opts):
4011 """show revision history of entire repository or files
4011 """show revision history of entire repository or files
4012
4012
4013 Print the revision history of the specified files or the entire
4013 Print the revision history of the specified files or the entire
4014 project.
4014 project.
4015
4015
4016 If no revision range is specified, the default is ``tip:0`` unless
4016 If no revision range is specified, the default is ``tip:0`` unless
4017 --follow is set, in which case the working directory parent is
4017 --follow is set, in which case the working directory parent is
4018 used as the starting revision.
4018 used as the starting revision.
4019
4019
4020 File history is shown without following rename or copy history of
4020 File history is shown without following rename or copy history of
4021 files. Use -f/--follow with a filename to follow history across
4021 files. Use -f/--follow with a filename to follow history across
4022 renames and copies. --follow without a filename will only show
4022 renames and copies. --follow without a filename will only show
4023 ancestors or descendants of the starting revision.
4023 ancestors or descendants of the starting revision.
4024
4024
4025 By default this command prints revision number and changeset id,
4025 By default this command prints revision number and changeset id,
4026 tags, non-trivial parents, user, date and time, and a summary for
4026 tags, non-trivial parents, user, date and time, and a summary for
4027 each commit. When the -v/--verbose switch is used, the list of
4027 each commit. When the -v/--verbose switch is used, the list of
4028 changed files and full commit message are shown.
4028 changed files and full commit message are shown.
4029
4029
4030 With --graph the revisions are shown as an ASCII art DAG with the most
4030 With --graph the revisions are shown as an ASCII art DAG with the most
4031 recent changeset at the top.
4031 recent changeset at the top.
4032 'o' is a changeset, '@' is a working directory parent, 'x' is obsolete,
4032 'o' is a changeset, '@' is a working directory parent, 'x' is obsolete,
4033 and '+' represents a fork where the changeset from the lines below is a
4033 and '+' represents a fork where the changeset from the lines below is a
4034 parent of the 'o' merge on the same line.
4034 parent of the 'o' merge on the same line.
4035
4035
4036 .. note::
4036 .. note::
4037
4037
4038 log -p/--patch may generate unexpected diff output for merge
4038 log -p/--patch may generate unexpected diff output for merge
4039 changesets, as it will only compare the merge changeset against
4039 changesets, as it will only compare the merge changeset against
4040 its first parent. Also, only files different from BOTH parents
4040 its first parent. Also, only files different from BOTH parents
4041 will appear in files:.
4041 will appear in files:.
4042
4042
4043 .. note::
4043 .. note::
4044
4044
4045 for performance reasons, log FILE may omit duplicate changes
4045 for performance reasons, log FILE may omit duplicate changes
4046 made on branches and will not show deletions. To see all
4046 made on branches and will not show deletions. To see all
4047 changes including duplicates and deletions, use the --removed
4047 changes including duplicates and deletions, use the --removed
4048 switch.
4048 switch.
4049
4049
4050 .. container:: verbose
4050 .. container:: verbose
4051
4051
4052 Some examples:
4052 Some examples:
4053
4053
4054 - changesets with full descriptions and file lists::
4054 - changesets with full descriptions and file lists::
4055
4055
4056 hg log -v
4056 hg log -v
4057
4057
4058 - changesets ancestral to the working directory::
4058 - changesets ancestral to the working directory::
4059
4059
4060 hg log -f
4060 hg log -f
4061
4061
4062 - last 10 commits on the current branch::
4062 - last 10 commits on the current branch::
4063
4063
4064 hg log -l 10 -b .
4064 hg log -l 10 -b .
4065
4065
4066 - changesets showing all modifications of a file, including removals::
4066 - changesets showing all modifications of a file, including removals::
4067
4067
4068 hg log --removed file.c
4068 hg log --removed file.c
4069
4069
4070 - all changesets that touch a directory, with diffs, excluding merges::
4070 - all changesets that touch a directory, with diffs, excluding merges::
4071
4071
4072 hg log -Mp lib/
4072 hg log -Mp lib/
4073
4073
4074 - all revision numbers that match a keyword::
4074 - all revision numbers that match a keyword::
4075
4075
4076 hg log -k bug --template "{rev}\\n"
4076 hg log -k bug --template "{rev}\\n"
4077
4077
4078 - check if a given changeset is included in a tagged release::
4078 - check if a given changeset is included in a tagged release::
4079
4079
4080 hg log -r "a21ccf and ancestor(1.9)"
4080 hg log -r "a21ccf and ancestor(1.9)"
4081
4081
4082 - find all changesets by some user in a date range::
4082 - find all changesets by some user in a date range::
4083
4083
4084 hg log -k alice -d "may 2008 to jul 2008"
4084 hg log -k alice -d "may 2008 to jul 2008"
4085
4085
4086 - summary of all changesets after the last tag::
4086 - summary of all changesets after the last tag::
4087
4087
4088 hg log -r "last(tagged())::" --template "{desc|firstline}\\n"
4088 hg log -r "last(tagged())::" --template "{desc|firstline}\\n"
4089
4089
4090 See :hg:`help dates` for a list of formats valid for -d/--date.
4090 See :hg:`help dates` for a list of formats valid for -d/--date.
4091
4091
4092 See :hg:`help revisions` and :hg:`help revsets` for more about
4092 See :hg:`help revisions` and :hg:`help revsets` for more about
4093 specifying revisions.
4093 specifying revisions.
4094
4094
4095 See :hg:`help templates` for more about pre-packaged styles and
4095 See :hg:`help templates` for more about pre-packaged styles and
4096 specifying custom templates.
4096 specifying custom templates.
4097
4097
4098 Returns 0 on success.
4098 Returns 0 on success.
4099 """
4099 """
4100 if opts.get('graph'):
4100 if opts.get('graph'):
4101 return cmdutil.graphlog(ui, repo, *pats, **opts)
4101 return cmdutil.graphlog(ui, repo, *pats, **opts)
4102
4102
4103 matchfn = scmutil.match(repo[None], pats, opts)
4103 matchfn = scmutil.match(repo[None], pats, opts)
4104 limit = cmdutil.loglimit(opts)
4104 limit = cmdutil.loglimit(opts)
4105 count = 0
4105 count = 0
4106
4106
4107 getrenamed, endrev = None, None
4107 getrenamed, endrev = None, None
4108 if opts.get('copies'):
4108 if opts.get('copies'):
4109 if opts.get('rev'):
4109 if opts.get('rev'):
4110 endrev = max(scmutil.revrange(repo, opts.get('rev'))) + 1
4110 endrev = max(scmutil.revrange(repo, opts.get('rev'))) + 1
4111 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
4111 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
4112
4112
4113 df = False
4113 df = False
4114 if opts.get("date"):
4114 if opts.get("date"):
4115 df = util.matchdate(opts["date"])
4115 df = util.matchdate(opts["date"])
4116
4116
4117 branches = opts.get('branch', []) + opts.get('only_branch', [])
4117 branches = opts.get('branch', []) + opts.get('only_branch', [])
4118 opts['branch'] = [repo.lookupbranch(b) for b in branches]
4118 opts['branch'] = [repo.lookupbranch(b) for b in branches]
4119
4119
4120 displayer = cmdutil.show_changeset(ui, repo, opts, True)
4120 displayer = cmdutil.show_changeset(ui, repo, opts, True)
4121 def prep(ctx, fns):
4121 def prep(ctx, fns):
4122 rev = ctx.rev()
4122 rev = ctx.rev()
4123 parents = [p for p in repo.changelog.parentrevs(rev)
4123 parents = [p for p in repo.changelog.parentrevs(rev)
4124 if p != nullrev]
4124 if p != nullrev]
4125 if opts.get('no_merges') and len(parents) == 2:
4125 if opts.get('no_merges') and len(parents) == 2:
4126 return
4126 return
4127 if opts.get('only_merges') and len(parents) != 2:
4127 if opts.get('only_merges') and len(parents) != 2:
4128 return
4128 return
4129 if opts.get('branch') and ctx.branch() not in opts['branch']:
4129 if opts.get('branch') and ctx.branch() not in opts['branch']:
4130 return
4130 return
4131 if df and not df(ctx.date()[0]):
4131 if df and not df(ctx.date()[0]):
4132 return
4132 return
4133
4133
4134 lower = encoding.lower
4134 lower = encoding.lower
4135 if opts.get('user'):
4135 if opts.get('user'):
4136 luser = lower(ctx.user())
4136 luser = lower(ctx.user())
4137 for k in [lower(x) for x in opts['user']]:
4137 for k in [lower(x) for x in opts['user']]:
4138 if (k in luser):
4138 if (k in luser):
4139 break
4139 break
4140 else:
4140 else:
4141 return
4141 return
4142 if opts.get('keyword'):
4142 if opts.get('keyword'):
4143 luser = lower(ctx.user())
4143 luser = lower(ctx.user())
4144 ldesc = lower(ctx.description())
4144 ldesc = lower(ctx.description())
4145 lfiles = lower(" ".join(ctx.files()))
4145 lfiles = lower(" ".join(ctx.files()))
4146 for k in [lower(x) for x in opts['keyword']]:
4146 for k in [lower(x) for x in opts['keyword']]:
4147 if (k in luser or k in ldesc or k in lfiles):
4147 if (k in luser or k in ldesc or k in lfiles):
4148 break
4148 break
4149 else:
4149 else:
4150 return
4150 return
4151
4151
4152 copies = None
4152 copies = None
4153 if getrenamed is not None and rev:
4153 if getrenamed is not None and rev:
4154 copies = []
4154 copies = []
4155 for fn in ctx.files():
4155 for fn in ctx.files():
4156 rename = getrenamed(fn, rev)
4156 rename = getrenamed(fn, rev)
4157 if rename:
4157 if rename:
4158 copies.append((fn, rename[0]))
4158 copies.append((fn, rename[0]))
4159
4159
4160 revmatchfn = None
4160 revmatchfn = None
4161 if opts.get('patch') or opts.get('stat'):
4161 if opts.get('patch') or opts.get('stat'):
4162 if opts.get('follow') or opts.get('follow_first'):
4162 if opts.get('follow') or opts.get('follow_first'):
4163 # note: this might be wrong when following through merges
4163 # note: this might be wrong when following through merges
4164 revmatchfn = scmutil.match(repo[None], fns, default='path')
4164 revmatchfn = scmutil.match(repo[None], fns, default='path')
4165 else:
4165 else:
4166 revmatchfn = matchfn
4166 revmatchfn = matchfn
4167
4167
4168 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
4168 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
4169
4169
4170 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
4170 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
4171 if displayer.flush(ctx.rev()):
4171 if displayer.flush(ctx.rev()):
4172 count += 1
4172 count += 1
4173 if count == limit:
4173 if count == limit:
4174 break
4174 break
4175 displayer.close()
4175 displayer.close()
4176
4176
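# Editor's note: the user/keyword filtering in prep() above relies on Python's
# for/else -- the else branch runs only when the loop never hit break, i.e.
# when no keyword matched. A self-contained sketch of that idiom; the argument
# names are illustrative, not Mercurial API.
def _keyword_match_sketch(keywords, user, desc, files):
    if not keywords:
        return True                      # no filter requested
    haystack = (user + " " + desc + " " + " ".join(files)).lower()
    for k in [x.lower() for x in keywords]:
        if k in haystack:
            break                        # some keyword matched: keep revision
    else:
        return False                     # loop finished without break: skip it
    return True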
4177 @command('manifest',
4177 @command('manifest',
4178 [('r', 'rev', '', _('revision to display'), _('REV')),
4178 [('r', 'rev', '', _('revision to display'), _('REV')),
4179 ('', 'all', False, _("list files from all revisions"))],
4179 ('', 'all', False, _("list files from all revisions"))],
4180 _('[-r REV]'))
4180 _('[-r REV]'))
4181 def manifest(ui, repo, node=None, rev=None, **opts):
4181 def manifest(ui, repo, node=None, rev=None, **opts):
4182 """output the current or given revision of the project manifest
4182 """output the current or given revision of the project manifest
4183
4183
4184 Print a list of version controlled files for the given revision.
4184 Print a list of version controlled files for the given revision.
4185 If no revision is given, the first parent of the working directory
4185 If no revision is given, the first parent of the working directory
4186 is used, or the null revision if no revision is checked out.
4186 is used, or the null revision if no revision is checked out.
4187
4187
4188 With -v, print file permissions, symlink and executable bits.
4188 With -v, print file permissions, symlink and executable bits.
4189 With --debug, print file revision hashes.
4189 With --debug, print file revision hashes.
4190
4190
4191 If option --all is specified, the list of all files from all revisions
4191 If option --all is specified, the list of all files from all revisions
4192 is printed. This includes deleted and renamed files.
4192 is printed. This includes deleted and renamed files.
4193
4193
4194 Returns 0 on success.
4194 Returns 0 on success.
4195 """
4195 """
4196
4196
4197 fm = ui.formatter('manifest', opts)
4197 fm = ui.formatter('manifest', opts)
4198
4198
4199 if opts.get('all'):
4199 if opts.get('all'):
4200 if rev or node:
4200 if rev or node:
4201 raise util.Abort(_("can't specify a revision with --all"))
4201 raise util.Abort(_("can't specify a revision with --all"))
4202
4202
4203 res = []
4203 res = []
4204 prefix = "data/"
4204 prefix = "data/"
4205 suffix = ".i"
4205 suffix = ".i"
4206 plen = len(prefix)
4206 plen = len(prefix)
4207 slen = len(suffix)
4207 slen = len(suffix)
4208 lock = repo.lock()
4208 lock = repo.lock()
4209 try:
4209 try:
4210 for fn, b, size in repo.store.datafiles():
4210 for fn, b, size in repo.store.datafiles():
4211 if size != 0 and fn[-slen:] == suffix and fn[:plen] == prefix:
4211 if size != 0 and fn[-slen:] == suffix and fn[:plen] == prefix:
4212 res.append(fn[plen:-slen])
4212 res.append(fn[plen:-slen])
4213 finally:
4213 finally:
4214 lock.release()
4214 lock.release()
4215 for f in res:
4215 for f in res:
4216 fm.startitem()
4216 fm.startitem()
4217 fm.write("path", '%s\n', f)
4217 fm.write("path", '%s\n', f)
4218 fm.end()
4218 fm.end()
4219 return
4219 return
4220
4220
4221 if rev and node:
4221 if rev and node:
4222 raise util.Abort(_("please specify just one revision"))
4222 raise util.Abort(_("please specify just one revision"))
4223
4223
4224 if not node:
4224 if not node:
4225 node = rev
4225 node = rev
4226
4226
4227 char = {'l': '@', 'x': '*', '': ''}
4227 char = {'l': '@', 'x': '*', '': ''}
4228 mode = {'l': '644', 'x': '755', '': '644'}
4228 mode = {'l': '644', 'x': '755', '': '644'}
4229 ctx = scmutil.revsingle(repo, node)
4229 ctx = scmutil.revsingle(repo, node)
4230 mf = ctx.manifest()
4230 mf = ctx.manifest()
4231 for f in ctx:
4231 for f in ctx:
4232 fm.startitem()
4232 fm.startitem()
4233 fl = ctx[f].flags()
4233 fl = ctx[f].flags()
4234 fm.condwrite(ui.debugflag, 'hash', '%s ', hex(mf[f]))
4234 fm.condwrite(ui.debugflag, 'hash', '%s ', hex(mf[f]))
4235 fm.condwrite(ui.verbose, 'mode type', '%s %1s ', mode[fl], char[fl])
4235 fm.condwrite(ui.verbose, 'mode type', '%s %1s ', mode[fl], char[fl])
4236 fm.write('path', '%s\n', f)
4236 fm.write('path', '%s\n', f)
4237 fm.end()
4237 fm.end()
4238
4238
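# Editor's note: with --all, manifest() above recovers tracked paths from
# filelog index names in the store by stripping the "data/" prefix and ".i"
# suffix. A tiny sketch of that mapping (store-name encoding is ignored and
# the sample name is made up).
def _storename_to_path_sketch(storename, prefix="data/", suffix=".i"):
    # e.g. "data/foo/bar.c.i" -> "foo/bar.c"; anything else is not a filelog
    if storename.startswith(prefix) and storename.endswith(suffix):
        return storename[len(prefix):-len(suffix)]
    return None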
4239 @command('^merge',
4239 @command('^merge',
4240 [('f', 'force', None,
4240 [('f', 'force', None,
4241 _('force a merge including outstanding changes (DEPRECATED)')),
4241 _('force a merge including outstanding changes (DEPRECATED)')),
4242 ('r', 'rev', '', _('revision to merge'), _('REV')),
4242 ('r', 'rev', '', _('revision to merge'), _('REV')),
4243 ('P', 'preview', None,
4243 ('P', 'preview', None,
4244 _('review revisions to merge (no merge is performed)'))
4244 _('review revisions to merge (no merge is performed)'))
4245 ] + mergetoolopts,
4245 ] + mergetoolopts,
4246 _('[-P] [-f] [[-r] REV]'))
4246 _('[-P] [-f] [[-r] REV]'))
4247 def merge(ui, repo, node=None, **opts):
4247 def merge(ui, repo, node=None, **opts):
4248 """merge working directory with another revision
4248 """merge working directory with another revision
4249
4249
4250 The current working directory is updated with all changes made in
4250 The current working directory is updated with all changes made in
4251 the requested revision since the last common predecessor revision.
4251 the requested revision since the last common predecessor revision.
4252
4252
4253 Files that changed between either parent are marked as changed for
4253 Files that changed between either parent are marked as changed for
4254 the next commit and a commit must be performed before any further
4254 the next commit and a commit must be performed before any further
4255 updates to the repository are allowed. The next commit will have
4255 updates to the repository are allowed. The next commit will have
4256 two parents.
4256 two parents.
4257
4257
4258 ``--tool`` can be used to specify the merge tool used for file
4258 ``--tool`` can be used to specify the merge tool used for file
4259 merges. It overrides the HGMERGE environment variable and your
4259 merges. It overrides the HGMERGE environment variable and your
4260 configuration files. See :hg:`help merge-tools` for options.
4260 configuration files. See :hg:`help merge-tools` for options.
4261
4261
4262 If no revision is specified, the working directory's parent is a
4262 If no revision is specified, the working directory's parent is a
4263 head revision, and the current branch contains exactly one other
4263 head revision, and the current branch contains exactly one other
4264 head, the other head is merged with by default. Otherwise, an
4264 head, the other head is merged with by default. Otherwise, an
4265 explicit revision with which to merge must be provided.
4265 explicit revision with which to merge must be provided.
4266
4266
4267 :hg:`resolve` must be used to resolve unresolved files.
4267 :hg:`resolve` must be used to resolve unresolved files.
4268
4268
4269 To undo an uncommitted merge, use :hg:`update --clean .` which
4269 To undo an uncommitted merge, use :hg:`update --clean .` which
4270 will check out a clean copy of the original merge parent, losing
4270 will check out a clean copy of the original merge parent, losing
4271 all changes.
4271 all changes.
4272
4272
4273 Returns 0 on success, 1 if there are unresolved files.
4273 Returns 0 on success, 1 if there are unresolved files.
4274 """
4274 """
4275
4275
4276 if opts.get('rev') and node:
4276 if opts.get('rev') and node:
4277 raise util.Abort(_("please specify just one revision"))
4277 raise util.Abort(_("please specify just one revision"))
4278 if not node:
4278 if not node:
4279 node = opts.get('rev')
4279 node = opts.get('rev')
4280
4280
4281 if node:
4281 if node:
4282 node = scmutil.revsingle(repo, node).node()
4282 node = scmutil.revsingle(repo, node).node()
4283
4283
4284 if not node and repo._bookmarkcurrent:
4284 if not node and repo._bookmarkcurrent:
4285 bmheads = repo.bookmarkheads(repo._bookmarkcurrent)
4285 bmheads = repo.bookmarkheads(repo._bookmarkcurrent)
4286 curhead = repo[repo._bookmarkcurrent].node()
4286 curhead = repo[repo._bookmarkcurrent].node()
4287 if len(bmheads) == 2:
4287 if len(bmheads) == 2:
4288 if curhead == bmheads[0]:
4288 if curhead == bmheads[0]:
4289 node = bmheads[1]
4289 node = bmheads[1]
4290 else:
4290 else:
4291 node = bmheads[0]
4291 node = bmheads[0]
4292 elif len(bmheads) > 2:
4292 elif len(bmheads) > 2:
4293 raise util.Abort(_("multiple matching bookmarks to merge - "
4293 raise util.Abort(_("multiple matching bookmarks to merge - "
4294 "please merge with an explicit rev or bookmark"),
4294 "please merge with an explicit rev or bookmark"),
4295 hint=_("run 'hg heads' to see all heads"))
4295 hint=_("run 'hg heads' to see all heads"))
4296 elif len(bmheads) <= 1:
4296 elif len(bmheads) <= 1:
4297 raise util.Abort(_("no matching bookmark to merge - "
4297 raise util.Abort(_("no matching bookmark to merge - "
4298 "please merge with an explicit rev or bookmark"),
4298 "please merge with an explicit rev or bookmark"),
4299 hint=_("run 'hg heads' to see all heads"))
4299 hint=_("run 'hg heads' to see all heads"))
4300
4300
4301 if not node and not repo._bookmarkcurrent:
4301 if not node and not repo._bookmarkcurrent:
4302 branch = repo[None].branch()
4302 branch = repo[None].branch()
4303 bheads = repo.branchheads(branch)
4303 bheads = repo.branchheads(branch)
4304 nbhs = [bh for bh in bheads if not repo[bh].bookmarks()]
4304 nbhs = [bh for bh in bheads if not repo[bh].bookmarks()]
4305
4305
4306 if len(nbhs) > 2:
4306 if len(nbhs) > 2:
4307 raise util.Abort(_("branch '%s' has %d heads - "
4307 raise util.Abort(_("branch '%s' has %d heads - "
4308 "please merge with an explicit rev")
4308 "please merge with an explicit rev")
4309 % (branch, len(bheads)),
4309 % (branch, len(bheads)),
4310 hint=_("run 'hg heads .' to see heads"))
4310 hint=_("run 'hg heads .' to see heads"))
4311
4311
4312 parent = repo.dirstate.p1()
4312 parent = repo.dirstate.p1()
4313 if len(nbhs) <= 1:
4313 if len(nbhs) <= 1:
4314 if len(bheads) > 1:
4314 if len(bheads) > 1:
4315 raise util.Abort(_("heads are bookmarked - "
4315 raise util.Abort(_("heads are bookmarked - "
4316 "please merge with an explicit rev"),
4316 "please merge with an explicit rev"),
4317 hint=_("run 'hg heads' to see all heads"))
4317 hint=_("run 'hg heads' to see all heads"))
4318 if len(repo.heads()) > 1:
4318 if len(repo.heads()) > 1:
4319 raise util.Abort(_("branch '%s' has one head - "
4319 raise util.Abort(_("branch '%s' has one head - "
4320 "please merge with an explicit rev")
4320 "please merge with an explicit rev")
4321 % branch,
4321 % branch,
4322 hint=_("run 'hg heads' to see all heads"))
4322 hint=_("run 'hg heads' to see all heads"))
4323 msg, hint = _('nothing to merge'), None
4323 msg, hint = _('nothing to merge'), None
4324 if parent != repo.lookup(branch):
4324 if parent != repo.lookup(branch):
4325 hint = _("use 'hg update' instead")
4325 hint = _("use 'hg update' instead")
4326 raise util.Abort(msg, hint=hint)
4326 raise util.Abort(msg, hint=hint)
4327
4327
4328 if parent not in bheads:
4328 if parent not in bheads:
4329 raise util.Abort(_('working directory not at a head revision'),
4329 raise util.Abort(_('working directory not at a head revision'),
4330 hint=_("use 'hg update' or merge with an "
4330 hint=_("use 'hg update' or merge with an "
4331 "explicit revision"))
4331 "explicit revision"))
4332 if parent == nbhs[0]:
4332 if parent == nbhs[0]:
4333 node = nbhs[-1]
4333 node = nbhs[-1]
4334 else:
4334 else:
4335 node = nbhs[0]
4335 node = nbhs[0]
4336
4336
4337 if opts.get('preview'):
4337 if opts.get('preview'):
4338 # find nodes that are ancestors of p2 but not of p1
4338 # find nodes that are ancestors of p2 but not of p1
4339 p1 = repo.lookup('.')
4339 p1 = repo.lookup('.')
4340 p2 = repo.lookup(node)
4340 p2 = repo.lookup(node)
4341 nodes = repo.changelog.findmissing(common=[p1], heads=[p2])
4341 nodes = repo.changelog.findmissing(common=[p1], heads=[p2])
4342
4342
4343 displayer = cmdutil.show_changeset(ui, repo, opts)
4343 displayer = cmdutil.show_changeset(ui, repo, opts)
4344 for node in nodes:
4344 for node in nodes:
4345 displayer.show(repo[node])
4345 displayer.show(repo[node])
4346 displayer.close()
4346 displayer.close()
4347 return 0
4347 return 0
4348
4348
4349 try:
4349 try:
4350 # ui.forcemerge is an internal variable, do not document
4350 # ui.forcemerge is an internal variable, do not document
4351 repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''), 'merge')
4351 repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''), 'merge')
4352 return hg.merge(repo, node, force=opts.get('force'))
4352 return hg.merge(repo, node, force=opts.get('force'))
4353 finally:
4353 finally:
4354 ui.setconfig('ui', 'forcemerge', '', 'merge')
4354 ui.setconfig('ui', 'forcemerge', '', 'merge')
4355
4355
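# Editor's note: when exactly two candidate heads exist (two heads of the
# current bookmark, or two unbookmarked branch heads), merge() above picks
# whichever head the working directory is not already on. A minimal sketch of
# that choice, with illustrative arguments.
def _pick_other_head_sketch(heads, current):
    assert len(heads) == 2
    return heads[1] if current == heads[0] else heads[0]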
4356 @command('outgoing|out',
4356 @command('outgoing|out',
4357 [('f', 'force', None, _('run even when the destination is unrelated')),
4357 [('f', 'force', None, _('run even when the destination is unrelated')),
4358 ('r', 'rev', [],
4358 ('r', 'rev', [],
4359 _('a changeset intended to be included in the destination'), _('REV')),
4359 _('a changeset intended to be included in the destination'), _('REV')),
4360 ('n', 'newest-first', None, _('show newest record first')),
4360 ('n', 'newest-first', None, _('show newest record first')),
4361 ('B', 'bookmarks', False, _('compare bookmarks')),
4361 ('B', 'bookmarks', False, _('compare bookmarks')),
4362 ('b', 'branch', [], _('a specific branch you would like to push'),
4362 ('b', 'branch', [], _('a specific branch you would like to push'),
4363 _('BRANCH')),
4363 _('BRANCH')),
4364 ] + logopts + remoteopts + subrepoopts,
4364 ] + logopts + remoteopts + subrepoopts,
4365 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]'))
4365 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]'))
4366 def outgoing(ui, repo, dest=None, **opts):
4366 def outgoing(ui, repo, dest=None, **opts):
4367 """show changesets not found in the destination
4367 """show changesets not found in the destination
4368
4368
4369 Show changesets not found in the specified destination repository
4369 Show changesets not found in the specified destination repository
4370 or the default push location. These are the changesets that would
4370 or the default push location. These are the changesets that would
4371 be pushed if a push was requested.
4371 be pushed if a push was requested.
4372
4372
4373 See pull for details of valid destination formats.
4373 See pull for details of valid destination formats.
4374
4374
4375 Returns 0 if there are outgoing changes, 1 otherwise.
4375 Returns 0 if there are outgoing changes, 1 otherwise.
4376 """
4376 """
4377 if opts.get('graph'):
4377 if opts.get('graph'):
4378 cmdutil.checkunsupportedgraphflags([], opts)
4378 cmdutil.checkunsupportedgraphflags([], opts)
4379 o = hg._outgoing(ui, repo, dest, opts)
4379 o = hg._outgoing(ui, repo, dest, opts)
4380 if o is None:
4380 if o is None:
4381 return
4381 return
4382
4382
4383 revdag = cmdutil.graphrevs(repo, o, opts)
4383 revdag = cmdutil.graphrevs(repo, o, opts)
4384 displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
4384 displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
4385 showparents = [ctx.node() for ctx in repo[None].parents()]
4385 showparents = [ctx.node() for ctx in repo[None].parents()]
4386 cmdutil.displaygraph(ui, revdag, displayer, showparents,
4386 cmdutil.displaygraph(ui, revdag, displayer, showparents,
4387 graphmod.asciiedges)
4387 graphmod.asciiedges)
4388 return 0
4388 return 0
4389
4389
4390 if opts.get('bookmarks'):
4390 if opts.get('bookmarks'):
4391 dest = ui.expandpath(dest or 'default-push', dest or 'default')
4391 dest = ui.expandpath(dest or 'default-push', dest or 'default')
4392 dest, branches = hg.parseurl(dest, opts.get('branch'))
4392 dest, branches = hg.parseurl(dest, opts.get('branch'))
4393 other = hg.peer(repo, opts, dest)
4393 other = hg.peer(repo, opts, dest)
4394 if 'bookmarks' not in other.listkeys('namespaces'):
4394 if 'bookmarks' not in other.listkeys('namespaces'):
4395 ui.warn(_("remote doesn't support bookmarks\n"))
4395 ui.warn(_("remote doesn't support bookmarks\n"))
4396 return 0
4396 return 0
4397 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
4397 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
4398 return bookmarks.diff(ui, other, repo)
4398 return bookmarks.diff(ui, other, repo)
4399
4399
4400 repo._subtoppath = ui.expandpath(dest or 'default-push', dest or 'default')
4400 repo._subtoppath = ui.expandpath(dest or 'default-push', dest or 'default')
4401 try:
4401 try:
4402 return hg.outgoing(ui, repo, dest, opts)
4402 return hg.outgoing(ui, repo, dest, opts)
4403 finally:
4403 finally:
4404 del repo._subtoppath
4404 del repo._subtoppath
4405
4405
4406 @command('parents',
4406 @command('parents',
4407 [('r', 'rev', '', _('show parents of the specified revision'), _('REV')),
4407 [('r', 'rev', '', _('show parents of the specified revision'), _('REV')),
4408 ] + templateopts,
4408 ] + templateopts,
4409 _('[-r REV] [FILE]'))
4409 _('[-r REV] [FILE]'))
4410 def parents(ui, repo, file_=None, **opts):
4410 def parents(ui, repo, file_=None, **opts):
4411 """show the parents of the working directory or revision
4411 """show the parents of the working directory or revision
4412
4412
4413 Print the working directory's parent revisions. If a revision is
4413 Print the working directory's parent revisions. If a revision is
4414 given via -r/--rev, the parent of that revision will be printed.
4414 given via -r/--rev, the parent of that revision will be printed.
4415 If a file argument is given, the revision in which the file was
4415 If a file argument is given, the revision in which the file was
4416 last changed (before the working directory revision or the
4416 last changed (before the working directory revision or the
4417 argument to --rev if given) is printed.
4417 argument to --rev if given) is printed.
4418
4418
4419 Returns 0 on success.
4419 Returns 0 on success.
4420 """
4420 """
4421
4421
4422 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
4422 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
4423
4423
4424 if file_:
4424 if file_:
4425 m = scmutil.match(ctx, (file_,), opts)
4425 m = scmutil.match(ctx, (file_,), opts)
4426 if m.anypats() or len(m.files()) != 1:
4426 if m.anypats() or len(m.files()) != 1:
4427 raise util.Abort(_('can only specify an explicit filename'))
4427 raise util.Abort(_('can only specify an explicit filename'))
4428 file_ = m.files()[0]
4428 file_ = m.files()[0]
4429 filenodes = []
4429 filenodes = []
4430 for cp in ctx.parents():
4430 for cp in ctx.parents():
4431 if not cp:
4431 if not cp:
4432 continue
4432 continue
4433 try:
4433 try:
4434 filenodes.append(cp.filenode(file_))
4434 filenodes.append(cp.filenode(file_))
4435 except error.LookupError:
4435 except error.LookupError:
4436 pass
4436 pass
4437 if not filenodes:
4437 if not filenodes:
4438 raise util.Abort(_("'%s' not found in manifest!") % file_)
4438 raise util.Abort(_("'%s' not found in manifest!") % file_)
4439 p = []
4439 p = []
4440 for fn in filenodes:
4440 for fn in filenodes:
4441 fctx = repo.filectx(file_, fileid=fn)
4441 fctx = repo.filectx(file_, fileid=fn)
4442 p.append(fctx.node())
4442 p.append(fctx.node())
4443 else:
4443 else:
4444 p = [cp.node() for cp in ctx.parents()]
4444 p = [cp.node() for cp in ctx.parents()]
4445
4445
4446 displayer = cmdutil.show_changeset(ui, repo, opts)
4446 displayer = cmdutil.show_changeset(ui, repo, opts)
4447 for n in p:
4447 for n in p:
4448 if n != nullid:
4448 if n != nullid:
4449 displayer.show(repo[n])
4449 displayer.show(repo[n])
4450 displayer.close()
4450 displayer.close()
4451
4451
4452 @command('paths', [], _('[NAME]'))
4452 @command('paths', [], _('[NAME]'))
4453 def paths(ui, repo, search=None):
4453 def paths(ui, repo, search=None):
4454 """show aliases for remote repositories
4454 """show aliases for remote repositories
4455
4455
4456 Show definition of symbolic path name NAME. If no name is given,
4456 Show definition of symbolic path name NAME. If no name is given,
4457 show definition of all available names.
4457 show definition of all available names.
4458
4458
4459 Option -q/--quiet suppresses all output when searching for NAME
4459 Option -q/--quiet suppresses all output when searching for NAME
4460 and shows only the path names when listing all definitions.
4460 and shows only the path names when listing all definitions.
4461
4461
4462 Path names are defined in the [paths] section of your
4462 Path names are defined in the [paths] section of your
4463 configuration file and in ``/etc/mercurial/hgrc``. If run inside a
4463 configuration file and in ``/etc/mercurial/hgrc``. If run inside a
4464 repository, ``.hg/hgrc`` is used, too.
4464 repository, ``.hg/hgrc`` is used, too.
4465
4465
4466 The path names ``default`` and ``default-push`` have a special
4466 The path names ``default`` and ``default-push`` have a special
4467 meaning. When performing a push or pull operation, they are used
4467 meaning. When performing a push or pull operation, they are used
4468 as fallbacks if no location is specified on the command-line.
4468 as fallbacks if no location is specified on the command-line.
4469 When ``default-push`` is set, it will be used for push and
4469 When ``default-push`` is set, it will be used for push and
4470 ``default`` will be used for pull; otherwise ``default`` is used
4470 ``default`` will be used for pull; otherwise ``default`` is used
4471 as the fallback for both. When cloning a repository, the clone
4471 as the fallback for both. When cloning a repository, the clone
4472 source is written as ``default`` in ``.hg/hgrc``. Note that
4472 source is written as ``default`` in ``.hg/hgrc``. Note that
4473 ``default`` and ``default-push`` apply to all inbound (e.g.
4473 ``default`` and ``default-push`` apply to all inbound (e.g.
4474 :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email` and
4474 :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email` and
4475 :hg:`bundle`) operations.
4475 :hg:`bundle`) operations.
4476
4476
4477 See :hg:`help urls` for more information.
4477 See :hg:`help urls` for more information.
4478
4478
4479 Returns 0 on success.
4479 Returns 0 on success.
4480 """
4480 """
4481 if search:
4481 if search:
4482 for name, path in ui.configitems("paths"):
4482 for name, path in ui.configitems("paths"):
4483 if name == search:
4483 if name == search:
4484 ui.status("%s\n" % util.hidepassword(path))
4484 ui.status("%s\n" % util.hidepassword(path))
4485 return
4485 return
4486 if not ui.quiet:
4486 if not ui.quiet:
4487 ui.warn(_("not found!\n"))
4487 ui.warn(_("not found!\n"))
4488 return 1
4488 return 1
4489 else:
4489 else:
4490 for name, path in ui.configitems("paths"):
4490 for name, path in ui.configitems("paths"):
4491 if ui.quiet:
4491 if ui.quiet:
4492 ui.write("%s\n" % name)
4492 ui.write("%s\n" % name)
4493 else:
4493 else:
4494 ui.write("%s = %s\n" % (name, util.hidepassword(path)))
4494 ui.write("%s = %s\n" % (name, util.hidepassword(path)))
4495
4495
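# Editor's note: a hedged sketch of the default/default-push fallback described
# in the paths docstring above. The dict stands in for a hypothetical [paths]
# section; real lookups go through ui.expandpath and handle more cases.
def _path_fallback_sketch(pathcfg, pushing):
    # pathcfg might look like, e.g.:
    #   {'default': 'https://example.com/repo',
    #    'default-push': 'ssh://example.com//srv/repo'}
    # pushes prefer 'default-push' and fall back to 'default'; pulls use 'default'
    if pushing and 'default-push' in pathcfg:
        return pathcfg['default-push']
    return pathcfg.get('default')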
4496 @command('phase',
4496 @command('phase',
4497 [('p', 'public', False, _('set changeset phase to public')),
4497 [('p', 'public', False, _('set changeset phase to public')),
4498 ('d', 'draft', False, _('set changeset phase to draft')),
4498 ('d', 'draft', False, _('set changeset phase to draft')),
4499 ('s', 'secret', False, _('set changeset phase to secret')),
4499 ('s', 'secret', False, _('set changeset phase to secret')),
4500 ('f', 'force', False, _('allow to move boundary backward')),
4500 ('f', 'force', False, _('allow to move boundary backward')),
4501 ('r', 'rev', [], _('target revision'), _('REV')),
4501 ('r', 'rev', [], _('target revision'), _('REV')),
4502 ],
4502 ],
4503 _('[-p|-d|-s] [-f] [-r] REV...'))
4503 _('[-p|-d|-s] [-f] [-r] REV...'))
4504 def phase(ui, repo, *revs, **opts):
4504 def phase(ui, repo, *revs, **opts):
4505 """set or show the current phase name
4505 """set or show the current phase name
4506
4506
4507 With no argument, show the phase name of specified revisions.
4507 With no argument, show the phase name of specified revisions.
4508
4508
4509 With one of -p/--public, -d/--draft or -s/--secret, change the
4509 With one of -p/--public, -d/--draft or -s/--secret, change the
4510 phase value of the specified revisions.
4510 phase value of the specified revisions.
4511
4511
4512 Unless -f/--force is specified, :hg:`phase` won't move changesets from a
4512 Unless -f/--force is specified, :hg:`phase` won't move changesets from a
4513 lower phase to a higher phase. Phases are ordered as follows::
4513 lower phase to a higher phase. Phases are ordered as follows::
4514
4514
4515 public < draft < secret
4515 public < draft < secret
4516
4516
4517 Returns 0 on success, 1 if no phases were changed or some could not
4517 Returns 0 on success, 1 if no phases were changed or some could not
4518 be changed.
4518 be changed.
4519 """
4519 """
4520 # search for a unique phase argument
4520 # search for a unique phase argument
4521 targetphase = None
4521 targetphase = None
4522 for idx, name in enumerate(phases.phasenames):
4522 for idx, name in enumerate(phases.phasenames):
4523 if opts[name]:
4523 if opts[name]:
4524 if targetphase is not None:
4524 if targetphase is not None:
4525 raise util.Abort(_('only one phase can be specified'))
4525 raise util.Abort(_('only one phase can be specified'))
4526 targetphase = idx
4526 targetphase = idx
4527
4527
4528 # look for specified revision
4528 # look for specified revision
4529 revs = list(revs)
4529 revs = list(revs)
4530 revs.extend(opts['rev'])
4530 revs.extend(opts['rev'])
4531 if not revs:
4531 if not revs:
4532 raise util.Abort(_('no revisions specified'))
4532 raise util.Abort(_('no revisions specified'))
4533
4533
4534 revs = scmutil.revrange(repo, revs)
4534 revs = scmutil.revrange(repo, revs)
4535
4535
4536 lock = None
4536 lock = None
4537 ret = 0
4537 ret = 0
4538 if targetphase is None:
4538 if targetphase is None:
4539 # display
4539 # display
4540 for r in revs:
4540 for r in revs:
4541 ctx = repo[r]
4541 ctx = repo[r]
4542 ui.write('%i: %s\n' % (ctx.rev(), ctx.phasestr()))
4542 ui.write('%i: %s\n' % (ctx.rev(), ctx.phasestr()))
4543 else:
4543 else:
4544 lock = repo.lock()
4544 lock = repo.lock()
4545 try:
4545 try:
4546 # set phase
4546 # set phase
4547 if not revs:
4547 if not revs:
4548 raise util.Abort(_('empty revision set'))
4548 raise util.Abort(_('empty revision set'))
4549 nodes = [repo[r].node() for r in revs]
4549 nodes = [repo[r].node() for r in revs]
4550 olddata = repo._phasecache.getphaserevs(repo)[:]
4550 olddata = repo._phasecache.getphaserevs(repo)[:]
4551 phases.advanceboundary(repo, targetphase, nodes)
4551 phases.advanceboundary(repo, targetphase, nodes)
4552 if opts['force']:
4552 if opts['force']:
4553 phases.retractboundary(repo, targetphase, nodes)
4553 phases.retractboundary(repo, targetphase, nodes)
4554 finally:
4554 finally:
4555 lock.release()
4555 lock.release()
4556 # moving revision from public to draft may hide them
4556 # moving revision from public to draft may hide them
4557 # We have to check result on an unfiltered repository
4557 # We have to check result on an unfiltered repository
4558 unfi = repo.unfiltered()
4558 unfi = repo.unfiltered()
4559 newdata = repo._phasecache.getphaserevs(unfi)
4559 newdata = repo._phasecache.getphaserevs(unfi)
4560 changes = sum(o != newdata[i] for i, o in enumerate(olddata))
4560 changes = sum(o != newdata[i] for i, o in enumerate(olddata))
4561 cl = unfi.changelog
4561 cl = unfi.changelog
4562 rejected = [n for n in nodes
4562 rejected = [n for n in nodes
4563 if newdata[cl.rev(n)] < targetphase]
4563 if newdata[cl.rev(n)] < targetphase]
4564 if rejected:
4564 if rejected:
4565 ui.warn(_('cannot move %i changesets to a higher '
4565 ui.warn(_('cannot move %i changesets to a higher '
4566 'phase, use --force\n') % len(rejected))
4566 'phase, use --force\n') % len(rejected))
4567 ret = 1
4567 ret = 1
4568 if changes:
4568 if changes:
4569 msg = _('phase changed for %i changesets\n') % changes
4569 msg = _('phase changed for %i changesets\n') % changes
4570 if ret:
4570 if ret:
4571 ui.status(msg)
4571 ui.status(msg)
4572 else:
4572 else:
4573 ui.note(msg)
4573 ui.note(msg)
4574 else:
4574 else:
4575 ui.warn(_('no phases changed\n'))
4575 ui.warn(_('no phases changed\n'))
4576 ret = 1
4576 ret = 1
4577 return ret
4577 return ret
4578
4578
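# Editor's sketch (not part of commands.py): the bookkeeping above compares
# per-revision phase values before and after phases.advanceboundary() to
# count changed revisions and to spot nodes that stayed below the requested
# phase. A minimal standalone model, with plain integer lists standing in
# for the phase cache (public=0 < draft=1 < secret=2):
def summarize_phase_move(olddata, newdata, moved_revs, targetphase):
    # revisions whose phase value actually changed
    changes = sum(o != newdata[i] for i, o in enumerate(olddata))
    # requested revisions still sitting below the target phase were
    # rejected and would need --force (retractboundary) to move
    rejected = [r for r in moved_revs if newdata[r] < targetphase]
    return changes, rejected

# summarize_phase_move([0, 1, 2, 2], [0, 1, 2, 1], [0, 3], 1) -> (1, [0])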
4579 def postincoming(ui, repo, modheads, optupdate, checkout):
4579 def postincoming(ui, repo, modheads, optupdate, checkout):
4580 if modheads == 0:
4580 if modheads == 0:
4581 return
4581 return
4582 if optupdate:
4582 if optupdate:
4583 checkout, movemarkfrom = bookmarks.calculateupdate(ui, repo, checkout)
4583 checkout, movemarkfrom = bookmarks.calculateupdate(ui, repo, checkout)
4584 try:
4584 try:
4585 ret = hg.update(repo, checkout)
4585 ret = hg.update(repo, checkout)
4586 except util.Abort, inst:
4586 except util.Abort, inst:
4587 ui.warn(_("not updating: %s\n") % str(inst))
4587 ui.warn(_("not updating: %s\n") % str(inst))
4588 if inst.hint:
4588 if inst.hint:
4589 ui.warn(_("(%s)\n") % inst.hint)
4589 ui.warn(_("(%s)\n") % inst.hint)
4590 return 0
4590 return 0
4591 if not ret and not checkout:
4591 if not ret and not checkout:
4592 if bookmarks.update(repo, [movemarkfrom], repo['.'].node()):
4592 if bookmarks.update(repo, [movemarkfrom], repo['.'].node()):
4593 ui.status(_("updating bookmark %s\n") % repo._bookmarkcurrent)
4593 ui.status(_("updating bookmark %s\n") % repo._bookmarkcurrent)
4594 return ret
4594 return ret
4595 if modheads > 1:
4595 if modheads > 1:
4596 currentbranchheads = len(repo.branchheads())
4596 currentbranchheads = len(repo.branchheads())
4597 if currentbranchheads == modheads:
4597 if currentbranchheads == modheads:
4598 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
4598 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
4599 elif currentbranchheads > 1:
4599 elif currentbranchheads > 1:
4600 ui.status(_("(run 'hg heads .' to see heads, 'hg merge' to "
4600 ui.status(_("(run 'hg heads .' to see heads, 'hg merge' to "
4601 "merge)\n"))
4601 "merge)\n"))
4602 else:
4602 else:
4603 ui.status(_("(run 'hg heads' to see heads)\n"))
4603 ui.status(_("(run 'hg heads' to see heads)\n"))
4604 else:
4604 else:
4605 ui.status(_("(run 'hg update' to get a working copy)\n"))
4605 ui.status(_("(run 'hg update' to get a working copy)\n"))
4606
4606
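# Editor's sketch (standalone, hypothetical helper): for the non --update
# path, postincoming() above picks its follow-up hint from the number of
# heads the incoming changesets added versus the heads on the current branch:
def incoming_hint(modheads, currentbranchheads):
    if modheads <= 1:
        return "(run 'hg update' to get a working copy)"
    if currentbranchheads == modheads:
        return "(run 'hg heads' to see heads, 'hg merge' to merge)"
    if currentbranchheads > 1:
        return "(run 'hg heads .' to see heads, 'hg merge' to merge)"
    return "(run 'hg heads' to see heads)"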
4607 @command('^pull',
4607 @command('^pull',
4608 [('u', 'update', None,
4608 [('u', 'update', None,
4609 _('update to new branch head if changesets were pulled')),
4609 _('update to new branch head if changesets were pulled')),
4610 ('f', 'force', None, _('run even when remote repository is unrelated')),
4610 ('f', 'force', None, _('run even when remote repository is unrelated')),
4611 ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
4611 ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
4612 ('B', 'bookmark', [], _("bookmark to pull"), _('BOOKMARK')),
4612 ('B', 'bookmark', [], _("bookmark to pull"), _('BOOKMARK')),
4613 ('b', 'branch', [], _('a specific branch you would like to pull'),
4613 ('b', 'branch', [], _('a specific branch you would like to pull'),
4614 _('BRANCH')),
4614 _('BRANCH')),
4615 ] + remoteopts,
4615 ] + remoteopts,
4616 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]'))
4616 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]'))
4617 def pull(ui, repo, source="default", **opts):
4617 def pull(ui, repo, source="default", **opts):
4618 """pull changes from the specified source
4618 """pull changes from the specified source
4619
4619
4620 Pull changes from a remote repository to a local one.
4620 Pull changes from a remote repository to a local one.
4621
4621
4622 This finds all changes from the repository at the specified path
4622 This finds all changes from the repository at the specified path
4623 or URL and adds them to a local repository (the current one unless
4623 or URL and adds them to a local repository (the current one unless
4624 -R is specified). By default, this does not update the copy of the
4624 -R is specified). By default, this does not update the copy of the
4625 project in the working directory.
4625 project in the working directory.
4626
4626
4627 Use :hg:`incoming` if you want to see what would have been added
4627 Use :hg:`incoming` if you want to see what would have been added
4628 by a pull at the time you issued this command. If you then decide
4628 by a pull at the time you issued this command. If you then decide
4629 to add those changes to the repository, you should use :hg:`pull
4629 to add those changes to the repository, you should use :hg:`pull
4630 -r X` where ``X`` is the last changeset listed by :hg:`incoming`.
4630 -r X` where ``X`` is the last changeset listed by :hg:`incoming`.
4631
4631
4632 If SOURCE is omitted, the 'default' path will be used.
4632 If SOURCE is omitted, the 'default' path will be used.
4633 See :hg:`help urls` for more information.
4633 See :hg:`help urls` for more information.
4634
4634
4635 Returns 0 on success, 1 if an update had unresolved files.
4635 Returns 0 on success, 1 if an update had unresolved files.
4636 """
4636 """
4637 source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
4637 source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
4638 other = hg.peer(repo, opts, source)
4638 other = hg.peer(repo, opts, source)
4639 try:
4639 try:
4640 ui.status(_('pulling from %s\n') % util.hidepassword(source))
4640 ui.status(_('pulling from %s\n') % util.hidepassword(source))
4641 revs, checkout = hg.addbranchrevs(repo, other, branches,
4641 revs, checkout = hg.addbranchrevs(repo, other, branches,
4642 opts.get('rev'))
4642 opts.get('rev'))
4643
4643
4644 remotebookmarks = other.listkeys('bookmarks')
4644 remotebookmarks = other.listkeys('bookmarks')
4645
4645
4646 if opts.get('bookmark'):
4646 if opts.get('bookmark'):
4647 if not revs:
4647 if not revs:
4648 revs = []
4648 revs = []
4649 for b in opts['bookmark']:
4649 for b in opts['bookmark']:
4650 if b not in remotebookmarks:
4650 if b not in remotebookmarks:
4651 raise util.Abort(_('remote bookmark %s not found!') % b)
4651 raise util.Abort(_('remote bookmark %s not found!') % b)
4652 revs.append(remotebookmarks[b])
4652 revs.append(remotebookmarks[b])
4653
4653
4654 if revs:
4654 if revs:
4655 try:
4655 try:
4656 revs = [other.lookup(rev) for rev in revs]
4656 revs = [other.lookup(rev) for rev in revs]
4657 except error.CapabilityError:
4657 except error.CapabilityError:
4658 err = _("other repository doesn't support revision lookup, "
4658 err = _("other repository doesn't support revision lookup, "
4659 "so a rev cannot be specified.")
4659 "so a rev cannot be specified.")
4660 raise util.Abort(err)
4660 raise util.Abort(err)
4661
4661
4662 modheads = repo.pull(other, heads=revs, force=opts.get('force'))
4662 modheads = repo.pull(other, heads=revs, force=opts.get('force'))
4663 bookmarks.updatefromremote(ui, repo, remotebookmarks, source)
4663 bookmarks.updatefromremote(ui, repo, remotebookmarks, source)
4664 if checkout:
4664 if checkout:
4665 checkout = str(repo.changelog.rev(other.lookup(checkout)))
4665 checkout = str(repo.changelog.rev(other.lookup(checkout)))
4666 repo._subtoppath = source
4666 repo._subtoppath = source
4667 try:
4667 try:
4668 ret = postincoming(ui, repo, modheads, opts.get('update'), checkout)
4668 ret = postincoming(ui, repo, modheads, opts.get('update'), checkout)
4669
4669
4670 finally:
4670 finally:
4671 del repo._subtoppath
4671 del repo._subtoppath
4672
4672
4673 # update specified bookmarks
4673 # update specified bookmarks
4674 if opts.get('bookmark'):
4674 if opts.get('bookmark'):
4675 marks = repo._bookmarks
4675 marks = repo._bookmarks
4676 for b in opts['bookmark']:
4676 for b in opts['bookmark']:
4677 # explicit pull overrides local bookmark if any
4677 # explicit pull overrides local bookmark if any
4678 ui.status(_("importing bookmark %s\n") % b)
4678 ui.status(_("importing bookmark %s\n") % b)
4679 marks[b] = repo[remotebookmarks[b]].node()
4679 marks[b] = repo[remotebookmarks[b]].node()
4680 marks.write()
4680 marks.write()
4681 finally:
4681 finally:
4682 other.close()
4682 other.close()
4683 return ret
4683 return ret
4684
4684
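# Editor's sketch (standalone, names are hypothetical): pull's -B/--bookmark
# handling above resolves each requested bookmark against the result of
# other.listkeys('bookmarks') and folds the bookmarked nodes into the list
# of revisions to pull:
def bookmarks_to_revs(requested, remotebookmarks, revs=None):
    revs = list(revs or [])
    for b in requested:
        if b not in remotebookmarks:
            raise LookupError('remote bookmark %s not found!' % b)
        revs.append(remotebookmarks[b])  # pull the bookmarked node as well
    return revs

# bookmarks_to_revs(['stable'], {'stable': 'a1b2c3'}) -> ['a1b2c3']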
4685 @command('^push',
4685 @command('^push',
4686 [('f', 'force', None, _('force push')),
4686 [('f', 'force', None, _('force push')),
4687 ('r', 'rev', [],
4687 ('r', 'rev', [],
4688 _('a changeset intended to be included in the destination'),
4688 _('a changeset intended to be included in the destination'),
4689 _('REV')),
4689 _('REV')),
4690 ('B', 'bookmark', [], _("bookmark to push"), _('BOOKMARK')),
4690 ('B', 'bookmark', [], _("bookmark to push"), _('BOOKMARK')),
4691 ('b', 'branch', [],
4691 ('b', 'branch', [],
4692 _('a specific branch you would like to push'), _('BRANCH')),
4692 _('a specific branch you would like to push'), _('BRANCH')),
4693 ('', 'new-branch', False, _('allow pushing a new branch')),
4693 ('', 'new-branch', False, _('allow pushing a new branch')),
4694 ] + remoteopts,
4694 ] + remoteopts,
4695 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]'))
4695 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]'))
4696 def push(ui, repo, dest=None, **opts):
4696 def push(ui, repo, dest=None, **opts):
4697 """push changes to the specified destination
4697 """push changes to the specified destination
4698
4698
4699 Push changesets from the local repository to the specified
4699 Push changesets from the local repository to the specified
4700 destination.
4700 destination.
4701
4701
4702 This operation is symmetrical to pull: it is identical to a pull
4702 This operation is symmetrical to pull: it is identical to a pull
4703 in the destination repository from the current one.
4703 in the destination repository from the current one.
4704
4704
4705 By default, push will not allow creation of new heads at the
4705 By default, push will not allow creation of new heads at the
4706 destination, since multiple heads would make it unclear which head
4706 destination, since multiple heads would make it unclear which head
4707 to use. In this situation, it is recommended to pull and merge
4707 to use. In this situation, it is recommended to pull and merge
4708 before pushing.
4708 before pushing.
4709
4709
4710 Use --new-branch if you want to allow push to create a new named
4710 Use --new-branch if you want to allow push to create a new named
4711 branch that is not present at the destination. This allows you to
4711 branch that is not present at the destination. This allows you to
4712 only create a new branch without forcing other changes.
4712 only create a new branch without forcing other changes.
4713
4713
4714 .. note::
4714 .. note::
4715
4715
4716 Extra care should be taken with the -f/--force option,
4716 Extra care should be taken with the -f/--force option,
4717 which will push all new heads on all branches, an action which will
4717 which will push all new heads on all branches, an action which will
4718 almost always cause confusion for collaborators.
4718 almost always cause confusion for collaborators.
4719
4719
4720 If -r/--rev is used, the specified revision and all its ancestors
4720 If -r/--rev is used, the specified revision and all its ancestors
4721 will be pushed to the remote repository.
4721 will be pushed to the remote repository.
4722
4722
4723 If -B/--bookmark is used, the specified bookmarked revision, its
4723 If -B/--bookmark is used, the specified bookmarked revision, its
4724 ancestors, and the bookmark will be pushed to the remote
4724 ancestors, and the bookmark will be pushed to the remote
4725 repository.
4725 repository.
4726
4726
4727 Please see :hg:`help urls` for important details about ``ssh://``
4727 Please see :hg:`help urls` for important details about ``ssh://``
4728 URLs. If DESTINATION is omitted, a default path will be used.
4728 URLs. If DESTINATION is omitted, a default path will be used.
4729
4729
4730 Returns 0 if push was successful, 1 if nothing to push.
4730 Returns 0 if push was successful, 1 if nothing to push.
4731 """
4731 """
4732
4732
4733 if opts.get('bookmark'):
4733 if opts.get('bookmark'):
4734 ui.setconfig('bookmarks', 'pushing', opts['bookmark'], 'push')
4734 ui.setconfig('bookmarks', 'pushing', opts['bookmark'], 'push')
4735 for b in opts['bookmark']:
4735 for b in opts['bookmark']:
4736 # translate -B options to -r so changesets get pushed
4736 # translate -B options to -r so changesets get pushed
4737 if b in repo._bookmarks:
4737 if b in repo._bookmarks:
4738 opts.setdefault('rev', []).append(b)
4738 opts.setdefault('rev', []).append(b)
4739 else:
4739 else:
4740 # if we try to push a deleted bookmark, translate it to null
4740 # if we try to push a deleted bookmark, translate it to null
4741 # this lets simultaneous -r, -b options continue working
4741 # this lets simultaneous -r, -b options continue working
4742 opts.setdefault('rev', []).append("null")
4742 opts.setdefault('rev', []).append("null")
4743
4743
4744 dest = ui.expandpath(dest or 'default-push', dest or 'default')
4744 dest = ui.expandpath(dest or 'default-push', dest or 'default')
4745 dest, branches = hg.parseurl(dest, opts.get('branch'))
4745 dest, branches = hg.parseurl(dest, opts.get('branch'))
4746 ui.status(_('pushing to %s\n') % util.hidepassword(dest))
4746 ui.status(_('pushing to %s\n') % util.hidepassword(dest))
4747 revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
4747 revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
4748 try:
4748 try:
4749 other = hg.peer(repo, opts, dest)
4749 other = hg.peer(repo, opts, dest)
4750 except error.RepoError:
4750 except error.RepoError:
4751 if dest == "default-push":
4751 if dest == "default-push":
4752 raise util.Abort(_("default repository not configured!"),
4752 raise util.Abort(_("default repository not configured!"),
4753 hint=_('see the "path" section in "hg help config"'))
4753 hint=_('see the "path" section in "hg help config"'))
4754 else:
4754 else:
4755 raise
4755 raise
4756
4756
4757 if revs:
4757 if revs:
4758 revs = [repo.lookup(r) for r in scmutil.revrange(repo, revs)]
4758 revs = [repo.lookup(r) for r in scmutil.revrange(repo, revs)]
4759
4759
4760 repo._subtoppath = dest
4760 repo._subtoppath = dest
4761 try:
4761 try:
4762 # push subrepos depth-first for coherent ordering
4762 # push subrepos depth-first for coherent ordering
4763 c = repo['']
4763 c = repo['']
4764 subs = c.substate # only repos that are committed
4764 subs = c.substate # only repos that are committed
4765 for s in sorted(subs):
4765 for s in sorted(subs):
4766 if c.sub(s).push(opts) == 0:
4766 if c.sub(s).push(opts) == 0:
4767 return False
4767 return False
4768 finally:
4768 finally:
4769 del repo._subtoppath
4769 del repo._subtoppath
4770 result = repo.push(other, opts.get('force'), revs=revs,
4770 result = repo.push(other, opts.get('force'), revs=revs,
4771 newbranch=opts.get('new_branch'))
4771 newbranch=opts.get('new_branch'))
4772
4772
4773 result = not result
4773 result = not result
4774
4774
4775 if opts.get('bookmark'):
4775 if opts.get('bookmark'):
4776 bresult = bookmarks.pushtoremote(ui, repo, other, opts['bookmark'])
4776 bresult = bookmarks.pushtoremote(ui, repo, other, opts['bookmark'])
4777 if bresult == 2:
4777 if bresult == 2:
4778 return 2
4778 return 2
4779 if not result and bresult:
4779 if not result and bresult:
4780 result = 2
4780 result = 2
4781
4781
4782 return result
4782 return result
4783
4783
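# Editor's sketch (standalone model of the exit-code combination at the end
# of push() above): repo.push() reports whether changesets went out, which
# is inverted into the usual 0-on-success convention and then merged with
# the outcome of bookmarks.pushtoremote(), where 2 signals a bookmark failure:
def combine_push_results(pushed_changesets, bookmark_result=None):
    result = 0 if pushed_changesets else 1
    if bookmark_result is not None:
        if bookmark_result == 2:
            return 2
        if result == 0 and bookmark_result:
            result = 2
    return result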
4784 @command('recover', [])
4784 @command('recover', [])
4785 def recover(ui, repo):
4785 def recover(ui, repo):
4786 """roll back an interrupted transaction
4786 """roll back an interrupted transaction
4787
4787
4788 Recover from an interrupted commit or pull.
4788 Recover from an interrupted commit or pull.
4789
4789
4790 This command tries to fix the repository status after an
4790 This command tries to fix the repository status after an
4791 interrupted operation. It should only be necessary when Mercurial
4791 interrupted operation. It should only be necessary when Mercurial
4792 suggests it.
4792 suggests it.
4793
4793
4794 Returns 0 if successful, 1 if nothing to recover or verify fails.
4794 Returns 0 if successful, 1 if nothing to recover or verify fails.
4795 """
4795 """
4796 if repo.recover():
4796 if repo.recover():
4797 return hg.verify(repo)
4797 return hg.verify(repo)
4798 return 1
4798 return 1
4799
4799
4800 @command('^remove|rm',
4800 @command('^remove|rm',
4801 [('A', 'after', None, _('record delete for missing files')),
4801 [('A', 'after', None, _('record delete for missing files')),
4802 ('f', 'force', None,
4802 ('f', 'force', None,
4803 _('remove (and delete) file even if added or modified')),
4803 _('remove (and delete) file even if added or modified')),
4804 ] + walkopts,
4804 ] + walkopts,
4805 _('[OPTION]... FILE...'))
4805 _('[OPTION]... FILE...'))
4806 def remove(ui, repo, *pats, **opts):
4806 def remove(ui, repo, *pats, **opts):
4807 """remove the specified files on the next commit
4807 """remove the specified files on the next commit
4808
4808
4809 Schedule the indicated files for removal from the current branch.
4809 Schedule the indicated files for removal from the current branch.
4810
4810
4811 This command schedules the files to be removed at the next commit.
4811 This command schedules the files to be removed at the next commit.
4812 To undo a remove before that, see :hg:`revert`. To undo added
4812 To undo a remove before that, see :hg:`revert`. To undo added
4813 files, see :hg:`forget`.
4813 files, see :hg:`forget`.
4814
4814
4815 .. container:: verbose
4815 .. container:: verbose
4816
4816
4817 -A/--after can be used to remove only files that have already
4817 -A/--after can be used to remove only files that have already
4818 been deleted, -f/--force can be used to force deletion, and -Af
4818 been deleted, -f/--force can be used to force deletion, and -Af
4819 can be used to remove files from the next revision without
4819 can be used to remove files from the next revision without
4820 deleting them from the working directory.
4820 deleting them from the working directory.
4821
4821
4822 The following table details the behavior of remove for different
4822 The following table details the behavior of remove for different
4823 file states (columns) and option combinations (rows). The file
4823 file states (columns) and option combinations (rows). The file
4824 states are Added [A], Clean [C], Modified [M] and Missing [!]
4824 states are Added [A], Clean [C], Modified [M] and Missing [!]
4825 (as reported by :hg:`status`). The actions are Warn, Remove
4825 (as reported by :hg:`status`). The actions are Warn, Remove
4826 (from branch) and Delete (from disk):
4826 (from branch) and Delete (from disk):
4827
4827
4828 ========= == == == ==
4828 ========= == == == ==
4829 opt/state A C M !
4829 opt/state A C M !
4830 ========= == == == ==
4830 ========= == == == ==
4831 none W RD W R
4831 none W RD W R
4832 -f R RD RD R
4832 -f R RD RD R
4833 -A W W W R
4833 -A W W W R
4834 -Af R R R R
4834 -Af R R R R
4835 ========= == == == ==
4835 ========= == == == ==
4836
4836
4837 Note that remove never deletes files in Added [A] state from the
4837 Note that remove never deletes files in Added [A] state from the
4838 working directory, not even if the --force option is specified.
4838 working directory, not even if the --force option is specified.
4839
4839
4840 Returns 0 on success, 1 if any warnings encountered.
4840 Returns 0 on success, 1 if any warnings encountered.
4841 """
4841 """
4842
4842
4843 ret = 0
4843 ret = 0
4844 after, force = opts.get('after'), opts.get('force')
4844 after, force = opts.get('after'), opts.get('force')
4845 if not pats and not after:
4845 if not pats and not after:
4846 raise util.Abort(_('no files specified'))
4846 raise util.Abort(_('no files specified'))
4847
4847
4848 m = scmutil.match(repo[None], pats, opts)
4848 m = scmutil.match(repo[None], pats, opts)
4849 s = repo.status(match=m, clean=True)
4849 s = repo.status(match=m, clean=True)
4850 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
4850 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
4851
4851
4852 # warn about failure to delete explicit files/dirs
4852 # warn about failure to delete explicit files/dirs
4853 wctx = repo[None]
4853 wctx = repo[None]
4854 for f in m.files():
4854 for f in m.files():
4855 if f in repo.dirstate or f in wctx.dirs():
4855 if f in repo.dirstate or f in wctx.dirs():
4856 continue
4856 continue
4857 if os.path.exists(m.rel(f)):
4857 if os.path.exists(m.rel(f)):
4858 if os.path.isdir(m.rel(f)):
4858 if os.path.isdir(m.rel(f)):
4859 ui.warn(_('not removing %s: no tracked files\n') % m.rel(f))
4859 ui.warn(_('not removing %s: no tracked files\n') % m.rel(f))
4860 else:
4860 else:
4861 ui.warn(_('not removing %s: file is untracked\n') % m.rel(f))
4861 ui.warn(_('not removing %s: file is untracked\n') % m.rel(f))
4862 # missing files will generate a warning elsewhere
4862 # missing files will generate a warning elsewhere
4863 ret = 1
4863 ret = 1
4864
4864
4865 if force:
4865 if force:
4866 list = modified + deleted + clean + added
4866 list = modified + deleted + clean + added
4867 elif after:
4867 elif after:
4868 list = deleted
4868 list = deleted
4869 for f in modified + added + clean:
4869 for f in modified + added + clean:
4870 ui.warn(_('not removing %s: file still exists\n') % m.rel(f))
4870 ui.warn(_('not removing %s: file still exists\n') % m.rel(f))
4871 ret = 1
4871 ret = 1
4872 else:
4872 else:
4873 list = deleted + clean
4873 list = deleted + clean
4874 for f in modified:
4874 for f in modified:
4875 ui.warn(_('not removing %s: file is modified (use -f'
4875 ui.warn(_('not removing %s: file is modified (use -f'
4876 ' to force removal)\n') % m.rel(f))
4876 ' to force removal)\n') % m.rel(f))
4877 ret = 1
4877 ret = 1
4878 for f in added:
4878 for f in added:
4879 ui.warn(_('not removing %s: file has been marked for add'
4879 ui.warn(_('not removing %s: file has been marked for add'
4880 ' (use forget to undo)\n') % m.rel(f))
4880 ' (use forget to undo)\n') % m.rel(f))
4881 ret = 1
4881 ret = 1
4882
4882
4883 for f in sorted(list):
4883 for f in sorted(list):
4884 if ui.verbose or not m.exact(f):
4884 if ui.verbose or not m.exact(f):
4885 ui.status(_('removing %s\n') % m.rel(f))
4885 ui.status(_('removing %s\n') % m.rel(f))
4886
4886
4887 wlock = repo.wlock()
4887 wlock = repo.wlock()
4888 try:
4888 try:
4889 if not after:
4889 if not after:
4890 for f in list:
4890 for f in list:
4891 if f in added:
4891 if f in added:
4892 continue # we never unlink added files on remove
4892 continue # we never unlink added files on remove
4893 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
4893 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
4894 repo[None].forget(list)
4894 repo[None].forget(list)
4895 finally:
4895 finally:
4896 wlock.release()
4896 wlock.release()
4897
4897
4898 return ret
4898 return ret
4899
4899
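# Editor's sketch: the option/state table in the remove docstring above,
# written out as data. Keys are (after, force) option pairs; values map a
# file state to its action (W = warn, R = remove from branch, D = delete
# from disk):
REMOVE_BEHAVIOR = {
    (False, False): {'A': 'W', 'C': 'RD', 'M': 'W',  '!': 'R'},   # no options
    (False, True):  {'A': 'R', 'C': 'RD', 'M': 'RD', '!': 'R'},   # -f
    (True,  False): {'A': 'W', 'C': 'W',  'M': 'W',  '!': 'R'},   # -A
    (True,  True):  {'A': 'R', 'C': 'R',  'M': 'R',  '!': 'R'},   # -Af
}

# e.g. a clean file with no options is removed and deleted:
# REMOVE_BEHAVIOR[(False, False)]['C'] -> 'RD'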
4900 @command('rename|move|mv',
4900 @command('rename|move|mv',
4901 [('A', 'after', None, _('record a rename that has already occurred')),
4901 [('A', 'after', None, _('record a rename that has already occurred')),
4902 ('f', 'force', None, _('forcibly copy over an existing managed file')),
4902 ('f', 'force', None, _('forcibly copy over an existing managed file')),
4903 ] + walkopts + dryrunopts,
4903 ] + walkopts + dryrunopts,
4904 _('[OPTION]... SOURCE... DEST'))
4904 _('[OPTION]... SOURCE... DEST'))
4905 def rename(ui, repo, *pats, **opts):
4905 def rename(ui, repo, *pats, **opts):
4906 """rename files; equivalent of copy + remove
4906 """rename files; equivalent of copy + remove
4907
4907
4908 Mark dest as copies of sources; mark sources for deletion. If dest
4908 Mark dest as copies of sources; mark sources for deletion. If dest
4909 is a directory, copies are put in that directory. If dest is a
4909 is a directory, copies are put in that directory. If dest is a
4910 file, there can only be one source.
4910 file, there can only be one source.
4911
4911
4912 By default, this command copies the contents of files as they
4912 By default, this command copies the contents of files as they
4913 exist in the working directory. If invoked with -A/--after, the
4913 exist in the working directory. If invoked with -A/--after, the
4914 operation is recorded, but no copying is performed.
4914 operation is recorded, but no copying is performed.
4915
4915
4916 This command takes effect at the next commit. To undo a rename
4916 This command takes effect at the next commit. To undo a rename
4917 before that, see :hg:`revert`.
4917 before that, see :hg:`revert`.
4918
4918
4919 Returns 0 on success, 1 if errors are encountered.
4919 Returns 0 on success, 1 if errors are encountered.
4920 """
4920 """
4921 wlock = repo.wlock(False)
4921 wlock = repo.wlock(False)
4922 try:
4922 try:
4923 return cmdutil.copy(ui, repo, pats, opts, rename=True)
4923 return cmdutil.copy(ui, repo, pats, opts, rename=True)
4924 finally:
4924 finally:
4925 wlock.release()
4925 wlock.release()
4926
4926
4927 @command('resolve',
4927 @command('resolve',
4928 [('a', 'all', None, _('select all unresolved files')),
4928 [('a', 'all', None, _('select all unresolved files')),
4929 ('l', 'list', None, _('list state of files needing merge')),
4929 ('l', 'list', None, _('list state of files needing merge')),
4930 ('m', 'mark', None, _('mark files as resolved')),
4930 ('m', 'mark', None, _('mark files as resolved')),
4931 ('u', 'unmark', None, _('mark files as unresolved')),
4931 ('u', 'unmark', None, _('mark files as unresolved')),
4932 ('n', 'no-status', None, _('hide status prefix'))]
4932 ('n', 'no-status', None, _('hide status prefix'))]
4933 + mergetoolopts + walkopts,
4933 + mergetoolopts + walkopts,
4934 _('[OPTION]... [FILE]...'))
4934 _('[OPTION]... [FILE]...'))
4935 def resolve(ui, repo, *pats, **opts):
4935 def resolve(ui, repo, *pats, **opts):
4936 """redo merges or set/view the merge status of files
4936 """redo merges or set/view the merge status of files
4937
4937
4938 Merges with unresolved conflicts are often the result of
4938 Merges with unresolved conflicts are often the result of
4939 non-interactive merging using the ``internal:merge`` configuration
4939 non-interactive merging using the ``internal:merge`` configuration
4940 setting, or a command-line merge tool like ``diff3``. The resolve
4940 setting, or a command-line merge tool like ``diff3``. The resolve
4941 command is used to manage the files involved in a merge, after
4941 command is used to manage the files involved in a merge, after
4942 :hg:`merge` has been run, and before :hg:`commit` is run (i.e. the
4942 :hg:`merge` has been run, and before :hg:`commit` is run (i.e. the
4943 working directory must have two parents). See :hg:`help
4943 working directory must have two parents). See :hg:`help
4944 merge-tools` for information on configuring merge tools.
4944 merge-tools` for information on configuring merge tools.
4945
4945
4946 The resolve command can be used in the following ways:
4946 The resolve command can be used in the following ways:
4947
4947
4948 - :hg:`resolve [--tool TOOL] FILE...`: attempt to re-merge the specified
4948 - :hg:`resolve [--tool TOOL] FILE...`: attempt to re-merge the specified
4949 files, discarding any previous merge attempts. Re-merging is not
4949 files, discarding any previous merge attempts. Re-merging is not
4950 performed for files already marked as resolved. Use ``--all/-a``
4950 performed for files already marked as resolved. Use ``--all/-a``
4951 to select all unresolved files. ``--tool`` can be used to specify
4951 to select all unresolved files. ``--tool`` can be used to specify
4952 the merge tool used for the given files. It overrides the HGMERGE
4952 the merge tool used for the given files. It overrides the HGMERGE
4953 environment variable and your configuration files. Previous file
4953 environment variable and your configuration files. Previous file
4954 contents are saved with a ``.orig`` suffix.
4954 contents are saved with a ``.orig`` suffix.
4955
4955
4956 - :hg:`resolve -m [FILE]`: mark a file as having been resolved
4956 - :hg:`resolve -m [FILE]`: mark a file as having been resolved
4957 (e.g. after having manually fixed-up the files). The default is
4957 (e.g. after having manually fixed-up the files). The default is
4958 to mark all unresolved files.
4958 to mark all unresolved files.
4959
4959
4960 - :hg:`resolve -u [FILE]...`: mark a file as unresolved. The
4960 - :hg:`resolve -u [FILE]...`: mark a file as unresolved. The
4961 default is to mark all resolved files.
4961 default is to mark all resolved files.
4962
4962
4963 - :hg:`resolve -l`: list files which had or still have conflicts.
4963 - :hg:`resolve -l`: list files which had or still have conflicts.
4964 In the printed list, ``U`` = unresolved and ``R`` = resolved.
4964 In the printed list, ``U`` = unresolved and ``R`` = resolved.
4965
4965
4966 Note that Mercurial will not let you commit files with unresolved
4966 Note that Mercurial will not let you commit files with unresolved
4967 merge conflicts. You must use :hg:`resolve -m ...` before you can
4967 merge conflicts. You must use :hg:`resolve -m ...` before you can
4968 commit after a conflicting merge.
4968 commit after a conflicting merge.
4969
4969
4970 Returns 0 on success, 1 if any files fail a resolve attempt.
4970 Returns 0 on success, 1 if any files fail a resolve attempt.
4971 """
4971 """
4972
4972
4973 all, mark, unmark, show, nostatus = \
4973 all, mark, unmark, show, nostatus = \
4974 [opts.get(o) for o in 'all mark unmark list no_status'.split()]
4974 [opts.get(o) for o in 'all mark unmark list no_status'.split()]
4975
4975
4976 if (show and (mark or unmark)) or (mark and unmark):
4976 if (show and (mark or unmark)) or (mark and unmark):
4977 raise util.Abort(_("too many options specified"))
4977 raise util.Abort(_("too many options specified"))
4978 if pats and all:
4978 if pats and all:
4979 raise util.Abort(_("can't specify --all and patterns"))
4979 raise util.Abort(_("can't specify --all and patterns"))
4980 if not (all or pats or show or mark or unmark):
4980 if not (all or pats or show or mark or unmark):
4981 raise util.Abort(_('no files or directories specified; '
4981 raise util.Abort(_('no files or directories specified; '
4982 'use --all to remerge all files'))
4982 'use --all to remerge all files'))
4983
4983
4984 ms = mergemod.mergestate(repo)
4984 ms = mergemod.mergestate(repo)
4985 m = scmutil.match(repo[None], pats, opts)
4985 m = scmutil.match(repo[None], pats, opts)
4986 ret = 0
4986 ret = 0
4987
4987
4988 for f in ms:
4988 for f in ms:
4989 if m(f):
4989 if m(f):
4990 if show:
4990 if show:
4991 if nostatus:
4991 if nostatus:
4992 ui.write("%s\n" % f)
4992 ui.write("%s\n" % f)
4993 else:
4993 else:
4994 ui.write("%s %s\n" % (ms[f].upper(), f),
4994 ui.write("%s %s\n" % (ms[f].upper(), f),
4995 label='resolve.' +
4995 label='resolve.' +
4996 {'u': 'unresolved', 'r': 'resolved'}[ms[f]])
4996 {'u': 'unresolved', 'r': 'resolved'}[ms[f]])
4997 elif mark:
4997 elif mark:
4998 ms.mark(f, "r")
4998 ms.mark(f, "r")
4999 elif unmark:
4999 elif unmark:
5000 ms.mark(f, "u")
5000 ms.mark(f, "u")
5001 else:
5001 else:
5002 wctx = repo[None]
5002 wctx = repo[None]
5003
5003
5004 # backup pre-resolve (merge uses .orig for its own purposes)
5004 # backup pre-resolve (merge uses .orig for its own purposes)
5005 a = repo.wjoin(f)
5005 a = repo.wjoin(f)
5006 util.copyfile(a, a + ".resolve")
5006 util.copyfile(a, a + ".resolve")
5007
5007
5008 try:
5008 try:
5009 # resolve file
5009 # resolve file
5010 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
5010 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
5011 'resolve')
5011 'resolve')
5012 if ms.resolve(f, wctx):
5012 if ms.resolve(f, wctx):
5013 ret = 1
5013 ret = 1
5014 finally:
5014 finally:
5015 ui.setconfig('ui', 'forcemerge', '', 'resolve')
5015 ui.setconfig('ui', 'forcemerge', '', 'resolve')
5016 ms.commit()
5016 ms.commit()
5017
5017
5018 # replace filemerge's .orig file with our resolve file
5018 # replace filemerge's .orig file with our resolve file
5019 util.rename(a + ".resolve", a + ".orig")
5019 util.rename(a + ".resolve", a + ".orig")
5020
5020
5021 ms.commit()
5021 ms.commit()
5022 return ret
5022 return ret
5023
5023
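# Editor's sketch (standalone, hypothetical helper): resolve() above copies
# each file to <file>.resolve before re-merging, then renames that copy to
# <file>.orig afterwards, because the merge machinery reuses .orig for its
# own purposes during the re-merge:
import shutil

def backup_then_remerge(path, remerge):
    shutil.copyfile(path, path + '.resolve')        # pre-resolve backup
    failed = remerge(path)                          # may rewrite path and path + '.orig'
    shutil.move(path + '.resolve', path + '.orig')  # keep our backup as the .orig
    return failed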
5024 @command('revert',
5024 @command('revert',
5025 [('a', 'all', None, _('revert all changes when no arguments given')),
5025 [('a', 'all', None, _('revert all changes when no arguments given')),
5026 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
5026 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
5027 ('r', 'rev', '', _('revert to the specified revision'), _('REV')),
5027 ('r', 'rev', '', _('revert to the specified revision'), _('REV')),
5028 ('C', 'no-backup', None, _('do not save backup copies of files')),
5028 ('C', 'no-backup', None, _('do not save backup copies of files')),
5029 ] + walkopts + dryrunopts,
5029 ] + walkopts + dryrunopts,
5030 _('[OPTION]... [-r REV] [NAME]...'))
5030 _('[OPTION]... [-r REV] [NAME]...'))
5031 def revert(ui, repo, *pats, **opts):
5031 def revert(ui, repo, *pats, **opts):
5032 """restore files to their checkout state
5032 """restore files to their checkout state
5033
5033
5034 .. note::
5034 .. note::
5035
5035
5036 To check out earlier revisions, you should use :hg:`update REV`.
5036 To check out earlier revisions, you should use :hg:`update REV`.
5037 To cancel an uncommitted merge (and lose your changes),
5037 To cancel an uncommitted merge (and lose your changes),
5038 use :hg:`update --clean .`.
5038 use :hg:`update --clean .`.
5039
5039
5040 With no revision specified, revert the specified files or directories
5040 With no revision specified, revert the specified files or directories
5041 to the contents they had in the parent of the working directory.
5041 to the contents they had in the parent of the working directory.
5042 This restores the contents of files to an unmodified
5042 This restores the contents of files to an unmodified
5043 state and unschedules adds, removes, copies, and renames. If the
5043 state and unschedules adds, removes, copies, and renames. If the
5044 working directory has two parents, you must explicitly specify a
5044 working directory has two parents, you must explicitly specify a
5045 revision.
5045 revision.
5046
5046
5047 Using the -r/--rev or -d/--date options, revert the given files or
5047 Using the -r/--rev or -d/--date options, revert the given files or
5048 directories to their states as of a specific revision. Because
5048 directories to their states as of a specific revision. Because
5049 revert does not change the working directory parents, this will
5049 revert does not change the working directory parents, this will
5050 cause these files to appear modified. This can be helpful to "back
5050 cause these files to appear modified. This can be helpful to "back
5051 out" some or all of an earlier change. See :hg:`backout` for a
5051 out" some or all of an earlier change. See :hg:`backout` for a
5052 related method.
5052 related method.
5053
5053
5054 Modified files are saved with a .orig suffix before reverting.
5054 Modified files are saved with a .orig suffix before reverting.
5055 To disable these backups, use --no-backup.
5055 To disable these backups, use --no-backup.
5056
5056
5057 See :hg:`help dates` for a list of formats valid for -d/--date.
5057 See :hg:`help dates` for a list of formats valid for -d/--date.
5058
5058
5059 Returns 0 on success.
5059 Returns 0 on success.
5060 """
5060 """
5061
5061
5062 if opts.get("date"):
5062 if opts.get("date"):
5063 if opts.get("rev"):
5063 if opts.get("rev"):
5064 raise util.Abort(_("you can't specify a revision and a date"))
5064 raise util.Abort(_("you can't specify a revision and a date"))
5065 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
5065 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
5066
5066
5067 parent, p2 = repo.dirstate.parents()
5067 parent, p2 = repo.dirstate.parents()
5068 if not opts.get('rev') and p2 != nullid:
5068 if not opts.get('rev') and p2 != nullid:
5069 # revert after merge is a trap for new users (issue2915)
5069 # revert after merge is a trap for new users (issue2915)
5070 raise util.Abort(_('uncommitted merge with no revision specified'),
5070 raise util.Abort(_('uncommitted merge with no revision specified'),
5071 hint=_('use "hg update" or see "hg help revert"'))
5071 hint=_('use "hg update" or see "hg help revert"'))
5072
5072
5073 ctx = scmutil.revsingle(repo, opts.get('rev'))
5073 ctx = scmutil.revsingle(repo, opts.get('rev'))
5074
5074
5075 if not pats and not opts.get('all'):
5075 if not pats and not opts.get('all'):
5076 msg = _("no files or directories specified")
5076 msg = _("no files or directories specified")
5077 if p2 != nullid:
5077 if p2 != nullid:
5078 hint = _("uncommitted merge, use --all to discard all changes,"
5078 hint = _("uncommitted merge, use --all to discard all changes,"
5079 " or 'hg update -C .' to abort the merge")
5079 " or 'hg update -C .' to abort the merge")
5080 raise util.Abort(msg, hint=hint)
5080 raise util.Abort(msg, hint=hint)
5081 dirty = util.any(repo.status())
5081 dirty = util.any(repo.status())
5082 node = ctx.node()
5082 node = ctx.node()
5083 if node != parent:
5083 if node != parent:
5084 if dirty:
5084 if dirty:
5085 hint = _("uncommitted changes, use --all to discard all"
5085 hint = _("uncommitted changes, use --all to discard all"
5086 " changes, or 'hg update %s' to update") % ctx.rev()
5086 " changes, or 'hg update %s' to update") % ctx.rev()
5087 else:
5087 else:
5088 hint = _("use --all to revert all files,"
5088 hint = _("use --all to revert all files,"
5089 " or 'hg update %s' to update") % ctx.rev()
5089 " or 'hg update %s' to update") % ctx.rev()
5090 elif dirty:
5090 elif dirty:
5091 hint = _("uncommitted changes, use --all to discard all changes")
5091 hint = _("uncommitted changes, use --all to discard all changes")
5092 else:
5092 else:
5093 hint = _("use --all to revert all files")
5093 hint = _("use --all to revert all files")
5094 raise util.Abort(msg, hint=hint)
5094 raise util.Abort(msg, hint=hint)
5095
5095
5096 return cmdutil.revert(ui, repo, ctx, (parent, p2), *pats, **opts)
5096 return cmdutil.revert(ui, repo, ctx, (parent, p2), *pats, **opts)
5097
5097
5098 @command('rollback', dryrunopts +
5098 @command('rollback', dryrunopts +
5099 [('f', 'force', False, _('ignore safety measures'))])
5099 [('f', 'force', False, _('ignore safety measures'))])
5100 def rollback(ui, repo, **opts):
5100 def rollback(ui, repo, **opts):
5101 """roll back the last transaction (DANGEROUS) (DEPRECATED)
5101 """roll back the last transaction (DANGEROUS) (DEPRECATED)
5102
5102
5103 Please use :hg:`commit --amend` instead of rollback to correct
5103 Please use :hg:`commit --amend` instead of rollback to correct
5104 mistakes in the last commit.
5104 mistakes in the last commit.
5105
5105
5106 This command should be used with care. There is only one level of
5106 This command should be used with care. There is only one level of
5107 rollback, and there is no way to undo a rollback. It will also
5107 rollback, and there is no way to undo a rollback. It will also
5108 restore the dirstate at the time of the last transaction, losing
5108 restore the dirstate at the time of the last transaction, losing
5109 any dirstate changes since that time. This command does not alter
5109 any dirstate changes since that time. This command does not alter
5110 the working directory.
5110 the working directory.
5111
5111
5112 Transactions are used to encapsulate the effects of all commands
5112 Transactions are used to encapsulate the effects of all commands
5113 that create new changesets or propagate existing changesets into a
5113 that create new changesets or propagate existing changesets into a
5114 repository.
5114 repository.
5115
5115
5116 .. container:: verbose
5116 .. container:: verbose
5117
5117
5118 For example, the following commands are transactional, and their
5118 For example, the following commands are transactional, and their
5119 effects can be rolled back:
5119 effects can be rolled back:
5120
5120
5121 - commit
5121 - commit
5122 - import
5122 - import
5123 - pull
5123 - pull
5124 - push (with this repository as the destination)
5124 - push (with this repository as the destination)
5125 - unbundle
5125 - unbundle
5126
5126
5127 To avoid permanent data loss, rollback will refuse to roll back a
5127 To avoid permanent data loss, rollback will refuse to roll back a
5128 commit transaction if it isn't checked out. Use --force to
5128 commit transaction if it isn't checked out. Use --force to
5129 override this protection.
5129 override this protection.
5130
5130
5131 This command is not intended for use on public repositories. Once
5131 This command is not intended for use on public repositories. Once
5132 changes are visible for pull by other users, rolling a transaction
5132 changes are visible for pull by other users, rolling a transaction
5133 back locally is ineffective (someone else may already have pulled
5133 back locally is ineffective (someone else may already have pulled
5134 the changes). Furthermore, a race is possible with readers of the
5134 the changes). Furthermore, a race is possible with readers of the
5135 repository; for example an in-progress pull from the repository
5135 repository; for example an in-progress pull from the repository
5136 may fail if a rollback is performed.
5136 may fail if a rollback is performed.
5137
5137
5138 Returns 0 on success, 1 if no rollback data is available.
5138 Returns 0 on success, 1 if no rollback data is available.
5139 """
5139 """
5140 return repo.rollback(dryrun=opts.get('dry_run'),
5140 return repo.rollback(dryrun=opts.get('dry_run'),
5141 force=opts.get('force'))
5141 force=opts.get('force'))
5142
5142
5143 @command('root', [])
5143 @command('root', [])
5144 def root(ui, repo):
5144 def root(ui, repo):
5145 """print the root (top) of the current working directory
5145 """print the root (top) of the current working directory
5146
5146
5147 Print the root directory of the current repository.
5147 Print the root directory of the current repository.
5148
5148
5149 Returns 0 on success.
5149 Returns 0 on success.
5150 """
5150 """
5151 ui.write(repo.root + "\n")
5151 ui.write(repo.root + "\n")
5152
5152
5153 @command('^serve',
5153 @command('^serve',
5154 [('A', 'accesslog', '', _('name of access log file to write to'),
5154 [('A', 'accesslog', '', _('name of access log file to write to'),
5155 _('FILE')),
5155 _('FILE')),
5156 ('d', 'daemon', None, _('run server in background')),
5156 ('d', 'daemon', None, _('run server in background')),
5157 ('', 'daemon-pipefds', '', _('used internally by daemon mode'), _('NUM')),
5157 ('', 'daemon-pipefds', '', _('used internally by daemon mode'), _('NUM')),
5158 ('E', 'errorlog', '', _('name of error log file to write to'), _('FILE')),
5158 ('E', 'errorlog', '', _('name of error log file to write to'), _('FILE')),
5159 # use string type, then we can check if something was passed
5159 # use string type, then we can check if something was passed
5160 ('p', 'port', '', _('port to listen on (default: 8000)'), _('PORT')),
5160 ('p', 'port', '', _('port to listen on (default: 8000)'), _('PORT')),
5161 ('a', 'address', '', _('address to listen on (default: all interfaces)'),
5161 ('a', 'address', '', _('address to listen on (default: all interfaces)'),
5162 _('ADDR')),
5162 _('ADDR')),
5163 ('', 'prefix', '', _('prefix path to serve from (default: server root)'),
5163 ('', 'prefix', '', _('prefix path to serve from (default: server root)'),
5164 _('PREFIX')),
5164 _('PREFIX')),
5165 ('n', 'name', '',
5165 ('n', 'name', '',
5166 _('name to show in web pages (default: working directory)'), _('NAME')),
5166 _('name to show in web pages (default: working directory)'), _('NAME')),
5167 ('', 'web-conf', '',
5167 ('', 'web-conf', '',
5168 _('name of the hgweb config file (see "hg help hgweb")'), _('FILE')),
5168 _('name of the hgweb config file (see "hg help hgweb")'), _('FILE')),
5169 ('', 'webdir-conf', '', _('name of the hgweb config file (DEPRECATED)'),
5169 ('', 'webdir-conf', '', _('name of the hgweb config file (DEPRECATED)'),
5170 _('FILE')),
5170 _('FILE')),
5171 ('', 'pid-file', '', _('name of file to write process ID to'), _('FILE')),
5171 ('', 'pid-file', '', _('name of file to write process ID to'), _('FILE')),
5172 ('', 'stdio', None, _('for remote clients')),
5172 ('', 'stdio', None, _('for remote clients')),
5173 ('', 'cmdserver', '', _('for remote clients'), _('MODE')),
5173 ('', 'cmdserver', '', _('for remote clients'), _('MODE')),
5174 ('t', 'templates', '', _('web templates to use'), _('TEMPLATE')),
5174 ('t', 'templates', '', _('web templates to use'), _('TEMPLATE')),
5175 ('', 'style', '', _('template style to use'), _('STYLE')),
5175 ('', 'style', '', _('template style to use'), _('STYLE')),
5176 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
5176 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
5177 ('', 'certificate', '', _('SSL certificate file'), _('FILE'))],
5177 ('', 'certificate', '', _('SSL certificate file'), _('FILE'))],
5178 _('[OPTION]...'))
5178 _('[OPTION]...'))
5179 def serve(ui, repo, **opts):
5179 def serve(ui, repo, **opts):
5180 """start stand-alone webserver
5180 """start stand-alone webserver
5181
5181
5182 Start a local HTTP repository browser and pull server. You can use
5182 Start a local HTTP repository browser and pull server. You can use
5183 this for ad-hoc sharing and browsing of repositories. It is
5183 this for ad-hoc sharing and browsing of repositories. It is
5184 recommended to use a real web server to serve a repository for
5184 recommended to use a real web server to serve a repository for
5185 longer periods of time.
5185 longer periods of time.
5186
5186
5187 Please note that the server does not implement access control.
5187 Please note that the server does not implement access control.
5188 This means that, by default, anybody can read from the server and
5188 This means that, by default, anybody can read from the server and
5189 nobody can write to it. Set the ``web.allow_push``
5189 nobody can write to it. Set the ``web.allow_push``
5190 option to ``*`` to allow everybody to push to the server. You
5190 option to ``*`` to allow everybody to push to the server. You
5191 should use a real web server if you need to authenticate users.
5191 should use a real web server if you need to authenticate users.
5192
5192
5193 By default, the server logs accesses to stdout and errors to
5193 By default, the server logs accesses to stdout and errors to
5194 stderr. Use the -A/--accesslog and -E/--errorlog options to log to
5194 stderr. Use the -A/--accesslog and -E/--errorlog options to log to
5195 files.
5195 files.
5196
5196
5197 To have the server choose a free port number to listen on, specify
5197 To have the server choose a free port number to listen on, specify
5198 a port number of 0; in this case, the server will print the port
5198 a port number of 0; in this case, the server will print the port
5199 number it uses.
5199 number it uses.
5200
5200
5201 Returns 0 on success.
5201 Returns 0 on success.
5202 """
5202 """
5203
5203
5204 if opts["stdio"] and opts["cmdserver"]:
5204 if opts["stdio"] and opts["cmdserver"]:
5205 raise util.Abort(_("cannot use --stdio with --cmdserver"))
5205 raise util.Abort(_("cannot use --stdio with --cmdserver"))
5206
5206
5207 def checkrepo():
5207 def checkrepo():
5208 if repo is None:
5208 if repo is None:
5209 raise error.RepoError(_("there is no Mercurial repository here"
5209 raise error.RepoError(_("there is no Mercurial repository here"
5210 " (.hg not found)"))
5210 " (.hg not found)"))
5211
5211
5212 if opts["stdio"]:
5212 if opts["stdio"]:
5213 checkrepo()
5213 checkrepo()
5214 s = sshserver.sshserver(ui, repo)
5214 s = sshserver.sshserver(ui, repo)
5215 s.serve_forever()
5215 s.serve_forever()
5216
5216
5217 if opts["cmdserver"]:
5217 if opts["cmdserver"]:
5218 s = commandserver.server(ui, repo, opts["cmdserver"])
5218 s = commandserver.server(ui, repo, opts["cmdserver"])
5219 return s.serve()
5219 return s.serve()
5220
5220
5221 # this way we can check if something was given in the command-line
5221 # this way we can check if something was given in the command-line
5222 if opts.get('port'):
5222 if opts.get('port'):
5223 opts['port'] = util.getport(opts.get('port'))
5223 opts['port'] = util.getport(opts.get('port'))
5224
5224
5225 baseui = repo and repo.baseui or ui
5225 baseui = repo and repo.baseui or ui
5226 optlist = ("name templates style address port prefix ipv6"
5226 optlist = ("name templates style address port prefix ipv6"
5227 " accesslog errorlog certificate encoding")
5227 " accesslog errorlog certificate encoding")
5228 for o in optlist.split():
5228 for o in optlist.split():
5229 val = opts.get(o, '')
5229 val = opts.get(o, '')
5230 if val in (None, ''): # should check against default options instead
5230 if val in (None, ''): # should check against default options instead
5231 continue
5231 continue
5232 baseui.setconfig("web", o, val, 'serve')
5232 baseui.setconfig("web", o, val, 'serve')
5233 if repo and repo.ui != baseui:
5233 if repo and repo.ui != baseui:
5234 repo.ui.setconfig("web", o, val, 'serve')
5234 repo.ui.setconfig("web", o, val, 'serve')
5235
5235
5236 o = opts.get('web_conf') or opts.get('webdir_conf')
5236 o = opts.get('web_conf') or opts.get('webdir_conf')
5237 if not o:
5237 if not o:
5238 if not repo:
5238 if not repo:
5239 raise error.RepoError(_("there is no Mercurial repository"
5239 raise error.RepoError(_("there is no Mercurial repository"
5240 " here (.hg not found)"))
5240 " here (.hg not found)"))
5241 o = repo
5241 o = repo
5242
5242
5243 app = hgweb.hgweb(o, baseui=baseui)
5243 app = hgweb.hgweb(o, baseui=baseui)
5244 service = httpservice(ui, app, opts)
5244 service = httpservice(ui, app, opts)
5245 cmdutil.service(opts, initfn=service.init, runfn=service.run)
5245 cmdutil.service(opts, initfn=service.init, runfn=service.run)
5246
5246
5247 class httpservice(object):
5247 class httpservice(object):
5248 def __init__(self, ui, app, opts):
5248 def __init__(self, ui, app, opts):
5249 self.ui = ui
5249 self.ui = ui
5250 self.app = app
5250 self.app = app
5251 self.opts = opts
5251 self.opts = opts
5252
5252
5253 def init(self):
5253 def init(self):
5254 util.setsignalhandler()
5254 util.setsignalhandler()
5255 self.httpd = hgweb_server.create_server(self.ui, self.app)
5255 self.httpd = hgweb_server.create_server(self.ui, self.app)
5256
5256
5257 if self.opts['port'] and not self.ui.verbose:
5257 if self.opts['port'] and not self.ui.verbose:
5258 return
5258 return
5259
5259
5260 if self.httpd.prefix:
5260 if self.httpd.prefix:
5261 prefix = self.httpd.prefix.strip('/') + '/'
5261 prefix = self.httpd.prefix.strip('/') + '/'
5262 else:
5262 else:
5263 prefix = ''
5263 prefix = ''
5264
5264
5265 port = ':%d' % self.httpd.port
5265 port = ':%d' % self.httpd.port
5266 if port == ':80':
5266 if port == ':80':
5267 port = ''
5267 port = ''
5268
5268
5269 bindaddr = self.httpd.addr
5269 bindaddr = self.httpd.addr
5270 if bindaddr == '0.0.0.0':
5270 if bindaddr == '0.0.0.0':
5271 bindaddr = '*'
5271 bindaddr = '*'
5272 elif ':' in bindaddr: # IPv6
5272 elif ':' in bindaddr: # IPv6
5273 bindaddr = '[%s]' % bindaddr
5273 bindaddr = '[%s]' % bindaddr
5274
5274
5275 fqaddr = self.httpd.fqaddr
5275 fqaddr = self.httpd.fqaddr
5276 if ':' in fqaddr:
5276 if ':' in fqaddr:
5277 fqaddr = '[%s]' % fqaddr
5277 fqaddr = '[%s]' % fqaddr
5278 if self.opts['port']:
5278 if self.opts['port']:
5279 write = self.ui.status
5279 write = self.ui.status
5280 else:
5280 else:
5281 write = self.ui.write
5281 write = self.ui.write
5282 write(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
5282 write(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
5283 (fqaddr, port, prefix, bindaddr, self.httpd.port))
5283 (fqaddr, port, prefix, bindaddr, self.httpd.port))
5284
5284
5285 def run(self):
5285 def run(self):
5286 self.httpd.serve_forever()
5286 self.httpd.serve_forever()
5287
5287
5288
5288
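# Editor's sketch (standalone, names are illustrative rather than hgweb API):
# the address normalisation done in httpservice.init() above, reduced to a
# single helper:
def format_listen_message(fqaddr, port, prefix, bindaddr):
    prefix = prefix.strip('/') + '/' if prefix else ''
    shownport = '' if port == 80 else ':%d' % port
    if bindaddr == '0.0.0.0':
        bindaddr = '*'                  # bound to every IPv4 interface
    elif ':' in bindaddr:
        bindaddr = '[%s]' % bindaddr    # bracket IPv6 literals
    if ':' in fqaddr:
        fqaddr = '[%s]' % fqaddr
    return ('listening at http://%s%s/%s (bound to %s:%d)'
            % (fqaddr, shownport, prefix, bindaddr, port))

# format_listen_message('example.com', 8000, '', '0.0.0.0')
# -> 'listening at http://example.com:8000/ (bound to *:8000)'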
5289 @command('^status|st',
5289 @command('^status|st',
5290 [('A', 'all', None, _('show status of all files')),
5290 [('A', 'all', None, _('show status of all files')),
5291 ('m', 'modified', None, _('show only modified files')),
5291 ('m', 'modified', None, _('show only modified files')),
5292 ('a', 'added', None, _('show only added files')),
5292 ('a', 'added', None, _('show only added files')),
5293 ('r', 'removed', None, _('show only removed files')),
5293 ('r', 'removed', None, _('show only removed files')),
5294 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
5294 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
5295 ('c', 'clean', None, _('show only files without changes')),
5295 ('c', 'clean', None, _('show only files without changes')),
5296 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
5296 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
5297 ('i', 'ignored', None, _('show only ignored files')),
5297 ('i', 'ignored', None, _('show only ignored files')),
5298 ('n', 'no-status', None, _('hide status prefix')),
5298 ('n', 'no-status', None, _('hide status prefix')),
5299 ('C', 'copies', None, _('show source of copied files')),
5299 ('C', 'copies', None, _('show source of copied files')),
5300 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
5300 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
5301 ('', 'rev', [], _('show difference from revision'), _('REV')),
5301 ('', 'rev', [], _('show difference from revision'), _('REV')),
5302 ('', 'change', '', _('list the changed files of a revision'), _('REV')),
5302 ('', 'change', '', _('list the changed files of a revision'), _('REV')),
5303 ] + walkopts + subrepoopts,
5303 ] + walkopts + subrepoopts,
5304 _('[OPTION]... [FILE]...'))
5304 _('[OPTION]... [FILE]...'))
5305 def status(ui, repo, *pats, **opts):
5305 def status(ui, repo, *pats, **opts):
5306 """show changed files in the working directory
5306 """show changed files in the working directory
5307
5307
5308 Show status of files in the repository. If names are given, only
5308 Show status of files in the repository. If names are given, only
5309 files that match are shown. Files that are clean or ignored or
5309 files that match are shown. Files that are clean or ignored or
5310 the source of a copy/move operation are not listed unless
5310 the source of a copy/move operation are not listed unless
5311 -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
5311 -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
5312 Unless options described with "show only ..." are given, the
5312 Unless options described with "show only ..." are given, the
5313 options -mardu are used.
5313 options -mardu are used.
5314
5314
5315 Option -q/--quiet hides untracked (unknown and ignored) files
5315 Option -q/--quiet hides untracked (unknown and ignored) files
5316 unless explicitly requested with -u/--unknown or -i/--ignored.
5316 unless explicitly requested with -u/--unknown or -i/--ignored.
5317
5317
5318 .. note::
5318 .. note::
5319
5319
5320 status may appear to disagree with diff if permissions have
5320 status may appear to disagree with diff if permissions have
5321 changed or a merge has occurred. The standard diff format does
5321 changed or a merge has occurred. The standard diff format does
5322 not report permission changes and diff only reports changes
5322 not report permission changes and diff only reports changes
5323 relative to one merge parent.
5323 relative to one merge parent.
5324
5324
5325 If one revision is given, it is used as the base revision.
5325 If one revision is given, it is used as the base revision.
5326 If two revisions are given, the differences between them are
5326 If two revisions are given, the differences between them are
5327 shown. The --change option can also be used as a shortcut to list
5327 shown. The --change option can also be used as a shortcut to list
5328 the changed files of a revision from its first parent.
5328 the changed files of a revision from its first parent.
5329
5329
5330 The codes used to show the status of files are::
5330 The codes used to show the status of files are::
5331
5331
5332 M = modified
5332 M = modified
5333 A = added
5333 A = added
5334 R = removed
5334 R = removed
5335 C = clean
5335 C = clean
5336 ! = missing (deleted by non-hg command, but still tracked)
5336 ! = missing (deleted by non-hg command, but still tracked)
5337 ? = not tracked
5337 ? = not tracked
5338 I = ignored
5338 I = ignored
5339 = origin of the previous file (with --copies)
5339 = origin of the previous file (with --copies)
5340
5340
5341 .. container:: verbose
5341 .. container:: verbose
5342
5342
5343 Examples:
5343 Examples:
5344
5344
5345 - show changes in the working directory relative to a
5345 - show changes in the working directory relative to a
5346 changeset::
5346 changeset::
5347
5347
5348 hg status --rev 9353
5348 hg status --rev 9353
5349
5349
5350 - show all changes including copies in an existing changeset::
5350 - show all changes including copies in an existing changeset::
5351
5351
5352 hg status --copies --change 9353
5352 hg status --copies --change 9353
5353
5353
5354 - get a NUL separated list of added files, suitable for xargs::
5354 - get a NUL separated list of added files, suitable for xargs::
5355
5355
5356 hg status -an0
5356 hg status -an0
5357
5357
5358 Returns 0 on success.
5358 Returns 0 on success.
5359 """
5359 """
5360
5360
5361 revs = opts.get('rev')
5361 revs = opts.get('rev')
5362 change = opts.get('change')
5362 change = opts.get('change')
5363
5363
5364 if revs and change:
5364 if revs and change:
5365 msg = _('cannot specify --rev and --change at the same time')
5365 msg = _('cannot specify --rev and --change at the same time')
5366 raise util.Abort(msg)
5366 raise util.Abort(msg)
5367 elif change:
5367 elif change:
5368 node2 = scmutil.revsingle(repo, change, None).node()
5368 node2 = scmutil.revsingle(repo, change, None).node()
5369 node1 = repo[node2].p1().node()
5369 node1 = repo[node2].p1().node()
5370 else:
5370 else:
5371 node1, node2 = scmutil.revpair(repo, revs)
5371 node1, node2 = scmutil.revpair(repo, revs)
5372
5372
5373 cwd = (pats and repo.getcwd()) or ''
5373 cwd = (pats and repo.getcwd()) or ''
5374 end = opts.get('print0') and '\0' or '\n'
5374 end = opts.get('print0') and '\0' or '\n'
5375 copy = {}
5375 copy = {}
5376 states = 'modified added removed deleted unknown ignored clean'.split()
5376 states = 'modified added removed deleted unknown ignored clean'.split()
5377 show = [k for k in states if opts.get(k)]
5377 show = [k for k in states if opts.get(k)]
5378 if opts.get('all'):
5378 if opts.get('all'):
5379 show += ui.quiet and (states[:4] + ['clean']) or states
5379 show += ui.quiet and (states[:4] + ['clean']) or states
5380 if not show:
5380 if not show:
5381 show = ui.quiet and states[:4] or states[:5]
5381 show = ui.quiet and states[:4] or states[:5]
5382
5382
5383 stat = repo.status(node1, node2, scmutil.match(repo[node2], pats, opts),
5383 stat = repo.status(node1, node2, scmutil.match(repo[node2], pats, opts),
5384 'ignored' in show, 'clean' in show, 'unknown' in show,
5384 'ignored' in show, 'clean' in show, 'unknown' in show,
5385 opts.get('subrepos'))
5385 opts.get('subrepos'))
5386 changestates = zip(states, 'MAR!?IC', stat)
5386 changestates = zip(states, 'MAR!?IC', stat)
5387
5387
5388 if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
5388 if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
5389 copy = copies.pathcopies(repo[node1], repo[node2])
5389 copy = copies.pathcopies(repo[node1], repo[node2])
5390
5390
5391 fm = ui.formatter('status', opts)
5391 fm = ui.formatter('status', opts)
5392 fmt = '%s' + end
5392 fmt = '%s' + end
5393 showchar = not opts.get('no_status')
5393 showchar = not opts.get('no_status')
5394
5394
5395 for state, char, files in changestates:
5395 for state, char, files in changestates:
5396 if state in show:
5396 if state in show:
5397 label = 'status.' + state
5397 label = 'status.' + state
5398 for f in files:
5398 for f in files:
5399 fm.startitem()
5399 fm.startitem()
5400 fm.condwrite(showchar, 'status', '%s ', char, label=label)
5400 fm.condwrite(showchar, 'status', '%s ', char, label=label)
5401 fm.write('path', fmt, repo.pathto(f, cwd), label=label)
5401 fm.write('path', fmt, repo.pathto(f, cwd), label=label)
5402 if f in copy:
5402 if f in copy:
5403 fm.write("copy", ' %s' + end, repo.pathto(copy[f], cwd),
5403 fm.write("copy", ' %s' + end, repo.pathto(copy[f], cwd),
5404 label='status.copied')
5404 label='status.copied')
5405 fm.end()
5405 fm.end()
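The loop above pairs each state name with its one-letter code and the matching file list returned by repo.status(). A minimal sketch of that correspondence, reusing only calls that appear in this function (the bare working-directory comparison is an assumption for illustration):

    # compare the working directory with its parent; keyword names as used above
    states = 'modified added removed deleted unknown ignored clean'.split()
    stat = repo.status(ignored=True, clean=True, unknown=True)
    for state, char, files in zip(states, 'MAR!?IC', stat):
        for f in files:
            ui.write('%s %s\n' % (char, f))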
5406
5406
5407 @command('^summary|sum',
5407 @command('^summary|sum',
5408 [('', 'remote', None, _('check for push and pull'))], '[--remote]')
5408 [('', 'remote', None, _('check for push and pull'))], '[--remote]')
5409 def summary(ui, repo, **opts):
5409 def summary(ui, repo, **opts):
5410 """summarize working directory state
5410 """summarize working directory state
5411
5411
5412 This generates a brief summary of the working directory state,
5412 This generates a brief summary of the working directory state,
5413 including parents, branch, commit status, and available updates.
5413 including parents, branch, commit status, and available updates.
5414
5414
5415 With the --remote option, this will check the default paths for
5415 With the --remote option, this will check the default paths for
5416 incoming and outgoing changes. This can be time-consuming.
5416 incoming and outgoing changes. This can be time-consuming.
5417
5417
5418 Returns 0 on success.
5418 Returns 0 on success.
5419 """
5419 """
5420
5420
5421 ctx = repo[None]
5421 ctx = repo[None]
5422 parents = ctx.parents()
5422 parents = ctx.parents()
5423 pnode = parents[0].node()
5423 pnode = parents[0].node()
5424 marks = []
5424 marks = []
5425
5425
5426 for p in parents:
5426 for p in parents:
5427 # label with log.changeset (instead of log.parent) since this
5427 # label with log.changeset (instead of log.parent) since this
5428 # shows a working directory parent *changeset*:
5428 # shows a working directory parent *changeset*:
5429 # i18n: column positioning for "hg summary"
5429 # i18n: column positioning for "hg summary"
5430 ui.write(_('parent: %d:%s ') % (p.rev(), str(p)),
5430 ui.write(_('parent: %d:%s ') % (p.rev(), str(p)),
5431 label='log.changeset changeset.%s' % p.phasestr())
5431 label='log.changeset changeset.%s' % p.phasestr())
5432 ui.write(' '.join(p.tags()), label='log.tag')
5432 ui.write(' '.join(p.tags()), label='log.tag')
5433 if p.bookmarks():
5433 if p.bookmarks():
5434 marks.extend(p.bookmarks())
5434 marks.extend(p.bookmarks())
5435 if p.rev() == -1:
5435 if p.rev() == -1:
5436 if not len(repo):
5436 if not len(repo):
5437 ui.write(_(' (empty repository)'))
5437 ui.write(_(' (empty repository)'))
5438 else:
5438 else:
5439 ui.write(_(' (no revision checked out)'))
5439 ui.write(_(' (no revision checked out)'))
5440 ui.write('\n')
5440 ui.write('\n')
5441 if p.description():
5441 if p.description():
5442 ui.status(' ' + p.description().splitlines()[0].strip() + '\n',
5442 ui.status(' ' + p.description().splitlines()[0].strip() + '\n',
5443 label='log.summary')
5443 label='log.summary')
5444
5444
5445 branch = ctx.branch()
5445 branch = ctx.branch()
5446 bheads = repo.branchheads(branch)
5446 bheads = repo.branchheads(branch)
5447 # i18n: column positioning for "hg summary"
5447 # i18n: column positioning for "hg summary"
5448 m = _('branch: %s\n') % branch
5448 m = _('branch: %s\n') % branch
5449 if branch != 'default':
5449 if branch != 'default':
5450 ui.write(m, label='log.branch')
5450 ui.write(m, label='log.branch')
5451 else:
5451 else:
5452 ui.status(m, label='log.branch')
5452 ui.status(m, label='log.branch')
5453
5453
5454 if marks:
5454 if marks:
5455 current = repo._bookmarkcurrent
5455 current = repo._bookmarkcurrent
5456 # i18n: column positioning for "hg summary"
5456 # i18n: column positioning for "hg summary"
5457 ui.write(_('bookmarks:'), label='log.bookmark')
5457 ui.write(_('bookmarks:'), label='log.bookmark')
5458 if current is not None:
5458 if current is not None:
5459 if current in marks:
5459 if current in marks:
5460 ui.write(' *' + current, label='bookmarks.current')
5460 ui.write(' *' + current, label='bookmarks.current')
5461 marks.remove(current)
5461 marks.remove(current)
5462 else:
5462 else:
5463 ui.write(' [%s]' % current, label='bookmarks.current')
5463 ui.write(' [%s]' % current, label='bookmarks.current')
5464 for m in marks:
5464 for m in marks:
5465 ui.write(' ' + m, label='log.bookmark')
5465 ui.write(' ' + m, label='log.bookmark')
5466 ui.write('\n', label='log.bookmark')
5466 ui.write('\n', label='log.bookmark')
5467
5467
5468 st = list(repo.status(unknown=True))[:6]
5468 st = list(repo.status(unknown=True))[:6]
5469
5469
5470 c = repo.dirstate.copies()
5470 c = repo.dirstate.copies()
5471 copied, renamed = [], []
5471 copied, renamed = [], []
5472 for d, s in c.iteritems():
5472 for d, s in c.iteritems():
5473 if s in st[2]:
5473 if s in st[2]:
5474 st[2].remove(s)
5474 st[2].remove(s)
5475 renamed.append(d)
5475 renamed.append(d)
5476 else:
5476 else:
5477 copied.append(d)
5477 copied.append(d)
5478 if d in st[1]:
5478 if d in st[1]:
5479 st[1].remove(d)
5479 st[1].remove(d)
5480 st.insert(3, renamed)
5480 st.insert(3, renamed)
5481 st.insert(4, copied)
5481 st.insert(4, copied)
5482
5482
5483 ms = mergemod.mergestate(repo)
5483 ms = mergemod.mergestate(repo)
5484 st.append([f for f in ms if ms[f] == 'u'])
5484 st.append([f for f in ms if ms[f] == 'u'])
5485
5485
5486 subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
5486 subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
5487 st.append(subs)
5487 st.append(subs)
5488
5488
5489 labels = [ui.label(_('%d modified'), 'status.modified'),
5489 labels = [ui.label(_('%d modified'), 'status.modified'),
5490 ui.label(_('%d added'), 'status.added'),
5490 ui.label(_('%d added'), 'status.added'),
5491 ui.label(_('%d removed'), 'status.removed'),
5491 ui.label(_('%d removed'), 'status.removed'),
5492 ui.label(_('%d renamed'), 'status.copied'),
5492 ui.label(_('%d renamed'), 'status.copied'),
5493 ui.label(_('%d copied'), 'status.copied'),
5493 ui.label(_('%d copied'), 'status.copied'),
5494 ui.label(_('%d deleted'), 'status.deleted'),
5494 ui.label(_('%d deleted'), 'status.deleted'),
5495 ui.label(_('%d unknown'), 'status.unknown'),
5495 ui.label(_('%d unknown'), 'status.unknown'),
5496 ui.label(_('%d ignored'), 'status.ignored'),
5496 ui.label(_('%d ignored'), 'status.ignored'),
5497 ui.label(_('%d unresolved'), 'resolve.unresolved'),
5497 ui.label(_('%d unresolved'), 'resolve.unresolved'),
5498 ui.label(_('%d subrepos'), 'status.modified')]
5498 ui.label(_('%d subrepos'), 'status.modified')]
5499 t = []
5499 t = []
5500 for s, l in zip(st, labels):
5500 for s, l in zip(st, labels):
5501 if s:
5501 if s:
5502 t.append(l % len(s))
5502 t.append(l % len(s))
5503
5503
5504 t = ', '.join(t)
5504 t = ', '.join(t)
5505 cleanworkdir = False
5505 cleanworkdir = False
5506
5506
5507 if repo.vfs.exists('updatestate'):
5507 if repo.vfs.exists('updatestate'):
5508 t += _(' (interrupted update)')
5508 t += _(' (interrupted update)')
5509 elif len(parents) > 1:
5509 elif len(parents) > 1:
5510 t += _(' (merge)')
5510 t += _(' (merge)')
5511 elif branch != parents[0].branch():
5511 elif branch != parents[0].branch():
5512 t += _(' (new branch)')
5512 t += _(' (new branch)')
5513 elif (parents[0].closesbranch() and
5513 elif (parents[0].closesbranch() and
5514 pnode in repo.branchheads(branch, closed=True)):
5514 pnode in repo.branchheads(branch, closed=True)):
5515 t += _(' (head closed)')
5515 t += _(' (head closed)')
5516 elif not (st[0] or st[1] or st[2] or st[3] or st[4] or st[9]):
5516 elif not (st[0] or st[1] or st[2] or st[3] or st[4] or st[9]):
5517 t += _(' (clean)')
5517 t += _(' (clean)')
5518 cleanworkdir = True
5518 cleanworkdir = True
5519 elif pnode not in bheads:
5519 elif pnode not in bheads:
5520 t += _(' (new branch head)')
5520 t += _(' (new branch head)')
5521
5521
5522 if cleanworkdir:
5522 if cleanworkdir:
5523 # i18n: column positioning for "hg summary"
5523 # i18n: column positioning for "hg summary"
5524 ui.status(_('commit: %s\n') % t.strip())
5524 ui.status(_('commit: %s\n') % t.strip())
5525 else:
5525 else:
5526 # i18n: column positioning for "hg summary"
5526 # i18n: column positioning for "hg summary"
5527 ui.write(_('commit: %s\n') % t.strip())
5527 ui.write(_('commit: %s\n') % t.strip())
5528
5528
5529 # all ancestors of branch heads - all ancestors of parent = new csets
5529 # all ancestors of branch heads - all ancestors of parent = new csets
5530 new = len(repo.changelog.findmissing([ctx.node() for ctx in parents],
5530 new = len(repo.changelog.findmissing([ctx.node() for ctx in parents],
5531 bheads))
5531 bheads))
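The same set, ancestors(bheads) minus ancestors(parents), can also be written as a revset; a hedged sketch, assuming repo.revs() and the standard revset operators are available here:

    # illustrative equivalent of the findmissing() call above
    new = len(repo.revs('ancestors(%ln) - ancestors(%ln)',
                        bheads, [p.node() for p in parents]))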
5532
5532
5533 if new == 0:
5533 if new == 0:
5534 # i18n: column positioning for "hg summary"
5534 # i18n: column positioning for "hg summary"
5535 ui.status(_('update: (current)\n'))
5535 ui.status(_('update: (current)\n'))
5536 elif pnode not in bheads:
5536 elif pnode not in bheads:
5537 # i18n: column positioning for "hg summary"
5537 # i18n: column positioning for "hg summary"
5538 ui.write(_('update: %d new changesets (update)\n') % new)
5538 ui.write(_('update: %d new changesets (update)\n') % new)
5539 else:
5539 else:
5540 # i18n: column positioning for "hg summary"
5540 # i18n: column positioning for "hg summary"
5541 ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
5541 ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
5542 (new, len(bheads)))
5542 (new, len(bheads)))
5543
5543
5544 cmdutil.summaryhooks(ui, repo)
5544 cmdutil.summaryhooks(ui, repo)
5545
5545
5546 if opts.get('remote'):
5546 if opts.get('remote'):
5547 t = []
5547 t = []
5548 source, branches = hg.parseurl(ui.expandpath('default'))
5548 source, branches = hg.parseurl(ui.expandpath('default'))
5549 sbranch = branches[0]
5549 sbranch = branches[0]
5550 other = hg.peer(repo, {}, source)
5550 other = hg.peer(repo, {}, source)
5551 revs, checkout = hg.addbranchrevs(repo, other, branches, None)
5551 revs, checkout = hg.addbranchrevs(repo, other, branches, None)
5552 if revs:
5552 if revs:
5553 revs = [other.lookup(rev) for rev in revs]
5553 revs = [other.lookup(rev) for rev in revs]
5554 ui.debug('comparing with %s\n' % util.hidepassword(source))
5554 ui.debug('comparing with %s\n' % util.hidepassword(source))
5555 repo.ui.pushbuffer()
5555 repo.ui.pushbuffer()
5556 commoninc = discovery.findcommonincoming(repo, other, heads=revs)
5556 commoninc = discovery.findcommonincoming(repo, other, heads=revs)
5557 _common, incoming, _rheads = commoninc
5557 _common, incoming, _rheads = commoninc
5558 repo.ui.popbuffer()
5558 repo.ui.popbuffer()
5559 if incoming:
5559 if incoming:
5560 t.append(_('1 or more incoming'))
5560 t.append(_('1 or more incoming'))
5561
5561
5562 dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
5562 dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
5563 dbranch = branches[0]
5563 dbranch = branches[0]
5564 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
5564 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
5565 if source != dest:
5565 if source != dest:
5566 other = hg.peer(repo, {}, dest)
5566 other = hg.peer(repo, {}, dest)
5567 ui.debug('comparing with %s\n' % util.hidepassword(dest))
5567 ui.debug('comparing with %s\n' % util.hidepassword(dest))
5568 if (source != dest or (sbranch is not None and sbranch != dbranch)):
5568 if (source != dest or (sbranch is not None and sbranch != dbranch)):
5569 commoninc = None
5569 commoninc = None
5570 if revs:
5570 if revs:
5571 revs = [repo.lookup(rev) for rev in revs]
5571 revs = [repo.lookup(rev) for rev in revs]
5572 repo.ui.pushbuffer()
5572 repo.ui.pushbuffer()
5573 outgoing = discovery.findcommonoutgoing(repo, other, onlyheads=revs,
5573 outgoing = discovery.findcommonoutgoing(repo, other, onlyheads=revs,
5574 commoninc=commoninc)
5574 commoninc=commoninc)
5575 repo.ui.popbuffer()
5575 repo.ui.popbuffer()
5576 o = outgoing.missing
5576 o = outgoing.missing
5577 if o:
5577 if o:
5578 t.append(_('%d outgoing') % len(o))
5578 t.append(_('%d outgoing') % len(o))
5579 if 'bookmarks' in other.listkeys('namespaces'):
5579 if 'bookmarks' in other.listkeys('namespaces'):
5580 lmarks = repo.listkeys('bookmarks')
5580 lmarks = repo.listkeys('bookmarks')
5581 rmarks = other.listkeys('bookmarks')
5581 rmarks = other.listkeys('bookmarks')
5582 diff = set(rmarks) - set(lmarks)
5582 diff = set(rmarks) - set(lmarks)
5583 if len(diff) > 0:
5583 if len(diff) > 0:
5584 t.append(_('%d incoming bookmarks') % len(diff))
5584 t.append(_('%d incoming bookmarks') % len(diff))
5585 diff = set(lmarks) - set(rmarks)
5585 diff = set(lmarks) - set(rmarks)
5586 if len(diff) > 0:
5586 if len(diff) > 0:
5587 t.append(_('%d outgoing bookmarks') % len(diff))
5587 t.append(_('%d outgoing bookmarks') % len(diff))
5588
5588
5589 if t:
5589 if t:
5590 # i18n: column positioning for "hg summary"
5590 # i18n: column positioning for "hg summary"
5591 ui.write(_('remote: %s\n') % (', '.join(t)))
5591 ui.write(_('remote: %s\n') % (', '.join(t)))
5592 else:
5592 else:
5593 # i18n: column positioning for "hg summary"
5593 # i18n: column positioning for "hg summary"
5594 ui.status(_('remote: (synced)\n'))
5594 ui.status(_('remote: (synced)\n'))
5595
5595
5596 @command('tag',
5596 @command('tag',
5597 [('f', 'force', None, _('force tag')),
5597 [('f', 'force', None, _('force tag')),
5598 ('l', 'local', None, _('make the tag local')),
5598 ('l', 'local', None, _('make the tag local')),
5599 ('r', 'rev', '', _('revision to tag'), _('REV')),
5599 ('r', 'rev', '', _('revision to tag'), _('REV')),
5600 ('', 'remove', None, _('remove a tag')),
5600 ('', 'remove', None, _('remove a tag')),
5601 # -l/--local is already there, commitopts cannot be used
5601 # -l/--local is already there, commitopts cannot be used
5602 ('e', 'edit', None, _('edit commit message')),
5602 ('e', 'edit', None, _('edit commit message')),
5603 ('m', 'message', '', _('use <text> as commit message'), _('TEXT')),
5603 ('m', 'message', '', _('use <text> as commit message'), _('TEXT')),
5604 ] + commitopts2,
5604 ] + commitopts2,
5605 _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...'))
5605 _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...'))
5606 def tag(ui, repo, name1, *names, **opts):
5606 def tag(ui, repo, name1, *names, **opts):
5607 """add one or more tags for the current or given revision
5607 """add one or more tags for the current or given revision
5608
5608
5609 Name a particular revision using <name>.
5609 Name a particular revision using <name>.
5610
5610
5611 Tags are used to name particular revisions of the repository and are
5611 Tags are used to name particular revisions of the repository and are
5612 very useful to compare different revisions, to go back to significant
5612 very useful to compare different revisions, to go back to significant
5613 earlier versions or to mark branch points as releases, etc. Changing
5613 earlier versions or to mark branch points as releases, etc. Changing
5614 an existing tag is normally disallowed; use -f/--force to override.
5614 an existing tag is normally disallowed; use -f/--force to override.
5615
5615
5616 If no revision is given, the parent of the working directory is
5616 If no revision is given, the parent of the working directory is
5617 used.
5617 used.
5618
5618
5619 To facilitate version control, distribution, and merging of tags,
5619 To facilitate version control, distribution, and merging of tags,
5620 they are stored as a file named ".hgtags" which is managed similarly
5620 they are stored as a file named ".hgtags" which is managed similarly
5621 to other project files and can be hand-edited if necessary. This
5621 to other project files and can be hand-edited if necessary. This
5622 also means that tagging creates a new commit. The file
5622 also means that tagging creates a new commit. The file
5623 ".hg/localtags" is used for local tags (not shared among
5623 ".hg/localtags" is used for local tags (not shared among
5624 repositories).
5624 repositories).
5625
5625
5626 Tag commits are usually made at the head of a branch. If the parent
5626 Tag commits are usually made at the head of a branch. If the parent
5627 of the working directory is not a branch head, :hg:`tag` aborts; use
5627 of the working directory is not a branch head, :hg:`tag` aborts; use
5628 -f/--force to force the tag commit to be based on a non-head
5628 -f/--force to force the tag commit to be based on a non-head
5629 changeset.
5629 changeset.
5630
5630
5631 See :hg:`help dates` for a list of formats valid for -d/--date.
5631 See :hg:`help dates` for a list of formats valid for -d/--date.
5632
5632
5633 Since tag names have priority over branch names during revision
5633 Since tag names have priority over branch names during revision
5634 lookup, using an existing branch name as a tag name is discouraged.
5634 lookup, using an existing branch name as a tag name is discouraged.
5635
5635
5636 Returns 0 on success.
5636 Returns 0 on success.
5637 """
5637 """
5638 wlock = lock = None
5638 wlock = lock = None
5639 try:
5639 try:
5640 wlock = repo.wlock()
5640 wlock = repo.wlock()
5641 lock = repo.lock()
5641 lock = repo.lock()
5642 rev_ = "."
5642 rev_ = "."
5643 names = [t.strip() for t in (name1,) + names]
5643 names = [t.strip() for t in (name1,) + names]
5644 if len(names) != len(set(names)):
5644 if len(names) != len(set(names)):
5645 raise util.Abort(_('tag names must be unique'))
5645 raise util.Abort(_('tag names must be unique'))
5646 for n in names:
5646 for n in names:
5647 scmutil.checknewlabel(repo, n, 'tag')
5647 scmutil.checknewlabel(repo, n, 'tag')
5648 if not n:
5648 if not n:
5649 raise util.Abort(_('tag names cannot consist entirely of '
5649 raise util.Abort(_('tag names cannot consist entirely of '
5650 'whitespace'))
5650 'whitespace'))
5651 if opts.get('rev') and opts.get('remove'):
5651 if opts.get('rev') and opts.get('remove'):
5652 raise util.Abort(_("--rev and --remove are incompatible"))
5652 raise util.Abort(_("--rev and --remove are incompatible"))
5653 if opts.get('rev'):
5653 if opts.get('rev'):
5654 rev_ = opts['rev']
5654 rev_ = opts['rev']
5655 message = opts.get('message')
5655 message = opts.get('message')
5656 if opts.get('remove'):
5656 if opts.get('remove'):
5657 expectedtype = opts.get('local') and 'local' or 'global'
5657 expectedtype = opts.get('local') and 'local' or 'global'
5658 for n in names:
5658 for n in names:
5659 if not repo.tagtype(n):
5659 if not repo.tagtype(n):
5660 raise util.Abort(_("tag '%s' does not exist") % n)
5660 raise util.Abort(_("tag '%s' does not exist") % n)
5661 if repo.tagtype(n) != expectedtype:
5661 if repo.tagtype(n) != expectedtype:
5662 if expectedtype == 'global':
5662 if expectedtype == 'global':
5663 raise util.Abort(_("tag '%s' is not a global tag") % n)
5663 raise util.Abort(_("tag '%s' is not a global tag") % n)
5664 else:
5664 else:
5665 raise util.Abort(_("tag '%s' is not a local tag") % n)
5665 raise util.Abort(_("tag '%s' is not a local tag") % n)
5666 rev_ = nullid
5666 rev_ = nullid
5667 if not message:
5667 if not message:
5668 # we don't translate commit messages
5668 # we don't translate commit messages
5669 message = 'Removed tag %s' % ', '.join(names)
5669 message = 'Removed tag %s' % ', '.join(names)
5670 elif not opts.get('force'):
5670 elif not opts.get('force'):
5671 for n in names:
5671 for n in names:
5672 if n in repo.tags():
5672 if n in repo.tags():
5673 raise util.Abort(_("tag '%s' already exists "
5673 raise util.Abort(_("tag '%s' already exists "
5674 "(use -f to force)") % n)
5674 "(use -f to force)") % n)
5675 if not opts.get('local'):
5675 if not opts.get('local'):
5676 p1, p2 = repo.dirstate.parents()
5676 p1, p2 = repo.dirstate.parents()
5677 if p2 != nullid:
5677 if p2 != nullid:
5678 raise util.Abort(_('uncommitted merge'))
5678 raise util.Abort(_('uncommitted merge'))
5679 bheads = repo.branchheads()
5679 bheads = repo.branchheads()
5680 if not opts.get('force') and bheads and p1 not in bheads:
5680 if not opts.get('force') and bheads and p1 not in bheads:
5681 raise util.Abort(_('not at a branch head (use -f to force)'))
5681 raise util.Abort(_('not at a branch head (use -f to force)'))
5682 r = scmutil.revsingle(repo, rev_).node()
5682 r = scmutil.revsingle(repo, rev_).node()
5683
5683
5684 if not message:
5684 if not message:
5685 # we don't translate commit messages
5685 # we don't translate commit messages
5686 message = ('Added tag %s for changeset %s' %
5686 message = ('Added tag %s for changeset %s' %
5687 (', '.join(names), short(r)))
5687 (', '.join(names), short(r)))
5688
5688
5689 date = opts.get('date')
5689 date = opts.get('date')
5690 if date:
5690 if date:
5691 date = util.parsedate(date)
5691 date = util.parsedate(date)
5692
5692
5693 if opts.get('edit'):
5693 if opts.get('edit'):
5694 message = ui.edit(message, ui.username())
5694 message = ui.edit(message, ui.username())
5695 repo.savecommitmessage(message)
5695 repo.savecommitmessage(message)
5696
5696
5697 # don't allow tagging the null rev
5697 # don't allow tagging the null rev
5698 if (not opts.get('remove') and
5698 if (not opts.get('remove') and
5699 scmutil.revsingle(repo, rev_).rev() == nullrev):
5699 scmutil.revsingle(repo, rev_).rev() == nullrev):
5700 raise util.Abort(_("cannot tag null revision"))
5700 raise util.Abort(_("cannot tag null revision"))
5701
5701
5702 repo.tag(names, r, message, opts.get('local'), opts.get('user'), date)
5702 repo.tag(names, r, message, opts.get('local'), opts.get('user'), date)
5703 finally:
5703 finally:
5704 release(lock, wlock)
5704 release(lock, wlock)
5705
5705
5706 @command('tags', [], '')
5706 @command('tags', [], '')
5707 def tags(ui, repo, **opts):
5707 def tags(ui, repo, **opts):
5708 """list repository tags
5708 """list repository tags
5709
5709
5710 This lists both regular and local tags. When the -v/--verbose
5710 This lists both regular and local tags. When the -v/--verbose
5711 switch is used, a third column "local" is printed for local tags.
5711 switch is used, a third column "local" is printed for local tags.
5712
5712
5713 Returns 0 on success.
5713 Returns 0 on success.
5714 """
5714 """
5715
5715
5716 fm = ui.formatter('tags', opts)
5716 fm = ui.formatter('tags', opts)
5717 hexfunc = ui.debugflag and hex or short
5717 hexfunc = ui.debugflag and hex or short
5718 tagtype = ""
5718 tagtype = ""
5719
5719
5720 for t, n in reversed(repo.tagslist()):
5720 for t, n in reversed(repo.tagslist()):
5721 hn = hexfunc(n)
5721 hn = hexfunc(n)
5722 label = 'tags.normal'
5722 label = 'tags.normal'
5723 tagtype = ''
5723 tagtype = ''
5724 if repo.tagtype(t) == 'local':
5724 if repo.tagtype(t) == 'local':
5725 label = 'tags.local'
5725 label = 'tags.local'
5726 tagtype = 'local'
5726 tagtype = 'local'
5727
5727
5728 fm.startitem()
5728 fm.startitem()
5729 fm.write('tag', '%s', t, label=label)
5729 fm.write('tag', '%s', t, label=label)
5730 fmt = " " * (30 - encoding.colwidth(t)) + ' %5d:%s'
5730 fmt = " " * (30 - encoding.colwidth(t)) + ' %5d:%s'
5731 fm.condwrite(not ui.quiet, 'rev id', fmt,
5731 fm.condwrite(not ui.quiet, 'rev id', fmt,
5732 repo.changelog.rev(n), hn, label=label)
5732 repo.changelog.rev(n), hn, label=label)
5733 fm.condwrite(ui.verbose and tagtype, 'type', ' %s',
5733 fm.condwrite(ui.verbose and tagtype, 'type', ' %s',
5734 tagtype, label=label)
5734 tagtype, label=label)
5735 fm.plain('\n')
5735 fm.plain('\n')
5736 fm.end()
5736 fm.end()
5737
5737
5738 @command('tip',
5738 @command('tip',
5739 [('p', 'patch', None, _('show patch')),
5739 [('p', 'patch', None, _('show patch')),
5740 ('g', 'git', None, _('use git extended diff format')),
5740 ('g', 'git', None, _('use git extended diff format')),
5741 ] + templateopts,
5741 ] + templateopts,
5742 _('[-p] [-g]'))
5742 _('[-p] [-g]'))
5743 def tip(ui, repo, **opts):
5743 def tip(ui, repo, **opts):
5744 """show the tip revision (DEPRECATED)
5744 """show the tip revision (DEPRECATED)
5745
5745
5746 The tip revision (usually just called the tip) is the changeset
5746 The tip revision (usually just called the tip) is the changeset
5747 most recently added to the repository (and therefore the most
5747 most recently added to the repository (and therefore the most
5748 recently changed head).
5748 recently changed head).
5749
5749
5750 If you have just made a commit, that commit will be the tip. If
5750 If you have just made a commit, that commit will be the tip. If
5751 you have just pulled changes from another repository, the tip of
5751 you have just pulled changes from another repository, the tip of
5752 that repository becomes the current tip. The "tip" tag is special
5752 that repository becomes the current tip. The "tip" tag is special
5753 and cannot be renamed or assigned to a different changeset.
5753 and cannot be renamed or assigned to a different changeset.
5754
5754
5755 This command is deprecated, please use :hg:`heads` instead.
5755 This command is deprecated, please use :hg:`heads` instead.
5756
5756
5757 Returns 0 on success.
5757 Returns 0 on success.
5758 """
5758 """
5759 displayer = cmdutil.show_changeset(ui, repo, opts)
5759 displayer = cmdutil.show_changeset(ui, repo, opts)
5760 displayer.show(repo['tip'])
5760 displayer.show(repo['tip'])
5761 displayer.close()
5761 displayer.close()
5762
5762
5763 @command('unbundle',
5763 @command('unbundle',
5764 [('u', 'update', None,
5764 [('u', 'update', None,
5765 _('update to new branch head if changesets were unbundled'))],
5765 _('update to new branch head if changesets were unbundled'))],
5766 _('[-u] FILE...'))
5766 _('[-u] FILE...'))
5767 def unbundle(ui, repo, fname1, *fnames, **opts):
5767 def unbundle(ui, repo, fname1, *fnames, **opts):
5768 """apply one or more changegroup files
5768 """apply one or more changegroup files
5769
5769
5770 Apply one or more compressed changegroup files generated by the
5770 Apply one or more compressed changegroup files generated by the
5771 bundle command.
5771 bundle command.
5772
5772
5773 Returns 0 on success, 1 if an update has unresolved files.
5773 Returns 0 on success, 1 if an update has unresolved files.
5774 """
5774 """
5775 fnames = (fname1,) + fnames
5775 fnames = (fname1,) + fnames
5776
5776
5777 lock = repo.lock()
5777 lock = repo.lock()
5778 wc = repo['.']
5778 wc = repo['.']
5779 try:
5779 try:
5780 for fname in fnames:
5780 for fname in fnames:
5781 f = hg.openpath(ui, fname)
5781 f = hg.openpath(ui, fname)
5782 gen = changegroup.readbundle(f, fname)
5782 gen = changegroup.readbundle(f, fname)
5783 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
5783 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
5784 finally:
5784 finally:
5785 lock.release()
5785 lock.release()
5786 bookmarks.updatecurrentbookmark(repo, wc.node(), wc.branch())
5786 bookmarks.updatecurrentbookmark(repo, wc.node(), wc.branch())
5787 return postincoming(ui, repo, modheads, opts.get('update'), None)
5787 return postincoming(ui, repo, modheads, opts.get('update'), None)
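The loop above reduces to three calls per bundle file; a hedged sketch using a hypothetical file name:

    # illustrative only; 'changes.hg' is a hypothetical bundle file
    f = hg.openpath(ui, 'changes.hg')
    gen = changegroup.readbundle(f, 'changes.hg')
    modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:changes.hg')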
5788
5788
5789 @command('^update|up|checkout|co',
5789 @command('^update|up|checkout|co',
5790 [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
5790 [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
5791 ('c', 'check', None,
5791 ('c', 'check', None,
5792 _('update across branches if no uncommitted changes')),
5792 _('update across branches if no uncommitted changes')),
5793 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
5793 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
5794 ('r', 'rev', '', _('revision'), _('REV'))],
5794 ('r', 'rev', '', _('revision'), _('REV'))],
5795 _('[-c] [-C] [-d DATE] [[-r] REV]'))
5795 _('[-c] [-C] [-d DATE] [[-r] REV]'))
5796 def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False):
5796 def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False):
5797 """update working directory (or switch revisions)
5797 """update working directory (or switch revisions)
5798
5798
5799 Update the repository's working directory to the specified
5799 Update the repository's working directory to the specified
5800 changeset. If no changeset is specified, update to the tip of the
5800 changeset. If no changeset is specified, update to the tip of the
5801 current named branch and move the current bookmark (see :hg:`help
5801 current named branch and move the current bookmark (see :hg:`help
5802 bookmarks`).
5802 bookmarks`).
5803
5803
5804 Update sets the working directory's parent revision to the specified
5804 Update sets the working directory's parent revision to the specified
5805 changeset (see :hg:`help parents`).
5805 changeset (see :hg:`help parents`).
5806
5806
5807 If the changeset is not a descendant or ancestor of the working
5807 If the changeset is not a descendant or ancestor of the working
5808 directory's parent, the update is aborted. With the -c/--check
5808 directory's parent, the update is aborted. With the -c/--check
5809 option, the working directory is checked for uncommitted changes; if
5809 option, the working directory is checked for uncommitted changes; if
5810 none are found, the working directory is updated to the specified
5810 none are found, the working directory is updated to the specified
5811 changeset.
5811 changeset.
5812
5812
5813 .. container:: verbose
5813 .. container:: verbose
5814
5814
5815 The following rules apply when the working directory contains
5815 The following rules apply when the working directory contains
5816 uncommitted changes:
5816 uncommitted changes:
5817
5817
5818 1. If neither -c/--check nor -C/--clean is specified, and if
5818 1. If neither -c/--check nor -C/--clean is specified, and if
5819 the requested changeset is an ancestor or descendant of
5819 the requested changeset is an ancestor or descendant of
5820 the working directory's parent, the uncommitted changes
5820 the working directory's parent, the uncommitted changes
5821 are merged into the requested changeset and the merged
5821 are merged into the requested changeset and the merged
5822 result is left uncommitted. If the requested changeset is
5822 result is left uncommitted. If the requested changeset is
5823 not an ancestor or descendant (that is, it is on another
5823 not an ancestor or descendant (that is, it is on another
5824 branch), the update is aborted and the uncommitted changes
5824 branch), the update is aborted and the uncommitted changes
5825 are preserved.
5825 are preserved.
5826
5826
5827 2. With the -c/--check option, the update is aborted and the
5827 2. With the -c/--check option, the update is aborted and the
5828 uncommitted changes are preserved.
5828 uncommitted changes are preserved.
5829
5829
5830 3. With the -C/--clean option, uncommitted changes are discarded and
5830 3. With the -C/--clean option, uncommitted changes are discarded and
5831 the working directory is updated to the requested changeset.
5831 the working directory is updated to the requested changeset.
5832
5832
5833 To cancel an uncommitted merge (and lose your changes), use
5833 To cancel an uncommitted merge (and lose your changes), use
5834 :hg:`update --clean .`.
5834 :hg:`update --clean .`.
5835
5835
5836 Use null as the changeset to remove the working directory (like
5836 Use null as the changeset to remove the working directory (like
5837 :hg:`clone -U`).
5837 :hg:`clone -U`).
5838
5838
5839 If you want to revert just one file to an older revision, use
5839 If you want to revert just one file to an older revision, use
5840 :hg:`revert [-r REV] NAME`.
5840 :hg:`revert [-r REV] NAME`.
5841
5841
5842 See :hg:`help dates` for a list of formats valid for -d/--date.
5842 See :hg:`help dates` for a list of formats valid for -d/--date.
5843
5843
5844 Returns 0 on success, 1 if there are unresolved files.
5844 Returns 0 on success, 1 if there are unresolved files.
5845 """
5845 """
5846 if rev and node:
5846 if rev and node:
5847 raise util.Abort(_("please specify just one revision"))
5847 raise util.Abort(_("please specify just one revision"))
5848
5848
5849 if rev is None or rev == '':
5849 if rev is None or rev == '':
5850 rev = node
5850 rev = node
5851
5851
5852 cmdutil.clearunfinished(repo)
5852 cmdutil.clearunfinished(repo)
5853
5853
5854 # with no argument, we also move the current bookmark, if any
5854 # with no argument, we also move the current bookmark, if any
5855 rev, movemarkfrom = bookmarks.calculateupdate(ui, repo, rev)
5855 rev, movemarkfrom = bookmarks.calculateupdate(ui, repo, rev)
5856
5856
5857 # if we defined a bookmark, we have to remember the original bookmark name
5857 # if we defined a bookmark, we have to remember the original bookmark name
5858 brev = rev
5858 brev = rev
5859 rev = scmutil.revsingle(repo, rev, rev).rev()
5859 rev = scmutil.revsingle(repo, rev, rev).rev()
5860
5860
5861 if check and clean:
5861 if check and clean:
5862 raise util.Abort(_("cannot specify both -c/--check and -C/--clean"))
5862 raise util.Abort(_("cannot specify both -c/--check and -C/--clean"))
5863
5863
5864 if date:
5864 if date:
5865 if rev is not None:
5865 if rev is not None:
5866 raise util.Abort(_("you can't specify a revision and a date"))
5866 raise util.Abort(_("you can't specify a revision and a date"))
5867 rev = cmdutil.finddate(ui, repo, date)
5867 rev = cmdutil.finddate(ui, repo, date)
5868
5868
5869 if check:
5869 if check:
5870 c = repo[None]
5870 c = repo[None]
5871 if c.dirty(merge=False, branch=False, missing=True):
5871 if c.dirty(merge=False, branch=False, missing=True):
5872 raise util.Abort(_("uncommitted changes"))
5872 raise util.Abort(_("uncommitted changes"))
5873 if rev is None:
5873 if rev is None:
5874 rev = repo[repo[None].branch()].rev()
5874 rev = repo[repo[None].branch()].rev()
5875 mergemod._checkunknown(repo, repo[None], repo[rev])
5875 mergemod._checkunknown(repo, repo[None], repo[rev])
5876
5876
5877 if clean:
5877 if clean:
5878 ret = hg.clean(repo, rev)
5878 ret = hg.clean(repo, rev)
5879 else:
5879 else:
5880 ret = hg.update(repo, rev)
5880 ret = hg.update(repo, rev)
5881
5881
5882 if not ret and movemarkfrom:
5882 if not ret and movemarkfrom:
5883 if bookmarks.update(repo, [movemarkfrom], repo['.'].node()):
5883 if bookmarks.update(repo, [movemarkfrom], repo['.'].node()):
5884 ui.status(_("updating bookmark %s\n") % repo._bookmarkcurrent)
5884 ui.status(_("updating bookmark %s\n") % repo._bookmarkcurrent)
5885 elif brev in repo._bookmarks:
5885 elif brev in repo._bookmarks:
5886 bookmarks.setcurrent(repo, brev)
5886 bookmarks.setcurrent(repo, brev)
5887 elif brev:
5887 elif brev:
5888 bookmarks.unsetcurrent(repo)
5888 bookmarks.unsetcurrent(repo)
5889
5889
5890 return ret
5890 return ret
5891
5891
5892 @command('verify', [])
5892 @command('verify', [])
5893 def verify(ui, repo):
5893 def verify(ui, repo):
5894 """verify the integrity of the repository
5894 """verify the integrity of the repository
5895
5895
5896 Verify the integrity of the current repository.
5896 Verify the integrity of the current repository.
5897
5897
5898 This will perform an extensive check of the repository's
5898 This will perform an extensive check of the repository's
5899 integrity, validating the hashes and checksums of each entry in
5899 integrity, validating the hashes and checksums of each entry in
5900 the changelog, manifest, and tracked files, as well as the
5900 the changelog, manifest, and tracked files, as well as the
5901 integrity of their crosslinks and indices.
5901 integrity of their crosslinks and indices.
5902
5902
5903 Please see http://mercurial.selenic.com/wiki/RepositoryCorruption
5903 Please see http://mercurial.selenic.com/wiki/RepositoryCorruption
5904 for more information about recovery from corruption of the
5904 for more information about recovery from corruption of the
5905 repository.
5905 repository.
5906
5906
5907 Returns 0 on success, 1 if errors are encountered.
5907 Returns 0 on success, 1 if errors are encountered.
5908 """
5908 """
5909 return hg.verify(repo)
5909 return hg.verify(repo)
5910
5910
5911 @command('version', [])
5911 @command('version', [])
5912 def version_(ui):
5912 def version_(ui):
5913 """output version and copyright information"""
5913 """output version and copyright information"""
5914 ui.write(_("Mercurial Distributed SCM (version %s)\n")
5914 ui.write(_("Mercurial Distributed SCM (version %s)\n")
5915 % util.version())
5915 % util.version())
5916 ui.status(_(
5916 ui.status(_(
5917 "(see http://mercurial.selenic.com for more information)\n"
5917 "(see http://mercurial.selenic.com for more information)\n"
5918 "\nCopyright (C) 2005-2014 Matt Mackall and others\n"
5918 "\nCopyright (C) 2005-2014 Matt Mackall and others\n"
5919 "This is free software; see the source for copying conditions. "
5919 "This is free software; see the source for copying conditions. "
5920 "There is NO\nwarranty; "
5920 "There is NO\nwarranty; "
5921 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
5921 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
5922 ))
5922 ))
5923
5923
5924 norepo = ("clone init version help debugcommands debugcomplete"
5924 norepo = ("clone init version help debugcommands debugcomplete"
5925 " debugdate debuginstall debugfsinfo debugpushkey debugwireargs"
5925 " debugdate debuginstall debugfsinfo debugpushkey debugwireargs"
5926 " debugknown debuggetbundle debugbundle")
5926 " debugknown debuggetbundle debugbundle")
5927 optionalrepo = ("identify paths serve config showconfig debugancestor debugdag"
5927 optionalrepo = ("identify paths serve config showconfig debugancestor debugdag"
5928 " debugdata debugindex debugindexdot debugrevlog")
5928 " debugdata debugindex debugindexdot debugrevlog")
5929 inferrepo = ("add addremove annotate cat commit diff grep forget log parents"
5929 inferrepo = ("add addremove annotate cat commit diff grep forget log parents"
5930 " remove resolve status debugwalk")
5930 " remove resolve status debugwalk")
@@ -1,553 +1,554 b''
1 # exchange.py - utility to exchange data between repositories.
1 # exchange.py - utility to exchange data between repositories.
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 from node import hex, nullid
9 from node import hex, nullid
10 import errno
10 import errno
11 import util, scmutil, changegroup, base85
11 import util, scmutil, changegroup, base85
12 import discovery, phases, obsolete, bookmarks
12 import discovery, phases, obsolete, bookmarks
13
13
14
14
15 class pushoperation(object):
15 class pushoperation(object):
16 """A object that represent a single push operation
16 """A object that represent a single push operation
17
17
18 Its purpose is to carry push-related state and very common operations.
18 Its purpose is to carry push-related state and very common operations.
19
19
20 A new one should be created at the beginning of each push and discarded
20 A new one should be created at the beginning of each push and discarded
21 afterward.
21 afterward.
22 """
22 """
23
23
24 def __init__(self, repo, remote, force=False, revs=None, newbranch=False):
24 def __init__(self, repo, remote, force=False, revs=None, newbranch=False):
25 # repo we push from
25 # repo we push from
26 self.repo = repo
26 self.repo = repo
27 self.ui = repo.ui
27 self.ui = repo.ui
28 # repo we push to
28 # repo we push to
29 self.remote = remote
29 self.remote = remote
30 # force option provided
30 # force option provided
31 self.force = force
31 self.force = force
32 # revs to be pushed (None is "all")
32 # revs to be pushed (None is "all")
33 self.revs = revs
33 self.revs = revs
34 # allow push of new branch
34 # allow push of new branch
35 self.newbranch = newbranch
35 self.newbranch = newbranch
36 # did a local lock get acquired?
36 # did a local lock get acquired?
37 self.locallocked = None
37 self.locallocked = None
38 # Integer version of the push result
38 # Integer version of the push result
39 # - None means nothing to push
39 # - None means nothing to push
40 # - 0 means HTTP error
40 # - 0 means HTTP error
41 # - 1 means we pushed and remote head count is unchanged *or*
41 # - 1 means we pushed and remote head count is unchanged *or*
42 # we have outgoing changesets but refused to push
42 # we have outgoing changesets but refused to push
43 # - other values as described by addchangegroup()
43 # - other values as described by addchangegroup()
44 self.ret = None
44 self.ret = None
45 # discovery.outgoing object (contains common and outgoing data)
45 # discovery.outgoing object (contains common and outgoing data)
46 self.outgoing = None
46 self.outgoing = None
47 # all remote heads before the push
47 # all remote heads before the push
48 self.remoteheads = None
48 self.remoteheads = None
49 # testable as a boolean indicating if any nodes are missing locally.
49 # testable as a boolean indicating if any nodes are missing locally.
50 self.incoming = None
50 self.incoming = None
51 # set of all heads common after changeset bundle push
51 # set of all heads common after changeset bundle push
52 self.commonheads = None
52 self.commonheads = None
53
53
54 def push(repo, remote, force=False, revs=None, newbranch=False):
54 def push(repo, remote, force=False, revs=None, newbranch=False):
55 '''Push outgoing changesets (limited by revs) from a local
55 '''Push outgoing changesets (limited by revs) from a local
56 repository to remote. Return an integer:
56 repository to remote. Return an integer:
57 - None means nothing to push
57 - None means nothing to push
58 - 0 means HTTP error
58 - 0 means HTTP error
59 - 1 means we pushed and remote head count is unchanged *or*
59 - 1 means we pushed and remote head count is unchanged *or*
60 we have outgoing changesets but refused to push
60 we have outgoing changesets but refused to push
61 - other values as described by addchangegroup()
61 - other values as described by addchangegroup()
62 '''
62 '''
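A hedged usage sketch from a caller's perspective; hg.peer() is used elsewhere in this codebase, and the URL is a made-up example:

    # illustrative only -- not part of this change
    from mercurial import hg, exchange
    other = hg.peer(repo, {}, 'ssh://hg.example.com/repo')  # hypothetical URL
    ret = exchange.push(repo, other, newbranch=True)
    # ret follows the semantics listed above: None, 0, 1, or addchangegroup()'s value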
63 pushop = pushoperation(repo, remote, force, revs, newbranch)
63 pushop = pushoperation(repo, remote, force, revs, newbranch)
64 if pushop.remote.local():
64 if pushop.remote.local():
65 missing = (set(pushop.repo.requirements)
65 missing = (set(pushop.repo.requirements)
66 - pushop.remote.local().supported)
66 - pushop.remote.local().supported)
67 if missing:
67 if missing:
68 msg = _("required features are not"
68 msg = _("required features are not"
69 " supported in the destination:"
69 " supported in the destination:"
70 " %s") % (', '.join(sorted(missing)))
70 " %s") % (', '.join(sorted(missing)))
71 raise util.Abort(msg)
71 raise util.Abort(msg)
72
72
73 # there are two ways to push to remote repo:
73 # there are two ways to push to remote repo:
74 #
74 #
75 # addchangegroup assumes local user can lock remote
75 # addchangegroup assumes local user can lock remote
76 # repo (local filesystem, old ssh servers).
76 # repo (local filesystem, old ssh servers).
77 #
77 #
78 # unbundle assumes local user cannot lock remote repo (new ssh
78 # unbundle assumes local user cannot lock remote repo (new ssh
79 # servers, http servers).
79 # servers, http servers).
80
80
81 if not pushop.remote.canpush():
81 if not pushop.remote.canpush():
82 raise util.Abort(_("destination does not support push"))
82 raise util.Abort(_("destination does not support push"))
83 # get local lock as we might write phase data
83 # get local lock as we might write phase data
84 locallock = None
84 locallock = None
85 try:
85 try:
86 locallock = pushop.repo.lock()
86 locallock = pushop.repo.lock()
87 pushop.locallocked = True
87 pushop.locallocked = True
88 except IOError, err:
88 except IOError, err:
89 pushop.locallocked = False
89 pushop.locallocked = False
90 if err.errno != errno.EACCES:
90 if err.errno != errno.EACCES:
91 raise
91 raise
92 # source repo cannot be locked.
92 # source repo cannot be locked.
93 # We do not abort the push, but just disable the local phase
93 # We do not abort the push, but just disable the local phase
94 # synchronisation.
94 # synchronisation.
95 msg = 'cannot lock source repository: %s\n' % err
95 msg = 'cannot lock source repository: %s\n' % err
96 pushop.ui.debug(msg)
96 pushop.ui.debug(msg)
97 try:
97 try:
98 pushop.repo.checkpush(pushop)
98 pushop.repo.checkpush(pushop)
99 lock = None
99 lock = None
100 unbundle = pushop.remote.capable('unbundle')
100 unbundle = pushop.remote.capable('unbundle')
101 if not unbundle:
101 if not unbundle:
102 lock = pushop.remote.lock()
102 lock = pushop.remote.lock()
103 try:
103 try:
104 _pushdiscovery(pushop)
104 _pushdiscovery(pushop)
105 if _pushcheckoutgoing(pushop):
105 if _pushcheckoutgoing(pushop):
106 _pushchangeset(pushop)
106 _pushchangeset(pushop)
107 _pushcomputecommonheads(pushop)
107 _pushcomputecommonheads(pushop)
108 _pushsyncphase(pushop)
108 _pushsyncphase(pushop)
109 _pushobsolete(pushop)
109 _pushobsolete(pushop)
110 finally:
110 finally:
111 if lock is not None:
111 if lock is not None:
112 lock.release()
112 lock.release()
113 finally:
113 finally:
114 if locallock is not None:
114 if locallock is not None:
115 locallock.release()
115 locallock.release()
116
116
117 _pushbookmark(pushop)
117 _pushbookmark(pushop)
118 return pushop.ret
118 return pushop.ret
119
119
120 def _pushdiscovery(pushop):
120 def _pushdiscovery(pushop):
121 # discovery
121 # discovery
122 unfi = pushop.repo.unfiltered()
122 unfi = pushop.repo.unfiltered()
123 fci = discovery.findcommonincoming
123 fci = discovery.findcommonincoming
124 commoninc = fci(unfi, pushop.remote, force=pushop.force)
124 commoninc = fci(unfi, pushop.remote, force=pushop.force)
125 common, inc, remoteheads = commoninc
125 common, inc, remoteheads = commoninc
126 fco = discovery.findcommonoutgoing
126 fco = discovery.findcommonoutgoing
127 outgoing = fco(unfi, pushop.remote, onlyheads=pushop.revs,
127 outgoing = fco(unfi, pushop.remote, onlyheads=pushop.revs,
128 commoninc=commoninc, force=pushop.force)
128 commoninc=commoninc, force=pushop.force)
129 pushop.outgoing = outgoing
129 pushop.outgoing = outgoing
130 pushop.remoteheads = remoteheads
130 pushop.remoteheads = remoteheads
131 pushop.incoming = inc
131 pushop.incoming = inc
132
132
133 def _pushcheckoutgoing(pushop):
133 def _pushcheckoutgoing(pushop):
134 outgoing = pushop.outgoing
134 outgoing = pushop.outgoing
135 unfi = pushop.repo.unfiltered()
135 unfi = pushop.repo.unfiltered()
136 if not outgoing.missing:
136 if not outgoing.missing:
137 # nothing to push
137 # nothing to push
138 scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
138 scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
139 return False
139 return False
140 # something to push
140 # something to push
141 if not pushop.force:
141 if not pushop.force:
142 # if repo.obsstore == False --> no obsolete
142 # if repo.obsstore == False --> no obsolete
143 # then, save the iteration
143 # then, save the iteration
144 if unfi.obsstore:
144 if unfi.obsstore:
145 # these messages are defined here because of the 80-character line limit
145 # these messages are defined here because of the 80-character line limit
146 mso = _("push includes obsolete changeset: %s!")
146 mso = _("push includes obsolete changeset: %s!")
147 mst = "push includes %s changeset: %s!"
147 mst = "push includes %s changeset: %s!"
148 # plain versions for i18n tool to detect them
148 # plain versions for i18n tool to detect them
149 _("push includes unstable changeset: %s!")
149 _("push includes unstable changeset: %s!")
150 _("push includes bumped changeset: %s!")
150 _("push includes bumped changeset: %s!")
151 _("push includes divergent changeset: %s!")
151 _("push includes divergent changeset: %s!")
152 # If we are pushing and there is at least one
152 # If we are pushing and there is at least one
153 # obsolete or unstable changeset in missing, at
153 # obsolete or unstable changeset in missing, at
154 # least one of the missing heads will be obsolete or
154 # least one of the missing heads will be obsolete or
155 # unstable. So checking heads only is ok
155 # unstable. So checking heads only is ok
156 for node in outgoing.missingheads:
156 for node in outgoing.missingheads:
157 ctx = unfi[node]
157 ctx = unfi[node]
158 if ctx.obsolete():
158 if ctx.obsolete():
159 raise util.Abort(mso % ctx)
159 raise util.Abort(mso % ctx)
160 elif ctx.troubled():
160 elif ctx.troubled():
161 raise util.Abort(_(mst)
161 raise util.Abort(_(mst)
162 % (ctx.troubles()[0],
162 % (ctx.troubles()[0],
163 ctx))
163 ctx))
164 newbm = pushop.ui.configlist('bookmarks', 'pushing')
164 newbm = pushop.ui.configlist('bookmarks', 'pushing')
165 discovery.checkheads(unfi, pushop.remote, outgoing,
165 discovery.checkheads(unfi, pushop.remote, outgoing,
166 pushop.remoteheads,
166 pushop.remoteheads,
167 pushop.newbranch,
167 pushop.newbranch,
168 bool(pushop.incoming),
168 bool(pushop.incoming),
169 newbm)
169 newbm)
170 return True
170 return True
171
171
172 def _pushchangeset(pushop):
172 def _pushchangeset(pushop):
173 """Make the actual push of changeset bundle to remote repo"""
173 """Make the actual push of changeset bundle to remote repo"""
174 outgoing = pushop.outgoing
174 outgoing = pushop.outgoing
175 unbundle = pushop.remote.capable('unbundle')
175 unbundle = pushop.remote.capable('unbundle')
176 # TODO: get bundlecaps from remote
176 # TODO: get bundlecaps from remote
177 bundlecaps = None
177 bundlecaps = None
178 # create a changegroup from local
178 # create a changegroup from local
179 if pushop.revs is None and not (outgoing.excluded
179 if pushop.revs is None and not (outgoing.excluded
180 or pushop.repo.changelog.filteredrevs):
180 or pushop.repo.changelog.filteredrevs):
181 # push everything,
181 # push everything,
182 # use the fast path, no race possible on push
182 # use the fast path, no race possible on push
183 bundler = changegroup.bundle10(pushop.repo, bundlecaps)
183 bundler = changegroup.bundle10(pushop.repo, bundlecaps)
184 cg = changegroup.getsubset(pushop.repo,
184 cg = changegroup.getsubset(pushop.repo,
185 outgoing,
185 outgoing,
186 bundler,
186 bundler,
187 'push',
187 'push',
188 fastpath=True)
188 fastpath=True)
189 else:
189 else:
190 cg = pushop.repo.getlocalbundle('push', outgoing, bundlecaps)
190 cg = changegroup.getlocalbundle(pushop.repo, 'push', outgoing,
191 bundlecaps)
191
192
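For context (hedged, inferred from the fast path above rather than from this hunk): the module-level getlocalbundle is roughly a wrapper that builds the bundler and hands the computed outgoing set to getsubset, so the slow path is approximately:

    # rough equivalent of the getlocalbundle() call above (illustrative only)
    bundler = changegroup.bundle10(pushop.repo, bundlecaps)
    cg = changegroup.getsubset(pushop.repo, outgoing, bundler, 'push')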
192 # apply changegroup to remote
193 # apply changegroup to remote
193 if unbundle:
194 if unbundle:
194 # local repo finds heads on server, finds out what
195 # local repo finds heads on server, finds out what
195 # revs it must push. once revs transferred, if server
196 # revs it must push. once revs transferred, if server
196 # finds it has different heads (someone else won
197 # finds it has different heads (someone else won
197 # commit/push race), server aborts.
198 # commit/push race), server aborts.
198 if pushop.force:
199 if pushop.force:
199 remoteheads = ['force']
200 remoteheads = ['force']
200 else:
201 else:
201 remoteheads = pushop.remoteheads
202 remoteheads = pushop.remoteheads
202 # ssh: return remote's addchangegroup()
203 # ssh: return remote's addchangegroup()
203 # http: return remote's addchangegroup() or 0 for error
204 # http: return remote's addchangegroup() or 0 for error
204 pushop.ret = pushop.remote.unbundle(cg, remoteheads,
205 pushop.ret = pushop.remote.unbundle(cg, remoteheads,
205 'push')
206 'push')
206 else:
207 else:
207 # we return an integer indicating remote head count
208 # we return an integer indicating remote head count
208 # change
209 # change
209 pushop.ret = pushop.remote.addchangegroup(cg, 'push',
210 pushop.ret = pushop.remote.addchangegroup(cg, 'push',
210 pushop.repo.url())
211 pushop.repo.url())
211
212
212 def _pushcomputecommonheads(pushop):
213 def _pushcomputecommonheads(pushop):
213 unfi = pushop.repo.unfiltered()
214 unfi = pushop.repo.unfiltered()
214 if pushop.ret:
215 if pushop.ret:
215 # push succeeded, synchronize target of the push
216 # push succeeded, synchronize target of the push
216 cheads = pushop.outgoing.missingheads
217 cheads = pushop.outgoing.missingheads
217 elif pushop.revs is None:
218 elif pushop.revs is None:
218 # All-out push failed. synchronize all common
219 # All-out push failed. synchronize all common
219 cheads = pushop.outgoing.commonheads
220 cheads = pushop.outgoing.commonheads
220 else:
221 else:
221 # I want cheads = heads(::missingheads and ::commonheads)
222 # I want cheads = heads(::missingheads and ::commonheads)
222 # (missingheads is revs with secret changeset filtered out)
223 # (missingheads is revs with secret changeset filtered out)
223 #
224 #
224 # This can be expressed as:
225 # This can be expressed as:
225 # cheads = ( (missingheads and ::commonheads)
226 # cheads = ( (missingheads and ::commonheads)
226 # + (commonheads and ::missingheads))"
227 # + (commonheads and ::missingheads))"
227 # )
228 # )
228 #
229 #
229 # while trying to push we already computed the following:
230 # while trying to push we already computed the following:
230 # common = (::commonheads)
231 # common = (::commonheads)
231 # missing = ((commonheads::missingheads) - commonheads)
232 # missing = ((commonheads::missingheads) - commonheads)
232 #
233 #
233 # We can pick:
234 # We can pick:
234 # * missingheads part of common (::commonheads)
235 # * missingheads part of common (::commonheads)
235 common = set(pushop.outgoing.common)
236 common = set(pushop.outgoing.common)
236 nm = pushop.repo.changelog.nodemap
237 nm = pushop.repo.changelog.nodemap
237 cheads = [node for node in pushop.revs if nm[node] in common]
238 cheads = [node for node in pushop.revs if nm[node] in common]
238 # and
239 # and
239 # * commonheads parents on missing
240 # * commonheads parents on missing
240 revset = unfi.set('%ln and parents(roots(%ln))',
241 revset = unfi.set('%ln and parents(roots(%ln))',
241 pushop.outgoing.commonheads,
242 pushop.outgoing.commonheads,
242 pushop.outgoing.missing)
243 pushop.outgoing.missing)
243 cheads.extend(c.node() for c in revset)
244 cheads.extend(c.node() for c in revset)
244 pushop.commonheads = cheads
245 pushop.commonheads = cheads
245
246
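# Illustration only -- the comment above describes the target set
# heads(::missingheads and ::commonheads); if walking the full ancestor sets
# were affordable, it could be computed with a single revset instead of the
# two cheaper pieces picked by the code:
def commonheadsrevset(unfi, commonheads, missingheads):
    # unfi: unfiltered repo; commonheads/missingheads: lists of binary nodes
    ctxs = unfi.set('heads(::%ln and ::%ln)', missingheads, commonheads)
    return [c.node() for c in ctxs]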
246 def _pushsyncphase(pushop):
247 def _pushsyncphase(pushop):
247 """synchronise phase information locally and remotly"""
248 """synchronise phase information locally and remotly"""
248 unfi = pushop.repo.unfiltered()
249 unfi = pushop.repo.unfiltered()
249 cheads = pushop.commonheads
250 cheads = pushop.commonheads
250 if pushop.ret:
251 if pushop.ret:
251 # push succeeded, synchronize the target of the push
252 # push succeeded, synchronize the target of the push
252 cheads = pushop.outgoing.missingheads
253 cheads = pushop.outgoing.missingheads
253 elif pushop.revs is None:
254 elif pushop.revs is None:
254 # All-out push failed; synchronize all common
255 # All-out push failed; synchronize all common
255 cheads = pushop.outgoing.commonheads
256 cheads = pushop.outgoing.commonheads
256 else:
257 else:
257 # I want cheads = heads(::missingheads and ::commonheads)
258 # I want cheads = heads(::missingheads and ::commonheads)
258 # (missingheads is revs with secret changeset filtered out)
259 # (missingheads is revs with secret changeset filtered out)
259 #
260 #
260 # This can be expressed as:
261 # This can be expressed as:
261 # cheads = ( (missingheads and ::commonheads)
262 # cheads = ( (missingheads and ::commonheads)
262 # + (commonheads and ::missingheads))"
263 # + (commonheads and ::missingheads))"
263 # )
264 # )
264 #
265 #
265 # while trying to push we already computed the following:
266 # while trying to push we already computed the following:
266 # common = (::commonheads)
267 # common = (::commonheads)
267 # missing = ((commonheads::missingheads) - commonheads)
268 # missing = ((commonheads::missingheads) - commonheads)
268 #
269 #
269 # We can pick:
270 # We can pick:
270 # * missingheads part of common (::commonheads)
271 # * missingheads part of common (::commonheads)
271 common = set(pushop.outgoing.common)
272 common = set(pushop.outgoing.common)
272 nm = pushop.repo.changelog.nodemap
273 nm = pushop.repo.changelog.nodemap
273 cheads = [node for node in pushop.revs if nm[node] in common]
274 cheads = [node for node in pushop.revs if nm[node] in common]
274 # and
275 # and
275 # * commonheads parents on missing
276 # * commonheads parents on missing
276 revset = unfi.set('%ln and parents(roots(%ln))',
277 revset = unfi.set('%ln and parents(roots(%ln))',
277 pushop.outgoing.commonheads,
278 pushop.outgoing.commonheads,
278 pushop.outgoing.missing)
279 pushop.outgoing.missing)
279 cheads.extend(c.node() for c in revset)
280 cheads.extend(c.node() for c in revset)
280 pushop.commonheads = cheads
281 pushop.commonheads = cheads
281 # even when we don't push, exchanging phase data is useful
282 # even when we don't push, exchanging phase data is useful
282 remotephases = pushop.remote.listkeys('phases')
283 remotephases = pushop.remote.listkeys('phases')
283 if (pushop.ui.configbool('ui', '_usedassubrepo', False)
284 if (pushop.ui.configbool('ui', '_usedassubrepo', False)
284 and remotephases # server supports phases
285 and remotephases # server supports phases
285 and pushop.ret is None # nothing was pushed
286 and pushop.ret is None # nothing was pushed
286 and remotephases.get('publishing', False)):
287 and remotephases.get('publishing', False)):
287 # When:
288 # When:
288 # - this is a subrepo push
289 # - this is a subrepo push
289 # - and the remote supports phases
290 # - and the remote supports phases
290 # - and no changeset was pushed
291 # - and no changeset was pushed
291 # - and remote is publishing
292 # - and remote is publishing
292 # We may be in the issue 3871 case!
293 # We may be in the issue 3871 case!
293 # We drop the phase synchronisation that would otherwise be done as
294 # We drop the phase synchronisation that would otherwise be done as
294 # a courtesy to publish, on the remote, changesets that are possibly
295 # a courtesy to publish, on the remote, changesets that are possibly
295 # still draft locally.
296 # still draft locally.
296 remotephases = {'publishing': 'True'}
297 remotephases = {'publishing': 'True'}
297 if not remotephases: # old server or public only repo
298 if not remotephases: # old server or public only repo
298 _localphasemove(pushop, cheads)
299 _localphasemove(pushop, cheads)
299 # don't push any phase data as there is nothing to push
300 # don't push any phase data as there is nothing to push
300 else:
301 else:
301 ana = phases.analyzeremotephases(pushop.repo, cheads,
302 ana = phases.analyzeremotephases(pushop.repo, cheads,
302 remotephases)
303 remotephases)
303 pheads, droots = ana
304 pheads, droots = ana
304 ### Apply remote phase on local
305 ### Apply remote phase on local
305 if remotephases.get('publishing', False):
306 if remotephases.get('publishing', False):
306 _localphasemove(pushop, cheads)
307 _localphasemove(pushop, cheads)
307 else: # publish = False
308 else: # publish = False
308 _localphasemove(pushop, pheads)
309 _localphasemove(pushop, pheads)
309 _localphasemove(pushop, cheads, phases.draft)
310 _localphasemove(pushop, cheads, phases.draft)
310 ### Apply local phase on remote
311 ### Apply local phase on remote
311
312
312 # Get the list of all revs draft on remote but public here.
313 # Get the list of all revs draft on remote but public here.
313 # XXX Beware that the revset breaks if droots is not strictly
314 # XXX Beware that the revset breaks if droots is not strictly
314 # XXX roots; we may want to ensure it is, but that is costly
315 # XXX roots; we may want to ensure it is, but that is costly
315 outdated = unfi.set('heads((%ln::%ln) and public())',
316 outdated = unfi.set('heads((%ln::%ln) and public())',
316 droots, cheads)
317 droots, cheads)
317 for newremotehead in outdated:
318 for newremotehead in outdated:
318 r = pushop.remote.pushkey('phases',
319 r = pushop.remote.pushkey('phases',
319 newremotehead.hex(),
320 newremotehead.hex(),
320 str(phases.draft),
321 str(phases.draft),
321 str(phases.public))
322 str(phases.public))
322 if not r:
323 if not r:
323 pushop.ui.warn(_('updating %s to public failed!\n')
324 pushop.ui.warn(_('updating %s to public failed!\n')
324 % newremotehead)
325 % newremotehead)
325
326
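# Illustration only -- what one phase move over the wire looks like: a pushkey
# in the 'phases' namespace carries the full hex node plus the old and new
# phase numbers as strings, and a falsy reply means the remote refused it.
# `remote` is assumed to be any peer and `node` a binary changeset id.
from mercurial import phases
from mercurial.node import hex

def publishonremote(remote, node):
    ok = remote.pushkey('phases', hex(node),
                        str(phases.draft), str(phases.public))
    return bool(ok)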
326 def _localphasemove(pushop, nodes, phase=phases.public):
327 def _localphasemove(pushop, nodes, phase=phases.public):
327 """move <nodes> to <phase> in the local source repo"""
328 """move <nodes> to <phase> in the local source repo"""
328 if pushop.locallocked:
329 if pushop.locallocked:
329 phases.advanceboundary(pushop.repo, phase, nodes)
330 phases.advanceboundary(pushop.repo, phase, nodes)
330 else:
331 else:
331 # repo is not locked, do not change any phases!
332 # repo is not locked, do not change any phases!
332 # Informs the user that phases should have been moved when
333 # Informs the user that phases should have been moved when
333 # applicable.
334 # applicable.
334 actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
335 actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
335 phasestr = phases.phasenames[phase]
336 phasestr = phases.phasenames[phase]
336 if actualmoves:
337 if actualmoves:
337 pushop.ui.status(_('cannot lock source repo, skipping '
338 pushop.ui.status(_('cannot lock source repo, skipping '
338 'local %s phase update\n') % phasestr)
339 'local %s phase update\n') % phasestr)
339
340
340 def _pushobsolete(pushop):
341 def _pushobsolete(pushop):
341 """utility function to push obsolete markers to a remote"""
342 """utility function to push obsolete markers to a remote"""
342 pushop.ui.debug('try to push obsolete markers to remote\n')
343 pushop.ui.debug('try to push obsolete markers to remote\n')
343 repo = pushop.repo
344 repo = pushop.repo
344 remote = pushop.remote
345 remote = pushop.remote
345 if (obsolete._enabled and repo.obsstore and
346 if (obsolete._enabled and repo.obsstore and
346 'obsolete' in remote.listkeys('namespaces')):
347 'obsolete' in remote.listkeys('namespaces')):
347 rslts = []
348 rslts = []
348 remotedata = repo.listkeys('obsolete')
349 remotedata = repo.listkeys('obsolete')
349 for key in sorted(remotedata, reverse=True):
350 for key in sorted(remotedata, reverse=True):
350 # reverse sort to ensure we end with dump0
351 # reverse sort to ensure we end with dump0
351 data = remotedata[key]
352 data = remotedata[key]
352 rslts.append(remote.pushkey('obsolete', key, '', data))
353 rslts.append(remote.pushkey('obsolete', key, '', data))
353 if [r for r in rslts if not r]:
354 if [r for r in rslts if not r]:
354 msg = _('failed to push some obsolete markers!\n')
355 msg = _('failed to push some obsolete markers!\n')
355 repo.ui.warn(msg)
356 repo.ui.warn(msg)
356
357
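# Illustration only -- small helpers matching the two checks above: whether
# the peer advertises the 'obsolete' pushkey namespace at all, and the
# reverse-sorted key order that makes 'dump0' the last part pushed.
def supportsobsmarkers(remote):
    return 'obsolete' in remote.listkeys('namespaces')

def obsmarkerparts(repo):
    data = repo.listkeys('obsolete')
    for key in sorted(data, reverse=True):
        yield key, data[key]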
357 def _pushbookmark(pushop):
358 def _pushbookmark(pushop):
358 """Update bookmark position on remote"""
359 """Update bookmark position on remote"""
359 ui = pushop.ui
360 ui = pushop.ui
360 repo = pushop.repo.unfiltered()
361 repo = pushop.repo.unfiltered()
361 remote = pushop.remote
362 remote = pushop.remote
362 ui.debug("checking for updated bookmarks\n")
363 ui.debug("checking for updated bookmarks\n")
363 revnums = map(repo.changelog.rev, pushop.revs or [])
364 revnums = map(repo.changelog.rev, pushop.revs or [])
364 ancestors = [a for a in repo.changelog.ancestors(revnums, inclusive=True)]
365 ancestors = [a for a in repo.changelog.ancestors(revnums, inclusive=True)]
365 (addsrc, adddst, advsrc, advdst, diverge, differ, invalid
366 (addsrc, adddst, advsrc, advdst, diverge, differ, invalid
366 ) = bookmarks.compare(repo, repo._bookmarks, remote.listkeys('bookmarks'),
367 ) = bookmarks.compare(repo, repo._bookmarks, remote.listkeys('bookmarks'),
367 srchex=hex)
368 srchex=hex)
368
369
369 for b, scid, dcid in advsrc:
370 for b, scid, dcid in advsrc:
370 if ancestors and repo[scid].rev() not in ancestors:
371 if ancestors and repo[scid].rev() not in ancestors:
371 continue
372 continue
372 if remote.pushkey('bookmarks', b, dcid, scid):
373 if remote.pushkey('bookmarks', b, dcid, scid):
373 ui.status(_("updating bookmark %s\n") % b)
374 ui.status(_("updating bookmark %s\n") % b)
374 else:
375 else:
375 ui.warn(_('updating bookmark %s failed!\n') % b)
376 ui.warn(_('updating bookmark %s failed!\n') % b)
376
377
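# Illustration only -- bookmarks.compare returns seven buckets and the push
# above only acts on 'advsrc' (bookmarks that moved forward locally); this
# helper, assuming `repo` is a localrepository and `remote` a peer, lists the
# bookmark names that would be pushed.
from mercurial import bookmarks
from mercurial.node import hex

def outgoingbookmarks(repo, remote):
    comp = bookmarks.compare(repo, repo._bookmarks,
                             remote.listkeys('bookmarks'), srchex=hex)
    addsrc, adddst, advsrc, advdst, diverge, differ, invalid = comp
    return [b for b, scid, dcid in advsrc]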
377 class pulloperation(object):
378 class pulloperation(object):
378 """A object that represent a single pull operation
379 """A object that represent a single pull operation
379
380
380 It purpose is to carry push related state and very common operation.
381 It purpose is to carry push related state and very common operation.
381
382
382 A new should be created at the begining of each pull and discarded
383 A new should be created at the begining of each pull and discarded
383 afterward.
384 afterward.
384 """
385 """
385
386
386 def __init__(self, repo, remote, heads=None, force=False):
387 def __init__(self, repo, remote, heads=None, force=False):
387 # repo we pull into
388 # repo we pull into
388 self.repo = repo
389 self.repo = repo
389 # repo we pull from
390 # repo we pull from
390 self.remote = remote
391 self.remote = remote
391 # revisions we try to pull (None means "all")
392 # revisions we try to pull (None means "all")
392 self.heads = heads
393 self.heads = heads
393 # do we force pull?
394 # do we force pull?
394 self.force = force
395 self.force = force
395 # the name of the pull transaction
396 # the name of the pull transaction
396 self._trname = 'pull\n' + util.hidepassword(remote.url())
397 self._trname = 'pull\n' + util.hidepassword(remote.url())
397 # hold the transaction once created
398 # hold the transaction once created
398 self._tr = None
399 self._tr = None
399 # set of common changesets between local and remote before pull
400 # set of common changesets between local and remote before pull
400 self.common = None
401 self.common = None
401 # set of pulled heads
402 # set of pulled heads
402 self.rheads = None
403 self.rheads = None
403 # list of missing changesets to fetch remotely
404 # list of missing changesets to fetch remotely
404 self.fetch = None
405 self.fetch = None
405 # result of changegroup pulling (used as return code by pull)
406 # result of changegroup pulling (used as return code by pull)
406 self.cgresult = None
407 self.cgresult = None
407 # list of steps remaining to do (related to future bundle2 usage)
408 # list of steps remaining to do (related to future bundle2 usage)
408 self.todosteps = set(['changegroup', 'phases', 'obsmarkers'])
409 self.todosteps = set(['changegroup', 'phases', 'obsmarkers'])
409
410
410 @util.propertycache
411 @util.propertycache
411 def pulledsubset(self):
412 def pulledsubset(self):
412 """heads of the set of changeset target by the pull"""
413 """heads of the set of changeset target by the pull"""
413 # compute target subset
414 # compute target subset
414 if self.heads is None:
415 if self.heads is None:
415 # We pulled everything possible
416 # We pulled everything possible
416 # sync on everything common
417 # sync on everything common
417 c = set(self.common)
418 c = set(self.common)
418 ret = list(self.common)
419 ret = list(self.common)
419 for n in self.rheads:
420 for n in self.rheads:
420 if n not in c:
421 if n not in c:
421 ret.append(n)
422 ret.append(n)
422 return ret
423 return ret
423 else:
424 else:
424 # We pulled a specific subset
425 # We pulled a specific subset
425 # sync on this subset
426 # sync on this subset
426 return self.heads
427 return self.heads
427
428
428 def gettransaction(self):
429 def gettransaction(self):
429 """get appropriate pull transaction, creating it if needed"""
430 """get appropriate pull transaction, creating it if needed"""
430 if self._tr is None:
431 if self._tr is None:
431 self._tr = self.repo.transaction(self._trname)
432 self._tr = self.repo.transaction(self._trname)
432 return self._tr
433 return self._tr
433
434
434 def closetransaction(self):
435 def closetransaction(self):
435 """close transaction if created"""
436 """close transaction if created"""
436 if self._tr is not None:
437 if self._tr is not None:
437 self._tr.close()
438 self._tr.close()
438
439
439 def releasetransaction(self):
440 def releasetransaction(self):
440 """release transaction if created"""
441 """release transaction if created"""
441 if self._tr is not None:
442 if self._tr is not None:
442 self._tr.release()
443 self._tr.release()
443
444
444 def pull(repo, remote, heads=None, force=False):
445 def pull(repo, remote, heads=None, force=False):
445 pullop = pulloperation(repo, remote, heads, force)
446 pullop = pulloperation(repo, remote, heads, force)
446 if pullop.remote.local():
447 if pullop.remote.local():
447 missing = set(pullop.remote.requirements) - pullop.repo.supported
448 missing = set(pullop.remote.requirements) - pullop.repo.supported
448 if missing:
449 if missing:
449 msg = _("required features are not"
450 msg = _("required features are not"
450 " supported in the destination:"
451 " supported in the destination:"
451 " %s") % (', '.join(sorted(missing)))
452 " %s") % (', '.join(sorted(missing)))
452 raise util.Abort(msg)
453 raise util.Abort(msg)
453
454
454 lock = pullop.repo.lock()
455 lock = pullop.repo.lock()
455 try:
456 try:
456 _pulldiscovery(pullop)
457 _pulldiscovery(pullop)
457 if 'changegroup' in pullop.todosteps:
458 if 'changegroup' in pullop.todosteps:
458 _pullchangeset(pullop)
459 _pullchangeset(pullop)
459 if 'phases' in pullop.todosteps:
460 if 'phases' in pullop.todosteps:
460 _pullphase(pullop)
461 _pullphase(pullop)
461 if 'obsmarkers' in pullop.todosteps:
462 if 'obsmarkers' in pullop.todosteps:
462 _pullobsolete(pullop)
463 _pullobsolete(pullop)
463 pullop.closetransaction()
464 pullop.closetransaction()
464 finally:
465 finally:
465 pullop.releasetransaction()
466 pullop.releasetransaction()
466 lock.release()
467 lock.release()
467
468
468 return pullop.cgresult
469 return pullop.cgresult
469
470
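# Illustration only -- a minimal caller of the pull() entry point above; the
# paths are placeholders, and hg.repository/hg.peer are the usual ways to get
# the repository and peer objects it expects.
from mercurial import ui as uimod, hg, exchange

def pulleverything(localpath, remotepath):
    ui = uimod.ui()
    repo = hg.repository(ui, localpath)
    other = hg.peer(repo, {}, remotepath)
    return exchange.pull(repo, other, heads=None, force=False)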
470 def _pulldiscovery(pullop):
471 def _pulldiscovery(pullop):
471 """discovery phase for the pull
472 """discovery phase for the pull
472
473
473 Currently handles changeset discovery only; it will handle all discovery
474 Currently handles changeset discovery only; it will handle all discovery
474 at some point."""
475 at some point."""
475 tmp = discovery.findcommonincoming(pullop.repo.unfiltered(),
476 tmp = discovery.findcommonincoming(pullop.repo.unfiltered(),
476 pullop.remote,
477 pullop.remote,
477 heads=pullop.heads,
478 heads=pullop.heads,
478 force=pullop.force)
479 force=pullop.force)
479 pullop.common, pullop.fetch, pullop.rheads = tmp
480 pullop.common, pullop.fetch, pullop.rheads = tmp
480
481
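# Illustration only -- the discovery call above returns a three-element tuple:
# 'common' holds (at least the heads of) the common subset, 'fetch' the roots
# of what would be incoming (empty means nothing to pull), and 'rheads' the
# remote heads.  A sketch of the "is there anything to pull?" question:
from mercurial import discovery

def needspull(repo, remote):
    common, fetch, rheads = discovery.findcommonincoming(
        repo.unfiltered(), remote, heads=None, force=False)
    return bool(fetch)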
481 def _pullchangeset(pullop):
482 def _pullchangeset(pullop):
482 """pull changeset from unbundle into the local repo"""
483 """pull changeset from unbundle into the local repo"""
483 # We delay opening the transaction as late as possible so we don't
484 # We delay opening the transaction as late as possible so we don't
484 # open a transaction for nothing and don't break future useful
485 # open a transaction for nothing and don't break future useful
485 # rollback calls
486 # rollback calls
486 pullop.todosteps.remove('changegroup')
487 pullop.todosteps.remove('changegroup')
487 if not pullop.fetch:
488 if not pullop.fetch:
488 pullop.repo.ui.status(_("no changes found\n"))
489 pullop.repo.ui.status(_("no changes found\n"))
489 pullop.cgresult = 0
490 pullop.cgresult = 0
490 return
491 return
491 pullop.gettransaction()
492 pullop.gettransaction()
492 if pullop.heads is None and list(pullop.common) == [nullid]:
493 if pullop.heads is None and list(pullop.common) == [nullid]:
493 pullop.repo.ui.status(_("requesting all changes\n"))
494 pullop.repo.ui.status(_("requesting all changes\n"))
494 elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
495 elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
495 # issue1320, avoid a race if remote changed after discovery
496 # issue1320, avoid a race if remote changed after discovery
496 pullop.heads = pullop.rheads
497 pullop.heads = pullop.rheads
497
498
498 if pullop.remote.capable('getbundle'):
499 if pullop.remote.capable('getbundle'):
499 # TODO: get bundlecaps from remote
500 # TODO: get bundlecaps from remote
500 cg = pullop.remote.getbundle('pull', common=pullop.common,
501 cg = pullop.remote.getbundle('pull', common=pullop.common,
501 heads=pullop.heads or pullop.rheads)
502 heads=pullop.heads or pullop.rheads)
502 elif pullop.heads is None:
503 elif pullop.heads is None:
503 cg = pullop.remote.changegroup(pullop.fetch, 'pull')
504 cg = pullop.remote.changegroup(pullop.fetch, 'pull')
504 elif not pullop.remote.capable('changegroupsubset'):
505 elif not pullop.remote.capable('changegroupsubset'):
505 raise util.Abort(_("partial pull cannot be done because "
506 raise util.Abort(_("partial pull cannot be done because "
506 "other repository doesn't support "
507 "other repository doesn't support "
507 "changegroupsubset."))
508 "changegroupsubset."))
508 else:
509 else:
509 cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
510 cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
510 pullop.cgresult = pullop.repo.addchangegroup(cg, 'pull',
511 pullop.cgresult = pullop.repo.addchangegroup(cg, 'pull',
511 pullop.remote.url())
512 pullop.remote.url())
512
513
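# Illustration only -- a condensed view of the capability checks above,
# without the transaction handling and the issue1320 head adjustment.
def fetchmethod(remote, heads):
    if remote.capable('getbundle'):
        return 'getbundle'            # modern peers
    elif heads is None:
        return 'changegroup'          # full pull from an old server
    elif remote.capable('changegroupsubset'):
        return 'changegroupsubset'    # partial pull from an old server
    else:
        return None                   # partial pull impossible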
513 def _pullphase(pullop):
514 def _pullphase(pullop):
514 # Get remote phases data from remote
515 # Get remote phases data from remote
515 pullop.todosteps.remove('phases')
516 pullop.todosteps.remove('phases')
516 remotephases = pullop.remote.listkeys('phases')
517 remotephases = pullop.remote.listkeys('phases')
517 publishing = bool(remotephases.get('publishing', False))
518 publishing = bool(remotephases.get('publishing', False))
518 if remotephases and not publishing:
519 if remotephases and not publishing:
519 # remote is new and unpublishing
520 # remote is new and unpublishing
520 pheads, _dr = phases.analyzeremotephases(pullop.repo,
521 pheads, _dr = phases.analyzeremotephases(pullop.repo,
521 pullop.pulledsubset,
522 pullop.pulledsubset,
522 remotephases)
523 remotephases)
523 phases.advanceboundary(pullop.repo, phases.public, pheads)
524 phases.advanceboundary(pullop.repo, phases.public, pheads)
524 phases.advanceboundary(pullop.repo, phases.draft,
525 phases.advanceboundary(pullop.repo, phases.draft,
525 pullop.pulledsubset)
526 pullop.pulledsubset)
526 else:
527 else:
527 # Remote is old or publishing all common changesets
528 # Remote is old or publishing all common changesets
528 # should be seen as public
529 # should be seen as public
529 phases.advanceboundary(pullop.repo, phases.public,
530 phases.advanceboundary(pullop.repo, phases.public,
530 pullop.pulledsubset)
531 pullop.pulledsubset)
531
532
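# Illustration only -- the 'phases' listkeys reply is a plain string map; an
# old server (empty reply) or a publishing one means everything pulled can be
# advanced to public, which is exactly the else branch above.
def remoteispublishing(remote):
    remotephases = remote.listkeys('phases')
    return not remotephases or bool(remotephases.get('publishing', False))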
532 def _pullobsolete(pullop):
533 def _pullobsolete(pullop):
533 """utility function to pull obsolete markers from a remote
534 """utility function to pull obsolete markers from a remote
534
535
535 The `gettransaction` function returns the pull transaction, creating
536 The `gettransaction` function returns the pull transaction, creating
536 one if necessary. We return the transaction to inform the calling code that
537 one if necessary. We return the transaction to inform the calling code that
537 a new transaction has been created (when applicable).
538 a new transaction has been created (when applicable).
538
539
539 Exists mostly to allow overriding for experimentation purposes."""
540 Exists mostly to allow overriding for experimentation purposes."""
540 pullop.todosteps.remove('obsmarkers')
541 pullop.todosteps.remove('obsmarkers')
541 tr = None
542 tr = None
542 if obsolete._enabled:
543 if obsolete._enabled:
543 pullop.repo.ui.debug('fetching remote obsolete markers\n')
544 pullop.repo.ui.debug('fetching remote obsolete markers\n')
544 remoteobs = pullop.remote.listkeys('obsolete')
545 remoteobs = pullop.remote.listkeys('obsolete')
545 if 'dump0' in remoteobs:
546 if 'dump0' in remoteobs:
546 tr = pullop.gettransaction()
547 tr = pullop.gettransaction()
547 for key in sorted(remoteobs, reverse=True):
548 for key in sorted(remoteobs, reverse=True):
548 if key.startswith('dump'):
549 if key.startswith('dump'):
549 data = base85.b85decode(remoteobs[key])
550 data = base85.b85decode(remoteobs[key])
550 pullop.repo.obsstore.mergemarkers(tr, data)
551 pullop.repo.obsstore.mergemarkers(tr, data)
551 pullop.repo.invalidatevolatilesets()
552 pullop.repo.invalidatevolatilesets()
552 return tr
553 return tr
553
554
@@ -1,2127 +1,2117 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 from node import hex, nullid, short
7 from node import hex, nullid, short
8 from i18n import _
8 from i18n import _
9 import peer, changegroup, subrepo, discovery, pushkey, obsolete, repoview
9 import peer, changegroup, subrepo, discovery, pushkey, obsolete, repoview
10 import changelog, dirstate, filelog, manifest, context, bookmarks, phases
10 import changelog, dirstate, filelog, manifest, context, bookmarks, phases
11 import lock as lockmod
11 import lock as lockmod
12 import transaction, store, encoding, exchange
12 import transaction, store, encoding, exchange
13 import scmutil, util, extensions, hook, error, revset
13 import scmutil, util, extensions, hook, error, revset
14 import match as matchmod
14 import match as matchmod
15 import merge as mergemod
15 import merge as mergemod
16 import tags as tagsmod
16 import tags as tagsmod
17 from lock import release
17 from lock import release
18 import weakref, errno, os, time, inspect
18 import weakref, errno, os, time, inspect
19 import branchmap, pathutil
19 import branchmap, pathutil
20 propertycache = util.propertycache
20 propertycache = util.propertycache
21 filecache = scmutil.filecache
21 filecache = scmutil.filecache
22
22
23 class repofilecache(filecache):
23 class repofilecache(filecache):
24 """All filecache usage on repo are done for logic that should be unfiltered
24 """All filecache usage on repo are done for logic that should be unfiltered
25 """
25 """
26
26
27 def __get__(self, repo, type=None):
27 def __get__(self, repo, type=None):
28 return super(repofilecache, self).__get__(repo.unfiltered(), type)
28 return super(repofilecache, self).__get__(repo.unfiltered(), type)
29 def __set__(self, repo, value):
29 def __set__(self, repo, value):
30 return super(repofilecache, self).__set__(repo.unfiltered(), value)
30 return super(repofilecache, self).__set__(repo.unfiltered(), value)
31 def __delete__(self, repo):
31 def __delete__(self, repo):
32 return super(repofilecache, self).__delete__(repo.unfiltered())
32 return super(repofilecache, self).__delete__(repo.unfiltered())
33
33
34 class storecache(repofilecache):
34 class storecache(repofilecache):
35 """filecache for files in the store"""
35 """filecache for files in the store"""
36 def join(self, obj, fname):
36 def join(self, obj, fname):
37 return obj.sjoin(fname)
37 return obj.sjoin(fname)
38
38
39 class unfilteredpropertycache(propertycache):
39 class unfilteredpropertycache(propertycache):
40 """propertycache that apply to unfiltered repo only"""
40 """propertycache that apply to unfiltered repo only"""
41
41
42 def __get__(self, repo, type=None):
42 def __get__(self, repo, type=None):
43 unfi = repo.unfiltered()
43 unfi = repo.unfiltered()
44 if unfi is repo:
44 if unfi is repo:
45 return super(unfilteredpropertycache, self).__get__(unfi)
45 return super(unfilteredpropertycache, self).__get__(unfi)
46 return getattr(unfi, self.name)
46 return getattr(unfi, self.name)
47
47
48 class filteredpropertycache(propertycache):
48 class filteredpropertycache(propertycache):
49 """propertycache that must take filtering in account"""
49 """propertycache that must take filtering in account"""
50
50
51 def cachevalue(self, obj, value):
51 def cachevalue(self, obj, value):
52 object.__setattr__(obj, self.name, value)
52 object.__setattr__(obj, self.name, value)
53
53
54
54
55 def hasunfilteredcache(repo, name):
55 def hasunfilteredcache(repo, name):
56 """check if a repo has an unfilteredpropertycache value for <name>"""
56 """check if a repo has an unfilteredpropertycache value for <name>"""
57 return name in vars(repo.unfiltered())
57 return name in vars(repo.unfiltered())
58
58
59 def unfilteredmethod(orig):
59 def unfilteredmethod(orig):
60 """decorate method that always need to be run on unfiltered version"""
60 """decorate method that always need to be run on unfiltered version"""
61 def wrapper(repo, *args, **kwargs):
61 def wrapper(repo, *args, **kwargs):
62 return orig(repo.unfiltered(), *args, **kwargs)
62 return orig(repo.unfiltered(), *args, **kwargs)
63 return wrapper
63 return wrapper
64
64
65 moderncaps = set(('lookup', 'branchmap', 'pushkey', 'known', 'getbundle'))
65 moderncaps = set(('lookup', 'branchmap', 'pushkey', 'known', 'getbundle'))
66 legacycaps = moderncaps.union(set(['changegroupsubset']))
66 legacycaps = moderncaps.union(set(['changegroupsubset']))
67
67
68 class localpeer(peer.peerrepository):
68 class localpeer(peer.peerrepository):
69 '''peer for a local repo; reflects only the most recent API'''
69 '''peer for a local repo; reflects only the most recent API'''
70
70
71 def __init__(self, repo, caps=moderncaps):
71 def __init__(self, repo, caps=moderncaps):
72 peer.peerrepository.__init__(self)
72 peer.peerrepository.__init__(self)
73 self._repo = repo.filtered('served')
73 self._repo = repo.filtered('served')
74 self.ui = repo.ui
74 self.ui = repo.ui
75 self._caps = repo._restrictcapabilities(caps)
75 self._caps = repo._restrictcapabilities(caps)
76 self.requirements = repo.requirements
76 self.requirements = repo.requirements
77 self.supportedformats = repo.supportedformats
77 self.supportedformats = repo.supportedformats
78
78
79 def close(self):
79 def close(self):
80 self._repo.close()
80 self._repo.close()
81
81
82 def _capabilities(self):
82 def _capabilities(self):
83 return self._caps
83 return self._caps
84
84
85 def local(self):
85 def local(self):
86 return self._repo
86 return self._repo
87
87
88 def canpush(self):
88 def canpush(self):
89 return True
89 return True
90
90
91 def url(self):
91 def url(self):
92 return self._repo.url()
92 return self._repo.url()
93
93
94 def lookup(self, key):
94 def lookup(self, key):
95 return self._repo.lookup(key)
95 return self._repo.lookup(key)
96
96
97 def branchmap(self):
97 def branchmap(self):
98 return self._repo.branchmap()
98 return self._repo.branchmap()
99
99
100 def heads(self):
100 def heads(self):
101 return self._repo.heads()
101 return self._repo.heads()
102
102
103 def known(self, nodes):
103 def known(self, nodes):
104 return self._repo.known(nodes)
104 return self._repo.known(nodes)
105
105
106 def getbundle(self, source, heads=None, common=None, bundlecaps=None):
106 def getbundle(self, source, heads=None, common=None, bundlecaps=None):
107 return self._repo.getbundle(source, heads=heads, common=common,
107 return self._repo.getbundle(source, heads=heads, common=common,
108 bundlecaps=None)
108 bundlecaps=None)
109
109
110 # TODO We might want to move the next two calls into legacypeer and add
110 # TODO We might want to move the next two calls into legacypeer and add
111 # unbundle instead.
111 # unbundle instead.
112
112
113 def lock(self):
113 def lock(self):
114 return self._repo.lock()
114 return self._repo.lock()
115
115
116 def addchangegroup(self, cg, source, url):
116 def addchangegroup(self, cg, source, url):
117 return self._repo.addchangegroup(cg, source, url)
117 return self._repo.addchangegroup(cg, source, url)
118
118
119 def pushkey(self, namespace, key, old, new):
119 def pushkey(self, namespace, key, old, new):
120 return self._repo.pushkey(namespace, key, old, new)
120 return self._repo.pushkey(namespace, key, old, new)
121
121
122 def listkeys(self, namespace):
122 def listkeys(self, namespace):
123 return self._repo.listkeys(namespace)
123 return self._repo.listkeys(namespace)
124
124
125 def debugwireargs(self, one, two, three=None, four=None, five=None):
125 def debugwireargs(self, one, two, three=None, four=None, five=None):
126 '''used to test argument passing over the wire'''
126 '''used to test argument passing over the wire'''
127 return "%s %s %s %s %s" % (one, two, three, four, five)
127 return "%s %s %s %s %s" % (one, two, three, four, five)
128
128
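# Illustration only -- localpeer is what repo.peer() (defined further down)
# returns, so code written against the peer API also works for a repository on
# local disk; this helper just exercises a few of the methods above.
def describelocalpeer(repo):
    peer = repo.peer()
    return {'url': peer.url(),
            'heads': peer.heads(),
            'canpush': peer.canpush()}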
129 class locallegacypeer(localpeer):
129 class locallegacypeer(localpeer):
130 '''peer extension which implements legacy methods too; used for tests with
130 '''peer extension which implements legacy methods too; used for tests with
131 restricted capabilities'''
131 restricted capabilities'''
132
132
133 def __init__(self, repo):
133 def __init__(self, repo):
134 localpeer.__init__(self, repo, caps=legacycaps)
134 localpeer.__init__(self, repo, caps=legacycaps)
135
135
136 def branches(self, nodes):
136 def branches(self, nodes):
137 return self._repo.branches(nodes)
137 return self._repo.branches(nodes)
138
138
139 def between(self, pairs):
139 def between(self, pairs):
140 return self._repo.between(pairs)
140 return self._repo.between(pairs)
141
141
142 def changegroup(self, basenodes, source):
142 def changegroup(self, basenodes, source):
143 return self._repo.changegroup(basenodes, source)
143 return self._repo.changegroup(basenodes, source)
144
144
145 def changegroupsubset(self, bases, heads, source):
145 def changegroupsubset(self, bases, heads, source):
146 return changegroup.changegroupsubset(self._repo, bases, heads, source)
146 return changegroup.changegroupsubset(self._repo, bases, heads, source)
147
147
148 class localrepository(object):
148 class localrepository(object):
149
149
150 supportedformats = set(('revlogv1', 'generaldelta'))
150 supportedformats = set(('revlogv1', 'generaldelta'))
151 _basesupported = supportedformats | set(('store', 'fncache', 'shared',
151 _basesupported = supportedformats | set(('store', 'fncache', 'shared',
152 'dotencode'))
152 'dotencode'))
153 openerreqs = set(('revlogv1', 'generaldelta'))
153 openerreqs = set(('revlogv1', 'generaldelta'))
154 requirements = ['revlogv1']
154 requirements = ['revlogv1']
155 filtername = None
155 filtername = None
156
156
157 # a list of (ui, featureset) functions.
157 # a list of (ui, featureset) functions.
158 # only functions defined in modules of enabled extensions are invoked
158 # only functions defined in modules of enabled extensions are invoked
159 featuresetupfuncs = set()
159 featuresetupfuncs = set()
160
160
161 def _baserequirements(self, create):
161 def _baserequirements(self, create):
162 return self.requirements[:]
162 return self.requirements[:]
163
163
164 def __init__(self, baseui, path=None, create=False):
164 def __init__(self, baseui, path=None, create=False):
165 self.wvfs = scmutil.vfs(path, expandpath=True, realpath=True)
165 self.wvfs = scmutil.vfs(path, expandpath=True, realpath=True)
166 self.wopener = self.wvfs
166 self.wopener = self.wvfs
167 self.root = self.wvfs.base
167 self.root = self.wvfs.base
168 self.path = self.wvfs.join(".hg")
168 self.path = self.wvfs.join(".hg")
169 self.origroot = path
169 self.origroot = path
170 self.auditor = pathutil.pathauditor(self.root, self._checknested)
170 self.auditor = pathutil.pathauditor(self.root, self._checknested)
171 self.vfs = scmutil.vfs(self.path)
171 self.vfs = scmutil.vfs(self.path)
172 self.opener = self.vfs
172 self.opener = self.vfs
173 self.baseui = baseui
173 self.baseui = baseui
174 self.ui = baseui.copy()
174 self.ui = baseui.copy()
175 self.ui.copy = baseui.copy # prevent copying repo configuration
175 self.ui.copy = baseui.copy # prevent copying repo configuration
176 # A list of callbacks to shape the phases if no data were found.
176 # A list of callbacks to shape the phases if no data were found.
177 # Callbacks are in the form: func(repo, roots) --> processed roots.
177 # Callbacks are in the form: func(repo, roots) --> processed roots.
178 # This list is to be filled by extensions during repo setup.
178 # This list is to be filled by extensions during repo setup.
179 self._phasedefaults = []
179 self._phasedefaults = []
180 try:
180 try:
181 self.ui.readconfig(self.join("hgrc"), self.root)
181 self.ui.readconfig(self.join("hgrc"), self.root)
182 extensions.loadall(self.ui)
182 extensions.loadall(self.ui)
183 except IOError:
183 except IOError:
184 pass
184 pass
185
185
186 if self.featuresetupfuncs:
186 if self.featuresetupfuncs:
187 self.supported = set(self._basesupported) # use private copy
187 self.supported = set(self._basesupported) # use private copy
188 extmods = set(m.__name__ for n, m
188 extmods = set(m.__name__ for n, m
189 in extensions.extensions(self.ui))
189 in extensions.extensions(self.ui))
190 for setupfunc in self.featuresetupfuncs:
190 for setupfunc in self.featuresetupfuncs:
191 if setupfunc.__module__ in extmods:
191 if setupfunc.__module__ in extmods:
192 setupfunc(self.ui, self.supported)
192 setupfunc(self.ui, self.supported)
193 else:
193 else:
194 self.supported = self._basesupported
194 self.supported = self._basesupported
195
195
196 if not self.vfs.isdir():
196 if not self.vfs.isdir():
197 if create:
197 if create:
198 if not self.wvfs.exists():
198 if not self.wvfs.exists():
199 self.wvfs.makedirs()
199 self.wvfs.makedirs()
200 self.vfs.makedir(notindexed=True)
200 self.vfs.makedir(notindexed=True)
201 requirements = self._baserequirements(create)
201 requirements = self._baserequirements(create)
202 if self.ui.configbool('format', 'usestore', True):
202 if self.ui.configbool('format', 'usestore', True):
203 self.vfs.mkdir("store")
203 self.vfs.mkdir("store")
204 requirements.append("store")
204 requirements.append("store")
205 if self.ui.configbool('format', 'usefncache', True):
205 if self.ui.configbool('format', 'usefncache', True):
206 requirements.append("fncache")
206 requirements.append("fncache")
207 if self.ui.configbool('format', 'dotencode', True):
207 if self.ui.configbool('format', 'dotencode', True):
208 requirements.append('dotencode')
208 requirements.append('dotencode')
209 # create an invalid changelog
209 # create an invalid changelog
210 self.vfs.append(
210 self.vfs.append(
211 "00changelog.i",
211 "00changelog.i",
212 '\0\0\0\2' # represents revlogv2
212 '\0\0\0\2' # represents revlogv2
213 ' dummy changelog to prevent using the old repo layout'
213 ' dummy changelog to prevent using the old repo layout'
214 )
214 )
215 if self.ui.configbool('format', 'generaldelta', False):
215 if self.ui.configbool('format', 'generaldelta', False):
216 requirements.append("generaldelta")
216 requirements.append("generaldelta")
217 requirements = set(requirements)
217 requirements = set(requirements)
218 else:
218 else:
219 raise error.RepoError(_("repository %s not found") % path)
219 raise error.RepoError(_("repository %s not found") % path)
220 elif create:
220 elif create:
221 raise error.RepoError(_("repository %s already exists") % path)
221 raise error.RepoError(_("repository %s already exists") % path)
222 else:
222 else:
223 try:
223 try:
224 requirements = scmutil.readrequires(self.vfs, self.supported)
224 requirements = scmutil.readrequires(self.vfs, self.supported)
225 except IOError, inst:
225 except IOError, inst:
226 if inst.errno != errno.ENOENT:
226 if inst.errno != errno.ENOENT:
227 raise
227 raise
228 requirements = set()
228 requirements = set()
229
229
230 self.sharedpath = self.path
230 self.sharedpath = self.path
231 try:
231 try:
232 vfs = scmutil.vfs(self.vfs.read("sharedpath").rstrip('\n'),
232 vfs = scmutil.vfs(self.vfs.read("sharedpath").rstrip('\n'),
233 realpath=True)
233 realpath=True)
234 s = vfs.base
234 s = vfs.base
235 if not vfs.exists():
235 if not vfs.exists():
236 raise error.RepoError(
236 raise error.RepoError(
237 _('.hg/sharedpath points to nonexistent directory %s') % s)
237 _('.hg/sharedpath points to nonexistent directory %s') % s)
238 self.sharedpath = s
238 self.sharedpath = s
239 except IOError, inst:
239 except IOError, inst:
240 if inst.errno != errno.ENOENT:
240 if inst.errno != errno.ENOENT:
241 raise
241 raise
242
242
243 self.store = store.store(requirements, self.sharedpath, scmutil.vfs)
243 self.store = store.store(requirements, self.sharedpath, scmutil.vfs)
244 self.spath = self.store.path
244 self.spath = self.store.path
245 self.svfs = self.store.vfs
245 self.svfs = self.store.vfs
246 self.sopener = self.svfs
246 self.sopener = self.svfs
247 self.sjoin = self.store.join
247 self.sjoin = self.store.join
248 self.vfs.createmode = self.store.createmode
248 self.vfs.createmode = self.store.createmode
249 self._applyrequirements(requirements)
249 self._applyrequirements(requirements)
250 if create:
250 if create:
251 self._writerequirements()
251 self._writerequirements()
252
252
253
253
254 self._branchcaches = {}
254 self._branchcaches = {}
255 self.filterpats = {}
255 self.filterpats = {}
256 self._datafilters = {}
256 self._datafilters = {}
257 self._transref = self._lockref = self._wlockref = None
257 self._transref = self._lockref = self._wlockref = None
258
258
259 # A cache for various files under .hg/ that tracks file changes,
259 # A cache for various files under .hg/ that tracks file changes,
260 # (used by the filecache decorator)
260 # (used by the filecache decorator)
261 #
261 #
262 # Maps a property name to its util.filecacheentry
262 # Maps a property name to its util.filecacheentry
263 self._filecache = {}
263 self._filecache = {}
264
264
265 # hold sets of revisions to be filtered
265 # hold sets of revisions to be filtered
266 # should be cleared when something might have changed the filter value:
266 # should be cleared when something might have changed the filter value:
267 # - new changesets,
267 # - new changesets,
268 # - phase change,
268 # - phase change,
269 # - new obsolescence marker,
269 # - new obsolescence marker,
270 # - working directory parent change,
270 # - working directory parent change,
271 # - bookmark changes
271 # - bookmark changes
272 self.filteredrevcache = {}
272 self.filteredrevcache = {}
273
273
274 def close(self):
274 def close(self):
275 pass
275 pass
276
276
277 def _restrictcapabilities(self, caps):
277 def _restrictcapabilities(self, caps):
278 return caps
278 return caps
279
279
280 def _applyrequirements(self, requirements):
280 def _applyrequirements(self, requirements):
281 self.requirements = requirements
281 self.requirements = requirements
282 self.sopener.options = dict((r, 1) for r in requirements
282 self.sopener.options = dict((r, 1) for r in requirements
283 if r in self.openerreqs)
283 if r in self.openerreqs)
284 chunkcachesize = self.ui.configint('format', 'chunkcachesize')
284 chunkcachesize = self.ui.configint('format', 'chunkcachesize')
285 if chunkcachesize is not None:
285 if chunkcachesize is not None:
286 self.sopener.options['chunkcachesize'] = chunkcachesize
286 self.sopener.options['chunkcachesize'] = chunkcachesize
287
287
288 def _writerequirements(self):
288 def _writerequirements(self):
289 reqfile = self.opener("requires", "w")
289 reqfile = self.opener("requires", "w")
290 for r in sorted(self.requirements):
290 for r in sorted(self.requirements):
291 reqfile.write("%s\n" % r)
291 reqfile.write("%s\n" % r)
292 reqfile.close()
292 reqfile.close()
293
293
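# Illustration only -- the loop above writes a plain .hg/requires file with
# one entry per line; a repository created with default settings plus
# format.generaldelta=True would typically contain, in sorted order:
# dotencode, fncache, generaldelta, revlogv1, store.  Reading it back is just:
def readrequiresfile(vfs):
    return set(vfs.read('requires').splitlines())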
294 def _checknested(self, path):
294 def _checknested(self, path):
295 """Determine if path is a legal nested repository."""
295 """Determine if path is a legal nested repository."""
296 if not path.startswith(self.root):
296 if not path.startswith(self.root):
297 return False
297 return False
298 subpath = path[len(self.root) + 1:]
298 subpath = path[len(self.root) + 1:]
299 normsubpath = util.pconvert(subpath)
299 normsubpath = util.pconvert(subpath)
300
300
301 # XXX: Checking against the current working copy is wrong in
301 # XXX: Checking against the current working copy is wrong in
302 # the sense that it can reject things like
302 # the sense that it can reject things like
303 #
303 #
304 # $ hg cat -r 10 sub/x.txt
304 # $ hg cat -r 10 sub/x.txt
305 #
305 #
306 # if sub/ is no longer a subrepository in the working copy
306 # if sub/ is no longer a subrepository in the working copy
307 # parent revision.
307 # parent revision.
308 #
308 #
309 # However, it can of course also allow things that would have
309 # However, it can of course also allow things that would have
310 # been rejected before, such as the above cat command if sub/
310 # been rejected before, such as the above cat command if sub/
311 # is a subrepository now, but was a normal directory before.
311 # is a subrepository now, but was a normal directory before.
312 # The old path auditor would have rejected by mistake since it
312 # The old path auditor would have rejected by mistake since it
313 # panics when it sees sub/.hg/.
313 # panics when it sees sub/.hg/.
314 #
314 #
315 # All in all, checking against the working copy seems sensible
315 # All in all, checking against the working copy seems sensible
316 # since we want to prevent access to nested repositories on
316 # since we want to prevent access to nested repositories on
317 # the filesystem *now*.
317 # the filesystem *now*.
318 ctx = self[None]
318 ctx = self[None]
319 parts = util.splitpath(subpath)
319 parts = util.splitpath(subpath)
320 while parts:
320 while parts:
321 prefix = '/'.join(parts)
321 prefix = '/'.join(parts)
322 if prefix in ctx.substate:
322 if prefix in ctx.substate:
323 if prefix == normsubpath:
323 if prefix == normsubpath:
324 return True
324 return True
325 else:
325 else:
326 sub = ctx.sub(prefix)
326 sub = ctx.sub(prefix)
327 return sub.checknested(subpath[len(prefix) + 1:])
327 return sub.checknested(subpath[len(prefix) + 1:])
328 else:
328 else:
329 parts.pop()
329 parts.pop()
330 return False
330 return False
331
331
332 def peer(self):
332 def peer(self):
333 return localpeer(self) # not cached to avoid reference cycle
333 return localpeer(self) # not cached to avoid reference cycle
334
334
335 def unfiltered(self):
335 def unfiltered(self):
336 """Return unfiltered version of the repository
336 """Return unfiltered version of the repository
337
337
338 Intended to be overwritten by filtered repo."""
338 Intended to be overwritten by filtered repo."""
339 return self
339 return self
340
340
341 def filtered(self, name):
341 def filtered(self, name):
342 """Return a filtered version of a repository"""
342 """Return a filtered version of a repository"""
343 # build a new class with the mixin and the current class
343 # build a new class with the mixin and the current class
344 # (possibly subclass of the repo)
344 # (possibly subclass of the repo)
345 class proxycls(repoview.repoview, self.unfiltered().__class__):
345 class proxycls(repoview.repoview, self.unfiltered().__class__):
346 pass
346 pass
347 return proxycls(self, name)
347 return proxycls(self, name)
348
348
349 @repofilecache('bookmarks')
349 @repofilecache('bookmarks')
350 def _bookmarks(self):
350 def _bookmarks(self):
351 return bookmarks.bmstore(self)
351 return bookmarks.bmstore(self)
352
352
353 @repofilecache('bookmarks.current')
353 @repofilecache('bookmarks.current')
354 def _bookmarkcurrent(self):
354 def _bookmarkcurrent(self):
355 return bookmarks.readcurrent(self)
355 return bookmarks.readcurrent(self)
356
356
357 def bookmarkheads(self, bookmark):
357 def bookmarkheads(self, bookmark):
358 name = bookmark.split('@', 1)[0]
358 name = bookmark.split('@', 1)[0]
359 heads = []
359 heads = []
360 for mark, n in self._bookmarks.iteritems():
360 for mark, n in self._bookmarks.iteritems():
361 if mark.split('@', 1)[0] == name:
361 if mark.split('@', 1)[0] == name:
362 heads.append(n)
362 heads.append(n)
363 return heads
363 return heads
364
364
365 @storecache('phaseroots')
365 @storecache('phaseroots')
366 def _phasecache(self):
366 def _phasecache(self):
367 return phases.phasecache(self, self._phasedefaults)
367 return phases.phasecache(self, self._phasedefaults)
368
368
369 @storecache('obsstore')
369 @storecache('obsstore')
370 def obsstore(self):
370 def obsstore(self):
371 store = obsolete.obsstore(self.sopener)
371 store = obsolete.obsstore(self.sopener)
372 if store and not obsolete._enabled:
372 if store and not obsolete._enabled:
373 # message is rare enough to not be translated
373 # message is rare enough to not be translated
374 msg = 'obsolete feature not enabled but %i markers found!\n'
374 msg = 'obsolete feature not enabled but %i markers found!\n'
375 self.ui.warn(msg % len(list(store)))
375 self.ui.warn(msg % len(list(store)))
376 return store
376 return store
377
377
378 @storecache('00changelog.i')
378 @storecache('00changelog.i')
379 def changelog(self):
379 def changelog(self):
380 c = changelog.changelog(self.sopener)
380 c = changelog.changelog(self.sopener)
381 if 'HG_PENDING' in os.environ:
381 if 'HG_PENDING' in os.environ:
382 p = os.environ['HG_PENDING']
382 p = os.environ['HG_PENDING']
383 if p.startswith(self.root):
383 if p.startswith(self.root):
384 c.readpending('00changelog.i.a')
384 c.readpending('00changelog.i.a')
385 return c
385 return c
386
386
387 @storecache('00manifest.i')
387 @storecache('00manifest.i')
388 def manifest(self):
388 def manifest(self):
389 return manifest.manifest(self.sopener)
389 return manifest.manifest(self.sopener)
390
390
391 @repofilecache('dirstate')
391 @repofilecache('dirstate')
392 def dirstate(self):
392 def dirstate(self):
393 warned = [0]
393 warned = [0]
394 def validate(node):
394 def validate(node):
395 try:
395 try:
396 self.changelog.rev(node)
396 self.changelog.rev(node)
397 return node
397 return node
398 except error.LookupError:
398 except error.LookupError:
399 if not warned[0]:
399 if not warned[0]:
400 warned[0] = True
400 warned[0] = True
401 self.ui.warn(_("warning: ignoring unknown"
401 self.ui.warn(_("warning: ignoring unknown"
402 " working parent %s!\n") % short(node))
402 " working parent %s!\n") % short(node))
403 return nullid
403 return nullid
404
404
405 return dirstate.dirstate(self.opener, self.ui, self.root, validate)
405 return dirstate.dirstate(self.opener, self.ui, self.root, validate)
406
406
407 def __getitem__(self, changeid):
407 def __getitem__(self, changeid):
408 if changeid is None:
408 if changeid is None:
409 return context.workingctx(self)
409 return context.workingctx(self)
410 return context.changectx(self, changeid)
410 return context.changectx(self, changeid)
411
411
412 def __contains__(self, changeid):
412 def __contains__(self, changeid):
413 try:
413 try:
414 return bool(self.lookup(changeid))
414 return bool(self.lookup(changeid))
415 except error.RepoLookupError:
415 except error.RepoLookupError:
416 return False
416 return False
417
417
418 def __nonzero__(self):
418 def __nonzero__(self):
419 return True
419 return True
420
420
421 def __len__(self):
421 def __len__(self):
422 return len(self.changelog)
422 return len(self.changelog)
423
423
424 def __iter__(self):
424 def __iter__(self):
425 return iter(self.changelog)
425 return iter(self.changelog)
426
426
427 def revs(self, expr, *args):
427 def revs(self, expr, *args):
428 '''Return a list of revisions matching the given revset'''
428 '''Return a list of revisions matching the given revset'''
429 expr = revset.formatspec(expr, *args)
429 expr = revset.formatspec(expr, *args)
430 m = revset.match(None, expr)
430 m = revset.match(None, expr)
431 return m(self, revset.spanset(self))
431 return m(self, revset.spanset(self))
432
432
433 def set(self, expr, *args):
433 def set(self, expr, *args):
434 '''
434 '''
435 Yield a context for each matching revision, after doing arg
435 Yield a context for each matching revision, after doing arg
436 replacement via revset.formatspec
436 replacement via revset.formatspec
437 '''
437 '''
438 for r in self.revs(expr, *args):
438 for r in self.revs(expr, *args):
439 yield self[r]
439 yield self[r]
440
440
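# Illustration only -- revs() and set() above take revset templates;
# formatspec handles quoting of the extra arguments ('%ld' expands a list of
# revision numbers, '%ln' a list of binary nodes).
def draftheads(repo):
    # revision numbers of draft heads
    return repo.revs('heads(draft())')

def publicsubset(repo, revs):
    # hex ids of the public revisions among `revs`, via context objects
    return [ctx.hex() for ctx in repo.set('%ld and public()', revs)]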
441 def url(self):
441 def url(self):
442 return 'file:' + self.root
442 return 'file:' + self.root
443
443
444 def hook(self, name, throw=False, **args):
444 def hook(self, name, throw=False, **args):
445 return hook.hook(self.ui, self, name, throw, **args)
445 return hook.hook(self.ui, self, name, throw, **args)
446
446
447 @unfilteredmethod
447 @unfilteredmethod
448 def _tag(self, names, node, message, local, user, date, extra={}):
448 def _tag(self, names, node, message, local, user, date, extra={}):
449 if isinstance(names, str):
449 if isinstance(names, str):
450 names = (names,)
450 names = (names,)
451
451
452 branches = self.branchmap()
452 branches = self.branchmap()
453 for name in names:
453 for name in names:
454 self.hook('pretag', throw=True, node=hex(node), tag=name,
454 self.hook('pretag', throw=True, node=hex(node), tag=name,
455 local=local)
455 local=local)
456 if name in branches:
456 if name in branches:
457 self.ui.warn(_("warning: tag %s conflicts with existing"
457 self.ui.warn(_("warning: tag %s conflicts with existing"
458 " branch name\n") % name)
458 " branch name\n") % name)
459
459
460 def writetags(fp, names, munge, prevtags):
460 def writetags(fp, names, munge, prevtags):
461 fp.seek(0, 2)
461 fp.seek(0, 2)
462 if prevtags and prevtags[-1] != '\n':
462 if prevtags and prevtags[-1] != '\n':
463 fp.write('\n')
463 fp.write('\n')
464 for name in names:
464 for name in names:
465 m = munge and munge(name) or name
465 m = munge and munge(name) or name
466 if (self._tagscache.tagtypes and
466 if (self._tagscache.tagtypes and
467 name in self._tagscache.tagtypes):
467 name in self._tagscache.tagtypes):
468 old = self.tags().get(name, nullid)
468 old = self.tags().get(name, nullid)
469 fp.write('%s %s\n' % (hex(old), m))
469 fp.write('%s %s\n' % (hex(old), m))
470 fp.write('%s %s\n' % (hex(node), m))
470 fp.write('%s %s\n' % (hex(node), m))
471 fp.close()
471 fp.close()
472
472
473 prevtags = ''
473 prevtags = ''
474 if local:
474 if local:
475 try:
475 try:
476 fp = self.opener('localtags', 'r+')
476 fp = self.opener('localtags', 'r+')
477 except IOError:
477 except IOError:
478 fp = self.opener('localtags', 'a')
478 fp = self.opener('localtags', 'a')
479 else:
479 else:
480 prevtags = fp.read()
480 prevtags = fp.read()
481
481
482 # local tags are stored in the current charset
482 # local tags are stored in the current charset
483 writetags(fp, names, None, prevtags)
483 writetags(fp, names, None, prevtags)
484 for name in names:
484 for name in names:
485 self.hook('tag', node=hex(node), tag=name, local=local)
485 self.hook('tag', node=hex(node), tag=name, local=local)
486 return
486 return
487
487
488 try:
488 try:
489 fp = self.wfile('.hgtags', 'rb+')
489 fp = self.wfile('.hgtags', 'rb+')
490 except IOError, e:
490 except IOError, e:
491 if e.errno != errno.ENOENT:
491 if e.errno != errno.ENOENT:
492 raise
492 raise
493 fp = self.wfile('.hgtags', 'ab')
493 fp = self.wfile('.hgtags', 'ab')
494 else:
494 else:
495 prevtags = fp.read()
495 prevtags = fp.read()
496
496
497 # committed tags are stored in UTF-8
497 # committed tags are stored in UTF-8
498 writetags(fp, names, encoding.fromlocal, prevtags)
498 writetags(fp, names, encoding.fromlocal, prevtags)
499
499
500 fp.close()
500 fp.close()
501
501
502 self.invalidatecaches()
502 self.invalidatecaches()
503
503
504 if '.hgtags' not in self.dirstate:
504 if '.hgtags' not in self.dirstate:
505 self[None].add(['.hgtags'])
505 self[None].add(['.hgtags'])
506
506
507 m = matchmod.exact(self.root, '', ['.hgtags'])
507 m = matchmod.exact(self.root, '', ['.hgtags'])
508 tagnode = self.commit(message, user, date, extra=extra, match=m)
508 tagnode = self.commit(message, user, date, extra=extra, match=m)
509
509
510 for name in names:
510 for name in names:
511 self.hook('tag', node=hex(node), tag=name, local=local)
511 self.hook('tag', node=hex(node), tag=name, local=local)
512
512
513 return tagnode
513 return tagnode
514
514
515 def tag(self, names, node, message, local, user, date):
515 def tag(self, names, node, message, local, user, date):
516 '''tag a revision with one or more symbolic names.
516 '''tag a revision with one or more symbolic names.
517
517
518 names is a list of strings or, when adding a single tag, names may be a
518 names is a list of strings or, when adding a single tag, names may be a
519 string.
519 string.
520
520
521 if local is True, the tags are stored in a per-repository file.
521 if local is True, the tags are stored in a per-repository file.
522 otherwise, they are stored in the .hgtags file, and a new
522 otherwise, they are stored in the .hgtags file, and a new
523 changeset is committed with the change.
523 changeset is committed with the change.
524
524
525 keyword arguments:
525 keyword arguments:
526
526
527 local: whether to store tags in non-version-controlled file
527 local: whether to store tags in non-version-controlled file
528 (default False)
528 (default False)
529
529
530 message: commit message to use if committing
530 message: commit message to use if committing
531
531
532 user: name of user to use if committing
532 user: name of user to use if committing
533
533
534 date: date tuple to use if committing'''
534 date: date tuple to use if committing'''
535
535
536 if not local:
536 if not local:
537 for x in self.status()[:5]:
537 for x in self.status()[:5]:
538 if '.hgtags' in x:
538 if '.hgtags' in x:
539 raise util.Abort(_('working copy of .hgtags is changed '
539 raise util.Abort(_('working copy of .hgtags is changed '
540 '(please commit .hgtags manually)'))
540 '(please commit .hgtags manually)'))
541
541
542 self.tags() # instantiate the cache
542 self.tags() # instantiate the cache
543 self._tag(names, node, message, local, user, date)
543 self._tag(names, node, message, local, user, date)
544
544
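A minimal usage sketch of the tag() API above (illustrative only, not part of this file): it assumes a repository in the current directory, a clean .hgtags, and a made-up tag name, message and user.

    from mercurial import ui as uimod, hg
    repo = hg.repository(uimod.ui(), '.')    # assumption: a Mercurial repo lives in '.'
    wlock = repo.wlock()
    try:
        # create a global (committed) tag on the current tip
        repo.tag(['v1.0'], repo['tip'].node(), 'Added tag v1.0', False,
                 'user@example.com', None)
    finally:
        wlock.release()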
545 @filteredpropertycache
545 @filteredpropertycache
546 def _tagscache(self):
546 def _tagscache(self):
547 '''Returns a tagscache object that contains various tags-related
547 '''Returns a tagscache object that contains various tags-related
548 caches.'''
548 caches.'''
549
549
550 # This simplifies its cache management by having one decorated
550 # This simplifies its cache management by having one decorated
551 # function (this one) and the rest simply fetch things from it.
551 # function (this one) and the rest simply fetch things from it.
552 class tagscache(object):
552 class tagscache(object):
553 def __init__(self):
553 def __init__(self):
554 # These two define the set of tags for this repository. tags
554 # These two define the set of tags for this repository. tags
555 # maps tag name to node; tagtypes maps tag name to 'global' or
555 # maps tag name to node; tagtypes maps tag name to 'global' or
556 # 'local'. (Global tags are defined by .hgtags across all
556 # 'local'. (Global tags are defined by .hgtags across all
557 # heads, and local tags are defined in .hg/localtags.)
557 # heads, and local tags are defined in .hg/localtags.)
558 # They constitute the in-memory cache of tags.
558 # They constitute the in-memory cache of tags.
559 self.tags = self.tagtypes = None
559 self.tags = self.tagtypes = None
560
560
561 self.nodetagscache = self.tagslist = None
561 self.nodetagscache = self.tagslist = None
562
562
563 cache = tagscache()
563 cache = tagscache()
564 cache.tags, cache.tagtypes = self._findtags()
564 cache.tags, cache.tagtypes = self._findtags()
565
565
566 return cache
566 return cache
567
567
568 def tags(self):
568 def tags(self):
569 '''return a mapping of tag to node'''
569 '''return a mapping of tag to node'''
570 t = {}
570 t = {}
571 if self.changelog.filteredrevs:
571 if self.changelog.filteredrevs:
572 tags, tt = self._findtags()
572 tags, tt = self._findtags()
573 else:
573 else:
574 tags = self._tagscache.tags
574 tags = self._tagscache.tags
575 for k, v in tags.iteritems():
575 for k, v in tags.iteritems():
576 try:
576 try:
577 # ignore tags to unknown nodes
577 # ignore tags to unknown nodes
578 self.changelog.rev(v)
578 self.changelog.rev(v)
579 t[k] = v
579 t[k] = v
580 except (error.LookupError, ValueError):
580 except (error.LookupError, ValueError):
581 pass
581 pass
582 return t
582 return t
583
583
584 def _findtags(self):
584 def _findtags(self):
585 '''Do the hard work of finding tags. Return a pair of dicts
585 '''Do the hard work of finding tags. Return a pair of dicts
586 (tags, tagtypes) where tags maps tag name to node, and tagtypes
586 (tags, tagtypes) where tags maps tag name to node, and tagtypes
587 maps tag name to a string like \'global\' or \'local\'.
587 maps tag name to a string like \'global\' or \'local\'.
588 Subclasses or extensions are free to add their own tags, but
588 Subclasses or extensions are free to add their own tags, but
589 should be aware that the returned dicts will be retained for the
589 should be aware that the returned dicts will be retained for the
590 duration of the localrepo object.'''
590 duration of the localrepo object.'''
591
591
592 # XXX what tagtype should subclasses/extensions use? Currently
592 # XXX what tagtype should subclasses/extensions use? Currently
593 # mq and bookmarks add tags, but do not set the tagtype at all.
593 # mq and bookmarks add tags, but do not set the tagtype at all.
594 # Should each extension invent its own tag type? Should there
594 # Should each extension invent its own tag type? Should there
595 # be one tagtype for all such "virtual" tags? Or is the status
595 # be one tagtype for all such "virtual" tags? Or is the status
596 # quo fine?
596 # quo fine?
597
597
598 alltags = {} # map tag name to (node, hist)
598 alltags = {} # map tag name to (node, hist)
599 tagtypes = {}
599 tagtypes = {}
600
600
601 tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
601 tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
602 tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)
602 tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)
603
603
604 # Build the return dicts. Have to re-encode tag names because
604 # Build the return dicts. Have to re-encode tag names because
605 # the tags module always uses UTF-8 (in order not to lose info
605 # the tags module always uses UTF-8 (in order not to lose info
606 # writing to the cache), but the rest of Mercurial wants them in
606 # writing to the cache), but the rest of Mercurial wants them in
607 # local encoding.
607 # local encoding.
608 tags = {}
608 tags = {}
609 for (name, (node, hist)) in alltags.iteritems():
609 for (name, (node, hist)) in alltags.iteritems():
610 if node != nullid:
610 if node != nullid:
611 tags[encoding.tolocal(name)] = node
611 tags[encoding.tolocal(name)] = node
612 tags['tip'] = self.changelog.tip()
612 tags['tip'] = self.changelog.tip()
613 tagtypes = dict([(encoding.tolocal(name), value)
613 tagtypes = dict([(encoding.tolocal(name), value)
614 for (name, value) in tagtypes.iteritems()])
614 for (name, value) in tagtypes.iteritems()])
615 return (tags, tagtypes)
615 return (tags, tagtypes)
616
616
617 def tagtype(self, tagname):
617 def tagtype(self, tagname):
618 '''
618 '''
619 return the type of the given tag. result can be:
619 return the type of the given tag. result can be:
620
620
621 'local' : a local tag
621 'local' : a local tag
622 'global' : a global tag
622 'global' : a global tag
623 None : tag does not exist
623 None : tag does not exist
624 '''
624 '''
625
625
626 return self._tagscache.tagtypes.get(tagname)
626 return self._tagscache.tagtypes.get(tagname)
627
627
628 def tagslist(self):
628 def tagslist(self):
629 '''return a list of tags ordered by revision'''
629 '''return a list of tags ordered by revision'''
630 if not self._tagscache.tagslist:
630 if not self._tagscache.tagslist:
631 l = []
631 l = []
632 for t, n in self.tags().iteritems():
632 for t, n in self.tags().iteritems():
633 r = self.changelog.rev(n)
633 r = self.changelog.rev(n)
634 l.append((r, t, n))
634 l.append((r, t, n))
635 self._tagscache.tagslist = [(t, n) for r, t, n in sorted(l)]
635 self._tagscache.tagslist = [(t, n) for r, t, n in sorted(l)]
636
636
637 return self._tagscache.tagslist
637 return self._tagscache.tagslist
638
638
639 def nodetags(self, node):
639 def nodetags(self, node):
640 '''return the tags associated with a node'''
640 '''return the tags associated with a node'''
641 if not self._tagscache.nodetagscache:
641 if not self._tagscache.nodetagscache:
642 nodetagscache = {}
642 nodetagscache = {}
643 for t, n in self._tagscache.tags.iteritems():
643 for t, n in self._tagscache.tags.iteritems():
644 nodetagscache.setdefault(n, []).append(t)
644 nodetagscache.setdefault(n, []).append(t)
645 for tags in nodetagscache.itervalues():
645 for tags in nodetagscache.itervalues():
646 tags.sort()
646 tags.sort()
647 self._tagscache.nodetagscache = nodetagscache
647 self._tagscache.nodetagscache = nodetagscache
648 return self._tagscache.nodetagscache.get(node, [])
648 return self._tagscache.nodetagscache.get(node, [])
649
649
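The read side of the same caches, sketched below under the same assumptions (repository in the current directory, tag names purely illustrative): tags() maps names to nodes, tagtype() classifies them, and nodetags() goes the other way.

    from mercurial import ui as uimod, hg
    from mercurial.node import hex
    repo = hg.repository(uimod.ui(), '.')
    for name, n in sorted(repo.tags().iteritems()):
        # tagtype() returns 'local', 'global', or None (e.g. for the synthetic 'tip' tag)
        print '%s %s %s' % (name, hex(n), repo.tagtype(name) or '')
    print repo.nodetags(repo['tip'].node())     # tag names attached to the tip node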
650 def nodebookmarks(self, node):
650 def nodebookmarks(self, node):
651 marks = []
651 marks = []
652 for bookmark, n in self._bookmarks.iteritems():
652 for bookmark, n in self._bookmarks.iteritems():
653 if n == node:
653 if n == node:
654 marks.append(bookmark)
654 marks.append(bookmark)
655 return sorted(marks)
655 return sorted(marks)
656
656
657 def branchmap(self):
657 def branchmap(self):
658 '''returns a dictionary {branch: [branchheads]} with branchheads
658 '''returns a dictionary {branch: [branchheads]} with branchheads
659 ordered by increasing revision number'''
659 ordered by increasing revision number'''
660 branchmap.updatecache(self)
660 branchmap.updatecache(self)
661 return self._branchcaches[self.filtername]
661 return self._branchcaches[self.filtername]
662
662
663 def branchtip(self, branch):
663 def branchtip(self, branch):
664 '''return the tip node for a given branch'''
664 '''return the tip node for a given branch'''
665 try:
665 try:
666 return self.branchmap().branchtip(branch)
666 return self.branchmap().branchtip(branch)
667 except KeyError:
667 except KeyError:
668 raise error.RepoLookupError(_("unknown branch '%s'") % branch)
668 raise error.RepoLookupError(_("unknown branch '%s'") % branch)
669
669
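A small sketch of how the branch APIs above might be driven (assuming a repository in the current directory that has a 'default' branch):

    from mercurial import ui as uimod, hg
    from mercurial.node import hex
    repo = hg.repository(uimod.ui(), '.')
    for branch, heads in repo.branchmap().iteritems():
        print branch, [hex(h) for h in heads]    # heads in increasing revision order
    print hex(repo.branchtip('default'))         # raises RepoLookupError for unknown branches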
670 def lookup(self, key):
670 def lookup(self, key):
671 return self[key].node()
671 return self[key].node()
672
672
673 def lookupbranch(self, key, remote=None):
673 def lookupbranch(self, key, remote=None):
674 repo = remote or self
674 repo = remote or self
675 if key in repo.branchmap():
675 if key in repo.branchmap():
676 return key
676 return key
677
677
678 repo = (remote and remote.local()) and remote or self
678 repo = (remote and remote.local()) and remote or self
679 return repo[key].branch()
679 return repo[key].branch()
680
680
681 def known(self, nodes):
681 def known(self, nodes):
682 nm = self.changelog.nodemap
682 nm = self.changelog.nodemap
683 pc = self._phasecache
683 pc = self._phasecache
684 result = []
684 result = []
685 for n in nodes:
685 for n in nodes:
686 r = nm.get(n)
686 r = nm.get(n)
687 resp = not (r is None or pc.phase(self, r) >= phases.secret)
687 resp = not (r is None or pc.phase(self, r) >= phases.secret)
688 result.append(resp)
688 result.append(resp)
689 return result
689 return result
690
690
691 def local(self):
691 def local(self):
692 return self
692 return self
693
693
694 def cancopy(self):
694 def cancopy(self):
695 # so statichttprepo's override of local() works
695 # so statichttprepo's override of local() works
696 if not self.local():
696 if not self.local():
697 return False
697 return False
698 if not self.ui.configbool('phases', 'publish', True):
698 if not self.ui.configbool('phases', 'publish', True):
699 return True
699 return True
700 # if publishing we can't copy if there is filtered content
700 # if publishing we can't copy if there is filtered content
701 return not self.filtered('visible').changelog.filteredrevs
701 return not self.filtered('visible').changelog.filteredrevs
702
702
703 def join(self, f):
703 def join(self, f):
704 return os.path.join(self.path, f)
704 return os.path.join(self.path, f)
705
705
706 def wjoin(self, f):
706 def wjoin(self, f):
707 return os.path.join(self.root, f)
707 return os.path.join(self.root, f)
708
708
709 def file(self, f):
709 def file(self, f):
710 if f[0] == '/':
710 if f[0] == '/':
711 f = f[1:]
711 f = f[1:]
712 return filelog.filelog(self.sopener, f)
712 return filelog.filelog(self.sopener, f)
713
713
714 def changectx(self, changeid):
714 def changectx(self, changeid):
715 return self[changeid]
715 return self[changeid]
716
716
717 def parents(self, changeid=None):
717 def parents(self, changeid=None):
718 '''get list of changectxs for parents of changeid'''
718 '''get list of changectxs for parents of changeid'''
719 return self[changeid].parents()
719 return self[changeid].parents()
720
720
721 def setparents(self, p1, p2=nullid):
721 def setparents(self, p1, p2=nullid):
722 copies = self.dirstate.setparents(p1, p2)
722 copies = self.dirstate.setparents(p1, p2)
723 pctx = self[p1]
723 pctx = self[p1]
724 if copies:
724 if copies:
725 # Adjust copy records, the dirstate cannot do it, it
725 # Adjust copy records, the dirstate cannot do it, it
726 # requires access to parents manifests. Preserve them
726 # requires access to parents manifests. Preserve them
727 # only for entries added to first parent.
727 # only for entries added to first parent.
728 for f in copies:
728 for f in copies:
729 if f not in pctx and copies[f] in pctx:
729 if f not in pctx and copies[f] in pctx:
730 self.dirstate.copy(copies[f], f)
730 self.dirstate.copy(copies[f], f)
731 if p2 == nullid:
731 if p2 == nullid:
732 for f, s in sorted(self.dirstate.copies().items()):
732 for f, s in sorted(self.dirstate.copies().items()):
733 if f not in pctx and s not in pctx:
733 if f not in pctx and s not in pctx:
734 self.dirstate.copy(None, f)
734 self.dirstate.copy(None, f)
735
735
736 def filectx(self, path, changeid=None, fileid=None):
736 def filectx(self, path, changeid=None, fileid=None):
737 """changeid can be a changeset revision, node, or tag.
737 """changeid can be a changeset revision, node, or tag.
738 fileid can be a file revision or node."""
738 fileid can be a file revision or node."""
739 return context.filectx(self, path, changeid, fileid)
739 return context.filectx(self, path, changeid, fileid)
740
740
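For example, reading one file at one revision through filectx() could look like this sketch (the file name and revision are illustrative):

    from mercurial import ui as uimod, hg
    repo = hg.repository(uimod.ui(), '.')
    fctx = repo.filectx('README', changeid='tip')   # changeid may be a rev, node or tag
    print fctx.path(), len(fctx.data())             # data() returns the raw file contents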
741 def getcwd(self):
741 def getcwd(self):
742 return self.dirstate.getcwd()
742 return self.dirstate.getcwd()
743
743
744 def pathto(self, f, cwd=None):
744 def pathto(self, f, cwd=None):
745 return self.dirstate.pathto(f, cwd)
745 return self.dirstate.pathto(f, cwd)
746
746
747 def wfile(self, f, mode='r'):
747 def wfile(self, f, mode='r'):
748 return self.wopener(f, mode)
748 return self.wopener(f, mode)
749
749
750 def _link(self, f):
750 def _link(self, f):
751 return self.wvfs.islink(f)
751 return self.wvfs.islink(f)
752
752
753 def _loadfilter(self, filter):
753 def _loadfilter(self, filter):
754 if filter not in self.filterpats:
754 if filter not in self.filterpats:
755 l = []
755 l = []
756 for pat, cmd in self.ui.configitems(filter):
756 for pat, cmd in self.ui.configitems(filter):
757 if cmd == '!':
757 if cmd == '!':
758 continue
758 continue
759 mf = matchmod.match(self.root, '', [pat])
759 mf = matchmod.match(self.root, '', [pat])
760 fn = None
760 fn = None
761 params = cmd
761 params = cmd
762 for name, filterfn in self._datafilters.iteritems():
762 for name, filterfn in self._datafilters.iteritems():
763 if cmd.startswith(name):
763 if cmd.startswith(name):
764 fn = filterfn
764 fn = filterfn
765 params = cmd[len(name):].lstrip()
765 params = cmd[len(name):].lstrip()
766 break
766 break
767 if not fn:
767 if not fn:
768 fn = lambda s, c, **kwargs: util.filter(s, c)
768 fn = lambda s, c, **kwargs: util.filter(s, c)
769 # Wrap old filters not supporting keyword arguments
769 # Wrap old filters not supporting keyword arguments
770 if not inspect.getargspec(fn)[2]:
770 if not inspect.getargspec(fn)[2]:
771 oldfn = fn
771 oldfn = fn
772 fn = lambda s, c, **kwargs: oldfn(s, c)
772 fn = lambda s, c, **kwargs: oldfn(s, c)
773 l.append((mf, fn, params))
773 l.append((mf, fn, params))
774 self.filterpats[filter] = l
774 self.filterpats[filter] = l
775 return self.filterpats[filter]
775 return self.filterpats[filter]
776
776
777 def _filter(self, filterpats, filename, data):
777 def _filter(self, filterpats, filename, data):
778 for mf, fn, cmd in filterpats:
778 for mf, fn, cmd in filterpats:
779 if mf(filename):
779 if mf(filename):
780 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
780 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
781 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
781 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
782 break
782 break
783
783
784 return data
784 return data
785
785
786 @unfilteredpropertycache
786 @unfilteredpropertycache
787 def _encodefilterpats(self):
787 def _encodefilterpats(self):
788 return self._loadfilter('encode')
788 return self._loadfilter('encode')
789
789
790 @unfilteredpropertycache
790 @unfilteredpropertycache
791 def _decodefilterpats(self):
791 def _decodefilterpats(self):
792 return self._loadfilter('decode')
792 return self._loadfilter('decode')
793
793
794 def adddatafilter(self, name, filter):
794 def adddatafilter(self, name, filter):
795 self._datafilters[name] = filter
795 self._datafilters[name] = filter
796
796
797 def wread(self, filename):
797 def wread(self, filename):
798 if self._link(filename):
798 if self._link(filename):
799 data = self.wvfs.readlink(filename)
799 data = self.wvfs.readlink(filename)
800 else:
800 else:
801 data = self.wopener.read(filename)
801 data = self.wopener.read(filename)
802 return self._filter(self._encodefilterpats, filename, data)
802 return self._filter(self._encodefilterpats, filename, data)
803
803
804 def wwrite(self, filename, data, flags):
804 def wwrite(self, filename, data, flags):
805 data = self._filter(self._decodefilterpats, filename, data)
805 data = self._filter(self._decodefilterpats, filename, data)
806 if 'l' in flags:
806 if 'l' in flags:
807 self.wopener.symlink(data, filename)
807 self.wopener.symlink(data, filename)
808 else:
808 else:
809 self.wopener.write(filename, data)
809 self.wopener.write(filename, data)
810 if 'x' in flags:
810 if 'x' in flags:
811 self.wvfs.setflags(filename, False, True)
811 self.wvfs.setflags(filename, False, True)
812
812
813 def wwritedata(self, filename, data):
813 def wwritedata(self, filename, data):
814 return self._filter(self._decodefilterpats, filename, data)
814 return self._filter(self._decodefilterpats, filename, data)
815
815
816 def transaction(self, desc, report=None):
816 def transaction(self, desc, report=None):
817 tr = self._transref and self._transref() or None
817 tr = self._transref and self._transref() or None
818 if tr and tr.running():
818 if tr and tr.running():
819 return tr.nest()
819 return tr.nest()
820
820
821 # abort here if the journal already exists
821 # abort here if the journal already exists
822 if self.svfs.exists("journal"):
822 if self.svfs.exists("journal"):
823 raise error.RepoError(
823 raise error.RepoError(
824 _("abandoned transaction found - run hg recover"))
824 _("abandoned transaction found - run hg recover"))
825
825
826 def onclose():
826 def onclose():
827 self.store.write(tr)
827 self.store.write(tr)
828
828
829 self._writejournal(desc)
829 self._writejournal(desc)
830 renames = [(vfs, x, undoname(x)) for vfs, x in self._journalfiles()]
830 renames = [(vfs, x, undoname(x)) for vfs, x in self._journalfiles()]
831 rp = report and report or self.ui.warn
831 rp = report and report or self.ui.warn
832 tr = transaction.transaction(rp, self.sopener,
832 tr = transaction.transaction(rp, self.sopener,
833 "journal",
833 "journal",
834 aftertrans(renames),
834 aftertrans(renames),
835 self.store.createmode,
835 self.store.createmode,
836 onclose)
836 onclose)
837 self._transref = weakref.ref(tr)
837 self._transref = weakref.ref(tr)
838 return tr
838 return tr
839
839
840 def _journalfiles(self):
840 def _journalfiles(self):
841 return ((self.svfs, 'journal'),
841 return ((self.svfs, 'journal'),
842 (self.vfs, 'journal.dirstate'),
842 (self.vfs, 'journal.dirstate'),
843 (self.vfs, 'journal.branch'),
843 (self.vfs, 'journal.branch'),
844 (self.vfs, 'journal.desc'),
844 (self.vfs, 'journal.desc'),
845 (self.vfs, 'journal.bookmarks'),
845 (self.vfs, 'journal.bookmarks'),
846 (self.svfs, 'journal.phaseroots'))
846 (self.svfs, 'journal.phaseroots'))
847
847
848 def undofiles(self):
848 def undofiles(self):
849 return [vfs.join(undoname(x)) for vfs, x in self._journalfiles()]
849 return [vfs.join(undoname(x)) for vfs, x in self._journalfiles()]
850
850
851 def _writejournal(self, desc):
851 def _writejournal(self, desc):
852 self.opener.write("journal.dirstate",
852 self.opener.write("journal.dirstate",
853 self.opener.tryread("dirstate"))
853 self.opener.tryread("dirstate"))
854 self.opener.write("journal.branch",
854 self.opener.write("journal.branch",
855 encoding.fromlocal(self.dirstate.branch()))
855 encoding.fromlocal(self.dirstate.branch()))
856 self.opener.write("journal.desc",
856 self.opener.write("journal.desc",
857 "%d\n%s\n" % (len(self), desc))
857 "%d\n%s\n" % (len(self), desc))
858 self.opener.write("journal.bookmarks",
858 self.opener.write("journal.bookmarks",
859 self.opener.tryread("bookmarks"))
859 self.opener.tryread("bookmarks"))
860 self.sopener.write("journal.phaseroots",
860 self.sopener.write("journal.phaseroots",
861 self.sopener.tryread("phaseroots"))
861 self.sopener.tryread("phaseroots"))
862
862
863 def recover(self):
863 def recover(self):
864 lock = self.lock()
864 lock = self.lock()
865 try:
865 try:
866 if self.svfs.exists("journal"):
866 if self.svfs.exists("journal"):
867 self.ui.status(_("rolling back interrupted transaction\n"))
867 self.ui.status(_("rolling back interrupted transaction\n"))
868 transaction.rollback(self.sopener, "journal",
868 transaction.rollback(self.sopener, "journal",
869 self.ui.warn)
869 self.ui.warn)
870 self.invalidate()
870 self.invalidate()
871 return True
871 return True
872 else:
872 else:
873 self.ui.warn(_("no interrupted transaction available\n"))
873 self.ui.warn(_("no interrupted transaction available\n"))
874 return False
874 return False
875 finally:
875 finally:
876 lock.release()
876 lock.release()
877
877
878 def rollback(self, dryrun=False, force=False):
878 def rollback(self, dryrun=False, force=False):
879 wlock = lock = None
879 wlock = lock = None
880 try:
880 try:
881 wlock = self.wlock()
881 wlock = self.wlock()
882 lock = self.lock()
882 lock = self.lock()
883 if self.svfs.exists("undo"):
883 if self.svfs.exists("undo"):
884 return self._rollback(dryrun, force)
884 return self._rollback(dryrun, force)
885 else:
885 else:
886 self.ui.warn(_("no rollback information available\n"))
886 self.ui.warn(_("no rollback information available\n"))
887 return 1
887 return 1
888 finally:
888 finally:
889 release(lock, wlock)
889 release(lock, wlock)
890
890
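A cautious caller would usually probe with dryrun first, roughly as sketched here (assuming a repository in the current directory):

    from mercurial import ui as uimod, hg
    repo = hg.repository(uimod.ui(), '.')
    if repo.rollback(dryrun=True) == 0:    # 0 if undo information exists, 1 otherwise
        repo.rollback()                    # actually undo the last transaction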
891 @unfilteredmethod # Until we get smarter cache management
891 @unfilteredmethod # Until we get smarter cache management
892 def _rollback(self, dryrun, force):
892 def _rollback(self, dryrun, force):
893 ui = self.ui
893 ui = self.ui
894 try:
894 try:
895 args = self.opener.read('undo.desc').splitlines()
895 args = self.opener.read('undo.desc').splitlines()
896 (oldlen, desc, detail) = (int(args[0]), args[1], None)
896 (oldlen, desc, detail) = (int(args[0]), args[1], None)
897 if len(args) >= 3:
897 if len(args) >= 3:
898 detail = args[2]
898 detail = args[2]
899 oldtip = oldlen - 1
899 oldtip = oldlen - 1
900
900
901 if detail and ui.verbose:
901 if detail and ui.verbose:
902 msg = (_('repository tip rolled back to revision %s'
902 msg = (_('repository tip rolled back to revision %s'
903 ' (undo %s: %s)\n')
903 ' (undo %s: %s)\n')
904 % (oldtip, desc, detail))
904 % (oldtip, desc, detail))
905 else:
905 else:
906 msg = (_('repository tip rolled back to revision %s'
906 msg = (_('repository tip rolled back to revision %s'
907 ' (undo %s)\n')
907 ' (undo %s)\n')
908 % (oldtip, desc))
908 % (oldtip, desc))
909 except IOError:
909 except IOError:
910 msg = _('rolling back unknown transaction\n')
910 msg = _('rolling back unknown transaction\n')
911 desc = None
911 desc = None
912
912
913 if not force and self['.'] != self['tip'] and desc == 'commit':
913 if not force and self['.'] != self['tip'] and desc == 'commit':
914 raise util.Abort(
914 raise util.Abort(
915 _('rollback of last commit while not checked out '
915 _('rollback of last commit while not checked out '
916 'may lose data'), hint=_('use -f to force'))
916 'may lose data'), hint=_('use -f to force'))
917
917
918 ui.status(msg)
918 ui.status(msg)
919 if dryrun:
919 if dryrun:
920 return 0
920 return 0
921
921
922 parents = self.dirstate.parents()
922 parents = self.dirstate.parents()
923 self.destroying()
923 self.destroying()
924 transaction.rollback(self.sopener, 'undo', ui.warn)
924 transaction.rollback(self.sopener, 'undo', ui.warn)
925 if self.vfs.exists('undo.bookmarks'):
925 if self.vfs.exists('undo.bookmarks'):
926 self.vfs.rename('undo.bookmarks', 'bookmarks')
926 self.vfs.rename('undo.bookmarks', 'bookmarks')
927 if self.svfs.exists('undo.phaseroots'):
927 if self.svfs.exists('undo.phaseroots'):
928 self.svfs.rename('undo.phaseroots', 'phaseroots')
928 self.svfs.rename('undo.phaseroots', 'phaseroots')
929 self.invalidate()
929 self.invalidate()
930
930
931 parentgone = (parents[0] not in self.changelog.nodemap or
931 parentgone = (parents[0] not in self.changelog.nodemap or
932 parents[1] not in self.changelog.nodemap)
932 parents[1] not in self.changelog.nodemap)
933 if parentgone:
933 if parentgone:
934 self.vfs.rename('undo.dirstate', 'dirstate')
934 self.vfs.rename('undo.dirstate', 'dirstate')
935 try:
935 try:
936 branch = self.opener.read('undo.branch')
936 branch = self.opener.read('undo.branch')
937 self.dirstate.setbranch(encoding.tolocal(branch))
937 self.dirstate.setbranch(encoding.tolocal(branch))
938 except IOError:
938 except IOError:
939 ui.warn(_('named branch could not be reset: '
939 ui.warn(_('named branch could not be reset: '
940 'current branch is still \'%s\'\n')
940 'current branch is still \'%s\'\n')
941 % self.dirstate.branch())
941 % self.dirstate.branch())
942
942
943 self.dirstate.invalidate()
943 self.dirstate.invalidate()
944 parents = tuple([p.rev() for p in self.parents()])
944 parents = tuple([p.rev() for p in self.parents()])
945 if len(parents) > 1:
945 if len(parents) > 1:
946 ui.status(_('working directory now based on '
946 ui.status(_('working directory now based on '
947 'revisions %d and %d\n') % parents)
947 'revisions %d and %d\n') % parents)
948 else:
948 else:
949 ui.status(_('working directory now based on '
949 ui.status(_('working directory now based on '
950 'revision %d\n') % parents)
950 'revision %d\n') % parents)
951 # TODO: if we know which new heads may result from this rollback, pass
951 # TODO: if we know which new heads may result from this rollback, pass
952 # them to destroy(), which will prevent the branchhead cache from being
952 # them to destroy(), which will prevent the branchhead cache from being
953 # invalidated.
953 # invalidated.
954 self.destroyed()
954 self.destroyed()
955 return 0
955 return 0
956
956
957 def invalidatecaches(self):
957 def invalidatecaches(self):
958
958
959 if '_tagscache' in vars(self):
959 if '_tagscache' in vars(self):
960 # can't use delattr on proxy
960 # can't use delattr on proxy
961 del self.__dict__['_tagscache']
961 del self.__dict__['_tagscache']
962
962
963 self.unfiltered()._branchcaches.clear()
963 self.unfiltered()._branchcaches.clear()
964 self.invalidatevolatilesets()
964 self.invalidatevolatilesets()
965
965
966 def invalidatevolatilesets(self):
966 def invalidatevolatilesets(self):
967 self.filteredrevcache.clear()
967 self.filteredrevcache.clear()
968 obsolete.clearobscaches(self)
968 obsolete.clearobscaches(self)
969
969
970 def invalidatedirstate(self):
970 def invalidatedirstate(self):
971 '''Invalidates the dirstate, causing the next call to dirstate
971 '''Invalidates the dirstate, causing the next call to dirstate
972 to check if it was modified since the last time it was read,
972 to check if it was modified since the last time it was read,
973 rereading it if it has.
973 rereading it if it has.
974
974
975 This is different from dirstate.invalidate() in that it doesn't always
975 This is different from dirstate.invalidate() in that it doesn't always
976 reread the dirstate. Use dirstate.invalidate() if you want to
976 reread the dirstate. Use dirstate.invalidate() if you want to
977 explicitly read the dirstate again (i.e. restoring it to a previous
977 explicitly read the dirstate again (i.e. restoring it to a previous
978 known good state).'''
978 known good state).'''
979 if hasunfilteredcache(self, 'dirstate'):
979 if hasunfilteredcache(self, 'dirstate'):
980 for k in self.dirstate._filecache:
980 for k in self.dirstate._filecache:
981 try:
981 try:
982 delattr(self.dirstate, k)
982 delattr(self.dirstate, k)
983 except AttributeError:
983 except AttributeError:
984 pass
984 pass
985 delattr(self.unfiltered(), 'dirstate')
985 delattr(self.unfiltered(), 'dirstate')
986
986
987 def invalidate(self):
987 def invalidate(self):
988 unfiltered = self.unfiltered() # all file caches are stored unfiltered
988 unfiltered = self.unfiltered() # all file caches are stored unfiltered
989 for k in self._filecache:
989 for k in self._filecache:
990 # dirstate is invalidated separately in invalidatedirstate()
990 # dirstate is invalidated separately in invalidatedirstate()
991 if k == 'dirstate':
991 if k == 'dirstate':
992 continue
992 continue
993
993
994 try:
994 try:
995 delattr(unfiltered, k)
995 delattr(unfiltered, k)
996 except AttributeError:
996 except AttributeError:
997 pass
997 pass
998 self.invalidatecaches()
998 self.invalidatecaches()
999 self.store.invalidatecaches()
999 self.store.invalidatecaches()
1000
1000
1001 def invalidateall(self):
1001 def invalidateall(self):
1002 '''Fully invalidates both store and non-store parts, causing the
1002 '''Fully invalidates both store and non-store parts, causing the
1003 subsequent operation to reread any outside changes.'''
1003 subsequent operation to reread any outside changes.'''
1004 # extension should hook this to invalidate its caches
1004 # extension should hook this to invalidate its caches
1005 self.invalidate()
1005 self.invalidate()
1006 self.invalidatedirstate()
1006 self.invalidatedirstate()
1007
1007
1008 def _lock(self, vfs, lockname, wait, releasefn, acquirefn, desc):
1008 def _lock(self, vfs, lockname, wait, releasefn, acquirefn, desc):
1009 try:
1009 try:
1010 l = lockmod.lock(vfs, lockname, 0, releasefn, desc=desc)
1010 l = lockmod.lock(vfs, lockname, 0, releasefn, desc=desc)
1011 except error.LockHeld, inst:
1011 except error.LockHeld, inst:
1012 if not wait:
1012 if not wait:
1013 raise
1013 raise
1014 self.ui.warn(_("waiting for lock on %s held by %r\n") %
1014 self.ui.warn(_("waiting for lock on %s held by %r\n") %
1015 (desc, inst.locker))
1015 (desc, inst.locker))
1016 # default to 600 seconds timeout
1016 # default to 600 seconds timeout
1017 l = lockmod.lock(vfs, lockname,
1017 l = lockmod.lock(vfs, lockname,
1018 int(self.ui.config("ui", "timeout", "600")),
1018 int(self.ui.config("ui", "timeout", "600")),
1019 releasefn, desc=desc)
1019 releasefn, desc=desc)
1020 self.ui.warn(_("got lock after %s seconds\n") % l.delay)
1020 self.ui.warn(_("got lock after %s seconds\n") % l.delay)
1021 if acquirefn:
1021 if acquirefn:
1022 acquirefn()
1022 acquirefn()
1023 return l
1023 return l
1024
1024
1025 def _afterlock(self, callback):
1025 def _afterlock(self, callback):
1026 """add a callback to the current repository lock.
1026 """add a callback to the current repository lock.
1027
1027
1028 The callback will be executed on lock release."""
1028 The callback will be executed on lock release."""
1029 l = self._lockref and self._lockref()
1029 l = self._lockref and self._lockref()
1030 if l:
1030 if l:
1031 l.postrelease.append(callback)
1031 l.postrelease.append(callback)
1032 else:
1032 else:
1033 callback()
1033 callback()
1034
1034
1035 def lock(self, wait=True):
1035 def lock(self, wait=True):
1036 '''Lock the repository store (.hg/store) and return a weak reference
1036 '''Lock the repository store (.hg/store) and return a weak reference
1037 to the lock. Use this before modifying the store (e.g. committing or
1037 to the lock. Use this before modifying the store (e.g. committing or
1038 stripping). If you are opening a transaction, get a lock as well.'''
1038 stripping). If you are opening a transaction, get a lock as well.'''
1039 l = self._lockref and self._lockref()
1039 l = self._lockref and self._lockref()
1040 if l is not None and l.held:
1040 if l is not None and l.held:
1041 l.lock()
1041 l.lock()
1042 return l
1042 return l
1043
1043
1044 def unlock():
1044 def unlock():
1045 if hasunfilteredcache(self, '_phasecache'):
1045 if hasunfilteredcache(self, '_phasecache'):
1046 self._phasecache.write()
1046 self._phasecache.write()
1047 for k, ce in self._filecache.items():
1047 for k, ce in self._filecache.items():
1048 if k == 'dirstate' or k not in self.__dict__:
1048 if k == 'dirstate' or k not in self.__dict__:
1049 continue
1049 continue
1050 ce.refresh()
1050 ce.refresh()
1051
1051
1052 l = self._lock(self.svfs, "lock", wait, unlock,
1052 l = self._lock(self.svfs, "lock", wait, unlock,
1053 self.invalidate, _('repository %s') % self.origroot)
1053 self.invalidate, _('repository %s') % self.origroot)
1054 self._lockref = weakref.ref(l)
1054 self._lockref = weakref.ref(l)
1055 return l
1055 return l
1056
1056
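The pattern the docstring describes (take the store lock first, then open a transaction) is sketched below; the transaction description string is arbitrary:

    from mercurial import ui as uimod, hg
    repo = hg.repository(uimod.ui(), '.')
    lock = repo.lock()
    try:
        tr = repo.transaction('sketch')
        try:
            # ... write to the store here ...
            tr.close()      # makes the changes permanent
        finally:
            tr.release()    # rolls back automatically if close() was never reached
    finally:
        lock.release()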
1057 def wlock(self, wait=True):
1057 def wlock(self, wait=True):
1058 '''Lock the non-store parts of the repository (everything under
1058 '''Lock the non-store parts of the repository (everything under
1059 .hg except .hg/store) and return a weak reference to the lock.
1059 .hg except .hg/store) and return a weak reference to the lock.
1060 Use this before modifying files in .hg.'''
1060 Use this before modifying files in .hg.'''
1061 l = self._wlockref and self._wlockref()
1061 l = self._wlockref and self._wlockref()
1062 if l is not None and l.held:
1062 if l is not None and l.held:
1063 l.lock()
1063 l.lock()
1064 return l
1064 return l
1065
1065
1066 def unlock():
1066 def unlock():
1067 self.dirstate.write()
1067 self.dirstate.write()
1068 self._filecache['dirstate'].refresh()
1068 self._filecache['dirstate'].refresh()
1069
1069
1070 l = self._lock(self.vfs, "wlock", wait, unlock,
1070 l = self._lock(self.vfs, "wlock", wait, unlock,
1071 self.invalidatedirstate, _('working directory of %s') %
1071 self.invalidatedirstate, _('working directory of %s') %
1072 self.origroot)
1072 self.origroot)
1073 self._wlockref = weakref.ref(l)
1073 self._wlockref = weakref.ref(l)
1074 return l
1074 return l
1075
1075
1076 def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
1076 def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
1077 """
1077 """
1078 commit an individual file as part of a larger transaction
1078 commit an individual file as part of a larger transaction
1079 """
1079 """
1080
1080
1081 fname = fctx.path()
1081 fname = fctx.path()
1082 text = fctx.data()
1082 text = fctx.data()
1083 flog = self.file(fname)
1083 flog = self.file(fname)
1084 fparent1 = manifest1.get(fname, nullid)
1084 fparent1 = manifest1.get(fname, nullid)
1085 fparent2 = fparent2o = manifest2.get(fname, nullid)
1085 fparent2 = fparent2o = manifest2.get(fname, nullid)
1086
1086
1087 meta = {}
1087 meta = {}
1088 copy = fctx.renamed()
1088 copy = fctx.renamed()
1089 if copy and copy[0] != fname:
1089 if copy and copy[0] != fname:
1090 # Mark the new revision of this file as a copy of another
1090 # Mark the new revision of this file as a copy of another
1091 # file. This copy data will effectively act as a parent
1091 # file. This copy data will effectively act as a parent
1092 # of this new revision. If this is a merge, the first
1092 # of this new revision. If this is a merge, the first
1093 # parent will be the nullid (meaning "look up the copy data")
1093 # parent will be the nullid (meaning "look up the copy data")
1094 # and the second one will be the other parent. For example:
1094 # and the second one will be the other parent. For example:
1095 #
1095 #
1096 # 0 --- 1 --- 3 rev1 changes file foo
1096 # 0 --- 1 --- 3 rev1 changes file foo
1097 # \ / rev2 renames foo to bar and changes it
1097 # \ / rev2 renames foo to bar and changes it
1098 # \- 2 -/ rev3 should have bar with all changes and
1098 # \- 2 -/ rev3 should have bar with all changes and
1099 # should record that bar descends from
1099 # should record that bar descends from
1100 # bar in rev2 and foo in rev1
1100 # bar in rev2 and foo in rev1
1101 #
1101 #
1102 # this allows this merge to succeed:
1102 # this allows this merge to succeed:
1103 #
1103 #
1104 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
1104 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
1105 # \ / merging rev3 and rev4 should use bar@rev2
1105 # \ / merging rev3 and rev4 should use bar@rev2
1106 # \- 2 --- 4 as the merge base
1106 # \- 2 --- 4 as the merge base
1107 #
1107 #
1108
1108
1109 cfname = copy[0]
1109 cfname = copy[0]
1110 crev = manifest1.get(cfname)
1110 crev = manifest1.get(cfname)
1111 newfparent = fparent2
1111 newfparent = fparent2
1112
1112
1113 if manifest2: # branch merge
1113 if manifest2: # branch merge
1114 if fparent2 == nullid or crev is None: # copied on remote side
1114 if fparent2 == nullid or crev is None: # copied on remote side
1115 if cfname in manifest2:
1115 if cfname in manifest2:
1116 crev = manifest2[cfname]
1116 crev = manifest2[cfname]
1117 newfparent = fparent1
1117 newfparent = fparent1
1118
1118
1119 # find source in nearest ancestor if we've lost track
1119 # find source in nearest ancestor if we've lost track
1120 if not crev:
1120 if not crev:
1121 self.ui.debug(" %s: searching for copy revision for %s\n" %
1121 self.ui.debug(" %s: searching for copy revision for %s\n" %
1122 (fname, cfname))
1122 (fname, cfname))
1123 for ancestor in self[None].ancestors():
1123 for ancestor in self[None].ancestors():
1124 if cfname in ancestor:
1124 if cfname in ancestor:
1125 crev = ancestor[cfname].filenode()
1125 crev = ancestor[cfname].filenode()
1126 break
1126 break
1127
1127
1128 if crev:
1128 if crev:
1129 self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
1129 self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
1130 meta["copy"] = cfname
1130 meta["copy"] = cfname
1131 meta["copyrev"] = hex(crev)
1131 meta["copyrev"] = hex(crev)
1132 fparent1, fparent2 = nullid, newfparent
1132 fparent1, fparent2 = nullid, newfparent
1133 else:
1133 else:
1134 self.ui.warn(_("warning: can't find ancestor for '%s' "
1134 self.ui.warn(_("warning: can't find ancestor for '%s' "
1135 "copied from '%s'!\n") % (fname, cfname))
1135 "copied from '%s'!\n") % (fname, cfname))
1136
1136
1137 elif fparent1 == nullid:
1137 elif fparent1 == nullid:
1138 fparent1, fparent2 = fparent2, nullid
1138 fparent1, fparent2 = fparent2, nullid
1139 elif fparent2 != nullid:
1139 elif fparent2 != nullid:
1140 # is one parent an ancestor of the other?
1140 # is one parent an ancestor of the other?
1141 fparentancestor = flog.ancestor(fparent1, fparent2)
1141 fparentancestor = flog.ancestor(fparent1, fparent2)
1142 if fparentancestor == fparent1:
1142 if fparentancestor == fparent1:
1143 fparent1, fparent2 = fparent2, nullid
1143 fparent1, fparent2 = fparent2, nullid
1144 elif fparentancestor == fparent2:
1144 elif fparentancestor == fparent2:
1145 fparent2 = nullid
1145 fparent2 = nullid
1146
1146
1147 # is the file changed?
1147 # is the file changed?
1148 if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
1148 if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
1149 changelist.append(fname)
1149 changelist.append(fname)
1150 return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
1150 return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
1151
1151
1152 # are just the flags changed during merge?
1152 # are just the flags changed during merge?
1153 if fparent1 != fparent2o and manifest1.flags(fname) != fctx.flags():
1153 if fparent1 != fparent2o and manifest1.flags(fname) != fctx.flags():
1154 changelist.append(fname)
1154 changelist.append(fname)
1155
1155
1156 return fparent1
1156 return fparent1
1157
1157
1158 @unfilteredmethod
1158 @unfilteredmethod
1159 def commit(self, text="", user=None, date=None, match=None, force=False,
1159 def commit(self, text="", user=None, date=None, match=None, force=False,
1160 editor=False, extra={}):
1160 editor=False, extra={}):
1161 """Add a new revision to current repository.
1161 """Add a new revision to current repository.
1162
1162
1163 Revision information is gathered from the working directory,
1163 Revision information is gathered from the working directory,
1164 match can be used to filter the committed files. If editor is
1164 match can be used to filter the committed files. If editor is
1165 supplied, it is called to get a commit message.
1165 supplied, it is called to get a commit message.
1166 """
1166 """
1167
1167
1168 def fail(f, msg):
1168 def fail(f, msg):
1169 raise util.Abort('%s: %s' % (f, msg))
1169 raise util.Abort('%s: %s' % (f, msg))
1170
1170
1171 if not match:
1171 if not match:
1172 match = matchmod.always(self.root, '')
1172 match = matchmod.always(self.root, '')
1173
1173
1174 if not force:
1174 if not force:
1175 vdirs = []
1175 vdirs = []
1176 match.explicitdir = vdirs.append
1176 match.explicitdir = vdirs.append
1177 match.bad = fail
1177 match.bad = fail
1178
1178
1179 wlock = self.wlock()
1179 wlock = self.wlock()
1180 try:
1180 try:
1181 wctx = self[None]
1181 wctx = self[None]
1182 merge = len(wctx.parents()) > 1
1182 merge = len(wctx.parents()) > 1
1183
1183
1184 if (not force and merge and match and
1184 if (not force and merge and match and
1185 (match.files() or match.anypats())):
1185 (match.files() or match.anypats())):
1186 raise util.Abort(_('cannot partially commit a merge '
1186 raise util.Abort(_('cannot partially commit a merge '
1187 '(do not specify files or patterns)'))
1187 '(do not specify files or patterns)'))
1188
1188
1189 changes = self.status(match=match, clean=force)
1189 changes = self.status(match=match, clean=force)
1190 if force:
1190 if force:
1191 changes[0].extend(changes[6]) # mq may commit unchanged files
1191 changes[0].extend(changes[6]) # mq may commit unchanged files
1192
1192
1193 # check subrepos
1193 # check subrepos
1194 subs = []
1194 subs = []
1195 commitsubs = set()
1195 commitsubs = set()
1196 newstate = wctx.substate.copy()
1196 newstate = wctx.substate.copy()
1197 # only manage subrepos and .hgsubstate if .hgsub is present
1197 # only manage subrepos and .hgsubstate if .hgsub is present
1198 if '.hgsub' in wctx:
1198 if '.hgsub' in wctx:
1199 # we'll decide whether to track this ourselves, thanks
1199 # we'll decide whether to track this ourselves, thanks
1200 for c in changes[:3]:
1200 for c in changes[:3]:
1201 if '.hgsubstate' in c:
1201 if '.hgsubstate' in c:
1202 c.remove('.hgsubstate')
1202 c.remove('.hgsubstate')
1203
1203
1204 # compare current state to last committed state
1204 # compare current state to last committed state
1205 # build new substate based on last committed state
1205 # build new substate based on last committed state
1206 oldstate = wctx.p1().substate
1206 oldstate = wctx.p1().substate
1207 for s in sorted(newstate.keys()):
1207 for s in sorted(newstate.keys()):
1208 if not match(s):
1208 if not match(s):
1209 # ignore working copy, use old state if present
1209 # ignore working copy, use old state if present
1210 if s in oldstate:
1210 if s in oldstate:
1211 newstate[s] = oldstate[s]
1211 newstate[s] = oldstate[s]
1212 continue
1212 continue
1213 if not force:
1213 if not force:
1214 raise util.Abort(
1214 raise util.Abort(
1215 _("commit with new subrepo %s excluded") % s)
1215 _("commit with new subrepo %s excluded") % s)
1216 if wctx.sub(s).dirty(True):
1216 if wctx.sub(s).dirty(True):
1217 if not self.ui.configbool('ui', 'commitsubrepos'):
1217 if not self.ui.configbool('ui', 'commitsubrepos'):
1218 raise util.Abort(
1218 raise util.Abort(
1219 _("uncommitted changes in subrepo %s") % s,
1219 _("uncommitted changes in subrepo %s") % s,
1220 hint=_("use --subrepos for recursive commit"))
1220 hint=_("use --subrepos for recursive commit"))
1221 subs.append(s)
1221 subs.append(s)
1222 commitsubs.add(s)
1222 commitsubs.add(s)
1223 else:
1223 else:
1224 bs = wctx.sub(s).basestate()
1224 bs = wctx.sub(s).basestate()
1225 newstate[s] = (newstate[s][0], bs, newstate[s][2])
1225 newstate[s] = (newstate[s][0], bs, newstate[s][2])
1226 if oldstate.get(s, (None, None, None))[1] != bs:
1226 if oldstate.get(s, (None, None, None))[1] != bs:
1227 subs.append(s)
1227 subs.append(s)
1228
1228
1229 # check for removed subrepos
1229 # check for removed subrepos
1230 for p in wctx.parents():
1230 for p in wctx.parents():
1231 r = [s for s in p.substate if s not in newstate]
1231 r = [s for s in p.substate if s not in newstate]
1232 subs += [s for s in r if match(s)]
1232 subs += [s for s in r if match(s)]
1233 if subs:
1233 if subs:
1234 if (not match('.hgsub') and
1234 if (not match('.hgsub') and
1235 '.hgsub' in (wctx.modified() + wctx.added())):
1235 '.hgsub' in (wctx.modified() + wctx.added())):
1236 raise util.Abort(
1236 raise util.Abort(
1237 _("can't commit subrepos without .hgsub"))
1237 _("can't commit subrepos without .hgsub"))
1238 changes[0].insert(0, '.hgsubstate')
1238 changes[0].insert(0, '.hgsubstate')
1239
1239
1240 elif '.hgsub' in changes[2]:
1240 elif '.hgsub' in changes[2]:
1241 # clean up .hgsubstate when .hgsub is removed
1241 # clean up .hgsubstate when .hgsub is removed
1242 if ('.hgsubstate' in wctx and
1242 if ('.hgsubstate' in wctx and
1243 '.hgsubstate' not in changes[0] + changes[1] + changes[2]):
1243 '.hgsubstate' not in changes[0] + changes[1] + changes[2]):
1244 changes[2].insert(0, '.hgsubstate')
1244 changes[2].insert(0, '.hgsubstate')
1245
1245
1246 # make sure all explicit patterns are matched
1246 # make sure all explicit patterns are matched
1247 if not force and match.files():
1247 if not force and match.files():
1248 matched = set(changes[0] + changes[1] + changes[2])
1248 matched = set(changes[0] + changes[1] + changes[2])
1249
1249
1250 for f in match.files():
1250 for f in match.files():
1251 f = self.dirstate.normalize(f)
1251 f = self.dirstate.normalize(f)
1252 if f == '.' or f in matched or f in wctx.substate:
1252 if f == '.' or f in matched or f in wctx.substate:
1253 continue
1253 continue
1254 if f in changes[3]: # missing
1254 if f in changes[3]: # missing
1255 fail(f, _('file not found!'))
1255 fail(f, _('file not found!'))
1256 if f in vdirs: # visited directory
1256 if f in vdirs: # visited directory
1257 d = f + '/'
1257 d = f + '/'
1258 for mf in matched:
1258 for mf in matched:
1259 if mf.startswith(d):
1259 if mf.startswith(d):
1260 break
1260 break
1261 else:
1261 else:
1262 fail(f, _("no match under directory!"))
1262 fail(f, _("no match under directory!"))
1263 elif f not in self.dirstate:
1263 elif f not in self.dirstate:
1264 fail(f, _("file not tracked!"))
1264 fail(f, _("file not tracked!"))
1265
1265
1266 cctx = context.workingctx(self, text, user, date, extra, changes)
1266 cctx = context.workingctx(self, text, user, date, extra, changes)
1267
1267
1268 if (not force and not extra.get("close") and not merge
1268 if (not force and not extra.get("close") and not merge
1269 and not cctx.files()
1269 and not cctx.files()
1270 and wctx.branch() == wctx.p1().branch()):
1270 and wctx.branch() == wctx.p1().branch()):
1271 return None
1271 return None
1272
1272
1273 if merge and cctx.deleted():
1273 if merge and cctx.deleted():
1274 raise util.Abort(_("cannot commit merge with missing files"))
1274 raise util.Abort(_("cannot commit merge with missing files"))
1275
1275
1276 ms = mergemod.mergestate(self)
1276 ms = mergemod.mergestate(self)
1277 for f in changes[0]:
1277 for f in changes[0]:
1278 if f in ms and ms[f] == 'u':
1278 if f in ms and ms[f] == 'u':
1279 raise util.Abort(_("unresolved merge conflicts "
1279 raise util.Abort(_("unresolved merge conflicts "
1280 "(see hg help resolve)"))
1280 "(see hg help resolve)"))
1281
1281
1282 if editor:
1282 if editor:
1283 cctx._text = editor(self, cctx, subs)
1283 cctx._text = editor(self, cctx, subs)
1284 edited = (text != cctx._text)
1284 edited = (text != cctx._text)
1285
1285
1286 # Save commit message in case this transaction gets rolled back
1286 # Save commit message in case this transaction gets rolled back
1287 # (e.g. by a pretxncommit hook). Leave the content alone on
1287 # (e.g. by a pretxncommit hook). Leave the content alone on
1288 # the assumption that the user will use the same editor again.
1288 # the assumption that the user will use the same editor again.
1289 msgfn = self.savecommitmessage(cctx._text)
1289 msgfn = self.savecommitmessage(cctx._text)
1290
1290
1291 # commit subs and write new state
1291 # commit subs and write new state
1292 if subs:
1292 if subs:
1293 for s in sorted(commitsubs):
1293 for s in sorted(commitsubs):
1294 sub = wctx.sub(s)
1294 sub = wctx.sub(s)
1295 self.ui.status(_('committing subrepository %s\n') %
1295 self.ui.status(_('committing subrepository %s\n') %
1296 subrepo.subrelpath(sub))
1296 subrepo.subrelpath(sub))
1297 sr = sub.commit(cctx._text, user, date)
1297 sr = sub.commit(cctx._text, user, date)
1298 newstate[s] = (newstate[s][0], sr)
1298 newstate[s] = (newstate[s][0], sr)
1299 subrepo.writestate(self, newstate)
1299 subrepo.writestate(self, newstate)
1300
1300
1301 p1, p2 = self.dirstate.parents()
1301 p1, p2 = self.dirstate.parents()
1302 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
1302 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
1303 try:
1303 try:
1304 self.hook("precommit", throw=True, parent1=hookp1,
1304 self.hook("precommit", throw=True, parent1=hookp1,
1305 parent2=hookp2)
1305 parent2=hookp2)
1306 ret = self.commitctx(cctx, True)
1306 ret = self.commitctx(cctx, True)
1307 except: # re-raises
1307 except: # re-raises
1308 if edited:
1308 if edited:
1309 self.ui.write(
1309 self.ui.write(
1310 _('note: commit message saved in %s\n') % msgfn)
1310 _('note: commit message saved in %s\n') % msgfn)
1311 raise
1311 raise
1312
1312
1313 # update bookmarks, dirstate and mergestate
1313 # update bookmarks, dirstate and mergestate
1314 bookmarks.update(self, [p1, p2], ret)
1314 bookmarks.update(self, [p1, p2], ret)
1315 cctx.markcommitted(ret)
1315 cctx.markcommitted(ret)
1316 ms.reset()
1316 ms.reset()
1317 finally:
1317 finally:
1318 wlock.release()
1318 wlock.release()
1319
1319
1320 def commithook(node=hex(ret), parent1=hookp1, parent2=hookp2):
1320 def commithook(node=hex(ret), parent1=hookp1, parent2=hookp2):
1321 self.hook("commit", node=node, parent1=parent1, parent2=parent2)
1321 self.hook("commit", node=node, parent1=parent1, parent2=parent2)
1322 self._afterlock(commithook)
1322 self._afterlock(commithook)
1323 return ret
1323 return ret
1324
1324
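Calling commit() directly might look like the sketch below (assuming README is tracked and modified; the message and user are made up). matchmod.exact() restricts the commit to the named files, the same helper the tagging code above uses.

    from mercurial import ui as uimod, hg, match as matchmod
    repo = hg.repository(uimod.ui(), '.')
    m = matchmod.exact(repo.root, '', ['README'])
    node = repo.commit(text='update README', user='user@example.com', match=m)
    if node is None:
        print 'nothing changed'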
1325 @unfilteredmethod
1325 @unfilteredmethod
1326 def commitctx(self, ctx, error=False):
1326 def commitctx(self, ctx, error=False):
1327 """Add a new revision to current repository.
1327 """Add a new revision to current repository.
1328 Revision information is passed via the context argument.
1328 Revision information is passed via the context argument.
1329 """
1329 """
1330
1330
1331 tr = lock = None
1331 tr = lock = None
1332 removed = list(ctx.removed())
1332 removed = list(ctx.removed())
1333 p1, p2 = ctx.p1(), ctx.p2()
1333 p1, p2 = ctx.p1(), ctx.p2()
1334 user = ctx.user()
1334 user = ctx.user()
1335
1335
1336 lock = self.lock()
1336 lock = self.lock()
1337 try:
1337 try:
1338 tr = self.transaction("commit")
1338 tr = self.transaction("commit")
1339 trp = weakref.proxy(tr)
1339 trp = weakref.proxy(tr)
1340
1340
1341 if ctx.files():
1341 if ctx.files():
1342 m1 = p1.manifest().copy()
1342 m1 = p1.manifest().copy()
1343 m2 = p2.manifest()
1343 m2 = p2.manifest()
1344
1344
1345 # check in files
1345 # check in files
1346 new = {}
1346 new = {}
1347 changed = []
1347 changed = []
1348 linkrev = len(self)
1348 linkrev = len(self)
1349 for f in sorted(ctx.modified() + ctx.added()):
1349 for f in sorted(ctx.modified() + ctx.added()):
1350 self.ui.note(f + "\n")
1350 self.ui.note(f + "\n")
1351 try:
1351 try:
1352 fctx = ctx[f]
1352 fctx = ctx[f]
1353 new[f] = self._filecommit(fctx, m1, m2, linkrev, trp,
1353 new[f] = self._filecommit(fctx, m1, m2, linkrev, trp,
1354 changed)
1354 changed)
1355 m1.set(f, fctx.flags())
1355 m1.set(f, fctx.flags())
1356 except OSError, inst:
1356 except OSError, inst:
1357 self.ui.warn(_("trouble committing %s!\n") % f)
1357 self.ui.warn(_("trouble committing %s!\n") % f)
1358 raise
1358 raise
1359 except IOError, inst:
1359 except IOError, inst:
1360 errcode = getattr(inst, 'errno', errno.ENOENT)
1360 errcode = getattr(inst, 'errno', errno.ENOENT)
1361 if error or errcode and errcode != errno.ENOENT:
1361 if error or errcode and errcode != errno.ENOENT:
1362 self.ui.warn(_("trouble committing %s!\n") % f)
1362 self.ui.warn(_("trouble committing %s!\n") % f)
1363 raise
1363 raise
1364 else:
1364 else:
1365 removed.append(f)
1365 removed.append(f)
1366
1366
1367 # update manifest
1367 # update manifest
1368 m1.update(new)
1368 m1.update(new)
1369 removed = [f for f in sorted(removed) if f in m1 or f in m2]
1369 removed = [f for f in sorted(removed) if f in m1 or f in m2]
1370 drop = [f for f in removed if f in m1]
1370 drop = [f for f in removed if f in m1]
1371 for f in drop:
1371 for f in drop:
1372 del m1[f]
1372 del m1[f]
1373 mn = self.manifest.add(m1, trp, linkrev, p1.manifestnode(),
1373 mn = self.manifest.add(m1, trp, linkrev, p1.manifestnode(),
1374 p2.manifestnode(), (new, drop))
1374 p2.manifestnode(), (new, drop))
1375 files = changed + removed
1375 files = changed + removed
1376 else:
1376 else:
1377 mn = p1.manifestnode()
1377 mn = p1.manifestnode()
1378 files = []
1378 files = []
1379
1379
1380 # update changelog
1380 # update changelog
1381 self.changelog.delayupdate()
1381 self.changelog.delayupdate()
1382 n = self.changelog.add(mn, files, ctx.description(),
1382 n = self.changelog.add(mn, files, ctx.description(),
1383 trp, p1.node(), p2.node(),
1383 trp, p1.node(), p2.node(),
1384 user, ctx.date(), ctx.extra().copy())
1384 user, ctx.date(), ctx.extra().copy())
1385 p = lambda: self.changelog.writepending() and self.root or ""
1385 p = lambda: self.changelog.writepending() and self.root or ""
1386 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
1386 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
1387 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
1387 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
1388 parent2=xp2, pending=p)
1388 parent2=xp2, pending=p)
1389 self.changelog.finalize(trp)
1389 self.changelog.finalize(trp)
1390 # set the new commit in its proper phase
1390 # set the new commit in its proper phase
1391 targetphase = subrepo.newcommitphase(self.ui, ctx)
1391 targetphase = subrepo.newcommitphase(self.ui, ctx)
1392 if targetphase:
1392 if targetphase:
1393 # retracting the boundary does not alter parent changesets.
1393 # retracting the boundary does not alter parent changesets.
1394 # if a parent has a higher phase, the resulting phase will
1394 # if a parent has a higher phase, the resulting phase will
1395 # be compliant anyway
1395 # be compliant anyway
1396 #
1396 #
1397 # if minimal phase was 0 we don't need to retract anything
1397 # if minimal phase was 0 we don't need to retract anything
1398 phases.retractboundary(self, targetphase, [n])
1398 phases.retractboundary(self, targetphase, [n])
1399 tr.close()
1399 tr.close()
1400 branchmap.updatecache(self.filtered('served'))
1400 branchmap.updatecache(self.filtered('served'))
1401 return n
1401 return n
1402 finally:
1402 finally:
1403 if tr:
1403 if tr:
1404 tr.release()
1404 tr.release()
1405 lock.release()
1405 lock.release()
1406
1406
1407 @unfilteredmethod
1407 @unfilteredmethod
1408 def destroying(self):
1408 def destroying(self):
1409 '''Inform the repository that nodes are about to be destroyed.
1409 '''Inform the repository that nodes are about to be destroyed.
1410 Intended for use by strip and rollback, so there's a common
1410 Intended for use by strip and rollback, so there's a common
1411 place for anything that has to be done before destroying history.
1411 place for anything that has to be done before destroying history.
1412
1412
1413 This is mostly useful for saving state that is in memory and waiting
1413 This is mostly useful for saving state that is in memory and waiting
1414 to be flushed when the current lock is released. Because a call to
1414 to be flushed when the current lock is released. Because a call to
1415 destroyed is imminent, the repo will be invalidated causing those
1415 destroyed is imminent, the repo will be invalidated causing those
1416 changes to stay in memory (waiting for the next unlock), or vanish
1416 changes to stay in memory (waiting for the next unlock), or vanish
1417 completely.
1417 completely.
1418 '''
1418 '''
1419 # When using the same lock to commit and strip, the phasecache is left
1419 # When using the same lock to commit and strip, the phasecache is left
1420 # dirty after committing. Then when we strip, the repo is invalidated,
1420 # dirty after committing. Then when we strip, the repo is invalidated,
1421 # causing those changes to disappear.
1421 # causing those changes to disappear.
1422 if '_phasecache' in vars(self):
1422 if '_phasecache' in vars(self):
1423 self._phasecache.write()
1423 self._phasecache.write()
1424
1424
1425 @unfilteredmethod
1425 @unfilteredmethod
1426 def destroyed(self):
1426 def destroyed(self):
1427 '''Inform the repository that nodes have been destroyed.
1427 '''Inform the repository that nodes have been destroyed.
1428 Intended for use by strip and rollback, so there's a common
1428 Intended for use by strip and rollback, so there's a common
1429 place for anything that has to be done after destroying history.
1429 place for anything that has to be done after destroying history.
1430 '''
1430 '''
1431 # When one tries to:
1431 # When one tries to:
1432 # 1) destroy nodes thus calling this method (e.g. strip)
1432 # 1) destroy nodes thus calling this method (e.g. strip)
1433 # 2) use phasecache somewhere (e.g. commit)
1433 # 2) use phasecache somewhere (e.g. commit)
1434 #
1434 #
1435 # then 2) will fail because the phasecache contains nodes that were
1435 # then 2) will fail because the phasecache contains nodes that were
1436 # removed. We can either remove phasecache from the filecache,
1436 # removed. We can either remove phasecache from the filecache,
1437 # causing it to reload next time it is accessed, or simply filter
1437 # causing it to reload next time it is accessed, or simply filter
1438 # the removed nodes now and write the updated cache.
1438 # the removed nodes now and write the updated cache.
1439 self._phasecache.filterunknown(self)
1439 self._phasecache.filterunknown(self)
1440 self._phasecache.write()
1440 self._phasecache.write()
1441
1441
1442 # update the 'served' branch cache to help read-only server processes
1442 # update the 'served' branch cache to help read-only server processes
1443 # Thanks to branchcache collaboration this is done from the nearest
1443 # Thanks to branchcache collaboration this is done from the nearest
1444 # filtered subset, and it is expected to be fast.
1444 # filtered subset, and it is expected to be fast.
1445 branchmap.updatecache(self.filtered('served'))
1445 branchmap.updatecache(self.filtered('served'))
1446
1446
1447 # Ensure the persistent tag cache is updated. Doing it now
1447 # Ensure the persistent tag cache is updated. Doing it now
1448 # means that the tag cache only has to worry about destroyed
1448 # means that the tag cache only has to worry about destroyed
1449 # heads immediately after a strip/rollback. That in turn
1449 # heads immediately after a strip/rollback. That in turn
1450 # guarantees that "cachetip == currenttip" (comparing both rev
1450 # guarantees that "cachetip == currenttip" (comparing both rev
1451 # and node) always means no nodes have been added or destroyed.
1451 # and node) always means no nodes have been added or destroyed.
1452
1452
1453 # XXX this is suboptimal when qrefresh'ing: we strip the current
1453 # XXX this is suboptimal when qrefresh'ing: we strip the current
1454 # head, refresh the tag cache, then immediately add a new head.
1454 # head, refresh the tag cache, then immediately add a new head.
1455 # But I think doing it this way is necessary for the "instant
1455 # But I think doing it this way is necessary for the "instant
1456 # tag cache retrieval" case to work.
1456 # tag cache retrieval" case to work.
1457 self.invalidate()
1457 self.invalidate()
1458
1458
1459 def walk(self, match, node=None):
1459 def walk(self, match, node=None):
1460 '''
1460 '''
1461 walk recursively through the directory tree or a given
1461 walk recursively through the directory tree or a given
1462 changeset, finding all files matched by the match
1462 changeset, finding all files matched by the match
1463 function
1463 function
1464 '''
1464 '''
1465 return self[node].walk(match)
1465 return self[node].walk(match)
1466
1466
1467 def status(self, node1='.', node2=None, match=None,
1467 def status(self, node1='.', node2=None, match=None,
1468 ignored=False, clean=False, unknown=False,
1468 ignored=False, clean=False, unknown=False,
1469 listsubrepos=False):
1469 listsubrepos=False):
1470 """return status of files between two nodes or node and working
1470 """return status of files between two nodes or node and working
1471 directory.
1471 directory.
1472
1472
1473 If node1 is None, use the first dirstate parent instead.
1473 If node1 is None, use the first dirstate parent instead.
1474 If node2 is None, compare node1 with working directory.
1474 If node2 is None, compare node1 with working directory.
1475 """
1475 """
1476
1476
1477 def mfmatches(ctx):
1477 def mfmatches(ctx):
1478 mf = ctx.manifest().copy()
1478 mf = ctx.manifest().copy()
1479 if match.always():
1479 if match.always():
1480 return mf
1480 return mf
1481 for fn in mf.keys():
1481 for fn in mf.keys():
1482 if not match(fn):
1482 if not match(fn):
1483 del mf[fn]
1483 del mf[fn]
1484 return mf
1484 return mf
1485
1485
1486 ctx1 = self[node1]
1486 ctx1 = self[node1]
1487 ctx2 = self[node2]
1487 ctx2 = self[node2]
1488
1488
1489 working = ctx2.rev() is None
1489 working = ctx2.rev() is None
1490 parentworking = working and ctx1 == self['.']
1490 parentworking = working and ctx1 == self['.']
1491 match = match or matchmod.always(self.root, self.getcwd())
1491 match = match or matchmod.always(self.root, self.getcwd())
1492 listignored, listclean, listunknown = ignored, clean, unknown
1492 listignored, listclean, listunknown = ignored, clean, unknown
1493
1493
1494 # load earliest manifest first for caching reasons
1494 # load earliest manifest first for caching reasons
1495 if not working and ctx2.rev() < ctx1.rev():
1495 if not working and ctx2.rev() < ctx1.rev():
1496 ctx2.manifest()
1496 ctx2.manifest()
1497
1497
1498 if not parentworking:
1498 if not parentworking:
1499 def bad(f, msg):
1499 def bad(f, msg):
1500 # 'f' may be a directory pattern from 'match.files()',
1500 # 'f' may be a directory pattern from 'match.files()',
1501 # so 'f not in ctx1' is not enough
1501 # so 'f not in ctx1' is not enough
1502 if f not in ctx1 and f not in ctx1.dirs():
1502 if f not in ctx1 and f not in ctx1.dirs():
1503 self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
1503 self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
1504 match.bad = bad
1504 match.bad = bad
1505
1505
1506 if working: # we need to scan the working dir
1506 if working: # we need to scan the working dir
1507 subrepos = []
1507 subrepos = []
1508 if '.hgsub' in self.dirstate:
1508 if '.hgsub' in self.dirstate:
1509 subrepos = sorted(ctx2.substate)
1509 subrepos = sorted(ctx2.substate)
1510 s = self.dirstate.status(match, subrepos, listignored,
1510 s = self.dirstate.status(match, subrepos, listignored,
1511 listclean, listunknown)
1511 listclean, listunknown)
1512 cmp, modified, added, removed, deleted, unknown, ignored, clean = s
1512 cmp, modified, added, removed, deleted, unknown, ignored, clean = s
1513
1513
1514 # check for any possibly clean files
1514 # check for any possibly clean files
1515 if parentworking and cmp:
1515 if parentworking and cmp:
1516 fixup = []
1516 fixup = []
1517 # do a full compare of any files that might have changed
1517 # do a full compare of any files that might have changed
1518 for f in sorted(cmp):
1518 for f in sorted(cmp):
1519 if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
1519 if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
1520 or ctx1[f].cmp(ctx2[f])):
1520 or ctx1[f].cmp(ctx2[f])):
1521 modified.append(f)
1521 modified.append(f)
1522 else:
1522 else:
1523 fixup.append(f)
1523 fixup.append(f)
1524
1524
1525 # update dirstate for files that are actually clean
1525 # update dirstate for files that are actually clean
1526 if fixup:
1526 if fixup:
1527 if listclean:
1527 if listclean:
1528 clean += fixup
1528 clean += fixup
1529
1529
1530 try:
1530 try:
1531 # updating the dirstate is optional
1531 # updating the dirstate is optional
1532 # so we don't wait on the lock
1532 # so we don't wait on the lock
1533 wlock = self.wlock(False)
1533 wlock = self.wlock(False)
1534 try:
1534 try:
1535 for f in fixup:
1535 for f in fixup:
1536 self.dirstate.normal(f)
1536 self.dirstate.normal(f)
1537 finally:
1537 finally:
1538 wlock.release()
1538 wlock.release()
1539 except error.LockError:
1539 except error.LockError:
1540 pass
1540 pass
1541
1541
1542 if not parentworking:
1542 if not parentworking:
1543 mf1 = mfmatches(ctx1)
1543 mf1 = mfmatches(ctx1)
1544 if working:
1544 if working:
1545 # we are comparing working dir against non-parent
1545 # we are comparing working dir against non-parent
1546 # generate a pseudo-manifest for the working dir
1546 # generate a pseudo-manifest for the working dir
1547 mf2 = mfmatches(self['.'])
1547 mf2 = mfmatches(self['.'])
1548 for f in cmp + modified + added:
1548 for f in cmp + modified + added:
1549 mf2[f] = None
1549 mf2[f] = None
1550 mf2.set(f, ctx2.flags(f))
1550 mf2.set(f, ctx2.flags(f))
1551 for f in removed:
1551 for f in removed:
1552 if f in mf2:
1552 if f in mf2:
1553 del mf2[f]
1553 del mf2[f]
1554 else:
1554 else:
1555 # we are comparing two revisions
1555 # we are comparing two revisions
1556 deleted, unknown, ignored = [], [], []
1556 deleted, unknown, ignored = [], [], []
1557 mf2 = mfmatches(ctx2)
1557 mf2 = mfmatches(ctx2)
1558
1558
1559 modified, added, clean = [], [], []
1559 modified, added, clean = [], [], []
1560 withflags = mf1.withflags() | mf2.withflags()
1560 withflags = mf1.withflags() | mf2.withflags()
1561 for fn, mf2node in mf2.iteritems():
1561 for fn, mf2node in mf2.iteritems():
1562 if fn in mf1:
1562 if fn in mf1:
1563 if (fn not in deleted and
1563 if (fn not in deleted and
1564 ((fn in withflags and mf1.flags(fn) != mf2.flags(fn)) or
1564 ((fn in withflags and mf1.flags(fn) != mf2.flags(fn)) or
1565 (mf1[fn] != mf2node and
1565 (mf1[fn] != mf2node and
1566 (mf2node or ctx1[fn].cmp(ctx2[fn]))))):
1566 (mf2node or ctx1[fn].cmp(ctx2[fn]))))):
1567 modified.append(fn)
1567 modified.append(fn)
1568 elif listclean:
1568 elif listclean:
1569 clean.append(fn)
1569 clean.append(fn)
1570 del mf1[fn]
1570 del mf1[fn]
1571 elif fn not in deleted:
1571 elif fn not in deleted:
1572 added.append(fn)
1572 added.append(fn)
1573 removed = mf1.keys()
1573 removed = mf1.keys()
1574
1574
1575 if working and modified and not self.dirstate._checklink:
1575 if working and modified and not self.dirstate._checklink:
1576 # Symlink placeholders may get non-symlink-like contents
1576 # Symlink placeholders may get non-symlink-like contents
1577 # via user error or dereferencing by NFS or Samba servers,
1577 # via user error or dereferencing by NFS or Samba servers,
1578 # so we filter out any placeholders that don't look like a
1578 # so we filter out any placeholders that don't look like a
1579 # symlink
1579 # symlink
1580 sane = []
1580 sane = []
1581 for f in modified:
1581 for f in modified:
1582 if ctx2.flags(f) == 'l':
1582 if ctx2.flags(f) == 'l':
1583 d = ctx2[f].data()
1583 d = ctx2[f].data()
1584 if d == '' or len(d) >= 1024 or '\n' in d or util.binary(d):
1584 if d == '' or len(d) >= 1024 or '\n' in d or util.binary(d):
1585 self.ui.debug('ignoring suspect symlink placeholder'
1585 self.ui.debug('ignoring suspect symlink placeholder'
1586 ' "%s"\n' % f)
1586 ' "%s"\n' % f)
1587 continue
1587 continue
1588 sane.append(f)
1588 sane.append(f)
1589 modified = sane
1589 modified = sane
1590
1590
1591 r = modified, added, removed, deleted, unknown, ignored, clean
1591 r = modified, added, removed, deleted, unknown, ignored, clean
1592
1592
1593 if listsubrepos:
1593 if listsubrepos:
1594 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
1594 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
1595 if working:
1595 if working:
1596 rev2 = None
1596 rev2 = None
1597 else:
1597 else:
1598 rev2 = ctx2.substate[subpath][1]
1598 rev2 = ctx2.substate[subpath][1]
1599 try:
1599 try:
1600 submatch = matchmod.narrowmatcher(subpath, match)
1600 submatch = matchmod.narrowmatcher(subpath, match)
1601 s = sub.status(rev2, match=submatch, ignored=listignored,
1601 s = sub.status(rev2, match=submatch, ignored=listignored,
1602 clean=listclean, unknown=listunknown,
1602 clean=listclean, unknown=listunknown,
1603 listsubrepos=True)
1603 listsubrepos=True)
1604 for rfiles, sfiles in zip(r, s):
1604 for rfiles, sfiles in zip(r, s):
1605 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
1605 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
1606 except error.LookupError:
1606 except error.LookupError:
1607 self.ui.status(_("skipping missing subrepository: %s\n")
1607 self.ui.status(_("skipping missing subrepository: %s\n")
1608 % subpath)
1608 % subpath)
1609
1609
1610 for l in r:
1610 for l in r:
1611 l.sort()
1611 l.sort()
1612 return r
1612 return r
1613
1613
1614 def heads(self, start=None):
1614 def heads(self, start=None):
1615 heads = self.changelog.heads(start)
1615 heads = self.changelog.heads(start)
1616 # sort the output in rev descending order
1616 # sort the output in rev descending order
1617 return sorted(heads, key=self.changelog.rev, reverse=True)
1617 return sorted(heads, key=self.changelog.rev, reverse=True)
1618
1618
1619 def branchheads(self, branch=None, start=None, closed=False):
1619 def branchheads(self, branch=None, start=None, closed=False):
1620 '''return a (possibly filtered) list of heads for the given branch
1620 '''return a (possibly filtered) list of heads for the given branch
1621
1621
1622 Heads are returned in topological order, from newest to oldest.
1622 Heads are returned in topological order, from newest to oldest.
1623 If branch is None, use the dirstate branch.
1623 If branch is None, use the dirstate branch.
1624 If start is not None, return only heads reachable from start.
1624 If start is not None, return only heads reachable from start.
1625 If closed is True, return heads that are marked as closed as well.
1625 If closed is True, return heads that are marked as closed as well.
1626 '''
1626 '''
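# a hedged usage sketch (illustrative only): repo.branchheads('default')
# yields the heads of the 'default' branch newest-first with closed heads
# filtered out, while closed=True keeps heads that were marked as closed.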
1627 if branch is None:
1627 if branch is None:
1628 branch = self[None].branch()
1628 branch = self[None].branch()
1629 branches = self.branchmap()
1629 branches = self.branchmap()
1630 if branch not in branches:
1630 if branch not in branches:
1631 return []
1631 return []
1632 # the cache returns heads ordered lowest to highest
1632 # the cache returns heads ordered lowest to highest
1633 bheads = list(reversed(branches.branchheads(branch, closed=closed)))
1633 bheads = list(reversed(branches.branchheads(branch, closed=closed)))
1634 if start is not None:
1634 if start is not None:
1635 # filter out the heads that cannot be reached from startrev
1635 # filter out the heads that cannot be reached from startrev
1636 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1636 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1637 bheads = [h for h in bheads if h in fbheads]
1637 bheads = [h for h in bheads if h in fbheads]
1638 return bheads
1638 return bheads
1639
1639
1640 def branches(self, nodes):
1640 def branches(self, nodes):
1641 if not nodes:
1641 if not nodes:
1642 nodes = [self.changelog.tip()]
1642 nodes = [self.changelog.tip()]
1643 b = []
1643 b = []
1644 for n in nodes:
1644 for n in nodes:
1645 t = n
1645 t = n
1646 while True:
1646 while True:
1647 p = self.changelog.parents(n)
1647 p = self.changelog.parents(n)
1648 if p[1] != nullid or p[0] == nullid:
1648 if p[1] != nullid or p[0] == nullid:
1649 b.append((t, n, p[0], p[1]))
1649 b.append((t, n, p[0], p[1]))
1650 break
1650 break
1651 n = p[0]
1651 n = p[0]
1652 return b
1652 return b
1653
1653
1654 def between(self, pairs):
1654 def between(self, pairs):
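# how the loop below samples ancestry (a description of the code, not a
# protocol spec): starting at 'top' it follows first parents towards
# 'bottom', appending the nodes that lie 1, 2, 4, 8, ... steps away from
# 'top' (append when i == f, then double f), so long ancestor chains are
# summarised by exponentially spaced waypoints.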
1655 r = []
1655 r = []
1656
1656
1657 for top, bottom in pairs:
1657 for top, bottom in pairs:
1658 n, l, i = top, [], 0
1658 n, l, i = top, [], 0
1659 f = 1
1659 f = 1
1660
1660
1661 while n != bottom and n != nullid:
1661 while n != bottom and n != nullid:
1662 p = self.changelog.parents(n)[0]
1662 p = self.changelog.parents(n)[0]
1663 if i == f:
1663 if i == f:
1664 l.append(n)
1664 l.append(n)
1665 f = f * 2
1665 f = f * 2
1666 n = p
1666 n = p
1667 i += 1
1667 i += 1
1668
1668
1669 r.append(l)
1669 r.append(l)
1670
1670
1671 return r
1671 return r
1672
1672
1673 def pull(self, remote, heads=None, force=False):
1673 def pull(self, remote, heads=None, force=False):
1674 return exchange.pull(self, remote, heads, force)
1674 return exchange.pull(self, remote, heads, force)
1675
1675
1676 def checkpush(self, pushop):
1676 def checkpush(self, pushop):
1677 """Extensions can override this function if additional checks have
1677 """Extensions can override this function if additional checks have
1678 to be performed before pushing, or call it if they override the push
1678 to be performed before pushing, or call it if they override the push
1679 command.
1679 command.
1680 """
1680 """
1681 pass
1681 pass
1682
1682
1683 def push(self, remote, force=False, revs=None, newbranch=False):
1683 def push(self, remote, force=False, revs=None, newbranch=False):
1684 return exchange.push(self, remote, force, revs, newbranch)
1684 return exchange.push(self, remote, force, revs, newbranch)
1685
1685
1686 def getlocalbundle(self, source, outgoing, bundlecaps=None):
1687 """Like getbundle, but taking a discovery.outgoing as an argument.
1688
1689 This is only implemented for local repos and reuses potentially
1690 precomputed sets in outgoing."""
1691 if not outgoing.missing:
1692 return None
1693 bundler = changegroup.bundle10(self, bundlecaps)
1694 return changegroup.getsubset(self, outgoing, bundler, source)
1695
1696 def getbundle(self, source, heads=None, common=None, bundlecaps=None):
1686 def getbundle(self, source, heads=None, common=None, bundlecaps=None):
1697 """Like changegroupsubset, but returns the set difference between the
1687 """Like changegroupsubset, but returns the set difference between the
1698 ancestors of heads and the ancestors of common.
1688 ancestors of heads and the ancestors of common.
1699
1689
1700 If heads is None, use the local heads. If common is None, use [nullid].
1690 If heads is None, use the local heads. If common is None, use [nullid].
1701
1691
1702 The nodes in common might not all be known locally due to the way the
1692 The nodes in common might not all be known locally due to the way the
1703 current discovery protocol works.
1693 current discovery protocol works.
1704 """
1694 """
1705 cl = self.changelog
1695 cl = self.changelog
1706 if common:
1696 if common:
1707 hasnode = cl.hasnode
1697 hasnode = cl.hasnode
1708 common = [n for n in common if hasnode(n)]
1698 common = [n for n in common if hasnode(n)]
1709 else:
1699 else:
1710 common = [nullid]
1700 common = [nullid]
1711 if not heads:
1701 if not heads:
1712 heads = cl.heads()
1702 heads = cl.heads()
1713 return self.getlocalbundle(source,
1703 outgoing = discovery.outgoing(cl, common, heads)
1714 discovery.outgoing(cl, common, heads),
1704 return changegroup.getlocalbundle(self, source, outgoing,
1715 bundlecaps=bundlecaps)
1705 bundlecaps=bundlecaps)
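# note on the change above: the local getlocalbundle helper (removed a few
# lines up) now lives in the changegroup module, so getbundle only builds the
# discovery.outgoing set and delegates to
# changegroup.getlocalbundle(repo, source, outgoing, bundlecaps=...).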
1716
1706
1717 def changegroup(self, basenodes, source):
1707 def changegroup(self, basenodes, source):
1718 # to avoid a race we use changegroupsubset() (issue1320)
1708 # to avoid a race we use changegroupsubset() (issue1320)
1719 return changegroup.changegroupsubset(self, basenodes, self.heads(),
1709 return changegroup.changegroupsubset(self, basenodes, self.heads(),
1720 source)
1710 source)
1721
1711
1722 @unfilteredmethod
1712 @unfilteredmethod
1723 def addchangegroup(self, source, srctype, url, emptyok=False):
1713 def addchangegroup(self, source, srctype, url, emptyok=False):
1724 """Add the changegroup returned by source.read() to this repo.
1714 """Add the changegroup returned by source.read() to this repo.
1725 srctype is a string like 'push', 'pull', or 'unbundle'. url is
1715 srctype is a string like 'push', 'pull', or 'unbundle'. url is
1726 the URL of the repo where this changegroup is coming from.
1716 the URL of the repo where this changegroup is coming from.
1727
1717
1728 Return an integer summarizing the change to this repo:
1718 Return an integer summarizing the change to this repo:
1729 - nothing changed or no source: 0
1719 - nothing changed or no source: 0
1730 - more heads than before: 1+added heads (2..n)
1720 - more heads than before: 1+added heads (2..n)
1731 - fewer heads than before: -1-removed heads (-2..-n)
1721 - fewer heads than before: -1-removed heads (-2..-n)
1732 - number of heads stays the same: 1
1722 - number of heads stays the same: 1
1733 """
1723 """
1734 def csmap(x):
1724 def csmap(x):
1735 self.ui.debug("add changeset %s\n" % short(x))
1725 self.ui.debug("add changeset %s\n" % short(x))
1736 return len(cl)
1726 return len(cl)
1737
1727
1738 def revmap(x):
1728 def revmap(x):
1739 return cl.rev(x)
1729 return cl.rev(x)
1740
1730
1741 if not source:
1731 if not source:
1742 return 0
1732 return 0
1743
1733
1744 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1734 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1745
1735
1746 changesets = files = revisions = 0
1736 changesets = files = revisions = 0
1747 efiles = set()
1737 efiles = set()
1748
1738
1749 # write changelog data to temp files so concurrent readers will not see
1739 # write changelog data to temp files so concurrent readers will not see
1750 # an inconsistent view
1740 # an inconsistent view
1751 cl = self.changelog
1741 cl = self.changelog
1752 cl.delayupdate()
1742 cl.delayupdate()
1753 oldheads = cl.heads()
1743 oldheads = cl.heads()
1754
1744
1755 tr = self.transaction("\n".join([srctype, util.hidepassword(url)]))
1745 tr = self.transaction("\n".join([srctype, util.hidepassword(url)]))
1756 try:
1746 try:
1757 trp = weakref.proxy(tr)
1747 trp = weakref.proxy(tr)
1758 # pull off the changeset group
1748 # pull off the changeset group
1759 self.ui.status(_("adding changesets\n"))
1749 self.ui.status(_("adding changesets\n"))
1760 clstart = len(cl)
1750 clstart = len(cl)
1761 class prog(object):
1751 class prog(object):
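# descriptive note: prog is a small progress callback handed to the bundle
# source (source.callback = pr below); each invocation reports the current
# chunk count through ui.progress and then bumps it by one.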
1762 step = _('changesets')
1752 step = _('changesets')
1763 count = 1
1753 count = 1
1764 ui = self.ui
1754 ui = self.ui
1765 total = None
1755 total = None
1766 def __call__(self):
1756 def __call__(self):
1767 self.ui.progress(self.step, self.count, unit=_('chunks'),
1757 self.ui.progress(self.step, self.count, unit=_('chunks'),
1768 total=self.total)
1758 total=self.total)
1769 self.count += 1
1759 self.count += 1
1770 pr = prog()
1760 pr = prog()
1771 source.callback = pr
1761 source.callback = pr
1772
1762
1773 source.changelogheader()
1763 source.changelogheader()
1774 srccontent = cl.addgroup(source, csmap, trp)
1764 srccontent = cl.addgroup(source, csmap, trp)
1775 if not (srccontent or emptyok):
1765 if not (srccontent or emptyok):
1776 raise util.Abort(_("received changelog group is empty"))
1766 raise util.Abort(_("received changelog group is empty"))
1777 clend = len(cl)
1767 clend = len(cl)
1778 changesets = clend - clstart
1768 changesets = clend - clstart
1779 for c in xrange(clstart, clend):
1769 for c in xrange(clstart, clend):
1780 efiles.update(self[c].files())
1770 efiles.update(self[c].files())
1781 efiles = len(efiles)
1771 efiles = len(efiles)
1782 self.ui.progress(_('changesets'), None)
1772 self.ui.progress(_('changesets'), None)
1783
1773
1784 # pull off the manifest group
1774 # pull off the manifest group
1785 self.ui.status(_("adding manifests\n"))
1775 self.ui.status(_("adding manifests\n"))
1786 pr.step = _('manifests')
1776 pr.step = _('manifests')
1787 pr.count = 1
1777 pr.count = 1
1788 pr.total = changesets # manifests <= changesets
1778 pr.total = changesets # manifests <= changesets
1789 # no need to check for empty manifest group here:
1779 # no need to check for empty manifest group here:
1790 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1780 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1791 # no new manifest will be created and the manifest group will
1781 # no new manifest will be created and the manifest group will
1792 # be empty during the pull
1782 # be empty during the pull
1793 source.manifestheader()
1783 source.manifestheader()
1794 self.manifest.addgroup(source, revmap, trp)
1784 self.manifest.addgroup(source, revmap, trp)
1795 self.ui.progress(_('manifests'), None)
1785 self.ui.progress(_('manifests'), None)
1796
1786
1797 needfiles = {}
1787 needfiles = {}
1798 if self.ui.configbool('server', 'validate', default=False):
1788 if self.ui.configbool('server', 'validate', default=False):
1799 # validate incoming csets have their manifests
1789 # validate incoming csets have their manifests
1800 for cset in xrange(clstart, clend):
1790 for cset in xrange(clstart, clend):
1801 mfest = self.changelog.read(self.changelog.node(cset))[0]
1791 mfest = self.changelog.read(self.changelog.node(cset))[0]
1802 mfest = self.manifest.readdelta(mfest)
1792 mfest = self.manifest.readdelta(mfest)
1803 # store file nodes we must see
1793 # store file nodes we must see
1804 for f, n in mfest.iteritems():
1794 for f, n in mfest.iteritems():
1805 needfiles.setdefault(f, set()).add(n)
1795 needfiles.setdefault(f, set()).add(n)
1806
1796
1807 # process the files
1797 # process the files
1808 self.ui.status(_("adding file changes\n"))
1798 self.ui.status(_("adding file changes\n"))
1809 pr.step = _('files')
1799 pr.step = _('files')
1810 pr.count = 1
1800 pr.count = 1
1811 pr.total = efiles
1801 pr.total = efiles
1812 source.callback = None
1802 source.callback = None
1813
1803
1814 newrevs, newfiles = self.addchangegroupfiles(source, revmap, trp,
1804 newrevs, newfiles = self.addchangegroupfiles(source, revmap, trp,
1815 pr, needfiles)
1805 pr, needfiles)
1816 revisions += newrevs
1806 revisions += newrevs
1817 files += newfiles
1807 files += newfiles
1818
1808
1819 dh = 0
1809 dh = 0
1820 if oldheads:
1810 if oldheads:
1821 heads = cl.heads()
1811 heads = cl.heads()
1822 dh = len(heads) - len(oldheads)
1812 dh = len(heads) - len(oldheads)
1823 for h in heads:
1813 for h in heads:
1824 if h not in oldheads and self[h].closesbranch():
1814 if h not in oldheads and self[h].closesbranch():
1825 dh -= 1
1815 dh -= 1
1826 htext = ""
1816 htext = ""
1827 if dh:
1817 if dh:
1828 htext = _(" (%+d heads)") % dh
1818 htext = _(" (%+d heads)") % dh
1829
1819
1830 self.ui.status(_("added %d changesets"
1820 self.ui.status(_("added %d changesets"
1831 " with %d changes to %d files%s\n")
1821 " with %d changes to %d files%s\n")
1832 % (changesets, revisions, files, htext))
1822 % (changesets, revisions, files, htext))
1833 self.invalidatevolatilesets()
1823 self.invalidatevolatilesets()
1834
1824
1835 if changesets > 0:
1825 if changesets > 0:
1836 p = lambda: cl.writepending() and self.root or ""
1826 p = lambda: cl.writepending() and self.root or ""
1837 self.hook('pretxnchangegroup', throw=True,
1827 self.hook('pretxnchangegroup', throw=True,
1838 node=hex(cl.node(clstart)), source=srctype,
1828 node=hex(cl.node(clstart)), source=srctype,
1839 url=url, pending=p)
1829 url=url, pending=p)
1840
1830
1841 added = [cl.node(r) for r in xrange(clstart, clend)]
1831 added = [cl.node(r) for r in xrange(clstart, clend)]
1842 publishing = self.ui.configbool('phases', 'publish', True)
1832 publishing = self.ui.configbool('phases', 'publish', True)
1843 if srctype == 'push':
1833 if srctype == 'push':
1844 # Old servers cannot push the boundary themselves.
1834 # Old servers cannot push the boundary themselves.
1845 # New servers won't push the boundary if the changeset already
1835 # New servers won't push the boundary if the changeset already
1846 # exists locally as secret
1836 # exists locally as secret
1847 #
1837 #
1848 # We should not use 'added' here but the list of all changes in
1838 # We should not use 'added' here but the list of all changes in
1849 # the bundle
1839 # the bundle
1850 if publishing:
1840 if publishing:
1851 phases.advanceboundary(self, phases.public, srccontent)
1841 phases.advanceboundary(self, phases.public, srccontent)
1852 else:
1842 else:
1853 phases.advanceboundary(self, phases.draft, srccontent)
1843 phases.advanceboundary(self, phases.draft, srccontent)
1854 phases.retractboundary(self, phases.draft, added)
1844 phases.retractboundary(self, phases.draft, added)
1855 elif srctype != 'strip':
1845 elif srctype != 'strip':
1856 # publishing only alters behavior during push
1846 # publishing only alters behavior during push
1857 #
1847 #
1858 # strip should not touch boundary at all
1848 # strip should not touch boundary at all
1859 phases.retractboundary(self, phases.draft, added)
1849 phases.retractboundary(self, phases.draft, added)
1860
1850
1861 # make changelog see real files again
1851 # make changelog see real files again
1862 cl.finalize(trp)
1852 cl.finalize(trp)
1863
1853
1864 tr.close()
1854 tr.close()
1865
1855
1866 if changesets > 0:
1856 if changesets > 0:
1867 if srctype != 'strip':
1857 if srctype != 'strip':
1868 # During strip, the branchcache is invalid but the coming call to
1858 # During strip, the branchcache is invalid but the coming call to
1869 # `destroyed` will repair it.
1859 # `destroyed` will repair it.
1870 # In other cases we can safely update the cache on disk.
1860 # In other cases we can safely update the cache on disk.
1871 branchmap.updatecache(self.filtered('served'))
1861 branchmap.updatecache(self.filtered('served'))
1872 def runhooks():
1862 def runhooks():
1873 # These hooks run when the lock releases, not when the
1863 # These hooks run when the lock releases, not when the
1874 # transaction closes. So it's possible for the changelog
1864 # transaction closes. So it's possible for the changelog
1875 # to have changed since we last saw it.
1865 # to have changed since we last saw it.
1876 if clstart >= len(self):
1866 if clstart >= len(self):
1877 return
1867 return
1878
1868
1879 # forcefully update the on-disk branch cache
1869 # forcefully update the on-disk branch cache
1880 self.ui.debug("updating the branch cache\n")
1870 self.ui.debug("updating the branch cache\n")
1881 self.hook("changegroup", node=hex(cl.node(clstart)),
1871 self.hook("changegroup", node=hex(cl.node(clstart)),
1882 source=srctype, url=url)
1872 source=srctype, url=url)
1883
1873
1884 for n in added:
1874 for n in added:
1885 self.hook("incoming", node=hex(n), source=srctype,
1875 self.hook("incoming", node=hex(n), source=srctype,
1886 url=url)
1876 url=url)
1887
1877
1888 newheads = [h for h in self.heads() if h not in oldheads]
1878 newheads = [h for h in self.heads() if h not in oldheads]
1889 self.ui.log("incoming",
1879 self.ui.log("incoming",
1890 "%s incoming changes - new heads: %s\n",
1880 "%s incoming changes - new heads: %s\n",
1891 len(added),
1881 len(added),
1892 ', '.join([hex(c[:6]) for c in newheads]))
1882 ', '.join([hex(c[:6]) for c in newheads]))
1893 self._afterlock(runhooks)
1883 self._afterlock(runhooks)
1894
1884
1895 finally:
1885 finally:
1896 tr.release()
1886 tr.release()
1897 # never return 0 here:
1887 # never return 0 here:
1898 if dh < 0:
1888 if dh < 0:
1899 return dh - 1
1889 return dh - 1
1900 else:
1890 else:
1901 return dh + 1
1891 return dh + 1
1902
1892
1903 def addchangegroupfiles(self, source, revmap, trp, pr, needfiles):
1893 def addchangegroupfiles(self, source, revmap, trp, pr, needfiles):
1904 revisions = 0
1894 revisions = 0
1905 files = 0
1895 files = 0
1906 while True:
1896 while True:
1907 chunkdata = source.filelogheader()
1897 chunkdata = source.filelogheader()
1908 if not chunkdata:
1898 if not chunkdata:
1909 break
1899 break
1910 f = chunkdata["filename"]
1900 f = chunkdata["filename"]
1911 self.ui.debug("adding %s revisions\n" % f)
1901 self.ui.debug("adding %s revisions\n" % f)
1912 pr()
1902 pr()
1913 fl = self.file(f)
1903 fl = self.file(f)
1914 o = len(fl)
1904 o = len(fl)
1915 if not fl.addgroup(source, revmap, trp):
1905 if not fl.addgroup(source, revmap, trp):
1916 raise util.Abort(_("received file revlog group is empty"))
1906 raise util.Abort(_("received file revlog group is empty"))
1917 revisions += len(fl) - o
1907 revisions += len(fl) - o
1918 files += 1
1908 files += 1
1919 if f in needfiles:
1909 if f in needfiles:
1920 needs = needfiles[f]
1910 needs = needfiles[f]
1921 for new in xrange(o, len(fl)):
1911 for new in xrange(o, len(fl)):
1922 n = fl.node(new)
1912 n = fl.node(new)
1923 if n in needs:
1913 if n in needs:
1924 needs.remove(n)
1914 needs.remove(n)
1925 else:
1915 else:
1926 raise util.Abort(
1916 raise util.Abort(
1927 _("received spurious file revlog entry"))
1917 _("received spurious file revlog entry"))
1928 if not needs:
1918 if not needs:
1929 del needfiles[f]
1919 del needfiles[f]
1930 self.ui.progress(_('files'), None)
1920 self.ui.progress(_('files'), None)
1931
1921
1932 for f, needs in needfiles.iteritems():
1922 for f, needs in needfiles.iteritems():
1933 fl = self.file(f)
1923 fl = self.file(f)
1934 for n in needs:
1924 for n in needs:
1935 try:
1925 try:
1936 fl.rev(n)
1926 fl.rev(n)
1937 except error.LookupError:
1927 except error.LookupError:
1938 raise util.Abort(
1928 raise util.Abort(
1939 _('missing file data for %s:%s - run hg verify') %
1929 _('missing file data for %s:%s - run hg verify') %
1940 (f, hex(n)))
1930 (f, hex(n)))
1941
1931
1942 return revisions, files
1932 return revisions, files
1943
1933
1944 def stream_in(self, remote, requirements):
1934 def stream_in(self, remote, requirements):
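# the stream format consumed below, as read off this code (not a protocol
# spec): one status line (0 = ok, 1 = operation forbidden, 2 = remote lock
# failed), then a "<total_files> <total_bytes>" line, then for each file a
# "<name>\0<size>" line followed by exactly <size> bytes of file data
# written straight into the store.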
1945 lock = self.lock()
1935 lock = self.lock()
1946 try:
1936 try:
1947 # Save remote branchmap. We will use it later
1937 # Save remote branchmap. We will use it later
1948 # to speed up branchcache creation
1938 # to speed up branchcache creation
1949 rbranchmap = None
1939 rbranchmap = None
1950 if remote.capable("branchmap"):
1940 if remote.capable("branchmap"):
1951 rbranchmap = remote.branchmap()
1941 rbranchmap = remote.branchmap()
1952
1942
1953 fp = remote.stream_out()
1943 fp = remote.stream_out()
1954 l = fp.readline()
1944 l = fp.readline()
1955 try:
1945 try:
1956 resp = int(l)
1946 resp = int(l)
1957 except ValueError:
1947 except ValueError:
1958 raise error.ResponseError(
1948 raise error.ResponseError(
1959 _('unexpected response from remote server:'), l)
1949 _('unexpected response from remote server:'), l)
1960 if resp == 1:
1950 if resp == 1:
1961 raise util.Abort(_('operation forbidden by server'))
1951 raise util.Abort(_('operation forbidden by server'))
1962 elif resp == 2:
1952 elif resp == 2:
1963 raise util.Abort(_('locking the remote repository failed'))
1953 raise util.Abort(_('locking the remote repository failed'))
1964 elif resp != 0:
1954 elif resp != 0:
1965 raise util.Abort(_('the server sent an unknown error code'))
1955 raise util.Abort(_('the server sent an unknown error code'))
1966 self.ui.status(_('streaming all changes\n'))
1956 self.ui.status(_('streaming all changes\n'))
1967 l = fp.readline()
1957 l = fp.readline()
1968 try:
1958 try:
1969 total_files, total_bytes = map(int, l.split(' ', 1))
1959 total_files, total_bytes = map(int, l.split(' ', 1))
1970 except (ValueError, TypeError):
1960 except (ValueError, TypeError):
1971 raise error.ResponseError(
1961 raise error.ResponseError(
1972 _('unexpected response from remote server:'), l)
1962 _('unexpected response from remote server:'), l)
1973 self.ui.status(_('%d files to transfer, %s of data\n') %
1963 self.ui.status(_('%d files to transfer, %s of data\n') %
1974 (total_files, util.bytecount(total_bytes)))
1964 (total_files, util.bytecount(total_bytes)))
1975 handled_bytes = 0
1965 handled_bytes = 0
1976 self.ui.progress(_('clone'), 0, total=total_bytes)
1966 self.ui.progress(_('clone'), 0, total=total_bytes)
1977 start = time.time()
1967 start = time.time()
1978
1968
1979 tr = self.transaction(_('clone'))
1969 tr = self.transaction(_('clone'))
1980 try:
1970 try:
1981 for i in xrange(total_files):
1971 for i in xrange(total_files):
1982 # XXX doesn't support '\n' or '\r' in filenames
1972 # XXX doesn't support '\n' or '\r' in filenames
1983 l = fp.readline()
1973 l = fp.readline()
1984 try:
1974 try:
1985 name, size = l.split('\0', 1)
1975 name, size = l.split('\0', 1)
1986 size = int(size)
1976 size = int(size)
1987 except (ValueError, TypeError):
1977 except (ValueError, TypeError):
1988 raise error.ResponseError(
1978 raise error.ResponseError(
1989 _('unexpected response from remote server:'), l)
1979 _('unexpected response from remote server:'), l)
1990 if self.ui.debugflag:
1980 if self.ui.debugflag:
1991 self.ui.debug('adding %s (%s)\n' %
1981 self.ui.debug('adding %s (%s)\n' %
1992 (name, util.bytecount(size)))
1982 (name, util.bytecount(size)))
1993 # for backwards compat, name was partially encoded
1983 # for backwards compat, name was partially encoded
1994 ofp = self.sopener(store.decodedir(name), 'w')
1984 ofp = self.sopener(store.decodedir(name), 'w')
1995 for chunk in util.filechunkiter(fp, limit=size):
1985 for chunk in util.filechunkiter(fp, limit=size):
1996 handled_bytes += len(chunk)
1986 handled_bytes += len(chunk)
1997 self.ui.progress(_('clone'), handled_bytes,
1987 self.ui.progress(_('clone'), handled_bytes,
1998 total=total_bytes)
1988 total=total_bytes)
1999 ofp.write(chunk)
1989 ofp.write(chunk)
2000 ofp.close()
1990 ofp.close()
2001 tr.close()
1991 tr.close()
2002 finally:
1992 finally:
2003 tr.release()
1993 tr.release()
2004
1994
2005 # Writing straight to files circumvented the in-memory caches
1995 # Writing straight to files circumvented the in-memory caches
2006 self.invalidate()
1996 self.invalidate()
2007
1997
2008 elapsed = time.time() - start
1998 elapsed = time.time() - start
2009 if elapsed <= 0:
1999 if elapsed <= 0:
2010 elapsed = 0.001
2000 elapsed = 0.001
2011 self.ui.progress(_('clone'), None)
2001 self.ui.progress(_('clone'), None)
2012 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2002 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2013 (util.bytecount(total_bytes), elapsed,
2003 (util.bytecount(total_bytes), elapsed,
2014 util.bytecount(total_bytes / elapsed)))
2004 util.bytecount(total_bytes / elapsed)))
2015
2005
2016 # new requirements = old non-format requirements +
2006 # new requirements = old non-format requirements +
2017 # new format-related
2007 # new format-related
2018 # requirements from the streamed-in repository
2008 # requirements from the streamed-in repository
2019 requirements.update(set(self.requirements) - self.supportedformats)
2009 requirements.update(set(self.requirements) - self.supportedformats)
2020 self._applyrequirements(requirements)
2010 self._applyrequirements(requirements)
2021 self._writerequirements()
2011 self._writerequirements()
2022
2012
2023 if rbranchmap:
2013 if rbranchmap:
2024 rbheads = []
2014 rbheads = []
2025 for bheads in rbranchmap.itervalues():
2015 for bheads in rbranchmap.itervalues():
2026 rbheads.extend(bheads)
2016 rbheads.extend(bheads)
2027
2017
2028 if rbheads:
2018 if rbheads:
2029 rtiprev = max((int(self.changelog.rev(node))
2019 rtiprev = max((int(self.changelog.rev(node))
2030 for node in rbheads))
2020 for node in rbheads))
2031 cache = branchmap.branchcache(rbranchmap,
2021 cache = branchmap.branchcache(rbranchmap,
2032 self[rtiprev].node(),
2022 self[rtiprev].node(),
2033 rtiprev)
2023 rtiprev)
2034 # Try to stick it as low as possible
2024 # Try to stick it as low as possible
2035 # filters above 'served' are unlikely to be fetched from a clone
2025 # filters above 'served' are unlikely to be fetched from a clone
2036 for candidate in ('base', 'immutable', 'served'):
2026 for candidate in ('base', 'immutable', 'served'):
2037 rview = self.filtered(candidate)
2027 rview = self.filtered(candidate)
2038 if cache.validfor(rview):
2028 if cache.validfor(rview):
2039 self._branchcaches[candidate] = cache
2029 self._branchcaches[candidate] = cache
2040 cache.write(rview)
2030 cache.write(rview)
2041 break
2031 break
2042 self.invalidate()
2032 self.invalidate()
2043 return len(self.heads()) + 1
2033 return len(self.heads()) + 1
2044 finally:
2034 finally:
2045 lock.release()
2035 lock.release()
2046
2036
2047 def clone(self, remote, heads=[], stream=False):
2037 def clone(self, remote, heads=[], stream=False):
2048 '''clone remote repository.
2038 '''clone remote repository.
2049
2039
2050 keyword arguments:
2040 keyword arguments:
2051 heads: list of revs to clone (forces use of pull)
2041 heads: list of revs to clone (forces use of pull)
2052 stream: use streaming clone if possible'''
2042 stream: use streaming clone if possible'''
2053
2043
2054 # now, all clients that can request uncompressed clones can
2044 # now, all clients that can request uncompressed clones can
2055 # read repo formats supported by all servers that can serve
2045 # read repo formats supported by all servers that can serve
2056 # them.
2046 # them.
2057
2047
2058 # if revlog format changes, client will have to check version
2048 # if revlog format changes, client will have to check version
2059 # and format flags on "stream" capability, and use
2049 # and format flags on "stream" capability, and use
2060 # uncompressed only if compatible.
2050 # uncompressed only if compatible.
2061
2051
2062 if not stream:
2052 if not stream:
2063 # if the server explicitly prefers to stream (for fast LANs)
2053 # if the server explicitly prefers to stream (for fast LANs)
2064 stream = remote.capable('stream-preferred')
2054 stream = remote.capable('stream-preferred')
2065
2055
2066 if stream and not heads:
2056 if stream and not heads:
2067 # 'stream' means remote revlog format is revlogv1 only
2057 # 'stream' means remote revlog format is revlogv1 only
2068 if remote.capable('stream'):
2058 if remote.capable('stream'):
2069 return self.stream_in(remote, set(('revlogv1',)))
2059 return self.stream_in(remote, set(('revlogv1',)))
2070 # otherwise, 'streamreqs' contains the remote revlog format
2060 # otherwise, 'streamreqs' contains the remote revlog format
2071 streamreqs = remote.capable('streamreqs')
2061 streamreqs = remote.capable('streamreqs')
2072 if streamreqs:
2062 if streamreqs:
2073 streamreqs = set(streamreqs.split(','))
2063 streamreqs = set(streamreqs.split(','))
2074 # if we support it, stream in and adjust our requirements
2064 # if we support it, stream in and adjust our requirements
2075 if not streamreqs - self.supportedformats:
2065 if not streamreqs - self.supportedformats:
2076 return self.stream_in(remote, streamreqs)
2066 return self.stream_in(remote, streamreqs)
2077 return self.pull(remote, heads)
2067 return self.pull(remote, heads)
2078
2068
2079 def pushkey(self, namespace, key, old, new):
2069 def pushkey(self, namespace, key, old, new):
2080 self.hook('prepushkey', throw=True, namespace=namespace, key=key,
2070 self.hook('prepushkey', throw=True, namespace=namespace, key=key,
2081 old=old, new=new)
2071 old=old, new=new)
2082 self.ui.debug('pushing key for "%s:%s"\n' % (namespace, key))
2072 self.ui.debug('pushing key for "%s:%s"\n' % (namespace, key))
2083 ret = pushkey.push(self, namespace, key, old, new)
2073 ret = pushkey.push(self, namespace, key, old, new)
2084 self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
2074 self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
2085 ret=ret)
2075 ret=ret)
2086 return ret
2076 return ret
2087
2077
2088 def listkeys(self, namespace):
2078 def listkeys(self, namespace):
2089 self.hook('prelistkeys', throw=True, namespace=namespace)
2079 self.hook('prelistkeys', throw=True, namespace=namespace)
2090 self.ui.debug('listing keys for "%s"\n' % namespace)
2080 self.ui.debug('listing keys for "%s"\n' % namespace)
2091 values = pushkey.list(self, namespace)
2081 values = pushkey.list(self, namespace)
2092 self.hook('listkeys', namespace=namespace, values=values)
2082 self.hook('listkeys', namespace=namespace, values=values)
2093 return values
2083 return values
2094
2084
2095 def debugwireargs(self, one, two, three=None, four=None, five=None):
2085 def debugwireargs(self, one, two, three=None, four=None, five=None):
2096 '''used to test argument passing over the wire'''
2086 '''used to test argument passing over the wire'''
2097 return "%s %s %s %s %s" % (one, two, three, four, five)
2087 return "%s %s %s %s %s" % (one, two, three, four, five)
2098
2088
2099 def savecommitmessage(self, text):
2089 def savecommitmessage(self, text):
2100 fp = self.opener('last-message.txt', 'wb')
2090 fp = self.opener('last-message.txt', 'wb')
2101 try:
2091 try:
2102 fp.write(text)
2092 fp.write(text)
2103 finally:
2093 finally:
2104 fp.close()
2094 fp.close()
2105 return self.pathto(fp.name[len(self.root) + 1:])
2095 return self.pathto(fp.name[len(self.root) + 1:])
2106
2096
2107 # used to avoid circular references so destructors work
2097 # used to avoid circular references so destructors work
2108 def aftertrans(files):
2098 def aftertrans(files):
2109 renamefiles = [tuple(t) for t in files]
2099 renamefiles = [tuple(t) for t in files]
2110 def a():
2100 def a():
2111 for vfs, src, dest in renamefiles:
2101 for vfs, src, dest in renamefiles:
2112 try:
2102 try:
2113 vfs.rename(src, dest)
2103 vfs.rename(src, dest)
2114 except OSError: # journal file does not yet exist
2104 except OSError: # journal file does not yet exist
2115 pass
2105 pass
2116 return a
2106 return a
2117
2107
2118 def undoname(fn):
2108 def undoname(fn):
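# a quick sketch of the mapping (hypothetical path):
#   undoname('store/journal.phaseroots') -> 'store/undo.phaseroots'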
2119 base, name = os.path.split(fn)
2109 base, name = os.path.split(fn)
2120 assert name.startswith('journal')
2110 assert name.startswith('journal')
2121 return os.path.join(base, name.replace('journal', 'undo', 1))
2111 return os.path.join(base, name.replace('journal', 'undo', 1))
2122
2112
2123 def instance(ui, path, create):
2113 def instance(ui, path, create):
2124 return localrepository(ui, util.urllocalpath(path), create)
2114 return localrepository(ui, util.urllocalpath(path), create)
2125
2115
2126 def islocal(path):
2116 def islocal(path):
2127 return True
2117 return True