##// END OF EJS Templates
bundle-ng: add bundlecaps argument to getbundle() command
Benoit Boissinot -
r19201:309c439c default
parent child Browse files
Show More
@@ -1,299 +1,302 b''
1 # changegroup.py - Mercurial changegroup manipulation functions
1 # changegroup.py - Mercurial changegroup manipulation functions
2 #
2 #
3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 from node import nullrev
9 from node import nullrev
10 import mdiff, util, dagutil
10 import mdiff, util, dagutil
11 import struct, os, bz2, zlib, tempfile
11 import struct, os, bz2, zlib, tempfile
12
12
13 _BUNDLE10_DELTA_HEADER = "20s20s20s20s"
13 _BUNDLE10_DELTA_HEADER = "20s20s20s20s"
14
14
15 def readexactly(stream, n):
15 def readexactly(stream, n):
16 '''read n bytes from stream.read and abort if less was available'''
16 '''read n bytes from stream.read and abort if less was available'''
17 s = stream.read(n)
17 s = stream.read(n)
18 if len(s) < n:
18 if len(s) < n:
19 raise util.Abort(_("stream ended unexpectedly"
19 raise util.Abort(_("stream ended unexpectedly"
20 " (got %d bytes, expected %d)")
20 " (got %d bytes, expected %d)")
21 % (len(s), n))
21 % (len(s), n))
22 return s
22 return s
23
23
24 def getchunk(stream):
24 def getchunk(stream):
25 """return the next chunk from stream as a string"""
25 """return the next chunk from stream as a string"""
26 d = readexactly(stream, 4)
26 d = readexactly(stream, 4)
27 l = struct.unpack(">l", d)[0]
27 l = struct.unpack(">l", d)[0]
28 if l <= 4:
28 if l <= 4:
29 if l:
29 if l:
30 raise util.Abort(_("invalid chunk length %d") % l)
30 raise util.Abort(_("invalid chunk length %d") % l)
31 return ""
31 return ""
32 return readexactly(stream, l - 4)
32 return readexactly(stream, l - 4)
33
33
34 def chunkheader(length):
34 def chunkheader(length):
35 """return a changegroup chunk header (string)"""
35 """return a changegroup chunk header (string)"""
36 return struct.pack(">l", length + 4)
36 return struct.pack(">l", length + 4)
37
37
38 def closechunk():
38 def closechunk():
39 """return a changegroup chunk header (string) for a zero-length chunk"""
39 """return a changegroup chunk header (string) for a zero-length chunk"""
40 return struct.pack(">l", 0)
40 return struct.pack(">l", 0)
41
41
42 class nocompress(object):
42 class nocompress(object):
43 def compress(self, x):
43 def compress(self, x):
44 return x
44 return x
45 def flush(self):
45 def flush(self):
46 return ""
46 return ""
47
47
48 bundletypes = {
48 bundletypes = {
49 "": ("", nocompress), # only when using unbundle on ssh and old http servers
49 "": ("", nocompress), # only when using unbundle on ssh and old http servers
50 # since the unification ssh accepts a header but there
50 # since the unification ssh accepts a header but there
51 # is no capability signaling it.
51 # is no capability signaling it.
52 "HG10UN": ("HG10UN", nocompress),
52 "HG10UN": ("HG10UN", nocompress),
53 "HG10BZ": ("HG10", lambda: bz2.BZ2Compressor()),
53 "HG10BZ": ("HG10", lambda: bz2.BZ2Compressor()),
54 "HG10GZ": ("HG10GZ", lambda: zlib.compressobj()),
54 "HG10GZ": ("HG10GZ", lambda: zlib.compressobj()),
55 }
55 }
56
56
57 # hgweb uses this list to communicate its preferred type
57 # hgweb uses this list to communicate its preferred type
58 bundlepriority = ['HG10GZ', 'HG10BZ', 'HG10UN']
58 bundlepriority = ['HG10GZ', 'HG10BZ', 'HG10UN']
59
59
60 def writebundle(cg, filename, bundletype):
60 def writebundle(cg, filename, bundletype):
61 """Write a bundle file and return its filename.
61 """Write a bundle file and return its filename.
62
62
63 Existing files will not be overwritten.
63 Existing files will not be overwritten.
64 If no filename is specified, a temporary file is created.
64 If no filename is specified, a temporary file is created.
65 bz2 compression can be turned off.
65 bz2 compression can be turned off.
66 The bundle file will be deleted in case of errors.
66 The bundle file will be deleted in case of errors.
67 """
67 """
68
68
69 fh = None
69 fh = None
70 cleanup = None
70 cleanup = None
71 try:
71 try:
72 if filename:
72 if filename:
73 fh = open(filename, "wb")
73 fh = open(filename, "wb")
74 else:
74 else:
75 fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg")
75 fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg")
76 fh = os.fdopen(fd, "wb")
76 fh = os.fdopen(fd, "wb")
77 cleanup = filename
77 cleanup = filename
78
78
79 header, compressor = bundletypes[bundletype]
79 header, compressor = bundletypes[bundletype]
80 fh.write(header)
80 fh.write(header)
81 z = compressor()
81 z = compressor()
82
82
83 # parse the changegroup data, otherwise we will block
83 # parse the changegroup data, otherwise we will block
84 # in case of sshrepo because we don't know the end of the stream
84 # in case of sshrepo because we don't know the end of the stream
85
85
86 # an empty chunkgroup is the end of the changegroup
86 # an empty chunkgroup is the end of the changegroup
87 # a changegroup has at least 2 chunkgroups (changelog and manifest).
87 # a changegroup has at least 2 chunkgroups (changelog and manifest).
88 # after that, an empty chunkgroup is the end of the changegroup
88 # after that, an empty chunkgroup is the end of the changegroup
89 empty = False
89 empty = False
90 count = 0
90 count = 0
91 while not empty or count <= 2:
91 while not empty or count <= 2:
92 empty = True
92 empty = True
93 count += 1
93 count += 1
94 while True:
94 while True:
95 chunk = getchunk(cg)
95 chunk = getchunk(cg)
96 if not chunk:
96 if not chunk:
97 break
97 break
98 empty = False
98 empty = False
99 fh.write(z.compress(chunkheader(len(chunk))))
99 fh.write(z.compress(chunkheader(len(chunk))))
100 pos = 0
100 pos = 0
101 while pos < len(chunk):
101 while pos < len(chunk):
102 next = pos + 2**20
102 next = pos + 2**20
103 fh.write(z.compress(chunk[pos:next]))
103 fh.write(z.compress(chunk[pos:next]))
104 pos = next
104 pos = next
105 fh.write(z.compress(closechunk()))
105 fh.write(z.compress(closechunk()))
106 fh.write(z.flush())
106 fh.write(z.flush())
107 cleanup = None
107 cleanup = None
108 return filename
108 return filename
109 finally:
109 finally:
110 if fh is not None:
110 if fh is not None:
111 fh.close()
111 fh.close()
112 if cleanup is not None:
112 if cleanup is not None:
113 os.unlink(cleanup)
113 os.unlink(cleanup)
114
114
115 def decompressor(fh, alg):
115 def decompressor(fh, alg):
116 if alg == 'UN':
116 if alg == 'UN':
117 return fh
117 return fh
118 elif alg == 'GZ':
118 elif alg == 'GZ':
119 def generator(f):
119 def generator(f):
120 zd = zlib.decompressobj()
120 zd = zlib.decompressobj()
121 for chunk in util.filechunkiter(f):
121 for chunk in util.filechunkiter(f):
122 yield zd.decompress(chunk)
122 yield zd.decompress(chunk)
123 elif alg == 'BZ':
123 elif alg == 'BZ':
124 def generator(f):
124 def generator(f):
125 zd = bz2.BZ2Decompressor()
125 zd = bz2.BZ2Decompressor()
126 zd.decompress("BZ")
126 zd.decompress("BZ")
127 for chunk in util.filechunkiter(f, 4096):
127 for chunk in util.filechunkiter(f, 4096):
128 yield zd.decompress(chunk)
128 yield zd.decompress(chunk)
129 else:
129 else:
130 raise util.Abort("unknown bundle compression '%s'" % alg)
130 raise util.Abort("unknown bundle compression '%s'" % alg)
131 return util.chunkbuffer(generator(fh))
131 return util.chunkbuffer(generator(fh))
132
132
133 class unbundle10(object):
133 class unbundle10(object):
134 deltaheader = _BUNDLE10_DELTA_HEADER
134 deltaheader = _BUNDLE10_DELTA_HEADER
135 deltaheadersize = struct.calcsize(deltaheader)
135 deltaheadersize = struct.calcsize(deltaheader)
136 def __init__(self, fh, alg):
136 def __init__(self, fh, alg):
137 self._stream = decompressor(fh, alg)
137 self._stream = decompressor(fh, alg)
138 self._type = alg
138 self._type = alg
139 self.callback = None
139 self.callback = None
140 def compressed(self):
140 def compressed(self):
141 return self._type != 'UN'
141 return self._type != 'UN'
142 def read(self, l):
142 def read(self, l):
143 return self._stream.read(l)
143 return self._stream.read(l)
144 def seek(self, pos):
144 def seek(self, pos):
145 return self._stream.seek(pos)
145 return self._stream.seek(pos)
146 def tell(self):
146 def tell(self):
147 return self._stream.tell()
147 return self._stream.tell()
148 def close(self):
148 def close(self):
149 return self._stream.close()
149 return self._stream.close()
150
150
151 def chunklength(self):
151 def chunklength(self):
152 d = readexactly(self._stream, 4)
152 d = readexactly(self._stream, 4)
153 l = struct.unpack(">l", d)[0]
153 l = struct.unpack(">l", d)[0]
154 if l <= 4:
154 if l <= 4:
155 if l:
155 if l:
156 raise util.Abort(_("invalid chunk length %d") % l)
156 raise util.Abort(_("invalid chunk length %d") % l)
157 return 0
157 return 0
158 if self.callback:
158 if self.callback:
159 self.callback()
159 self.callback()
160 return l - 4
160 return l - 4
161
161
162 def changelogheader(self):
162 def changelogheader(self):
163 """v10 does not have a changelog header chunk"""
163 """v10 does not have a changelog header chunk"""
164 return {}
164 return {}
165
165
166 def manifestheader(self):
166 def manifestheader(self):
167 """v10 does not have a manifest header chunk"""
167 """v10 does not have a manifest header chunk"""
168 return {}
168 return {}
169
169
170 def filelogheader(self):
170 def filelogheader(self):
171 """return the header of the filelogs chunk, v10 only has the filename"""
171 """return the header of the filelogs chunk, v10 only has the filename"""
172 l = self.chunklength()
172 l = self.chunklength()
173 if not l:
173 if not l:
174 return {}
174 return {}
175 fname = readexactly(self._stream, l)
175 fname = readexactly(self._stream, l)
176 return dict(filename=fname)
176 return dict(filename=fname)
177
177
178 def _deltaheader(self, headertuple, prevnode):
178 def _deltaheader(self, headertuple, prevnode):
179 node, p1, p2, cs = headertuple
179 node, p1, p2, cs = headertuple
180 if prevnode is None:
180 if prevnode is None:
181 deltabase = p1
181 deltabase = p1
182 else:
182 else:
183 deltabase = prevnode
183 deltabase = prevnode
184 return node, p1, p2, deltabase, cs
184 return node, p1, p2, deltabase, cs
185
185
186 def deltachunk(self, prevnode):
186 def deltachunk(self, prevnode):
187 l = self.chunklength()
187 l = self.chunklength()
188 if not l:
188 if not l:
189 return {}
189 return {}
190 headerdata = readexactly(self._stream, self.deltaheadersize)
190 headerdata = readexactly(self._stream, self.deltaheadersize)
191 header = struct.unpack(self.deltaheader, headerdata)
191 header = struct.unpack(self.deltaheader, headerdata)
192 delta = readexactly(self._stream, l - self.deltaheadersize)
192 delta = readexactly(self._stream, l - self.deltaheadersize)
193 node, p1, p2, deltabase, cs = self._deltaheader(header, prevnode)
193 node, p1, p2, deltabase, cs = self._deltaheader(header, prevnode)
194 return dict(node=node, p1=p1, p2=p2, cs=cs,
194 return dict(node=node, p1=p1, p2=p2, cs=cs,
195 deltabase=deltabase, delta=delta)
195 deltabase=deltabase, delta=delta)
196
196
197 class headerlessfixup(object):
197 class headerlessfixup(object):
198 def __init__(self, fh, h):
198 def __init__(self, fh, h):
199 self._h = h
199 self._h = h
200 self._fh = fh
200 self._fh = fh
201 def read(self, n):
201 def read(self, n):
202 if self._h:
202 if self._h:
203 d, self._h = self._h[:n], self._h[n:]
203 d, self._h = self._h[:n], self._h[n:]
204 if len(d) < n:
204 if len(d) < n:
205 d += readexactly(self._fh, n - len(d))
205 d += readexactly(self._fh, n - len(d))
206 return d
206 return d
207 return readexactly(self._fh, n)
207 return readexactly(self._fh, n)
208
208
209 def readbundle(fh, fname):
209 def readbundle(fh, fname):
210 header = readexactly(fh, 6)
210 header = readexactly(fh, 6)
211
211
212 if not fname:
212 if not fname:
213 fname = "stream"
213 fname = "stream"
214 if not header.startswith('HG') and header.startswith('\0'):
214 if not header.startswith('HG') and header.startswith('\0'):
215 fh = headerlessfixup(fh, header)
215 fh = headerlessfixup(fh, header)
216 header = "HG10UN"
216 header = "HG10UN"
217
217
218 magic, version, alg = header[0:2], header[2:4], header[4:6]
218 magic, version, alg = header[0:2], header[2:4], header[4:6]
219
219
220 if magic != 'HG':
220 if magic != 'HG':
221 raise util.Abort(_('%s: not a Mercurial bundle') % fname)
221 raise util.Abort(_('%s: not a Mercurial bundle') % fname)
222 if version != '10':
222 if version != '10':
223 raise util.Abort(_('%s: unknown bundle version %s') % (fname, version))
223 raise util.Abort(_('%s: unknown bundle version %s') % (fname, version))
224 return unbundle10(fh, alg)
224 return unbundle10(fh, alg)
225
225
226 class bundle10(object):
226 class bundle10(object):
227 deltaheader = _BUNDLE10_DELTA_HEADER
227 deltaheader = _BUNDLE10_DELTA_HEADER
228 def __init__(self):
228 def __init__(self, bundlecaps=None):
229 pass
229 # Set of capabilities we can use to build the bundle.
230 if bundlecaps is None:
231 bundlecaps = set()
232 self._bundlecaps = bundlecaps
230 def start(self, lookup):
233 def start(self, lookup):
231 self._lookup = lookup
234 self._lookup = lookup
232 def close(self):
235 def close(self):
233 return closechunk()
236 return closechunk()
234
237
235 def fileheader(self, fname):
238 def fileheader(self, fname):
236 return chunkheader(len(fname)) + fname
239 return chunkheader(len(fname)) + fname
237
240
238 def group(self, nodelist, revlog, reorder=None):
241 def group(self, nodelist, revlog, reorder=None):
239 """Calculate a delta group, yielding a sequence of changegroup chunks
242 """Calculate a delta group, yielding a sequence of changegroup chunks
240 (strings).
243 (strings).
241
244
242 Given a list of changeset revs, return a set of deltas and
245 Given a list of changeset revs, return a set of deltas and
243 metadata corresponding to nodes. The first delta is
246 metadata corresponding to nodes. The first delta is
244 first parent(nodelist[0]) -> nodelist[0], the receiver is
247 first parent(nodelist[0]) -> nodelist[0], the receiver is
245 guaranteed to have this parent as it has all history before
248 guaranteed to have this parent as it has all history before
246 these changesets. In the case firstparent is nullrev the
249 these changesets. In the case firstparent is nullrev the
247 changegroup starts with a full revision.
250 changegroup starts with a full revision.
248 """
251 """
249
252
250 # if we don't have any revisions touched by these changesets, bail
253 # if we don't have any revisions touched by these changesets, bail
251 if len(nodelist) == 0:
254 if len(nodelist) == 0:
252 yield self.close()
255 yield self.close()
253 return
256 return
254
257
255 # for generaldelta revlogs, we linearize the revs; this will both be
258 # for generaldelta revlogs, we linearize the revs; this will both be
256 # much quicker and generate a much smaller bundle
259 # much quicker and generate a much smaller bundle
257 if (revlog._generaldelta and reorder is not False) or reorder:
260 if (revlog._generaldelta and reorder is not False) or reorder:
258 dag = dagutil.revlogdag(revlog)
261 dag = dagutil.revlogdag(revlog)
259 revs = set(revlog.rev(n) for n in nodelist)
262 revs = set(revlog.rev(n) for n in nodelist)
260 revs = dag.linearize(revs)
263 revs = dag.linearize(revs)
261 else:
264 else:
262 revs = sorted([revlog.rev(n) for n in nodelist])
265 revs = sorted([revlog.rev(n) for n in nodelist])
263
266
264 # add the parent of the first rev
267 # add the parent of the first rev
265 p = revlog.parentrevs(revs[0])[0]
268 p = revlog.parentrevs(revs[0])[0]
266 revs.insert(0, p)
269 revs.insert(0, p)
267
270
268 # build deltas
271 # build deltas
269 for r in xrange(len(revs) - 1):
272 for r in xrange(len(revs) - 1):
270 prev, curr = revs[r], revs[r + 1]
273 prev, curr = revs[r], revs[r + 1]
271 for c in self.revchunk(revlog, curr, prev):
274 for c in self.revchunk(revlog, curr, prev):
272 yield c
275 yield c
273
276
274 yield self.close()
277 yield self.close()
275
278
276
279
277 def revchunk(self, revlog, rev, prev):
280 def revchunk(self, revlog, rev, prev):
278 node = revlog.node(rev)
281 node = revlog.node(rev)
279 p1, p2 = revlog.parentrevs(rev)
282 p1, p2 = revlog.parentrevs(rev)
280 base = prev
283 base = prev
281
284
282 prefix = ''
285 prefix = ''
283 if base == nullrev:
286 if base == nullrev:
284 delta = revlog.revision(node)
287 delta = revlog.revision(node)
285 prefix = mdiff.trivialdiffheader(len(delta))
288 prefix = mdiff.trivialdiffheader(len(delta))
286 else:
289 else:
287 delta = revlog.revdiff(base, rev)
290 delta = revlog.revdiff(base, rev)
288 linknode = self._lookup(revlog, node)
291 linknode = self._lookup(revlog, node)
289 p1n, p2n = revlog.parents(node)
292 p1n, p2n = revlog.parents(node)
290 basenode = revlog.node(base)
293 basenode = revlog.node(base)
291 meta = self.builddeltaheader(node, p1n, p2n, basenode, linknode)
294 meta = self.builddeltaheader(node, p1n, p2n, basenode, linknode)
292 meta += prefix
295 meta += prefix
293 l = len(meta) + len(delta)
296 l = len(meta) + len(delta)
294 yield chunkheader(l)
297 yield chunkheader(l)
295 yield meta
298 yield meta
296 yield delta
299 yield delta
297 def builddeltaheader(self, node, p1n, p2n, basenode, linknode):
300 def builddeltaheader(self, node, p1n, p2n, basenode, linknode):
298 # do nothing with basenode, it is implicitly the previous one in HG10
301 # do nothing with basenode, it is implicitly the previous one in HG10
299 return struct.pack(self.deltaheader, node, p1n, p2n, linknode)
302 return struct.pack(self.deltaheader, node, p1n, p2n, linknode)
@@ -1,5875 +1,5880 b''
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import hex, bin, nullid, nullrev, short
8 from node import hex, bin, nullid, nullrev, short
9 from lock import release
9 from lock import release
10 from i18n import _
10 from i18n import _
11 import os, re, difflib, time, tempfile, errno
11 import os, re, difflib, time, tempfile, errno
12 import hg, scmutil, util, revlog, copies, error, bookmarks
12 import hg, scmutil, util, revlog, copies, error, bookmarks
13 import patch, help, encoding, templatekw, discovery
13 import patch, help, encoding, templatekw, discovery
14 import archival, changegroup, cmdutil, hbisect
14 import archival, changegroup, cmdutil, hbisect
15 import sshserver, hgweb, hgweb.server, commandserver
15 import sshserver, hgweb, hgweb.server, commandserver
16 import merge as mergemod
16 import merge as mergemod
17 import minirst, revset, fileset
17 import minirst, revset, fileset
18 import dagparser, context, simplemerge, graphmod
18 import dagparser, context, simplemerge, graphmod
19 import random, setdiscovery, treediscovery, dagutil, pvec, localrepo
19 import random, setdiscovery, treediscovery, dagutil, pvec, localrepo
20 import phases, obsolete
20 import phases, obsolete
21
21
22 table = {}
22 table = {}
23
23
24 command = cmdutil.command(table)
24 command = cmdutil.command(table)
25
25
26 # common command options
26 # common command options
27
27
28 globalopts = [
28 globalopts = [
29 ('R', 'repository', '',
29 ('R', 'repository', '',
30 _('repository root directory or name of overlay bundle file'),
30 _('repository root directory or name of overlay bundle file'),
31 _('REPO')),
31 _('REPO')),
32 ('', 'cwd', '',
32 ('', 'cwd', '',
33 _('change working directory'), _('DIR')),
33 _('change working directory'), _('DIR')),
34 ('y', 'noninteractive', None,
34 ('y', 'noninteractive', None,
35 _('do not prompt, automatically pick the first choice for all prompts')),
35 _('do not prompt, automatically pick the first choice for all prompts')),
36 ('q', 'quiet', None, _('suppress output')),
36 ('q', 'quiet', None, _('suppress output')),
37 ('v', 'verbose', None, _('enable additional output')),
37 ('v', 'verbose', None, _('enable additional output')),
38 ('', 'config', [],
38 ('', 'config', [],
39 _('set/override config option (use \'section.name=value\')'),
39 _('set/override config option (use \'section.name=value\')'),
40 _('CONFIG')),
40 _('CONFIG')),
41 ('', 'debug', None, _('enable debugging output')),
41 ('', 'debug', None, _('enable debugging output')),
42 ('', 'debugger', None, _('start debugger')),
42 ('', 'debugger', None, _('start debugger')),
43 ('', 'encoding', encoding.encoding, _('set the charset encoding'),
43 ('', 'encoding', encoding.encoding, _('set the charset encoding'),
44 _('ENCODE')),
44 _('ENCODE')),
45 ('', 'encodingmode', encoding.encodingmode,
45 ('', 'encodingmode', encoding.encodingmode,
46 _('set the charset encoding mode'), _('MODE')),
46 _('set the charset encoding mode'), _('MODE')),
47 ('', 'traceback', None, _('always print a traceback on exception')),
47 ('', 'traceback', None, _('always print a traceback on exception')),
48 ('', 'time', None, _('time how long the command takes')),
48 ('', 'time', None, _('time how long the command takes')),
49 ('', 'profile', None, _('print command execution profile')),
49 ('', 'profile', None, _('print command execution profile')),
50 ('', 'version', None, _('output version information and exit')),
50 ('', 'version', None, _('output version information and exit')),
51 ('h', 'help', None, _('display help and exit')),
51 ('h', 'help', None, _('display help and exit')),
52 ('', 'hidden', False, _('consider hidden changesets')),
52 ('', 'hidden', False, _('consider hidden changesets')),
53 ]
53 ]
54
54
55 dryrunopts = [('n', 'dry-run', None,
55 dryrunopts = [('n', 'dry-run', None,
56 _('do not perform actions, just print output'))]
56 _('do not perform actions, just print output'))]
57
57
58 remoteopts = [
58 remoteopts = [
59 ('e', 'ssh', '',
59 ('e', 'ssh', '',
60 _('specify ssh command to use'), _('CMD')),
60 _('specify ssh command to use'), _('CMD')),
61 ('', 'remotecmd', '',
61 ('', 'remotecmd', '',
62 _('specify hg command to run on the remote side'), _('CMD')),
62 _('specify hg command to run on the remote side'), _('CMD')),
63 ('', 'insecure', None,
63 ('', 'insecure', None,
64 _('do not verify server certificate (ignoring web.cacerts config)')),
64 _('do not verify server certificate (ignoring web.cacerts config)')),
65 ]
65 ]
66
66
67 walkopts = [
67 walkopts = [
68 ('I', 'include', [],
68 ('I', 'include', [],
69 _('include names matching the given patterns'), _('PATTERN')),
69 _('include names matching the given patterns'), _('PATTERN')),
70 ('X', 'exclude', [],
70 ('X', 'exclude', [],
71 _('exclude names matching the given patterns'), _('PATTERN')),
71 _('exclude names matching the given patterns'), _('PATTERN')),
72 ]
72 ]
73
73
74 commitopts = [
74 commitopts = [
75 ('m', 'message', '',
75 ('m', 'message', '',
76 _('use text as commit message'), _('TEXT')),
76 _('use text as commit message'), _('TEXT')),
77 ('l', 'logfile', '',
77 ('l', 'logfile', '',
78 _('read commit message from file'), _('FILE')),
78 _('read commit message from file'), _('FILE')),
79 ]
79 ]
80
80
81 commitopts2 = [
81 commitopts2 = [
82 ('d', 'date', '',
82 ('d', 'date', '',
83 _('record the specified date as commit date'), _('DATE')),
83 _('record the specified date as commit date'), _('DATE')),
84 ('u', 'user', '',
84 ('u', 'user', '',
85 _('record the specified user as committer'), _('USER')),
85 _('record the specified user as committer'), _('USER')),
86 ]
86 ]
87
87
88 templateopts = [
88 templateopts = [
89 ('', 'style', '',
89 ('', 'style', '',
90 _('display using template map file'), _('STYLE')),
90 _('display using template map file'), _('STYLE')),
91 ('', 'template', '',
91 ('', 'template', '',
92 _('display with template'), _('TEMPLATE')),
92 _('display with template'), _('TEMPLATE')),
93 ]
93 ]
94
94
95 logopts = [
95 logopts = [
96 ('p', 'patch', None, _('show patch')),
96 ('p', 'patch', None, _('show patch')),
97 ('g', 'git', None, _('use git extended diff format')),
97 ('g', 'git', None, _('use git extended diff format')),
98 ('l', 'limit', '',
98 ('l', 'limit', '',
99 _('limit number of changes displayed'), _('NUM')),
99 _('limit number of changes displayed'), _('NUM')),
100 ('M', 'no-merges', None, _('do not show merges')),
100 ('M', 'no-merges', None, _('do not show merges')),
101 ('', 'stat', None, _('output diffstat-style summary of changes')),
101 ('', 'stat', None, _('output diffstat-style summary of changes')),
102 ('G', 'graph', None, _("show the revision DAG")),
102 ('G', 'graph', None, _("show the revision DAG")),
103 ] + templateopts
103 ] + templateopts
104
104
105 diffopts = [
105 diffopts = [
106 ('a', 'text', None, _('treat all files as text')),
106 ('a', 'text', None, _('treat all files as text')),
107 ('g', 'git', None, _('use git extended diff format')),
107 ('g', 'git', None, _('use git extended diff format')),
108 ('', 'nodates', None, _('omit dates from diff headers'))
108 ('', 'nodates', None, _('omit dates from diff headers'))
109 ]
109 ]
110
110
111 diffwsopts = [
111 diffwsopts = [
112 ('w', 'ignore-all-space', None,
112 ('w', 'ignore-all-space', None,
113 _('ignore white space when comparing lines')),
113 _('ignore white space when comparing lines')),
114 ('b', 'ignore-space-change', None,
114 ('b', 'ignore-space-change', None,
115 _('ignore changes in the amount of white space')),
115 _('ignore changes in the amount of white space')),
116 ('B', 'ignore-blank-lines', None,
116 ('B', 'ignore-blank-lines', None,
117 _('ignore changes whose lines are all blank')),
117 _('ignore changes whose lines are all blank')),
118 ]
118 ]
119
119
120 diffopts2 = [
120 diffopts2 = [
121 ('p', 'show-function', None, _('show which function each change is in')),
121 ('p', 'show-function', None, _('show which function each change is in')),
122 ('', 'reverse', None, _('produce a diff that undoes the changes')),
122 ('', 'reverse', None, _('produce a diff that undoes the changes')),
123 ] + diffwsopts + [
123 ] + diffwsopts + [
124 ('U', 'unified', '',
124 ('U', 'unified', '',
125 _('number of lines of context to show'), _('NUM')),
125 _('number of lines of context to show'), _('NUM')),
126 ('', 'stat', None, _('output diffstat-style summary of changes')),
126 ('', 'stat', None, _('output diffstat-style summary of changes')),
127 ]
127 ]
128
128
129 mergetoolopts = [
129 mergetoolopts = [
130 ('t', 'tool', '', _('specify merge tool')),
130 ('t', 'tool', '', _('specify merge tool')),
131 ]
131 ]
132
132
133 similarityopts = [
133 similarityopts = [
134 ('s', 'similarity', '',
134 ('s', 'similarity', '',
135 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
135 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
136 ]
136 ]
137
137
138 subrepoopts = [
138 subrepoopts = [
139 ('S', 'subrepos', None,
139 ('S', 'subrepos', None,
140 _('recurse into subrepositories'))
140 _('recurse into subrepositories'))
141 ]
141 ]
142
142
143 # Commands start here, listed alphabetically
143 # Commands start here, listed alphabetically
144
144
145 @command('^add',
145 @command('^add',
146 walkopts + subrepoopts + dryrunopts,
146 walkopts + subrepoopts + dryrunopts,
147 _('[OPTION]... [FILE]...'))
147 _('[OPTION]... [FILE]...'))
148 def add(ui, repo, *pats, **opts):
148 def add(ui, repo, *pats, **opts):
149 """add the specified files on the next commit
149 """add the specified files on the next commit
150
150
151 Schedule files to be version controlled and added to the
151 Schedule files to be version controlled and added to the
152 repository.
152 repository.
153
153
154 The files will be added to the repository at the next commit. To
154 The files will be added to the repository at the next commit. To
155 undo an add before that, see :hg:`forget`.
155 undo an add before that, see :hg:`forget`.
156
156
157 If no names are given, add all files to the repository.
157 If no names are given, add all files to the repository.
158
158
159 .. container:: verbose
159 .. container:: verbose
160
160
161 An example showing how new (unknown) files are added
161 An example showing how new (unknown) files are added
162 automatically by :hg:`add`::
162 automatically by :hg:`add`::
163
163
164 $ ls
164 $ ls
165 foo.c
165 foo.c
166 $ hg status
166 $ hg status
167 ? foo.c
167 ? foo.c
168 $ hg add
168 $ hg add
169 adding foo.c
169 adding foo.c
170 $ hg status
170 $ hg status
171 A foo.c
171 A foo.c
172
172
173 Returns 0 if all files are successfully added.
173 Returns 0 if all files are successfully added.
174 """
174 """
175
175
176 m = scmutil.match(repo[None], pats, opts)
176 m = scmutil.match(repo[None], pats, opts)
177 rejected = cmdutil.add(ui, repo, m, opts.get('dry_run'),
177 rejected = cmdutil.add(ui, repo, m, opts.get('dry_run'),
178 opts.get('subrepos'), prefix="", explicitonly=False)
178 opts.get('subrepos'), prefix="", explicitonly=False)
179 return rejected and 1 or 0
179 return rejected and 1 or 0
180
180
181 @command('addremove',
181 @command('addremove',
182 similarityopts + walkopts + dryrunopts,
182 similarityopts + walkopts + dryrunopts,
183 _('[OPTION]... [FILE]...'))
183 _('[OPTION]... [FILE]...'))
184 def addremove(ui, repo, *pats, **opts):
184 def addremove(ui, repo, *pats, **opts):
185 """add all new files, delete all missing files
185 """add all new files, delete all missing files
186
186
187 Add all new files and remove all missing files from the
187 Add all new files and remove all missing files from the
188 repository.
188 repository.
189
189
190 New files are ignored if they match any of the patterns in
190 New files are ignored if they match any of the patterns in
191 ``.hgignore``. As with add, these changes take effect at the next
191 ``.hgignore``. As with add, these changes take effect at the next
192 commit.
192 commit.
193
193
194 Use the -s/--similarity option to detect renamed files. This
194 Use the -s/--similarity option to detect renamed files. This
195 option takes a percentage between 0 (disabled) and 100 (files must
195 option takes a percentage between 0 (disabled) and 100 (files must
196 be identical) as its parameter. With a parameter greater than 0,
196 be identical) as its parameter. With a parameter greater than 0,
197 this compares every removed file with every added file and records
197 this compares every removed file with every added file and records
198 those similar enough as renames. Detecting renamed files this way
198 those similar enough as renames. Detecting renamed files this way
199 can be expensive. After using this option, :hg:`status -C` can be
199 can be expensive. After using this option, :hg:`status -C` can be
200 used to check which files were identified as moved or renamed. If
200 used to check which files were identified as moved or renamed. If
201 not specified, -s/--similarity defaults to 100 and only renames of
201 not specified, -s/--similarity defaults to 100 and only renames of
202 identical files are detected.
202 identical files are detected.
203
203
204 Returns 0 if all files are successfully added.
204 Returns 0 if all files are successfully added.
205 """
205 """
206 try:
206 try:
207 sim = float(opts.get('similarity') or 100)
207 sim = float(opts.get('similarity') or 100)
208 except ValueError:
208 except ValueError:
209 raise util.Abort(_('similarity must be a number'))
209 raise util.Abort(_('similarity must be a number'))
210 if sim < 0 or sim > 100:
210 if sim < 0 or sim > 100:
211 raise util.Abort(_('similarity must be between 0 and 100'))
211 raise util.Abort(_('similarity must be between 0 and 100'))
212 return scmutil.addremove(repo, pats, opts, similarity=sim / 100.0)
212 return scmutil.addremove(repo, pats, opts, similarity=sim / 100.0)
213
213
@command('^annotate|blame',
    [('r', 'rev', '', _('annotate the specified revision'), _('REV')),
    ('', 'follow', None,
     _('follow copies/renames and list the filename (DEPRECATED)')),
    ('', 'no-follow', None, _("don't follow copies and renames")),
    ('a', 'text', None, _('treat all files as text')),
    ('u', 'user', None, _('list the author (long with -v)')),
    ('f', 'file', None, _('list the filename')),
    ('d', 'date', None, _('list the date (short with -q)')),
    ('n', 'number', None, _('list the revision number (default)')),
    ('c', 'changeset', None, _('list the changeset')),
    ('l', 'line-number', None, _('show line number at the first appearance'))
    ] + diffwsopts + walkopts,
    _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'))
def annotate(ui, repo, *pats, **opts):
    """show changeset information by line for each file

    List changes in files, showing the revision id responsible for
    each line

    This command is useful for discovering when a change was made and
    by whom.

    Without the -a/--text option, annotate will avoid processing files
    it detects as binary. With -a, annotate will annotate the file
    anyway, although the results will probably be neither useful
    nor desirable.

    Returns 0 on success.
    """
    if opts.get('follow'):
        # --follow is deprecated and now just an alias for -f/--file
        # to mimic the behavior of Mercurial before version 1.5
        opts['file'] = True

    # short dates in quiet mode, full date strings otherwise
    datefunc = ui.quiet and util.shortdate or util.datestr
    getdate = util.cachefunc(lambda x: datefunc(x[0].date()))

    if not pats:
        raise util.Abort(_('at least one filename or pattern is required'))

    # full hex node ids when debugging, abbreviated ones otherwise
    hexfn = ui.debugflag and hex or short

    # (option name, column separator, cell formatter) in fixed output order;
    # each formatter receives an (fctx, lineno) pair from fctx.annotate()
    opmap = [('user', ' ', lambda x: ui.shortuser(x[0].user())),
             ('number', ' ', lambda x: str(x[0].rev())),
             ('changeset', ' ', lambda x: hexfn(x[0].node())),
             ('date', ' ', getdate),
             ('file', ' ', lambda x: x[0].path()),
             ('line_number', ':', lambda x: str(x[1])),
            ]

    # with no explicit column selection, default to the revision number
    if (not opts.get('user') and not opts.get('changeset')
        and not opts.get('date') and not opts.get('file')):
        opts['number'] = True

    linenumber = opts.get('line_number') is not None
    if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
        raise util.Abort(_('at least one of -n/-c is required for -l'))

    # keep only the requested columns, preserving opmap's ordering
    funcmap = [(func, sep) for op, sep, func in opmap if opts.get(op)]
    funcmap[0] = (funcmap[0][0], '') # no separator in front of first column

    def bad(x, y):
        # abort on an unmatched file pattern instead of just warning
        raise util.Abort("%s: %s" % (x, y))

    ctx = scmutil.revsingle(repo, opts.get('rev'))
    m = scmutil.match(ctx, pats, opts)
    m.bad = bad
    follow = not opts.get('no_follow')
    diffopts = patch.diffopts(ui, opts, section='annotate')
    for abs in ctx.walk(m):
        fctx = ctx[abs]
        if not opts.get('text') and util.binary(fctx.data()):
            ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
            continue

        lines = fctx.annotate(follow=follow, linenumber=linenumber,
                              diffopts=diffopts)
        pieces = []

        # render each column for every line, right-aligned (by display
        # width, not byte length) to the widest cell in that column
        for f, sep in funcmap:
            l = [f(n) for n, dummy in lines]
            if l:
                sized = [(x, encoding.colwidth(x)) for x in l]
                ml = max([w for x, w in sized])
                pieces.append(["%s%s%s" % (sep, ' ' * (ml - w), x)
                               for x, w in sized])

        if pieces:
            # zip(*pieces) regroups per-column lists into per-line tuples
            for p, l in zip(zip(*pieces), lines):
                ui.write("%s: %s" % ("".join(p), l[1]))

        # annotated content keeps its own newlines; add one if the
        # file's last line is missing it so output stays well-formed
        if lines and not lines[-1][1].endswith('\n'):
            ui.write('\n')
308
308
@command('archive',
    [('', 'no-decode', None, _('do not pass files through decoders')),
    ('p', 'prefix', '', _('directory prefix for files in archive'),
     _('PREFIX')),
    ('r', 'rev', '', _('revision to distribute'), _('REV')),
    ('t', 'type', '', _('type of distribution to create'), _('TYPE')),
    ] + subrepoopts + walkopts,
    _('[OPTION]... DEST'))
def archive(ui, repo, dest, **opts):
    '''create an unversioned archive of a repository revision

    By default, the revision used is the parent of the working
    directory; use -r/--rev to specify a different revision.

    The archive type is automatically detected based on file
    extension (or override using -t/--type).

    .. container:: verbose

      Examples:

      - create a zip file containing the 1.0 release::

          hg archive -r 1.0 project-1.0.zip

      - create a tarball excluding .hg files::

          hg archive project.tar.gz -X ".hg*"

    Valid types are:

    :``files``: a directory full of files (default)
    :``tar``: tar archive, uncompressed
    :``tbz2``: tar archive, compressed using bzip2
    :``tgz``: tar archive, compressed using gzip
    :``uzip``: zip archive, uncompressed
    :``zip``: zip archive, compressed using deflate

    The exact name of the destination archive or directory is given
    using a format string; see :hg:`help export` for details.

    Each member added to an archive file has a directory prefix
    prepended. Use -p/--prefix to specify a format string for the
    prefix. The default is the basename of the archive, with suffixes
    removed.

    Returns 0 on success.
    '''

    # resolve the requested revision (default: working directory parent)
    ctx = scmutil.revsingle(repo, opts.get('rev'))
    if not ctx:
        raise util.Abort(_('no working directory: please specify a revision'))
    node = ctx.node()

    # expand format-string escapes in the destination name and make sure
    # we are not about to overwrite the repository itself
    dest = cmdutil.makefilename(repo, dest, node)
    if os.path.realpath(dest) == repo.root:
        raise util.Abort(_('repository root cannot be destination'))

    archkind = opts.get('type') or archival.guesskind(dest) or 'files'
    prefix = opts.get('prefix')

    # '-' means stream the archive to stdout, which only works for
    # single-file archive formats
    if dest == '-':
        if archkind == 'files':
            raise util.Abort(_('cannot archive plain files to stdout'))
        dest = cmdutil.makefileobj(repo, dest)
        prefix = prefix or os.path.basename(repo.root) + '-%h'

    prefix = cmdutil.makefilename(repo, prefix, node)
    matcher = scmutil.match(ctx, [], opts)
    decode = not opts.get('no_decode')
    archival.archive(repo, dest, node, archkind, decode, matcher,
                     prefix, subrepos=opts.get('subrepos'))
380
380
@command('backout',
    [('', 'merge', None, _('merge with old dirstate parent after backout')),
    ('', 'parent', '',
     _('parent to choose when backing out merge (DEPRECATED)'), _('REV')),
    ('r', 'rev', '', _('revision to backout'), _('REV')),
    ] + mergetoolopts + walkopts + commitopts + commitopts2,
    _('[OPTION]... [-r] REV'))
def backout(ui, repo, node=None, rev=None, **opts):
    '''reverse effect of earlier changeset

    Prepare a new changeset with the effect of REV undone in the
    current working directory.

    If REV is the parent of the working directory, then this new changeset
    is committed automatically. Otherwise, hg needs to merge the
    changes and the merged result is left uncommitted.

    .. note::
      backout cannot be used to fix either an unwanted or
      incorrect merge.

    .. container:: verbose

      By default, the pending changeset will have one parent,
      maintaining a linear history. With --merge, the pending
      changeset will instead have two parents: the old parent of the
      working directory and a new child of REV that simply undoes REV.

      Before version 1.7, the behavior without --merge was equivalent
      to specifying --merge followed by :hg:`update --clean .` to
      cancel the merge and leave the child of REV as a head to be
      merged separately.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    Returns 0 on success.
    '''
    # the revision may be given either positionally or via -r, not both
    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    if not rev:
        rev = node

    if not rev:
        raise util.Abort(_("please specify a revision to backout"))

    # parse --date eagerly so a malformed date aborts before any work
    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)

    # refuse to run with uncommitted changes, then resolve REV to a node
    cmdutil.bailifchanged(repo)
    node = scmutil.revsingle(repo, rev).node()

    # REV must be an ancestor of the first working directory parent
    op1, op2 = repo.dirstate.parents()
    a = repo.changelog.ancestor(op1, node)
    if a != node:
        raise util.Abort(_('cannot backout change on a different branch'))

    p1, p2 = repo.changelog.parents(node)
    if p1 == nullid:
        raise util.Abort(_('cannot backout a change with no parents'))
    if p2 != nullid:
        # backing out a merge: --parent selects which side to revert to
        if not opts.get('parent'):
            raise util.Abort(_('cannot backout a merge changeset'))
        p = repo.lookup(opts['parent'])
        if p not in (p1, p2):
            raise util.Abort(_('%s is not a parent of %s') %
                             (short(p), short(node)))
        parent = p
    else:
        if opts.get('parent'):
            raise util.Abort(_('cannot use --parent on non-merge changeset'))
        parent = p1

    # the backout should appear on the same branch
    wlock = repo.wlock()
    try:
        branch = repo.dirstate.branch()
        bheads = repo.branchheads(branch)
        # check out REV, then revert the working copy to REV's parent;
        # the working directory now contains the inverse of REV
        hg.clean(repo, node, show_stats=False)
        repo.dirstate.setbranch(branch)
        rctx = scmutil.revsingle(repo, hex(parent))
        cmdutil.revert(ui, repo, rctx, repo.dirstate.parents())
        if not opts.get('merge') and op1 != node:
            # without --merge: merge the backout back onto the old
            # working directory parent and leave the result uncommitted
            try:
                ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
                return hg.update(repo, op1)
            finally:
                ui.setconfig('ui', 'forcemerge', '')

        # commit the backout; force the editor when we synthesized the
        # default message so the user gets a chance to change it
        e = cmdutil.commiteditor
        if not opts['message'] and not opts['logfile']:
            # we don't translate commit messages
            opts['message'] = "Backed out changeset %s" % short(node)
            e = cmdutil.commitforceeditor

        def commitfunc(ui, repo, message, match, opts):
            return repo.commit(message, opts.get('user'), opts.get('date'),
                               match, editor=e)
        newnode = cmdutil.commit(ui, repo, commitfunc, [], opts)
        cmdutil.commitstatus(repo, newnode, branch, bheads)

        def nice(node):
            # "rev:shortnode" form for status messages
            return '%d:%s' % (repo.changelog.rev(node), short(node))
        ui.status(_('changeset %s backs out changeset %s\n') %
                  (nice(repo.changelog.tip()), nice(node)))
        if opts.get('merge') and op1 != node:
            # with --merge: return to the old parent and merge in the
            # freshly committed backout changeset
            hg.clean(repo, op1, show_stats=False)
            ui.status(_('merging with changeset %s\n')
                      % nice(repo.changelog.tip()))
            try:
                ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
                return hg.merge(repo, hex(repo.changelog.tip()))
            finally:
                ui.setconfig('ui', 'forcemerge', '')
    finally:
        wlock.release()
    return 0
499
499
500 @command('bisect',
500 @command('bisect',
501 [('r', 'reset', False, _('reset bisect state')),
501 [('r', 'reset', False, _('reset bisect state')),
502 ('g', 'good', False, _('mark changeset good')),
502 ('g', 'good', False, _('mark changeset good')),
503 ('b', 'bad', False, _('mark changeset bad')),
503 ('b', 'bad', False, _('mark changeset bad')),
504 ('s', 'skip', False, _('skip testing changeset')),
504 ('s', 'skip', False, _('skip testing changeset')),
505 ('e', 'extend', False, _('extend the bisect range')),
505 ('e', 'extend', False, _('extend the bisect range')),
506 ('c', 'command', '', _('use command to check changeset state'), _('CMD')),
506 ('c', 'command', '', _('use command to check changeset state'), _('CMD')),
507 ('U', 'noupdate', False, _('do not update to target'))],
507 ('U', 'noupdate', False, _('do not update to target'))],
508 _("[-gbsr] [-U] [-c CMD] [REV]"))
508 _("[-gbsr] [-U] [-c CMD] [REV]"))
509 def bisect(ui, repo, rev=None, extra=None, command=None,
509 def bisect(ui, repo, rev=None, extra=None, command=None,
510 reset=None, good=None, bad=None, skip=None, extend=None,
510 reset=None, good=None, bad=None, skip=None, extend=None,
511 noupdate=None):
511 noupdate=None):
512 """subdivision search of changesets
512 """subdivision search of changesets
513
513
514 This command helps to find changesets which introduce problems. To
514 This command helps to find changesets which introduce problems. To
515 use, mark the earliest changeset you know exhibits the problem as
515 use, mark the earliest changeset you know exhibits the problem as
516 bad, then mark the latest changeset which is free from the problem
516 bad, then mark the latest changeset which is free from the problem
517 as good. Bisect will update your working directory to a revision
517 as good. Bisect will update your working directory to a revision
518 for testing (unless the -U/--noupdate option is specified). Once
518 for testing (unless the -U/--noupdate option is specified). Once
519 you have performed tests, mark the working directory as good or
519 you have performed tests, mark the working directory as good or
520 bad, and bisect will either update to another candidate changeset
520 bad, and bisect will either update to another candidate changeset
521 or announce that it has found the bad revision.
521 or announce that it has found the bad revision.
522
522
523 As a shortcut, you can also use the revision argument to mark a
523 As a shortcut, you can also use the revision argument to mark a
524 revision as good or bad without checking it out first.
524 revision as good or bad without checking it out first.
525
525
526 If you supply a command, it will be used for automatic bisection.
526 If you supply a command, it will be used for automatic bisection.
527 The environment variable HG_NODE will contain the ID of the
527 The environment variable HG_NODE will contain the ID of the
528 changeset being tested. The exit status of the command will be
528 changeset being tested. The exit status of the command will be
529 used to mark revisions as good or bad: status 0 means good, 125
529 used to mark revisions as good or bad: status 0 means good, 125
530 means to skip the revision, 127 (command not found) will abort the
530 means to skip the revision, 127 (command not found) will abort the
531 bisection, and any other non-zero exit status means the revision
531 bisection, and any other non-zero exit status means the revision
532 is bad.
532 is bad.
533
533
534 .. container:: verbose
534 .. container:: verbose
535
535
536 Some examples:
536 Some examples:
537
537
538 - start a bisection with known bad revision 12, and good revision 34::
538 - start a bisection with known bad revision 12, and good revision 34::
539
539
540 hg bisect --bad 34
540 hg bisect --bad 34
541 hg bisect --good 12
541 hg bisect --good 12
542
542
543 - advance the current bisection by marking current revision as good or
543 - advance the current bisection by marking current revision as good or
544 bad::
544 bad::
545
545
546 hg bisect --good
546 hg bisect --good
547 hg bisect --bad
547 hg bisect --bad
548
548
549 - mark the current revision, or a known revision, to be skipped (e.g. if
549 - mark the current revision, or a known revision, to be skipped (e.g. if
550 that revision is not usable because of another issue)::
550 that revision is not usable because of another issue)::
551
551
552 hg bisect --skip
552 hg bisect --skip
553 hg bisect --skip 23
553 hg bisect --skip 23
554
554
555 - skip all revisions that do not touch directories ``foo`` or ``bar``
555 - skip all revisions that do not touch directories ``foo`` or ``bar``
556
556
557 hg bisect --skip '!( file("path:foo") & file("path:bar") )'
557 hg bisect --skip '!( file("path:foo") & file("path:bar") )'
558
558
559 - forget the current bisection::
559 - forget the current bisection::
560
560
561 hg bisect --reset
561 hg bisect --reset
562
562
563 - use 'make && make tests' to automatically find the first broken
563 - use 'make && make tests' to automatically find the first broken
564 revision::
564 revision::
565
565
566 hg bisect --reset
566 hg bisect --reset
567 hg bisect --bad 34
567 hg bisect --bad 34
568 hg bisect --good 12
568 hg bisect --good 12
569 hg bisect --command 'make && make tests'
569 hg bisect --command 'make && make tests'
570
570
571 - see all changesets whose states are already known in the current
571 - see all changesets whose states are already known in the current
572 bisection::
572 bisection::
573
573
574 hg log -r "bisect(pruned)"
574 hg log -r "bisect(pruned)"
575
575
576 - see the changeset currently being bisected (especially useful
576 - see the changeset currently being bisected (especially useful
577 if running with -U/--noupdate)::
577 if running with -U/--noupdate)::
578
578
579 hg log -r "bisect(current)"
579 hg log -r "bisect(current)"
580
580
581 - see all changesets that took part in the current bisection::
581 - see all changesets that took part in the current bisection::
582
582
583 hg log -r "bisect(range)"
583 hg log -r "bisect(range)"
584
584
585 - with the graphlog extension, you can even get a nice graph::
585 - with the graphlog extension, you can even get a nice graph::
586
586
587 hg log --graph -r "bisect(range)"
587 hg log --graph -r "bisect(range)"
588
588
589 See :hg:`help revsets` for more about the `bisect()` keyword.
589 See :hg:`help revsets` for more about the `bisect()` keyword.
590
590
591 Returns 0 on success.
591 Returns 0 on success.
592 """
592 """
593 def extendbisectrange(nodes, good):
593 def extendbisectrange(nodes, good):
594 # bisect is incomplete when it ends on a merge node and
594 # bisect is incomplete when it ends on a merge node and
595 # one of the parent was not checked.
595 # one of the parent was not checked.
596 parents = repo[nodes[0]].parents()
596 parents = repo[nodes[0]].parents()
597 if len(parents) > 1:
597 if len(parents) > 1:
598 side = good and state['bad'] or state['good']
598 side = good and state['bad'] or state['good']
599 num = len(set(i.node() for i in parents) & set(side))
599 num = len(set(i.node() for i in parents) & set(side))
600 if num == 1:
600 if num == 1:
601 return parents[0].ancestor(parents[1])
601 return parents[0].ancestor(parents[1])
602 return None
602 return None
603
603
604 def print_result(nodes, good):
604 def print_result(nodes, good):
605 displayer = cmdutil.show_changeset(ui, repo, {})
605 displayer = cmdutil.show_changeset(ui, repo, {})
606 if len(nodes) == 1:
606 if len(nodes) == 1:
607 # narrowed it down to a single revision
607 # narrowed it down to a single revision
608 if good:
608 if good:
609 ui.write(_("The first good revision is:\n"))
609 ui.write(_("The first good revision is:\n"))
610 else:
610 else:
611 ui.write(_("The first bad revision is:\n"))
611 ui.write(_("The first bad revision is:\n"))
612 displayer.show(repo[nodes[0]])
612 displayer.show(repo[nodes[0]])
613 extendnode = extendbisectrange(nodes, good)
613 extendnode = extendbisectrange(nodes, good)
614 if extendnode is not None:
614 if extendnode is not None:
615 ui.write(_('Not all ancestors of this changeset have been'
615 ui.write(_('Not all ancestors of this changeset have been'
616 ' checked.\nUse bisect --extend to continue the '
616 ' checked.\nUse bisect --extend to continue the '
617 'bisection from\nthe common ancestor, %s.\n')
617 'bisection from\nthe common ancestor, %s.\n')
618 % extendnode)
618 % extendnode)
619 else:
619 else:
620 # multiple possible revisions
620 # multiple possible revisions
621 if good:
621 if good:
622 ui.write(_("Due to skipped revisions, the first "
622 ui.write(_("Due to skipped revisions, the first "
623 "good revision could be any of:\n"))
623 "good revision could be any of:\n"))
624 else:
624 else:
625 ui.write(_("Due to skipped revisions, the first "
625 ui.write(_("Due to skipped revisions, the first "
626 "bad revision could be any of:\n"))
626 "bad revision could be any of:\n"))
627 for n in nodes:
627 for n in nodes:
628 displayer.show(repo[n])
628 displayer.show(repo[n])
629 displayer.close()
629 displayer.close()
630
630
631 def check_state(state, interactive=True):
631 def check_state(state, interactive=True):
632 if not state['good'] or not state['bad']:
632 if not state['good'] or not state['bad']:
633 if (good or bad or skip or reset) and interactive:
633 if (good or bad or skip or reset) and interactive:
634 return
634 return
635 if not state['good']:
635 if not state['good']:
636 raise util.Abort(_('cannot bisect (no known good revisions)'))
636 raise util.Abort(_('cannot bisect (no known good revisions)'))
637 else:
637 else:
638 raise util.Abort(_('cannot bisect (no known bad revisions)'))
638 raise util.Abort(_('cannot bisect (no known bad revisions)'))
639 return True
639 return True
640
640
641 # backward compatibility
641 # backward compatibility
642 if rev in "good bad reset init".split():
642 if rev in "good bad reset init".split():
643 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
643 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
644 cmd, rev, extra = rev, extra, None
644 cmd, rev, extra = rev, extra, None
645 if cmd == "good":
645 if cmd == "good":
646 good = True
646 good = True
647 elif cmd == "bad":
647 elif cmd == "bad":
648 bad = True
648 bad = True
649 else:
649 else:
650 reset = True
650 reset = True
651 elif extra or good + bad + skip + reset + extend + bool(command) > 1:
651 elif extra or good + bad + skip + reset + extend + bool(command) > 1:
652 raise util.Abort(_('incompatible arguments'))
652 raise util.Abort(_('incompatible arguments'))
653
653
654 if reset:
654 if reset:
655 p = repo.join("bisect.state")
655 p = repo.join("bisect.state")
656 if os.path.exists(p):
656 if os.path.exists(p):
657 os.unlink(p)
657 os.unlink(p)
658 return
658 return
659
659
660 state = hbisect.load_state(repo)
660 state = hbisect.load_state(repo)
661
661
662 if command:
662 if command:
663 changesets = 1
663 changesets = 1
664 try:
664 try:
665 node = state['current'][0]
665 node = state['current'][0]
666 except LookupError:
666 except LookupError:
667 if noupdate:
667 if noupdate:
668 raise util.Abort(_('current bisect revision is unknown - '
668 raise util.Abort(_('current bisect revision is unknown - '
669 'start a new bisect to fix'))
669 'start a new bisect to fix'))
670 node, p2 = repo.dirstate.parents()
670 node, p2 = repo.dirstate.parents()
671 if p2 != nullid:
671 if p2 != nullid:
672 raise util.Abort(_('current bisect revision is a merge'))
672 raise util.Abort(_('current bisect revision is a merge'))
673 try:
673 try:
674 while changesets:
674 while changesets:
675 # update state
675 # update state
676 state['current'] = [node]
676 state['current'] = [node]
677 hbisect.save_state(repo, state)
677 hbisect.save_state(repo, state)
678 status = util.system(command,
678 status = util.system(command,
679 environ={'HG_NODE': hex(node)},
679 environ={'HG_NODE': hex(node)},
680 out=ui.fout)
680 out=ui.fout)
681 if status == 125:
681 if status == 125:
682 transition = "skip"
682 transition = "skip"
683 elif status == 0:
683 elif status == 0:
684 transition = "good"
684 transition = "good"
685 # status < 0 means process was killed
685 # status < 0 means process was killed
686 elif status == 127:
686 elif status == 127:
687 raise util.Abort(_("failed to execute %s") % command)
687 raise util.Abort(_("failed to execute %s") % command)
688 elif status < 0:
688 elif status < 0:
689 raise util.Abort(_("%s killed") % command)
689 raise util.Abort(_("%s killed") % command)
690 else:
690 else:
691 transition = "bad"
691 transition = "bad"
692 ctx = scmutil.revsingle(repo, rev, node)
692 ctx = scmutil.revsingle(repo, rev, node)
693 rev = None # clear for future iterations
693 rev = None # clear for future iterations
694 state[transition].append(ctx.node())
694 state[transition].append(ctx.node())
695 ui.status(_('changeset %d:%s: %s\n') % (ctx, ctx, transition))
695 ui.status(_('changeset %d:%s: %s\n') % (ctx, ctx, transition))
696 check_state(state, interactive=False)
696 check_state(state, interactive=False)
697 # bisect
697 # bisect
698 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
698 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
699 # update to next check
699 # update to next check
700 node = nodes[0]
700 node = nodes[0]
701 if not noupdate:
701 if not noupdate:
702 cmdutil.bailifchanged(repo)
702 cmdutil.bailifchanged(repo)
703 hg.clean(repo, node, show_stats=False)
703 hg.clean(repo, node, show_stats=False)
704 finally:
704 finally:
705 state['current'] = [node]
705 state['current'] = [node]
706 hbisect.save_state(repo, state)
706 hbisect.save_state(repo, state)
707 print_result(nodes, good)
707 print_result(nodes, good)
708 return
708 return
709
709
710 # update state
710 # update state
711
711
712 if rev:
712 if rev:
713 nodes = [repo.lookup(i) for i in scmutil.revrange(repo, [rev])]
713 nodes = [repo.lookup(i) for i in scmutil.revrange(repo, [rev])]
714 else:
714 else:
715 nodes = [repo.lookup('.')]
715 nodes = [repo.lookup('.')]
716
716
717 if good or bad or skip:
717 if good or bad or skip:
718 if good:
718 if good:
719 state['good'] += nodes
719 state['good'] += nodes
720 elif bad:
720 elif bad:
721 state['bad'] += nodes
721 state['bad'] += nodes
722 elif skip:
722 elif skip:
723 state['skip'] += nodes
723 state['skip'] += nodes
724 hbisect.save_state(repo, state)
724 hbisect.save_state(repo, state)
725
725
726 if not check_state(state):
726 if not check_state(state):
727 return
727 return
728
728
729 # actually bisect
729 # actually bisect
730 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
730 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
731 if extend:
731 if extend:
732 if not changesets:
732 if not changesets:
733 extendnode = extendbisectrange(nodes, good)
733 extendnode = extendbisectrange(nodes, good)
734 if extendnode is not None:
734 if extendnode is not None:
735 ui.write(_("Extending search to changeset %d:%s\n"
735 ui.write(_("Extending search to changeset %d:%s\n"
736 % (extendnode.rev(), extendnode)))
736 % (extendnode.rev(), extendnode)))
737 state['current'] = [extendnode.node()]
737 state['current'] = [extendnode.node()]
738 hbisect.save_state(repo, state)
738 hbisect.save_state(repo, state)
739 if noupdate:
739 if noupdate:
740 return
740 return
741 cmdutil.bailifchanged(repo)
741 cmdutil.bailifchanged(repo)
742 return hg.clean(repo, extendnode.node())
742 return hg.clean(repo, extendnode.node())
743 raise util.Abort(_("nothing to extend"))
743 raise util.Abort(_("nothing to extend"))
744
744
745 if changesets == 0:
745 if changesets == 0:
746 print_result(nodes, good)
746 print_result(nodes, good)
747 else:
747 else:
748 assert len(nodes) == 1 # only a single node can be tested next
748 assert len(nodes) == 1 # only a single node can be tested next
749 node = nodes[0]
749 node = nodes[0]
750 # compute the approximate number of remaining tests
750 # compute the approximate number of remaining tests
751 tests, size = 0, 2
751 tests, size = 0, 2
752 while size <= changesets:
752 while size <= changesets:
753 tests, size = tests + 1, size * 2
753 tests, size = tests + 1, size * 2
754 rev = repo.changelog.rev(node)
754 rev = repo.changelog.rev(node)
755 ui.write(_("Testing changeset %d:%s "
755 ui.write(_("Testing changeset %d:%s "
756 "(%d changesets remaining, ~%d tests)\n")
756 "(%d changesets remaining, ~%d tests)\n")
757 % (rev, short(node), changesets, tests))
757 % (rev, short(node), changesets, tests))
758 state['current'] = [node]
758 state['current'] = [node]
759 hbisect.save_state(repo, state)
759 hbisect.save_state(repo, state)
760 if not noupdate:
760 if not noupdate:
761 cmdutil.bailifchanged(repo)
761 cmdutil.bailifchanged(repo)
762 return hg.clean(repo, node)
762 return hg.clean(repo, node)
763
763
764 @command('bookmarks|bookmark',
764 @command('bookmarks|bookmark',
765 [('f', 'force', False, _('force')),
765 [('f', 'force', False, _('force')),
766 ('r', 'rev', '', _('revision'), _('REV')),
766 ('r', 'rev', '', _('revision'), _('REV')),
767 ('d', 'delete', False, _('delete a given bookmark')),
767 ('d', 'delete', False, _('delete a given bookmark')),
768 ('m', 'rename', '', _('rename a given bookmark'), _('NAME')),
768 ('m', 'rename', '', _('rename a given bookmark'), _('NAME')),
769 ('i', 'inactive', False, _('mark a bookmark inactive'))],
769 ('i', 'inactive', False, _('mark a bookmark inactive'))],
770 _('hg bookmarks [OPTIONS]... [NAME]...'))
770 _('hg bookmarks [OPTIONS]... [NAME]...'))
771 def bookmark(ui, repo, *names, **opts):
771 def bookmark(ui, repo, *names, **opts):
772 '''track a line of development with movable markers
772 '''track a line of development with movable markers
773
773
774 Bookmarks are pointers to certain commits that move when committing.
774 Bookmarks are pointers to certain commits that move when committing.
775 Bookmarks are local. They can be renamed, copied and deleted. It is
775 Bookmarks are local. They can be renamed, copied and deleted. It is
776 possible to use :hg:`merge NAME` to merge from a given bookmark, and
776 possible to use :hg:`merge NAME` to merge from a given bookmark, and
777 :hg:`update NAME` to update to a given bookmark.
777 :hg:`update NAME` to update to a given bookmark.
778
778
779 You can use :hg:`bookmark NAME` to set a bookmark on the working
779 You can use :hg:`bookmark NAME` to set a bookmark on the working
780 directory's parent revision with the given name. If you specify
780 directory's parent revision with the given name. If you specify
781 a revision using -r REV (where REV may be an existing bookmark),
781 a revision using -r REV (where REV may be an existing bookmark),
782 the bookmark is assigned to that revision.
782 the bookmark is assigned to that revision.
783
783
784 Bookmarks can be pushed and pulled between repositories (see :hg:`help
784 Bookmarks can be pushed and pulled between repositories (see :hg:`help
785 push` and :hg:`help pull`). This requires both the local and remote
785 push` and :hg:`help pull`). This requires both the local and remote
786 repositories to support bookmarks. For versions prior to 1.8, this means
786 repositories to support bookmarks. For versions prior to 1.8, this means
787 the bookmarks extension must be enabled.
787 the bookmarks extension must be enabled.
788
788
789 If you set a bookmark called '@', new clones of the repository will
789 If you set a bookmark called '@', new clones of the repository will
790 have that revision checked out (and the bookmark made active) by
790 have that revision checked out (and the bookmark made active) by
791 default.
791 default.
792
792
793 With -i/--inactive, the new bookmark will not be made the active
793 With -i/--inactive, the new bookmark will not be made the active
794 bookmark. If -r/--rev is given, the new bookmark will not be made
794 bookmark. If -r/--rev is given, the new bookmark will not be made
795 active even if -i/--inactive is not given. If no NAME is given, the
795 active even if -i/--inactive is not given. If no NAME is given, the
796 current active bookmark will be marked inactive.
796 current active bookmark will be marked inactive.
797 '''
797 '''
798 force = opts.get('force')
798 force = opts.get('force')
799 rev = opts.get('rev')
799 rev = opts.get('rev')
800 delete = opts.get('delete')
800 delete = opts.get('delete')
801 rename = opts.get('rename')
801 rename = opts.get('rename')
802 inactive = opts.get('inactive')
802 inactive = opts.get('inactive')
803
803
804 hexfn = ui.debugflag and hex or short
804 hexfn = ui.debugflag and hex or short
805 marks = repo._bookmarks
805 marks = repo._bookmarks
806 cur = repo.changectx('.').node()
806 cur = repo.changectx('.').node()
807
807
808 def checkformat(mark):
808 def checkformat(mark):
809 mark = mark.strip()
809 mark = mark.strip()
810 if not mark:
810 if not mark:
811 raise util.Abort(_("bookmark names cannot consist entirely of "
811 raise util.Abort(_("bookmark names cannot consist entirely of "
812 "whitespace"))
812 "whitespace"))
813 scmutil.checknewlabel(repo, mark, 'bookmark')
813 scmutil.checknewlabel(repo, mark, 'bookmark')
814 return mark
814 return mark
815
815
816 def checkconflict(repo, mark, force=False, target=None):
816 def checkconflict(repo, mark, force=False, target=None):
817 if mark in marks and not force:
817 if mark in marks and not force:
818 if target:
818 if target:
819 if marks[mark] == target and target == cur:
819 if marks[mark] == target and target == cur:
820 # re-activating a bookmark
820 # re-activating a bookmark
821 return
821 return
822 anc = repo.changelog.ancestors([repo[target].rev()])
822 anc = repo.changelog.ancestors([repo[target].rev()])
823 bmctx = repo[marks[mark]]
823 bmctx = repo[marks[mark]]
824 divs = [repo[b].node() for b in marks
824 divs = [repo[b].node() for b in marks
825 if b.split('@', 1)[0] == mark.split('@', 1)[0]]
825 if b.split('@', 1)[0] == mark.split('@', 1)[0]]
826
826
827 # allow resolving a single divergent bookmark even if moving
827 # allow resolving a single divergent bookmark even if moving
828 # the bookmark across branches when a revision is specified
828 # the bookmark across branches when a revision is specified
829 # that contains a divergent bookmark
829 # that contains a divergent bookmark
830 if bmctx.rev() not in anc and target in divs:
830 if bmctx.rev() not in anc and target in divs:
831 bookmarks.deletedivergent(repo, [target], mark)
831 bookmarks.deletedivergent(repo, [target], mark)
832 return
832 return
833
833
834 deletefrom = [b for b in divs
834 deletefrom = [b for b in divs
835 if repo[b].rev() in anc or b == target]
835 if repo[b].rev() in anc or b == target]
836 bookmarks.deletedivergent(repo, deletefrom, mark)
836 bookmarks.deletedivergent(repo, deletefrom, mark)
837 if bmctx.rev() in anc:
837 if bmctx.rev() in anc:
838 ui.status(_("moving bookmark '%s' forward from %s\n") %
838 ui.status(_("moving bookmark '%s' forward from %s\n") %
839 (mark, short(bmctx.node())))
839 (mark, short(bmctx.node())))
840 return
840 return
841 raise util.Abort(_("bookmark '%s' already exists "
841 raise util.Abort(_("bookmark '%s' already exists "
842 "(use -f to force)") % mark)
842 "(use -f to force)") % mark)
843 if ((mark in repo.branchmap() or mark == repo.dirstate.branch())
843 if ((mark in repo.branchmap() or mark == repo.dirstate.branch())
844 and not force):
844 and not force):
845 raise util.Abort(
845 raise util.Abort(
846 _("a bookmark cannot have the name of an existing branch"))
846 _("a bookmark cannot have the name of an existing branch"))
847
847
848 if delete and rename:
848 if delete and rename:
849 raise util.Abort(_("--delete and --rename are incompatible"))
849 raise util.Abort(_("--delete and --rename are incompatible"))
850 if delete and rev:
850 if delete and rev:
851 raise util.Abort(_("--rev is incompatible with --delete"))
851 raise util.Abort(_("--rev is incompatible with --delete"))
852 if rename and rev:
852 if rename and rev:
853 raise util.Abort(_("--rev is incompatible with --rename"))
853 raise util.Abort(_("--rev is incompatible with --rename"))
854 if not names and (delete or rev):
854 if not names and (delete or rev):
855 raise util.Abort(_("bookmark name required"))
855 raise util.Abort(_("bookmark name required"))
856
856
857 if delete:
857 if delete:
858 for mark in names:
858 for mark in names:
859 if mark not in marks:
859 if mark not in marks:
860 raise util.Abort(_("bookmark '%s' does not exist") % mark)
860 raise util.Abort(_("bookmark '%s' does not exist") % mark)
861 if mark == repo._bookmarkcurrent:
861 if mark == repo._bookmarkcurrent:
862 bookmarks.setcurrent(repo, None)
862 bookmarks.setcurrent(repo, None)
863 del marks[mark]
863 del marks[mark]
864 marks.write()
864 marks.write()
865
865
866 elif rename:
866 elif rename:
867 if not names:
867 if not names:
868 raise util.Abort(_("new bookmark name required"))
868 raise util.Abort(_("new bookmark name required"))
869 elif len(names) > 1:
869 elif len(names) > 1:
870 raise util.Abort(_("only one new bookmark name allowed"))
870 raise util.Abort(_("only one new bookmark name allowed"))
871 mark = checkformat(names[0])
871 mark = checkformat(names[0])
872 if rename not in marks:
872 if rename not in marks:
873 raise util.Abort(_("bookmark '%s' does not exist") % rename)
873 raise util.Abort(_("bookmark '%s' does not exist") % rename)
874 checkconflict(repo, mark, force)
874 checkconflict(repo, mark, force)
875 marks[mark] = marks[rename]
875 marks[mark] = marks[rename]
876 if repo._bookmarkcurrent == rename and not inactive:
876 if repo._bookmarkcurrent == rename and not inactive:
877 bookmarks.setcurrent(repo, mark)
877 bookmarks.setcurrent(repo, mark)
878 del marks[rename]
878 del marks[rename]
879 marks.write()
879 marks.write()
880
880
881 elif names:
881 elif names:
882 newact = None
882 newact = None
883 for mark in names:
883 for mark in names:
884 mark = checkformat(mark)
884 mark = checkformat(mark)
885 if newact is None:
885 if newact is None:
886 newact = mark
886 newact = mark
887 if inactive and mark == repo._bookmarkcurrent:
887 if inactive and mark == repo._bookmarkcurrent:
888 bookmarks.setcurrent(repo, None)
888 bookmarks.setcurrent(repo, None)
889 return
889 return
890 tgt = cur
890 tgt = cur
891 if rev:
891 if rev:
892 tgt = scmutil.revsingle(repo, rev).node()
892 tgt = scmutil.revsingle(repo, rev).node()
893 checkconflict(repo, mark, force, tgt)
893 checkconflict(repo, mark, force, tgt)
894 marks[mark] = tgt
894 marks[mark] = tgt
895 if not inactive and cur == marks[newact] and not rev:
895 if not inactive and cur == marks[newact] and not rev:
896 bookmarks.setcurrent(repo, newact)
896 bookmarks.setcurrent(repo, newact)
897 elif cur != tgt and newact == repo._bookmarkcurrent:
897 elif cur != tgt and newact == repo._bookmarkcurrent:
898 bookmarks.setcurrent(repo, None)
898 bookmarks.setcurrent(repo, None)
899 marks.write()
899 marks.write()
900
900
901 # Same message whether trying to deactivate the current bookmark (-i
901 # Same message whether trying to deactivate the current bookmark (-i
902 # with no NAME) or listing bookmarks
902 # with no NAME) or listing bookmarks
903 elif len(marks) == 0:
903 elif len(marks) == 0:
904 ui.status(_("no bookmarks set\n"))
904 ui.status(_("no bookmarks set\n"))
905
905
906 elif inactive:
906 elif inactive:
907 if not repo._bookmarkcurrent:
907 if not repo._bookmarkcurrent:
908 ui.status(_("no active bookmark\n"))
908 ui.status(_("no active bookmark\n"))
909 else:
909 else:
910 bookmarks.setcurrent(repo, None)
910 bookmarks.setcurrent(repo, None)
911
911
912 else: # show bookmarks
912 else: # show bookmarks
913 for bmark, n in sorted(marks.iteritems()):
913 for bmark, n in sorted(marks.iteritems()):
914 current = repo._bookmarkcurrent
914 current = repo._bookmarkcurrent
915 if bmark == current:
915 if bmark == current:
916 prefix, label = '*', 'bookmarks.current'
916 prefix, label = '*', 'bookmarks.current'
917 else:
917 else:
918 prefix, label = ' ', ''
918 prefix, label = ' ', ''
919
919
920 if ui.quiet:
920 if ui.quiet:
921 ui.write("%s\n" % bmark, label=label)
921 ui.write("%s\n" % bmark, label=label)
922 else:
922 else:
923 ui.write(" %s %-25s %d:%s\n" % (
923 ui.write(" %s %-25s %d:%s\n" % (
924 prefix, bmark, repo.changelog.rev(n), hexfn(n)),
924 prefix, bmark, repo.changelog.rev(n), hexfn(n)),
925 label=label)
925 label=label)
926
926
927 @command('branch',
927 @command('branch',
928 [('f', 'force', None,
928 [('f', 'force', None,
929 _('set branch name even if it shadows an existing branch')),
929 _('set branch name even if it shadows an existing branch')),
930 ('C', 'clean', None, _('reset branch name to parent branch name'))],
930 ('C', 'clean', None, _('reset branch name to parent branch name'))],
931 _('[-fC] [NAME]'))
931 _('[-fC] [NAME]'))
932 def branch(ui, repo, label=None, **opts):
932 def branch(ui, repo, label=None, **opts):
933 """set or show the current branch name
933 """set or show the current branch name
934
934
935 .. note::
935 .. note::
936 Branch names are permanent and global. Use :hg:`bookmark` to create a
936 Branch names are permanent and global. Use :hg:`bookmark` to create a
937 light-weight bookmark instead. See :hg:`help glossary` for more
937 light-weight bookmark instead. See :hg:`help glossary` for more
938 information about named branches and bookmarks.
938 information about named branches and bookmarks.
939
939
940 With no argument, show the current branch name. With one argument,
940 With no argument, show the current branch name. With one argument,
941 set the working directory branch name (the branch will not exist
941 set the working directory branch name (the branch will not exist
942 in the repository until the next commit). Standard practice
942 in the repository until the next commit). Standard practice
943 recommends that primary development take place on the 'default'
943 recommends that primary development take place on the 'default'
944 branch.
944 branch.
945
945
946 Unless -f/--force is specified, branch will not let you set a
946 Unless -f/--force is specified, branch will not let you set a
947 branch name that already exists, even if it's inactive.
947 branch name that already exists, even if it's inactive.
948
948
949 Use -C/--clean to reset the working directory branch to that of
949 Use -C/--clean to reset the working directory branch to that of
950 the parent of the working directory, negating a previous branch
950 the parent of the working directory, negating a previous branch
951 change.
951 change.
952
952
953 Use the command :hg:`update` to switch to an existing branch. Use
953 Use the command :hg:`update` to switch to an existing branch. Use
954 :hg:`commit --close-branch` to mark this branch as closed.
954 :hg:`commit --close-branch` to mark this branch as closed.
955
955
956 Returns 0 on success.
956 Returns 0 on success.
957 """
957 """
958 if label:
958 if label:
959 label = label.strip()
959 label = label.strip()
960
960
961 if not opts.get('clean') and not label:
961 if not opts.get('clean') and not label:
962 ui.write("%s\n" % repo.dirstate.branch())
962 ui.write("%s\n" % repo.dirstate.branch())
963 return
963 return
964
964
965 wlock = repo.wlock()
965 wlock = repo.wlock()
966 try:
966 try:
967 if opts.get('clean'):
967 if opts.get('clean'):
968 label = repo[None].p1().branch()
968 label = repo[None].p1().branch()
969 repo.dirstate.setbranch(label)
969 repo.dirstate.setbranch(label)
970 ui.status(_('reset working directory to branch %s\n') % label)
970 ui.status(_('reset working directory to branch %s\n') % label)
971 elif label:
971 elif label:
972 if not opts.get('force') and label in repo.branchmap():
972 if not opts.get('force') and label in repo.branchmap():
973 if label not in [p.branch() for p in repo.parents()]:
973 if label not in [p.branch() for p in repo.parents()]:
974 raise util.Abort(_('a branch of the same name already'
974 raise util.Abort(_('a branch of the same name already'
975 ' exists'),
975 ' exists'),
976 # i18n: "it" refers to an existing branch
976 # i18n: "it" refers to an existing branch
977 hint=_("use 'hg update' to switch to it"))
977 hint=_("use 'hg update' to switch to it"))
978 scmutil.checknewlabel(repo, label, 'branch')
978 scmutil.checknewlabel(repo, label, 'branch')
979 repo.dirstate.setbranch(label)
979 repo.dirstate.setbranch(label)
980 ui.status(_('marked working directory as branch %s\n') % label)
980 ui.status(_('marked working directory as branch %s\n') % label)
981 ui.status(_('(branches are permanent and global, '
981 ui.status(_('(branches are permanent and global, '
982 'did you want a bookmark?)\n'))
982 'did you want a bookmark?)\n'))
983 finally:
983 finally:
984 wlock.release()
984 wlock.release()
985
985
986 @command('branches',
986 @command('branches',
987 [('a', 'active', False, _('show only branches that have unmerged heads')),
987 [('a', 'active', False, _('show only branches that have unmerged heads')),
988 ('c', 'closed', False, _('show normal and closed branches'))],
988 ('c', 'closed', False, _('show normal and closed branches'))],
989 _('[-ac]'))
989 _('[-ac]'))
990 def branches(ui, repo, active=False, closed=False):
990 def branches(ui, repo, active=False, closed=False):
991 """list repository named branches
991 """list repository named branches
992
992
993 List the repository's named branches, indicating which ones are
993 List the repository's named branches, indicating which ones are
994 inactive. If -c/--closed is specified, also list branches which have
994 inactive. If -c/--closed is specified, also list branches which have
995 been marked closed (see :hg:`commit --close-branch`).
995 been marked closed (see :hg:`commit --close-branch`).
996
996
997 If -a/--active is specified, only show active branches. A branch
997 If -a/--active is specified, only show active branches. A branch
998 is considered active if it contains repository heads.
998 is considered active if it contains repository heads.
999
999
1000 Use the command :hg:`update` to switch to an existing branch.
1000 Use the command :hg:`update` to switch to an existing branch.
1001
1001
1002 Returns 0.
1002 Returns 0.
1003 """
1003 """
1004
1004
1005 hexfunc = ui.debugflag and hex or short
1005 hexfunc = ui.debugflag and hex or short
1006
1006
1007 activebranches = set([repo[n].branch() for n in repo.heads()])
1007 activebranches = set([repo[n].branch() for n in repo.heads()])
1008 branches = []
1008 branches = []
1009 for tag, heads in repo.branchmap().iteritems():
1009 for tag, heads in repo.branchmap().iteritems():
1010 for h in reversed(heads):
1010 for h in reversed(heads):
1011 ctx = repo[h]
1011 ctx = repo[h]
1012 isopen = not ctx.closesbranch()
1012 isopen = not ctx.closesbranch()
1013 if isopen:
1013 if isopen:
1014 tip = ctx
1014 tip = ctx
1015 break
1015 break
1016 else:
1016 else:
1017 tip = repo[heads[-1]]
1017 tip = repo[heads[-1]]
1018 isactive = tag in activebranches and isopen
1018 isactive = tag in activebranches and isopen
1019 branches.append((tip, isactive, isopen))
1019 branches.append((tip, isactive, isopen))
1020 branches.sort(key=lambda i: (i[1], i[0].rev(), i[0].branch(), i[2]),
1020 branches.sort(key=lambda i: (i[1], i[0].rev(), i[0].branch(), i[2]),
1021 reverse=True)
1021 reverse=True)
1022
1022
1023 for ctx, isactive, isopen in branches:
1023 for ctx, isactive, isopen in branches:
1024 if (not active) or isactive:
1024 if (not active) or isactive:
1025 if isactive:
1025 if isactive:
1026 label = 'branches.active'
1026 label = 'branches.active'
1027 notice = ''
1027 notice = ''
1028 elif not isopen:
1028 elif not isopen:
1029 if not closed:
1029 if not closed:
1030 continue
1030 continue
1031 label = 'branches.closed'
1031 label = 'branches.closed'
1032 notice = _(' (closed)')
1032 notice = _(' (closed)')
1033 else:
1033 else:
1034 label = 'branches.inactive'
1034 label = 'branches.inactive'
1035 notice = _(' (inactive)')
1035 notice = _(' (inactive)')
1036 if ctx.branch() == repo.dirstate.branch():
1036 if ctx.branch() == repo.dirstate.branch():
1037 label = 'branches.current'
1037 label = 'branches.current'
1038 rev = str(ctx.rev()).rjust(31 - encoding.colwidth(ctx.branch()))
1038 rev = str(ctx.rev()).rjust(31 - encoding.colwidth(ctx.branch()))
1039 rev = ui.label('%s:%s' % (rev, hexfunc(ctx.node())),
1039 rev = ui.label('%s:%s' % (rev, hexfunc(ctx.node())),
1040 'log.changeset changeset.%s' % ctx.phasestr())
1040 'log.changeset changeset.%s' % ctx.phasestr())
1041 tag = ui.label(ctx.branch(), label)
1041 tag = ui.label(ctx.branch(), label)
1042 if ui.quiet:
1042 if ui.quiet:
1043 ui.write("%s\n" % tag)
1043 ui.write("%s\n" % tag)
1044 else:
1044 else:
1045 ui.write("%s %s%s\n" % (tag, rev, notice))
1045 ui.write("%s %s%s\n" % (tag, rev, notice))
1046
1046
1047 @command('bundle',
1047 @command('bundle',
1048 [('f', 'force', None, _('run even when the destination is unrelated')),
1048 [('f', 'force', None, _('run even when the destination is unrelated')),
1049 ('r', 'rev', [], _('a changeset intended to be added to the destination'),
1049 ('r', 'rev', [], _('a changeset intended to be added to the destination'),
1050 _('REV')),
1050 _('REV')),
1051 ('b', 'branch', [], _('a specific branch you would like to bundle'),
1051 ('b', 'branch', [], _('a specific branch you would like to bundle'),
1052 _('BRANCH')),
1052 _('BRANCH')),
1053 ('', 'base', [],
1053 ('', 'base', [],
1054 _('a base changeset assumed to be available at the destination'),
1054 _('a base changeset assumed to be available at the destination'),
1055 _('REV')),
1055 _('REV')),
1056 ('a', 'all', None, _('bundle all changesets in the repository')),
1056 ('a', 'all', None, _('bundle all changesets in the repository')),
1057 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE')),
1057 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE')),
1058 ] + remoteopts,
1058 ] + remoteopts,
1059 _('[-f] [-t TYPE] [-a] [-r REV]... [--base REV]... FILE [DEST]'))
1059 _('[-f] [-t TYPE] [-a] [-r REV]... [--base REV]... FILE [DEST]'))
1060 def bundle(ui, repo, fname, dest=None, **opts):
1060 def bundle(ui, repo, fname, dest=None, **opts):
1061 """create a changegroup file
1061 """create a changegroup file
1062
1062
1063 Generate a compressed changegroup file collecting changesets not
1063 Generate a compressed changegroup file collecting changesets not
1064 known to be in another repository.
1064 known to be in another repository.
1065
1065
1066 If you omit the destination repository, then hg assumes the
1066 If you omit the destination repository, then hg assumes the
1067 destination will have all the nodes you specify with --base
1067 destination will have all the nodes you specify with --base
1068 parameters. To create a bundle containing all changesets, use
1068 parameters. To create a bundle containing all changesets, use
1069 -a/--all (or --base null).
1069 -a/--all (or --base null).
1070
1070
1071 You can change compression method with the -t/--type option.
1071 You can change compression method with the -t/--type option.
1072 The available compression methods are: none, bzip2, and
1072 The available compression methods are: none, bzip2, and
1073 gzip (by default, bundles are compressed using bzip2).
1073 gzip (by default, bundles are compressed using bzip2).
1074
1074
1075 The bundle file can then be transferred using conventional means
1075 The bundle file can then be transferred using conventional means
1076 and applied to another repository with the unbundle or pull
1076 and applied to another repository with the unbundle or pull
1077 command. This is useful when direct push and pull are not
1077 command. This is useful when direct push and pull are not
1078 available or when exporting an entire repository is undesirable.
1078 available or when exporting an entire repository is undesirable.
1079
1079
1080 Applying bundles preserves all changeset contents including
1080 Applying bundles preserves all changeset contents including
1081 permissions, copy/rename information, and revision history.
1081 permissions, copy/rename information, and revision history.
1082
1082
1083 Returns 0 on success, 1 if no changes found.
1083 Returns 0 on success, 1 if no changes found.
1084 """
1084 """
1085 revs = None
1085 revs = None
1086 if 'rev' in opts:
1086 if 'rev' in opts:
1087 revs = scmutil.revrange(repo, opts['rev'])
1087 revs = scmutil.revrange(repo, opts['rev'])
1088
1088
1089 bundletype = opts.get('type', 'bzip2').lower()
1089 bundletype = opts.get('type', 'bzip2').lower()
1090 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
1090 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
1091 bundletype = btypes.get(bundletype)
1091 bundletype = btypes.get(bundletype)
1092 if bundletype not in changegroup.bundletypes:
1092 if bundletype not in changegroup.bundletypes:
1093 raise util.Abort(_('unknown bundle type specified with --type'))
1093 raise util.Abort(_('unknown bundle type specified with --type'))
1094
1094
1095 if opts.get('all'):
1095 if opts.get('all'):
1096 base = ['null']
1096 base = ['null']
1097 else:
1097 else:
1098 base = scmutil.revrange(repo, opts.get('base'))
1098 base = scmutil.revrange(repo, opts.get('base'))
1099 # TODO: get desired bundlecaps from command line.
1100 bundlecaps = None
1099 if base:
1101 if base:
1100 if dest:
1102 if dest:
1101 raise util.Abort(_("--base is incompatible with specifying "
1103 raise util.Abort(_("--base is incompatible with specifying "
1102 "a destination"))
1104 "a destination"))
1103 common = [repo.lookup(rev) for rev in base]
1105 common = [repo.lookup(rev) for rev in base]
1104 heads = revs and map(repo.lookup, revs) or revs
1106 heads = revs and map(repo.lookup, revs) or revs
1105 cg = repo.getbundle('bundle', heads=heads, common=common)
1107 cg = repo.getbundle('bundle', heads=heads, common=common,
1108 bundlecaps=bundlecaps)
1106 outgoing = None
1109 outgoing = None
1107 else:
1110 else:
1108 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1111 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1109 dest, branches = hg.parseurl(dest, opts.get('branch'))
1112 dest, branches = hg.parseurl(dest, opts.get('branch'))
1110 other = hg.peer(repo, opts, dest)
1113 other = hg.peer(repo, opts, dest)
1111 revs, checkout = hg.addbranchrevs(repo, repo, branches, revs)
1114 revs, checkout = hg.addbranchrevs(repo, repo, branches, revs)
1112 heads = revs and map(repo.lookup, revs) or revs
1115 heads = revs and map(repo.lookup, revs) or revs
1113 outgoing = discovery.findcommonoutgoing(repo, other,
1116 outgoing = discovery.findcommonoutgoing(repo, other,
1114 onlyheads=heads,
1117 onlyheads=heads,
1115 force=opts.get('force'),
1118 force=opts.get('force'),
1116 portable=True)
1119 portable=True)
1117 cg = repo.getlocalbundle('bundle', outgoing)
1120 cg = repo.getlocalbundle('bundle', outgoing, bundlecaps)
1118 if not cg:
1121 if not cg:
1119 scmutil.nochangesfound(ui, repo, outgoing and outgoing.excluded)
1122 scmutil.nochangesfound(ui, repo, outgoing and outgoing.excluded)
1120 return 1
1123 return 1
1121
1124
1122 changegroup.writebundle(cg, fname, bundletype)
1125 changegroup.writebundle(cg, fname, bundletype)
1123
1126
1124 @command('cat',
1127 @command('cat',
1125 [('o', 'output', '',
1128 [('o', 'output', '',
1126 _('print output to file with formatted name'), _('FORMAT')),
1129 _('print output to file with formatted name'), _('FORMAT')),
1127 ('r', 'rev', '', _('print the given revision'), _('REV')),
1130 ('r', 'rev', '', _('print the given revision'), _('REV')),
1128 ('', 'decode', None, _('apply any matching decode filter')),
1131 ('', 'decode', None, _('apply any matching decode filter')),
1129 ] + walkopts,
1132 ] + walkopts,
1130 _('[OPTION]... FILE...'))
1133 _('[OPTION]... FILE...'))
1131 def cat(ui, repo, file1, *pats, **opts):
1134 def cat(ui, repo, file1, *pats, **opts):
1132 """output the current or given revision of files
1135 """output the current or given revision of files
1133
1136
1134 Print the specified files as they were at the given revision. If
1137 Print the specified files as they were at the given revision. If
1135 no revision is given, the parent of the working directory is used,
1138 no revision is given, the parent of the working directory is used,
1136 or tip if no revision is checked out.
1139 or tip if no revision is checked out.
1137
1140
1138 Output may be to a file, in which case the name of the file is
1141 Output may be to a file, in which case the name of the file is
1139 given using a format string. The formatting rules are the same as
1142 given using a format string. The formatting rules are the same as
1140 for the export command, with the following additions:
1143 for the export command, with the following additions:
1141
1144
1142 :``%s``: basename of file being printed
1145 :``%s``: basename of file being printed
1143 :``%d``: dirname of file being printed, or '.' if in repository root
1146 :``%d``: dirname of file being printed, or '.' if in repository root
1144 :``%p``: root-relative path name of file being printed
1147 :``%p``: root-relative path name of file being printed
1145
1148
1146 Returns 0 on success.
1149 Returns 0 on success.
1147 """
1150 """
1148 ctx = scmutil.revsingle(repo, opts.get('rev'))
1151 ctx = scmutil.revsingle(repo, opts.get('rev'))
1149 err = 1
1152 err = 1
1150 m = scmutil.match(ctx, (file1,) + pats, opts)
1153 m = scmutil.match(ctx, (file1,) + pats, opts)
1151 for abs in ctx.walk(m):
1154 for abs in ctx.walk(m):
1152 fp = cmdutil.makefileobj(repo, opts.get('output'), ctx.node(),
1155 fp = cmdutil.makefileobj(repo, opts.get('output'), ctx.node(),
1153 pathname=abs)
1156 pathname=abs)
1154 data = ctx[abs].data()
1157 data = ctx[abs].data()
1155 if opts.get('decode'):
1158 if opts.get('decode'):
1156 data = repo.wwritedata(abs, data)
1159 data = repo.wwritedata(abs, data)
1157 fp.write(data)
1160 fp.write(data)
1158 fp.close()
1161 fp.close()
1159 err = 0
1162 err = 0
1160 return err
1163 return err
1161
1164
1162 @command('^clone',
1165 @command('^clone',
1163 [('U', 'noupdate', None,
1166 [('U', 'noupdate', None,
1164 _('the clone will include an empty working copy (only a repository)')),
1167 _('the clone will include an empty working copy (only a repository)')),
1165 ('u', 'updaterev', '', _('revision, tag or branch to check out'), _('REV')),
1168 ('u', 'updaterev', '', _('revision, tag or branch to check out'), _('REV')),
1166 ('r', 'rev', [], _('include the specified changeset'), _('REV')),
1169 ('r', 'rev', [], _('include the specified changeset'), _('REV')),
1167 ('b', 'branch', [], _('clone only the specified branch'), _('BRANCH')),
1170 ('b', 'branch', [], _('clone only the specified branch'), _('BRANCH')),
1168 ('', 'pull', None, _('use pull protocol to copy metadata')),
1171 ('', 'pull', None, _('use pull protocol to copy metadata')),
1169 ('', 'uncompressed', None, _('use uncompressed transfer (fast over LAN)')),
1172 ('', 'uncompressed', None, _('use uncompressed transfer (fast over LAN)')),
1170 ] + remoteopts,
1173 ] + remoteopts,
1171 _('[OPTION]... SOURCE [DEST]'))
1174 _('[OPTION]... SOURCE [DEST]'))
1172 def clone(ui, source, dest=None, **opts):
1175 def clone(ui, source, dest=None, **opts):
1173 """make a copy of an existing repository
1176 """make a copy of an existing repository
1174
1177
1175 Create a copy of an existing repository in a new directory.
1178 Create a copy of an existing repository in a new directory.
1176
1179
1177 If no destination directory name is specified, it defaults to the
1180 If no destination directory name is specified, it defaults to the
1178 basename of the source.
1181 basename of the source.
1179
1182
1180 The location of the source is added to the new repository's
1183 The location of the source is added to the new repository's
1181 ``.hg/hgrc`` file, as the default to be used for future pulls.
1184 ``.hg/hgrc`` file, as the default to be used for future pulls.
1182
1185
1183 Only local paths and ``ssh://`` URLs are supported as
1186 Only local paths and ``ssh://`` URLs are supported as
1184 destinations. For ``ssh://`` destinations, no working directory or
1187 destinations. For ``ssh://`` destinations, no working directory or
1185 ``.hg/hgrc`` will be created on the remote side.
1188 ``.hg/hgrc`` will be created on the remote side.
1186
1189
1187 To pull only a subset of changesets, specify one or more revisions
1190 To pull only a subset of changesets, specify one or more revisions
1188 identifiers with -r/--rev or branches with -b/--branch. The
1191 identifiers with -r/--rev or branches with -b/--branch. The
1189 resulting clone will contain only the specified changesets and
1192 resulting clone will contain only the specified changesets and
1190 their ancestors. These options (or 'clone src#rev dest') imply
1193 their ancestors. These options (or 'clone src#rev dest') imply
1191 --pull, even for local source repositories. Note that specifying a
1194 --pull, even for local source repositories. Note that specifying a
1192 tag will include the tagged changeset but not the changeset
1195 tag will include the tagged changeset but not the changeset
1193 containing the tag.
1196 containing the tag.
1194
1197
1195 If the source repository has a bookmark called '@' set, that
1198 If the source repository has a bookmark called '@' set, that
1196 revision will be checked out in the new repository by default.
1199 revision will be checked out in the new repository by default.
1197
1200
1198 To check out a particular version, use -u/--update, or
1201 To check out a particular version, use -u/--update, or
1199 -U/--noupdate to create a clone with no working directory.
1202 -U/--noupdate to create a clone with no working directory.
1200
1203
1201 .. container:: verbose
1204 .. container:: verbose
1202
1205
1203 For efficiency, hardlinks are used for cloning whenever the
1206 For efficiency, hardlinks are used for cloning whenever the
1204 source and destination are on the same filesystem (note this
1207 source and destination are on the same filesystem (note this
1205 applies only to the repository data, not to the working
1208 applies only to the repository data, not to the working
1206 directory). Some filesystems, such as AFS, implement hardlinking
1209 directory). Some filesystems, such as AFS, implement hardlinking
1207 incorrectly, but do not report errors. In these cases, use the
1210 incorrectly, but do not report errors. In these cases, use the
1208 --pull option to avoid hardlinking.
1211 --pull option to avoid hardlinking.
1209
1212
1210 In some cases, you can clone repositories and the working
1213 In some cases, you can clone repositories and the working
1211 directory using full hardlinks with ::
1214 directory using full hardlinks with ::
1212
1215
1213 $ cp -al REPO REPOCLONE
1216 $ cp -al REPO REPOCLONE
1214
1217
1215 This is the fastest way to clone, but it is not always safe. The
1218 This is the fastest way to clone, but it is not always safe. The
1216 operation is not atomic (making sure REPO is not modified during
1219 operation is not atomic (making sure REPO is not modified during
1217 the operation is up to you) and you have to make sure your
1220 the operation is up to you) and you have to make sure your
1218 editor breaks hardlinks (Emacs and most Linux Kernel tools do
1221 editor breaks hardlinks (Emacs and most Linux Kernel tools do
1219 so). Also, this is not compatible with certain extensions that
1222 so). Also, this is not compatible with certain extensions that
1220 place their metadata under the .hg directory, such as mq.
1223 place their metadata under the .hg directory, such as mq.
1221
1224
1222 Mercurial will update the working directory to the first applicable
1225 Mercurial will update the working directory to the first applicable
1223 revision from this list:
1226 revision from this list:
1224
1227
1225 a) null if -U or the source repository has no changesets
1228 a) null if -U or the source repository has no changesets
1226 b) if -u . and the source repository is local, the first parent of
1229 b) if -u . and the source repository is local, the first parent of
1227 the source repository's working directory
1230 the source repository's working directory
1228 c) the changeset specified with -u (if a branch name, this means the
1231 c) the changeset specified with -u (if a branch name, this means the
1229 latest head of that branch)
1232 latest head of that branch)
1230 d) the changeset specified with -r
1233 d) the changeset specified with -r
1231 e) the tipmost head specified with -b
1234 e) the tipmost head specified with -b
1232 f) the tipmost head specified with the url#branch source syntax
1235 f) the tipmost head specified with the url#branch source syntax
1233 g) the revision marked with the '@' bookmark, if present
1236 g) the revision marked with the '@' bookmark, if present
1234 h) the tipmost head of the default branch
1237 h) the tipmost head of the default branch
1235 i) tip
1238 i) tip
1236
1239
1237 Examples:
1240 Examples:
1238
1241
1239 - clone a remote repository to a new directory named hg/::
1242 - clone a remote repository to a new directory named hg/::
1240
1243
1241 hg clone http://selenic.com/hg
1244 hg clone http://selenic.com/hg
1242
1245
1243 - create a lightweight local clone::
1246 - create a lightweight local clone::
1244
1247
1245 hg clone project/ project-feature/
1248 hg clone project/ project-feature/
1246
1249
1247 - clone from an absolute path on an ssh server (note double-slash)::
1250 - clone from an absolute path on an ssh server (note double-slash)::
1248
1251
1249 hg clone ssh://user@server//home/projects/alpha/
1252 hg clone ssh://user@server//home/projects/alpha/
1250
1253
1251 - do a high-speed clone over a LAN while checking out a
1254 - do a high-speed clone over a LAN while checking out a
1252 specified version::
1255 specified version::
1253
1256
1254 hg clone --uncompressed http://server/repo -u 1.5
1257 hg clone --uncompressed http://server/repo -u 1.5
1255
1258
1256 - create a repository without changesets after a particular revision::
1259 - create a repository without changesets after a particular revision::
1257
1260
1258 hg clone -r 04e544 experimental/ good/
1261 hg clone -r 04e544 experimental/ good/
1259
1262
1260 - clone (and track) a particular named branch::
1263 - clone (and track) a particular named branch::
1261
1264
1262 hg clone http://selenic.com/hg#stable
1265 hg clone http://selenic.com/hg#stable
1263
1266
1264 See :hg:`help urls` for details on specifying URLs.
1267 See :hg:`help urls` for details on specifying URLs.
1265
1268
1266 Returns 0 on success.
1269 Returns 0 on success.
1267 """
1270 """
1268 if opts.get('noupdate') and opts.get('updaterev'):
1271 if opts.get('noupdate') and opts.get('updaterev'):
1269 raise util.Abort(_("cannot specify both --noupdate and --updaterev"))
1272 raise util.Abort(_("cannot specify both --noupdate and --updaterev"))
1270
1273
1271 r = hg.clone(ui, opts, source, dest,
1274 r = hg.clone(ui, opts, source, dest,
1272 pull=opts.get('pull'),
1275 pull=opts.get('pull'),
1273 stream=opts.get('uncompressed'),
1276 stream=opts.get('uncompressed'),
1274 rev=opts.get('rev'),
1277 rev=opts.get('rev'),
1275 update=opts.get('updaterev') or not opts.get('noupdate'),
1278 update=opts.get('updaterev') or not opts.get('noupdate'),
1276 branch=opts.get('branch'))
1279 branch=opts.get('branch'))
1277
1280
1278 return r is None
1281 return r is None
1279
1282
1280 @command('^commit|ci',
1283 @command('^commit|ci',
1281 [('A', 'addremove', None,
1284 [('A', 'addremove', None,
1282 _('mark new/missing files as added/removed before committing')),
1285 _('mark new/missing files as added/removed before committing')),
1283 ('', 'close-branch', None,
1286 ('', 'close-branch', None,
1284 _('mark a branch as closed, hiding it from the branch list')),
1287 _('mark a branch as closed, hiding it from the branch list')),
1285 ('', 'amend', None, _('amend the parent of the working dir')),
1288 ('', 'amend', None, _('amend the parent of the working dir')),
1286 ] + walkopts + commitopts + commitopts2 + subrepoopts,
1289 ] + walkopts + commitopts + commitopts2 + subrepoopts,
1287 _('[OPTION]... [FILE]...'))
1290 _('[OPTION]... [FILE]...'))
1288 def commit(ui, repo, *pats, **opts):
1291 def commit(ui, repo, *pats, **opts):
1289 """commit the specified files or all outstanding changes
1292 """commit the specified files or all outstanding changes
1290
1293
1291 Commit changes to the given files into the repository. Unlike a
1294 Commit changes to the given files into the repository. Unlike a
1292 centralized SCM, this operation is a local operation. See
1295 centralized SCM, this operation is a local operation. See
1293 :hg:`push` for a way to actively distribute your changes.
1296 :hg:`push` for a way to actively distribute your changes.
1294
1297
1295 If a list of files is omitted, all changes reported by :hg:`status`
1298 If a list of files is omitted, all changes reported by :hg:`status`
1296 will be committed.
1299 will be committed.
1297
1300
1298 If you are committing the result of a merge, do not provide any
1301 If you are committing the result of a merge, do not provide any
1299 filenames or -I/-X filters.
1302 filenames or -I/-X filters.
1300
1303
1301 If no commit message is specified, Mercurial starts your
1304 If no commit message is specified, Mercurial starts your
1302 configured editor where you can enter a message. In case your
1305 configured editor where you can enter a message. In case your
1303 commit fails, you will find a backup of your message in
1306 commit fails, you will find a backup of your message in
1304 ``.hg/last-message.txt``.
1307 ``.hg/last-message.txt``.
1305
1308
1306 The --amend flag can be used to amend the parent of the
1309 The --amend flag can be used to amend the parent of the
1307 working directory with a new commit that contains the changes
1310 working directory with a new commit that contains the changes
1308 in the parent in addition to those currently reported by :hg:`status`,
1311 in the parent in addition to those currently reported by :hg:`status`,
1309 if there are any. The old commit is stored in a backup bundle in
1312 if there are any. The old commit is stored in a backup bundle in
1310 ``.hg/strip-backup`` (see :hg:`help bundle` and :hg:`help unbundle`
1313 ``.hg/strip-backup`` (see :hg:`help bundle` and :hg:`help unbundle`
1311 on how to restore it).
1314 on how to restore it).
1312
1315
1313 Message, user and date are taken from the amended commit unless
1316 Message, user and date are taken from the amended commit unless
1314 specified. When a message isn't specified on the command line,
1317 specified. When a message isn't specified on the command line,
1315 the editor will open with the message of the amended commit.
1318 the editor will open with the message of the amended commit.
1316
1319
1317 It is not possible to amend public changesets (see :hg:`help phases`)
1320 It is not possible to amend public changesets (see :hg:`help phases`)
1318 or changesets that have children.
1321 or changesets that have children.
1319
1322
1320 See :hg:`help dates` for a list of formats valid for -d/--date.
1323 See :hg:`help dates` for a list of formats valid for -d/--date.
1321
1324
1322 Returns 0 on success, 1 if nothing changed.
1325 Returns 0 on success, 1 if nothing changed.
1323 """
1326 """
1324 if opts.get('subrepos'):
1327 if opts.get('subrepos'):
1325 # Let --subrepos on the command line override config setting.
1328 # Let --subrepos on the command line override config setting.
1326 ui.setconfig('ui', 'commitsubrepos', True)
1329 ui.setconfig('ui', 'commitsubrepos', True)
1327
1330
1328 extra = {}
1331 extra = {}
1329 if opts.get('close_branch'):
1332 if opts.get('close_branch'):
1330 extra['close'] = 1
1333 extra['close'] = 1
1331
1334
1332 branch = repo[None].branch()
1335 branch = repo[None].branch()
1333 bheads = repo.branchheads(branch)
1336 bheads = repo.branchheads(branch)
1334
1337
1335 if opts.get('amend'):
1338 if opts.get('amend'):
1336 if ui.configbool('ui', 'commitsubrepos'):
1339 if ui.configbool('ui', 'commitsubrepos'):
1337 raise util.Abort(_('cannot amend recursively'))
1340 raise util.Abort(_('cannot amend recursively'))
1338
1341
1339 old = repo['.']
1342 old = repo['.']
1340 if old.phase() == phases.public:
1343 if old.phase() == phases.public:
1341 raise util.Abort(_('cannot amend public changesets'))
1344 raise util.Abort(_('cannot amend public changesets'))
1342 if len(repo[None].parents()) > 1:
1345 if len(repo[None].parents()) > 1:
1343 raise util.Abort(_('cannot amend while merging'))
1346 raise util.Abort(_('cannot amend while merging'))
1344 if (not obsolete._enabled) and old.children():
1347 if (not obsolete._enabled) and old.children():
1345 raise util.Abort(_('cannot amend changeset with children'))
1348 raise util.Abort(_('cannot amend changeset with children'))
1346
1349
1347 e = cmdutil.commiteditor
1350 e = cmdutil.commiteditor
1348 if opts.get('force_editor'):
1351 if opts.get('force_editor'):
1349 e = cmdutil.commitforceeditor
1352 e = cmdutil.commitforceeditor
1350
1353
1351 def commitfunc(ui, repo, message, match, opts):
1354 def commitfunc(ui, repo, message, match, opts):
1352 editor = e
1355 editor = e
1353 # message contains text from -m or -l, if it's empty,
1356 # message contains text from -m or -l, if it's empty,
1354 # open the editor with the old message
1357 # open the editor with the old message
1355 if not message:
1358 if not message:
1356 message = old.description()
1359 message = old.description()
1357 editor = cmdutil.commitforceeditor
1360 editor = cmdutil.commitforceeditor
1358 return repo.commit(message,
1361 return repo.commit(message,
1359 opts.get('user') or old.user(),
1362 opts.get('user') or old.user(),
1360 opts.get('date') or old.date(),
1363 opts.get('date') or old.date(),
1361 match,
1364 match,
1362 editor=editor,
1365 editor=editor,
1363 extra=extra)
1366 extra=extra)
1364
1367
1365 current = repo._bookmarkcurrent
1368 current = repo._bookmarkcurrent
1366 marks = old.bookmarks()
1369 marks = old.bookmarks()
1367 node = cmdutil.amend(ui, repo, commitfunc, old, extra, pats, opts)
1370 node = cmdutil.amend(ui, repo, commitfunc, old, extra, pats, opts)
1368 if node == old.node():
1371 if node == old.node():
1369 ui.status(_("nothing changed\n"))
1372 ui.status(_("nothing changed\n"))
1370 return 1
1373 return 1
1371 elif marks:
1374 elif marks:
1372 ui.debug('moving bookmarks %r from %s to %s\n' %
1375 ui.debug('moving bookmarks %r from %s to %s\n' %
1373 (marks, old.hex(), hex(node)))
1376 (marks, old.hex(), hex(node)))
1374 newmarks = repo._bookmarks
1377 newmarks = repo._bookmarks
1375 for bm in marks:
1378 for bm in marks:
1376 newmarks[bm] = node
1379 newmarks[bm] = node
1377 if bm == current:
1380 if bm == current:
1378 bookmarks.setcurrent(repo, bm)
1381 bookmarks.setcurrent(repo, bm)
1379 newmarks.write()
1382 newmarks.write()
1380 else:
1383 else:
1381 e = cmdutil.commiteditor
1384 e = cmdutil.commiteditor
1382 if opts.get('force_editor'):
1385 if opts.get('force_editor'):
1383 e = cmdutil.commitforceeditor
1386 e = cmdutil.commitforceeditor
1384
1387
1385 def commitfunc(ui, repo, message, match, opts):
1388 def commitfunc(ui, repo, message, match, opts):
1386 return repo.commit(message, opts.get('user'), opts.get('date'),
1389 return repo.commit(message, opts.get('user'), opts.get('date'),
1387 match, editor=e, extra=extra)
1390 match, editor=e, extra=extra)
1388
1391
1389 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
1392 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
1390
1393
1391 if not node:
1394 if not node:
1392 stat = repo.status(match=scmutil.match(repo[None], pats, opts))
1395 stat = repo.status(match=scmutil.match(repo[None], pats, opts))
1393 if stat[3]:
1396 if stat[3]:
1394 ui.status(_("nothing changed (%d missing files, see "
1397 ui.status(_("nothing changed (%d missing files, see "
1395 "'hg status')\n") % len(stat[3]))
1398 "'hg status')\n") % len(stat[3]))
1396 else:
1399 else:
1397 ui.status(_("nothing changed\n"))
1400 ui.status(_("nothing changed\n"))
1398 return 1
1401 return 1
1399
1402
1400 cmdutil.commitstatus(repo, node, branch, bheads, opts)
1403 cmdutil.commitstatus(repo, node, branch, bheads, opts)
1401
1404
1402 @command('copy|cp',
1405 @command('copy|cp',
1403 [('A', 'after', None, _('record a copy that has already occurred')),
1406 [('A', 'after', None, _('record a copy that has already occurred')),
1404 ('f', 'force', None, _('forcibly copy over an existing managed file')),
1407 ('f', 'force', None, _('forcibly copy over an existing managed file')),
1405 ] + walkopts + dryrunopts,
1408 ] + walkopts + dryrunopts,
1406 _('[OPTION]... [SOURCE]... DEST'))
1409 _('[OPTION]... [SOURCE]... DEST'))
1407 def copy(ui, repo, *pats, **opts):
1410 def copy(ui, repo, *pats, **opts):
1408 """mark files as copied for the next commit
1411 """mark files as copied for the next commit
1409
1412
1410 Mark dest as having copies of source files. If dest is a
1413 Mark dest as having copies of source files. If dest is a
1411 directory, copies are put in that directory. If dest is a file,
1414 directory, copies are put in that directory. If dest is a file,
1412 the source must be a single file.
1415 the source must be a single file.
1413
1416
1414 By default, this command copies the contents of files as they
1417 By default, this command copies the contents of files as they
1415 exist in the working directory. If invoked with -A/--after, the
1418 exist in the working directory. If invoked with -A/--after, the
1416 operation is recorded, but no copying is performed.
1419 operation is recorded, but no copying is performed.
1417
1420
1418 This command takes effect with the next commit. To undo a copy
1421 This command takes effect with the next commit. To undo a copy
1419 before that, see :hg:`revert`.
1422 before that, see :hg:`revert`.
1420
1423
1421 Returns 0 on success, 1 if errors are encountered.
1424 Returns 0 on success, 1 if errors are encountered.
1422 """
1425 """
1423 wlock = repo.wlock(False)
1426 wlock = repo.wlock(False)
1424 try:
1427 try:
1425 return cmdutil.copy(ui, repo, pats, opts)
1428 return cmdutil.copy(ui, repo, pats, opts)
1426 finally:
1429 finally:
1427 wlock.release()
1430 wlock.release()
1428
1431
1429 @command('debugancestor', [], _('[INDEX] REV1 REV2'))
1432 @command('debugancestor', [], _('[INDEX] REV1 REV2'))
1430 def debugancestor(ui, repo, *args):
1433 def debugancestor(ui, repo, *args):
1431 """find the ancestor revision of two revisions in a given index"""
1434 """find the ancestor revision of two revisions in a given index"""
1432 if len(args) == 3:
1435 if len(args) == 3:
1433 index, rev1, rev2 = args
1436 index, rev1, rev2 = args
1434 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), index)
1437 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), index)
1435 lookup = r.lookup
1438 lookup = r.lookup
1436 elif len(args) == 2:
1439 elif len(args) == 2:
1437 if not repo:
1440 if not repo:
1438 raise util.Abort(_("there is no Mercurial repository here "
1441 raise util.Abort(_("there is no Mercurial repository here "
1439 "(.hg not found)"))
1442 "(.hg not found)"))
1440 rev1, rev2 = args
1443 rev1, rev2 = args
1441 r = repo.changelog
1444 r = repo.changelog
1442 lookup = repo.lookup
1445 lookup = repo.lookup
1443 else:
1446 else:
1444 raise util.Abort(_('either two or three arguments required'))
1447 raise util.Abort(_('either two or three arguments required'))
1445 a = r.ancestor(lookup(rev1), lookup(rev2))
1448 a = r.ancestor(lookup(rev1), lookup(rev2))
1446 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1449 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1447
1450
1448 @command('debugbuilddag',
1451 @command('debugbuilddag',
1449 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
1452 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
1450 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
1453 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
1451 ('n', 'new-file', None, _('add new file at each rev'))],
1454 ('n', 'new-file', None, _('add new file at each rev'))],
1452 _('[OPTION]... [TEXT]'))
1455 _('[OPTION]... [TEXT]'))
1453 def debugbuilddag(ui, repo, text=None,
1456 def debugbuilddag(ui, repo, text=None,
1454 mergeable_file=False,
1457 mergeable_file=False,
1455 overwritten_file=False,
1458 overwritten_file=False,
1456 new_file=False):
1459 new_file=False):
1457 """builds a repo with a given DAG from scratch in the current empty repo
1460 """builds a repo with a given DAG from scratch in the current empty repo
1458
1461
1459 The description of the DAG is read from stdin if not given on the
1462 The description of the DAG is read from stdin if not given on the
1460 command line.
1463 command line.
1461
1464
1462 Elements:
1465 Elements:
1463
1466
1464 - "+n" is a linear run of n nodes based on the current default parent
1467 - "+n" is a linear run of n nodes based on the current default parent
1465 - "." is a single node based on the current default parent
1468 - "." is a single node based on the current default parent
1466 - "$" resets the default parent to null (implied at the start);
1469 - "$" resets the default parent to null (implied at the start);
1467 otherwise the default parent is always the last node created
1470 otherwise the default parent is always the last node created
1468 - "<p" sets the default parent to the backref p
1471 - "<p" sets the default parent to the backref p
1469 - "*p" is a fork at parent p, which is a backref
1472 - "*p" is a fork at parent p, which is a backref
1470 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
1473 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
1471 - "/p2" is a merge of the preceding node and p2
1474 - "/p2" is a merge of the preceding node and p2
1472 - ":tag" defines a local tag for the preceding node
1475 - ":tag" defines a local tag for the preceding node
1473 - "@branch" sets the named branch for subsequent nodes
1476 - "@branch" sets the named branch for subsequent nodes
1474 - "#...\\n" is a comment up to the end of the line
1477 - "#...\\n" is a comment up to the end of the line
1475
1478
1476 Whitespace between the above elements is ignored.
1479 Whitespace between the above elements is ignored.
1477
1480
1478 A backref is either
1481 A backref is either
1479
1482
1480 - a number n, which references the node curr-n, where curr is the current
1483 - a number n, which references the node curr-n, where curr is the current
1481 node, or
1484 node, or
1482 - the name of a local tag you placed earlier using ":tag", or
1485 - the name of a local tag you placed earlier using ":tag", or
1483 - empty to denote the default parent.
1486 - empty to denote the default parent.
1484
1487
1485 All string valued-elements are either strictly alphanumeric, or must
1488 All string valued-elements are either strictly alphanumeric, or must
1486 be enclosed in double quotes ("..."), with "\\" as escape character.
1489 be enclosed in double quotes ("..."), with "\\" as escape character.
1487 """
1490 """
1488
1491
1489 if text is None:
1492 if text is None:
1490 ui.status(_("reading DAG from stdin\n"))
1493 ui.status(_("reading DAG from stdin\n"))
1491 text = ui.fin.read()
1494 text = ui.fin.read()
1492
1495
1493 cl = repo.changelog
1496 cl = repo.changelog
1494 if len(cl) > 0:
1497 if len(cl) > 0:
1495 raise util.Abort(_('repository is not empty'))
1498 raise util.Abort(_('repository is not empty'))
1496
1499
1497 # determine number of revs in DAG
1500 # determine number of revs in DAG
1498 total = 0
1501 total = 0
1499 for type, data in dagparser.parsedag(text):
1502 for type, data in dagparser.parsedag(text):
1500 if type == 'n':
1503 if type == 'n':
1501 total += 1
1504 total += 1
1502
1505
1503 if mergeable_file:
1506 if mergeable_file:
1504 linesperrev = 2
1507 linesperrev = 2
1505 # make a file with k lines per rev
1508 # make a file with k lines per rev
1506 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
1509 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
1507 initialmergedlines.append("")
1510 initialmergedlines.append("")
1508
1511
1509 tags = []
1512 tags = []
1510
1513
1511 lock = tr = None
1514 lock = tr = None
1512 try:
1515 try:
1513 lock = repo.lock()
1516 lock = repo.lock()
1514 tr = repo.transaction("builddag")
1517 tr = repo.transaction("builddag")
1515
1518
1516 at = -1
1519 at = -1
1517 atbranch = 'default'
1520 atbranch = 'default'
1518 nodeids = []
1521 nodeids = []
1519 id = 0
1522 id = 0
1520 ui.progress(_('building'), id, unit=_('revisions'), total=total)
1523 ui.progress(_('building'), id, unit=_('revisions'), total=total)
1521 for type, data in dagparser.parsedag(text):
1524 for type, data in dagparser.parsedag(text):
1522 if type == 'n':
1525 if type == 'n':
1523 ui.note(('node %s\n' % str(data)))
1526 ui.note(('node %s\n' % str(data)))
1524 id, ps = data
1527 id, ps = data
1525
1528
1526 files = []
1529 files = []
1527 fctxs = {}
1530 fctxs = {}
1528
1531
1529 p2 = None
1532 p2 = None
1530 if mergeable_file:
1533 if mergeable_file:
1531 fn = "mf"
1534 fn = "mf"
1532 p1 = repo[ps[0]]
1535 p1 = repo[ps[0]]
1533 if len(ps) > 1:
1536 if len(ps) > 1:
1534 p2 = repo[ps[1]]
1537 p2 = repo[ps[1]]
1535 pa = p1.ancestor(p2)
1538 pa = p1.ancestor(p2)
1536 base, local, other = [x[fn].data() for x in (pa, p1,
1539 base, local, other = [x[fn].data() for x in (pa, p1,
1537 p2)]
1540 p2)]
1538 m3 = simplemerge.Merge3Text(base, local, other)
1541 m3 = simplemerge.Merge3Text(base, local, other)
1539 ml = [l.strip() for l in m3.merge_lines()]
1542 ml = [l.strip() for l in m3.merge_lines()]
1540 ml.append("")
1543 ml.append("")
1541 elif at > 0:
1544 elif at > 0:
1542 ml = p1[fn].data().split("\n")
1545 ml = p1[fn].data().split("\n")
1543 else:
1546 else:
1544 ml = initialmergedlines
1547 ml = initialmergedlines
1545 ml[id * linesperrev] += " r%i" % id
1548 ml[id * linesperrev] += " r%i" % id
1546 mergedtext = "\n".join(ml)
1549 mergedtext = "\n".join(ml)
1547 files.append(fn)
1550 files.append(fn)
1548 fctxs[fn] = context.memfilectx(fn, mergedtext)
1551 fctxs[fn] = context.memfilectx(fn, mergedtext)
1549
1552
1550 if overwritten_file:
1553 if overwritten_file:
1551 fn = "of"
1554 fn = "of"
1552 files.append(fn)
1555 files.append(fn)
1553 fctxs[fn] = context.memfilectx(fn, "r%i\n" % id)
1556 fctxs[fn] = context.memfilectx(fn, "r%i\n" % id)
1554
1557
1555 if new_file:
1558 if new_file:
1556 fn = "nf%i" % id
1559 fn = "nf%i" % id
1557 files.append(fn)
1560 files.append(fn)
1558 fctxs[fn] = context.memfilectx(fn, "r%i\n" % id)
1561 fctxs[fn] = context.memfilectx(fn, "r%i\n" % id)
1559 if len(ps) > 1:
1562 if len(ps) > 1:
1560 if not p2:
1563 if not p2:
1561 p2 = repo[ps[1]]
1564 p2 = repo[ps[1]]
1562 for fn in p2:
1565 for fn in p2:
1563 if fn.startswith("nf"):
1566 if fn.startswith("nf"):
1564 files.append(fn)
1567 files.append(fn)
1565 fctxs[fn] = p2[fn]
1568 fctxs[fn] = p2[fn]
1566
1569
1567 def fctxfn(repo, cx, path):
1570 def fctxfn(repo, cx, path):
1568 return fctxs.get(path)
1571 return fctxs.get(path)
1569
1572
1570 if len(ps) == 0 or ps[0] < 0:
1573 if len(ps) == 0 or ps[0] < 0:
1571 pars = [None, None]
1574 pars = [None, None]
1572 elif len(ps) == 1:
1575 elif len(ps) == 1:
1573 pars = [nodeids[ps[0]], None]
1576 pars = [nodeids[ps[0]], None]
1574 else:
1577 else:
1575 pars = [nodeids[p] for p in ps]
1578 pars = [nodeids[p] for p in ps]
1576 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
1579 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
1577 date=(id, 0),
1580 date=(id, 0),
1578 user="debugbuilddag",
1581 user="debugbuilddag",
1579 extra={'branch': atbranch})
1582 extra={'branch': atbranch})
1580 nodeid = repo.commitctx(cx)
1583 nodeid = repo.commitctx(cx)
1581 nodeids.append(nodeid)
1584 nodeids.append(nodeid)
1582 at = id
1585 at = id
1583 elif type == 'l':
1586 elif type == 'l':
1584 id, name = data
1587 id, name = data
1585 ui.note(('tag %s\n' % name))
1588 ui.note(('tag %s\n' % name))
1586 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
1589 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
1587 elif type == 'a':
1590 elif type == 'a':
1588 ui.note(('branch %s\n' % data))
1591 ui.note(('branch %s\n' % data))
1589 atbranch = data
1592 atbranch = data
1590 ui.progress(_('building'), id, unit=_('revisions'), total=total)
1593 ui.progress(_('building'), id, unit=_('revisions'), total=total)
1591 tr.close()
1594 tr.close()
1592
1595
1593 if tags:
1596 if tags:
1594 repo.opener.write("localtags", "".join(tags))
1597 repo.opener.write("localtags", "".join(tags))
1595 finally:
1598 finally:
1596 ui.progress(_('building'), None)
1599 ui.progress(_('building'), None)
1597 release(tr, lock)
1600 release(tr, lock)
1598
1601
@command('debugbundle', [('a', 'all', None, _('show all details'))], _('FILE'))
def debugbundle(ui, bundlepath, all=None, **opts):
    """lists the contents of a bundle"""
    f = hg.openpath(ui, bundlepath)
    try:
        gen = changegroup.readbundle(f, bundlepath)
        if all:
            # Verbose mode: dump every delta chunk of every group.
            ui.write(("format: id, p1, p2, cset, delta base, len(delta)\n"))

            def showchunks(named):
                # Walk one delta group; each chunk's implicit delta base is
                # the previously emitted node, tracked in 'chain'.
                ui.write("\n%s\n" % named)
                chain = None
                while True:
                    chunkdata = gen.deltachunk(chain)
                    if not chunkdata:
                        # empty chunk terminates the group
                        break
                    node = chunkdata['node']
                    p1 = chunkdata['p1']
                    p2 = chunkdata['p2']
                    cs = chunkdata['cs']
                    deltabase = chunkdata['deltabase']
                    delta = chunkdata['delta']
                    ui.write("%s %s %s %s %s %s\n" %
                             (hex(node), hex(p1), hex(p2),
                              hex(cs), hex(deltabase), len(delta)))
                    chain = node

            # Bundles are laid out as: changelog group, manifest group,
            # then one group per file (terminated by an empty header).
            chunkdata = gen.changelogheader()
            showchunks("changelog")
            chunkdata = gen.manifestheader()
            showchunks("manifest")
            while True:
                chunkdata = gen.filelogheader()
                if not chunkdata:
                    break
                fname = chunkdata['filename']
                showchunks(fname)
        else:
            # Default mode: print only the changeset node ids.
            chunkdata = gen.changelogheader()
            chain = None
            while True:
                chunkdata = gen.deltachunk(chain)
                if not chunkdata:
                    break
                node = chunkdata['node']
                ui.write("%s\n" % hex(node))
                chain = node
    finally:
        f.close()
1648
1651
@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    # Cross-check every dirstate entry against the manifests of both
    # working-directory parents, counting inconsistencies as we go.
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    for fname in repo.dirstate:
        state = repo.dirstate[fname]
        if state in "nr" and fname not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n")
                    % (fname, state))
            errors += 1
        if state in "a" and fname in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n")
                    % (fname, state))
            errors += 1
        if state in "m" and fname not in m1 and fname not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (fname, state))
            errors += 1
    # Reverse direction: everything tracked in manifest1 must be known
    # to the dirstate in a tracked state.
    for fname in m1:
        state = repo.dirstate[fname]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s")
                    % (fname, state))
            errors += 1
    if errors:
        error = _(".hg/dirstate inconsistent with current parent's manifest")
        raise util.Abort(error)
1676
1679
@command('debugcommands', [], _('[COMMAND]'))
def debugcommands(ui, cmd='', *args):
    """list all available commands and options"""
    # The command table keys look like 'name|alias...' with an optional
    # leading '^' priority marker; strip both before printing.
    for name, entry in sorted(table.iteritems()):
        name = name.split('|')[0].strip('^')
        opts = ', '.join(i[1] for i in entry[1])
        ui.write('%s: %s\n' % (name, opts))
1684
1687
@command('debugcomplete',
    [('o', 'options', None, _('show the command options'))],
    _('[-o] CMD'))
def debugcomplete(ui, cmd='', **opts):
    """returns the completion list associated with the given command"""

    if opts.get('options'):
        # -o/--options: complete option names instead of command names.
        candidates = []
        tables = [globalopts]
        if cmd:
            aliases, entry = cmdutil.findcmd(cmd, table, False)
            tables.append(entry[1])
        for opttable in tables:
            for opt in opttable:
                if "(DEPRECATED)" in opt[3]:
                    continue
                if opt[0]:
                    candidates.append('-%s' % opt[0])
                candidates.append('--%s' % opt[1])
        ui.write("%s\n" % "\n".join(candidates))
        return

    # Otherwise complete command names matching the given prefix.
    cmdlist = cmdutil.findpossible(cmd, table)
    if ui.verbose:
        cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
    ui.write("%s\n" % "\n".join(sorted(cmdlist)))
1711
1714
@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
    ('b', 'branches', None, _('annotate with branch names')),
    ('', 'dots', None, _('use dots for runs')),
    ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'))
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # Standalone revlog index mode: read the index file directly,
        # bypassing repository path auditing.
        rlog = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_)
        revs = set((int(r) for r in revs))
        def events():
            # Emit 'n' (node) events with deduplicated real parents, and
            # 'l' (label) events for explicitly requested revisions.
            for r in rlog:
                yield 'n', (r, list(set(p for p in rlog.parentrevs(r)
                                        if p != -1)))
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # Map each revision number to the tag names pointing at it.
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)
        def events():
            b = "default"
            for r in cl:
                if branches:
                    # Emit an 'a' (annotation) event whenever the branch
                    # recorded in the changelog entry changes.
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        yield 'a', newb
                        b = newb
                yield 'n', (r, list(set(p for p in cl.parentrevs(r)
                                        if p != -1)))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise util.Abort(_('need repo for changelog dag'))

    # dagtextlines serializes the event stream into the compact DAG text
    # format, wrapped at 70 columns.
    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
    ui.write("\n")
1772
1775
@command('debugdata',
    [('c', 'changelog', False, _('open changelog')),
     ('m', 'manifest', False, _('open manifest'))],
    _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    if opts.get('changelog') or opts.get('manifest'):
        # With -c/-m there is no FILE argument: the first positional
        # argument is actually the revision.
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    rlog = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
    try:
        ui.write(rlog.revision(rlog.lookup(rev)))
    except KeyError:
        raise util.Abort(_('invalid revision identifier %s') % rev)
1788
1791
@command('debugdate',
    [('e', 'extended', None, _('try extended date formats'))],
    _('[-e] DATE [RANGE]'))
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # With -e, also try the extended set of date formats.
    if opts["extended"]:
        parsed = util.parsedate(date, util.extendeddateformats)
    else:
        parsed = util.parsedate(date)
    ui.write(("internal: %s %s\n") % parsed)
    ui.write(("standard: %s\n") % util.datestr(parsed))
    if range:
        matcher = util.matchdate(range)
        ui.write(("match: %s\n") % matcher(parsed[0]))
1803
1806
@command('debugdiscovery',
    [('', 'old', None, _('use old-style discovery')),
    ('', 'nonheads', None,
     _('use old-style discovery with non-heads included')),
    ] + remoteopts,
    _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
                                      opts.get('branch'))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(12323)

    def doit(localheads, remoteheads, remote=remote):
        # Run one discovery round and report the common heads.
        if opts.get('old'):
            if localheads:
                raise util.Abort('cannot use localheads with old style '
                                 'discovery')
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                # Prune the common set down to its heads via the DAG.
                ui.write(("unpruned common: %s\n") %
                         " ".join(sorted(short(n) for n in common)))
                dag = dagutil.revlogdag(repo.changelog)
                all = dag.ancestorset(dag.internalizeall(common))
                common = dag.externalizeall(dag.headsetofconnecteds(all))
        else:
            common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
        common = set(common)
        rheads = set(hds)
        lheads = set(repo.heads())
        ui.write(("common heads: %s\n") %
                 " ".join(sorted(short(n) for n in common)))
        if lheads <= common:
            ui.write(("local is subset\n"))
        elif rheads <= common:
            ui.write(("remote is subset\n"))

    # NOTE(review): 'serverlog' is read from opts but is not declared in
    # this command's option table above — presumably supplied elsewhere;
    # confirm before relying on this branch.
    serverlogs = opts.get('serverlog')
    if serverlogs:
        # Replay discovery requests recorded in server log files; each
        # line is ';'-separated with the operation code in field 1.
        for filename in serverlogs:
            logfile = open(filename, 'r')
            try:
                line = logfile.readline()
                while line:
                    parts = line.strip().split(';')
                    op = parts[1]
                    if op == 'cg':
                        pass
                    elif op == 'cgss':
                        doit(parts[2].split(' '), parts[3].split(' '))
                    elif op == 'unb':
                        # 'unb' records heads in the opposite field order
                        doit(parts[3].split(' '), parts[2].split(' '))
                    line = logfile.readline()
            finally:
                logfile.close()

    else:
        remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
                                                 opts.get('remote_head'))
        localrevs = opts.get('local_head')
        doit(localrevs, remoterevs)
1873
1876
@command('debugfileset',
    [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
    _('[-r REV] FILESPEC'))
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    ctx = scmutil.revsingle(repo, opts.get('rev'), None)
    if ui.verbose:
        # In verbose mode, show the parse tree before the matching files.
        tree = fileset.parse(expr)[0]
        ui.note(tree, "\n")

    for fname in fileset.getfileset(ctx, expr):
        ui.write("%s\n" % fname)
1886
1889
@command('debugfsinfo', [], _('[PATH]'))
def debugfsinfo(ui, path="."):
    """show information detected about current filesystem

    Probes the filesystem for exec-bit, symlink, and case-sensitivity
    support and prints one line per capability.
    """
    # The case-sensitivity check needs a real file; create a probe file
    # in the current directory. Fix: guarantee the probe file is removed
    # even if one of the checks raises (the original leaked it on error).
    util.writefile('.debugfsinfo', '')
    try:
        ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
        ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
        ui.write(('case-sensitive: %s\n') % (util.checkcase('.debugfsinfo')
                                             and 'yes' or 'no'))
    finally:
        os.unlink('.debugfsinfo')
1896
1899
@command('debuggetbundle',
    [('H', 'head', [], _('id of head node'), _('ID')),
    ('C', 'common', [], _('id of common node'), _('ID')),
    ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
    _('REPO FILE [-H|-C ID]...'))
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable('getbundle'):
        raise util.Abort("getbundle() not supported by target repository")
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args['bundlecaps'] = None
    bundle = repo.getbundle('debug', **args)

    # Map the user-facing compression names onto the internal bundle
    # header identifiers; unknown names map to None and are rejected.
    bundletype = opts.get('type', 'bzip2').lower()
    btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
    bundletype = btypes.get(bundletype)
    if bundletype not in changegroup.bundletypes:
        raise util.Abort(_('unknown bundle type specified with --type'))
    changegroup.writebundle(bundle, bundlepath, bundletype)
1924
1929
@command('debugignore', [], '')
def debugignore(ui, repo, *values, **opts):
    """display the combined ignore pattern"""
    # The combined pattern is cached on the dirstate's ignore matcher;
    # it is absent when no ignore patterns are configured.
    ignore = repo.dirstate._ignore
    includepat = getattr(ignore, 'includepat', None)
    if includepat is None:
        raise util.Abort(_("no ignore patterns found"))
    ui.write("%s\n" % includepat)
1934
1939
@command('debugindex',
    [('c', 'changelog', False, _('open changelog')),
     ('m', 'manifest', False, _('open manifest')),
     ('f', 'format', 0, _('revlog format'), _('FORMAT'))],
    _('[-f FORMAT] -c|-m|FILE'))
def debugindex(ui, repo, file_ = None, **opts):
    """dump the contents of an index file"""
    r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
    format = opts.get('format', 0)
    if format not in (0, 1):
        raise util.Abort(_("unknown format %d") % format)

    # With generaldelta the base column shows the delta parent; otherwise
    # it shows the start of the delta chain.
    generaldelta = r.version & revlog.REVLOGGENERALDELTA
    if generaldelta:
        basehdr = ' delta'
    else:
        basehdr = ' base'

    if format == 0:
        ui.write(" rev offset length " + basehdr + " linkrev"
                 " nodeid p1 p2\n")
    elif format == 1:
        ui.write(" rev flag offset length"
                 " size " + basehdr + " link p1 p2"
                 " nodeid\n")

    for i in r:
        node = r.node(i)
        if generaldelta:
            base = r.deltaparent(i)
        else:
            base = r.chainbase(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # be tolerant of damaged indexes; print null parents
                pp = [nullid, nullid]
            ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                    i, r.start(i), r.length(i), base, r.linkrev(i),
                    short(node), short(pp[0]), short(pp[1])))
        elif format == 1:
            pr = r.parentrevs(i)
            ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
                    base, r.linkrev(i), pr[0], pr[1], short(node)))
1980
1985
@command('debugindexdot', [], _('FILE'))
def debugindexdot(ui, repo, file_):
    """dump an index DAG as a graphviz dot file"""
    # Prefer the repository's own (non-empty) filelog; otherwise read
    # the index file directly from the current directory.
    rlog = None
    if repo:
        filelog = repo.file(file_)
        if len(filelog):
            rlog = filelog
    if not rlog:
        rlog = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_)
    ui.write(("digraph G {\n"))
    for rev in rlog:
        node = rlog.node(rev)
        p1, p2 = rlog.parents(node)
        ui.write("\t%d -> %d\n" % (rlog.rev(p1), rev))
        if p2 != nullid:
            # second edge only for merges
            ui.write("\t%d -> %d\n" % (rlog.rev(p2), rev))
    ui.write("}\n")
1999
2004
2000 @command('debuginstall', [], '')
2005 @command('debuginstall', [], '')
2001 def debuginstall(ui):
2006 def debuginstall(ui):
2002 '''test Mercurial installation
2007 '''test Mercurial installation
2003
2008
2004 Returns 0 on success.
2009 Returns 0 on success.
2005 '''
2010 '''
2006
2011
2007 def writetemp(contents):
2012 def writetemp(contents):
2008 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
2013 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
2009 f = os.fdopen(fd, "wb")
2014 f = os.fdopen(fd, "wb")
2010 f.write(contents)
2015 f.write(contents)
2011 f.close()
2016 f.close()
2012 return name
2017 return name
2013
2018
2014 problems = 0
2019 problems = 0
2015
2020
2016 # encoding
2021 # encoding
2017 ui.status(_("checking encoding (%s)...\n") % encoding.encoding)
2022 ui.status(_("checking encoding (%s)...\n") % encoding.encoding)
2018 try:
2023 try:
2019 encoding.fromlocal("test")
2024 encoding.fromlocal("test")
2020 except util.Abort, inst:
2025 except util.Abort, inst:
2021 ui.write(" %s\n" % inst)
2026 ui.write(" %s\n" % inst)
2022 ui.write(_(" (check that your locale is properly set)\n"))
2027 ui.write(_(" (check that your locale is properly set)\n"))
2023 problems += 1
2028 problems += 1
2024
2029
2025 # Python lib
2030 # Python lib
2026 ui.status(_("checking Python lib (%s)...\n")
2031 ui.status(_("checking Python lib (%s)...\n")
2027 % os.path.dirname(os.__file__))
2032 % os.path.dirname(os.__file__))
2028
2033
2029 # compiled modules
2034 # compiled modules
2030 ui.status(_("checking installed modules (%s)...\n")
2035 ui.status(_("checking installed modules (%s)...\n")
2031 % os.path.dirname(__file__))
2036 % os.path.dirname(__file__))
2032 try:
2037 try:
2033 import bdiff, mpatch, base85, osutil
2038 import bdiff, mpatch, base85, osutil
2034 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
2039 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
2035 except Exception, inst:
2040 except Exception, inst:
2036 ui.write(" %s\n" % inst)
2041 ui.write(" %s\n" % inst)
2037 ui.write(_(" One or more extensions could not be found"))
2042 ui.write(_(" One or more extensions could not be found"))
2038 ui.write(_(" (check that you compiled the extensions)\n"))
2043 ui.write(_(" (check that you compiled the extensions)\n"))
2039 problems += 1
2044 problems += 1
2040
2045
2041 # templates
2046 # templates
2042 import templater
2047 import templater
2043 p = templater.templatepath()
2048 p = templater.templatepath()
2044 ui.status(_("checking templates (%s)...\n") % ' '.join(p))
2049 ui.status(_("checking templates (%s)...\n") % ' '.join(p))
2045 try:
2050 try:
2046 templater.templater(templater.templatepath("map-cmdline.default"))
2051 templater.templater(templater.templatepath("map-cmdline.default"))
2047 except Exception, inst:
2052 except Exception, inst:
2048 ui.write(" %s\n" % inst)
2053 ui.write(" %s\n" % inst)
2049 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
2054 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
2050 problems += 1
2055 problems += 1
2051
2056
2052 # editor
2057 # editor
2053 ui.status(_("checking commit editor...\n"))
2058 ui.status(_("checking commit editor...\n"))
2054 editor = ui.geteditor()
2059 editor = ui.geteditor()
2055 cmdpath = util.findexe(editor) or util.findexe(editor.split()[0])
2060 cmdpath = util.findexe(editor) or util.findexe(editor.split()[0])
2056 if not cmdpath:
2061 if not cmdpath:
2057 if editor == 'vi':
2062 if editor == 'vi':
2058 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
2063 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
2059 ui.write(_(" (specify a commit editor in your configuration"
2064 ui.write(_(" (specify a commit editor in your configuration"
2060 " file)\n"))
2065 " file)\n"))
2061 else:
2066 else:
2062 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
2067 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
2063 ui.write(_(" (specify a commit editor in your configuration"
2068 ui.write(_(" (specify a commit editor in your configuration"
2064 " file)\n"))
2069 " file)\n"))
2065 problems += 1
2070 problems += 1
2066
2071
2067 # check username
2072 # check username
2068 ui.status(_("checking username...\n"))
2073 ui.status(_("checking username...\n"))
2069 try:
2074 try:
2070 ui.username()
2075 ui.username()
2071 except util.Abort, e:
2076 except util.Abort, e:
2072 ui.write(" %s\n" % e)
2077 ui.write(" %s\n" % e)
2073 ui.write(_(" (specify a username in your configuration file)\n"))
2078 ui.write(_(" (specify a username in your configuration file)\n"))
2074 problems += 1
2079 problems += 1
2075
2080
2076 if not problems:
2081 if not problems:
2077 ui.status(_("no problems detected\n"))
2082 ui.status(_("no problems detected\n"))
2078 else:
2083 else:
2079 ui.write(_("%s problems detected,"
2084 ui.write(_("%s problems detected,"
2080 " please check your install!\n") % problems)
2085 " please check your install!\n") % problems)
2081
2086
2082 return problems
2087 return problems
2083
2088
2084 @command('debugknown', [], _('REPO ID...'))
2089 @command('debugknown', [], _('REPO ID...'))
2085 def debugknown(ui, repopath, *ids, **opts):
2090 def debugknown(ui, repopath, *ids, **opts):
2086 """test whether node ids are known to a repo
2091 """test whether node ids are known to a repo
2087
2092
2088 Every ID must be a full-length hex node id string. Returns a list of 0s
2093 Every ID must be a full-length hex node id string. Returns a list of 0s
2089 and 1s indicating unknown/known.
2094 and 1s indicating unknown/known.
2090 """
2095 """
2091 repo = hg.peer(ui, opts, repopath)
2096 repo = hg.peer(ui, opts, repopath)
2092 if not repo.capable('known'):
2097 if not repo.capable('known'):
2093 raise util.Abort("known() not supported by target repository")
2098 raise util.Abort("known() not supported by target repository")
2094 flags = repo.known([bin(s) for s in ids])
2099 flags = repo.known([bin(s) for s in ids])
2095 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
2100 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
2096
2101
2097 @command('debuglabelcomplete', [], _('LABEL...'))
2102 @command('debuglabelcomplete', [], _('LABEL...'))
2098 def debuglabelcomplete(ui, repo, *args):
2103 def debuglabelcomplete(ui, repo, *args):
2099 '''complete "labels" - tags, open branch names, bookmark names'''
2104 '''complete "labels" - tags, open branch names, bookmark names'''
2100
2105
2101 labels = set()
2106 labels = set()
2102 labels.update(t[0] for t in repo.tagslist())
2107 labels.update(t[0] for t in repo.tagslist())
2103 labels.update(repo._bookmarks.keys())
2108 labels.update(repo._bookmarks.keys())
2104 for heads in repo.branchmap().itervalues():
2109 for heads in repo.branchmap().itervalues():
2105 for h in heads:
2110 for h in heads:
2106 ctx = repo[h]
2111 ctx = repo[h]
2107 if not ctx.closesbranch():
2112 if not ctx.closesbranch():
2108 labels.add(ctx.branch())
2113 labels.add(ctx.branch())
2109 completions = set()
2114 completions = set()
2110 if not args:
2115 if not args:
2111 args = ['']
2116 args = ['']
2112 for a in args:
2117 for a in args:
2113 completions.update(l for l in labels if l.startswith(a))
2118 completions.update(l for l in labels if l.startswith(a))
2114 ui.write('\n'.join(sorted(completions)))
2119 ui.write('\n'.join(sorted(completions)))
2115 ui.write('\n')
2120 ui.write('\n')
2116
2121
2117 @command('debugobsolete',
2122 @command('debugobsolete',
2118 [('', 'flags', 0, _('markers flag')),
2123 [('', 'flags', 0, _('markers flag')),
2119 ] + commitopts2,
2124 ] + commitopts2,
2120 _('[OBSOLETED [REPLACEMENT] [REPL... ]'))
2125 _('[OBSOLETED [REPLACEMENT] [REPL... ]'))
2121 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2126 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2122 """create arbitrary obsolete marker
2127 """create arbitrary obsolete marker
2123
2128
2124 With no arguments, displays the list of obsolescence markers."""
2129 With no arguments, displays the list of obsolescence markers."""
2125 def parsenodeid(s):
2130 def parsenodeid(s):
2126 try:
2131 try:
2127 # We do not use revsingle/revrange functions here to accept
2132 # We do not use revsingle/revrange functions here to accept
2128 # arbitrary node identifiers, possibly not present in the
2133 # arbitrary node identifiers, possibly not present in the
2129 # local repository.
2134 # local repository.
2130 n = bin(s)
2135 n = bin(s)
2131 if len(n) != len(nullid):
2136 if len(n) != len(nullid):
2132 raise TypeError()
2137 raise TypeError()
2133 return n
2138 return n
2134 except TypeError:
2139 except TypeError:
2135 raise util.Abort('changeset references must be full hexadecimal '
2140 raise util.Abort('changeset references must be full hexadecimal '
2136 'node identifiers')
2141 'node identifiers')
2137
2142
2138 if precursor is not None:
2143 if precursor is not None:
2139 metadata = {}
2144 metadata = {}
2140 if 'date' in opts:
2145 if 'date' in opts:
2141 metadata['date'] = opts['date']
2146 metadata['date'] = opts['date']
2142 metadata['user'] = opts['user'] or ui.username()
2147 metadata['user'] = opts['user'] or ui.username()
2143 succs = tuple(parsenodeid(succ) for succ in successors)
2148 succs = tuple(parsenodeid(succ) for succ in successors)
2144 l = repo.lock()
2149 l = repo.lock()
2145 try:
2150 try:
2146 tr = repo.transaction('debugobsolete')
2151 tr = repo.transaction('debugobsolete')
2147 try:
2152 try:
2148 repo.obsstore.create(tr, parsenodeid(precursor), succs,
2153 repo.obsstore.create(tr, parsenodeid(precursor), succs,
2149 opts['flags'], metadata)
2154 opts['flags'], metadata)
2150 tr.close()
2155 tr.close()
2151 finally:
2156 finally:
2152 tr.release()
2157 tr.release()
2153 finally:
2158 finally:
2154 l.release()
2159 l.release()
2155 else:
2160 else:
2156 for m in obsolete.allmarkers(repo):
2161 for m in obsolete.allmarkers(repo):
2157 ui.write(hex(m.precnode()))
2162 ui.write(hex(m.precnode()))
2158 for repl in m.succnodes():
2163 for repl in m.succnodes():
2159 ui.write(' ')
2164 ui.write(' ')
2160 ui.write(hex(repl))
2165 ui.write(hex(repl))
2161 ui.write(' %X ' % m._data[2])
2166 ui.write(' %X ' % m._data[2])
2162 ui.write('{%s}' % (', '.join('%r: %r' % t for t in
2167 ui.write('{%s}' % (', '.join('%r: %r' % t for t in
2163 sorted(m.metadata().items()))))
2168 sorted(m.metadata().items()))))
2164 ui.write('\n')
2169 ui.write('\n')
2165
2170
2166 @command('debugpathcomplete',
2171 @command('debugpathcomplete',
2167 [('f', 'full', None, _('complete an entire path')),
2172 [('f', 'full', None, _('complete an entire path')),
2168 ('n', 'normal', None, _('show only normal files')),
2173 ('n', 'normal', None, _('show only normal files')),
2169 ('a', 'added', None, _('show only added files')),
2174 ('a', 'added', None, _('show only added files')),
2170 ('r', 'removed', None, _('show only removed files'))],
2175 ('r', 'removed', None, _('show only removed files'))],
2171 _('FILESPEC...'))
2176 _('FILESPEC...'))
2172 def debugpathcomplete(ui, repo, *specs, **opts):
2177 def debugpathcomplete(ui, repo, *specs, **opts):
2173 '''complete part or all of a tracked path
2178 '''complete part or all of a tracked path
2174
2179
2175 This command supports shells that offer path name completion. It
2180 This command supports shells that offer path name completion. It
2176 currently completes only files already known to the dirstate.
2181 currently completes only files already known to the dirstate.
2177
2182
2178 Completion extends only to the next path segment unless
2183 Completion extends only to the next path segment unless
2179 --full is specified, in which case entire paths are used.'''
2184 --full is specified, in which case entire paths are used.'''
2180
2185
2181 def complete(path, acceptable):
2186 def complete(path, acceptable):
2182 dirstate = repo.dirstate
2187 dirstate = repo.dirstate
2183 spec = os.path.normpath(os.path.join(os.getcwd(), path))
2188 spec = os.path.normpath(os.path.join(os.getcwd(), path))
2184 rootdir = repo.root + os.sep
2189 rootdir = repo.root + os.sep
2185 if spec != repo.root and not spec.startswith(rootdir):
2190 if spec != repo.root and not spec.startswith(rootdir):
2186 return [], []
2191 return [], []
2187 if os.path.isdir(spec):
2192 if os.path.isdir(spec):
2188 spec += '/'
2193 spec += '/'
2189 spec = spec[len(rootdir):]
2194 spec = spec[len(rootdir):]
2190 fixpaths = os.sep != '/'
2195 fixpaths = os.sep != '/'
2191 if fixpaths:
2196 if fixpaths:
2192 spec = spec.replace(os.sep, '/')
2197 spec = spec.replace(os.sep, '/')
2193 speclen = len(spec)
2198 speclen = len(spec)
2194 fullpaths = opts['full']
2199 fullpaths = opts['full']
2195 files, dirs = set(), set()
2200 files, dirs = set(), set()
2196 adddir, addfile = dirs.add, files.add
2201 adddir, addfile = dirs.add, files.add
2197 for f, st in dirstate.iteritems():
2202 for f, st in dirstate.iteritems():
2198 if f.startswith(spec) and st[0] in acceptable:
2203 if f.startswith(spec) and st[0] in acceptable:
2199 if fixpaths:
2204 if fixpaths:
2200 f = f.replace('/', os.sep)
2205 f = f.replace('/', os.sep)
2201 if fullpaths:
2206 if fullpaths:
2202 addfile(f)
2207 addfile(f)
2203 continue
2208 continue
2204 s = f.find(os.sep, speclen)
2209 s = f.find(os.sep, speclen)
2205 if s >= 0:
2210 if s >= 0:
2206 adddir(f[:s + 1])
2211 adddir(f[:s + 1])
2207 else:
2212 else:
2208 addfile(f)
2213 addfile(f)
2209 return files, dirs
2214 return files, dirs
2210
2215
2211 acceptable = ''
2216 acceptable = ''
2212 if opts['normal']:
2217 if opts['normal']:
2213 acceptable += 'nm'
2218 acceptable += 'nm'
2214 if opts['added']:
2219 if opts['added']:
2215 acceptable += 'a'
2220 acceptable += 'a'
2216 if opts['removed']:
2221 if opts['removed']:
2217 acceptable += 'r'
2222 acceptable += 'r'
2218 cwd = repo.getcwd()
2223 cwd = repo.getcwd()
2219 if not specs:
2224 if not specs:
2220 specs = ['.']
2225 specs = ['.']
2221
2226
2222 files, dirs = set(), set()
2227 files, dirs = set(), set()
2223 for spec in specs:
2228 for spec in specs:
2224 f, d = complete(spec, acceptable or 'nmar')
2229 f, d = complete(spec, acceptable or 'nmar')
2225 files.update(f)
2230 files.update(f)
2226 dirs.update(d)
2231 dirs.update(d)
2227 if not files and len(dirs) == 1:
2232 if not files and len(dirs) == 1:
2228 # force the shell to consider a completion that matches one
2233 # force the shell to consider a completion that matches one
2229 # directory and zero files to be ambiguous
2234 # directory and zero files to be ambiguous
2230 dirs.add(iter(dirs).next() + '.')
2235 dirs.add(iter(dirs).next() + '.')
2231 files.update(dirs)
2236 files.update(dirs)
2232 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2237 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2233 ui.write('\n')
2238 ui.write('\n')
2234
2239
2235 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'))
2240 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'))
2236 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2241 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2237 '''access the pushkey key/value protocol
2242 '''access the pushkey key/value protocol
2238
2243
2239 With two args, list the keys in the given namespace.
2244 With two args, list the keys in the given namespace.
2240
2245
2241 With five args, set a key to new if it currently is set to old.
2246 With five args, set a key to new if it currently is set to old.
2242 Reports success or failure.
2247 Reports success or failure.
2243 '''
2248 '''
2244
2249
2245 target = hg.peer(ui, {}, repopath)
2250 target = hg.peer(ui, {}, repopath)
2246 if keyinfo:
2251 if keyinfo:
2247 key, old, new = keyinfo
2252 key, old, new = keyinfo
2248 r = target.pushkey(namespace, key, old, new)
2253 r = target.pushkey(namespace, key, old, new)
2249 ui.status(str(r) + '\n')
2254 ui.status(str(r) + '\n')
2250 return not r
2255 return not r
2251 else:
2256 else:
2252 for k, v in sorted(target.listkeys(namespace).iteritems()):
2257 for k, v in sorted(target.listkeys(namespace).iteritems()):
2253 ui.write("%s\t%s\n" % (k.encode('string-escape'),
2258 ui.write("%s\t%s\n" % (k.encode('string-escape'),
2254 v.encode('string-escape')))
2259 v.encode('string-escape')))
2255
2260
2256 @command('debugpvec', [], _('A B'))
2261 @command('debugpvec', [], _('A B'))
2257 def debugpvec(ui, repo, a, b=None):
2262 def debugpvec(ui, repo, a, b=None):
2258 ca = scmutil.revsingle(repo, a)
2263 ca = scmutil.revsingle(repo, a)
2259 cb = scmutil.revsingle(repo, b)
2264 cb = scmutil.revsingle(repo, b)
2260 pa = pvec.ctxpvec(ca)
2265 pa = pvec.ctxpvec(ca)
2261 pb = pvec.ctxpvec(cb)
2266 pb = pvec.ctxpvec(cb)
2262 if pa == pb:
2267 if pa == pb:
2263 rel = "="
2268 rel = "="
2264 elif pa > pb:
2269 elif pa > pb:
2265 rel = ">"
2270 rel = ">"
2266 elif pa < pb:
2271 elif pa < pb:
2267 rel = "<"
2272 rel = "<"
2268 elif pa | pb:
2273 elif pa | pb:
2269 rel = "|"
2274 rel = "|"
2270 ui.write(_("a: %s\n") % pa)
2275 ui.write(_("a: %s\n") % pa)
2271 ui.write(_("b: %s\n") % pb)
2276 ui.write(_("b: %s\n") % pb)
2272 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2277 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2273 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
2278 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
2274 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
2279 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
2275 pa.distance(pb), rel))
2280 pa.distance(pb), rel))
2276
2281
2277 @command('debugrebuilddirstate|debugrebuildstate',
2282 @command('debugrebuilddirstate|debugrebuildstate',
2278 [('r', 'rev', '', _('revision to rebuild to'), _('REV'))],
2283 [('r', 'rev', '', _('revision to rebuild to'), _('REV'))],
2279 _('[-r REV]'))
2284 _('[-r REV]'))
2280 def debugrebuilddirstate(ui, repo, rev):
2285 def debugrebuilddirstate(ui, repo, rev):
2281 """rebuild the dirstate as it would look like for the given revision
2286 """rebuild the dirstate as it would look like for the given revision
2282
2287
2283 If no revision is specified the first current parent will be used.
2288 If no revision is specified the first current parent will be used.
2284
2289
2285 The dirstate will be set to the files of the given revision.
2290 The dirstate will be set to the files of the given revision.
2286 The actual working directory content or existing dirstate
2291 The actual working directory content or existing dirstate
2287 information such as adds or removes is not considered.
2292 information such as adds or removes is not considered.
2288
2293
2289 One use of this command is to make the next :hg:`status` invocation
2294 One use of this command is to make the next :hg:`status` invocation
2290 check the actual file content.
2295 check the actual file content.
2291 """
2296 """
2292 ctx = scmutil.revsingle(repo, rev)
2297 ctx = scmutil.revsingle(repo, rev)
2293 wlock = repo.wlock()
2298 wlock = repo.wlock()
2294 try:
2299 try:
2295 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
2300 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
2296 finally:
2301 finally:
2297 wlock.release()
2302 wlock.release()
2298
2303
2299 @command('debugrename',
2304 @command('debugrename',
2300 [('r', 'rev', '', _('revision to debug'), _('REV'))],
2305 [('r', 'rev', '', _('revision to debug'), _('REV'))],
2301 _('[-r REV] FILE'))
2306 _('[-r REV] FILE'))
2302 def debugrename(ui, repo, file1, *pats, **opts):
2307 def debugrename(ui, repo, file1, *pats, **opts):
2303 """dump rename information"""
2308 """dump rename information"""
2304
2309
2305 ctx = scmutil.revsingle(repo, opts.get('rev'))
2310 ctx = scmutil.revsingle(repo, opts.get('rev'))
2306 m = scmutil.match(ctx, (file1,) + pats, opts)
2311 m = scmutil.match(ctx, (file1,) + pats, opts)
2307 for abs in ctx.walk(m):
2312 for abs in ctx.walk(m):
2308 fctx = ctx[abs]
2313 fctx = ctx[abs]
2309 o = fctx.filelog().renamed(fctx.filenode())
2314 o = fctx.filelog().renamed(fctx.filenode())
2310 rel = m.rel(abs)
2315 rel = m.rel(abs)
2311 if o:
2316 if o:
2312 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2317 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2313 else:
2318 else:
2314 ui.write(_("%s not renamed\n") % rel)
2319 ui.write(_("%s not renamed\n") % rel)
2315
2320
2316 @command('debugrevlog',
2321 @command('debugrevlog',
2317 [('c', 'changelog', False, _('open changelog')),
2322 [('c', 'changelog', False, _('open changelog')),
2318 ('m', 'manifest', False, _('open manifest')),
2323 ('m', 'manifest', False, _('open manifest')),
2319 ('d', 'dump', False, _('dump index data'))],
2324 ('d', 'dump', False, _('dump index data'))],
2320 _('-c|-m|FILE'))
2325 _('-c|-m|FILE'))
2321 def debugrevlog(ui, repo, file_ = None, **opts):
2326 def debugrevlog(ui, repo, file_ = None, **opts):
2322 """show data and statistics about a revlog"""
2327 """show data and statistics about a revlog"""
2323 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
2328 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
2324
2329
2325 if opts.get("dump"):
2330 if opts.get("dump"):
2326 numrevs = len(r)
2331 numrevs = len(r)
2327 ui.write("# rev p1rev p2rev start end deltastart base p1 p2"
2332 ui.write("# rev p1rev p2rev start end deltastart base p1 p2"
2328 " rawsize totalsize compression heads\n")
2333 " rawsize totalsize compression heads\n")
2329 ts = 0
2334 ts = 0
2330 heads = set()
2335 heads = set()
2331 for rev in xrange(numrevs):
2336 for rev in xrange(numrevs):
2332 dbase = r.deltaparent(rev)
2337 dbase = r.deltaparent(rev)
2333 if dbase == -1:
2338 if dbase == -1:
2334 dbase = rev
2339 dbase = rev
2335 cbase = r.chainbase(rev)
2340 cbase = r.chainbase(rev)
2336 p1, p2 = r.parentrevs(rev)
2341 p1, p2 = r.parentrevs(rev)
2337 rs = r.rawsize(rev)
2342 rs = r.rawsize(rev)
2338 ts = ts + rs
2343 ts = ts + rs
2339 heads -= set(r.parentrevs(rev))
2344 heads -= set(r.parentrevs(rev))
2340 heads.add(rev)
2345 heads.add(rev)
2341 ui.write("%d %d %d %d %d %d %d %d %d %d %d %d %d\n" %
2346 ui.write("%d %d %d %d %d %d %d %d %d %d %d %d %d\n" %
2342 (rev, p1, p2, r.start(rev), r.end(rev),
2347 (rev, p1, p2, r.start(rev), r.end(rev),
2343 r.start(dbase), r.start(cbase),
2348 r.start(dbase), r.start(cbase),
2344 r.start(p1), r.start(p2),
2349 r.start(p1), r.start(p2),
2345 rs, ts, ts / r.end(rev), len(heads)))
2350 rs, ts, ts / r.end(rev), len(heads)))
2346 return 0
2351 return 0
2347
2352
2348 v = r.version
2353 v = r.version
2349 format = v & 0xFFFF
2354 format = v & 0xFFFF
2350 flags = []
2355 flags = []
2351 gdelta = False
2356 gdelta = False
2352 if v & revlog.REVLOGNGINLINEDATA:
2357 if v & revlog.REVLOGNGINLINEDATA:
2353 flags.append('inline')
2358 flags.append('inline')
2354 if v & revlog.REVLOGGENERALDELTA:
2359 if v & revlog.REVLOGGENERALDELTA:
2355 gdelta = True
2360 gdelta = True
2356 flags.append('generaldelta')
2361 flags.append('generaldelta')
2357 if not flags:
2362 if not flags:
2358 flags = ['(none)']
2363 flags = ['(none)']
2359
2364
2360 nummerges = 0
2365 nummerges = 0
2361 numfull = 0
2366 numfull = 0
2362 numprev = 0
2367 numprev = 0
2363 nump1 = 0
2368 nump1 = 0
2364 nump2 = 0
2369 nump2 = 0
2365 numother = 0
2370 numother = 0
2366 nump1prev = 0
2371 nump1prev = 0
2367 nump2prev = 0
2372 nump2prev = 0
2368 chainlengths = []
2373 chainlengths = []
2369
2374
2370 datasize = [None, 0, 0L]
2375 datasize = [None, 0, 0L]
2371 fullsize = [None, 0, 0L]
2376 fullsize = [None, 0, 0L]
2372 deltasize = [None, 0, 0L]
2377 deltasize = [None, 0, 0L]
2373
2378
2374 def addsize(size, l):
2379 def addsize(size, l):
2375 if l[0] is None or size < l[0]:
2380 if l[0] is None or size < l[0]:
2376 l[0] = size
2381 l[0] = size
2377 if size > l[1]:
2382 if size > l[1]:
2378 l[1] = size
2383 l[1] = size
2379 l[2] += size
2384 l[2] += size
2380
2385
2381 numrevs = len(r)
2386 numrevs = len(r)
2382 for rev in xrange(numrevs):
2387 for rev in xrange(numrevs):
2383 p1, p2 = r.parentrevs(rev)
2388 p1, p2 = r.parentrevs(rev)
2384 delta = r.deltaparent(rev)
2389 delta = r.deltaparent(rev)
2385 if format > 0:
2390 if format > 0:
2386 addsize(r.rawsize(rev), datasize)
2391 addsize(r.rawsize(rev), datasize)
2387 if p2 != nullrev:
2392 if p2 != nullrev:
2388 nummerges += 1
2393 nummerges += 1
2389 size = r.length(rev)
2394 size = r.length(rev)
2390 if delta == nullrev:
2395 if delta == nullrev:
2391 chainlengths.append(0)
2396 chainlengths.append(0)
2392 numfull += 1
2397 numfull += 1
2393 addsize(size, fullsize)
2398 addsize(size, fullsize)
2394 else:
2399 else:
2395 chainlengths.append(chainlengths[delta] + 1)
2400 chainlengths.append(chainlengths[delta] + 1)
2396 addsize(size, deltasize)
2401 addsize(size, deltasize)
2397 if delta == rev - 1:
2402 if delta == rev - 1:
2398 numprev += 1
2403 numprev += 1
2399 if delta == p1:
2404 if delta == p1:
2400 nump1prev += 1
2405 nump1prev += 1
2401 elif delta == p2:
2406 elif delta == p2:
2402 nump2prev += 1
2407 nump2prev += 1
2403 elif delta == p1:
2408 elif delta == p1:
2404 nump1 += 1
2409 nump1 += 1
2405 elif delta == p2:
2410 elif delta == p2:
2406 nump2 += 1
2411 nump2 += 1
2407 elif delta != nullrev:
2412 elif delta != nullrev:
2408 numother += 1
2413 numother += 1
2409
2414
2410 # Adjust size min value for empty cases
2415 # Adjust size min value for empty cases
2411 for size in (datasize, fullsize, deltasize):
2416 for size in (datasize, fullsize, deltasize):
2412 if size[0] is None:
2417 if size[0] is None:
2413 size[0] = 0
2418 size[0] = 0
2414
2419
2415 numdeltas = numrevs - numfull
2420 numdeltas = numrevs - numfull
2416 numoprev = numprev - nump1prev - nump2prev
2421 numoprev = numprev - nump1prev - nump2prev
2417 totalrawsize = datasize[2]
2422 totalrawsize = datasize[2]
2418 datasize[2] /= numrevs
2423 datasize[2] /= numrevs
2419 fulltotal = fullsize[2]
2424 fulltotal = fullsize[2]
2420 fullsize[2] /= numfull
2425 fullsize[2] /= numfull
2421 deltatotal = deltasize[2]
2426 deltatotal = deltasize[2]
2422 if numrevs - numfull > 0:
2427 if numrevs - numfull > 0:
2423 deltasize[2] /= numrevs - numfull
2428 deltasize[2] /= numrevs - numfull
2424 totalsize = fulltotal + deltatotal
2429 totalsize = fulltotal + deltatotal
2425 avgchainlen = sum(chainlengths) / numrevs
2430 avgchainlen = sum(chainlengths) / numrevs
2426 compratio = totalrawsize / totalsize
2431 compratio = totalrawsize / totalsize
2427
2432
2428 basedfmtstr = '%%%dd\n'
2433 basedfmtstr = '%%%dd\n'
2429 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2434 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2430
2435
2431 def dfmtstr(max):
2436 def dfmtstr(max):
2432 return basedfmtstr % len(str(max))
2437 return basedfmtstr % len(str(max))
2433 def pcfmtstr(max, padding=0):
2438 def pcfmtstr(max, padding=0):
2434 return basepcfmtstr % (len(str(max)), ' ' * padding)
2439 return basepcfmtstr % (len(str(max)), ' ' * padding)
2435
2440
2436 def pcfmt(value, total):
2441 def pcfmt(value, total):
2437 return (value, 100 * float(value) / total)
2442 return (value, 100 * float(value) / total)
2438
2443
2439 ui.write(('format : %d\n') % format)
2444 ui.write(('format : %d\n') % format)
2440 ui.write(('flags : %s\n') % ', '.join(flags))
2445 ui.write(('flags : %s\n') % ', '.join(flags))
2441
2446
2442 ui.write('\n')
2447 ui.write('\n')
2443 fmt = pcfmtstr(totalsize)
2448 fmt = pcfmtstr(totalsize)
2444 fmt2 = dfmtstr(totalsize)
2449 fmt2 = dfmtstr(totalsize)
2445 ui.write(('revisions : ') + fmt2 % numrevs)
2450 ui.write(('revisions : ') + fmt2 % numrevs)
2446 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2451 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2447 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2452 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2448 ui.write(('revisions : ') + fmt2 % numrevs)
2453 ui.write(('revisions : ') + fmt2 % numrevs)
2449 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
2454 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
2450 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2455 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2451 ui.write(('revision size : ') + fmt2 % totalsize)
2456 ui.write(('revision size : ') + fmt2 % totalsize)
2452 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
2457 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
2453 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2458 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2454
2459
2455 ui.write('\n')
2460 ui.write('\n')
2456 fmt = dfmtstr(max(avgchainlen, compratio))
2461 fmt = dfmtstr(max(avgchainlen, compratio))
2457 ui.write(('avg chain length : ') + fmt % avgchainlen)
2462 ui.write(('avg chain length : ') + fmt % avgchainlen)
2458 ui.write(('compression ratio : ') + fmt % compratio)
2463 ui.write(('compression ratio : ') + fmt % compratio)
2459
2464
2460 if format > 0:
2465 if format > 0:
2461 ui.write('\n')
2466 ui.write('\n')
2462 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2467 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2463 % tuple(datasize))
2468 % tuple(datasize))
2464 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2469 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2465 % tuple(fullsize))
2470 % tuple(fullsize))
2466 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2471 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2467 % tuple(deltasize))
2472 % tuple(deltasize))
2468
2473
2469 if numdeltas > 0:
2474 if numdeltas > 0:
2470 ui.write('\n')
2475 ui.write('\n')
2471 fmt = pcfmtstr(numdeltas)
2476 fmt = pcfmtstr(numdeltas)
2472 fmt2 = pcfmtstr(numdeltas, 4)
2477 fmt2 = pcfmtstr(numdeltas, 4)
2473 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2478 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2474 if numprev > 0:
2479 if numprev > 0:
2475 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2480 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2476 numprev))
2481 numprev))
2477 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2482 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2478 numprev))
2483 numprev))
2479 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2484 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2480 numprev))
2485 numprev))
2481 if gdelta:
2486 if gdelta:
2482 ui.write(('deltas against p1 : ')
2487 ui.write(('deltas against p1 : ')
2483 + fmt % pcfmt(nump1, numdeltas))
2488 + fmt % pcfmt(nump1, numdeltas))
2484 ui.write(('deltas against p2 : ')
2489 ui.write(('deltas against p2 : ')
2485 + fmt % pcfmt(nump2, numdeltas))
2490 + fmt % pcfmt(nump2, numdeltas))
2486 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2491 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2487 numdeltas))
2492 numdeltas))
2488
2493
2489 @command('debugrevspec', [], ('REVSPEC'))
2494 @command('debugrevspec', [], ('REVSPEC'))
2490 def debugrevspec(ui, repo, expr):
2495 def debugrevspec(ui, repo, expr):
2491 """parse and apply a revision specification
2496 """parse and apply a revision specification
2492
2497
2493 Use --verbose to print the parsed tree before and after aliases
2498 Use --verbose to print the parsed tree before and after aliases
2494 expansion.
2499 expansion.
2495 """
2500 """
2496 if ui.verbose:
2501 if ui.verbose:
2497 tree = revset.parse(expr)[0]
2502 tree = revset.parse(expr)[0]
2498 ui.note(revset.prettyformat(tree), "\n")
2503 ui.note(revset.prettyformat(tree), "\n")
2499 newtree = revset.findaliases(ui, tree)
2504 newtree = revset.findaliases(ui, tree)
2500 if newtree != tree:
2505 if newtree != tree:
2501 ui.note(revset.prettyformat(newtree), "\n")
2506 ui.note(revset.prettyformat(newtree), "\n")
2502 func = revset.match(ui, expr)
2507 func = revset.match(ui, expr)
2503 for c in func(repo, range(len(repo))):
2508 for c in func(repo, range(len(repo))):
2504 ui.write("%s\n" % c)
2509 ui.write("%s\n" % c)
2505
2510
2506 @command('debugsetparents', [], _('REV1 [REV2]'))
2511 @command('debugsetparents', [], _('REV1 [REV2]'))
2507 def debugsetparents(ui, repo, rev1, rev2=None):
2512 def debugsetparents(ui, repo, rev1, rev2=None):
2508 """manually set the parents of the current working directory
2513 """manually set the parents of the current working directory
2509
2514
2510 This is useful for writing repository conversion tools, but should
2515 This is useful for writing repository conversion tools, but should
2511 be used with care.
2516 be used with care.
2512
2517
2513 Returns 0 on success.
2518 Returns 0 on success.
2514 """
2519 """
2515
2520
2516 r1 = scmutil.revsingle(repo, rev1).node()
2521 r1 = scmutil.revsingle(repo, rev1).node()
2517 r2 = scmutil.revsingle(repo, rev2, 'null').node()
2522 r2 = scmutil.revsingle(repo, rev2, 'null').node()
2518
2523
2519 wlock = repo.wlock()
2524 wlock = repo.wlock()
2520 try:
2525 try:
2521 repo.setparents(r1, r2)
2526 repo.setparents(r1, r2)
2522 finally:
2527 finally:
2523 wlock.release()
2528 wlock.release()
2524
2529
2525 @command('debugdirstate|debugstate',
2530 @command('debugdirstate|debugstate',
2526 [('', 'nodates', None, _('do not display the saved mtime')),
2531 [('', 'nodates', None, _('do not display the saved mtime')),
2527 ('', 'datesort', None, _('sort by saved mtime'))],
2532 ('', 'datesort', None, _('sort by saved mtime'))],
2528 _('[OPTION]...'))
2533 _('[OPTION]...'))
2529 def debugstate(ui, repo, nodates=None, datesort=None):
2534 def debugstate(ui, repo, nodates=None, datesort=None):
2530 """show the contents of the current dirstate"""
2535 """show the contents of the current dirstate"""
2531 timestr = ""
2536 timestr = ""
2532 showdate = not nodates
2537 showdate = not nodates
2533 if datesort:
2538 if datesort:
2534 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
2539 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
2535 else:
2540 else:
2536 keyfunc = None # sort by filename
2541 keyfunc = None # sort by filename
2537 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
2542 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
2538 if showdate:
2543 if showdate:
2539 if ent[3] == -1:
2544 if ent[3] == -1:
2540 # Pad or slice to locale representation
2545 # Pad or slice to locale representation
2541 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ",
2546 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ",
2542 time.localtime(0)))
2547 time.localtime(0)))
2543 timestr = 'unset'
2548 timestr = 'unset'
2544 timestr = (timestr[:locale_len] +
2549 timestr = (timestr[:locale_len] +
2545 ' ' * (locale_len - len(timestr)))
2550 ' ' * (locale_len - len(timestr)))
2546 else:
2551 else:
2547 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
2552 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
2548 time.localtime(ent[3]))
2553 time.localtime(ent[3]))
2549 if ent[1] & 020000:
2554 if ent[1] & 020000:
2550 mode = 'lnk'
2555 mode = 'lnk'
2551 else:
2556 else:
2552 mode = '%3o' % (ent[1] & 0777 & ~util.umask)
2557 mode = '%3o' % (ent[1] & 0777 & ~util.umask)
2553 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
2558 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
2554 for f in repo.dirstate.copies():
2559 for f in repo.dirstate.copies():
2555 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
2560 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
2556
2561
2557 @command('debugsub',
2562 @command('debugsub',
2558 [('r', 'rev', '',
2563 [('r', 'rev', '',
2559 _('revision to check'), _('REV'))],
2564 _('revision to check'), _('REV'))],
2560 _('[-r REV] [REV]'))
2565 _('[-r REV] [REV]'))
2561 def debugsub(ui, repo, rev=None):
2566 def debugsub(ui, repo, rev=None):
2562 ctx = scmutil.revsingle(repo, rev, None)
2567 ctx = scmutil.revsingle(repo, rev, None)
2563 for k, v in sorted(ctx.substate.items()):
2568 for k, v in sorted(ctx.substate.items()):
2564 ui.write(('path %s\n') % k)
2569 ui.write(('path %s\n') % k)
2565 ui.write((' source %s\n') % v[0])
2570 ui.write((' source %s\n') % v[0])
2566 ui.write((' revision %s\n') % v[1])
2571 ui.write((' revision %s\n') % v[1])
2567
2572
2568 @command('debugsuccessorssets',
2573 @command('debugsuccessorssets',
2569 [],
2574 [],
2570 _('[REV]'))
2575 _('[REV]'))
2571 def debugsuccessorssets(ui, repo, *revs):
2576 def debugsuccessorssets(ui, repo, *revs):
2572 """show set of successors for revision
2577 """show set of successors for revision
2573
2578
2574 A successors set of changeset A is a consistent group of revisions that
2579 A successors set of changeset A is a consistent group of revisions that
2575 succeed A. It contains non-obsolete changesets only.
2580 succeed A. It contains non-obsolete changesets only.
2576
2581
2577 In most cases a changeset A has a single successors set containing a single
2582 In most cases a changeset A has a single successors set containing a single
2578 successor (changeset A replaced by A').
2583 successor (changeset A replaced by A').
2579
2584
2580 A changeset that is made obsolete with no successors are called "pruned".
2585 A changeset that is made obsolete with no successors are called "pruned".
2581 Such changesets have no successors sets at all.
2586 Such changesets have no successors sets at all.
2582
2587
2583 A changeset that has been "split" will have a successors set containing
2588 A changeset that has been "split" will have a successors set containing
2584 more than one successor.
2589 more than one successor.
2585
2590
2586 A changeset that has been rewritten in multiple different ways is called
2591 A changeset that has been rewritten in multiple different ways is called
2587 "divergent". Such changesets have multiple successor sets (each of which
2592 "divergent". Such changesets have multiple successor sets (each of which
2588 may also be split, i.e. have multiple successors).
2593 may also be split, i.e. have multiple successors).
2589
2594
2590 Results are displayed as follows::
2595 Results are displayed as follows::
2591
2596
2592 <rev1>
2597 <rev1>
2593 <successors-1A>
2598 <successors-1A>
2594 <rev2>
2599 <rev2>
2595 <successors-2A>
2600 <successors-2A>
2596 <successors-2B1> <successors-2B2> <successors-2B3>
2601 <successors-2B1> <successors-2B2> <successors-2B3>
2597
2602
2598 Here rev2 has two possible (i.e. divergent) successors sets. The first
2603 Here rev2 has two possible (i.e. divergent) successors sets. The first
2599 holds one element, whereas the second holds three (i.e. the changeset has
2604 holds one element, whereas the second holds three (i.e. the changeset has
2600 been split).
2605 been split).
2601 """
2606 """
2602 # passed to successorssets caching computation from one call to another
2607 # passed to successorssets caching computation from one call to another
2603 cache = {}
2608 cache = {}
2604 ctx2str = str
2609 ctx2str = str
2605 node2str = short
2610 node2str = short
2606 if ui.debug():
2611 if ui.debug():
2607 def ctx2str(ctx):
2612 def ctx2str(ctx):
2608 return ctx.hex()
2613 return ctx.hex()
2609 node2str = hex
2614 node2str = hex
2610 for rev in scmutil.revrange(repo, revs):
2615 for rev in scmutil.revrange(repo, revs):
2611 ctx = repo[rev]
2616 ctx = repo[rev]
2612 ui.write('%s\n'% ctx2str(ctx))
2617 ui.write('%s\n'% ctx2str(ctx))
2613 for succsset in obsolete.successorssets(repo, ctx.node(), cache):
2618 for succsset in obsolete.successorssets(repo, ctx.node(), cache):
2614 if succsset:
2619 if succsset:
2615 ui.write(' ')
2620 ui.write(' ')
2616 ui.write(node2str(succsset[0]))
2621 ui.write(node2str(succsset[0]))
2617 for node in succsset[1:]:
2622 for node in succsset[1:]:
2618 ui.write(' ')
2623 ui.write(' ')
2619 ui.write(node2str(node))
2624 ui.write(node2str(node))
2620 ui.write('\n')
2625 ui.write('\n')
2621
2626
2622 @command('debugwalk', walkopts, _('[OPTION]... [FILE]...'))
2627 @command('debugwalk', walkopts, _('[OPTION]... [FILE]...'))
2623 def debugwalk(ui, repo, *pats, **opts):
2628 def debugwalk(ui, repo, *pats, **opts):
2624 """show how files match on given patterns"""
2629 """show how files match on given patterns"""
2625 m = scmutil.match(repo[None], pats, opts)
2630 m = scmutil.match(repo[None], pats, opts)
2626 items = list(repo.walk(m))
2631 items = list(repo.walk(m))
2627 if not items:
2632 if not items:
2628 return
2633 return
2629 f = lambda fn: fn
2634 f = lambda fn: fn
2630 if ui.configbool('ui', 'slash') and os.sep != '/':
2635 if ui.configbool('ui', 'slash') and os.sep != '/':
2631 f = lambda fn: util.normpath(fn)
2636 f = lambda fn: util.normpath(fn)
2632 fmt = 'f %%-%ds %%-%ds %%s' % (
2637 fmt = 'f %%-%ds %%-%ds %%s' % (
2633 max([len(abs) for abs in items]),
2638 max([len(abs) for abs in items]),
2634 max([len(m.rel(abs)) for abs in items]))
2639 max([len(m.rel(abs)) for abs in items]))
2635 for abs in items:
2640 for abs in items:
2636 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2641 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2637 ui.write("%s\n" % line.rstrip())
2642 ui.write("%s\n" % line.rstrip())
2638
2643
2639 @command('debugwireargs',
2644 @command('debugwireargs',
2640 [('', 'three', '', 'three'),
2645 [('', 'three', '', 'three'),
2641 ('', 'four', '', 'four'),
2646 ('', 'four', '', 'four'),
2642 ('', 'five', '', 'five'),
2647 ('', 'five', '', 'five'),
2643 ] + remoteopts,
2648 ] + remoteopts,
2644 _('REPO [OPTIONS]... [ONE [TWO]]'))
2649 _('REPO [OPTIONS]... [ONE [TWO]]'))
2645 def debugwireargs(ui, repopath, *vals, **opts):
2650 def debugwireargs(ui, repopath, *vals, **opts):
2646 repo = hg.peer(ui, opts, repopath)
2651 repo = hg.peer(ui, opts, repopath)
2647 for opt in remoteopts:
2652 for opt in remoteopts:
2648 del opts[opt[1]]
2653 del opts[opt[1]]
2649 args = {}
2654 args = {}
2650 for k, v in opts.iteritems():
2655 for k, v in opts.iteritems():
2651 if v:
2656 if v:
2652 args[k] = v
2657 args[k] = v
2653 # run twice to check that we don't mess up the stream for the next command
2658 # run twice to check that we don't mess up the stream for the next command
2654 res1 = repo.debugwireargs(*vals, **args)
2659 res1 = repo.debugwireargs(*vals, **args)
2655 res2 = repo.debugwireargs(*vals, **args)
2660 res2 = repo.debugwireargs(*vals, **args)
2656 ui.write("%s\n" % res1)
2661 ui.write("%s\n" % res1)
2657 if res1 != res2:
2662 if res1 != res2:
2658 ui.warn("%s\n" % res2)
2663 ui.warn("%s\n" % res2)
2659
2664
2660 @command('^diff',
2665 @command('^diff',
2661 [('r', 'rev', [], _('revision'), _('REV')),
2666 [('r', 'rev', [], _('revision'), _('REV')),
2662 ('c', 'change', '', _('change made by revision'), _('REV'))
2667 ('c', 'change', '', _('change made by revision'), _('REV'))
2663 ] + diffopts + diffopts2 + walkopts + subrepoopts,
2668 ] + diffopts + diffopts2 + walkopts + subrepoopts,
2664 _('[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...'))
2669 _('[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...'))
2665 def diff(ui, repo, *pats, **opts):
2670 def diff(ui, repo, *pats, **opts):
2666 """diff repository (or selected files)
2671 """diff repository (or selected files)
2667
2672
2668 Show differences between revisions for the specified files.
2673 Show differences between revisions for the specified files.
2669
2674
2670 Differences between files are shown using the unified diff format.
2675 Differences between files are shown using the unified diff format.
2671
2676
2672 .. note::
2677 .. note::
2673 diff may generate unexpected results for merges, as it will
2678 diff may generate unexpected results for merges, as it will
2674 default to comparing against the working directory's first
2679 default to comparing against the working directory's first
2675 parent changeset if no revisions are specified.
2680 parent changeset if no revisions are specified.
2676
2681
2677 When two revision arguments are given, then changes are shown
2682 When two revision arguments are given, then changes are shown
2678 between those revisions. If only one revision is specified then
2683 between those revisions. If only one revision is specified then
2679 that revision is compared to the working directory, and, when no
2684 that revision is compared to the working directory, and, when no
2680 revisions are specified, the working directory files are compared
2685 revisions are specified, the working directory files are compared
2681 to its parent.
2686 to its parent.
2682
2687
2683 Alternatively you can specify -c/--change with a revision to see
2688 Alternatively you can specify -c/--change with a revision to see
2684 the changes in that changeset relative to its first parent.
2689 the changes in that changeset relative to its first parent.
2685
2690
2686 Without the -a/--text option, diff will avoid generating diffs of
2691 Without the -a/--text option, diff will avoid generating diffs of
2687 files it detects as binary. With -a, diff will generate a diff
2692 files it detects as binary. With -a, diff will generate a diff
2688 anyway, probably with undesirable results.
2693 anyway, probably with undesirable results.
2689
2694
2690 Use the -g/--git option to generate diffs in the git extended diff
2695 Use the -g/--git option to generate diffs in the git extended diff
2691 format. For more information, read :hg:`help diffs`.
2696 format. For more information, read :hg:`help diffs`.
2692
2697
2693 .. container:: verbose
2698 .. container:: verbose
2694
2699
2695 Examples:
2700 Examples:
2696
2701
2697 - compare a file in the current working directory to its parent::
2702 - compare a file in the current working directory to its parent::
2698
2703
2699 hg diff foo.c
2704 hg diff foo.c
2700
2705
2701 - compare two historical versions of a directory, with rename info::
2706 - compare two historical versions of a directory, with rename info::
2702
2707
2703 hg diff --git -r 1.0:1.2 lib/
2708 hg diff --git -r 1.0:1.2 lib/
2704
2709
2705 - get change stats relative to the last change on some date::
2710 - get change stats relative to the last change on some date::
2706
2711
2707 hg diff --stat -r "date('may 2')"
2712 hg diff --stat -r "date('may 2')"
2708
2713
2709 - diff all newly-added files that contain a keyword::
2714 - diff all newly-added files that contain a keyword::
2710
2715
2711 hg diff "set:added() and grep(GNU)"
2716 hg diff "set:added() and grep(GNU)"
2712
2717
2713 - compare a revision and its parents::
2718 - compare a revision and its parents::
2714
2719
2715 hg diff -c 9353 # compare against first parent
2720 hg diff -c 9353 # compare against first parent
2716 hg diff -r 9353^:9353 # same using revset syntax
2721 hg diff -r 9353^:9353 # same using revset syntax
2717 hg diff -r 9353^2:9353 # compare against the second parent
2722 hg diff -r 9353^2:9353 # compare against the second parent
2718
2723
2719 Returns 0 on success.
2724 Returns 0 on success.
2720 """
2725 """
2721
2726
2722 revs = opts.get('rev')
2727 revs = opts.get('rev')
2723 change = opts.get('change')
2728 change = opts.get('change')
2724 stat = opts.get('stat')
2729 stat = opts.get('stat')
2725 reverse = opts.get('reverse')
2730 reverse = opts.get('reverse')
2726
2731
2727 if revs and change:
2732 if revs and change:
2728 msg = _('cannot specify --rev and --change at the same time')
2733 msg = _('cannot specify --rev and --change at the same time')
2729 raise util.Abort(msg)
2734 raise util.Abort(msg)
2730 elif change:
2735 elif change:
2731 node2 = scmutil.revsingle(repo, change, None).node()
2736 node2 = scmutil.revsingle(repo, change, None).node()
2732 node1 = repo[node2].p1().node()
2737 node1 = repo[node2].p1().node()
2733 else:
2738 else:
2734 node1, node2 = scmutil.revpair(repo, revs)
2739 node1, node2 = scmutil.revpair(repo, revs)
2735
2740
2736 if reverse:
2741 if reverse:
2737 node1, node2 = node2, node1
2742 node1, node2 = node2, node1
2738
2743
2739 diffopts = patch.diffopts(ui, opts)
2744 diffopts = patch.diffopts(ui, opts)
2740 m = scmutil.match(repo[node2], pats, opts)
2745 m = scmutil.match(repo[node2], pats, opts)
2741 cmdutil.diffordiffstat(ui, repo, diffopts, node1, node2, m, stat=stat,
2746 cmdutil.diffordiffstat(ui, repo, diffopts, node1, node2, m, stat=stat,
2742 listsubrepos=opts.get('subrepos'))
2747 listsubrepos=opts.get('subrepos'))
2743
2748
2744 @command('^export',
2749 @command('^export',
2745 [('o', 'output', '',
2750 [('o', 'output', '',
2746 _('print output to file with formatted name'), _('FORMAT')),
2751 _('print output to file with formatted name'), _('FORMAT')),
2747 ('', 'switch-parent', None, _('diff against the second parent')),
2752 ('', 'switch-parent', None, _('diff against the second parent')),
2748 ('r', 'rev', [], _('revisions to export'), _('REV')),
2753 ('r', 'rev', [], _('revisions to export'), _('REV')),
2749 ] + diffopts,
2754 ] + diffopts,
2750 _('[OPTION]... [-o OUTFILESPEC] [-r] [REV]...'))
2755 _('[OPTION]... [-o OUTFILESPEC] [-r] [REV]...'))
2751 def export(ui, repo, *changesets, **opts):
2756 def export(ui, repo, *changesets, **opts):
2752 """dump the header and diffs for one or more changesets
2757 """dump the header and diffs for one or more changesets
2753
2758
2754 Print the changeset header and diffs for one or more revisions.
2759 Print the changeset header and diffs for one or more revisions.
2755 If no revision is given, the parent of the working directory is used.
2760 If no revision is given, the parent of the working directory is used.
2756
2761
2757 The information shown in the changeset header is: author, date,
2762 The information shown in the changeset header is: author, date,
2758 branch name (if non-default), changeset hash, parent(s) and commit
2763 branch name (if non-default), changeset hash, parent(s) and commit
2759 comment.
2764 comment.
2760
2765
2761 .. note::
2766 .. note::
2762 export may generate unexpected diff output for merge
2767 export may generate unexpected diff output for merge
2763 changesets, as it will compare the merge changeset against its
2768 changesets, as it will compare the merge changeset against its
2764 first parent only.
2769 first parent only.
2765
2770
2766 Output may be to a file, in which case the name of the file is
2771 Output may be to a file, in which case the name of the file is
2767 given using a format string. The formatting rules are as follows:
2772 given using a format string. The formatting rules are as follows:
2768
2773
2769 :``%%``: literal "%" character
2774 :``%%``: literal "%" character
2770 :``%H``: changeset hash (40 hexadecimal digits)
2775 :``%H``: changeset hash (40 hexadecimal digits)
2771 :``%N``: number of patches being generated
2776 :``%N``: number of patches being generated
2772 :``%R``: changeset revision number
2777 :``%R``: changeset revision number
2773 :``%b``: basename of the exporting repository
2778 :``%b``: basename of the exporting repository
2774 :``%h``: short-form changeset hash (12 hexadecimal digits)
2779 :``%h``: short-form changeset hash (12 hexadecimal digits)
2775 :``%m``: first line of the commit message (only alphanumeric characters)
2780 :``%m``: first line of the commit message (only alphanumeric characters)
2776 :``%n``: zero-padded sequence number, starting at 1
2781 :``%n``: zero-padded sequence number, starting at 1
2777 :``%r``: zero-padded changeset revision number
2782 :``%r``: zero-padded changeset revision number
2778
2783
2779 Without the -a/--text option, export will avoid generating diffs
2784 Without the -a/--text option, export will avoid generating diffs
2780 of files it detects as binary. With -a, export will generate a
2785 of files it detects as binary. With -a, export will generate a
2781 diff anyway, probably with undesirable results.
2786 diff anyway, probably with undesirable results.
2782
2787
2783 Use the -g/--git option to generate diffs in the git extended diff
2788 Use the -g/--git option to generate diffs in the git extended diff
2784 format. See :hg:`help diffs` for more information.
2789 format. See :hg:`help diffs` for more information.
2785
2790
2786 With the --switch-parent option, the diff will be against the
2791 With the --switch-parent option, the diff will be against the
2787 second parent. It can be useful to review a merge.
2792 second parent. It can be useful to review a merge.
2788
2793
2789 .. container:: verbose
2794 .. container:: verbose
2790
2795
2791 Examples:
2796 Examples:
2792
2797
2793 - use export and import to transplant a bugfix to the current
2798 - use export and import to transplant a bugfix to the current
2794 branch::
2799 branch::
2795
2800
2796 hg export -r 9353 | hg import -
2801 hg export -r 9353 | hg import -
2797
2802
2798 - export all the changesets between two revisions to a file with
2803 - export all the changesets between two revisions to a file with
2799 rename information::
2804 rename information::
2800
2805
2801 hg export --git -r 123:150 > changes.txt
2806 hg export --git -r 123:150 > changes.txt
2802
2807
2803 - split outgoing changes into a series of patches with
2808 - split outgoing changes into a series of patches with
2804 descriptive names::
2809 descriptive names::
2805
2810
2806 hg export -r "outgoing()" -o "%n-%m.patch"
2811 hg export -r "outgoing()" -o "%n-%m.patch"
2807
2812
2808 Returns 0 on success.
2813 Returns 0 on success.
2809 """
2814 """
2810 changesets += tuple(opts.get('rev', []))
2815 changesets += tuple(opts.get('rev', []))
2811 if not changesets:
2816 if not changesets:
2812 changesets = ['.']
2817 changesets = ['.']
2813 revs = scmutil.revrange(repo, changesets)
2818 revs = scmutil.revrange(repo, changesets)
2814 if not revs:
2819 if not revs:
2815 raise util.Abort(_("export requires at least one changeset"))
2820 raise util.Abort(_("export requires at least one changeset"))
2816 if len(revs) > 1:
2821 if len(revs) > 1:
2817 ui.note(_('exporting patches:\n'))
2822 ui.note(_('exporting patches:\n'))
2818 else:
2823 else:
2819 ui.note(_('exporting patch:\n'))
2824 ui.note(_('exporting patch:\n'))
2820 cmdutil.export(repo, revs, template=opts.get('output'),
2825 cmdutil.export(repo, revs, template=opts.get('output'),
2821 switch_parent=opts.get('switch_parent'),
2826 switch_parent=opts.get('switch_parent'),
2822 opts=patch.diffopts(ui, opts))
2827 opts=patch.diffopts(ui, opts))
2823
2828
2824 @command('^forget', walkopts, _('[OPTION]... FILE...'))
2829 @command('^forget', walkopts, _('[OPTION]... FILE...'))
2825 def forget(ui, repo, *pats, **opts):
2830 def forget(ui, repo, *pats, **opts):
2826 """forget the specified files on the next commit
2831 """forget the specified files on the next commit
2827
2832
2828 Mark the specified files so they will no longer be tracked
2833 Mark the specified files so they will no longer be tracked
2829 after the next commit.
2834 after the next commit.
2830
2835
2831 This only removes files from the current branch, not from the
2836 This only removes files from the current branch, not from the
2832 entire project history, and it does not delete them from the
2837 entire project history, and it does not delete them from the
2833 working directory.
2838 working directory.
2834
2839
2835 To undo a forget before the next commit, see :hg:`add`.
2840 To undo a forget before the next commit, see :hg:`add`.
2836
2841
2837 .. container:: verbose
2842 .. container:: verbose
2838
2843
2839 Examples:
2844 Examples:
2840
2845
2841 - forget newly-added binary files::
2846 - forget newly-added binary files::
2842
2847
2843 hg forget "set:added() and binary()"
2848 hg forget "set:added() and binary()"
2844
2849
2845 - forget files that would be excluded by .hgignore::
2850 - forget files that would be excluded by .hgignore::
2846
2851
2847 hg forget "set:hgignore()"
2852 hg forget "set:hgignore()"
2848
2853
2849 Returns 0 on success.
2854 Returns 0 on success.
2850 """
2855 """
2851
2856
2852 if not pats:
2857 if not pats:
2853 raise util.Abort(_('no files specified'))
2858 raise util.Abort(_('no files specified'))
2854
2859
2855 m = scmutil.match(repo[None], pats, opts)
2860 m = scmutil.match(repo[None], pats, opts)
2856 rejected = cmdutil.forget(ui, repo, m, prefix="", explicitonly=False)[0]
2861 rejected = cmdutil.forget(ui, repo, m, prefix="", explicitonly=False)[0]
2857 return rejected and 1 or 0
2862 return rejected and 1 or 0
2858
2863
2859 @command(
2864 @command(
2860 'graft',
2865 'graft',
2861 [('r', 'rev', [], _('revisions to graft'), _('REV')),
2866 [('r', 'rev', [], _('revisions to graft'), _('REV')),
2862 ('c', 'continue', False, _('resume interrupted graft')),
2867 ('c', 'continue', False, _('resume interrupted graft')),
2863 ('e', 'edit', False, _('invoke editor on commit messages')),
2868 ('e', 'edit', False, _('invoke editor on commit messages')),
2864 ('', 'log', None, _('append graft info to log message')),
2869 ('', 'log', None, _('append graft info to log message')),
2865 ('D', 'currentdate', False,
2870 ('D', 'currentdate', False,
2866 _('record the current date as commit date')),
2871 _('record the current date as commit date')),
2867 ('U', 'currentuser', False,
2872 ('U', 'currentuser', False,
2868 _('record the current user as committer'), _('DATE'))]
2873 _('record the current user as committer'), _('DATE'))]
2869 + commitopts2 + mergetoolopts + dryrunopts,
2874 + commitopts2 + mergetoolopts + dryrunopts,
2870 _('[OPTION]... [-r] REV...'))
2875 _('[OPTION]... [-r] REV...'))
2871 def graft(ui, repo, *revs, **opts):
2876 def graft(ui, repo, *revs, **opts):
2872 '''copy changes from other branches onto the current branch
2877 '''copy changes from other branches onto the current branch
2873
2878
2874 This command uses Mercurial's merge logic to copy individual
2879 This command uses Mercurial's merge logic to copy individual
2875 changes from other branches without merging branches in the
2880 changes from other branches without merging branches in the
2876 history graph. This is sometimes known as 'backporting' or
2881 history graph. This is sometimes known as 'backporting' or
2877 'cherry-picking'. By default, graft will copy user, date, and
2882 'cherry-picking'. By default, graft will copy user, date, and
2878 description from the source changesets.
2883 description from the source changesets.
2879
2884
2880 Changesets that are ancestors of the current revision, that have
2885 Changesets that are ancestors of the current revision, that have
2881 already been grafted, or that are merges will be skipped.
2886 already been grafted, or that are merges will be skipped.
2882
2887
2883 If --log is specified, log messages will have a comment appended
2888 If --log is specified, log messages will have a comment appended
2884 of the form::
2889 of the form::
2885
2890
2886 (grafted from CHANGESETHASH)
2891 (grafted from CHANGESETHASH)
2887
2892
2888 If a graft merge results in conflicts, the graft process is
2893 If a graft merge results in conflicts, the graft process is
2889 interrupted so that the current merge can be manually resolved.
2894 interrupted so that the current merge can be manually resolved.
2890 Once all conflicts are addressed, the graft process can be
2895 Once all conflicts are addressed, the graft process can be
2891 continued with the -c/--continue option.
2896 continued with the -c/--continue option.
2892
2897
2893 .. note::
2898 .. note::
2894 The -c/--continue option does not reapply earlier options.
2899 The -c/--continue option does not reapply earlier options.
2895
2900
2896 .. container:: verbose
2901 .. container:: verbose
2897
2902
2898 Examples:
2903 Examples:
2899
2904
2900 - copy a single change to the stable branch and edit its description::
2905 - copy a single change to the stable branch and edit its description::
2901
2906
2902 hg update stable
2907 hg update stable
2903 hg graft --edit 9393
2908 hg graft --edit 9393
2904
2909
2905 - graft a range of changesets with one exception, updating dates::
2910 - graft a range of changesets with one exception, updating dates::
2906
2911
2907 hg graft -D "2085::2093 and not 2091"
2912 hg graft -D "2085::2093 and not 2091"
2908
2913
2909 - continue a graft after resolving conflicts::
2914 - continue a graft after resolving conflicts::
2910
2915
2911 hg graft -c
2916 hg graft -c
2912
2917
2913 - show the source of a grafted changeset::
2918 - show the source of a grafted changeset::
2914
2919
2915 hg log --debug -r tip
2920 hg log --debug -r tip
2916
2921
2917 Returns 0 on successful completion.
2922 Returns 0 on successful completion.
2918 '''
2923 '''
2919
2924
2920 revs = list(revs)
2925 revs = list(revs)
2921 revs.extend(opts['rev'])
2926 revs.extend(opts['rev'])
2922
2927
2923 if not opts.get('user') and opts.get('currentuser'):
2928 if not opts.get('user') and opts.get('currentuser'):
2924 opts['user'] = ui.username()
2929 opts['user'] = ui.username()
2925 if not opts.get('date') and opts.get('currentdate'):
2930 if not opts.get('date') and opts.get('currentdate'):
2926 opts['date'] = "%d %d" % util.makedate()
2931 opts['date'] = "%d %d" % util.makedate()
2927
2932
2928 editor = None
2933 editor = None
2929 if opts.get('edit'):
2934 if opts.get('edit'):
2930 editor = cmdutil.commitforceeditor
2935 editor = cmdutil.commitforceeditor
2931
2936
2932 cont = False
2937 cont = False
2933 if opts['continue']:
2938 if opts['continue']:
2934 cont = True
2939 cont = True
2935 if revs:
2940 if revs:
2936 raise util.Abort(_("can't specify --continue and revisions"))
2941 raise util.Abort(_("can't specify --continue and revisions"))
2937 # read in unfinished revisions
2942 # read in unfinished revisions
2938 try:
2943 try:
2939 nodes = repo.opener.read('graftstate').splitlines()
2944 nodes = repo.opener.read('graftstate').splitlines()
2940 revs = [repo[node].rev() for node in nodes]
2945 revs = [repo[node].rev() for node in nodes]
2941 except IOError, inst:
2946 except IOError, inst:
2942 if inst.errno != errno.ENOENT:
2947 if inst.errno != errno.ENOENT:
2943 raise
2948 raise
2944 raise util.Abort(_("no graft state found, can't continue"))
2949 raise util.Abort(_("no graft state found, can't continue"))
2945 else:
2950 else:
2946 cmdutil.bailifchanged(repo)
2951 cmdutil.bailifchanged(repo)
2947 if not revs:
2952 if not revs:
2948 raise util.Abort(_('no revisions specified'))
2953 raise util.Abort(_('no revisions specified'))
2949 revs = scmutil.revrange(repo, revs)
2954 revs = scmutil.revrange(repo, revs)
2950
2955
2951 # check for merges
2956 # check for merges
2952 for rev in repo.revs('%ld and merge()', revs):
2957 for rev in repo.revs('%ld and merge()', revs):
2953 ui.warn(_('skipping ungraftable merge revision %s\n') % rev)
2958 ui.warn(_('skipping ungraftable merge revision %s\n') % rev)
2954 revs.remove(rev)
2959 revs.remove(rev)
2955 if not revs:
2960 if not revs:
2956 return -1
2961 return -1
2957
2962
2958 # check for ancestors of dest branch
2963 # check for ancestors of dest branch
2959 crev = repo['.'].rev()
2964 crev = repo['.'].rev()
2960 ancestors = repo.changelog.ancestors([crev], inclusive=True)
2965 ancestors = repo.changelog.ancestors([crev], inclusive=True)
2961 # don't mutate while iterating, create a copy
2966 # don't mutate while iterating, create a copy
2962 for rev in list(revs):
2967 for rev in list(revs):
2963 if rev in ancestors:
2968 if rev in ancestors:
2964 ui.warn(_('skipping ancestor revision %s\n') % rev)
2969 ui.warn(_('skipping ancestor revision %s\n') % rev)
2965 revs.remove(rev)
2970 revs.remove(rev)
2966 if not revs:
2971 if not revs:
2967 return -1
2972 return -1
2968
2973
2969 # analyze revs for earlier grafts
2974 # analyze revs for earlier grafts
2970 ids = {}
2975 ids = {}
2971 for ctx in repo.set("%ld", revs):
2976 for ctx in repo.set("%ld", revs):
2972 ids[ctx.hex()] = ctx.rev()
2977 ids[ctx.hex()] = ctx.rev()
2973 n = ctx.extra().get('source')
2978 n = ctx.extra().get('source')
2974 if n:
2979 if n:
2975 ids[n] = ctx.rev()
2980 ids[n] = ctx.rev()
2976
2981
2977 # check ancestors for earlier grafts
2982 # check ancestors for earlier grafts
2978 ui.debug('scanning for duplicate grafts\n')
2983 ui.debug('scanning for duplicate grafts\n')
2979
2984
2980 for rev in repo.changelog.findmissingrevs(revs, [crev]):
2985 for rev in repo.changelog.findmissingrevs(revs, [crev]):
2981 ctx = repo[rev]
2986 ctx = repo[rev]
2982 n = ctx.extra().get('source')
2987 n = ctx.extra().get('source')
2983 if n in ids:
2988 if n in ids:
2984 r = repo[n].rev()
2989 r = repo[n].rev()
2985 if r in revs:
2990 if r in revs:
2986 ui.warn(_('skipping already grafted revision %s\n') % r)
2991 ui.warn(_('skipping already grafted revision %s\n') % r)
2987 revs.remove(r)
2992 revs.remove(r)
2988 elif ids[n] in revs:
2993 elif ids[n] in revs:
2989 ui.warn(_('skipping already grafted revision %s '
2994 ui.warn(_('skipping already grafted revision %s '
2990 '(same origin %d)\n') % (ids[n], r))
2995 '(same origin %d)\n') % (ids[n], r))
2991 revs.remove(ids[n])
2996 revs.remove(ids[n])
2992 elif ctx.hex() in ids:
2997 elif ctx.hex() in ids:
2993 r = ids[ctx.hex()]
2998 r = ids[ctx.hex()]
2994 ui.warn(_('skipping already grafted revision %s '
2999 ui.warn(_('skipping already grafted revision %s '
2995 '(was grafted from %d)\n') % (r, rev))
3000 '(was grafted from %d)\n') % (r, rev))
2996 revs.remove(r)
3001 revs.remove(r)
2997 if not revs:
3002 if not revs:
2998 return -1
3003 return -1
2999
3004
3000 wlock = repo.wlock()
3005 wlock = repo.wlock()
3001 try:
3006 try:
3002 current = repo['.']
3007 current = repo['.']
3003 for pos, ctx in enumerate(repo.set("%ld", revs)):
3008 for pos, ctx in enumerate(repo.set("%ld", revs)):
3004
3009
3005 ui.status(_('grafting revision %s\n') % ctx.rev())
3010 ui.status(_('grafting revision %s\n') % ctx.rev())
3006 if opts.get('dry_run'):
3011 if opts.get('dry_run'):
3007 continue
3012 continue
3008
3013
3009 source = ctx.extra().get('source')
3014 source = ctx.extra().get('source')
3010 if not source:
3015 if not source:
3011 source = ctx.hex()
3016 source = ctx.hex()
3012 extra = {'source': source}
3017 extra = {'source': source}
3013 user = ctx.user()
3018 user = ctx.user()
3014 if opts.get('user'):
3019 if opts.get('user'):
3015 user = opts['user']
3020 user = opts['user']
3016 date = ctx.date()
3021 date = ctx.date()
3017 if opts.get('date'):
3022 if opts.get('date'):
3018 date = opts['date']
3023 date = opts['date']
3019 message = ctx.description()
3024 message = ctx.description()
3020 if opts.get('log'):
3025 if opts.get('log'):
3021 message += '\n(grafted from %s)' % ctx.hex()
3026 message += '\n(grafted from %s)' % ctx.hex()
3022
3027
3023 # we don't merge the first commit when continuing
3028 # we don't merge the first commit when continuing
3024 if not cont:
3029 if not cont:
3025 # perform the graft merge with p1(rev) as 'ancestor'
3030 # perform the graft merge with p1(rev) as 'ancestor'
3026 try:
3031 try:
3027 # ui.forcemerge is an internal variable, do not document
3032 # ui.forcemerge is an internal variable, do not document
3028 repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
3033 repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
3029 stats = mergemod.update(repo, ctx.node(), True, True, False,
3034 stats = mergemod.update(repo, ctx.node(), True, True, False,
3030 ctx.p1().node())
3035 ctx.p1().node())
3031 finally:
3036 finally:
3032 repo.ui.setconfig('ui', 'forcemerge', '')
3037 repo.ui.setconfig('ui', 'forcemerge', '')
3033 # report any conflicts
3038 # report any conflicts
3034 if stats and stats[3] > 0:
3039 if stats and stats[3] > 0:
3035 # write out state for --continue
3040 # write out state for --continue
3036 nodelines = [repo[rev].hex() + "\n" for rev in revs[pos:]]
3041 nodelines = [repo[rev].hex() + "\n" for rev in revs[pos:]]
3037 repo.opener.write('graftstate', ''.join(nodelines))
3042 repo.opener.write('graftstate', ''.join(nodelines))
3038 raise util.Abort(
3043 raise util.Abort(
3039 _("unresolved conflicts, can't continue"),
3044 _("unresolved conflicts, can't continue"),
3040 hint=_('use hg resolve and hg graft --continue'))
3045 hint=_('use hg resolve and hg graft --continue'))
3041 else:
3046 else:
3042 cont = False
3047 cont = False
3043
3048
3044 # drop the second merge parent
3049 # drop the second merge parent
3045 repo.setparents(current.node(), nullid)
3050 repo.setparents(current.node(), nullid)
3046 repo.dirstate.write()
3051 repo.dirstate.write()
3047 # fix up dirstate for copies and renames
3052 # fix up dirstate for copies and renames
3048 cmdutil.duplicatecopies(repo, ctx.rev(), ctx.p1().rev())
3053 cmdutil.duplicatecopies(repo, ctx.rev(), ctx.p1().rev())
3049
3054
3050 # commit
3055 # commit
3051 node = repo.commit(text=message, user=user,
3056 node = repo.commit(text=message, user=user,
3052 date=date, extra=extra, editor=editor)
3057 date=date, extra=extra, editor=editor)
3053 if node is None:
3058 if node is None:
3054 ui.status(_('graft for revision %s is empty\n') % ctx.rev())
3059 ui.status(_('graft for revision %s is empty\n') % ctx.rev())
3055 else:
3060 else:
3056 current = repo[node]
3061 current = repo[node]
3057 finally:
3062 finally:
3058 wlock.release()
3063 wlock.release()
3059
3064
3060 # remove state when we complete successfully
3065 # remove state when we complete successfully
3061 if not opts.get('dry_run'):
3066 if not opts.get('dry_run'):
3062 util.unlinkpath(repo.join('graftstate'), ignoremissing=True)
3067 util.unlinkpath(repo.join('graftstate'), ignoremissing=True)
3063
3068
3064 return 0
3069 return 0
3065
3070
3066 @command('grep',
3071 @command('grep',
3067 [('0', 'print0', None, _('end fields with NUL')),
3072 [('0', 'print0', None, _('end fields with NUL')),
3068 ('', 'all', None, _('print all revisions that match')),
3073 ('', 'all', None, _('print all revisions that match')),
3069 ('a', 'text', None, _('treat all files as text')),
3074 ('a', 'text', None, _('treat all files as text')),
3070 ('f', 'follow', None,
3075 ('f', 'follow', None,
3071 _('follow changeset history,'
3076 _('follow changeset history,'
3072 ' or file history across copies and renames')),
3077 ' or file history across copies and renames')),
3073 ('i', 'ignore-case', None, _('ignore case when matching')),
3078 ('i', 'ignore-case', None, _('ignore case when matching')),
3074 ('l', 'files-with-matches', None,
3079 ('l', 'files-with-matches', None,
3075 _('print only filenames and revisions that match')),
3080 _('print only filenames and revisions that match')),
3076 ('n', 'line-number', None, _('print matching line numbers')),
3081 ('n', 'line-number', None, _('print matching line numbers')),
3077 ('r', 'rev', [],
3082 ('r', 'rev', [],
3078 _('only search files changed within revision range'), _('REV')),
3083 _('only search files changed within revision range'), _('REV')),
3079 ('u', 'user', None, _('list the author (long with -v)')),
3084 ('u', 'user', None, _('list the author (long with -v)')),
3080 ('d', 'date', None, _('list the date (short with -q)')),
3085 ('d', 'date', None, _('list the date (short with -q)')),
3081 ] + walkopts,
3086 ] + walkopts,
3082 _('[OPTION]... PATTERN [FILE]...'))
3087 _('[OPTION]... PATTERN [FILE]...'))
3083 def grep(ui, repo, pattern, *pats, **opts):
3088 def grep(ui, repo, pattern, *pats, **opts):
3084 """search for a pattern in specified files and revisions
3089 """search for a pattern in specified files and revisions
3085
3090
3086 Search revisions of files for a regular expression.
3091 Search revisions of files for a regular expression.
3087
3092
3088 This command behaves differently than Unix grep. It only accepts
3093 This command behaves differently than Unix grep. It only accepts
3089 Python/Perl regexps. It searches repository history, not the
3094 Python/Perl regexps. It searches repository history, not the
3090 working directory. It always prints the revision number in which a
3095 working directory. It always prints the revision number in which a
3091 match appears.
3096 match appears.
3092
3097
3093 By default, grep only prints output for the first revision of a
3098 By default, grep only prints output for the first revision of a
3094 file in which it finds a match. To get it to print every revision
3099 file in which it finds a match. To get it to print every revision
3095 that contains a change in match status ("-" for a match that
3100 that contains a change in match status ("-" for a match that
3096 becomes a non-match, or "+" for a non-match that becomes a match),
3101 becomes a non-match, or "+" for a non-match that becomes a match),
3097 use the --all flag.
3102 use the --all flag.
3098
3103
3099 Returns 0 if a match is found, 1 otherwise.
3104 Returns 0 if a match is found, 1 otherwise.
3100 """
3105 """
3101 reflags = re.M
3106 reflags = re.M
3102 if opts.get('ignore_case'):
3107 if opts.get('ignore_case'):
3103 reflags |= re.I
3108 reflags |= re.I
3104 try:
3109 try:
3105 regexp = util.compilere(pattern, reflags)
3110 regexp = util.compilere(pattern, reflags)
3106 except re.error, inst:
3111 except re.error, inst:
3107 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
3112 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
3108 return 1
3113 return 1
3109 sep, eol = ':', '\n'
3114 sep, eol = ':', '\n'
3110 if opts.get('print0'):
3115 if opts.get('print0'):
3111 sep = eol = '\0'
3116 sep = eol = '\0'
3112
3117
3113 getfile = util.lrucachefunc(repo.file)
3118 getfile = util.lrucachefunc(repo.file)
3114
3119
3115 def matchlines(body):
3120 def matchlines(body):
3116 begin = 0
3121 begin = 0
3117 linenum = 0
3122 linenum = 0
3118 while begin < len(body):
3123 while begin < len(body):
3119 match = regexp.search(body, begin)
3124 match = regexp.search(body, begin)
3120 if not match:
3125 if not match:
3121 break
3126 break
3122 mstart, mend = match.span()
3127 mstart, mend = match.span()
3123 linenum += body.count('\n', begin, mstart) + 1
3128 linenum += body.count('\n', begin, mstart) + 1
3124 lstart = body.rfind('\n', begin, mstart) + 1 or begin
3129 lstart = body.rfind('\n', begin, mstart) + 1 or begin
3125 begin = body.find('\n', mend) + 1 or len(body) + 1
3130 begin = body.find('\n', mend) + 1 or len(body) + 1
3126 lend = begin - 1
3131 lend = begin - 1
3127 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
3132 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
3128
3133
3129 class linestate(object):
3134 class linestate(object):
3130 def __init__(self, line, linenum, colstart, colend):
3135 def __init__(self, line, linenum, colstart, colend):
3131 self.line = line
3136 self.line = line
3132 self.linenum = linenum
3137 self.linenum = linenum
3133 self.colstart = colstart
3138 self.colstart = colstart
3134 self.colend = colend
3139 self.colend = colend
3135
3140
3136 def __hash__(self):
3141 def __hash__(self):
3137 return hash((self.linenum, self.line))
3142 return hash((self.linenum, self.line))
3138
3143
3139 def __eq__(self, other):
3144 def __eq__(self, other):
3140 return self.line == other.line
3145 return self.line == other.line
3141
3146
3142 matches = {}
3147 matches = {}
3143 copies = {}
3148 copies = {}
3144 def grepbody(fn, rev, body):
3149 def grepbody(fn, rev, body):
3145 matches[rev].setdefault(fn, [])
3150 matches[rev].setdefault(fn, [])
3146 m = matches[rev][fn]
3151 m = matches[rev][fn]
3147 for lnum, cstart, cend, line in matchlines(body):
3152 for lnum, cstart, cend, line in matchlines(body):
3148 s = linestate(line, lnum, cstart, cend)
3153 s = linestate(line, lnum, cstart, cend)
3149 m.append(s)
3154 m.append(s)
3150
3155
3151 def difflinestates(a, b):
3156 def difflinestates(a, b):
3152 sm = difflib.SequenceMatcher(None, a, b)
3157 sm = difflib.SequenceMatcher(None, a, b)
3153 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3158 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3154 if tag == 'insert':
3159 if tag == 'insert':
3155 for i in xrange(blo, bhi):
3160 for i in xrange(blo, bhi):
3156 yield ('+', b[i])
3161 yield ('+', b[i])
3157 elif tag == 'delete':
3162 elif tag == 'delete':
3158 for i in xrange(alo, ahi):
3163 for i in xrange(alo, ahi):
3159 yield ('-', a[i])
3164 yield ('-', a[i])
3160 elif tag == 'replace':
3165 elif tag == 'replace':
3161 for i in xrange(alo, ahi):
3166 for i in xrange(alo, ahi):
3162 yield ('-', a[i])
3167 yield ('-', a[i])
3163 for i in xrange(blo, bhi):
3168 for i in xrange(blo, bhi):
3164 yield ('+', b[i])
3169 yield ('+', b[i])
3165
3170
3166 def display(fn, ctx, pstates, states):
3171 def display(fn, ctx, pstates, states):
3167 rev = ctx.rev()
3172 rev = ctx.rev()
3168 datefunc = ui.quiet and util.shortdate or util.datestr
3173 datefunc = ui.quiet and util.shortdate or util.datestr
3169 found = False
3174 found = False
3170 filerevmatches = {}
3175 filerevmatches = {}
3171 def binary():
3176 def binary():
3172 flog = getfile(fn)
3177 flog = getfile(fn)
3173 return util.binary(flog.read(ctx.filenode(fn)))
3178 return util.binary(flog.read(ctx.filenode(fn)))
3174
3179
3175 if opts.get('all'):
3180 if opts.get('all'):
3176 iter = difflinestates(pstates, states)
3181 iter = difflinestates(pstates, states)
3177 else:
3182 else:
3178 iter = [('', l) for l in states]
3183 iter = [('', l) for l in states]
3179 for change, l in iter:
3184 for change, l in iter:
3180 cols = [(fn, 'grep.filename'), (str(rev), 'grep.rev')]
3185 cols = [(fn, 'grep.filename'), (str(rev), 'grep.rev')]
3181 before, match, after = None, None, None
3186 before, match, after = None, None, None
3182
3187
3183 if opts.get('line_number'):
3188 if opts.get('line_number'):
3184 cols.append((str(l.linenum), 'grep.linenumber'))
3189 cols.append((str(l.linenum), 'grep.linenumber'))
3185 if opts.get('all'):
3190 if opts.get('all'):
3186 cols.append((change, 'grep.change'))
3191 cols.append((change, 'grep.change'))
3187 if opts.get('user'):
3192 if opts.get('user'):
3188 cols.append((ui.shortuser(ctx.user()), 'grep.user'))
3193 cols.append((ui.shortuser(ctx.user()), 'grep.user'))
3189 if opts.get('date'):
3194 if opts.get('date'):
3190 cols.append((datefunc(ctx.date()), 'grep.date'))
3195 cols.append((datefunc(ctx.date()), 'grep.date'))
3191 if opts.get('files_with_matches'):
3196 if opts.get('files_with_matches'):
3192 c = (fn, rev)
3197 c = (fn, rev)
3193 if c in filerevmatches:
3198 if c in filerevmatches:
3194 continue
3199 continue
3195 filerevmatches[c] = 1
3200 filerevmatches[c] = 1
3196 else:
3201 else:
3197 before = l.line[:l.colstart]
3202 before = l.line[:l.colstart]
3198 match = l.line[l.colstart:l.colend]
3203 match = l.line[l.colstart:l.colend]
3199 after = l.line[l.colend:]
3204 after = l.line[l.colend:]
3200 for col, label in cols[:-1]:
3205 for col, label in cols[:-1]:
3201 ui.write(col, label=label)
3206 ui.write(col, label=label)
3202 ui.write(sep, label='grep.sep')
3207 ui.write(sep, label='grep.sep')
3203 ui.write(cols[-1][0], label=cols[-1][1])
3208 ui.write(cols[-1][0], label=cols[-1][1])
3204 if before is not None:
3209 if before is not None:
3205 ui.write(sep, label='grep.sep')
3210 ui.write(sep, label='grep.sep')
3206 if not opts.get('text') and binary():
3211 if not opts.get('text') and binary():
3207 ui.write(" Binary file matches")
3212 ui.write(" Binary file matches")
3208 else:
3213 else:
3209 ui.write(before)
3214 ui.write(before)
3210 ui.write(match, label='grep.match')
3215 ui.write(match, label='grep.match')
3211 ui.write(after)
3216 ui.write(after)
3212 ui.write(eol)
3217 ui.write(eol)
3213 found = True
3218 found = True
3214 return found
3219 return found
3215
3220
3216 skip = {}
3221 skip = {}
3217 revfiles = {}
3222 revfiles = {}
3218 matchfn = scmutil.match(repo[None], pats, opts)
3223 matchfn = scmutil.match(repo[None], pats, opts)
3219 found = False
3224 found = False
3220 follow = opts.get('follow')
3225 follow = opts.get('follow')
3221
3226
3222 def prep(ctx, fns):
3227 def prep(ctx, fns):
3223 rev = ctx.rev()
3228 rev = ctx.rev()
3224 pctx = ctx.p1()
3229 pctx = ctx.p1()
3225 parent = pctx.rev()
3230 parent = pctx.rev()
3226 matches.setdefault(rev, {})
3231 matches.setdefault(rev, {})
3227 matches.setdefault(parent, {})
3232 matches.setdefault(parent, {})
3228 files = revfiles.setdefault(rev, [])
3233 files = revfiles.setdefault(rev, [])
3229 for fn in fns:
3234 for fn in fns:
3230 flog = getfile(fn)
3235 flog = getfile(fn)
3231 try:
3236 try:
3232 fnode = ctx.filenode(fn)
3237 fnode = ctx.filenode(fn)
3233 except error.LookupError:
3238 except error.LookupError:
3234 continue
3239 continue
3235
3240
3236 copied = flog.renamed(fnode)
3241 copied = flog.renamed(fnode)
3237 copy = follow and copied and copied[0]
3242 copy = follow and copied and copied[0]
3238 if copy:
3243 if copy:
3239 copies.setdefault(rev, {})[fn] = copy
3244 copies.setdefault(rev, {})[fn] = copy
3240 if fn in skip:
3245 if fn in skip:
3241 if copy:
3246 if copy:
3242 skip[copy] = True
3247 skip[copy] = True
3243 continue
3248 continue
3244 files.append(fn)
3249 files.append(fn)
3245
3250
3246 if fn not in matches[rev]:
3251 if fn not in matches[rev]:
3247 grepbody(fn, rev, flog.read(fnode))
3252 grepbody(fn, rev, flog.read(fnode))
3248
3253
3249 pfn = copy or fn
3254 pfn = copy or fn
3250 if pfn not in matches[parent]:
3255 if pfn not in matches[parent]:
3251 try:
3256 try:
3252 fnode = pctx.filenode(pfn)
3257 fnode = pctx.filenode(pfn)
3253 grepbody(pfn, parent, flog.read(fnode))
3258 grepbody(pfn, parent, flog.read(fnode))
3254 except error.LookupError:
3259 except error.LookupError:
3255 pass
3260 pass
3256
3261
3257 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
3262 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
3258 rev = ctx.rev()
3263 rev = ctx.rev()
3259 parent = ctx.p1().rev()
3264 parent = ctx.p1().rev()
3260 for fn in sorted(revfiles.get(rev, [])):
3265 for fn in sorted(revfiles.get(rev, [])):
3261 states = matches[rev][fn]
3266 states = matches[rev][fn]
3262 copy = copies.get(rev, {}).get(fn)
3267 copy = copies.get(rev, {}).get(fn)
3263 if fn in skip:
3268 if fn in skip:
3264 if copy:
3269 if copy:
3265 skip[copy] = True
3270 skip[copy] = True
3266 continue
3271 continue
3267 pstates = matches.get(parent, {}).get(copy or fn, [])
3272 pstates = matches.get(parent, {}).get(copy or fn, [])
3268 if pstates or states:
3273 if pstates or states:
3269 r = display(fn, ctx, pstates, states)
3274 r = display(fn, ctx, pstates, states)
3270 found = found or r
3275 found = found or r
3271 if r and not opts.get('all'):
3276 if r and not opts.get('all'):
3272 skip[fn] = True
3277 skip[fn] = True
3273 if copy:
3278 if copy:
3274 skip[copy] = True
3279 skip[copy] = True
3275 del matches[rev]
3280 del matches[rev]
3276 del revfiles[rev]
3281 del revfiles[rev]
3277
3282
3278 return not found
3283 return not found
3279
3284
3280 @command('heads',
3285 @command('heads',
3281 [('r', 'rev', '',
3286 [('r', 'rev', '',
3282 _('show only heads which are descendants of STARTREV'), _('STARTREV')),
3287 _('show only heads which are descendants of STARTREV'), _('STARTREV')),
3283 ('t', 'topo', False, _('show topological heads only')),
3288 ('t', 'topo', False, _('show topological heads only')),
3284 ('a', 'active', False, _('show active branchheads only (DEPRECATED)')),
3289 ('a', 'active', False, _('show active branchheads only (DEPRECATED)')),
3285 ('c', 'closed', False, _('show normal and closed branch heads')),
3290 ('c', 'closed', False, _('show normal and closed branch heads')),
3286 ] + templateopts,
3291 ] + templateopts,
3287 _('[-ct] [-r STARTREV] [REV]...'))
3292 _('[-ct] [-r STARTREV] [REV]...'))
3288 def heads(ui, repo, *branchrevs, **opts):
3293 def heads(ui, repo, *branchrevs, **opts):
3289 """show current repository heads or show branch heads
3294 """show current repository heads or show branch heads
3290
3295
3291 With no arguments, show all repository branch heads.
3296 With no arguments, show all repository branch heads.
3292
3297
3293 Repository "heads" are changesets with no child changesets. They are
3298 Repository "heads" are changesets with no child changesets. They are
3294 where development generally takes place and are the usual targets
3299 where development generally takes place and are the usual targets
3295 for update and merge operations. Branch heads are changesets that have
3300 for update and merge operations. Branch heads are changesets that have
3296 no child changeset on the same branch.
3301 no child changeset on the same branch.
3297
3302
3298 If one or more REVs are given, only branch heads on the branches
3303 If one or more REVs are given, only branch heads on the branches
3299 associated with the specified changesets are shown. This means
3304 associated with the specified changesets are shown. This means
3300 that you can use :hg:`heads foo` to see the heads on a branch
3305 that you can use :hg:`heads foo` to see the heads on a branch
3301 named ``foo``.
3306 named ``foo``.
3302
3307
3303 If -c/--closed is specified, also show branch heads marked closed
3308 If -c/--closed is specified, also show branch heads marked closed
3304 (see :hg:`commit --close-branch`).
3309 (see :hg:`commit --close-branch`).
3305
3310
3306 If STARTREV is specified, only those heads that are descendants of
3311 If STARTREV is specified, only those heads that are descendants of
3307 STARTREV will be displayed.
3312 STARTREV will be displayed.
3308
3313
3309 If -t/--topo is specified, named branch mechanics will be ignored and only
3314 If -t/--topo is specified, named branch mechanics will be ignored and only
3310 changesets without children will be shown.
3315 changesets without children will be shown.
3311
3316
3312 Returns 0 if matching heads are found, 1 if not.
3317 Returns 0 if matching heads are found, 1 if not.
3313 """
3318 """
3314
3319
3315 start = None
3320 start = None
3316 if 'rev' in opts:
3321 if 'rev' in opts:
3317 start = scmutil.revsingle(repo, opts['rev'], None).node()
3322 start = scmutil.revsingle(repo, opts['rev'], None).node()
3318
3323
3319 if opts.get('topo'):
3324 if opts.get('topo'):
3320 heads = [repo[h] for h in repo.heads(start)]
3325 heads = [repo[h] for h in repo.heads(start)]
3321 else:
3326 else:
3322 heads = []
3327 heads = []
3323 for branch in repo.branchmap():
3328 for branch in repo.branchmap():
3324 heads += repo.branchheads(branch, start, opts.get('closed'))
3329 heads += repo.branchheads(branch, start, opts.get('closed'))
3325 heads = [repo[h] for h in heads]
3330 heads = [repo[h] for h in heads]
3326
3331
3327 if branchrevs:
3332 if branchrevs:
3328 branches = set(repo[br].branch() for br in branchrevs)
3333 branches = set(repo[br].branch() for br in branchrevs)
3329 heads = [h for h in heads if h.branch() in branches]
3334 heads = [h for h in heads if h.branch() in branches]
3330
3335
3331 if opts.get('active') and branchrevs:
3336 if opts.get('active') and branchrevs:
3332 dagheads = repo.heads(start)
3337 dagheads = repo.heads(start)
3333 heads = [h for h in heads if h.node() in dagheads]
3338 heads = [h for h in heads if h.node() in dagheads]
3334
3339
3335 if branchrevs:
3340 if branchrevs:
3336 haveheads = set(h.branch() for h in heads)
3341 haveheads = set(h.branch() for h in heads)
3337 if branches - haveheads:
3342 if branches - haveheads:
3338 headless = ', '.join(b for b in branches - haveheads)
3343 headless = ', '.join(b for b in branches - haveheads)
3339 msg = _('no open branch heads found on branches %s')
3344 msg = _('no open branch heads found on branches %s')
3340 if opts.get('rev'):
3345 if opts.get('rev'):
3341 msg += _(' (started at %s)') % opts['rev']
3346 msg += _(' (started at %s)') % opts['rev']
3342 ui.warn((msg + '\n') % headless)
3347 ui.warn((msg + '\n') % headless)
3343
3348
3344 if not heads:
3349 if not heads:
3345 return 1
3350 return 1
3346
3351
3347 heads = sorted(heads, key=lambda x: -x.rev())
3352 heads = sorted(heads, key=lambda x: -x.rev())
3348 displayer = cmdutil.show_changeset(ui, repo, opts)
3353 displayer = cmdutil.show_changeset(ui, repo, opts)
3349 for ctx in heads:
3354 for ctx in heads:
3350 displayer.show(ctx)
3355 displayer.show(ctx)
3351 displayer.close()
3356 displayer.close()
3352
3357
3353 @command('help',
3358 @command('help',
3354 [('e', 'extension', None, _('show only help for extensions')),
3359 [('e', 'extension', None, _('show only help for extensions')),
3355 ('c', 'command', None, _('show only help for commands')),
3360 ('c', 'command', None, _('show only help for commands')),
3356 ('k', 'keyword', '', _('show topics matching keyword')),
3361 ('k', 'keyword', '', _('show topics matching keyword')),
3357 ],
3362 ],
3358 _('[-ec] [TOPIC]'))
3363 _('[-ec] [TOPIC]'))
3359 def help_(ui, name=None, **opts):
3364 def help_(ui, name=None, **opts):
3360 """show help for a given topic or a help overview
3365 """show help for a given topic or a help overview
3361
3366
3362 With no arguments, print a list of commands with short help messages.
3367 With no arguments, print a list of commands with short help messages.
3363
3368
3364 Given a topic, extension, or command name, print help for that
3369 Given a topic, extension, or command name, print help for that
3365 topic.
3370 topic.
3366
3371
3367 Returns 0 if successful.
3372 Returns 0 if successful.
3368 """
3373 """
3369
3374
3370 textwidth = min(ui.termwidth(), 80) - 2
3375 textwidth = min(ui.termwidth(), 80) - 2
3371
3376
3372 keep = ui.verbose and ['verbose'] or []
3377 keep = ui.verbose and ['verbose'] or []
3373 text = help.help_(ui, name, **opts)
3378 text = help.help_(ui, name, **opts)
3374
3379
3375 formatted, pruned = minirst.format(text, textwidth, keep=keep)
3380 formatted, pruned = minirst.format(text, textwidth, keep=keep)
3376 if 'verbose' in pruned:
3381 if 'verbose' in pruned:
3377 keep.append('omitted')
3382 keep.append('omitted')
3378 else:
3383 else:
3379 keep.append('notomitted')
3384 keep.append('notomitted')
3380 formatted, pruned = minirst.format(text, textwidth, keep=keep)
3385 formatted, pruned = minirst.format(text, textwidth, keep=keep)
3381 ui.write(formatted)
3386 ui.write(formatted)
3382
3387
3383
3388
3384 @command('identify|id',
3389 @command('identify|id',
3385 [('r', 'rev', '',
3390 [('r', 'rev', '',
3386 _('identify the specified revision'), _('REV')),
3391 _('identify the specified revision'), _('REV')),
3387 ('n', 'num', None, _('show local revision number')),
3392 ('n', 'num', None, _('show local revision number')),
3388 ('i', 'id', None, _('show global revision id')),
3393 ('i', 'id', None, _('show global revision id')),
3389 ('b', 'branch', None, _('show branch')),
3394 ('b', 'branch', None, _('show branch')),
3390 ('t', 'tags', None, _('show tags')),
3395 ('t', 'tags', None, _('show tags')),
3391 ('B', 'bookmarks', None, _('show bookmarks')),
3396 ('B', 'bookmarks', None, _('show bookmarks')),
3392 ] + remoteopts,
3397 ] + remoteopts,
3393 _('[-nibtB] [-r REV] [SOURCE]'))
3398 _('[-nibtB] [-r REV] [SOURCE]'))
3394 def identify(ui, repo, source=None, rev=None,
3399 def identify(ui, repo, source=None, rev=None,
3395 num=None, id=None, branch=None, tags=None, bookmarks=None, **opts):
3400 num=None, id=None, branch=None, tags=None, bookmarks=None, **opts):
3396 """identify the working copy or specified revision
3401 """identify the working copy or specified revision
3397
3402
3398 Print a summary identifying the repository state at REV using one or
3403 Print a summary identifying the repository state at REV using one or
3399 two parent hash identifiers, followed by a "+" if the working
3404 two parent hash identifiers, followed by a "+" if the working
3400 directory has uncommitted changes, the branch name (if not default),
3405 directory has uncommitted changes, the branch name (if not default),
3401 a list of tags, and a list of bookmarks.
3406 a list of tags, and a list of bookmarks.
3402
3407
3403 When REV is not given, print a summary of the current state of the
3408 When REV is not given, print a summary of the current state of the
3404 repository.
3409 repository.
3405
3410
3406 Specifying a path to a repository root or Mercurial bundle will
3411 Specifying a path to a repository root or Mercurial bundle will
3407 cause lookup to operate on that repository/bundle.
3412 cause lookup to operate on that repository/bundle.
3408
3413
3409 .. container:: verbose
3414 .. container:: verbose
3410
3415
3411 Examples:
3416 Examples:
3412
3417
3413 - generate a build identifier for the working directory::
3418 - generate a build identifier for the working directory::
3414
3419
3415 hg id --id > build-id.dat
3420 hg id --id > build-id.dat
3416
3421
3417 - find the revision corresponding to a tag::
3422 - find the revision corresponding to a tag::
3418
3423
3419 hg id -n -r 1.3
3424 hg id -n -r 1.3
3420
3425
3421 - check the most recent revision of a remote repository::
3426 - check the most recent revision of a remote repository::
3422
3427
3423 hg id -r tip http://selenic.com/hg/
3428 hg id -r tip http://selenic.com/hg/
3424
3429
3425 Returns 0 if successful.
3430 Returns 0 if successful.
3426 """
3431 """
3427
3432
3428 if not repo and not source:
3433 if not repo and not source:
3429 raise util.Abort(_("there is no Mercurial repository here "
3434 raise util.Abort(_("there is no Mercurial repository here "
3430 "(.hg not found)"))
3435 "(.hg not found)"))
3431
3436
3432 hexfunc = ui.debugflag and hex or short
3437 hexfunc = ui.debugflag and hex or short
3433 default = not (num or id or branch or tags or bookmarks)
3438 default = not (num or id or branch or tags or bookmarks)
3434 output = []
3439 output = []
3435 revs = []
3440 revs = []
3436
3441
3437 if source:
3442 if source:
3438 source, branches = hg.parseurl(ui.expandpath(source))
3443 source, branches = hg.parseurl(ui.expandpath(source))
3439 peer = hg.peer(repo or ui, opts, source) # only pass ui when no repo
3444 peer = hg.peer(repo or ui, opts, source) # only pass ui when no repo
3440 repo = peer.local()
3445 repo = peer.local()
3441 revs, checkout = hg.addbranchrevs(repo, peer, branches, None)
3446 revs, checkout = hg.addbranchrevs(repo, peer, branches, None)
3442
3447
3443 if not repo:
3448 if not repo:
3444 if num or branch or tags:
3449 if num or branch or tags:
3445 raise util.Abort(
3450 raise util.Abort(
3446 _("can't query remote revision number, branch, or tags"))
3451 _("can't query remote revision number, branch, or tags"))
3447 if not rev and revs:
3452 if not rev and revs:
3448 rev = revs[0]
3453 rev = revs[0]
3449 if not rev:
3454 if not rev:
3450 rev = "tip"
3455 rev = "tip"
3451
3456
3452 remoterev = peer.lookup(rev)
3457 remoterev = peer.lookup(rev)
3453 if default or id:
3458 if default or id:
3454 output = [hexfunc(remoterev)]
3459 output = [hexfunc(remoterev)]
3455
3460
3456 def getbms():
3461 def getbms():
3457 bms = []
3462 bms = []
3458
3463
3459 if 'bookmarks' in peer.listkeys('namespaces'):
3464 if 'bookmarks' in peer.listkeys('namespaces'):
3460 hexremoterev = hex(remoterev)
3465 hexremoterev = hex(remoterev)
3461 bms = [bm for bm, bmr in peer.listkeys('bookmarks').iteritems()
3466 bms = [bm for bm, bmr in peer.listkeys('bookmarks').iteritems()
3462 if bmr == hexremoterev]
3467 if bmr == hexremoterev]
3463
3468
3464 return sorted(bms)
3469 return sorted(bms)
3465
3470
3466 if bookmarks:
3471 if bookmarks:
3467 output.extend(getbms())
3472 output.extend(getbms())
3468 elif default and not ui.quiet:
3473 elif default and not ui.quiet:
3469 # multiple bookmarks for a single parent separated by '/'
3474 # multiple bookmarks for a single parent separated by '/'
3470 bm = '/'.join(getbms())
3475 bm = '/'.join(getbms())
3471 if bm:
3476 if bm:
3472 output.append(bm)
3477 output.append(bm)
3473 else:
3478 else:
3474 if not rev:
3479 if not rev:
3475 ctx = repo[None]
3480 ctx = repo[None]
3476 parents = ctx.parents()
3481 parents = ctx.parents()
3477 changed = ""
3482 changed = ""
3478 if default or id or num:
3483 if default or id or num:
3479 if (util.any(repo.status())
3484 if (util.any(repo.status())
3480 or util.any(ctx.sub(s).dirty() for s in ctx.substate)):
3485 or util.any(ctx.sub(s).dirty() for s in ctx.substate)):
3481 changed = '+'
3486 changed = '+'
3482 if default or id:
3487 if default or id:
3483 output = ["%s%s" %
3488 output = ["%s%s" %
3484 ('+'.join([hexfunc(p.node()) for p in parents]), changed)]
3489 ('+'.join([hexfunc(p.node()) for p in parents]), changed)]
3485 if num:
3490 if num:
3486 output.append("%s%s" %
3491 output.append("%s%s" %
3487 ('+'.join([str(p.rev()) for p in parents]), changed))
3492 ('+'.join([str(p.rev()) for p in parents]), changed))
3488 else:
3493 else:
3489 ctx = scmutil.revsingle(repo, rev)
3494 ctx = scmutil.revsingle(repo, rev)
3490 if default or id:
3495 if default or id:
3491 output = [hexfunc(ctx.node())]
3496 output = [hexfunc(ctx.node())]
3492 if num:
3497 if num:
3493 output.append(str(ctx.rev()))
3498 output.append(str(ctx.rev()))
3494
3499
3495 if default and not ui.quiet:
3500 if default and not ui.quiet:
3496 b = ctx.branch()
3501 b = ctx.branch()
3497 if b != 'default':
3502 if b != 'default':
3498 output.append("(%s)" % b)
3503 output.append("(%s)" % b)
3499
3504
3500 # multiple tags for a single parent separated by '/'
3505 # multiple tags for a single parent separated by '/'
3501 t = '/'.join(ctx.tags())
3506 t = '/'.join(ctx.tags())
3502 if t:
3507 if t:
3503 output.append(t)
3508 output.append(t)
3504
3509
3505 # multiple bookmarks for a single parent separated by '/'
3510 # multiple bookmarks for a single parent separated by '/'
3506 bm = '/'.join(ctx.bookmarks())
3511 bm = '/'.join(ctx.bookmarks())
3507 if bm:
3512 if bm:
3508 output.append(bm)
3513 output.append(bm)
3509 else:
3514 else:
3510 if branch:
3515 if branch:
3511 output.append(ctx.branch())
3516 output.append(ctx.branch())
3512
3517
3513 if tags:
3518 if tags:
3514 output.extend(ctx.tags())
3519 output.extend(ctx.tags())
3515
3520
3516 if bookmarks:
3521 if bookmarks:
3517 output.extend(ctx.bookmarks())
3522 output.extend(ctx.bookmarks())
3518
3523
3519 ui.write("%s\n" % ' '.join(output))
3524 ui.write("%s\n" % ' '.join(output))
3520
3525
3521 @command('import|patch',
3526 @command('import|patch',
3522 [('p', 'strip', 1,
3527 [('p', 'strip', 1,
3523 _('directory strip option for patch. This has the same '
3528 _('directory strip option for patch. This has the same '
3524 'meaning as the corresponding patch option'), _('NUM')),
3529 'meaning as the corresponding patch option'), _('NUM')),
3525 ('b', 'base', '', _('base path (DEPRECATED)'), _('PATH')),
3530 ('b', 'base', '', _('base path (DEPRECATED)'), _('PATH')),
3526 ('e', 'edit', False, _('invoke editor on commit messages')),
3531 ('e', 'edit', False, _('invoke editor on commit messages')),
3527 ('f', 'force', None, _('skip check for outstanding uncommitted changes')),
3532 ('f', 'force', None, _('skip check for outstanding uncommitted changes')),
3528 ('', 'no-commit', None,
3533 ('', 'no-commit', None,
3529 _("don't commit, just update the working directory")),
3534 _("don't commit, just update the working directory")),
3530 ('', 'bypass', None,
3535 ('', 'bypass', None,
3531 _("apply patch without touching the working directory")),
3536 _("apply patch without touching the working directory")),
3532 ('', 'exact', None,
3537 ('', 'exact', None,
3533 _('apply patch to the nodes from which it was generated')),
3538 _('apply patch to the nodes from which it was generated')),
3534 ('', 'import-branch', None,
3539 ('', 'import-branch', None,
3535 _('use any branch information in patch (implied by --exact)'))] +
3540 _('use any branch information in patch (implied by --exact)'))] +
3536 commitopts + commitopts2 + similarityopts,
3541 commitopts + commitopts2 + similarityopts,
3537 _('[OPTION]... PATCH...'))
3542 _('[OPTION]... PATCH...'))
3538 def import_(ui, repo, patch1=None, *patches, **opts):
3543 def import_(ui, repo, patch1=None, *patches, **opts):
3539 """import an ordered set of patches
3544 """import an ordered set of patches
3540
3545
3541 Import a list of patches and commit them individually (unless
3546 Import a list of patches and commit them individually (unless
3542 --no-commit is specified).
3547 --no-commit is specified).
3543
3548
3544 If there are outstanding changes in the working directory, import
3549 If there are outstanding changes in the working directory, import
3545 will abort unless given the -f/--force flag.
3550 will abort unless given the -f/--force flag.
3546
3551
3547 You can import a patch straight from a mail message. Even patches
3552 You can import a patch straight from a mail message. Even patches
3548 as attachments work (to use the body part, it must have type
3553 as attachments work (to use the body part, it must have type
3549 text/plain or text/x-patch). From and Subject headers of email
3554 text/plain or text/x-patch). From and Subject headers of email
3550 message are used as default committer and commit message. All
3555 message are used as default committer and commit message. All
3551 text/plain body parts before first diff are added to commit
3556 text/plain body parts before first diff are added to commit
3552 message.
3557 message.
3553
3558
3554 If the imported patch was generated by :hg:`export`, user and
3559 If the imported patch was generated by :hg:`export`, user and
3555 description from patch override values from message headers and
3560 description from patch override values from message headers and
3556 body. Values given on command line with -m/--message and -u/--user
3561 body. Values given on command line with -m/--message and -u/--user
3557 override these.
3562 override these.
3558
3563
3559 If --exact is specified, import will set the working directory to
3564 If --exact is specified, import will set the working directory to
3560 the parent of each patch before applying it, and will abort if the
3565 the parent of each patch before applying it, and will abort if the
3561 resulting changeset has a different ID than the one recorded in
3566 resulting changeset has a different ID than the one recorded in
3562 the patch. This may happen due to character set problems or other
3567 the patch. This may happen due to character set problems or other
3563 deficiencies in the text patch format.
3568 deficiencies in the text patch format.
3564
3569
3565 Use --bypass to apply and commit patches directly to the
3570 Use --bypass to apply and commit patches directly to the
3566 repository, not touching the working directory. Without --exact,
3571 repository, not touching the working directory. Without --exact,
3567 patches will be applied on top of the working directory parent
3572 patches will be applied on top of the working directory parent
3568 revision.
3573 revision.
3569
3574
3570 With -s/--similarity, hg will attempt to discover renames and
3575 With -s/--similarity, hg will attempt to discover renames and
3571 copies in the patch in the same way as :hg:`addremove`.
3576 copies in the patch in the same way as :hg:`addremove`.
3572
3577
3573 To read a patch from standard input, use "-" as the patch name. If
3578 To read a patch from standard input, use "-" as the patch name. If
3574 a URL is specified, the patch will be downloaded from it.
3579 a URL is specified, the patch will be downloaded from it.
3575 See :hg:`help dates` for a list of formats valid for -d/--date.
3580 See :hg:`help dates` for a list of formats valid for -d/--date.
3576
3581
3577 .. container:: verbose
3582 .. container:: verbose
3578
3583
3579 Examples:
3584 Examples:
3580
3585
3581 - import a traditional patch from a website and detect renames::
3586 - import a traditional patch from a website and detect renames::
3582
3587
3583 hg import -s 80 http://example.com/bugfix.patch
3588 hg import -s 80 http://example.com/bugfix.patch
3584
3589
3585 - import a changeset from an hgweb server::
3590 - import a changeset from an hgweb server::
3586
3591
3587 hg import http://www.selenic.com/hg/rev/5ca8c111e9aa
3592 hg import http://www.selenic.com/hg/rev/5ca8c111e9aa
3588
3593
3589 - import all the patches in an Unix-style mbox::
3594 - import all the patches in an Unix-style mbox::
3590
3595
3591 hg import incoming-patches.mbox
3596 hg import incoming-patches.mbox
3592
3597
3593 - attempt to exactly restore an exported changeset (not always
3598 - attempt to exactly restore an exported changeset (not always
3594 possible)::
3599 possible)::
3595
3600
3596 hg import --exact proposed-fix.patch
3601 hg import --exact proposed-fix.patch
3597
3602
3598 Returns 0 on success.
3603 Returns 0 on success.
3599 """
3604 """
3600
3605
3601 if not patch1:
3606 if not patch1:
3602 raise util.Abort(_('need at least one patch to import'))
3607 raise util.Abort(_('need at least one patch to import'))
3603
3608
3604 patches = (patch1,) + patches
3609 patches = (patch1,) + patches
3605
3610
3606 date = opts.get('date')
3611 date = opts.get('date')
3607 if date:
3612 if date:
3608 opts['date'] = util.parsedate(date)
3613 opts['date'] = util.parsedate(date)
3609
3614
3610 editor = cmdutil.commiteditor
3615 editor = cmdutil.commiteditor
3611 if opts.get('edit'):
3616 if opts.get('edit'):
3612 editor = cmdutil.commitforceeditor
3617 editor = cmdutil.commitforceeditor
3613
3618
3614 update = not opts.get('bypass')
3619 update = not opts.get('bypass')
3615 if not update and opts.get('no_commit'):
3620 if not update and opts.get('no_commit'):
3616 raise util.Abort(_('cannot use --no-commit with --bypass'))
3621 raise util.Abort(_('cannot use --no-commit with --bypass'))
3617 try:
3622 try:
3618 sim = float(opts.get('similarity') or 0)
3623 sim = float(opts.get('similarity') or 0)
3619 except ValueError:
3624 except ValueError:
3620 raise util.Abort(_('similarity must be a number'))
3625 raise util.Abort(_('similarity must be a number'))
3621 if sim < 0 or sim > 100:
3626 if sim < 0 or sim > 100:
3622 raise util.Abort(_('similarity must be between 0 and 100'))
3627 raise util.Abort(_('similarity must be between 0 and 100'))
3623 if sim and not update:
3628 if sim and not update:
3624 raise util.Abort(_('cannot use --similarity with --bypass'))
3629 raise util.Abort(_('cannot use --similarity with --bypass'))
3625
3630
3626 if (opts.get('exact') or not opts.get('force')) and update:
3631 if (opts.get('exact') or not opts.get('force')) and update:
3627 cmdutil.bailifchanged(repo)
3632 cmdutil.bailifchanged(repo)
3628
3633
3629 base = opts["base"]
3634 base = opts["base"]
3630 strip = opts["strip"]
3635 strip = opts["strip"]
3631 wlock = lock = tr = None
3636 wlock = lock = tr = None
3632 msgs = []
3637 msgs = []
3633
3638
3634 def tryone(ui, hunk, parents):
3639 def tryone(ui, hunk, parents):
3635 tmpname, message, user, date, branch, nodeid, p1, p2 = \
3640 tmpname, message, user, date, branch, nodeid, p1, p2 = \
3636 patch.extract(ui, hunk)
3641 patch.extract(ui, hunk)
3637
3642
3638 if not tmpname:
3643 if not tmpname:
3639 return (None, None)
3644 return (None, None)
3640 msg = _('applied to working directory')
3645 msg = _('applied to working directory')
3641
3646
3642 try:
3647 try:
3643 cmdline_message = cmdutil.logmessage(ui, opts)
3648 cmdline_message = cmdutil.logmessage(ui, opts)
3644 if cmdline_message:
3649 if cmdline_message:
3645 # pickup the cmdline msg
3650 # pickup the cmdline msg
3646 message = cmdline_message
3651 message = cmdline_message
3647 elif message:
3652 elif message:
3648 # pickup the patch msg
3653 # pickup the patch msg
3649 message = message.strip()
3654 message = message.strip()
3650 else:
3655 else:
3651 # launch the editor
3656 # launch the editor
3652 message = None
3657 message = None
3653 ui.debug('message:\n%s\n' % message)
3658 ui.debug('message:\n%s\n' % message)
3654
3659
3655 if len(parents) == 1:
3660 if len(parents) == 1:
3656 parents.append(repo[nullid])
3661 parents.append(repo[nullid])
3657 if opts.get('exact'):
3662 if opts.get('exact'):
3658 if not nodeid or not p1:
3663 if not nodeid or not p1:
3659 raise util.Abort(_('not a Mercurial patch'))
3664 raise util.Abort(_('not a Mercurial patch'))
3660 p1 = repo[p1]
3665 p1 = repo[p1]
3661 p2 = repo[p2 or nullid]
3666 p2 = repo[p2 or nullid]
3662 elif p2:
3667 elif p2:
3663 try:
3668 try:
3664 p1 = repo[p1]
3669 p1 = repo[p1]
3665 p2 = repo[p2]
3670 p2 = repo[p2]
3666 # Without any options, consider p2 only if the
3671 # Without any options, consider p2 only if the
3667 # patch is being applied on top of the recorded
3672 # patch is being applied on top of the recorded
3668 # first parent.
3673 # first parent.
3669 if p1 != parents[0]:
3674 if p1 != parents[0]:
3670 p1 = parents[0]
3675 p1 = parents[0]
3671 p2 = repo[nullid]
3676 p2 = repo[nullid]
3672 except error.RepoError:
3677 except error.RepoError:
3673 p1, p2 = parents
3678 p1, p2 = parents
3674 else:
3679 else:
3675 p1, p2 = parents
3680 p1, p2 = parents
3676
3681
3677 n = None
3682 n = None
3678 if update:
3683 if update:
3679 if p1 != parents[0]:
3684 if p1 != parents[0]:
3680 hg.clean(repo, p1.node())
3685 hg.clean(repo, p1.node())
3681 if p2 != parents[1]:
3686 if p2 != parents[1]:
3682 repo.setparents(p1.node(), p2.node())
3687 repo.setparents(p1.node(), p2.node())
3683
3688
3684 if opts.get('exact') or opts.get('import_branch'):
3689 if opts.get('exact') or opts.get('import_branch'):
3685 repo.dirstate.setbranch(branch or 'default')
3690 repo.dirstate.setbranch(branch or 'default')
3686
3691
3687 files = set()
3692 files = set()
3688 patch.patch(ui, repo, tmpname, strip=strip, files=files,
3693 patch.patch(ui, repo, tmpname, strip=strip, files=files,
3689 eolmode=None, similarity=sim / 100.0)
3694 eolmode=None, similarity=sim / 100.0)
3690 files = list(files)
3695 files = list(files)
3691 if opts.get('no_commit'):
3696 if opts.get('no_commit'):
3692 if message:
3697 if message:
3693 msgs.append(message)
3698 msgs.append(message)
3694 else:
3699 else:
3695 if opts.get('exact') or p2:
3700 if opts.get('exact') or p2:
3696 # If you got here, you either use --force and know what
3701 # If you got here, you either use --force and know what
3697 # you are doing or used --exact or a merge patch while
3702 # you are doing or used --exact or a merge patch while
3698 # being updated to its first parent.
3703 # being updated to its first parent.
3699 m = None
3704 m = None
3700 else:
3705 else:
3701 m = scmutil.matchfiles(repo, files or [])
3706 m = scmutil.matchfiles(repo, files or [])
3702 n = repo.commit(message, opts.get('user') or user,
3707 n = repo.commit(message, opts.get('user') or user,
3703 opts.get('date') or date, match=m,
3708 opts.get('date') or date, match=m,
3704 editor=editor)
3709 editor=editor)
3705 else:
3710 else:
3706 if opts.get('exact') or opts.get('import_branch'):
3711 if opts.get('exact') or opts.get('import_branch'):
3707 branch = branch or 'default'
3712 branch = branch or 'default'
3708 else:
3713 else:
3709 branch = p1.branch()
3714 branch = p1.branch()
3710 store = patch.filestore()
3715 store = patch.filestore()
3711 try:
3716 try:
3712 files = set()
3717 files = set()
3713 try:
3718 try:
3714 patch.patchrepo(ui, repo, p1, store, tmpname, strip,
3719 patch.patchrepo(ui, repo, p1, store, tmpname, strip,
3715 files, eolmode=None)
3720 files, eolmode=None)
3716 except patch.PatchError, e:
3721 except patch.PatchError, e:
3717 raise util.Abort(str(e))
3722 raise util.Abort(str(e))
3718 memctx = patch.makememctx(repo, (p1.node(), p2.node()),
3723 memctx = patch.makememctx(repo, (p1.node(), p2.node()),
3719 message,
3724 message,
3720 opts.get('user') or user,
3725 opts.get('user') or user,
3721 opts.get('date') or date,
3726 opts.get('date') or date,
3722 branch, files, store,
3727 branch, files, store,
3723 editor=cmdutil.commiteditor)
3728 editor=cmdutil.commiteditor)
3724 repo.savecommitmessage(memctx.description())
3729 repo.savecommitmessage(memctx.description())
3725 n = memctx.commit()
3730 n = memctx.commit()
3726 finally:
3731 finally:
3727 store.close()
3732 store.close()
3728 if opts.get('exact') and hex(n) != nodeid:
3733 if opts.get('exact') and hex(n) != nodeid:
3729 raise util.Abort(_('patch is damaged or loses information'))
3734 raise util.Abort(_('patch is damaged or loses information'))
3730 if n:
3735 if n:
3731 # i18n: refers to a short changeset id
3736 # i18n: refers to a short changeset id
3732 msg = _('created %s') % short(n)
3737 msg = _('created %s') % short(n)
3733 return (msg, n)
3738 return (msg, n)
3734 finally:
3739 finally:
3735 os.unlink(tmpname)
3740 os.unlink(tmpname)
3736
3741
3737 try:
3742 try:
3738 try:
3743 try:
3739 wlock = repo.wlock()
3744 wlock = repo.wlock()
3740 if not opts.get('no_commit'):
3745 if not opts.get('no_commit'):
3741 lock = repo.lock()
3746 lock = repo.lock()
3742 tr = repo.transaction('import')
3747 tr = repo.transaction('import')
3743 parents = repo.parents()
3748 parents = repo.parents()
3744 for patchurl in patches:
3749 for patchurl in patches:
3745 if patchurl == '-':
3750 if patchurl == '-':
3746 ui.status(_('applying patch from stdin\n'))
3751 ui.status(_('applying patch from stdin\n'))
3747 patchfile = ui.fin
3752 patchfile = ui.fin
3748 patchurl = 'stdin' # for error message
3753 patchurl = 'stdin' # for error message
3749 else:
3754 else:
3750 patchurl = os.path.join(base, patchurl)
3755 patchurl = os.path.join(base, patchurl)
3751 ui.status(_('applying %s\n') % patchurl)
3756 ui.status(_('applying %s\n') % patchurl)
3752 patchfile = hg.openpath(ui, patchurl)
3757 patchfile = hg.openpath(ui, patchurl)
3753
3758
3754 haspatch = False
3759 haspatch = False
3755 for hunk in patch.split(patchfile):
3760 for hunk in patch.split(patchfile):
3756 (msg, node) = tryone(ui, hunk, parents)
3761 (msg, node) = tryone(ui, hunk, parents)
3757 if msg:
3762 if msg:
3758 haspatch = True
3763 haspatch = True
3759 ui.note(msg + '\n')
3764 ui.note(msg + '\n')
3760 if update or opts.get('exact'):
3765 if update or opts.get('exact'):
3761 parents = repo.parents()
3766 parents = repo.parents()
3762 else:
3767 else:
3763 parents = [repo[node]]
3768 parents = [repo[node]]
3764
3769
3765 if not haspatch:
3770 if not haspatch:
3766 raise util.Abort(_('%s: no diffs found') % patchurl)
3771 raise util.Abort(_('%s: no diffs found') % patchurl)
3767
3772
3768 if tr:
3773 if tr:
3769 tr.close()
3774 tr.close()
3770 if msgs:
3775 if msgs:
3771 repo.savecommitmessage('\n* * *\n'.join(msgs))
3776 repo.savecommitmessage('\n* * *\n'.join(msgs))
3772 except: # re-raises
3777 except: # re-raises
3773 # wlock.release() indirectly calls dirstate.write(): since
3778 # wlock.release() indirectly calls dirstate.write(): since
3774 # we're crashing, we do not want to change the working dir
3779 # we're crashing, we do not want to change the working dir
3775 # parent after all, so make sure it writes nothing
3780 # parent after all, so make sure it writes nothing
3776 repo.dirstate.invalidate()
3781 repo.dirstate.invalidate()
3777 raise
3782 raise
3778 finally:
3783 finally:
3779 if tr:
3784 if tr:
3780 tr.release()
3785 tr.release()
3781 release(lock, wlock)
3786 release(lock, wlock)
3782
3787
3783 @command('incoming|in',
3788 @command('incoming|in',
3784 [('f', 'force', None,
3789 [('f', 'force', None,
3785 _('run even if remote repository is unrelated')),
3790 _('run even if remote repository is unrelated')),
3786 ('n', 'newest-first', None, _('show newest record first')),
3791 ('n', 'newest-first', None, _('show newest record first')),
3787 ('', 'bundle', '',
3792 ('', 'bundle', '',
3788 _('file to store the bundles into'), _('FILE')),
3793 _('file to store the bundles into'), _('FILE')),
3789 ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
3794 ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
3790 ('B', 'bookmarks', False, _("compare bookmarks")),
3795 ('B', 'bookmarks', False, _("compare bookmarks")),
3791 ('b', 'branch', [],
3796 ('b', 'branch', [],
3792 _('a specific branch you would like to pull'), _('BRANCH')),
3797 _('a specific branch you would like to pull'), _('BRANCH')),
3793 ] + logopts + remoteopts + subrepoopts,
3798 ] + logopts + remoteopts + subrepoopts,
3794 _('[-p] [-n] [-M] [-f] [-r REV]... [--bundle FILENAME] [SOURCE]'))
3799 _('[-p] [-n] [-M] [-f] [-r REV]... [--bundle FILENAME] [SOURCE]'))
3795 def incoming(ui, repo, source="default", **opts):
3800 def incoming(ui, repo, source="default", **opts):
3796 """show new changesets found in source
3801 """show new changesets found in source
3797
3802
3798 Show new changesets found in the specified path/URL or the default
3803 Show new changesets found in the specified path/URL or the default
3799 pull location. These are the changesets that would have been pulled
3804 pull location. These are the changesets that would have been pulled
3800 if a pull at the time you issued this command.
3805 if a pull at the time you issued this command.
3801
3806
3802 For remote repository, using --bundle avoids downloading the
3807 For remote repository, using --bundle avoids downloading the
3803 changesets twice if the incoming is followed by a pull.
3808 changesets twice if the incoming is followed by a pull.
3804
3809
3805 See pull for valid source format details.
3810 See pull for valid source format details.
3806
3811
3807 Returns 0 if there are incoming changes, 1 otherwise.
3812 Returns 0 if there are incoming changes, 1 otherwise.
3808 """
3813 """
3809 if opts.get('graph'):
3814 if opts.get('graph'):
3810 cmdutil.checkunsupportedgraphflags([], opts)
3815 cmdutil.checkunsupportedgraphflags([], opts)
3811 def display(other, chlist, displayer):
3816 def display(other, chlist, displayer):
3812 revdag = cmdutil.graphrevs(other, chlist, opts)
3817 revdag = cmdutil.graphrevs(other, chlist, opts)
3813 showparents = [ctx.node() for ctx in repo[None].parents()]
3818 showparents = [ctx.node() for ctx in repo[None].parents()]
3814 cmdutil.displaygraph(ui, revdag, displayer, showparents,
3819 cmdutil.displaygraph(ui, revdag, displayer, showparents,
3815 graphmod.asciiedges)
3820 graphmod.asciiedges)
3816
3821
3817 hg._incoming(display, lambda: 1, ui, repo, source, opts, buffered=True)
3822 hg._incoming(display, lambda: 1, ui, repo, source, opts, buffered=True)
3818 return 0
3823 return 0
3819
3824
3820 if opts.get('bundle') and opts.get('subrepos'):
3825 if opts.get('bundle') and opts.get('subrepos'):
3821 raise util.Abort(_('cannot combine --bundle and --subrepos'))
3826 raise util.Abort(_('cannot combine --bundle and --subrepos'))
3822
3827
3823 if opts.get('bookmarks'):
3828 if opts.get('bookmarks'):
3824 source, branches = hg.parseurl(ui.expandpath(source),
3829 source, branches = hg.parseurl(ui.expandpath(source),
3825 opts.get('branch'))
3830 opts.get('branch'))
3826 other = hg.peer(repo, opts, source)
3831 other = hg.peer(repo, opts, source)
3827 if 'bookmarks' not in other.listkeys('namespaces'):
3832 if 'bookmarks' not in other.listkeys('namespaces'):
3828 ui.warn(_("remote doesn't support bookmarks\n"))
3833 ui.warn(_("remote doesn't support bookmarks\n"))
3829 return 0
3834 return 0
3830 ui.status(_('comparing with %s\n') % util.hidepassword(source))
3835 ui.status(_('comparing with %s\n') % util.hidepassword(source))
3831 return bookmarks.diff(ui, repo, other)
3836 return bookmarks.diff(ui, repo, other)
3832
3837
3833 repo._subtoppath = ui.expandpath(source)
3838 repo._subtoppath = ui.expandpath(source)
3834 try:
3839 try:
3835 return hg.incoming(ui, repo, source, opts)
3840 return hg.incoming(ui, repo, source, opts)
3836 finally:
3841 finally:
3837 del repo._subtoppath
3842 del repo._subtoppath
3838
3843
3839
3844
3840 @command('^init', remoteopts, _('[-e CMD] [--remotecmd CMD] [DEST]'))
3845 @command('^init', remoteopts, _('[-e CMD] [--remotecmd CMD] [DEST]'))
3841 def init(ui, dest=".", **opts):
3846 def init(ui, dest=".", **opts):
3842 """create a new repository in the given directory
3847 """create a new repository in the given directory
3843
3848
3844 Initialize a new repository in the given directory. If the given
3849 Initialize a new repository in the given directory. If the given
3845 directory does not exist, it will be created.
3850 directory does not exist, it will be created.
3846
3851
3847 If no directory is given, the current directory is used.
3852 If no directory is given, the current directory is used.
3848
3853
3849 It is possible to specify an ``ssh://`` URL as the destination.
3854 It is possible to specify an ``ssh://`` URL as the destination.
3850 See :hg:`help urls` for more information.
3855 See :hg:`help urls` for more information.
3851
3856
3852 Returns 0 on success.
3857 Returns 0 on success.
3853 """
3858 """
3854 hg.peer(ui, opts, ui.expandpath(dest), create=True)
3859 hg.peer(ui, opts, ui.expandpath(dest), create=True)
3855
3860
3856 @command('locate',
3861 @command('locate',
3857 [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
3862 [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
3858 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
3863 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
3859 ('f', 'fullpath', None, _('print complete paths from the filesystem root')),
3864 ('f', 'fullpath', None, _('print complete paths from the filesystem root')),
3860 ] + walkopts,
3865 ] + walkopts,
3861 _('[OPTION]... [PATTERN]...'))
3866 _('[OPTION]... [PATTERN]...'))
3862 def locate(ui, repo, *pats, **opts):
3867 def locate(ui, repo, *pats, **opts):
3863 """locate files matching specific patterns
3868 """locate files matching specific patterns
3864
3869
3865 Print files under Mercurial control in the working directory whose
3870 Print files under Mercurial control in the working directory whose
3866 names match the given patterns.
3871 names match the given patterns.
3867
3872
3868 By default, this command searches all directories in the working
3873 By default, this command searches all directories in the working
3869 directory. To search just the current directory and its
3874 directory. To search just the current directory and its
3870 subdirectories, use "--include .".
3875 subdirectories, use "--include .".
3871
3876
3872 If no patterns are given to match, this command prints the names
3877 If no patterns are given to match, this command prints the names
3873 of all files under Mercurial control in the working directory.
3878 of all files under Mercurial control in the working directory.
3874
3879
3875 If you want to feed the output of this command into the "xargs"
3880 If you want to feed the output of this command into the "xargs"
3876 command, use the -0 option to both this command and "xargs". This
3881 command, use the -0 option to both this command and "xargs". This
3877 will avoid the problem of "xargs" treating single filenames that
3882 will avoid the problem of "xargs" treating single filenames that
3878 contain whitespace as multiple filenames.
3883 contain whitespace as multiple filenames.
3879
3884
3880 Returns 0 if a match is found, 1 otherwise.
3885 Returns 0 if a match is found, 1 otherwise.
3881 """
3886 """
3882 end = opts.get('print0') and '\0' or '\n'
3887 end = opts.get('print0') and '\0' or '\n'
3883 rev = scmutil.revsingle(repo, opts.get('rev'), None).node()
3888 rev = scmutil.revsingle(repo, opts.get('rev'), None).node()
3884
3889
3885 ret = 1
3890 ret = 1
3886 m = scmutil.match(repo[rev], pats, opts, default='relglob')
3891 m = scmutil.match(repo[rev], pats, opts, default='relglob')
3887 m.bad = lambda x, y: False
3892 m.bad = lambda x, y: False
3888 for abs in repo[rev].walk(m):
3893 for abs in repo[rev].walk(m):
3889 if not rev and abs not in repo.dirstate:
3894 if not rev and abs not in repo.dirstate:
3890 continue
3895 continue
3891 if opts.get('fullpath'):
3896 if opts.get('fullpath'):
3892 ui.write(repo.wjoin(abs), end)
3897 ui.write(repo.wjoin(abs), end)
3893 else:
3898 else:
3894 ui.write(((pats and m.rel(abs)) or abs), end)
3899 ui.write(((pats and m.rel(abs)) or abs), end)
3895 ret = 0
3900 ret = 0
3896
3901
3897 return ret
3902 return ret
3898
3903
3899 @command('^log|history',
3904 @command('^log|history',
3900 [('f', 'follow', None,
3905 [('f', 'follow', None,
3901 _('follow changeset history, or file history across copies and renames')),
3906 _('follow changeset history, or file history across copies and renames')),
3902 ('', 'follow-first', None,
3907 ('', 'follow-first', None,
3903 _('only follow the first parent of merge changesets (DEPRECATED)')),
3908 _('only follow the first parent of merge changesets (DEPRECATED)')),
3904 ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
3909 ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
3905 ('C', 'copies', None, _('show copied files')),
3910 ('C', 'copies', None, _('show copied files')),
3906 ('k', 'keyword', [],
3911 ('k', 'keyword', [],
3907 _('do case-insensitive search for a given text'), _('TEXT')),
3912 _('do case-insensitive search for a given text'), _('TEXT')),
3908 ('r', 'rev', [], _('show the specified revision or range'), _('REV')),
3913 ('r', 'rev', [], _('show the specified revision or range'), _('REV')),
3909 ('', 'removed', None, _('include revisions where files were removed')),
3914 ('', 'removed', None, _('include revisions where files were removed')),
3910 ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
3915 ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
3911 ('u', 'user', [], _('revisions committed by user'), _('USER')),
3916 ('u', 'user', [], _('revisions committed by user'), _('USER')),
3912 ('', 'only-branch', [],
3917 ('', 'only-branch', [],
3913 _('show only changesets within the given named branch (DEPRECATED)'),
3918 _('show only changesets within the given named branch (DEPRECATED)'),
3914 _('BRANCH')),
3919 _('BRANCH')),
3915 ('b', 'branch', [],
3920 ('b', 'branch', [],
3916 _('show changesets within the given named branch'), _('BRANCH')),
3921 _('show changesets within the given named branch'), _('BRANCH')),
3917 ('P', 'prune', [],
3922 ('P', 'prune', [],
3918 _('do not display revision or any of its ancestors'), _('REV')),
3923 _('do not display revision or any of its ancestors'), _('REV')),
3919 ] + logopts + walkopts,
3924 ] + logopts + walkopts,
3920 _('[OPTION]... [FILE]'))
3925 _('[OPTION]... [FILE]'))
3921 def log(ui, repo, *pats, **opts):
3926 def log(ui, repo, *pats, **opts):
3922 """show revision history of entire repository or files
3927 """show revision history of entire repository or files
3923
3928
3924 Print the revision history of the specified files or the entire
3929 Print the revision history of the specified files or the entire
3925 project.
3930 project.
3926
3931
3927 If no revision range is specified, the default is ``tip:0`` unless
3932 If no revision range is specified, the default is ``tip:0`` unless
3928 --follow is set, in which case the working directory parent is
3933 --follow is set, in which case the working directory parent is
3929 used as the starting revision.
3934 used as the starting revision.
3930
3935
3931 File history is shown without following rename or copy history of
3936 File history is shown without following rename or copy history of
3932 files. Use -f/--follow with a filename to follow history across
3937 files. Use -f/--follow with a filename to follow history across
3933 renames and copies. --follow without a filename will only show
3938 renames and copies. --follow without a filename will only show
3934 ancestors or descendants of the starting revision.
3939 ancestors or descendants of the starting revision.
3935
3940
3936 By default this command prints revision number and changeset id,
3941 By default this command prints revision number and changeset id,
3937 tags, non-trivial parents, user, date and time, and a summary for
3942 tags, non-trivial parents, user, date and time, and a summary for
3938 each commit. When the -v/--verbose switch is used, the list of
3943 each commit. When the -v/--verbose switch is used, the list of
3939 changed files and full commit message are shown.
3944 changed files and full commit message are shown.
3940
3945
3941 .. note::
3946 .. note::
3942 log -p/--patch may generate unexpected diff output for merge
3947 log -p/--patch may generate unexpected diff output for merge
3943 changesets, as it will only compare the merge changeset against
3948 changesets, as it will only compare the merge changeset against
3944 its first parent. Also, only files different from BOTH parents
3949 its first parent. Also, only files different from BOTH parents
3945 will appear in files:.
3950 will appear in files:.
3946
3951
3947 .. note::
3952 .. note::
3948 for performance reasons, log FILE may omit duplicate changes
3953 for performance reasons, log FILE may omit duplicate changes
3949 made on branches and will not show deletions. To see all
3954 made on branches and will not show deletions. To see all
3950 changes including duplicates and deletions, use the --removed
3955 changes including duplicates and deletions, use the --removed
3951 switch.
3956 switch.
3952
3957
3953 .. container:: verbose
3958 .. container:: verbose
3954
3959
3955 Some examples:
3960 Some examples:
3956
3961
3957 - changesets with full descriptions and file lists::
3962 - changesets with full descriptions and file lists::
3958
3963
3959 hg log -v
3964 hg log -v
3960
3965
3961 - changesets ancestral to the working directory::
3966 - changesets ancestral to the working directory::
3962
3967
3963 hg log -f
3968 hg log -f
3964
3969
3965 - last 10 commits on the current branch::
3970 - last 10 commits on the current branch::
3966
3971
3967 hg log -l 10 -b .
3972 hg log -l 10 -b .
3968
3973
3969 - changesets showing all modifications of a file, including removals::
3974 - changesets showing all modifications of a file, including removals::
3970
3975
3971 hg log --removed file.c
3976 hg log --removed file.c
3972
3977
3973 - all changesets that touch a directory, with diffs, excluding merges::
3978 - all changesets that touch a directory, with diffs, excluding merges::
3974
3979
3975 hg log -Mp lib/
3980 hg log -Mp lib/
3976
3981
3977 - all revision numbers that match a keyword::
3982 - all revision numbers that match a keyword::
3978
3983
3979 hg log -k bug --template "{rev}\\n"
3984 hg log -k bug --template "{rev}\\n"
3980
3985
3981 - check if a given changeset is included is a tagged release::
3986 - check if a given changeset is included is a tagged release::
3982
3987
3983 hg log -r "a21ccf and ancestor(1.9)"
3988 hg log -r "a21ccf and ancestor(1.9)"
3984
3989
3985 - find all changesets by some user in a date range::
3990 - find all changesets by some user in a date range::
3986
3991
3987 hg log -k alice -d "may 2008 to jul 2008"
3992 hg log -k alice -d "may 2008 to jul 2008"
3988
3993
3989 - summary of all changesets after the last tag::
3994 - summary of all changesets after the last tag::
3990
3995
3991 hg log -r "last(tagged())::" --template "{desc|firstline}\\n"
3996 hg log -r "last(tagged())::" --template "{desc|firstline}\\n"
3992
3997
3993 See :hg:`help dates` for a list of formats valid for -d/--date.
3998 See :hg:`help dates` for a list of formats valid for -d/--date.
3994
3999
3995 See :hg:`help revisions` and :hg:`help revsets` for more about
4000 See :hg:`help revisions` and :hg:`help revsets` for more about
3996 specifying revisions.
4001 specifying revisions.
3997
4002
3998 See :hg:`help templates` for more about pre-packaged styles and
4003 See :hg:`help templates` for more about pre-packaged styles and
3999 specifying custom templates.
4004 specifying custom templates.
4000
4005
4001 Returns 0 on success.
4006 Returns 0 on success.
4002 """
4007 """
4003 if opts.get('graph'):
4008 if opts.get('graph'):
4004 return cmdutil.graphlog(ui, repo, *pats, **opts)
4009 return cmdutil.graphlog(ui, repo, *pats, **opts)
4005
4010
4006 matchfn = scmutil.match(repo[None], pats, opts)
4011 matchfn = scmutil.match(repo[None], pats, opts)
4007 limit = cmdutil.loglimit(opts)
4012 limit = cmdutil.loglimit(opts)
4008 count = 0
4013 count = 0
4009
4014
4010 getrenamed, endrev = None, None
4015 getrenamed, endrev = None, None
4011 if opts.get('copies'):
4016 if opts.get('copies'):
4012 if opts.get('rev'):
4017 if opts.get('rev'):
4013 endrev = max(scmutil.revrange(repo, opts.get('rev'))) + 1
4018 endrev = max(scmutil.revrange(repo, opts.get('rev'))) + 1
4014 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
4019 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
4015
4020
4016 df = False
4021 df = False
4017 if opts.get("date"):
4022 if opts.get("date"):
4018 df = util.matchdate(opts["date"])
4023 df = util.matchdate(opts["date"])
4019
4024
4020 branches = opts.get('branch', []) + opts.get('only_branch', [])
4025 branches = opts.get('branch', []) + opts.get('only_branch', [])
4021 opts['branch'] = [repo.lookupbranch(b) for b in branches]
4026 opts['branch'] = [repo.lookupbranch(b) for b in branches]
4022
4027
4023 displayer = cmdutil.show_changeset(ui, repo, opts, True)
4028 displayer = cmdutil.show_changeset(ui, repo, opts, True)
4024 def prep(ctx, fns):
4029 def prep(ctx, fns):
4025 rev = ctx.rev()
4030 rev = ctx.rev()
4026 parents = [p for p in repo.changelog.parentrevs(rev)
4031 parents = [p for p in repo.changelog.parentrevs(rev)
4027 if p != nullrev]
4032 if p != nullrev]
4028 if opts.get('no_merges') and len(parents) == 2:
4033 if opts.get('no_merges') and len(parents) == 2:
4029 return
4034 return
4030 if opts.get('only_merges') and len(parents) != 2:
4035 if opts.get('only_merges') and len(parents) != 2:
4031 return
4036 return
4032 if opts.get('branch') and ctx.branch() not in opts['branch']:
4037 if opts.get('branch') and ctx.branch() not in opts['branch']:
4033 return
4038 return
4034 if df and not df(ctx.date()[0]):
4039 if df and not df(ctx.date()[0]):
4035 return
4040 return
4036
4041
4037 lower = encoding.lower
4042 lower = encoding.lower
4038 if opts.get('user'):
4043 if opts.get('user'):
4039 luser = lower(ctx.user())
4044 luser = lower(ctx.user())
4040 for k in [lower(x) for x in opts['user']]:
4045 for k in [lower(x) for x in opts['user']]:
4041 if (k in luser):
4046 if (k in luser):
4042 break
4047 break
4043 else:
4048 else:
4044 return
4049 return
4045 if opts.get('keyword'):
4050 if opts.get('keyword'):
4046 luser = lower(ctx.user())
4051 luser = lower(ctx.user())
4047 ldesc = lower(ctx.description())
4052 ldesc = lower(ctx.description())
4048 lfiles = lower(" ".join(ctx.files()))
4053 lfiles = lower(" ".join(ctx.files()))
4049 for k in [lower(x) for x in opts['keyword']]:
4054 for k in [lower(x) for x in opts['keyword']]:
4050 if (k in luser or k in ldesc or k in lfiles):
4055 if (k in luser or k in ldesc or k in lfiles):
4051 break
4056 break
4052 else:
4057 else:
4053 return
4058 return
4054
4059
4055 copies = None
4060 copies = None
4056 if getrenamed is not None and rev:
4061 if getrenamed is not None and rev:
4057 copies = []
4062 copies = []
4058 for fn in ctx.files():
4063 for fn in ctx.files():
4059 rename = getrenamed(fn, rev)
4064 rename = getrenamed(fn, rev)
4060 if rename:
4065 if rename:
4061 copies.append((fn, rename[0]))
4066 copies.append((fn, rename[0]))
4062
4067
4063 revmatchfn = None
4068 revmatchfn = None
4064 if opts.get('patch') or opts.get('stat'):
4069 if opts.get('patch') or opts.get('stat'):
4065 if opts.get('follow') or opts.get('follow_first'):
4070 if opts.get('follow') or opts.get('follow_first'):
4066 # note: this might be wrong when following through merges
4071 # note: this might be wrong when following through merges
4067 revmatchfn = scmutil.match(repo[None], fns, default='path')
4072 revmatchfn = scmutil.match(repo[None], fns, default='path')
4068 else:
4073 else:
4069 revmatchfn = matchfn
4074 revmatchfn = matchfn
4070
4075
4071 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
4076 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
4072
4077
4073 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
4078 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
4074 if displayer.flush(ctx.rev()):
4079 if displayer.flush(ctx.rev()):
4075 count += 1
4080 count += 1
4076 if count == limit:
4081 if count == limit:
4077 break
4082 break
4078 displayer.close()
4083 displayer.close()
4079
4084
4080 @command('manifest',
4085 @command('manifest',
4081 [('r', 'rev', '', _('revision to display'), _('REV')),
4086 [('r', 'rev', '', _('revision to display'), _('REV')),
4082 ('', 'all', False, _("list files from all revisions"))],
4087 ('', 'all', False, _("list files from all revisions"))],
4083 _('[-r REV]'))
4088 _('[-r REV]'))
4084 def manifest(ui, repo, node=None, rev=None, **opts):
4089 def manifest(ui, repo, node=None, rev=None, **opts):
4085 """output the current or given revision of the project manifest
4090 """output the current or given revision of the project manifest
4086
4091
4087 Print a list of version controlled files for the given revision.
4092 Print a list of version controlled files for the given revision.
4088 If no revision is given, the first parent of the working directory
4093 If no revision is given, the first parent of the working directory
4089 is used, or the null revision if no revision is checked out.
4094 is used, or the null revision if no revision is checked out.
4090
4095
4091 With -v, print file permissions, symlink and executable bits.
4096 With -v, print file permissions, symlink and executable bits.
4092 With --debug, print file revision hashes.
4097 With --debug, print file revision hashes.
4093
4098
4094 If option --all is specified, the list of all files from all revisions
4099 If option --all is specified, the list of all files from all revisions
4095 is printed. This includes deleted and renamed files.
4100 is printed. This includes deleted and renamed files.
4096
4101
4097 Returns 0 on success.
4102 Returns 0 on success.
4098 """
4103 """
4099
4104
4100 fm = ui.formatter('manifest', opts)
4105 fm = ui.formatter('manifest', opts)
4101
4106
4102 if opts.get('all'):
4107 if opts.get('all'):
4103 if rev or node:
4108 if rev or node:
4104 raise util.Abort(_("can't specify a revision with --all"))
4109 raise util.Abort(_("can't specify a revision with --all"))
4105
4110
4106 res = []
4111 res = []
4107 prefix = "data/"
4112 prefix = "data/"
4108 suffix = ".i"
4113 suffix = ".i"
4109 plen = len(prefix)
4114 plen = len(prefix)
4110 slen = len(suffix)
4115 slen = len(suffix)
4111 lock = repo.lock()
4116 lock = repo.lock()
4112 try:
4117 try:
4113 for fn, b, size in repo.store.datafiles():
4118 for fn, b, size in repo.store.datafiles():
4114 if size != 0 and fn[-slen:] == suffix and fn[:plen] == prefix:
4119 if size != 0 and fn[-slen:] == suffix and fn[:plen] == prefix:
4115 res.append(fn[plen:-slen])
4120 res.append(fn[plen:-slen])
4116 finally:
4121 finally:
4117 lock.release()
4122 lock.release()
4118 for f in res:
4123 for f in res:
4119 fm.startitem()
4124 fm.startitem()
4120 fm.write("path", '%s\n', f)
4125 fm.write("path", '%s\n', f)
4121 fm.end()
4126 fm.end()
4122 return
4127 return
4123
4128
4124 if rev and node:
4129 if rev and node:
4125 raise util.Abort(_("please specify just one revision"))
4130 raise util.Abort(_("please specify just one revision"))
4126
4131
4127 if not node:
4132 if not node:
4128 node = rev
4133 node = rev
4129
4134
4130 char = {'l': '@', 'x': '*', '': ''}
4135 char = {'l': '@', 'x': '*', '': ''}
4131 mode = {'l': '644', 'x': '755', '': '644'}
4136 mode = {'l': '644', 'x': '755', '': '644'}
4132 ctx = scmutil.revsingle(repo, node)
4137 ctx = scmutil.revsingle(repo, node)
4133 mf = ctx.manifest()
4138 mf = ctx.manifest()
4134 for f in ctx:
4139 for f in ctx:
4135 fm.startitem()
4140 fm.startitem()
4136 fl = ctx[f].flags()
4141 fl = ctx[f].flags()
4137 fm.condwrite(ui.debugflag, 'hash', '%s ', hex(mf[f]))
4142 fm.condwrite(ui.debugflag, 'hash', '%s ', hex(mf[f]))
4138 fm.condwrite(ui.verbose, 'mode type', '%s %1s ', mode[fl], char[fl])
4143 fm.condwrite(ui.verbose, 'mode type', '%s %1s ', mode[fl], char[fl])
4139 fm.write('path', '%s\n', f)
4144 fm.write('path', '%s\n', f)
4140 fm.end()
4145 fm.end()
4141
4146
4142 @command('^merge',
4147 @command('^merge',
4143 [('f', 'force', None, _('force a merge with outstanding changes')),
4148 [('f', 'force', None, _('force a merge with outstanding changes')),
4144 ('r', 'rev', '', _('revision to merge'), _('REV')),
4149 ('r', 'rev', '', _('revision to merge'), _('REV')),
4145 ('P', 'preview', None,
4150 ('P', 'preview', None,
4146 _('review revisions to merge (no merge is performed)'))
4151 _('review revisions to merge (no merge is performed)'))
4147 ] + mergetoolopts,
4152 ] + mergetoolopts,
4148 _('[-P] [-f] [[-r] REV]'))
4153 _('[-P] [-f] [[-r] REV]'))
4149 def merge(ui, repo, node=None, **opts):
4154 def merge(ui, repo, node=None, **opts):
4150 """merge working directory with another revision
4155 """merge working directory with another revision
4151
4156
4152 The current working directory is updated with all changes made in
4157 The current working directory is updated with all changes made in
4153 the requested revision since the last common predecessor revision.
4158 the requested revision since the last common predecessor revision.
4154
4159
4155 Files that changed between either parent are marked as changed for
4160 Files that changed between either parent are marked as changed for
4156 the next commit and a commit must be performed before any further
4161 the next commit and a commit must be performed before any further
4157 updates to the repository are allowed. The next commit will have
4162 updates to the repository are allowed. The next commit will have
4158 two parents.
4163 two parents.
4159
4164
4160 ``--tool`` can be used to specify the merge tool used for file
4165 ``--tool`` can be used to specify the merge tool used for file
4161 merges. It overrides the HGMERGE environment variable and your
4166 merges. It overrides the HGMERGE environment variable and your
4162 configuration files. See :hg:`help merge-tools` for options.
4167 configuration files. See :hg:`help merge-tools` for options.
4163
4168
4164 If no revision is specified, the working directory's parent is a
4169 If no revision is specified, the working directory's parent is a
4165 head revision, and the current branch contains exactly one other
4170 head revision, and the current branch contains exactly one other
4166 head, the other head is merged with by default. Otherwise, an
4171 head, the other head is merged with by default. Otherwise, an
4167 explicit revision with which to merge with must be provided.
4172 explicit revision with which to merge with must be provided.
4168
4173
4169 :hg:`resolve` must be used to resolve unresolved files.
4174 :hg:`resolve` must be used to resolve unresolved files.
4170
4175
4171 To undo an uncommitted merge, use :hg:`update --clean .` which
4176 To undo an uncommitted merge, use :hg:`update --clean .` which
4172 will check out a clean copy of the original merge parent, losing
4177 will check out a clean copy of the original merge parent, losing
4173 all changes.
4178 all changes.
4174
4179
4175 Returns 0 on success, 1 if there are unresolved files.
4180 Returns 0 on success, 1 if there are unresolved files.
4176 """
4181 """
4177
4182
4178 if opts.get('rev') and node:
4183 if opts.get('rev') and node:
4179 raise util.Abort(_("please specify just one revision"))
4184 raise util.Abort(_("please specify just one revision"))
4180 if not node:
4185 if not node:
4181 node = opts.get('rev')
4186 node = opts.get('rev')
4182
4187
4183 if node:
4188 if node:
4184 node = scmutil.revsingle(repo, node).node()
4189 node = scmutil.revsingle(repo, node).node()
4185
4190
4186 if not node and repo._bookmarkcurrent:
4191 if not node and repo._bookmarkcurrent:
4187 bmheads = repo.bookmarkheads(repo._bookmarkcurrent)
4192 bmheads = repo.bookmarkheads(repo._bookmarkcurrent)
4188 curhead = repo[repo._bookmarkcurrent].node()
4193 curhead = repo[repo._bookmarkcurrent].node()
4189 if len(bmheads) == 2:
4194 if len(bmheads) == 2:
4190 if curhead == bmheads[0]:
4195 if curhead == bmheads[0]:
4191 node = bmheads[1]
4196 node = bmheads[1]
4192 else:
4197 else:
4193 node = bmheads[0]
4198 node = bmheads[0]
4194 elif len(bmheads) > 2:
4199 elif len(bmheads) > 2:
4195 raise util.Abort(_("multiple matching bookmarks to merge - "
4200 raise util.Abort(_("multiple matching bookmarks to merge - "
4196 "please merge with an explicit rev or bookmark"),
4201 "please merge with an explicit rev or bookmark"),
4197 hint=_("run 'hg heads' to see all heads"))
4202 hint=_("run 'hg heads' to see all heads"))
4198 elif len(bmheads) <= 1:
4203 elif len(bmheads) <= 1:
4199 raise util.Abort(_("no matching bookmark to merge - "
4204 raise util.Abort(_("no matching bookmark to merge - "
4200 "please merge with an explicit rev or bookmark"),
4205 "please merge with an explicit rev or bookmark"),
4201 hint=_("run 'hg heads' to see all heads"))
4206 hint=_("run 'hg heads' to see all heads"))
4202
4207
4203 if not node and not repo._bookmarkcurrent:
4208 if not node and not repo._bookmarkcurrent:
4204 branch = repo[None].branch()
4209 branch = repo[None].branch()
4205 bheads = repo.branchheads(branch)
4210 bheads = repo.branchheads(branch)
4206 nbhs = [bh for bh in bheads if not repo[bh].bookmarks()]
4211 nbhs = [bh for bh in bheads if not repo[bh].bookmarks()]
4207
4212
4208 if len(nbhs) > 2:
4213 if len(nbhs) > 2:
4209 raise util.Abort(_("branch '%s' has %d heads - "
4214 raise util.Abort(_("branch '%s' has %d heads - "
4210 "please merge with an explicit rev")
4215 "please merge with an explicit rev")
4211 % (branch, len(bheads)),
4216 % (branch, len(bheads)),
4212 hint=_("run 'hg heads .' to see heads"))
4217 hint=_("run 'hg heads .' to see heads"))
4213
4218
4214 parent = repo.dirstate.p1()
4219 parent = repo.dirstate.p1()
4215 if len(nbhs) <= 1:
4220 if len(nbhs) <= 1:
4216 if len(bheads) > 1:
4221 if len(bheads) > 1:
4217 raise util.Abort(_("heads are bookmarked - "
4222 raise util.Abort(_("heads are bookmarked - "
4218 "please merge with an explicit rev"),
4223 "please merge with an explicit rev"),
4219 hint=_("run 'hg heads' to see all heads"))
4224 hint=_("run 'hg heads' to see all heads"))
4220 if len(repo.heads()) > 1:
4225 if len(repo.heads()) > 1:
4221 raise util.Abort(_("branch '%s' has one head - "
4226 raise util.Abort(_("branch '%s' has one head - "
4222 "please merge with an explicit rev")
4227 "please merge with an explicit rev")
4223 % branch,
4228 % branch,
4224 hint=_("run 'hg heads' to see all heads"))
4229 hint=_("run 'hg heads' to see all heads"))
4225 msg, hint = _('nothing to merge'), None
4230 msg, hint = _('nothing to merge'), None
4226 if parent != repo.lookup(branch):
4231 if parent != repo.lookup(branch):
4227 hint = _("use 'hg update' instead")
4232 hint = _("use 'hg update' instead")
4228 raise util.Abort(msg, hint=hint)
4233 raise util.Abort(msg, hint=hint)
4229
4234
4230 if parent not in bheads:
4235 if parent not in bheads:
4231 raise util.Abort(_('working directory not at a head revision'),
4236 raise util.Abort(_('working directory not at a head revision'),
4232 hint=_("use 'hg update' or merge with an "
4237 hint=_("use 'hg update' or merge with an "
4233 "explicit revision"))
4238 "explicit revision"))
4234 if parent == nbhs[0]:
4239 if parent == nbhs[0]:
4235 node = nbhs[-1]
4240 node = nbhs[-1]
4236 else:
4241 else:
4237 node = nbhs[0]
4242 node = nbhs[0]
4238
4243
4239 if opts.get('preview'):
4244 if opts.get('preview'):
4240 # find nodes that are ancestors of p2 but not of p1
4245 # find nodes that are ancestors of p2 but not of p1
4241 p1 = repo.lookup('.')
4246 p1 = repo.lookup('.')
4242 p2 = repo.lookup(node)
4247 p2 = repo.lookup(node)
4243 nodes = repo.changelog.findmissing(common=[p1], heads=[p2])
4248 nodes = repo.changelog.findmissing(common=[p1], heads=[p2])
4244
4249
4245 displayer = cmdutil.show_changeset(ui, repo, opts)
4250 displayer = cmdutil.show_changeset(ui, repo, opts)
4246 for node in nodes:
4251 for node in nodes:
4247 displayer.show(repo[node])
4252 displayer.show(repo[node])
4248 displayer.close()
4253 displayer.close()
4249 return 0
4254 return 0
4250
4255
4251 try:
4256 try:
4252 # ui.forcemerge is an internal variable, do not document
4257 # ui.forcemerge is an internal variable, do not document
4253 repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
4258 repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
4254 return hg.merge(repo, node, force=opts.get('force'))
4259 return hg.merge(repo, node, force=opts.get('force'))
4255 finally:
4260 finally:
4256 ui.setconfig('ui', 'forcemerge', '')
4261 ui.setconfig('ui', 'forcemerge', '')
4257
4262
4258 @command('outgoing|out',
4263 @command('outgoing|out',
4259 [('f', 'force', None, _('run even when the destination is unrelated')),
4264 [('f', 'force', None, _('run even when the destination is unrelated')),
4260 ('r', 'rev', [],
4265 ('r', 'rev', [],
4261 _('a changeset intended to be included in the destination'), _('REV')),
4266 _('a changeset intended to be included in the destination'), _('REV')),
4262 ('n', 'newest-first', None, _('show newest record first')),
4267 ('n', 'newest-first', None, _('show newest record first')),
4263 ('B', 'bookmarks', False, _('compare bookmarks')),
4268 ('B', 'bookmarks', False, _('compare bookmarks')),
4264 ('b', 'branch', [], _('a specific branch you would like to push'),
4269 ('b', 'branch', [], _('a specific branch you would like to push'),
4265 _('BRANCH')),
4270 _('BRANCH')),
4266 ] + logopts + remoteopts + subrepoopts,
4271 ] + logopts + remoteopts + subrepoopts,
4267 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]'))
4272 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]'))
4268 def outgoing(ui, repo, dest=None, **opts):
4273 def outgoing(ui, repo, dest=None, **opts):
4269 """show changesets not found in the destination
4274 """show changesets not found in the destination
4270
4275
4271 Show changesets not found in the specified destination repository
4276 Show changesets not found in the specified destination repository
4272 or the default push location. These are the changesets that would
4277 or the default push location. These are the changesets that would
4273 be pushed if a push was requested.
4278 be pushed if a push was requested.
4274
4279
4275 See pull for details of valid destination formats.
4280 See pull for details of valid destination formats.
4276
4281
4277 Returns 0 if there are outgoing changes, 1 otherwise.
4282 Returns 0 if there are outgoing changes, 1 otherwise.
4278 """
4283 """
4279 if opts.get('graph'):
4284 if opts.get('graph'):
4280 cmdutil.checkunsupportedgraphflags([], opts)
4285 cmdutil.checkunsupportedgraphflags([], opts)
4281 o = hg._outgoing(ui, repo, dest, opts)
4286 o = hg._outgoing(ui, repo, dest, opts)
4282 if o is None:
4287 if o is None:
4283 return
4288 return
4284
4289
4285 revdag = cmdutil.graphrevs(repo, o, opts)
4290 revdag = cmdutil.graphrevs(repo, o, opts)
4286 displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
4291 displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
4287 showparents = [ctx.node() for ctx in repo[None].parents()]
4292 showparents = [ctx.node() for ctx in repo[None].parents()]
4288 cmdutil.displaygraph(ui, revdag, displayer, showparents,
4293 cmdutil.displaygraph(ui, revdag, displayer, showparents,
4289 graphmod.asciiedges)
4294 graphmod.asciiedges)
4290 return 0
4295 return 0
4291
4296
4292 if opts.get('bookmarks'):
4297 if opts.get('bookmarks'):
4293 dest = ui.expandpath(dest or 'default-push', dest or 'default')
4298 dest = ui.expandpath(dest or 'default-push', dest or 'default')
4294 dest, branches = hg.parseurl(dest, opts.get('branch'))
4299 dest, branches = hg.parseurl(dest, opts.get('branch'))
4295 other = hg.peer(repo, opts, dest)
4300 other = hg.peer(repo, opts, dest)
4296 if 'bookmarks' not in other.listkeys('namespaces'):
4301 if 'bookmarks' not in other.listkeys('namespaces'):
4297 ui.warn(_("remote doesn't support bookmarks\n"))
4302 ui.warn(_("remote doesn't support bookmarks\n"))
4298 return 0
4303 return 0
4299 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
4304 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
4300 return bookmarks.diff(ui, other, repo)
4305 return bookmarks.diff(ui, other, repo)
4301
4306
4302 repo._subtoppath = ui.expandpath(dest or 'default-push', dest or 'default')
4307 repo._subtoppath = ui.expandpath(dest or 'default-push', dest or 'default')
4303 try:
4308 try:
4304 return hg.outgoing(ui, repo, dest, opts)
4309 return hg.outgoing(ui, repo, dest, opts)
4305 finally:
4310 finally:
4306 del repo._subtoppath
4311 del repo._subtoppath
4307
4312
4308 @command('parents',
4313 @command('parents',
4309 [('r', 'rev', '', _('show parents of the specified revision'), _('REV')),
4314 [('r', 'rev', '', _('show parents of the specified revision'), _('REV')),
4310 ] + templateopts,
4315 ] + templateopts,
4311 _('[-r REV] [FILE]'))
4316 _('[-r REV] [FILE]'))
4312 def parents(ui, repo, file_=None, **opts):
4317 def parents(ui, repo, file_=None, **opts):
4313 """show the parents of the working directory or revision
4318 """show the parents of the working directory or revision
4314
4319
4315 Print the working directory's parent revisions. If a revision is
4320 Print the working directory's parent revisions. If a revision is
4316 given via -r/--rev, the parent of that revision will be printed.
4321 given via -r/--rev, the parent of that revision will be printed.
4317 If a file argument is given, the revision in which the file was
4322 If a file argument is given, the revision in which the file was
4318 last changed (before the working directory revision or the
4323 last changed (before the working directory revision or the
4319 argument to --rev if given) is printed.
4324 argument to --rev if given) is printed.
4320
4325
4321 Returns 0 on success.
4326 Returns 0 on success.
4322 """
4327 """
4323
4328
4324 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
4329 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
4325
4330
4326 if file_:
4331 if file_:
4327 m = scmutil.match(ctx, (file_,), opts)
4332 m = scmutil.match(ctx, (file_,), opts)
4328 if m.anypats() or len(m.files()) != 1:
4333 if m.anypats() or len(m.files()) != 1:
4329 raise util.Abort(_('can only specify an explicit filename'))
4334 raise util.Abort(_('can only specify an explicit filename'))
4330 file_ = m.files()[0]
4335 file_ = m.files()[0]
4331 filenodes = []
4336 filenodes = []
4332 for cp in ctx.parents():
4337 for cp in ctx.parents():
4333 if not cp:
4338 if not cp:
4334 continue
4339 continue
4335 try:
4340 try:
4336 filenodes.append(cp.filenode(file_))
4341 filenodes.append(cp.filenode(file_))
4337 except error.LookupError:
4342 except error.LookupError:
4338 pass
4343 pass
4339 if not filenodes:
4344 if not filenodes:
4340 raise util.Abort(_("'%s' not found in manifest!") % file_)
4345 raise util.Abort(_("'%s' not found in manifest!") % file_)
4341 fl = repo.file(file_)
4346 fl = repo.file(file_)
4342 p = [repo.lookup(fl.linkrev(fl.rev(fn))) for fn in filenodes]
4347 p = [repo.lookup(fl.linkrev(fl.rev(fn))) for fn in filenodes]
4343 else:
4348 else:
4344 p = [cp.node() for cp in ctx.parents()]
4349 p = [cp.node() for cp in ctx.parents()]
4345
4350
4346 displayer = cmdutil.show_changeset(ui, repo, opts)
4351 displayer = cmdutil.show_changeset(ui, repo, opts)
4347 for n in p:
4352 for n in p:
4348 if n != nullid:
4353 if n != nullid:
4349 displayer.show(repo[n])
4354 displayer.show(repo[n])
4350 displayer.close()
4355 displayer.close()
4351
4356
4352 @command('paths', [], _('[NAME]'))
4357 @command('paths', [], _('[NAME]'))
4353 def paths(ui, repo, search=None):
4358 def paths(ui, repo, search=None):
4354 """show aliases for remote repositories
4359 """show aliases for remote repositories
4355
4360
4356 Show definition of symbolic path name NAME. If no name is given,
4361 Show definition of symbolic path name NAME. If no name is given,
4357 show definition of all available names.
4362 show definition of all available names.
4358
4363
4359 Option -q/--quiet suppresses all output when searching for NAME
4364 Option -q/--quiet suppresses all output when searching for NAME
4360 and shows only the path names when listing all definitions.
4365 and shows only the path names when listing all definitions.
4361
4366
4362 Path names are defined in the [paths] section of your
4367 Path names are defined in the [paths] section of your
4363 configuration file and in ``/etc/mercurial/hgrc``. If run inside a
4368 configuration file and in ``/etc/mercurial/hgrc``. If run inside a
4364 repository, ``.hg/hgrc`` is used, too.
4369 repository, ``.hg/hgrc`` is used, too.
4365
4370
4366 The path names ``default`` and ``default-push`` have a special
4371 The path names ``default`` and ``default-push`` have a special
4367 meaning. When performing a push or pull operation, they are used
4372 meaning. When performing a push or pull operation, they are used
4368 as fallbacks if no location is specified on the command-line.
4373 as fallbacks if no location is specified on the command-line.
4369 When ``default-push`` is set, it will be used for push and
4374 When ``default-push`` is set, it will be used for push and
4370 ``default`` will be used for pull; otherwise ``default`` is used
4375 ``default`` will be used for pull; otherwise ``default`` is used
4371 as the fallback for both. When cloning a repository, the clone
4376 as the fallback for both. When cloning a repository, the clone
4372 source is written as ``default`` in ``.hg/hgrc``. Note that
4377 source is written as ``default`` in ``.hg/hgrc``. Note that
4373 ``default`` and ``default-push`` apply to all inbound (e.g.
4378 ``default`` and ``default-push`` apply to all inbound (e.g.
4374 :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email` and
4379 :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email` and
4375 :hg:`bundle`) operations.
4380 :hg:`bundle`) operations.
4376
4381
4377 See :hg:`help urls` for more information.
4382 See :hg:`help urls` for more information.
4378
4383
4379 Returns 0 on success.
4384 Returns 0 on success.
4380 """
4385 """
4381 if search:
4386 if search:
4382 for name, path in ui.configitems("paths"):
4387 for name, path in ui.configitems("paths"):
4383 if name == search:
4388 if name == search:
4384 ui.status("%s\n" % util.hidepassword(path))
4389 ui.status("%s\n" % util.hidepassword(path))
4385 return
4390 return
4386 if not ui.quiet:
4391 if not ui.quiet:
4387 ui.warn(_("not found!\n"))
4392 ui.warn(_("not found!\n"))
4388 return 1
4393 return 1
4389 else:
4394 else:
4390 for name, path in ui.configitems("paths"):
4395 for name, path in ui.configitems("paths"):
4391 if ui.quiet:
4396 if ui.quiet:
4392 ui.write("%s\n" % name)
4397 ui.write("%s\n" % name)
4393 else:
4398 else:
4394 ui.write("%s = %s\n" % (name, util.hidepassword(path)))
4399 ui.write("%s = %s\n" % (name, util.hidepassword(path)))
4395
4400
4396 @command('phase',
4401 @command('phase',
4397 [('p', 'public', False, _('set changeset phase to public')),
4402 [('p', 'public', False, _('set changeset phase to public')),
4398 ('d', 'draft', False, _('set changeset phase to draft')),
4403 ('d', 'draft', False, _('set changeset phase to draft')),
4399 ('s', 'secret', False, _('set changeset phase to secret')),
4404 ('s', 'secret', False, _('set changeset phase to secret')),
4400 ('f', 'force', False, _('allow to move boundary backward')),
4405 ('f', 'force', False, _('allow to move boundary backward')),
4401 ('r', 'rev', [], _('target revision'), _('REV')),
4406 ('r', 'rev', [], _('target revision'), _('REV')),
4402 ],
4407 ],
4403 _('[-p|-d|-s] [-f] [-r] REV...'))
4408 _('[-p|-d|-s] [-f] [-r] REV...'))
4404 def phase(ui, repo, *revs, **opts):
4409 def phase(ui, repo, *revs, **opts):
4405 """set or show the current phase name
4410 """set or show the current phase name
4406
4411
4407 With no argument, show the phase name of specified revisions.
4412 With no argument, show the phase name of specified revisions.
4408
4413
4409 With one of -p/--public, -d/--draft or -s/--secret, change the
4414 With one of -p/--public, -d/--draft or -s/--secret, change the
4410 phase value of the specified revisions.
4415 phase value of the specified revisions.
4411
4416
4412 Unless -f/--force is specified, :hg:`phase` won't move changeset from a
4417 Unless -f/--force is specified, :hg:`phase` won't move changeset from a
4413 lower phase to an higher phase. Phases are ordered as follows::
4418 lower phase to an higher phase. Phases are ordered as follows::
4414
4419
4415 public < draft < secret
4420 public < draft < secret
4416
4421
4417 Return 0 on success, 1 if no phases were changed or some could not
4422 Return 0 on success, 1 if no phases were changed or some could not
4418 be changed.
4423 be changed.
4419 """
4424 """
4420 # search for a unique phase argument
4425 # search for a unique phase argument
4421 targetphase = None
4426 targetphase = None
4422 for idx, name in enumerate(phases.phasenames):
4427 for idx, name in enumerate(phases.phasenames):
4423 if opts[name]:
4428 if opts[name]:
4424 if targetphase is not None:
4429 if targetphase is not None:
4425 raise util.Abort(_('only one phase can be specified'))
4430 raise util.Abort(_('only one phase can be specified'))
4426 targetphase = idx
4431 targetphase = idx
4427
4432
4428 # look for specified revision
4433 # look for specified revision
4429 revs = list(revs)
4434 revs = list(revs)
4430 revs.extend(opts['rev'])
4435 revs.extend(opts['rev'])
4431 if not revs:
4436 if not revs:
4432 raise util.Abort(_('no revisions specified'))
4437 raise util.Abort(_('no revisions specified'))
4433
4438
4434 revs = scmutil.revrange(repo, revs)
4439 revs = scmutil.revrange(repo, revs)
4435
4440
4436 lock = None
4441 lock = None
4437 ret = 0
4442 ret = 0
4438 if targetphase is None:
4443 if targetphase is None:
4439 # display
4444 # display
4440 for r in revs:
4445 for r in revs:
4441 ctx = repo[r]
4446 ctx = repo[r]
4442 ui.write('%i: %s\n' % (ctx.rev(), ctx.phasestr()))
4447 ui.write('%i: %s\n' % (ctx.rev(), ctx.phasestr()))
4443 else:
4448 else:
4444 lock = repo.lock()
4449 lock = repo.lock()
4445 try:
4450 try:
4446 # set phase
4451 # set phase
4447 if not revs:
4452 if not revs:
4448 raise util.Abort(_('empty revision set'))
4453 raise util.Abort(_('empty revision set'))
4449 nodes = [repo[r].node() for r in revs]
4454 nodes = [repo[r].node() for r in revs]
4450 olddata = repo._phasecache.getphaserevs(repo)[:]
4455 olddata = repo._phasecache.getphaserevs(repo)[:]
4451 phases.advanceboundary(repo, targetphase, nodes)
4456 phases.advanceboundary(repo, targetphase, nodes)
4452 if opts['force']:
4457 if opts['force']:
4453 phases.retractboundary(repo, targetphase, nodes)
4458 phases.retractboundary(repo, targetphase, nodes)
4454 finally:
4459 finally:
4455 lock.release()
4460 lock.release()
4456 # moving revision from public to draft may hide them
4461 # moving revision from public to draft may hide them
4457 # We have to check result on an unfiltered repository
4462 # We have to check result on an unfiltered repository
4458 unfi = repo.unfiltered()
4463 unfi = repo.unfiltered()
4459 newdata = repo._phasecache.getphaserevs(unfi)
4464 newdata = repo._phasecache.getphaserevs(unfi)
4460 changes = sum(o != newdata[i] for i, o in enumerate(olddata))
4465 changes = sum(o != newdata[i] for i, o in enumerate(olddata))
4461 cl = unfi.changelog
4466 cl = unfi.changelog
4462 rejected = [n for n in nodes
4467 rejected = [n for n in nodes
4463 if newdata[cl.rev(n)] < targetphase]
4468 if newdata[cl.rev(n)] < targetphase]
4464 if rejected:
4469 if rejected:
4465 ui.warn(_('cannot move %i changesets to a more permissive '
4470 ui.warn(_('cannot move %i changesets to a more permissive '
4466 'phase, use --force\n') % len(rejected))
4471 'phase, use --force\n') % len(rejected))
4467 ret = 1
4472 ret = 1
4468 if changes:
4473 if changes:
4469 msg = _('phase changed for %i changesets\n') % changes
4474 msg = _('phase changed for %i changesets\n') % changes
4470 if ret:
4475 if ret:
4471 ui.status(msg)
4476 ui.status(msg)
4472 else:
4477 else:
4473 ui.note(msg)
4478 ui.note(msg)
4474 else:
4479 else:
4475 ui.warn(_('no phases changed\n'))
4480 ui.warn(_('no phases changed\n'))
4476 ret = 1
4481 ret = 1
4477 return ret
4482 return ret
4478
4483
4479 def postincoming(ui, repo, modheads, optupdate, checkout):
4484 def postincoming(ui, repo, modheads, optupdate, checkout):
4480 if modheads == 0:
4485 if modheads == 0:
4481 return
4486 return
4482 if optupdate:
4487 if optupdate:
4483 movemarkfrom = repo['.'].node()
4488 movemarkfrom = repo['.'].node()
4484 try:
4489 try:
4485 ret = hg.update(repo, checkout)
4490 ret = hg.update(repo, checkout)
4486 except util.Abort, inst:
4491 except util.Abort, inst:
4487 ui.warn(_("not updating: %s\n") % str(inst))
4492 ui.warn(_("not updating: %s\n") % str(inst))
4488 return 0
4493 return 0
4489 if not ret and not checkout:
4494 if not ret and not checkout:
4490 if bookmarks.update(repo, [movemarkfrom], repo['.'].node()):
4495 if bookmarks.update(repo, [movemarkfrom], repo['.'].node()):
4491 ui.status(_("updating bookmark %s\n") % repo._bookmarkcurrent)
4496 ui.status(_("updating bookmark %s\n") % repo._bookmarkcurrent)
4492 return ret
4497 return ret
4493 if modheads > 1:
4498 if modheads > 1:
4494 currentbranchheads = len(repo.branchheads())
4499 currentbranchheads = len(repo.branchheads())
4495 if currentbranchheads == modheads:
4500 if currentbranchheads == modheads:
4496 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
4501 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
4497 elif currentbranchheads > 1:
4502 elif currentbranchheads > 1:
4498 ui.status(_("(run 'hg heads .' to see heads, 'hg merge' to "
4503 ui.status(_("(run 'hg heads .' to see heads, 'hg merge' to "
4499 "merge)\n"))
4504 "merge)\n"))
4500 else:
4505 else:
4501 ui.status(_("(run 'hg heads' to see heads)\n"))
4506 ui.status(_("(run 'hg heads' to see heads)\n"))
4502 else:
4507 else:
4503 ui.status(_("(run 'hg update' to get a working copy)\n"))
4508 ui.status(_("(run 'hg update' to get a working copy)\n"))
4504
4509
@command('^pull',
    [('u', 'update', None,
     _('update to new branch head if changesets were pulled')),
    ('f', 'force', None, _('run even when remote repository is unrelated')),
    ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
    ('B', 'bookmark', [], _("bookmark to pull"), _('BOOKMARK')),
    ('b', 'branch', [], _('a specific branch you would like to pull'),
     _('BRANCH')),
    ] + remoteopts,
    _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]'))
def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source

    Pull changes from a remote repository to a local one.

    This finds all changes from the repository at the specified path
    or URL and adds them to a local repository (the current one unless
    -R is specified). By default, this does not update the copy of the
    project in the working directory.

    Use :hg:`incoming` if you want to see what would have been added
    by a pull at the time you issued this command. If you then decide
    to add those changes to the repository, you should use :hg:`pull
    -r X` where ``X`` is the last changeset listed by :hg:`incoming`.

    If SOURCE is omitted, the 'default' path will be used.
    See :hg:`help urls` for more information.

    Returns 0 on success, 1 if an update had unresolved files.
    """
    source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
    other = hg.peer(repo, opts, source)
    ui.status(_('pulling from %s\n') % util.hidepassword(source))
    revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))

    remotebookmarks = other.listkeys('bookmarks')

    if opts.get('bookmark'):
        # each requested bookmark must exist remotely; pulling a bookmark
        # implies pulling the changeset it points to
        if not revs:
            revs = []
        for b in opts['bookmark']:
            if b not in remotebookmarks:
                raise util.Abort(_('remote bookmark %s not found!') % b)
            revs.append(remotebookmarks[b])

    if revs:
        try:
            revs = [other.lookup(rev) for rev in revs]
        except error.CapabilityError:
            err = _("other repository doesn't support revision lookup, "
                    "so a rev cannot be specified.")
            raise util.Abort(err)

    modheads = repo.pull(other, heads=revs, force=opts.get('force'))
    bookmarks.updatefromremote(ui, repo, remotebookmarks, source)
    if checkout:
        checkout = str(repo.changelog.rev(other.lookup(checkout)))
    repo._subtoppath = source
    try:
        ret = postincoming(ui, repo, modheads, opts.get('update'), checkout)

    finally:
        del repo._subtoppath

    # update specified bookmarks
    if opts.get('bookmark'):
        marks = repo._bookmarks
        for b in opts['bookmark']:
            # explicit pull overrides local bookmark if any
            ui.status(_("importing bookmark %s\n") % b)
            marks[b] = repo[remotebookmarks[b]].node()
        marks.write()

    return ret
@command('^push',
    [('f', 'force', None, _('force push')),
    ('r', 'rev', [],
     _('a changeset intended to be included in the destination'),
     _('REV')),
    ('B', 'bookmark', [], _("bookmark to push"), _('BOOKMARK')),
    ('b', 'branch', [],
     _('a specific branch you would like to push'), _('BRANCH')),
    ('', 'new-branch', False, _('allow pushing a new branch')),
    ] + remoteopts,
    _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]'))
def push(ui, repo, dest=None, **opts):
    """push changes to the specified destination

    Push changesets from the local repository to the specified
    destination.

    This operation is symmetrical to pull: it is identical to a pull
    in the destination repository from the current one.

    By default, push will not allow creation of new heads at the
    destination, since multiple heads would make it unclear which head
    to use. In this situation, it is recommended to pull and merge
    before pushing.

    Use --new-branch if you want to allow push to create a new named
    branch that is not present at the destination. This allows you to
    only create a new branch without forcing other changes.

    Use -f/--force to override the default behavior and push all
    changesets on all branches.

    If -r/--rev is used, the specified revision and all its ancestors
    will be pushed to the remote repository.

    If -B/--bookmark is used, the specified bookmarked revision, its
    ancestors, and the bookmark will be pushed to the remote
    repository.

    Please see :hg:`help urls` for important details about ``ssh://``
    URLs. If DESTINATION is omitted, a default path will be used.

    Returns 0 if push was successful, 1 if nothing to push.
    """

    if opts.get('bookmark'):
        for b in opts['bookmark']:
            # translate -B options to -r so changesets get pushed
            if b in repo._bookmarks:
                opts.setdefault('rev', []).append(b)
            else:
                # if we try to push a deleted bookmark, translate it to null
                # this lets simultaneous -r, -b options continue working
                opts.setdefault('rev', []).append("null")

    dest = ui.expandpath(dest or 'default-push', dest or 'default')
    dest, branches = hg.parseurl(dest, opts.get('branch'))
    ui.status(_('pushing to %s\n') % util.hidepassword(dest))
    revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
    other = hg.peer(repo, opts, dest)
    if revs:
        revs = [repo.lookup(r) for r in scmutil.revrange(repo, revs)]

    repo._subtoppath = dest
    try:
        # push subrepos depth-first for coherent ordering
        c = repo['']
        subs = c.substate # only repos that are committed
        for s in sorted(subs):
            if c.sub(s).push(opts) == 0:
                return False
    finally:
        del repo._subtoppath
    result = repo.push(other, opts.get('force'), revs=revs,
                       newbranch=opts.get('new_branch'))

    # repo.push returns 0 on failure; invert to shell-style exit code
    result = not result

    if opts.get('bookmark'):
        rb = other.listkeys('bookmarks')
        for b in opts['bookmark']:
            # explicit push overrides remote bookmark if any
            if b in repo._bookmarks:
                ui.status(_("exporting bookmark %s\n") % b)
                new = repo[b].hex()
            elif b in rb:
                ui.status(_("deleting remote bookmark %s\n") % b)
                new = '' # delete
            else:
                ui.warn(_('bookmark %s does not exist on the local '
                          'or remote repository!\n') % b)
                return 2
            old = rb.get(b, '')
            r = other.pushkey('bookmarks', b, old, new)
            if not r:
                ui.warn(_('updating bookmark %s failed!\n') % b)
                if not result:
                    result = 2

    return result
@command('recover', [])
def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an
    interrupted operation. It should only be necessary when Mercurial
    suggests it.

    Returns 0 if successful, 1 if nothing to recover or verify fails.
    """
    # a successful recover is followed by a full verify of the repo
    if repo.recover():
        return hg.verify(repo)
    return 1
@command('^remove|rm',
    [('A', 'after', None, _('record delete for missing files')),
    ('f', 'force', None,
     _('remove (and delete) file even if added or modified')),
    ] + walkopts,
    _('[OPTION]... FILE...'))
def remove(ui, repo, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the current branch.

    This command schedules the files to be removed at the next commit.
    To undo a remove before that, see :hg:`revert`. To undo added
    files, see :hg:`forget`.

    .. container:: verbose

      -A/--after can be used to remove only files that have already
      been deleted, -f/--force can be used to force deletion, and -Af
      can be used to remove files from the next revision without
      deleting them from the working directory.

      The following table details the behavior of remove for different
      file states (columns) and option combinations (rows). The file
      states are Added [A], Clean [C], Modified [M] and Missing [!]
      (as reported by :hg:`status`). The actions are Warn, Remove
      (from branch) and Delete (from disk):

      ======= == == == ==
               A  C  M  !
      ======= == == == ==
      none     W  RD W  R
      -f       R  RD RD R
      -A       W  W  W  R
      -Af      R  R  R  R
      ======= == == == ==

      Note that remove never deletes files in Added [A] state from the
      working directory, not even if option --force is specified.

    Returns 0 on success, 1 if any warnings encountered.
    """

    ret = 0
    after, force = opts.get('after'), opts.get('force')
    if not pats and not after:
        raise util.Abort(_('no files specified'))

    m = scmutil.match(repo[None], pats, opts)
    s = repo.status(match=m, clean=True)
    modified, added, deleted, clean = s[0], s[1], s[3], s[6]

    # warn about failure to delete explicit files/dirs
    wctx = repo[None]
    for f in m.files():
        if f in repo.dirstate or f in wctx.dirs():
            continue
        if os.path.exists(m.rel(f)):
            if os.path.isdir(m.rel(f)):
                ui.warn(_('not removing %s: no tracked files\n') % m.rel(f))
            else:
                ui.warn(_('not removing %s: file is untracked\n') % m.rel(f))
        # missing files will generate a warning elsewhere
        ret = 1

    # select the files to act on according to the table in the docstring
    if force:
        list = modified + deleted + clean + added
    elif after:
        list = deleted
        for f in modified + added + clean:
            ui.warn(_('not removing %s: file still exists\n') % m.rel(f))
            ret = 1
    else:
        list = deleted + clean
        for f in modified:
            ui.warn(_('not removing %s: file is modified (use -f'
                      ' to force removal)\n') % m.rel(f))
            ret = 1
        for f in added:
            ui.warn(_('not removing %s: file has been marked for add'
                      ' (use forget to undo)\n') % m.rel(f))
            ret = 1

    for f in sorted(list):
        if ui.verbose or not m.exact(f):
            ui.status(_('removing %s\n') % m.rel(f))

    wlock = repo.wlock()
    try:
        if not after:
            for f in list:
                if f in added:
                    continue # we never unlink added files on remove
                util.unlinkpath(repo.wjoin(f), ignoremissing=True)
        repo[None].forget(list)
    finally:
        wlock.release()

    return ret
@command('rename|move|mv',
    [('A', 'after', None, _('record a rename that has already occurred')),
    ('f', 'force', None, _('forcibly copy over an existing managed file')),
    ] + walkopts + dryrunopts,
    _('[OPTION]... SOURCE... DEST'))
def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion. If dest
    is a directory, copies are put in that directory. If dest is a
    file, there can only be one source.

    By default, this command copies the contents of files as they
    exist in the working directory. If invoked with -A/--after, the
    operation is recorded, but no copying is performed.

    This command takes effect at the next commit. To undo a rename
    before that, see :hg:`revert`.

    Returns 0 on success, 1 if errors are encountered.
    """
    # rename is copy + remove under a working-dir lock; cmdutil.copy
    # does the real work
    wlock = repo.wlock(False)
    try:
        return cmdutil.copy(ui, repo, pats, opts, rename=True)
    finally:
        wlock.release()
@command('resolve',
    [('a', 'all', None, _('select all unresolved files')),
    ('l', 'list', None, _('list state of files needing merge')),
    ('m', 'mark', None, _('mark files as resolved')),
    ('u', 'unmark', None, _('mark files as unresolved')),
    ('n', 'no-status', None, _('hide status prefix'))]
    + mergetoolopts + walkopts,
    _('[OPTION]... [FILE]...'))
def resolve(ui, repo, *pats, **opts):
    """redo merges or set/view the merge status of files

    Merges with unresolved conflicts are often the result of
    non-interactive merging using the ``internal:merge`` configuration
    setting, or a command-line merge tool like ``diff3``. The resolve
    command is used to manage the files involved in a merge, after
    :hg:`merge` has been run, and before :hg:`commit` is run (i.e. the
    working directory must have two parents). See :hg:`help
    merge-tools` for information on configuring merge tools.

    The resolve command can be used in the following ways:

    - :hg:`resolve [--tool TOOL] FILE...`: attempt to re-merge the specified
      files, discarding any previous merge attempts. Re-merging is not
      performed for files already marked as resolved. Use ``--all/-a``
      to select all unresolved files. ``--tool`` can be used to specify
      the merge tool used for the given files. It overrides the HGMERGE
      environment variable and your configuration files. Previous file
      contents are saved with a ``.orig`` suffix.

    - :hg:`resolve -m [FILE]`: mark a file as having been resolved
      (e.g. after having manually fixed-up the files). The default is
      to mark all unresolved files.

    - :hg:`resolve -u [FILE]...`: mark a file as unresolved. The
      default is to mark all resolved files.

    - :hg:`resolve -l`: list files which had or still have conflicts.
      In the printed list, ``U`` = unresolved and ``R`` = resolved.

    Note that Mercurial will not let you commit files with unresolved
    merge conflicts. You must use :hg:`resolve -m ...` before you can
    commit after a conflicting merge.

    Returns 0 on success, 1 if any files fail a resolve attempt.
    """

    all, mark, unmark, show, nostatus = \
        [opts.get(o) for o in 'all mark unmark list no_status'.split()]

    # the modes are mutually exclusive
    if (show and (mark or unmark)) or (mark and unmark):
        raise util.Abort(_("too many options specified"))
    if pats and all:
        raise util.Abort(_("can't specify --all and patterns"))
    if not (all or pats or show or mark or unmark):
        raise util.Abort(_('no files or directories specified; '
                           'use --all to remerge all files'))

    ms = mergemod.mergestate(repo)
    m = scmutil.match(repo[None], pats, opts)
    ret = 0

    for f in ms:
        if m(f):
            if show:
                if nostatus:
                    ui.write("%s\n" % f)
                else:
                    ui.write("%s %s\n" % (ms[f].upper(), f),
                             label='resolve.' +
                             {'u': 'unresolved', 'r': 'resolved'}[ms[f]])
            elif mark:
                ms.mark(f, "r")
            elif unmark:
                ms.mark(f, "u")
            else:
                wctx = repo[None]
                mctx = wctx.parents()[-1]

                # backup pre-resolve (merge uses .orig for its own purposes)
                a = repo.wjoin(f)
                util.copyfile(a, a + ".resolve")

                try:
                    # resolve file
                    ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
                    if ms.resolve(f, wctx, mctx):
                        ret = 1
                finally:
                    ui.setconfig('ui', 'forcemerge', '')
                    ms.commit()

                # replace filemerge's .orig file with our resolve file
                util.rename(a + ".resolve", a + ".orig")

    ms.commit()
    return ret
4921 @command('revert',
4926 @command('revert',
4922 [('a', 'all', None, _('revert all changes when no arguments given')),
4927 [('a', 'all', None, _('revert all changes when no arguments given')),
4923 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
4928 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
4924 ('r', 'rev', '', _('revert to the specified revision'), _('REV')),
4929 ('r', 'rev', '', _('revert to the specified revision'), _('REV')),
4925 ('C', 'no-backup', None, _('do not save backup copies of files')),
4930 ('C', 'no-backup', None, _('do not save backup copies of files')),
4926 ] + walkopts + dryrunopts,
4931 ] + walkopts + dryrunopts,
4927 _('[OPTION]... [-r REV] [NAME]...'))
4932 _('[OPTION]... [-r REV] [NAME]...'))
4928 def revert(ui, repo, *pats, **opts):
4933 def revert(ui, repo, *pats, **opts):
4929 """restore files to their checkout state
4934 """restore files to their checkout state
4930
4935
4931 .. note::
4936 .. note::
4932
4937
4933 To check out earlier revisions, you should use :hg:`update REV`.
4938 To check out earlier revisions, you should use :hg:`update REV`.
4934 To cancel an uncommitted merge (and lose your changes), use
4939 To cancel an uncommitted merge (and lose your changes), use
4935 :hg:`update --clean .`.
4940 :hg:`update --clean .`.
4936
4941
4937 With no revision specified, revert the specified files or directories
4942 With no revision specified, revert the specified files or directories
4938 to the contents they had in the parent of the working directory.
4943 to the contents they had in the parent of the working directory.
4939 This restores the contents of files to an unmodified
4944 This restores the contents of files to an unmodified
4940 state and unschedules adds, removes, copies, and renames. If the
4945 state and unschedules adds, removes, copies, and renames. If the
4941 working directory has two parents, you must explicitly specify a
4946 working directory has two parents, you must explicitly specify a
4942 revision.
4947 revision.
4943
4948
4944 Using the -r/--rev or -d/--date options, revert the given files or
4949 Using the -r/--rev or -d/--date options, revert the given files or
4945 directories to their states as of a specific revision. Because
4950 directories to their states as of a specific revision. Because
4946 revert does not change the working directory parents, this will
4951 revert does not change the working directory parents, this will
4947 cause these files to appear modified. This can be helpful to "back
4952 cause these files to appear modified. This can be helpful to "back
4948 out" some or all of an earlier change. See :hg:`backout` for a
4953 out" some or all of an earlier change. See :hg:`backout` for a
4949 related method.
4954 related method.
4950
4955
4951 Modified files are saved with a .orig suffix before reverting.
4956 Modified files are saved with a .orig suffix before reverting.
4952 To disable these backups, use --no-backup.
4957 To disable these backups, use --no-backup.
4953
4958
4954 See :hg:`help dates` for a list of formats valid for -d/--date.
4959 See :hg:`help dates` for a list of formats valid for -d/--date.
4955
4960
4956 Returns 0 on success.
4961 Returns 0 on success.
4957 """
4962 """
4958
4963
4959 if opts.get("date"):
4964 if opts.get("date"):
4960 if opts.get("rev"):
4965 if opts.get("rev"):
4961 raise util.Abort(_("you can't specify a revision and a date"))
4966 raise util.Abort(_("you can't specify a revision and a date"))
4962 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
4967 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
4963
4968
4964 parent, p2 = repo.dirstate.parents()
4969 parent, p2 = repo.dirstate.parents()
4965 if not opts.get('rev') and p2 != nullid:
4970 if not opts.get('rev') and p2 != nullid:
4966 # revert after merge is a trap for new users (issue2915)
4971 # revert after merge is a trap for new users (issue2915)
4967 raise util.Abort(_('uncommitted merge with no revision specified'),
4972 raise util.Abort(_('uncommitted merge with no revision specified'),
4968 hint=_('use "hg update" or see "hg help revert"'))
4973 hint=_('use "hg update" or see "hg help revert"'))
4969
4974
4970 ctx = scmutil.revsingle(repo, opts.get('rev'))
4975 ctx = scmutil.revsingle(repo, opts.get('rev'))
4971
4976
4972 if not pats and not opts.get('all'):
4977 if not pats and not opts.get('all'):
4973 msg = _("no files or directories specified")
4978 msg = _("no files or directories specified")
4974 if p2 != nullid:
4979 if p2 != nullid:
4975 hint = _("uncommitted merge, use --all to discard all changes,"
4980 hint = _("uncommitted merge, use --all to discard all changes,"
4976 " or 'hg update -C .' to abort the merge")
4981 " or 'hg update -C .' to abort the merge")
4977 raise util.Abort(msg, hint=hint)
4982 raise util.Abort(msg, hint=hint)
4978 dirty = util.any(repo.status())
4983 dirty = util.any(repo.status())
4979 node = ctx.node()
4984 node = ctx.node()
4980 if node != parent:
4985 if node != parent:
4981 if dirty:
4986 if dirty:
4982 hint = _("uncommitted changes, use --all to discard all"
4987 hint = _("uncommitted changes, use --all to discard all"
4983 " changes, or 'hg update %s' to update") % ctx.rev()
4988 " changes, or 'hg update %s' to update") % ctx.rev()
4984 else:
4989 else:
4985 hint = _("use --all to revert all files,"
4990 hint = _("use --all to revert all files,"
4986 " or 'hg update %s' to update") % ctx.rev()
4991 " or 'hg update %s' to update") % ctx.rev()
4987 elif dirty:
4992 elif dirty:
4988 hint = _("uncommitted changes, use --all to discard all changes")
4993 hint = _("uncommitted changes, use --all to discard all changes")
4989 else:
4994 else:
4990 hint = _("use --all to revert all files")
4995 hint = _("use --all to revert all files")
4991 raise util.Abort(msg, hint=hint)
4996 raise util.Abort(msg, hint=hint)
4992
4997
4993 return cmdutil.revert(ui, repo, ctx, (parent, p2), *pats, **opts)
4998 return cmdutil.revert(ui, repo, ctx, (parent, p2), *pats, **opts)
4994
4999
4995 @command('rollback', dryrunopts +
5000 @command('rollback', dryrunopts +
4996 [('f', 'force', False, _('ignore safety measures'))])
5001 [('f', 'force', False, _('ignore safety measures'))])
4997 def rollback(ui, repo, **opts):
5002 def rollback(ui, repo, **opts):
4998 """roll back the last transaction (dangerous)
5003 """roll back the last transaction (dangerous)
4999
5004
5000 This command should be used with care. There is only one level of
5005 This command should be used with care. There is only one level of
5001 rollback, and there is no way to undo a rollback. It will also
5006 rollback, and there is no way to undo a rollback. It will also
5002 restore the dirstate at the time of the last transaction, losing
5007 restore the dirstate at the time of the last transaction, losing
5003 any dirstate changes since that time. This command does not alter
5008 any dirstate changes since that time. This command does not alter
5004 the working directory.
5009 the working directory.
5005
5010
5006 Transactions are used to encapsulate the effects of all commands
5011 Transactions are used to encapsulate the effects of all commands
5007 that create new changesets or propagate existing changesets into a
5012 that create new changesets or propagate existing changesets into a
5008 repository.
5013 repository.
5009
5014
5010 .. container:: verbose
5015 .. container:: verbose
5011
5016
5012 For example, the following commands are transactional, and their
5017 For example, the following commands are transactional, and their
5013 effects can be rolled back:
5018 effects can be rolled back:
5014
5019
5015 - commit
5020 - commit
5016 - import
5021 - import
5017 - pull
5022 - pull
5018 - push (with this repository as the destination)
5023 - push (with this repository as the destination)
5019 - unbundle
5024 - unbundle
5020
5025
5021 To avoid permanent data loss, rollback will refuse to rollback a
5026 To avoid permanent data loss, rollback will refuse to rollback a
5022 commit transaction if it isn't checked out. Use --force to
5027 commit transaction if it isn't checked out. Use --force to
5023 override this protection.
5028 override this protection.
5024
5029
5025 This command is not intended for use on public repositories. Once
5030 This command is not intended for use on public repositories. Once
5026 changes are visible for pull by other users, rolling a transaction
5031 changes are visible for pull by other users, rolling a transaction
5027 back locally is ineffective (someone else may already have pulled
5032 back locally is ineffective (someone else may already have pulled
5028 the changes). Furthermore, a race is possible with readers of the
5033 the changes). Furthermore, a race is possible with readers of the
5029 repository; for example an in-progress pull from the repository
5034 repository; for example an in-progress pull from the repository
5030 may fail if a rollback is performed.
5035 may fail if a rollback is performed.
5031
5036
5032 Returns 0 on success, 1 if no rollback data is available.
5037 Returns 0 on success, 1 if no rollback data is available.
5033 """
5038 """
5034 return repo.rollback(dryrun=opts.get('dry_run'),
5039 return repo.rollback(dryrun=opts.get('dry_run'),
5035 force=opts.get('force'))
5040 force=opts.get('force'))
5036
5041
5037 @command('root', [])
5042 @command('root', [])
5038 def root(ui, repo):
5043 def root(ui, repo):
5039 """print the root (top) of the current working directory
5044 """print the root (top) of the current working directory
5040
5045
5041 Print the root directory of the current repository.
5046 Print the root directory of the current repository.
5042
5047
5043 Returns 0 on success.
5048 Returns 0 on success.
5044 """
5049 """
5045 ui.write(repo.root + "\n")
5050 ui.write(repo.root + "\n")
5046
5051
5047 @command('^serve',
5052 @command('^serve',
5048 [('A', 'accesslog', '', _('name of access log file to write to'),
5053 [('A', 'accesslog', '', _('name of access log file to write to'),
5049 _('FILE')),
5054 _('FILE')),
5050 ('d', 'daemon', None, _('run server in background')),
5055 ('d', 'daemon', None, _('run server in background')),
5051 ('', 'daemon-pipefds', '', _('used internally by daemon mode'), _('NUM')),
5056 ('', 'daemon-pipefds', '', _('used internally by daemon mode'), _('NUM')),
5052 ('E', 'errorlog', '', _('name of error log file to write to'), _('FILE')),
5057 ('E', 'errorlog', '', _('name of error log file to write to'), _('FILE')),
5053 # use string type, then we can check if something was passed
5058 # use string type, then we can check if something was passed
5054 ('p', 'port', '', _('port to listen on (default: 8000)'), _('PORT')),
5059 ('p', 'port', '', _('port to listen on (default: 8000)'), _('PORT')),
5055 ('a', 'address', '', _('address to listen on (default: all interfaces)'),
5060 ('a', 'address', '', _('address to listen on (default: all interfaces)'),
5056 _('ADDR')),
5061 _('ADDR')),
5057 ('', 'prefix', '', _('prefix path to serve from (default: server root)'),
5062 ('', 'prefix', '', _('prefix path to serve from (default: server root)'),
5058 _('PREFIX')),
5063 _('PREFIX')),
5059 ('n', 'name', '',
5064 ('n', 'name', '',
5060 _('name to show in web pages (default: working directory)'), _('NAME')),
5065 _('name to show in web pages (default: working directory)'), _('NAME')),
5061 ('', 'web-conf', '',
5066 ('', 'web-conf', '',
5062 _('name of the hgweb config file (see "hg help hgweb")'), _('FILE')),
5067 _('name of the hgweb config file (see "hg help hgweb")'), _('FILE')),
5063 ('', 'webdir-conf', '', _('name of the hgweb config file (DEPRECATED)'),
5068 ('', 'webdir-conf', '', _('name of the hgweb config file (DEPRECATED)'),
5064 _('FILE')),
5069 _('FILE')),
5065 ('', 'pid-file', '', _('name of file to write process ID to'), _('FILE')),
5070 ('', 'pid-file', '', _('name of file to write process ID to'), _('FILE')),
5066 ('', 'stdio', None, _('for remote clients')),
5071 ('', 'stdio', None, _('for remote clients')),
5067 ('', 'cmdserver', '', _('for remote clients'), _('MODE')),
5072 ('', 'cmdserver', '', _('for remote clients'), _('MODE')),
5068 ('t', 'templates', '', _('web templates to use'), _('TEMPLATE')),
5073 ('t', 'templates', '', _('web templates to use'), _('TEMPLATE')),
5069 ('', 'style', '', _('template style to use'), _('STYLE')),
5074 ('', 'style', '', _('template style to use'), _('STYLE')),
5070 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
5075 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
5071 ('', 'certificate', '', _('SSL certificate file'), _('FILE'))],
5076 ('', 'certificate', '', _('SSL certificate file'), _('FILE'))],
5072 _('[OPTION]...'))
5077 _('[OPTION]...'))
5073 def serve(ui, repo, **opts):
5078 def serve(ui, repo, **opts):
5074 """start stand-alone webserver
5079 """start stand-alone webserver
5075
5080
5076 Start a local HTTP repository browser and pull server. You can use
5081 Start a local HTTP repository browser and pull server. You can use
5077 this for ad-hoc sharing and browsing of repositories. It is
5082 this for ad-hoc sharing and browsing of repositories. It is
5078 recommended to use a real web server to serve a repository for
5083 recommended to use a real web server to serve a repository for
5079 longer periods of time.
5084 longer periods of time.
5080
5085
5081 Please note that the server does not implement access control.
5086 Please note that the server does not implement access control.
5082 This means that, by default, anybody can read from the server and
5087 This means that, by default, anybody can read from the server and
5083 nobody can write to it by default. Set the ``web.allow_push``
5088 nobody can write to it by default. Set the ``web.allow_push``
5084 option to ``*`` to allow everybody to push to the server. You
5089 option to ``*`` to allow everybody to push to the server. You
5085 should use a real web server if you need to authenticate users.
5090 should use a real web server if you need to authenticate users.
5086
5091
5087 By default, the server logs accesses to stdout and errors to
5092 By default, the server logs accesses to stdout and errors to
5088 stderr. Use the -A/--accesslog and -E/--errorlog options to log to
5093 stderr. Use the -A/--accesslog and -E/--errorlog options to log to
5089 files.
5094 files.
5090
5095
5091 To have the server choose a free port number to listen on, specify
5096 To have the server choose a free port number to listen on, specify
5092 a port number of 0; in this case, the server will print the port
5097 a port number of 0; in this case, the server will print the port
5093 number it uses.
5098 number it uses.
5094
5099
5095 Returns 0 on success.
5100 Returns 0 on success.
5096 """
5101 """
5097
5102
5098 if opts["stdio"] and opts["cmdserver"]:
5103 if opts["stdio"] and opts["cmdserver"]:
5099 raise util.Abort(_("cannot use --stdio with --cmdserver"))
5104 raise util.Abort(_("cannot use --stdio with --cmdserver"))
5100
5105
5101 def checkrepo():
5106 def checkrepo():
5102 if repo is None:
5107 if repo is None:
5103 raise error.RepoError(_("there is no Mercurial repository here"
5108 raise error.RepoError(_("there is no Mercurial repository here"
5104 " (.hg not found)"))
5109 " (.hg not found)"))
5105
5110
5106 if opts["stdio"]:
5111 if opts["stdio"]:
5107 checkrepo()
5112 checkrepo()
5108 s = sshserver.sshserver(ui, repo)
5113 s = sshserver.sshserver(ui, repo)
5109 s.serve_forever()
5114 s.serve_forever()
5110
5115
5111 if opts["cmdserver"]:
5116 if opts["cmdserver"]:
5112 checkrepo()
5117 checkrepo()
5113 s = commandserver.server(ui, repo, opts["cmdserver"])
5118 s = commandserver.server(ui, repo, opts["cmdserver"])
5114 return s.serve()
5119 return s.serve()
5115
5120
5116 # this way we can check if something was given in the command-line
5121 # this way we can check if something was given in the command-line
5117 if opts.get('port'):
5122 if opts.get('port'):
5118 opts['port'] = util.getport(opts.get('port'))
5123 opts['port'] = util.getport(opts.get('port'))
5119
5124
5120 baseui = repo and repo.baseui or ui
5125 baseui = repo and repo.baseui or ui
5121 optlist = ("name templates style address port prefix ipv6"
5126 optlist = ("name templates style address port prefix ipv6"
5122 " accesslog errorlog certificate encoding")
5127 " accesslog errorlog certificate encoding")
5123 for o in optlist.split():
5128 for o in optlist.split():
5124 val = opts.get(o, '')
5129 val = opts.get(o, '')
5125 if val in (None, ''): # should check against default options instead
5130 if val in (None, ''): # should check against default options instead
5126 continue
5131 continue
5127 baseui.setconfig("web", o, val)
5132 baseui.setconfig("web", o, val)
5128 if repo and repo.ui != baseui:
5133 if repo and repo.ui != baseui:
5129 repo.ui.setconfig("web", o, val)
5134 repo.ui.setconfig("web", o, val)
5130
5135
5131 o = opts.get('web_conf') or opts.get('webdir_conf')
5136 o = opts.get('web_conf') or opts.get('webdir_conf')
5132 if not o:
5137 if not o:
5133 if not repo:
5138 if not repo:
5134 raise error.RepoError(_("there is no Mercurial repository"
5139 raise error.RepoError(_("there is no Mercurial repository"
5135 " here (.hg not found)"))
5140 " here (.hg not found)"))
5136 o = repo
5141 o = repo
5137
5142
5138 app = hgweb.hgweb(o, baseui=baseui)
5143 app = hgweb.hgweb(o, baseui=baseui)
5139
5144
5140 class service(object):
5145 class service(object):
5141 def init(self):
5146 def init(self):
5142 util.setsignalhandler()
5147 util.setsignalhandler()
5143 self.httpd = hgweb.server.create_server(ui, app)
5148 self.httpd = hgweb.server.create_server(ui, app)
5144
5149
5145 if opts['port'] and not ui.verbose:
5150 if opts['port'] and not ui.verbose:
5146 return
5151 return
5147
5152
5148 if self.httpd.prefix:
5153 if self.httpd.prefix:
5149 prefix = self.httpd.prefix.strip('/') + '/'
5154 prefix = self.httpd.prefix.strip('/') + '/'
5150 else:
5155 else:
5151 prefix = ''
5156 prefix = ''
5152
5157
5153 port = ':%d' % self.httpd.port
5158 port = ':%d' % self.httpd.port
5154 if port == ':80':
5159 if port == ':80':
5155 port = ''
5160 port = ''
5156
5161
5157 bindaddr = self.httpd.addr
5162 bindaddr = self.httpd.addr
5158 if bindaddr == '0.0.0.0':
5163 if bindaddr == '0.0.0.0':
5159 bindaddr = '*'
5164 bindaddr = '*'
5160 elif ':' in bindaddr: # IPv6
5165 elif ':' in bindaddr: # IPv6
5161 bindaddr = '[%s]' % bindaddr
5166 bindaddr = '[%s]' % bindaddr
5162
5167
5163 fqaddr = self.httpd.fqaddr
5168 fqaddr = self.httpd.fqaddr
5164 if ':' in fqaddr:
5169 if ':' in fqaddr:
5165 fqaddr = '[%s]' % fqaddr
5170 fqaddr = '[%s]' % fqaddr
5166 if opts['port']:
5171 if opts['port']:
5167 write = ui.status
5172 write = ui.status
5168 else:
5173 else:
5169 write = ui.write
5174 write = ui.write
5170 write(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
5175 write(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
5171 (fqaddr, port, prefix, bindaddr, self.httpd.port))
5176 (fqaddr, port, prefix, bindaddr, self.httpd.port))
5172
5177
5173 def run(self):
5178 def run(self):
5174 self.httpd.serve_forever()
5179 self.httpd.serve_forever()
5175
5180
5176 service = service()
5181 service = service()
5177
5182
5178 cmdutil.service(opts, initfn=service.init, runfn=service.run)
5183 cmdutil.service(opts, initfn=service.init, runfn=service.run)
5179
5184
5180 @command('showconfig|debugconfig',
5185 @command('showconfig|debugconfig',
5181 [('u', 'untrusted', None, _('show untrusted configuration options'))],
5186 [('u', 'untrusted', None, _('show untrusted configuration options'))],
5182 _('[-u] [NAME]...'))
5187 _('[-u] [NAME]...'))
5183 def showconfig(ui, repo, *values, **opts):
5188 def showconfig(ui, repo, *values, **opts):
5184 """show combined config settings from all hgrc files
5189 """show combined config settings from all hgrc files
5185
5190
5186 With no arguments, print names and values of all config items.
5191 With no arguments, print names and values of all config items.
5187
5192
5188 With one argument of the form section.name, print just the value
5193 With one argument of the form section.name, print just the value
5189 of that config item.
5194 of that config item.
5190
5195
5191 With multiple arguments, print names and values of all config
5196 With multiple arguments, print names and values of all config
5192 items with matching section names.
5197 items with matching section names.
5193
5198
5194 With --debug, the source (filename and line number) is printed
5199 With --debug, the source (filename and line number) is printed
5195 for each config item.
5200 for each config item.
5196
5201
5197 Returns 0 on success.
5202 Returns 0 on success.
5198 """
5203 """
5199
5204
5200 for f in scmutil.rcpath():
5205 for f in scmutil.rcpath():
5201 ui.debug('read config from: %s\n' % f)
5206 ui.debug('read config from: %s\n' % f)
5202 untrusted = bool(opts.get('untrusted'))
5207 untrusted = bool(opts.get('untrusted'))
5203 if values:
5208 if values:
5204 sections = [v for v in values if '.' not in v]
5209 sections = [v for v in values if '.' not in v]
5205 items = [v for v in values if '.' in v]
5210 items = [v for v in values if '.' in v]
5206 if len(items) > 1 or items and sections:
5211 if len(items) > 1 or items and sections:
5207 raise util.Abort(_('only one config item permitted'))
5212 raise util.Abort(_('only one config item permitted'))
5208 for section, name, value in ui.walkconfig(untrusted=untrusted):
5213 for section, name, value in ui.walkconfig(untrusted=untrusted):
5209 value = str(value).replace('\n', '\\n')
5214 value = str(value).replace('\n', '\\n')
5210 sectname = section + '.' + name
5215 sectname = section + '.' + name
5211 if values:
5216 if values:
5212 for v in values:
5217 for v in values:
5213 if v == section:
5218 if v == section:
5214 ui.debug('%s: ' %
5219 ui.debug('%s: ' %
5215 ui.configsource(section, name, untrusted))
5220 ui.configsource(section, name, untrusted))
5216 ui.write('%s=%s\n' % (sectname, value))
5221 ui.write('%s=%s\n' % (sectname, value))
5217 elif v == sectname:
5222 elif v == sectname:
5218 ui.debug('%s: ' %
5223 ui.debug('%s: ' %
5219 ui.configsource(section, name, untrusted))
5224 ui.configsource(section, name, untrusted))
5220 ui.write(value, '\n')
5225 ui.write(value, '\n')
5221 else:
5226 else:
5222 ui.debug('%s: ' %
5227 ui.debug('%s: ' %
5223 ui.configsource(section, name, untrusted))
5228 ui.configsource(section, name, untrusted))
5224 ui.write('%s=%s\n' % (sectname, value))
5229 ui.write('%s=%s\n' % (sectname, value))
5225
5230
5226 @command('^status|st',
5231 @command('^status|st',
5227 [('A', 'all', None, _('show status of all files')),
5232 [('A', 'all', None, _('show status of all files')),
5228 ('m', 'modified', None, _('show only modified files')),
5233 ('m', 'modified', None, _('show only modified files')),
5229 ('a', 'added', None, _('show only added files')),
5234 ('a', 'added', None, _('show only added files')),
5230 ('r', 'removed', None, _('show only removed files')),
5235 ('r', 'removed', None, _('show only removed files')),
5231 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
5236 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
5232 ('c', 'clean', None, _('show only files without changes')),
5237 ('c', 'clean', None, _('show only files without changes')),
5233 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
5238 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
5234 ('i', 'ignored', None, _('show only ignored files')),
5239 ('i', 'ignored', None, _('show only ignored files')),
5235 ('n', 'no-status', None, _('hide status prefix')),
5240 ('n', 'no-status', None, _('hide status prefix')),
5236 ('C', 'copies', None, _('show source of copied files')),
5241 ('C', 'copies', None, _('show source of copied files')),
5237 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
5242 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
5238 ('', 'rev', [], _('show difference from revision'), _('REV')),
5243 ('', 'rev', [], _('show difference from revision'), _('REV')),
5239 ('', 'change', '', _('list the changed files of a revision'), _('REV')),
5244 ('', 'change', '', _('list the changed files of a revision'), _('REV')),
5240 ] + walkopts + subrepoopts,
5245 ] + walkopts + subrepoopts,
5241 _('[OPTION]... [FILE]...'))
5246 _('[OPTION]... [FILE]...'))
5242 def status(ui, repo, *pats, **opts):
5247 def status(ui, repo, *pats, **opts):
5243 """show changed files in the working directory
5248 """show changed files in the working directory
5244
5249
5245 Show status of files in the repository. If names are given, only
5250 Show status of files in the repository. If names are given, only
5246 files that match are shown. Files that are clean or ignored or
5251 files that match are shown. Files that are clean or ignored or
5247 the source of a copy/move operation, are not listed unless
5252 the source of a copy/move operation, are not listed unless
5248 -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
5253 -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
5249 Unless options described with "show only ..." are given, the
5254 Unless options described with "show only ..." are given, the
5250 options -mardu are used.
5255 options -mardu are used.
5251
5256
5252 Option -q/--quiet hides untracked (unknown and ignored) files
5257 Option -q/--quiet hides untracked (unknown and ignored) files
5253 unless explicitly requested with -u/--unknown or -i/--ignored.
5258 unless explicitly requested with -u/--unknown or -i/--ignored.
5254
5259
5255 .. note::
5260 .. note::
5256 status may appear to disagree with diff if permissions have
5261 status may appear to disagree with diff if permissions have
5257 changed or a merge has occurred. The standard diff format does
5262 changed or a merge has occurred. The standard diff format does
5258 not report permission changes and diff only reports changes
5263 not report permission changes and diff only reports changes
5259 relative to one merge parent.
5264 relative to one merge parent.
5260
5265
5261 If one revision is given, it is used as the base revision.
5266 If one revision is given, it is used as the base revision.
5262 If two revisions are given, the differences between them are
5267 If two revisions are given, the differences between them are
5263 shown. The --change option can also be used as a shortcut to list
5268 shown. The --change option can also be used as a shortcut to list
5264 the changed files of a revision from its first parent.
5269 the changed files of a revision from its first parent.
5265
5270
5266 The codes used to show the status of files are::
5271 The codes used to show the status of files are::
5267
5272
5268 M = modified
5273 M = modified
5269 A = added
5274 A = added
5270 R = removed
5275 R = removed
5271 C = clean
5276 C = clean
5272 ! = missing (deleted by non-hg command, but still tracked)
5277 ! = missing (deleted by non-hg command, but still tracked)
5273 ? = not tracked
5278 ? = not tracked
5274 I = ignored
5279 I = ignored
5275 = origin of the previous file listed as A (added)
5280 = origin of the previous file listed as A (added)
5276
5281
5277 .. container:: verbose
5282 .. container:: verbose
5278
5283
5279 Examples:
5284 Examples:
5280
5285
5281 - show changes in the working directory relative to a
5286 - show changes in the working directory relative to a
5282 changeset::
5287 changeset::
5283
5288
5284 hg status --rev 9353
5289 hg status --rev 9353
5285
5290
5286 - show all changes including copies in an existing changeset::
5291 - show all changes including copies in an existing changeset::
5287
5292
5288 hg status --copies --change 9353
5293 hg status --copies --change 9353
5289
5294
5290 - get a NUL separated list of added files, suitable for xargs::
5295 - get a NUL separated list of added files, suitable for xargs::
5291
5296
5292 hg status -an0
5297 hg status -an0
5293
5298
5294 Returns 0 on success.
5299 Returns 0 on success.
5295 """
5300 """
5296
5301
5297 revs = opts.get('rev')
5302 revs = opts.get('rev')
5298 change = opts.get('change')
5303 change = opts.get('change')
5299
5304
5300 if revs and change:
5305 if revs and change:
5301 msg = _('cannot specify --rev and --change at the same time')
5306 msg = _('cannot specify --rev and --change at the same time')
5302 raise util.Abort(msg)
5307 raise util.Abort(msg)
5303 elif change:
5308 elif change:
5304 node2 = scmutil.revsingle(repo, change, None).node()
5309 node2 = scmutil.revsingle(repo, change, None).node()
5305 node1 = repo[node2].p1().node()
5310 node1 = repo[node2].p1().node()
5306 else:
5311 else:
5307 node1, node2 = scmutil.revpair(repo, revs)
5312 node1, node2 = scmutil.revpair(repo, revs)
5308
5313
5309 cwd = (pats and repo.getcwd()) or ''
5314 cwd = (pats and repo.getcwd()) or ''
5310 end = opts.get('print0') and '\0' or '\n'
5315 end = opts.get('print0') and '\0' or '\n'
5311 copy = {}
5316 copy = {}
5312 states = 'modified added removed deleted unknown ignored clean'.split()
5317 states = 'modified added removed deleted unknown ignored clean'.split()
5313 show = [k for k in states if opts.get(k)]
5318 show = [k for k in states if opts.get(k)]
5314 if opts.get('all'):
5319 if opts.get('all'):
5315 show += ui.quiet and (states[:4] + ['clean']) or states
5320 show += ui.quiet and (states[:4] + ['clean']) or states
5316 if not show:
5321 if not show:
5317 show = ui.quiet and states[:4] or states[:5]
5322 show = ui.quiet and states[:4] or states[:5]
5318
5323
5319 stat = repo.status(node1, node2, scmutil.match(repo[node2], pats, opts),
5324 stat = repo.status(node1, node2, scmutil.match(repo[node2], pats, opts),
5320 'ignored' in show, 'clean' in show, 'unknown' in show,
5325 'ignored' in show, 'clean' in show, 'unknown' in show,
5321 opts.get('subrepos'))
5326 opts.get('subrepos'))
5322 changestates = zip(states, 'MAR!?IC', stat)
5327 changestates = zip(states, 'MAR!?IC', stat)
5323
5328
5324 if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
5329 if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
5325 copy = copies.pathcopies(repo[node1], repo[node2])
5330 copy = copies.pathcopies(repo[node1], repo[node2])
5326
5331
5327 fm = ui.formatter('status', opts)
5332 fm = ui.formatter('status', opts)
5328 fmt = '%s' + end
5333 fmt = '%s' + end
5329 showchar = not opts.get('no_status')
5334 showchar = not opts.get('no_status')
5330
5335
5331 for state, char, files in changestates:
5336 for state, char, files in changestates:
5332 if state in show:
5337 if state in show:
5333 label = 'status.' + state
5338 label = 'status.' + state
5334 for f in files:
5339 for f in files:
5335 fm.startitem()
5340 fm.startitem()
5336 fm.condwrite(showchar, 'status', '%s ', char, label=label)
5341 fm.condwrite(showchar, 'status', '%s ', char, label=label)
5337 fm.write('path', fmt, repo.pathto(f, cwd), label=label)
5342 fm.write('path', fmt, repo.pathto(f, cwd), label=label)
5338 if f in copy:
5343 if f in copy:
5339 fm.write("copy", ' %s' + end, repo.pathto(copy[f], cwd),
5344 fm.write("copy", ' %s' + end, repo.pathto(copy[f], cwd),
5340 label='status.copied')
5345 label='status.copied')
5341 fm.end()
5346 fm.end()
5342
5347
@command('^summary|sum',
    [('', 'remote', None, _('check for push and pull'))], '[--remote]')
def summary(ui, repo, **opts):
    """summarize working directory state

    This generates a brief summary of the working directory state,
    including parents, branch, commit status, and available updates.

    With the --remote option, this will check the default paths for
    incoming and outgoing changes. This can be time-consuming.

    Returns 0 on success.
    """

    ctx = repo[None]            # working-directory context
    parents = ctx.parents()
    pnode = parents[0].node()
    marks = []                  # bookmarks sitting on the wdir parents

    for p in parents:
        # label with log.changeset (instead of log.parent) since this
        # shows a working directory parent *changeset*:
        # i18n: column positioning for "hg summary"
        ui.write(_('parent: %d:%s ') % (p.rev(), str(p)),
                 label='log.changeset changeset.%s' % p.phasestr())
        ui.write(' '.join(p.tags()), label='log.tag')
        if p.bookmarks():
            marks.extend(p.bookmarks())
        if p.rev() == -1:
            # null parent: distinguish an empty repo from "nothing checked out"
            if not len(repo):
                ui.write(_(' (empty repository)'))
            else:
                ui.write(_(' (no revision checked out)'))
        ui.write('\n')
        if p.description():
            # first line of the commit message only
            ui.status(' ' + p.description().splitlines()[0].strip() + '\n',
                      label='log.summary')

    branch = ctx.branch()
    bheads = repo.branchheads(branch)
    # i18n: column positioning for "hg summary"
    m = _('branch: %s\n') % branch
    if branch != 'default':
        ui.write(m, label='log.branch')
    else:
        # the default branch is demoted to status-level output
        ui.status(m, label='log.branch')

    if marks:
        current = repo._bookmarkcurrent
        # i18n: column positioning for "hg summary"
        ui.write(_('bookmarks:'), label='log.bookmark')
        if current is not None:
            if current in marks:
                # the active bookmark is on a parent: mark it with '*'
                ui.write(' *' + current, label='bookmarks.current')
                marks.remove(current)
            else:
                ui.write(' [%s]' % current, label='bookmarks.current')
        for m in marks:
            ui.write(' ' + m, label='log.bookmark')
        ui.write('\n', label='log.bookmark')

    # st starts as [modified, added, removed, deleted, unknown, ignored]
    st = list(repo.status(unknown=True))[:6]

    # reclassify dirstate copies: a copy whose source is in 'removed'
    # (st[2]) is really a rename; either way drop the destination from
    # 'added' (st[1]) so files are not double-counted
    c = repo.dirstate.copies()
    copied, renamed = [], []
    for d, s in c.iteritems():
        if s in st[2]:
            st[2].remove(s)
            renamed.append(d)
        else:
            copied.append(d)
        if d in st[1]:
            st[1].remove(d)
    st.insert(3, renamed)
    st.insert(4, copied)

    # files left unresolved by an in-progress merge
    ms = mergemod.mergestate(repo)
    st.append([f for f in ms if ms[f] == 'u'])

    # dirty subrepositories
    subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
    st.append(subs)

    # st is now: [modified, added, removed, renamed, copied, deleted,
    #             unknown, ignored, unresolved, subrepos] -- one label each
    labels = [ui.label(_('%d modified'), 'status.modified'),
              ui.label(_('%d added'), 'status.added'),
              ui.label(_('%d removed'), 'status.removed'),
              ui.label(_('%d renamed'), 'status.copied'),
              ui.label(_('%d copied'), 'status.copied'),
              ui.label(_('%d deleted'), 'status.deleted'),
              ui.label(_('%d unknown'), 'status.unknown'),
              ui.label(_('%d ignored'), 'status.ignored'),
              ui.label(_('%d unresolved'), 'resolve.unresolved'),
              ui.label(_('%d subrepos'), 'status.modified')]
    t = []
    for s, l in zip(st, labels):
        if s:
            t.append(l % len(s))

    t = ', '.join(t)
    cleanworkdir = False

    if len(parents) > 1:
        t += _(' (merge)')
    elif branch != parents[0].branch():
        t += _(' (new branch)')
    elif (parents[0].closesbranch() and
          pnode in repo.branchheads(branch, closed=True)):
        t += _(' (head closed)')
    elif not (st[0] or st[1] or st[2] or st[3] or st[4] or st[9]):
        # no modified/added/removed/renamed/copied files and no dirty
        # subrepos (st[9])
        t += _(' (clean)')
        cleanworkdir = True
    elif pnode not in bheads:
        t += _(' (new branch head)')

    if cleanworkdir:
        # i18n: column positioning for "hg summary"
        ui.status(_('commit: %s\n') % t.strip())
    else:
        # i18n: column positioning for "hg summary"
        ui.write(_('commit: %s\n') % t.strip())

    # all ancestors of branch heads - all ancestors of parent = new csets
    new = [0] * len(repo)
    cl = repo.changelog
    for a in [cl.rev(n) for n in bheads]:
        new[a] = 1
    for a in cl.ancestors([cl.rev(n) for n in bheads]):
        new[a] = 1
    for a in [p.rev() for p in parents]:
        if a >= 0:
            new[a] = 0
    for a in cl.ancestors([p.rev() for p in parents]):
        new[a] = 0
    new = sum(new)

    if new == 0:
        # i18n: column positioning for "hg summary"
        ui.status(_('update: (current)\n'))
    elif pnode not in bheads:
        # i18n: column positioning for "hg summary"
        ui.write(_('update: %d new changesets (update)\n') % new)
    else:
        # i18n: column positioning for "hg summary"
        ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
                 (new, len(bheads)))

    if opts.get('remote'):
        t = []
        source, branches = hg.parseurl(ui.expandpath('default'))
        sbranch = branches[0]
        other = hg.peer(repo, {}, source)
        revs, checkout = hg.addbranchrevs(repo, other, branches,
                                          opts.get('rev'))
        if revs:
            revs = [other.lookup(rev) for rev in revs]
        ui.debug('comparing with %s\n' % util.hidepassword(source))
        # buffer discovery chatter; only the summary line is wanted
        repo.ui.pushbuffer()
        commoninc = discovery.findcommonincoming(repo, other, heads=revs)
        _common, incoming, _rheads = commoninc
        repo.ui.popbuffer()
        if incoming:
            t.append(_('1 or more incoming'))

        dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
        dbranch = branches[0]
        revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
        if source != dest:
            other = hg.peer(repo, {}, dest)
            ui.debug('comparing with %s\n' % util.hidepassword(dest))
        if (source != dest or (sbranch is not None and sbranch != dbranch)):
            # incoming discovery result cannot be reused against a
            # different destination or branch
            commoninc = None
        if revs:
            revs = [repo.lookup(rev) for rev in revs]
        repo.ui.pushbuffer()
        outgoing = discovery.findcommonoutgoing(repo, other, onlyheads=revs,
                                                commoninc=commoninc)
        repo.ui.popbuffer()
        o = outgoing.missing
        if o:
            t.append(_('%d outgoing') % len(o))
        if 'bookmarks' in other.listkeys('namespaces'):
            lmarks = repo.listkeys('bookmarks')
            rmarks = other.listkeys('bookmarks')
            diff = set(rmarks) - set(lmarks)
            if len(diff) > 0:
                t.append(_('%d incoming bookmarks') % len(diff))
            diff = set(lmarks) - set(rmarks)
            if len(diff) > 0:
                t.append(_('%d outgoing bookmarks') % len(diff))

        if t:
            # i18n: column positioning for "hg summary"
            ui.write(_('remote: %s\n') % (', '.join(t)))
        else:
            # i18n: column positioning for "hg summary"
            ui.status(_('remote: (synced)\n'))
5538
5543
@command('tag',
    [('f', 'force', None, _('force tag')),
    ('l', 'local', None, _('make the tag local')),
    ('r', 'rev', '', _('revision to tag'), _('REV')),
    ('', 'remove', None, _('remove a tag')),
    # -l/--local is already there, commitopts cannot be used
    ('e', 'edit', None, _('edit commit message')),
    ('m', 'message', '', _('use <text> as commit message'), _('TEXT')),
    ] + commitopts2,
    _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...'))
def tag(ui, repo, name1, *names, **opts):
    """add one or more tags for the current or given revision

    Name a particular revision using <name>.

    Tags are used to name particular revisions of the repository and are
    very useful to compare different revisions, to go back to significant
    earlier versions or to mark branch points as releases, etc. Changing
    an existing tag is normally disallowed; use -f/--force to override.

    If no revision is given, the parent of the working directory is
    used, or tip if no revision is checked out.

    To facilitate version control, distribution, and merging of tags,
    they are stored as a file named ".hgtags" which is managed similarly
    to other project files and can be hand-edited if necessary. This
    also means that tagging creates a new commit. The file
    ".hg/localtags" is used for local tags (not shared among
    repositories).

    Tag commits are usually made at the head of a branch. If the parent
    of the working directory is not a branch head, :hg:`tag` aborts; use
    -f/--force to force the tag commit to be based on a non-head
    changeset.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    Since tag names have priority over branch names during revision
    lookup, using an existing branch name as a tag name is discouraged.

    Returns 0 on success.
    """
    wlock = lock = None
    try:
        # wlock before lock: standard Mercurial lock-ordering discipline
        wlock = repo.wlock()
        lock = repo.lock()
        rev_ = "."          # default: tag the working directory parent
        names = [t.strip() for t in (name1,) + names]
        if len(names) != len(set(names)):
            raise util.Abort(_('tag names must be unique'))
        for n in names:
            scmutil.checknewlabel(repo, n, 'tag')
            if not n:
                raise util.Abort(_('tag names cannot consist entirely of '
                                   'whitespace'))
        if opts.get('rev') and opts.get('remove'):
            raise util.Abort(_("--rev and --remove are incompatible"))
        if opts.get('rev'):
            rev_ = opts['rev']
        message = opts.get('message')
        if opts.get('remove'):
            # removal only applies to tags of the matching scope
            expectedtype = opts.get('local') and 'local' or 'global'
            for n in names:
                if not repo.tagtype(n):
                    raise util.Abort(_("tag '%s' does not exist") % n)
                if repo.tagtype(n) != expectedtype:
                    if expectedtype == 'global':
                        raise util.Abort(_("tag '%s' is not a global tag") % n)
                    else:
                        raise util.Abort(_("tag '%s' is not a local tag") % n)
            # pointing a tag at nullid is how removal is recorded
            rev_ = nullid
            if not message:
                # we don't translate commit messages
                message = 'Removed tag %s' % ', '.join(names)
        elif not opts.get('force'):
            for n in names:
                if n in repo.tags():
                    raise util.Abort(_("tag '%s' already exists "
                                       "(use -f to force)") % n)
        if not opts.get('local'):
            # a global tag creates a commit, so the wdir must be sane:
            # no uncommitted merge, and parent at a branch head (or -f)
            p1, p2 = repo.dirstate.parents()
            if p2 != nullid:
                raise util.Abort(_('uncommitted merge'))
            bheads = repo.branchheads()
            if not opts.get('force') and bheads and p1 not in bheads:
                raise util.Abort(_('not at a branch head (use -f to force)'))
        r = scmutil.revsingle(repo, rev_).node()

        if not message:
            # we don't translate commit messages
            message = ('Added tag %s for changeset %s' %
                       (', '.join(names), short(r)))

        date = opts.get('date')
        if date:
            date = util.parsedate(date)

        if opts.get('edit'):
            message = ui.edit(message, ui.username())

        # don't allow tagging the null rev
        if (not opts.get('remove') and
            scmutil.revsingle(repo, rev_).rev() == nullrev):
            raise util.Abort(_("cannot tag null revision"))

        repo.tag(names, r, message, opts.get('local'), opts.get('user'), date)
    finally:
        release(lock, wlock)
5647
5652
@command('tags', [], '')
def tags(ui, repo, **opts):
    """list repository tags

    This lists both regular and local tags. When the -v/--verbose
    switch is used, a third column "local" is printed for local tags.

    Returns 0 on success.
    """

    fm = ui.formatter('tags', opts)
    # full hashes in debug mode, short ones otherwise
    hexfunc = hex if ui.debugflag else short
    tagtype = ""

    for name, node in reversed(repo.tagslist()):
        # pick per-row colour label and the optional verbose "local" column
        if repo.tagtype(name) == 'local':
            label, tagtype = 'tags.local', 'local'
        else:
            label, tagtype = 'tags.normal', ''
        hexnode = hexfunc(node)

        fm.startitem()
        fm.write('tag', '%s', name, label=label)
        # pad the tag column out to 30 display cells before rev:hash
        pad = " " * (30 - encoding.colwidth(name))
        fm.condwrite(not ui.quiet, 'rev id', pad + ' %5d:%s',
                     repo.changelog.rev(node), hexnode, label=label)
        fm.condwrite(ui.verbose and tagtype, 'type', ' %s',
                     tagtype, label=label)
        fm.plain('\n')
    fm.end()
5679
5684
@command('tip',
    [('p', 'patch', None, _('show patch')),
    ('g', 'git', None, _('use git extended diff format')),
    ] + templateopts,
    _('[-p] [-g]'))
def tip(ui, repo, **opts):
    """show the tip revision

    The tip revision (usually just called the tip) is the changeset
    most recently added to the repository (and therefore the most
    recently changed head).

    If you have just made a commit, that commit will be the tip. If
    you have just pulled changes from another repository, the tip of
    that repository becomes the current tip. The "tip" tag is special
    and cannot be renamed or assigned to a different changeset.

    Returns 0 on success.
    """
    # render the single 'tip' changeset through the standard log displayer
    shower = cmdutil.show_changeset(ui, repo, opts)
    shower.show(repo['tip'])
    shower.close()
5702
5707
@command('unbundle',
    [('u', 'update', None,
     _('update to new branch head if changesets were unbundled'))],
    _('[-u] FILE...'))
def unbundle(ui, repo, fname1, *fnames, **opts):
    """apply one or more changegroup files

    Apply one or more compressed changegroup files generated by the
    bundle command.

    Returns 0 on success, 1 if an update has unresolved files.
    """
    bundlepaths = (fname1,) + fnames

    lock = repo.lock()
    wc = repo['.']          # remember the pre-unbundle parent for bookmarks
    try:
        # apply each bundle in turn; modheads reflects the last applied one
        for path in bundlepaths:
            fh = hg.openpath(ui, path)
            gen = changegroup.readbundle(fh, path)
            modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + path)
    finally:
        lock.release()
    bookmarks.updatecurrentbookmark(repo, wc.node(), wc.branch())
    return postincoming(ui, repo, modheads, opts.get('update'), None)
5728
5733
5729 @command('^update|up|checkout|co',
5734 @command('^update|up|checkout|co',
5730 [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
5735 [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
5731 ('c', 'check', None,
5736 ('c', 'check', None,
5732 _('update across branches if no uncommitted changes')),
5737 _('update across branches if no uncommitted changes')),
5733 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
5738 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
5734 ('r', 'rev', '', _('revision'), _('REV'))],
5739 ('r', 'rev', '', _('revision'), _('REV'))],
5735 _('[-c] [-C] [-d DATE] [[-r] REV]'))
5740 _('[-c] [-C] [-d DATE] [[-r] REV]'))
5736 def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False):
5741 def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False):
5737 """update working directory (or switch revisions)
5742 """update working directory (or switch revisions)
5738
5743
5739 Update the repository's working directory to the specified
5744 Update the repository's working directory to the specified
5740 changeset. If no changeset is specified, update to the tip of the
5745 changeset. If no changeset is specified, update to the tip of the
5741 current named branch and move the current bookmark (see :hg:`help
5746 current named branch and move the current bookmark (see :hg:`help
5742 bookmarks`).
5747 bookmarks`).
5743
5748
5744 Update sets the working directory's parent revision to the specified
5749 Update sets the working directory's parent revision to the specified
5745 changeset (see :hg:`help parents`).
5750 changeset (see :hg:`help parents`).
5746
5751
5747 If the changeset is not a descendant or ancestor of the working
5752 If the changeset is not a descendant or ancestor of the working
5748 directory's parent, the update is aborted. With the -c/--check
5753 directory's parent, the update is aborted. With the -c/--check
5749 option, the working directory is checked for uncommitted changes; if
5754 option, the working directory is checked for uncommitted changes; if
5750 none are found, the working directory is updated to the specified
5755 none are found, the working directory is updated to the specified
5751 changeset.
5756 changeset.
5752
5757
5753 .. container:: verbose
5758 .. container:: verbose
5754
5759
5755 The following rules apply when the working directory contains
5760 The following rules apply when the working directory contains
5756 uncommitted changes:
5761 uncommitted changes:
5757
5762
5758 1. If neither -c/--check nor -C/--clean is specified, and if
5763 1. If neither -c/--check nor -C/--clean is specified, and if
5759 the requested changeset is an ancestor or descendant of
5764 the requested changeset is an ancestor or descendant of
5760 the working directory's parent, the uncommitted changes
5765 the working directory's parent, the uncommitted changes
5761 are merged into the requested changeset and the merged
5766 are merged into the requested changeset and the merged
5762 result is left uncommitted. If the requested changeset is
5767 result is left uncommitted. If the requested changeset is
5763 not an ancestor or descendant (that is, it is on another
5768 not an ancestor or descendant (that is, it is on another
5764 branch), the update is aborted and the uncommitted changes
5769 branch), the update is aborted and the uncommitted changes
5765 are preserved.
5770 are preserved.
5766
5771
5767 2. With the -c/--check option, the update is aborted and the
5772 2. With the -c/--check option, the update is aborted and the
5768 uncommitted changes are preserved.
5773 uncommitted changes are preserved.
5769
5774
5770 3. With the -C/--clean option, uncommitted changes are discarded and
5775 3. With the -C/--clean option, uncommitted changes are discarded and
5771 the working directory is updated to the requested changeset.
5776 the working directory is updated to the requested changeset.
5772
5777
5773 To cancel an uncommitted merge (and lose your changes), use
5778 To cancel an uncommitted merge (and lose your changes), use
5774 :hg:`update --clean .`.
5779 :hg:`update --clean .`.
5775
5780
5776 Use null as the changeset to remove the working directory (like
5781 Use null as the changeset to remove the working directory (like
5777 :hg:`clone -U`).
5782 :hg:`clone -U`).
5778
5783
5779 If you want to revert just one file to an older revision, use
5784 If you want to revert just one file to an older revision, use
5780 :hg:`revert [-r REV] NAME`.
5785 :hg:`revert [-r REV] NAME`.
5781
5786
5782 See :hg:`help dates` for a list of formats valid for -d/--date.
5787 See :hg:`help dates` for a list of formats valid for -d/--date.
5783
5788
5784 Returns 0 on success, 1 if there are unresolved files.
5789 Returns 0 on success, 1 if there are unresolved files.
5785 """
5790 """
5786 if rev and node:
5791 if rev and node:
5787 raise util.Abort(_("please specify just one revision"))
5792 raise util.Abort(_("please specify just one revision"))
5788
5793
5789 if rev is None or rev == '':
5794 if rev is None or rev == '':
5790 rev = node
5795 rev = node
5791
5796
5792 # with no argument, we also move the current bookmark, if any
5797 # with no argument, we also move the current bookmark, if any
5793 movemarkfrom = None
5798 movemarkfrom = None
5794 if rev is None:
5799 if rev is None:
5795 curmark = repo._bookmarkcurrent
5800 curmark = repo._bookmarkcurrent
5796 if bookmarks.iscurrent(repo):
5801 if bookmarks.iscurrent(repo):
5797 movemarkfrom = repo['.'].node()
5802 movemarkfrom = repo['.'].node()
5798 elif curmark:
5803 elif curmark:
5799 ui.status(_("updating to active bookmark %s\n") % curmark)
5804 ui.status(_("updating to active bookmark %s\n") % curmark)
5800 rev = curmark
5805 rev = curmark
5801
5806
5802 # if we defined a bookmark, we have to remember the original bookmark name
5807 # if we defined a bookmark, we have to remember the original bookmark name
5803 brev = rev
5808 brev = rev
5804 rev = scmutil.revsingle(repo, rev, rev).rev()
5809 rev = scmutil.revsingle(repo, rev, rev).rev()
5805
5810
5806 if check and clean:
5811 if check and clean:
5807 raise util.Abort(_("cannot specify both -c/--check and -C/--clean"))
5812 raise util.Abort(_("cannot specify both -c/--check and -C/--clean"))
5808
5813
5809 if date:
5814 if date:
5810 if rev is not None:
5815 if rev is not None:
5811 raise util.Abort(_("you can't specify a revision and a date"))
5816 raise util.Abort(_("you can't specify a revision and a date"))
5812 rev = cmdutil.finddate(ui, repo, date)
5817 rev = cmdutil.finddate(ui, repo, date)
5813
5818
5814 if check:
5819 if check:
5815 c = repo[None]
5820 c = repo[None]
5816 if c.dirty(merge=False, branch=False, missing=True):
5821 if c.dirty(merge=False, branch=False, missing=True):
5817 raise util.Abort(_("uncommitted local changes"))
5822 raise util.Abort(_("uncommitted local changes"))
5818 if rev is None:
5823 if rev is None:
5819 rev = repo[repo[None].branch()].rev()
5824 rev = repo[repo[None].branch()].rev()
5820 mergemod._checkunknown(repo, repo[None], repo[rev])
5825 mergemod._checkunknown(repo, repo[None], repo[rev])
5821
5826
5822 if clean:
5827 if clean:
5823 ret = hg.clean(repo, rev)
5828 ret = hg.clean(repo, rev)
5824 else:
5829 else:
5825 ret = hg.update(repo, rev)
5830 ret = hg.update(repo, rev)
5826
5831
5827 if not ret and movemarkfrom:
5832 if not ret and movemarkfrom:
5828 if bookmarks.update(repo, [movemarkfrom], repo['.'].node()):
5833 if bookmarks.update(repo, [movemarkfrom], repo['.'].node()):
5829 ui.status(_("updating bookmark %s\n") % repo._bookmarkcurrent)
5834 ui.status(_("updating bookmark %s\n") % repo._bookmarkcurrent)
5830 elif brev in repo._bookmarks:
5835 elif brev in repo._bookmarks:
5831 bookmarks.setcurrent(repo, brev)
5836 bookmarks.setcurrent(repo, brev)
5832 elif brev:
5837 elif brev:
5833 bookmarks.unsetcurrent(repo)
5838 bookmarks.unsetcurrent(repo)
5834
5839
5835 return ret
5840 return ret
5836
5841
5837 @command('verify', [])
5842 @command('verify', [])
5838 def verify(ui, repo):
5843 def verify(ui, repo):
5839 """verify the integrity of the repository
5844 """verify the integrity of the repository
5840
5845
5841 Verify the integrity of the current repository.
5846 Verify the integrity of the current repository.
5842
5847
5843 This will perform an extensive check of the repository's
5848 This will perform an extensive check of the repository's
5844 integrity, validating the hashes and checksums of each entry in
5849 integrity, validating the hashes and checksums of each entry in
5845 the changelog, manifest, and tracked files, as well as the
5850 the changelog, manifest, and tracked files, as well as the
5846 integrity of their crosslinks and indices.
5851 integrity of their crosslinks and indices.
5847
5852
5848 Please see http://mercurial.selenic.com/wiki/RepositoryCorruption
5853 Please see http://mercurial.selenic.com/wiki/RepositoryCorruption
5849 for more information about recovery from corruption of the
5854 for more information about recovery from corruption of the
5850 repository.
5855 repository.
5851
5856
5852 Returns 0 on success, 1 if errors are encountered.
5857 Returns 0 on success, 1 if errors are encountered.
5853 """
5858 """
5854 return hg.verify(repo)
5859 return hg.verify(repo)
5855
5860
5856 @command('version', [])
5861 @command('version', [])
5857 def version_(ui):
5862 def version_(ui):
5858 """output version and copyright information"""
5863 """output version and copyright information"""
5859 ui.write(_("Mercurial Distributed SCM (version %s)\n")
5864 ui.write(_("Mercurial Distributed SCM (version %s)\n")
5860 % util.version())
5865 % util.version())
5861 ui.status(_(
5866 ui.status(_(
5862 "(see http://mercurial.selenic.com for more information)\n"
5867 "(see http://mercurial.selenic.com for more information)\n"
5863 "\nCopyright (C) 2005-2012 Matt Mackall and others\n"
5868 "\nCopyright (C) 2005-2012 Matt Mackall and others\n"
5864 "This is free software; see the source for copying conditions. "
5869 "This is free software; see the source for copying conditions. "
5865 "There is NO\nwarranty; "
5870 "There is NO\nwarranty; "
5866 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
5871 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
5867 ))
5872 ))
5868
5873
5869 norepo = ("clone init version help debugcommands debugcomplete"
5874 norepo = ("clone init version help debugcommands debugcomplete"
5870 " debugdate debuginstall debugfsinfo debugpushkey debugwireargs"
5875 " debugdate debuginstall debugfsinfo debugpushkey debugwireargs"
5871 " debugknown debuggetbundle debugbundle")
5876 " debugknown debuggetbundle debugbundle")
5872 optionalrepo = ("identify paths serve showconfig debugancestor debugdag"
5877 optionalrepo = ("identify paths serve showconfig debugancestor debugdag"
5873 " debugdata debugindex debugindexdot debugrevlog")
5878 " debugdata debugindex debugindexdot debugrevlog")
5874 inferrepo = ("add addremove annotate cat commit diff grep forget log parents"
5879 inferrepo = ("add addremove annotate cat commit diff grep forget log parents"
5875 " remove resolve status debugwalk")
5880 " remove resolve status debugwalk")
@@ -1,2613 +1,2618 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 from node import hex, nullid, short
7 from node import hex, nullid, short
8 from i18n import _
8 from i18n import _
9 import peer, changegroup, subrepo, discovery, pushkey, obsolete, repoview
9 import peer, changegroup, subrepo, discovery, pushkey, obsolete, repoview
10 import changelog, dirstate, filelog, manifest, context, bookmarks, phases
10 import changelog, dirstate, filelog, manifest, context, bookmarks, phases
11 import lock, transaction, store, encoding
11 import lock, transaction, store, encoding
12 import scmutil, util, extensions, hook, error, revset
12 import scmutil, util, extensions, hook, error, revset
13 import match as matchmod
13 import match as matchmod
14 import merge as mergemod
14 import merge as mergemod
15 import tags as tagsmod
15 import tags as tagsmod
16 from lock import release
16 from lock import release
17 import weakref, errno, os, time, inspect
17 import weakref, errno, os, time, inspect
18 import branchmap
18 import branchmap
19 propertycache = util.propertycache
19 propertycache = util.propertycache
20 filecache = scmutil.filecache
20 filecache = scmutil.filecache
21
21
22 class repofilecache(filecache):
22 class repofilecache(filecache):
23 """All filecache usage on repo are done for logic that should be unfiltered
23 """All filecache usage on repo are done for logic that should be unfiltered
24 """
24 """
25
25
26 def __get__(self, repo, type=None):
26 def __get__(self, repo, type=None):
27 return super(repofilecache, self).__get__(repo.unfiltered(), type)
27 return super(repofilecache, self).__get__(repo.unfiltered(), type)
28 def __set__(self, repo, value):
28 def __set__(self, repo, value):
29 return super(repofilecache, self).__set__(repo.unfiltered(), value)
29 return super(repofilecache, self).__set__(repo.unfiltered(), value)
30 def __delete__(self, repo):
30 def __delete__(self, repo):
31 return super(repofilecache, self).__delete__(repo.unfiltered())
31 return super(repofilecache, self).__delete__(repo.unfiltered())
32
32
33 class storecache(repofilecache):
33 class storecache(repofilecache):
34 """filecache for files in the store"""
34 """filecache for files in the store"""
35 def join(self, obj, fname):
35 def join(self, obj, fname):
36 return obj.sjoin(fname)
36 return obj.sjoin(fname)
37
37
38 class unfilteredpropertycache(propertycache):
38 class unfilteredpropertycache(propertycache):
39 """propertycache that apply to unfiltered repo only"""
39 """propertycache that apply to unfiltered repo only"""
40
40
41 def __get__(self, repo, type=None):
41 def __get__(self, repo, type=None):
42 return super(unfilteredpropertycache, self).__get__(repo.unfiltered())
42 return super(unfilteredpropertycache, self).__get__(repo.unfiltered())
43
43
44 class filteredpropertycache(propertycache):
44 class filteredpropertycache(propertycache):
45 """propertycache that must take filtering in account"""
45 """propertycache that must take filtering in account"""
46
46
47 def cachevalue(self, obj, value):
47 def cachevalue(self, obj, value):
48 object.__setattr__(obj, self.name, value)
48 object.__setattr__(obj, self.name, value)
49
49
50
50
51 def hasunfilteredcache(repo, name):
51 def hasunfilteredcache(repo, name):
52 """check if a repo has an unfilteredpropertycache value for <name>"""
52 """check if a repo has an unfilteredpropertycache value for <name>"""
53 return name in vars(repo.unfiltered())
53 return name in vars(repo.unfiltered())
54
54
55 def unfilteredmethod(orig):
55 def unfilteredmethod(orig):
56 """decorate method that always need to be run on unfiltered version"""
56 """decorate method that always need to be run on unfiltered version"""
57 def wrapper(repo, *args, **kwargs):
57 def wrapper(repo, *args, **kwargs):
58 return orig(repo.unfiltered(), *args, **kwargs)
58 return orig(repo.unfiltered(), *args, **kwargs)
59 return wrapper
59 return wrapper
60
60
61 MODERNCAPS = set(('lookup', 'branchmap', 'pushkey', 'known', 'getbundle'))
61 MODERNCAPS = set(('lookup', 'branchmap', 'pushkey', 'known', 'getbundle'))
62 LEGACYCAPS = MODERNCAPS.union(set(['changegroupsubset']))
62 LEGACYCAPS = MODERNCAPS.union(set(['changegroupsubset']))
63
63
64 class localpeer(peer.peerrepository):
64 class localpeer(peer.peerrepository):
65 '''peer for a local repo; reflects only the most recent API'''
65 '''peer for a local repo; reflects only the most recent API'''
66
66
67 def __init__(self, repo, caps=MODERNCAPS):
67 def __init__(self, repo, caps=MODERNCAPS):
68 peer.peerrepository.__init__(self)
68 peer.peerrepository.__init__(self)
69 self._repo = repo.filtered('served')
69 self._repo = repo.filtered('served')
70 self.ui = repo.ui
70 self.ui = repo.ui
71 self._caps = repo._restrictcapabilities(caps)
71 self._caps = repo._restrictcapabilities(caps)
72 self.requirements = repo.requirements
72 self.requirements = repo.requirements
73 self.supportedformats = repo.supportedformats
73 self.supportedformats = repo.supportedformats
74
74
75 def close(self):
75 def close(self):
76 self._repo.close()
76 self._repo.close()
77
77
78 def _capabilities(self):
78 def _capabilities(self):
79 return self._caps
79 return self._caps
80
80
81 def local(self):
81 def local(self):
82 return self._repo
82 return self._repo
83
83
84 def canpush(self):
84 def canpush(self):
85 return True
85 return True
86
86
87 def url(self):
87 def url(self):
88 return self._repo.url()
88 return self._repo.url()
89
89
90 def lookup(self, key):
90 def lookup(self, key):
91 return self._repo.lookup(key)
91 return self._repo.lookup(key)
92
92
93 def branchmap(self):
93 def branchmap(self):
94 return self._repo.branchmap()
94 return self._repo.branchmap()
95
95
96 def heads(self):
96 def heads(self):
97 return self._repo.heads()
97 return self._repo.heads()
98
98
99 def known(self, nodes):
99 def known(self, nodes):
100 return self._repo.known(nodes)
100 return self._repo.known(nodes)
101
101
102 def getbundle(self, source, heads=None, common=None):
102 def getbundle(self, source, heads=None, common=None, bundlecaps=None):
103 return self._repo.getbundle(source, heads=heads, common=common)
103 return self._repo.getbundle(source, heads=heads, common=common,
104 bundlecaps=None)
104
105
105 # TODO We might want to move the next two calls into legacypeer and add
106 # TODO We might want to move the next two calls into legacypeer and add
106 # unbundle instead.
107 # unbundle instead.
107
108
108 def lock(self):
109 def lock(self):
109 return self._repo.lock()
110 return self._repo.lock()
110
111
111 def addchangegroup(self, cg, source, url):
112 def addchangegroup(self, cg, source, url):
112 return self._repo.addchangegroup(cg, source, url)
113 return self._repo.addchangegroup(cg, source, url)
113
114
114 def pushkey(self, namespace, key, old, new):
115 def pushkey(self, namespace, key, old, new):
115 return self._repo.pushkey(namespace, key, old, new)
116 return self._repo.pushkey(namespace, key, old, new)
116
117
117 def listkeys(self, namespace):
118 def listkeys(self, namespace):
118 return self._repo.listkeys(namespace)
119 return self._repo.listkeys(namespace)
119
120
120 def debugwireargs(self, one, two, three=None, four=None, five=None):
121 def debugwireargs(self, one, two, three=None, four=None, five=None):
121 '''used to test argument passing over the wire'''
122 '''used to test argument passing over the wire'''
122 return "%s %s %s %s %s" % (one, two, three, four, five)
123 return "%s %s %s %s %s" % (one, two, three, four, five)
123
124
124 class locallegacypeer(localpeer):
125 class locallegacypeer(localpeer):
125 '''peer extension which implements legacy methods too; used for tests with
126 '''peer extension which implements legacy methods too; used for tests with
126 restricted capabilities'''
127 restricted capabilities'''
127
128
128 def __init__(self, repo):
129 def __init__(self, repo):
129 localpeer.__init__(self, repo, caps=LEGACYCAPS)
130 localpeer.__init__(self, repo, caps=LEGACYCAPS)
130
131
131 def branches(self, nodes):
132 def branches(self, nodes):
132 return self._repo.branches(nodes)
133 return self._repo.branches(nodes)
133
134
134 def between(self, pairs):
135 def between(self, pairs):
135 return self._repo.between(pairs)
136 return self._repo.between(pairs)
136
137
137 def changegroup(self, basenodes, source):
138 def changegroup(self, basenodes, source):
138 return self._repo.changegroup(basenodes, source)
139 return self._repo.changegroup(basenodes, source)
139
140
140 def changegroupsubset(self, bases, heads, source):
141 def changegroupsubset(self, bases, heads, source):
141 return self._repo.changegroupsubset(bases, heads, source)
142 return self._repo.changegroupsubset(bases, heads, source)
142
143
143 class localrepository(object):
144 class localrepository(object):
144
145
145 supportedformats = set(('revlogv1', 'generaldelta'))
146 supportedformats = set(('revlogv1', 'generaldelta'))
146 supported = supportedformats | set(('store', 'fncache', 'shared',
147 supported = supportedformats | set(('store', 'fncache', 'shared',
147 'dotencode'))
148 'dotencode'))
148 openerreqs = set(('revlogv1', 'generaldelta'))
149 openerreqs = set(('revlogv1', 'generaldelta'))
149 requirements = ['revlogv1']
150 requirements = ['revlogv1']
150 filtername = None
151 filtername = None
151
152
152 def _baserequirements(self, create):
153 def _baserequirements(self, create):
153 return self.requirements[:]
154 return self.requirements[:]
154
155
155 def __init__(self, baseui, path=None, create=False):
156 def __init__(self, baseui, path=None, create=False):
156 self.wvfs = scmutil.vfs(path, expandpath=True, realpath=True)
157 self.wvfs = scmutil.vfs(path, expandpath=True, realpath=True)
157 self.wopener = self.wvfs
158 self.wopener = self.wvfs
158 self.root = self.wvfs.base
159 self.root = self.wvfs.base
159 self.path = self.wvfs.join(".hg")
160 self.path = self.wvfs.join(".hg")
160 self.origroot = path
161 self.origroot = path
161 self.auditor = scmutil.pathauditor(self.root, self._checknested)
162 self.auditor = scmutil.pathauditor(self.root, self._checknested)
162 self.vfs = scmutil.vfs(self.path)
163 self.vfs = scmutil.vfs(self.path)
163 self.opener = self.vfs
164 self.opener = self.vfs
164 self.baseui = baseui
165 self.baseui = baseui
165 self.ui = baseui.copy()
166 self.ui = baseui.copy()
166 # A list of callback to shape the phase if no data were found.
167 # A list of callback to shape the phase if no data were found.
167 # Callback are in the form: func(repo, roots) --> processed root.
168 # Callback are in the form: func(repo, roots) --> processed root.
168 # This list it to be filled by extension during repo setup
169 # This list it to be filled by extension during repo setup
169 self._phasedefaults = []
170 self._phasedefaults = []
170 try:
171 try:
171 self.ui.readconfig(self.join("hgrc"), self.root)
172 self.ui.readconfig(self.join("hgrc"), self.root)
172 extensions.loadall(self.ui)
173 extensions.loadall(self.ui)
173 except IOError:
174 except IOError:
174 pass
175 pass
175
176
176 if not self.vfs.isdir():
177 if not self.vfs.isdir():
177 if create:
178 if create:
178 if not self.wvfs.exists():
179 if not self.wvfs.exists():
179 self.wvfs.makedirs()
180 self.wvfs.makedirs()
180 self.vfs.makedir(notindexed=True)
181 self.vfs.makedir(notindexed=True)
181 requirements = self._baserequirements(create)
182 requirements = self._baserequirements(create)
182 if self.ui.configbool('format', 'usestore', True):
183 if self.ui.configbool('format', 'usestore', True):
183 self.vfs.mkdir("store")
184 self.vfs.mkdir("store")
184 requirements.append("store")
185 requirements.append("store")
185 if self.ui.configbool('format', 'usefncache', True):
186 if self.ui.configbool('format', 'usefncache', True):
186 requirements.append("fncache")
187 requirements.append("fncache")
187 if self.ui.configbool('format', 'dotencode', True):
188 if self.ui.configbool('format', 'dotencode', True):
188 requirements.append('dotencode')
189 requirements.append('dotencode')
189 # create an invalid changelog
190 # create an invalid changelog
190 self.vfs.append(
191 self.vfs.append(
191 "00changelog.i",
192 "00changelog.i",
192 '\0\0\0\2' # represents revlogv2
193 '\0\0\0\2' # represents revlogv2
193 ' dummy changelog to prevent using the old repo layout'
194 ' dummy changelog to prevent using the old repo layout'
194 )
195 )
195 if self.ui.configbool('format', 'generaldelta', False):
196 if self.ui.configbool('format', 'generaldelta', False):
196 requirements.append("generaldelta")
197 requirements.append("generaldelta")
197 requirements = set(requirements)
198 requirements = set(requirements)
198 else:
199 else:
199 raise error.RepoError(_("repository %s not found") % path)
200 raise error.RepoError(_("repository %s not found") % path)
200 elif create:
201 elif create:
201 raise error.RepoError(_("repository %s already exists") % path)
202 raise error.RepoError(_("repository %s already exists") % path)
202 else:
203 else:
203 try:
204 try:
204 requirements = scmutil.readrequires(self.vfs, self.supported)
205 requirements = scmutil.readrequires(self.vfs, self.supported)
205 except IOError, inst:
206 except IOError, inst:
206 if inst.errno != errno.ENOENT:
207 if inst.errno != errno.ENOENT:
207 raise
208 raise
208 requirements = set()
209 requirements = set()
209
210
210 self.sharedpath = self.path
211 self.sharedpath = self.path
211 try:
212 try:
212 vfs = scmutil.vfs(self.vfs.read("sharedpath").rstrip('\n'),
213 vfs = scmutil.vfs(self.vfs.read("sharedpath").rstrip('\n'),
213 realpath=True)
214 realpath=True)
214 s = vfs.base
215 s = vfs.base
215 if not vfs.exists():
216 if not vfs.exists():
216 raise error.RepoError(
217 raise error.RepoError(
217 _('.hg/sharedpath points to nonexistent directory %s') % s)
218 _('.hg/sharedpath points to nonexistent directory %s') % s)
218 self.sharedpath = s
219 self.sharedpath = s
219 except IOError, inst:
220 except IOError, inst:
220 if inst.errno != errno.ENOENT:
221 if inst.errno != errno.ENOENT:
221 raise
222 raise
222
223
223 self.store = store.store(requirements, self.sharedpath, scmutil.vfs)
224 self.store = store.store(requirements, self.sharedpath, scmutil.vfs)
224 self.spath = self.store.path
225 self.spath = self.store.path
225 self.svfs = self.store.vfs
226 self.svfs = self.store.vfs
226 self.sopener = self.svfs
227 self.sopener = self.svfs
227 self.sjoin = self.store.join
228 self.sjoin = self.store.join
228 self.vfs.createmode = self.store.createmode
229 self.vfs.createmode = self.store.createmode
229 self._applyrequirements(requirements)
230 self._applyrequirements(requirements)
230 if create:
231 if create:
231 self._writerequirements()
232 self._writerequirements()
232
233
233
234
234 self._branchcaches = {}
235 self._branchcaches = {}
235 self.filterpats = {}
236 self.filterpats = {}
236 self._datafilters = {}
237 self._datafilters = {}
237 self._transref = self._lockref = self._wlockref = None
238 self._transref = self._lockref = self._wlockref = None
238
239
239 # A cache for various files under .hg/ that tracks file changes,
240 # A cache for various files under .hg/ that tracks file changes,
240 # (used by the filecache decorator)
241 # (used by the filecache decorator)
241 #
242 #
242 # Maps a property name to its util.filecacheentry
243 # Maps a property name to its util.filecacheentry
243 self._filecache = {}
244 self._filecache = {}
244
245
245 # hold sets of revision to be filtered
246 # hold sets of revision to be filtered
246 # should be cleared when something might have changed the filter value:
247 # should be cleared when something might have changed the filter value:
247 # - new changesets,
248 # - new changesets,
248 # - phase change,
249 # - phase change,
249 # - new obsolescence marker,
250 # - new obsolescence marker,
250 # - working directory parent change,
251 # - working directory parent change,
251 # - bookmark changes
252 # - bookmark changes
252 self.filteredrevcache = {}
253 self.filteredrevcache = {}
253
254
254 def close(self):
255 def close(self):
255 pass
256 pass
256
257
257 def _restrictcapabilities(self, caps):
258 def _restrictcapabilities(self, caps):
258 return caps
259 return caps
259
260
260 def _applyrequirements(self, requirements):
261 def _applyrequirements(self, requirements):
261 self.requirements = requirements
262 self.requirements = requirements
262 self.sopener.options = dict((r, 1) for r in requirements
263 self.sopener.options = dict((r, 1) for r in requirements
263 if r in self.openerreqs)
264 if r in self.openerreqs)
264
265
265 def _writerequirements(self):
266 def _writerequirements(self):
266 reqfile = self.opener("requires", "w")
267 reqfile = self.opener("requires", "w")
267 for r in sorted(self.requirements):
268 for r in sorted(self.requirements):
268 reqfile.write("%s\n" % r)
269 reqfile.write("%s\n" % r)
269 reqfile.close()
270 reqfile.close()
270
271
271 def _checknested(self, path):
272 def _checknested(self, path):
272 """Determine if path is a legal nested repository."""
273 """Determine if path is a legal nested repository."""
273 if not path.startswith(self.root):
274 if not path.startswith(self.root):
274 return False
275 return False
275 subpath = path[len(self.root) + 1:]
276 subpath = path[len(self.root) + 1:]
276 normsubpath = util.pconvert(subpath)
277 normsubpath = util.pconvert(subpath)
277
278
278 # XXX: Checking against the current working copy is wrong in
279 # XXX: Checking against the current working copy is wrong in
279 # the sense that it can reject things like
280 # the sense that it can reject things like
280 #
281 #
281 # $ hg cat -r 10 sub/x.txt
282 # $ hg cat -r 10 sub/x.txt
282 #
283 #
283 # if sub/ is no longer a subrepository in the working copy
284 # if sub/ is no longer a subrepository in the working copy
284 # parent revision.
285 # parent revision.
285 #
286 #
286 # However, it can of course also allow things that would have
287 # However, it can of course also allow things that would have
287 # been rejected before, such as the above cat command if sub/
288 # been rejected before, such as the above cat command if sub/
288 # is a subrepository now, but was a normal directory before.
289 # is a subrepository now, but was a normal directory before.
289 # The old path auditor would have rejected by mistake since it
290 # The old path auditor would have rejected by mistake since it
290 # panics when it sees sub/.hg/.
291 # panics when it sees sub/.hg/.
291 #
292 #
292 # All in all, checking against the working copy seems sensible
293 # All in all, checking against the working copy seems sensible
293 # since we want to prevent access to nested repositories on
294 # since we want to prevent access to nested repositories on
294 # the filesystem *now*.
295 # the filesystem *now*.
295 ctx = self[None]
296 ctx = self[None]
296 parts = util.splitpath(subpath)
297 parts = util.splitpath(subpath)
297 while parts:
298 while parts:
298 prefix = '/'.join(parts)
299 prefix = '/'.join(parts)
299 if prefix in ctx.substate:
300 if prefix in ctx.substate:
300 if prefix == normsubpath:
301 if prefix == normsubpath:
301 return True
302 return True
302 else:
303 else:
303 sub = ctx.sub(prefix)
304 sub = ctx.sub(prefix)
304 return sub.checknested(subpath[len(prefix) + 1:])
305 return sub.checknested(subpath[len(prefix) + 1:])
305 else:
306 else:
306 parts.pop()
307 parts.pop()
307 return False
308 return False
308
309
309 def peer(self):
310 def peer(self):
310 return localpeer(self) # not cached to avoid reference cycle
311 return localpeer(self) # not cached to avoid reference cycle
311
312
312 def unfiltered(self):
313 def unfiltered(self):
313 """Return unfiltered version of the repository
314 """Return unfiltered version of the repository
314
315
315 Intended to be overwritten by filtered repo."""
316 Intended to be overwritten by filtered repo."""
316 return self
317 return self
317
318
318 def filtered(self, name):
319 def filtered(self, name):
319 """Return a filtered version of a repository"""
320 """Return a filtered version of a repository"""
320 # build a new class with the mixin and the current class
321 # build a new class with the mixin and the current class
321 # (possibly subclass of the repo)
322 # (possibly subclass of the repo)
322 class proxycls(repoview.repoview, self.unfiltered().__class__):
323 class proxycls(repoview.repoview, self.unfiltered().__class__):
323 pass
324 pass
324 return proxycls(self, name)
325 return proxycls(self, name)
325
326
326 @repofilecache('bookmarks')
327 @repofilecache('bookmarks')
327 def _bookmarks(self):
328 def _bookmarks(self):
328 return bookmarks.bmstore(self)
329 return bookmarks.bmstore(self)
329
330
330 @repofilecache('bookmarks.current')
331 @repofilecache('bookmarks.current')
331 def _bookmarkcurrent(self):
332 def _bookmarkcurrent(self):
332 return bookmarks.readcurrent(self)
333 return bookmarks.readcurrent(self)
333
334
334 def bookmarkheads(self, bookmark):
335 def bookmarkheads(self, bookmark):
335 name = bookmark.split('@', 1)[0]
336 name = bookmark.split('@', 1)[0]
336 heads = []
337 heads = []
337 for mark, n in self._bookmarks.iteritems():
338 for mark, n in self._bookmarks.iteritems():
338 if mark.split('@', 1)[0] == name:
339 if mark.split('@', 1)[0] == name:
339 heads.append(n)
340 heads.append(n)
340 return heads
341 return heads
341
342
342 @storecache('phaseroots')
343 @storecache('phaseroots')
343 def _phasecache(self):
344 def _phasecache(self):
344 return phases.phasecache(self, self._phasedefaults)
345 return phases.phasecache(self, self._phasedefaults)
345
346
346 @storecache('obsstore')
347 @storecache('obsstore')
347 def obsstore(self):
348 def obsstore(self):
348 store = obsolete.obsstore(self.sopener)
349 store = obsolete.obsstore(self.sopener)
349 if store and not obsolete._enabled:
350 if store and not obsolete._enabled:
350 # message is rare enough to not be translated
351 # message is rare enough to not be translated
351 msg = 'obsolete feature not enabled but %i markers found!\n'
352 msg = 'obsolete feature not enabled but %i markers found!\n'
352 self.ui.warn(msg % len(list(store)))
353 self.ui.warn(msg % len(list(store)))
353 return store
354 return store
354
355
355 @storecache('00changelog.i')
356 @storecache('00changelog.i')
356 def changelog(self):
357 def changelog(self):
357 c = changelog.changelog(self.sopener)
358 c = changelog.changelog(self.sopener)
358 if 'HG_PENDING' in os.environ:
359 if 'HG_PENDING' in os.environ:
359 p = os.environ['HG_PENDING']
360 p = os.environ['HG_PENDING']
360 if p.startswith(self.root):
361 if p.startswith(self.root):
361 c.readpending('00changelog.i.a')
362 c.readpending('00changelog.i.a')
362 return c
363 return c
363
364
364 @storecache('00manifest.i')
365 @storecache('00manifest.i')
365 def manifest(self):
366 def manifest(self):
366 return manifest.manifest(self.sopener)
367 return manifest.manifest(self.sopener)
367
368
368 @repofilecache('dirstate')
369 @repofilecache('dirstate')
369 def dirstate(self):
370 def dirstate(self):
370 warned = [0]
371 warned = [0]
371 def validate(node):
372 def validate(node):
372 try:
373 try:
373 self.changelog.rev(node)
374 self.changelog.rev(node)
374 return node
375 return node
375 except error.LookupError:
376 except error.LookupError:
376 if not warned[0]:
377 if not warned[0]:
377 warned[0] = True
378 warned[0] = True
378 self.ui.warn(_("warning: ignoring unknown"
379 self.ui.warn(_("warning: ignoring unknown"
379 " working parent %s!\n") % short(node))
380 " working parent %s!\n") % short(node))
380 return nullid
381 return nullid
381
382
382 return dirstate.dirstate(self.opener, self.ui, self.root, validate)
383 return dirstate.dirstate(self.opener, self.ui, self.root, validate)
383
384
384 def __getitem__(self, changeid):
385 def __getitem__(self, changeid):
385 if changeid is None:
386 if changeid is None:
386 return context.workingctx(self)
387 return context.workingctx(self)
387 return context.changectx(self, changeid)
388 return context.changectx(self, changeid)
388
389
389 def __contains__(self, changeid):
390 def __contains__(self, changeid):
390 try:
391 try:
391 return bool(self.lookup(changeid))
392 return bool(self.lookup(changeid))
392 except error.RepoLookupError:
393 except error.RepoLookupError:
393 return False
394 return False
394
395
395 def __nonzero__(self):
396 def __nonzero__(self):
396 return True
397 return True
397
398
398 def __len__(self):
399 def __len__(self):
399 return len(self.changelog)
400 return len(self.changelog)
400
401
401 def __iter__(self):
402 def __iter__(self):
402 return iter(self.changelog)
403 return iter(self.changelog)
403
404
404 def revs(self, expr, *args):
405 def revs(self, expr, *args):
405 '''Return a list of revisions matching the given revset'''
406 '''Return a list of revisions matching the given revset'''
406 expr = revset.formatspec(expr, *args)
407 expr = revset.formatspec(expr, *args)
407 m = revset.match(None, expr)
408 m = revset.match(None, expr)
408 return [r for r in m(self, list(self))]
409 return [r for r in m(self, list(self))]
409
410
410 def set(self, expr, *args):
411 def set(self, expr, *args):
411 '''
412 '''
412 Yield a context for each matching revision, after doing arg
413 Yield a context for each matching revision, after doing arg
413 replacement via revset.formatspec
414 replacement via revset.formatspec
414 '''
415 '''
415 for r in self.revs(expr, *args):
416 for r in self.revs(expr, *args):
416 yield self[r]
417 yield self[r]
417
418
418 def url(self):
419 def url(self):
419 return 'file:' + self.root
420 return 'file:' + self.root
420
421
421 def hook(self, name, throw=False, **args):
422 def hook(self, name, throw=False, **args):
422 return hook.hook(self.ui, self, name, throw, **args)
423 return hook.hook(self.ui, self, name, throw, **args)
423
424
424 @unfilteredmethod
425 @unfilteredmethod
425 def _tag(self, names, node, message, local, user, date, extra={}):
426 def _tag(self, names, node, message, local, user, date, extra={}):
426 if isinstance(names, str):
427 if isinstance(names, str):
427 names = (names,)
428 names = (names,)
428
429
429 branches = self.branchmap()
430 branches = self.branchmap()
430 for name in names:
431 for name in names:
431 self.hook('pretag', throw=True, node=hex(node), tag=name,
432 self.hook('pretag', throw=True, node=hex(node), tag=name,
432 local=local)
433 local=local)
433 if name in branches:
434 if name in branches:
434 self.ui.warn(_("warning: tag %s conflicts with existing"
435 self.ui.warn(_("warning: tag %s conflicts with existing"
435 " branch name\n") % name)
436 " branch name\n") % name)
436
437
437 def writetags(fp, names, munge, prevtags):
438 def writetags(fp, names, munge, prevtags):
438 fp.seek(0, 2)
439 fp.seek(0, 2)
439 if prevtags and prevtags[-1] != '\n':
440 if prevtags and prevtags[-1] != '\n':
440 fp.write('\n')
441 fp.write('\n')
441 for name in names:
442 for name in names:
442 m = munge and munge(name) or name
443 m = munge and munge(name) or name
443 if (self._tagscache.tagtypes and
444 if (self._tagscache.tagtypes and
444 name in self._tagscache.tagtypes):
445 name in self._tagscache.tagtypes):
445 old = self.tags().get(name, nullid)
446 old = self.tags().get(name, nullid)
446 fp.write('%s %s\n' % (hex(old), m))
447 fp.write('%s %s\n' % (hex(old), m))
447 fp.write('%s %s\n' % (hex(node), m))
448 fp.write('%s %s\n' % (hex(node), m))
448 fp.close()
449 fp.close()
449
450
450 prevtags = ''
451 prevtags = ''
451 if local:
452 if local:
452 try:
453 try:
453 fp = self.opener('localtags', 'r+')
454 fp = self.opener('localtags', 'r+')
454 except IOError:
455 except IOError:
455 fp = self.opener('localtags', 'a')
456 fp = self.opener('localtags', 'a')
456 else:
457 else:
457 prevtags = fp.read()
458 prevtags = fp.read()
458
459
459 # local tags are stored in the current charset
460 # local tags are stored in the current charset
460 writetags(fp, names, None, prevtags)
461 writetags(fp, names, None, prevtags)
461 for name in names:
462 for name in names:
462 self.hook('tag', node=hex(node), tag=name, local=local)
463 self.hook('tag', node=hex(node), tag=name, local=local)
463 return
464 return
464
465
465 try:
466 try:
466 fp = self.wfile('.hgtags', 'rb+')
467 fp = self.wfile('.hgtags', 'rb+')
467 except IOError, e:
468 except IOError, e:
468 if e.errno != errno.ENOENT:
469 if e.errno != errno.ENOENT:
469 raise
470 raise
470 fp = self.wfile('.hgtags', 'ab')
471 fp = self.wfile('.hgtags', 'ab')
471 else:
472 else:
472 prevtags = fp.read()
473 prevtags = fp.read()
473
474
474 # committed tags are stored in UTF-8
475 # committed tags are stored in UTF-8
475 writetags(fp, names, encoding.fromlocal, prevtags)
476 writetags(fp, names, encoding.fromlocal, prevtags)
476
477
477 fp.close()
478 fp.close()
478
479
479 self.invalidatecaches()
480 self.invalidatecaches()
480
481
481 if '.hgtags' not in self.dirstate:
482 if '.hgtags' not in self.dirstate:
482 self[None].add(['.hgtags'])
483 self[None].add(['.hgtags'])
483
484
484 m = matchmod.exact(self.root, '', ['.hgtags'])
485 m = matchmod.exact(self.root, '', ['.hgtags'])
485 tagnode = self.commit(message, user, date, extra=extra, match=m)
486 tagnode = self.commit(message, user, date, extra=extra, match=m)
486
487
487 for name in names:
488 for name in names:
488 self.hook('tag', node=hex(node), tag=name, local=local)
489 self.hook('tag', node=hex(node), tag=name, local=local)
489
490
490 return tagnode
491 return tagnode
491
492
492 def tag(self, names, node, message, local, user, date):
493 def tag(self, names, node, message, local, user, date):
493 '''tag a revision with one or more symbolic names.
494 '''tag a revision with one or more symbolic names.
494
495
495 names is a list of strings or, when adding a single tag, names may be a
496 names is a list of strings or, when adding a single tag, names may be a
496 string.
497 string.
497
498
498 if local is True, the tags are stored in a per-repository file.
499 if local is True, the tags are stored in a per-repository file.
499 otherwise, they are stored in the .hgtags file, and a new
500 otherwise, they are stored in the .hgtags file, and a new
500 changeset is committed with the change.
501 changeset is committed with the change.
501
502
502 keyword arguments:
503 keyword arguments:
503
504
504 local: whether to store tags in non-version-controlled file
505 local: whether to store tags in non-version-controlled file
505 (default False)
506 (default False)
506
507
507 message: commit message to use if committing
508 message: commit message to use if committing
508
509
509 user: name of user to use if committing
510 user: name of user to use if committing
510
511
511 date: date tuple to use if committing'''
512 date: date tuple to use if committing'''
512
513
513 if not local:
514 if not local:
514 for x in self.status()[:5]:
515 for x in self.status()[:5]:
515 if '.hgtags' in x:
516 if '.hgtags' in x:
516 raise util.Abort(_('working copy of .hgtags is changed '
517 raise util.Abort(_('working copy of .hgtags is changed '
517 '(please commit .hgtags manually)'))
518 '(please commit .hgtags manually)'))
518
519
519 self.tags() # instantiate the cache
520 self.tags() # instantiate the cache
520 self._tag(names, node, message, local, user, date)
521 self._tag(names, node, message, local, user, date)
521
522
522 @filteredpropertycache
523 @filteredpropertycache
523 def _tagscache(self):
524 def _tagscache(self):
524 '''Returns a tagscache object that contains various tags related
525 '''Returns a tagscache object that contains various tags related
525 caches.'''
526 caches.'''
526
527
527 # This simplifies its cache management by having one decorated
528 # This simplifies its cache management by having one decorated
528 # function (this one) and the rest simply fetch things from it.
529 # function (this one) and the rest simply fetch things from it.
529 class tagscache(object):
530 class tagscache(object):
530 def __init__(self):
531 def __init__(self):
531 # These two define the set of tags for this repository. tags
532 # These two define the set of tags for this repository. tags
532 # maps tag name to node; tagtypes maps tag name to 'global' or
533 # maps tag name to node; tagtypes maps tag name to 'global' or
533 # 'local'. (Global tags are defined by .hgtags across all
534 # 'local'. (Global tags are defined by .hgtags across all
534 # heads, and local tags are defined in .hg/localtags.)
535 # heads, and local tags are defined in .hg/localtags.)
535 # They constitute the in-memory cache of tags.
536 # They constitute the in-memory cache of tags.
536 self.tags = self.tagtypes = None
537 self.tags = self.tagtypes = None
537
538
538 self.nodetagscache = self.tagslist = None
539 self.nodetagscache = self.tagslist = None
539
540
540 cache = tagscache()
541 cache = tagscache()
541 cache.tags, cache.tagtypes = self._findtags()
542 cache.tags, cache.tagtypes = self._findtags()
542
543
543 return cache
544 return cache
544
545
545 def tags(self):
546 def tags(self):
546 '''return a mapping of tag to node'''
547 '''return a mapping of tag to node'''
547 t = {}
548 t = {}
548 if self.changelog.filteredrevs:
549 if self.changelog.filteredrevs:
549 tags, tt = self._findtags()
550 tags, tt = self._findtags()
550 else:
551 else:
551 tags = self._tagscache.tags
552 tags = self._tagscache.tags
552 for k, v in tags.iteritems():
553 for k, v in tags.iteritems():
553 try:
554 try:
554 # ignore tags to unknown nodes
555 # ignore tags to unknown nodes
555 self.changelog.rev(v)
556 self.changelog.rev(v)
556 t[k] = v
557 t[k] = v
557 except (error.LookupError, ValueError):
558 except (error.LookupError, ValueError):
558 pass
559 pass
559 return t
560 return t
560
561
561 def _findtags(self):
562 def _findtags(self):
562 '''Do the hard work of finding tags. Return a pair of dicts
563 '''Do the hard work of finding tags. Return a pair of dicts
563 (tags, tagtypes) where tags maps tag name to node, and tagtypes
564 (tags, tagtypes) where tags maps tag name to node, and tagtypes
564 maps tag name to a string like \'global\' or \'local\'.
565 maps tag name to a string like \'global\' or \'local\'.
565 Subclasses or extensions are free to add their own tags, but
566 Subclasses or extensions are free to add their own tags, but
566 should be aware that the returned dicts will be retained for the
567 should be aware that the returned dicts will be retained for the
567 duration of the localrepo object.'''
568 duration of the localrepo object.'''
568
569
569 # XXX what tagtype should subclasses/extensions use? Currently
570 # XXX what tagtype should subclasses/extensions use? Currently
570 # mq and bookmarks add tags, but do not set the tagtype at all.
571 # mq and bookmarks add tags, but do not set the tagtype at all.
571 # Should each extension invent its own tag type? Should there
572 # Should each extension invent its own tag type? Should there
572 # be one tagtype for all such "virtual" tags? Or is the status
573 # be one tagtype for all such "virtual" tags? Or is the status
573 # quo fine?
574 # quo fine?
574
575
575 alltags = {} # map tag name to (node, hist)
576 alltags = {} # map tag name to (node, hist)
576 tagtypes = {}
577 tagtypes = {}
577
578
578 tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
579 tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
579 tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)
580 tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)
580
581
581 # Build the return dicts. Have to re-encode tag names because
582 # Build the return dicts. Have to re-encode tag names because
582 # the tags module always uses UTF-8 (in order not to lose info
583 # the tags module always uses UTF-8 (in order not to lose info
583 # writing to the cache), but the rest of Mercurial wants them in
584 # writing to the cache), but the rest of Mercurial wants them in
584 # local encoding.
585 # local encoding.
585 tags = {}
586 tags = {}
586 for (name, (node, hist)) in alltags.iteritems():
587 for (name, (node, hist)) in alltags.iteritems():
587 if node != nullid:
588 if node != nullid:
588 tags[encoding.tolocal(name)] = node
589 tags[encoding.tolocal(name)] = node
589 tags['tip'] = self.changelog.tip()
590 tags['tip'] = self.changelog.tip()
590 tagtypes = dict([(encoding.tolocal(name), value)
591 tagtypes = dict([(encoding.tolocal(name), value)
591 for (name, value) in tagtypes.iteritems()])
592 for (name, value) in tagtypes.iteritems()])
592 return (tags, tagtypes)
593 return (tags, tagtypes)
593
594
594 def tagtype(self, tagname):
595 def tagtype(self, tagname):
595 '''
596 '''
596 return the type of the given tag. result can be:
597 return the type of the given tag. result can be:
597
598
598 'local' : a local tag
599 'local' : a local tag
599 'global' : a global tag
600 'global' : a global tag
600 None : tag does not exist
601 None : tag does not exist
601 '''
602 '''
602
603
603 return self._tagscache.tagtypes.get(tagname)
604 return self._tagscache.tagtypes.get(tagname)
604
605
605 def tagslist(self):
606 def tagslist(self):
606 '''return a list of tags ordered by revision'''
607 '''return a list of tags ordered by revision'''
607 if not self._tagscache.tagslist:
608 if not self._tagscache.tagslist:
608 l = []
609 l = []
609 for t, n in self.tags().iteritems():
610 for t, n in self.tags().iteritems():
610 r = self.changelog.rev(n)
611 r = self.changelog.rev(n)
611 l.append((r, t, n))
612 l.append((r, t, n))
612 self._tagscache.tagslist = [(t, n) for r, t, n in sorted(l)]
613 self._tagscache.tagslist = [(t, n) for r, t, n in sorted(l)]
613
614
614 return self._tagscache.tagslist
615 return self._tagscache.tagslist
615
616
616 def nodetags(self, node):
617 def nodetags(self, node):
617 '''return the tags associated with a node'''
618 '''return the tags associated with a node'''
618 if not self._tagscache.nodetagscache:
619 if not self._tagscache.nodetagscache:
619 nodetagscache = {}
620 nodetagscache = {}
620 for t, n in self._tagscache.tags.iteritems():
621 for t, n in self._tagscache.tags.iteritems():
621 nodetagscache.setdefault(n, []).append(t)
622 nodetagscache.setdefault(n, []).append(t)
622 for tags in nodetagscache.itervalues():
623 for tags in nodetagscache.itervalues():
623 tags.sort()
624 tags.sort()
624 self._tagscache.nodetagscache = nodetagscache
625 self._tagscache.nodetagscache = nodetagscache
625 return self._tagscache.nodetagscache.get(node, [])
626 return self._tagscache.nodetagscache.get(node, [])
626
627
627 def nodebookmarks(self, node):
628 def nodebookmarks(self, node):
628 marks = []
629 marks = []
629 for bookmark, n in self._bookmarks.iteritems():
630 for bookmark, n in self._bookmarks.iteritems():
630 if n == node:
631 if n == node:
631 marks.append(bookmark)
632 marks.append(bookmark)
632 return sorted(marks)
633 return sorted(marks)
633
634
634 def branchmap(self):
635 def branchmap(self):
635 '''returns a dictionary {branch: [branchheads]}'''
636 '''returns a dictionary {branch: [branchheads]}'''
636 branchmap.updatecache(self)
637 branchmap.updatecache(self)
637 return self._branchcaches[self.filtername]
638 return self._branchcaches[self.filtername]
638
639
639
640
640 def _branchtip(self, heads):
641 def _branchtip(self, heads):
641 '''return the tipmost branch head in heads'''
642 '''return the tipmost branch head in heads'''
642 tip = heads[-1]
643 tip = heads[-1]
643 for h in reversed(heads):
644 for h in reversed(heads):
644 if not self[h].closesbranch():
645 if not self[h].closesbranch():
645 tip = h
646 tip = h
646 break
647 break
647 return tip
648 return tip
648
649
649 def branchtip(self, branch):
650 def branchtip(self, branch):
650 '''return the tip node for a given branch'''
651 '''return the tip node for a given branch'''
651 if branch not in self.branchmap():
652 if branch not in self.branchmap():
652 raise error.RepoLookupError(_("unknown branch '%s'") % branch)
653 raise error.RepoLookupError(_("unknown branch '%s'") % branch)
653 return self._branchtip(self.branchmap()[branch])
654 return self._branchtip(self.branchmap()[branch])
654
655
655 def branchtags(self):
656 def branchtags(self):
656 '''return a dict where branch names map to the tipmost head of
657 '''return a dict where branch names map to the tipmost head of
657 the branch, open heads come before closed'''
658 the branch, open heads come before closed'''
658 bt = {}
659 bt = {}
659 for bn, heads in self.branchmap().iteritems():
660 for bn, heads in self.branchmap().iteritems():
660 bt[bn] = self._branchtip(heads)
661 bt[bn] = self._branchtip(heads)
661 return bt
662 return bt
662
663
663 def lookup(self, key):
664 def lookup(self, key):
664 return self[key].node()
665 return self[key].node()
665
666
666 def lookupbranch(self, key, remote=None):
667 def lookupbranch(self, key, remote=None):
667 repo = remote or self
668 repo = remote or self
668 if key in repo.branchmap():
669 if key in repo.branchmap():
669 return key
670 return key
670
671
671 repo = (remote and remote.local()) and remote or self
672 repo = (remote and remote.local()) and remote or self
672 return repo[key].branch()
673 return repo[key].branch()
673
674
674 def known(self, nodes):
675 def known(self, nodes):
675 nm = self.changelog.nodemap
676 nm = self.changelog.nodemap
676 pc = self._phasecache
677 pc = self._phasecache
677 result = []
678 result = []
678 for n in nodes:
679 for n in nodes:
679 r = nm.get(n)
680 r = nm.get(n)
680 resp = not (r is None or pc.phase(self, r) >= phases.secret)
681 resp = not (r is None or pc.phase(self, r) >= phases.secret)
681 result.append(resp)
682 result.append(resp)
682 return result
683 return result
683
684
684 def local(self):
685 def local(self):
685 return self
686 return self
686
687
687 def cancopy(self):
688 def cancopy(self):
688 return self.local() # so statichttprepo's override of local() works
689 return self.local() # so statichttprepo's override of local() works
689
690
690 def join(self, f):
691 def join(self, f):
691 return os.path.join(self.path, f)
692 return os.path.join(self.path, f)
692
693
693 def wjoin(self, f):
694 def wjoin(self, f):
694 return os.path.join(self.root, f)
695 return os.path.join(self.root, f)
695
696
696 def file(self, f):
697 def file(self, f):
697 if f[0] == '/':
698 if f[0] == '/':
698 f = f[1:]
699 f = f[1:]
699 return filelog.filelog(self.sopener, f)
700 return filelog.filelog(self.sopener, f)
700
701
701 def changectx(self, changeid):
702 def changectx(self, changeid):
702 return self[changeid]
703 return self[changeid]
703
704
704 def parents(self, changeid=None):
705 def parents(self, changeid=None):
705 '''get list of changectxs for parents of changeid'''
706 '''get list of changectxs for parents of changeid'''
706 return self[changeid].parents()
707 return self[changeid].parents()
707
708
708 def setparents(self, p1, p2=nullid):
709 def setparents(self, p1, p2=nullid):
709 copies = self.dirstate.setparents(p1, p2)
710 copies = self.dirstate.setparents(p1, p2)
710 pctx = self[p1]
711 pctx = self[p1]
711 if copies:
712 if copies:
712 # Adjust copy records, the dirstate cannot do it, it
713 # Adjust copy records, the dirstate cannot do it, it
713 # requires access to parents manifests. Preserve them
714 # requires access to parents manifests. Preserve them
714 # only for entries added to first parent.
715 # only for entries added to first parent.
715 for f in copies:
716 for f in copies:
716 if f not in pctx and copies[f] in pctx:
717 if f not in pctx and copies[f] in pctx:
717 self.dirstate.copy(copies[f], f)
718 self.dirstate.copy(copies[f], f)
718 if p2 == nullid:
719 if p2 == nullid:
719 for f, s in sorted(self.dirstate.copies().items()):
720 for f, s in sorted(self.dirstate.copies().items()):
720 if f not in pctx and s not in pctx:
721 if f not in pctx and s not in pctx:
721 self.dirstate.copy(None, f)
722 self.dirstate.copy(None, f)
722
723
723 def filectx(self, path, changeid=None, fileid=None):
724 def filectx(self, path, changeid=None, fileid=None):
724 """changeid can be a changeset revision, node, or tag.
725 """changeid can be a changeset revision, node, or tag.
725 fileid can be a file revision or node."""
726 fileid can be a file revision or node."""
726 return context.filectx(self, path, changeid, fileid)
727 return context.filectx(self, path, changeid, fileid)
727
728
728 def getcwd(self):
729 def getcwd(self):
729 return self.dirstate.getcwd()
730 return self.dirstate.getcwd()
730
731
731 def pathto(self, f, cwd=None):
732 def pathto(self, f, cwd=None):
732 return self.dirstate.pathto(f, cwd)
733 return self.dirstate.pathto(f, cwd)
733
734
734 def wfile(self, f, mode='r'):
735 def wfile(self, f, mode='r'):
735 return self.wopener(f, mode)
736 return self.wopener(f, mode)
736
737
737 def _link(self, f):
738 def _link(self, f):
738 return self.wvfs.islink(f)
739 return self.wvfs.islink(f)
739
740
740 def _loadfilter(self, filter):
741 def _loadfilter(self, filter):
741 if filter not in self.filterpats:
742 if filter not in self.filterpats:
742 l = []
743 l = []
743 for pat, cmd in self.ui.configitems(filter):
744 for pat, cmd in self.ui.configitems(filter):
744 if cmd == '!':
745 if cmd == '!':
745 continue
746 continue
746 mf = matchmod.match(self.root, '', [pat])
747 mf = matchmod.match(self.root, '', [pat])
747 fn = None
748 fn = None
748 params = cmd
749 params = cmd
749 for name, filterfn in self._datafilters.iteritems():
750 for name, filterfn in self._datafilters.iteritems():
750 if cmd.startswith(name):
751 if cmd.startswith(name):
751 fn = filterfn
752 fn = filterfn
752 params = cmd[len(name):].lstrip()
753 params = cmd[len(name):].lstrip()
753 break
754 break
754 if not fn:
755 if not fn:
755 fn = lambda s, c, **kwargs: util.filter(s, c)
756 fn = lambda s, c, **kwargs: util.filter(s, c)
756 # Wrap old filters not supporting keyword arguments
757 # Wrap old filters not supporting keyword arguments
757 if not inspect.getargspec(fn)[2]:
758 if not inspect.getargspec(fn)[2]:
758 oldfn = fn
759 oldfn = fn
759 fn = lambda s, c, **kwargs: oldfn(s, c)
760 fn = lambda s, c, **kwargs: oldfn(s, c)
760 l.append((mf, fn, params))
761 l.append((mf, fn, params))
761 self.filterpats[filter] = l
762 self.filterpats[filter] = l
762 return self.filterpats[filter]
763 return self.filterpats[filter]
763
764
764 def _filter(self, filterpats, filename, data):
765 def _filter(self, filterpats, filename, data):
765 for mf, fn, cmd in filterpats:
766 for mf, fn, cmd in filterpats:
766 if mf(filename):
767 if mf(filename):
767 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
768 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
768 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
769 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
769 break
770 break
770
771
771 return data
772 return data
772
773
773 @unfilteredpropertycache
774 @unfilteredpropertycache
774 def _encodefilterpats(self):
775 def _encodefilterpats(self):
775 return self._loadfilter('encode')
776 return self._loadfilter('encode')
776
777
777 @unfilteredpropertycache
778 @unfilteredpropertycache
778 def _decodefilterpats(self):
779 def _decodefilterpats(self):
779 return self._loadfilter('decode')
780 return self._loadfilter('decode')
780
781
781 def adddatafilter(self, name, filter):
782 def adddatafilter(self, name, filter):
782 self._datafilters[name] = filter
783 self._datafilters[name] = filter
783
784
784 def wread(self, filename):
785 def wread(self, filename):
785 if self._link(filename):
786 if self._link(filename):
786 data = self.wvfs.readlink(filename)
787 data = self.wvfs.readlink(filename)
787 else:
788 else:
788 data = self.wopener.read(filename)
789 data = self.wopener.read(filename)
789 return self._filter(self._encodefilterpats, filename, data)
790 return self._filter(self._encodefilterpats, filename, data)
790
791
791 def wwrite(self, filename, data, flags):
792 def wwrite(self, filename, data, flags):
792 data = self._filter(self._decodefilterpats, filename, data)
793 data = self._filter(self._decodefilterpats, filename, data)
793 if 'l' in flags:
794 if 'l' in flags:
794 self.wopener.symlink(data, filename)
795 self.wopener.symlink(data, filename)
795 else:
796 else:
796 self.wopener.write(filename, data)
797 self.wopener.write(filename, data)
797 if 'x' in flags:
798 if 'x' in flags:
798 self.wvfs.setflags(filename, False, True)
799 self.wvfs.setflags(filename, False, True)
799
800
800 def wwritedata(self, filename, data):
801 def wwritedata(self, filename, data):
801 return self._filter(self._decodefilterpats, filename, data)
802 return self._filter(self._decodefilterpats, filename, data)
802
803
803 def transaction(self, desc):
804 def transaction(self, desc):
804 tr = self._transref and self._transref() or None
805 tr = self._transref and self._transref() or None
805 if tr and tr.running():
806 if tr and tr.running():
806 return tr.nest()
807 return tr.nest()
807
808
808 # abort here if the journal already exists
809 # abort here if the journal already exists
809 if self.svfs.exists("journal"):
810 if self.svfs.exists("journal"):
810 raise error.RepoError(
811 raise error.RepoError(
811 _("abandoned transaction found - run hg recover"))
812 _("abandoned transaction found - run hg recover"))
812
813
813 self._writejournal(desc)
814 self._writejournal(desc)
814 renames = [(vfs, x, undoname(x)) for vfs, x in self._journalfiles()]
815 renames = [(vfs, x, undoname(x)) for vfs, x in self._journalfiles()]
815
816
816 tr = transaction.transaction(self.ui.warn, self.sopener,
817 tr = transaction.transaction(self.ui.warn, self.sopener,
817 self.sjoin("journal"),
818 self.sjoin("journal"),
818 aftertrans(renames),
819 aftertrans(renames),
819 self.store.createmode)
820 self.store.createmode)
820 self._transref = weakref.ref(tr)
821 self._transref = weakref.ref(tr)
821 return tr
822 return tr
822
823
823 def _journalfiles(self):
824 def _journalfiles(self):
824 return ((self.svfs, 'journal'),
825 return ((self.svfs, 'journal'),
825 (self.vfs, 'journal.dirstate'),
826 (self.vfs, 'journal.dirstate'),
826 (self.vfs, 'journal.branch'),
827 (self.vfs, 'journal.branch'),
827 (self.vfs, 'journal.desc'),
828 (self.vfs, 'journal.desc'),
828 (self.vfs, 'journal.bookmarks'),
829 (self.vfs, 'journal.bookmarks'),
829 (self.svfs, 'journal.phaseroots'))
830 (self.svfs, 'journal.phaseroots'))
830
831
831 def undofiles(self):
832 def undofiles(self):
832 return [vfs.join(undoname(x)) for vfs, x in self._journalfiles()]
833 return [vfs.join(undoname(x)) for vfs, x in self._journalfiles()]
833
834
834 def _writejournal(self, desc):
835 def _writejournal(self, desc):
835 self.opener.write("journal.dirstate",
836 self.opener.write("journal.dirstate",
836 self.opener.tryread("dirstate"))
837 self.opener.tryread("dirstate"))
837 self.opener.write("journal.branch",
838 self.opener.write("journal.branch",
838 encoding.fromlocal(self.dirstate.branch()))
839 encoding.fromlocal(self.dirstate.branch()))
839 self.opener.write("journal.desc",
840 self.opener.write("journal.desc",
840 "%d\n%s\n" % (len(self), desc))
841 "%d\n%s\n" % (len(self), desc))
841 self.opener.write("journal.bookmarks",
842 self.opener.write("journal.bookmarks",
842 self.opener.tryread("bookmarks"))
843 self.opener.tryread("bookmarks"))
843 self.sopener.write("journal.phaseroots",
844 self.sopener.write("journal.phaseroots",
844 self.sopener.tryread("phaseroots"))
845 self.sopener.tryread("phaseroots"))
845
846
846 def recover(self):
847 def recover(self):
847 lock = self.lock()
848 lock = self.lock()
848 try:
849 try:
849 if self.svfs.exists("journal"):
850 if self.svfs.exists("journal"):
850 self.ui.status(_("rolling back interrupted transaction\n"))
851 self.ui.status(_("rolling back interrupted transaction\n"))
851 transaction.rollback(self.sopener, self.sjoin("journal"),
852 transaction.rollback(self.sopener, self.sjoin("journal"),
852 self.ui.warn)
853 self.ui.warn)
853 self.invalidate()
854 self.invalidate()
854 return True
855 return True
855 else:
856 else:
856 self.ui.warn(_("no interrupted transaction available\n"))
857 self.ui.warn(_("no interrupted transaction available\n"))
857 return False
858 return False
858 finally:
859 finally:
859 lock.release()
860 lock.release()
860
861
861 def rollback(self, dryrun=False, force=False):
862 def rollback(self, dryrun=False, force=False):
862 wlock = lock = None
863 wlock = lock = None
863 try:
864 try:
864 wlock = self.wlock()
865 wlock = self.wlock()
865 lock = self.lock()
866 lock = self.lock()
866 if self.svfs.exists("undo"):
867 if self.svfs.exists("undo"):
867 return self._rollback(dryrun, force)
868 return self._rollback(dryrun, force)
868 else:
869 else:
869 self.ui.warn(_("no rollback information available\n"))
870 self.ui.warn(_("no rollback information available\n"))
870 return 1
871 return 1
871 finally:
872 finally:
872 release(lock, wlock)
873 release(lock, wlock)
873
874
874 @unfilteredmethod # Until we get smarter cache management
875 @unfilteredmethod # Until we get smarter cache management
875 def _rollback(self, dryrun, force):
876 def _rollback(self, dryrun, force):
876 ui = self.ui
877 ui = self.ui
877 try:
878 try:
878 args = self.opener.read('undo.desc').splitlines()
879 args = self.opener.read('undo.desc').splitlines()
879 (oldlen, desc, detail) = (int(args[0]), args[1], None)
880 (oldlen, desc, detail) = (int(args[0]), args[1], None)
880 if len(args) >= 3:
881 if len(args) >= 3:
881 detail = args[2]
882 detail = args[2]
882 oldtip = oldlen - 1
883 oldtip = oldlen - 1
883
884
884 if detail and ui.verbose:
885 if detail and ui.verbose:
885 msg = (_('repository tip rolled back to revision %s'
886 msg = (_('repository tip rolled back to revision %s'
886 ' (undo %s: %s)\n')
887 ' (undo %s: %s)\n')
887 % (oldtip, desc, detail))
888 % (oldtip, desc, detail))
888 else:
889 else:
889 msg = (_('repository tip rolled back to revision %s'
890 msg = (_('repository tip rolled back to revision %s'
890 ' (undo %s)\n')
891 ' (undo %s)\n')
891 % (oldtip, desc))
892 % (oldtip, desc))
892 except IOError:
893 except IOError:
893 msg = _('rolling back unknown transaction\n')
894 msg = _('rolling back unknown transaction\n')
894 desc = None
895 desc = None
895
896
896 if not force and self['.'] != self['tip'] and desc == 'commit':
897 if not force and self['.'] != self['tip'] and desc == 'commit':
897 raise util.Abort(
898 raise util.Abort(
898 _('rollback of last commit while not checked out '
899 _('rollback of last commit while not checked out '
899 'may lose data'), hint=_('use -f to force'))
900 'may lose data'), hint=_('use -f to force'))
900
901
901 ui.status(msg)
902 ui.status(msg)
902 if dryrun:
903 if dryrun:
903 return 0
904 return 0
904
905
905 parents = self.dirstate.parents()
906 parents = self.dirstate.parents()
906 self.destroying()
907 self.destroying()
907 transaction.rollback(self.sopener, self.sjoin('undo'), ui.warn)
908 transaction.rollback(self.sopener, self.sjoin('undo'), ui.warn)
908 if self.vfs.exists('undo.bookmarks'):
909 if self.vfs.exists('undo.bookmarks'):
909 self.vfs.rename('undo.bookmarks', 'bookmarks')
910 self.vfs.rename('undo.bookmarks', 'bookmarks')
910 if self.svfs.exists('undo.phaseroots'):
911 if self.svfs.exists('undo.phaseroots'):
911 self.svfs.rename('undo.phaseroots', 'phaseroots')
912 self.svfs.rename('undo.phaseroots', 'phaseroots')
912 self.invalidate()
913 self.invalidate()
913
914
914 parentgone = (parents[0] not in self.changelog.nodemap or
915 parentgone = (parents[0] not in self.changelog.nodemap or
915 parents[1] not in self.changelog.nodemap)
916 parents[1] not in self.changelog.nodemap)
916 if parentgone:
917 if parentgone:
917 self.vfs.rename('undo.dirstate', 'dirstate')
918 self.vfs.rename('undo.dirstate', 'dirstate')
918 try:
919 try:
919 branch = self.opener.read('undo.branch')
920 branch = self.opener.read('undo.branch')
920 self.dirstate.setbranch(encoding.tolocal(branch))
921 self.dirstate.setbranch(encoding.tolocal(branch))
921 except IOError:
922 except IOError:
922 ui.warn(_('named branch could not be reset: '
923 ui.warn(_('named branch could not be reset: '
923 'current branch is still \'%s\'\n')
924 'current branch is still \'%s\'\n')
924 % self.dirstate.branch())
925 % self.dirstate.branch())
925
926
926 self.dirstate.invalidate()
927 self.dirstate.invalidate()
927 parents = tuple([p.rev() for p in self.parents()])
928 parents = tuple([p.rev() for p in self.parents()])
928 if len(parents) > 1:
929 if len(parents) > 1:
929 ui.status(_('working directory now based on '
930 ui.status(_('working directory now based on '
930 'revisions %d and %d\n') % parents)
931 'revisions %d and %d\n') % parents)
931 else:
932 else:
932 ui.status(_('working directory now based on '
933 ui.status(_('working directory now based on '
933 'revision %d\n') % parents)
934 'revision %d\n') % parents)
934 # TODO: if we know which new heads may result from this rollback, pass
935 # TODO: if we know which new heads may result from this rollback, pass
935 # them to destroy(), which will prevent the branchhead cache from being
936 # them to destroy(), which will prevent the branchhead cache from being
936 # invalidated.
937 # invalidated.
937 self.destroyed()
938 self.destroyed()
938 return 0
939 return 0
939
940
def invalidatecaches(self):
    """Drop in-memory caches so they are recomputed on next access.

    Clears the tags cache, all branch caches (stored on the unfiltered
    repo) and, via invalidatevolatilesets(), the caches that depend on
    volatile revision sets.
    """
    if '_tagscache' in vars(self):
        # can't use delattr on proxy
        del self.__dict__['_tagscache']

    self.unfiltered()._branchcaches.clear()
    self.invalidatevolatilesets()
948
949
def invalidatevolatilesets(self):
    """Drop caches derived from volatile revision sets.

    Clears the per-filter revision cache and the obsolescence caches.
    """
    self.filteredrevcache.clear()
    obsolete.clearobscaches(self)
952
953
def invalidatedirstate(self):
    '''Invalidates the dirstate, causing the next call to dirstate
    to check if it was modified since the last time it was read,
    rereading it if it has.

    This is different to dirstate.invalidate() that it doesn't always
    rereads the dirstate. Use dirstate.invalidate() if you want to
    explicitly read the dirstate again (i.e. restoring it to a previous
    known good state).'''
    if hasunfilteredcache(self, 'dirstate'):
        # drop every filecache-backed attribute of the dirstate itself
        for k in self.dirstate._filecache:
            try:
                delattr(self.dirstate, k)
            except AttributeError:
                pass
        # then drop the cached dirstate property on the unfiltered repo
        delattr(self.unfiltered(), 'dirstate')
969
970
def invalidate(self):
    """Drop all cached properties registered in self._filecache.

    The dirstate is deliberately excluded -- it is invalidated
    separately by invalidatedirstate().  Finishes by clearing the
    other in-memory caches via invalidatecaches().
    """
    unfiltered = self.unfiltered() # all file caches are stored unfiltered
    for k in self._filecache:
        # dirstate is invalidated separately in invalidatedirstate()
        if k == 'dirstate':
            continue

        try:
            delattr(unfiltered, k)
        except AttributeError:
            # property was never instantiated; nothing cached to drop
            pass
    self.invalidatecaches()
982
983
983 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
984 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
984 try:
985 try:
985 l = lock.lock(lockname, 0, releasefn, desc=desc)
986 l = lock.lock(lockname, 0, releasefn, desc=desc)
986 except error.LockHeld, inst:
987 except error.LockHeld, inst:
987 if not wait:
988 if not wait:
988 raise
989 raise
989 self.ui.warn(_("waiting for lock on %s held by %r\n") %
990 self.ui.warn(_("waiting for lock on %s held by %r\n") %
990 (desc, inst.locker))
991 (desc, inst.locker))
991 # default to 600 seconds timeout
992 # default to 600 seconds timeout
992 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
993 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
993 releasefn, desc=desc)
994 releasefn, desc=desc)
994 if acquirefn:
995 if acquirefn:
995 acquirefn()
996 acquirefn()
996 return l
997 return l
997
998
def _afterlock(self, callback):
    """add a callback to the current repository lock.

    The callback will be executed on lock release."""
    l = self._lockref and self._lockref()
    if l:
        l.postrelease.append(callback)
    else:
        # no lock currently held: run the callback immediately
        callback()
1007
1008
def lock(self, wait=True):
    '''Lock the repository store (.hg/store) and return a weak reference
    to the lock. Use this before modifying the store (e.g. committing or
    stripping). If you are opening a transaction, get a lock as well.)'''
    # reuse an existing held lock if there is one
    l = self._lockref and self._lockref()
    if l is not None and l.held:
        l.lock()
        return l

    def unlock():
        # flush pending store and phase data before the lock is dropped
        self.store.write()
        if hasunfilteredcache(self, '_phasecache'):
            self._phasecache.write()
        for k, ce in self._filecache.items():
            if k == 'dirstate' or k not in self.__dict__:
                continue
            ce.refresh()

    l = self._lock(self.sjoin("lock"), wait, unlock,
                   self.invalidate, _('repository %s') % self.origroot)
    # only a weak reference is kept so the lock dies with its last owner
    self._lockref = weakref.ref(l)
    return l
1030
1031
def wlock(self, wait=True):
    '''Lock the non-store parts of the repository (everything under
    .hg except .hg/store) and return a weak reference to the lock.
    Use this before modifying files in .hg.'''
    # reuse an existing held lock if there is one
    l = self._wlockref and self._wlockref()
    if l is not None and l.held:
        l.lock()
        return l

    def unlock():
        # flush the dirstate and refresh its filecache entry on release
        self.dirstate.write()
        self._filecache['dirstate'].refresh()

    l = self._lock(self.join("wlock"), wait, unlock,
                   self.invalidatedirstate, _('working directory of %s') %
                   self.origroot)
    # only a weak reference is kept so the lock dies with its last owner
    self._wlockref = weakref.ref(l)
    return l
1049
1050
def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
    """
    commit an individual file as part of a larger transaction

    Returns the filelog node for the (possibly new) file revision and,
    when the file content, copy metadata or merge parents changed,
    appends the file name to 'changelist'.
    """

    fname = fctx.path()
    text = fctx.data()
    flog = self.file(fname)
    fparent1 = manifest1.get(fname, nullid)
    fparent2 = fparent2o = manifest2.get(fname, nullid)

    meta = {}
    copy = fctx.renamed()
    if copy and copy[0] != fname:
        # Mark the new revision of this file as a copy of another
        # file.  This copy data will effectively act as a parent
        # of this new revision.  If this is a merge, the first
        # parent will be the nullid (meaning "look up the copy data")
        # and the second one will be the other parent.  For example:
        #
        # 0 --- 1 --- 3   rev1 changes file foo
        #   \       /     rev2 renames foo to bar and changes it
        #    \- 2 -/      rev3 should have bar with all changes and
        #                      should record that bar descends from
        #                      bar in rev2 and foo in rev1
        #
        # this allows this merge to succeed:
        #
        # 0 --- 1 --- 3   rev4 reverts the content change from rev2
        #   \       /     merging rev3 and rev4 should use bar@rev2
        #    \- 2 --- 4        as the merge base
        #

        cfname = copy[0]
        crev = manifest1.get(cfname)
        newfparent = fparent2

        if manifest2: # branch merge
            if fparent2 == nullid or crev is None: # copied on remote side
                if cfname in manifest2:
                    crev = manifest2[cfname]
                    newfparent = fparent1

        # find source in nearest ancestor if we've lost track
        if not crev:
            self.ui.debug(" %s: searching for copy revision for %s\n" %
                          (fname, cfname))
            for ancestor in self[None].ancestors():
                if cfname in ancestor:
                    crev = ancestor[cfname].filenode()
                    break

        if crev:
            self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
            meta["copy"] = cfname
            meta["copyrev"] = hex(crev)
            fparent1, fparent2 = nullid, newfparent
        else:
            self.ui.warn(_("warning: can't find ancestor for '%s' "
                           "copied from '%s'!\n") % (fname, cfname))

    elif fparent2 != nullid:
        # is one parent an ancestor of the other?
        fparentancestor = flog.ancestor(fparent1, fparent2)
        if fparentancestor == fparent1:
            fparent1, fparent2 = fparent2, nullid
        elif fparentancestor == fparent2:
            fparent2 = nullid

    # is the file changed?
    if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
        changelist.append(fname)
        return flog.add(text, meta, tr, linkrev, fparent1, fparent2)

    # are just the flags changed during merge?
    if fparent1 != fparent2o and manifest1.flags(fname) != fctx.flags():
        changelist.append(fname)

    return fparent1
1129
1130
@unfilteredmethod
def commit(self, text="", user=None, date=None, match=None, force=False,
           editor=False, extra={}):
    """Add a new revision to current repository.

    Revision information is gathered from the working directory,
    match can be used to filter the committed files. If editor is
    supplied, it is called to get a commit message.

    Returns the new changeset node, or None when there was nothing
    to commit.  Raises util.Abort on user errors (partial merge
    commit, unresolved conflicts, excluded subrepos, ...).
    """

    def fail(f, msg):
        raise util.Abort('%s: %s' % (f, msg))

    if not match:
        match = matchmod.always(self.root, '')

    if not force:
        vdirs = []
        match.explicitdir = vdirs.append
        match.bad = fail

    wlock = self.wlock()
    try:
        wctx = self[None]
        merge = len(wctx.parents()) > 1

        if (not force and merge and match and
            (match.files() or match.anypats())):
            raise util.Abort(_('cannot partially commit a merge '
                               '(do not specify files or patterns)'))

        changes = self.status(match=match, clean=force)
        if force:
            changes[0].extend(changes[6]) # mq may commit unchanged files

        # check subrepos
        subs = []
        commitsubs = set()
        newstate = wctx.substate.copy()
        # only manage subrepos and .hgsubstate if .hgsub is present
        if '.hgsub' in wctx:
            # we'll decide whether to track this ourselves, thanks
            if '.hgsubstate' in changes[0]:
                changes[0].remove('.hgsubstate')
            if '.hgsubstate' in changes[2]:
                changes[2].remove('.hgsubstate')

            # compare current state to last committed state
            # build new substate based on last committed state
            oldstate = wctx.p1().substate
            for s in sorted(newstate.keys()):
                if not match(s):
                    # ignore working copy, use old state if present
                    if s in oldstate:
                        newstate[s] = oldstate[s]
                        continue
                    if not force:
                        raise util.Abort(
                            _("commit with new subrepo %s excluded") % s)
                if wctx.sub(s).dirty(True):
                    if not self.ui.configbool('ui', 'commitsubrepos'):
                        raise util.Abort(
                            _("uncommitted changes in subrepo %s") % s,
                            hint=_("use --subrepos for recursive commit"))
                    subs.append(s)
                    commitsubs.add(s)
                else:
                    bs = wctx.sub(s).basestate()
                    newstate[s] = (newstate[s][0], bs, newstate[s][2])
                    if oldstate.get(s, (None, None, None))[1] != bs:
                        subs.append(s)

            # check for removed subrepos
            for p in wctx.parents():
                r = [s for s in p.substate if s not in newstate]
                subs += [s for s in r if match(s)]
            if subs:
                if (not match('.hgsub') and
                    '.hgsub' in (wctx.modified() + wctx.added())):
                    raise util.Abort(
                        _("can't commit subrepos without .hgsub"))
                changes[0].insert(0, '.hgsubstate')

        elif '.hgsub' in changes[2]:
            # clean up .hgsubstate when .hgsub is removed
            if ('.hgsubstate' in wctx and
                '.hgsubstate' not in changes[0] + changes[1] + changes[2]):
                changes[2].insert(0, '.hgsubstate')

        # make sure all explicit patterns are matched
        if not force and match.files():
            matched = set(changes[0] + changes[1] + changes[2])

            for f in match.files():
                f = self.dirstate.normalize(f)
                if f == '.' or f in matched or f in wctx.substate:
                    continue
                if f in changes[3]: # missing
                    fail(f, _('file not found!'))
                if f in vdirs: # visited directory
                    d = f + '/'
                    for mf in matched:
                        if mf.startswith(d):
                            break
                    else:
                        fail(f, _("no match under directory!"))
                elif f not in self.dirstate:
                    fail(f, _("file not tracked!"))

        cctx = context.workingctx(self, text, user, date, extra, changes)

        # nothing to commit: no file changes, no branch change, no close
        if (not force and not extra.get("close") and not merge
            and not cctx.files()
            and wctx.branch() == wctx.p1().branch()):
            return None

        if merge and cctx.deleted():
            raise util.Abort(_("cannot commit merge with missing files"))

        ms = mergemod.mergestate(self)
        for f in changes[0]:
            if f in ms and ms[f] == 'u':
                raise util.Abort(_("unresolved merge conflicts "
                                   "(see hg help resolve)"))

        if editor:
            cctx._text = editor(self, cctx, subs)
        edited = (text != cctx._text)

        # commit subs and write new state
        if subs:
            for s in sorted(commitsubs):
                sub = wctx.sub(s)
                self.ui.status(_('committing subrepository %s\n') %
                               subrepo.subrelpath(sub))
                sr = sub.commit(cctx._text, user, date)
                newstate[s] = (newstate[s][0], sr)
            subrepo.writestate(self, newstate)

        # Save commit message in case this transaction gets rolled back
        # (e.g. by a pretxncommit hook).  Leave the content alone on
        # the assumption that the user will use the same editor again.
        msgfn = self.savecommitmessage(cctx._text)

        p1, p2 = self.dirstate.parents()
        hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
        try:
            self.hook("precommit", throw=True, parent1=hookp1,
                      parent2=hookp2)
            ret = self.commitctx(cctx, True)
        except: # re-raises
            if edited:
                self.ui.write(
                    _('note: commit message saved in %s\n') % msgfn)
            raise

        # update bookmarks, dirstate and mergestate
        bookmarks.update(self, [p1, p2], ret)
        cctx.markcommitted(ret)
        ms.reset()
    finally:
        wlock.release()

    def commithook(node=hex(ret), parent1=hookp1, parent2=hookp2):
        self.hook("commit", node=node, parent1=parent1, parent2=parent2)
    # the commit hook fires only after the lock is fully released
    self._afterlock(commithook)
    return ret
1297
1298
1298 @unfilteredmethod
1299 @unfilteredmethod
1299 def commitctx(self, ctx, error=False):
1300 def commitctx(self, ctx, error=False):
1300 """Add a new revision to current repository.
1301 """Add a new revision to current repository.
1301 Revision information is passed via the context argument.
1302 Revision information is passed via the context argument.
1302 """
1303 """
1303
1304
1304 tr = lock = None
1305 tr = lock = None
1305 removed = list(ctx.removed())
1306 removed = list(ctx.removed())
1306 p1, p2 = ctx.p1(), ctx.p2()
1307 p1, p2 = ctx.p1(), ctx.p2()
1307 user = ctx.user()
1308 user = ctx.user()
1308
1309
1309 lock = self.lock()
1310 lock = self.lock()
1310 try:
1311 try:
1311 tr = self.transaction("commit")
1312 tr = self.transaction("commit")
1312 trp = weakref.proxy(tr)
1313 trp = weakref.proxy(tr)
1313
1314
1314 if ctx.files():
1315 if ctx.files():
1315 m1 = p1.manifest().copy()
1316 m1 = p1.manifest().copy()
1316 m2 = p2.manifest()
1317 m2 = p2.manifest()
1317
1318
1318 # check in files
1319 # check in files
1319 new = {}
1320 new = {}
1320 changed = []
1321 changed = []
1321 linkrev = len(self)
1322 linkrev = len(self)
1322 for f in sorted(ctx.modified() + ctx.added()):
1323 for f in sorted(ctx.modified() + ctx.added()):
1323 self.ui.note(f + "\n")
1324 self.ui.note(f + "\n")
1324 try:
1325 try:
1325 fctx = ctx[f]
1326 fctx = ctx[f]
1326 new[f] = self._filecommit(fctx, m1, m2, linkrev, trp,
1327 new[f] = self._filecommit(fctx, m1, m2, linkrev, trp,
1327 changed)
1328 changed)
1328 m1.set(f, fctx.flags())
1329 m1.set(f, fctx.flags())
1329 except OSError, inst:
1330 except OSError, inst:
1330 self.ui.warn(_("trouble committing %s!\n") % f)
1331 self.ui.warn(_("trouble committing %s!\n") % f)
1331 raise
1332 raise
1332 except IOError, inst:
1333 except IOError, inst:
1333 errcode = getattr(inst, 'errno', errno.ENOENT)
1334 errcode = getattr(inst, 'errno', errno.ENOENT)
1334 if error or errcode and errcode != errno.ENOENT:
1335 if error or errcode and errcode != errno.ENOENT:
1335 self.ui.warn(_("trouble committing %s!\n") % f)
1336 self.ui.warn(_("trouble committing %s!\n") % f)
1336 raise
1337 raise
1337 else:
1338 else:
1338 removed.append(f)
1339 removed.append(f)
1339
1340
1340 # update manifest
1341 # update manifest
1341 m1.update(new)
1342 m1.update(new)
1342 removed = [f for f in sorted(removed) if f in m1 or f in m2]
1343 removed = [f for f in sorted(removed) if f in m1 or f in m2]
1343 drop = [f for f in removed if f in m1]
1344 drop = [f for f in removed if f in m1]
1344 for f in drop:
1345 for f in drop:
1345 del m1[f]
1346 del m1[f]
1346 mn = self.manifest.add(m1, trp, linkrev, p1.manifestnode(),
1347 mn = self.manifest.add(m1, trp, linkrev, p1.manifestnode(),
1347 p2.manifestnode(), (new, drop))
1348 p2.manifestnode(), (new, drop))
1348 files = changed + removed
1349 files = changed + removed
1349 else:
1350 else:
1350 mn = p1.manifestnode()
1351 mn = p1.manifestnode()
1351 files = []
1352 files = []
1352
1353
1353 # update changelog
1354 # update changelog
1354 self.changelog.delayupdate()
1355 self.changelog.delayupdate()
1355 n = self.changelog.add(mn, files, ctx.description(),
1356 n = self.changelog.add(mn, files, ctx.description(),
1356 trp, p1.node(), p2.node(),
1357 trp, p1.node(), p2.node(),
1357 user, ctx.date(), ctx.extra().copy())
1358 user, ctx.date(), ctx.extra().copy())
1358 p = lambda: self.changelog.writepending() and self.root or ""
1359 p = lambda: self.changelog.writepending() and self.root or ""
1359 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
1360 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
1360 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
1361 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
1361 parent2=xp2, pending=p)
1362 parent2=xp2, pending=p)
1362 self.changelog.finalize(trp)
1363 self.changelog.finalize(trp)
1363 # set the new commit is proper phase
1364 # set the new commit is proper phase
1364 targetphase = phases.newcommitphase(self.ui)
1365 targetphase = phases.newcommitphase(self.ui)
1365 if targetphase:
1366 if targetphase:
1366 # retract boundary do not alter parent changeset.
1367 # retract boundary do not alter parent changeset.
1367 # if a parent have higher the resulting phase will
1368 # if a parent have higher the resulting phase will
1368 # be compliant anyway
1369 # be compliant anyway
1369 #
1370 #
1370 # if minimal phase was 0 we don't need to retract anything
1371 # if minimal phase was 0 we don't need to retract anything
1371 phases.retractboundary(self, targetphase, [n])
1372 phases.retractboundary(self, targetphase, [n])
1372 tr.close()
1373 tr.close()
1373 branchmap.updatecache(self.filtered('served'))
1374 branchmap.updatecache(self.filtered('served'))
1374 return n
1375 return n
1375 finally:
1376 finally:
1376 if tr:
1377 if tr:
1377 tr.release()
1378 tr.release()
1378 lock.release()
1379 lock.release()
1379
1380
@unfilteredmethod
def destroying(self):
    '''Inform the repository that nodes are about to be destroyed.
    Intended for use by strip and rollback, so there's a common
    place for anything that has to be done before destroying history.

    This is mostly useful for saving state that is in memory and waiting
    to be flushed when the current lock is released. Because a call to
    destroyed is imminent, the repo will be invalidated causing those
    changes to stay in memory (waiting for the next unlock), or vanish
    completely.
    '''
    # When using the same lock to commit and strip, the phasecache is left
    # dirty after committing. Then when we strip, the repo is invalidated,
    # causing those changes to disappear.
    if '_phasecache' in vars(self):
        self._phasecache.write()
1397
1398
@unfilteredmethod
def destroyed(self):
    '''Inform the repository that nodes have been destroyed.
    Intended for use by strip and rollback, so there's a common
    place for anything that has to be done after destroying history.
    '''
    # When one tries to:
    # 1) destroy nodes thus calling this method (e.g. strip)
    # 2) use phasecache somewhere (e.g. commit)
    #
    # then 2) will fail because the phasecache contains nodes that were
    # removed. We can either remove phasecache from the filecache,
    # causing it to reload next time it is accessed, or simply filter
    # the removed nodes now and write the updated cache.
    self._phasecache.filterunknown(self)
    self._phasecache.write()

    # update the 'served' branch cache to help read only server process
    # Thanks to branchcache collaboration this is done from the nearest
    # filtered subset and it is expected to be fast.
    branchmap.updatecache(self.filtered('served'))

    # Ensure the persistent tag cache is updated. Doing it now
    # means that the tag cache only has to worry about destroyed
    # heads immediately after a strip/rollback. That in turn
    # guarantees that "cachetip == currenttip" (comparing both rev
    # and node) always means no nodes have been added or destroyed.

    # XXX this is suboptimal when qrefresh'ing: we strip the current
    # head, refresh the tag cache, then immediately add a new head.
    # But I think doing it this way is necessary for the "instant
    # tag cache retrieval" case to work.
    self.invalidate()
1431
1432
def walk(self, match, node=None):
    '''
    walk recursively through the directory tree or a given
    changeset, finding all files matched by the match
    function

    If node is None the working-directory context is used
    (via self[None]).
    '''
    # delegate entirely to the changectx's own walk
    return self[node].walk(match)
1439
1440
def status(self, node1='.', node2=None, match=None,
           ignored=False, clean=False, unknown=False,
           listsubrepos=False):
    """return status of files between two nodes or node and working
    directory.

    If node1 is None, use the first dirstate parent instead.
    If node2 is None, compare node1 with working directory.

    Returns a 7-tuple of lists (each sorted in place before return):
    (modified, added, removed, deleted, unknown, ignored, clean).
    """

    def mfmatches(ctx):
        # manifest of ctx restricted to the files accepted by 'match'
        mf = ctx.manifest().copy()
        if match.always():
            return mf
        for fn in mf.keys():
            if not match(fn):
                del mf[fn]
        return mf

    if isinstance(node1, context.changectx):
        ctx1 = node1
    else:
        ctx1 = self[node1]
    if isinstance(node2, context.changectx):
        ctx2 = node2
    else:
        ctx2 = self[node2]

    working = ctx2.rev() is None
    parentworking = working and ctx1 == self['.']
    match = match or matchmod.always(self.root, self.getcwd())
    listignored, listclean, listunknown = ignored, clean, unknown

    # load earliest manifest first for caching reasons
    if not working and ctx2.rev() < ctx1.rev():
        ctx2.manifest()

    if not parentworking:
        def bad(f, msg):
            # 'f' may be a directory pattern from 'match.files()',
            # so 'f not in ctx1' is not enough
            if f not in ctx1 and f not in ctx1.dirs():
                self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
        match.bad = bad

    if working: # we need to scan the working dir
        subrepos = []
        if '.hgsub' in self.dirstate:
            subrepos = sorted(ctx2.substate)
        s = self.dirstate.status(match, subrepos, listignored,
                                 listclean, listunknown)
        cmp, modified, added, removed, deleted, unknown, ignored, clean = s

        # check for any possibly clean files
        if parentworking and cmp:
            fixup = []
            # do a full compare of any files that might have changed
            for f in sorted(cmp):
                if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
                    or ctx1[f].cmp(ctx2[f])):
                    modified.append(f)
                else:
                    fixup.append(f)

            # update dirstate for files that are actually clean
            if fixup:
                if listclean:
                    clean += fixup

                try:
                    # updating the dirstate is optional
                    # so we don't wait on the lock
                    wlock = self.wlock(False)
                    try:
                        for f in fixup:
                            self.dirstate.normal(f)
                    finally:
                        wlock.release()
                except error.LockError:
                    # best-effort: skip the dirstate update if locked
                    pass

    if not parentworking:
        mf1 = mfmatches(ctx1)
        if working:
            # we are comparing working dir against non-parent
            # generate a pseudo-manifest for the working dir
            mf2 = mfmatches(self['.'])
            for f in cmp + modified + added:
                mf2[f] = None
                mf2.set(f, ctx2.flags(f))
            for f in removed:
                if f in mf2:
                    del mf2[f]
        else:
            # we are comparing two revisions
            deleted, unknown, ignored = [], [], []
            mf2 = mfmatches(ctx2)

        modified, added, clean = [], [], []
        withflags = mf1.withflags() | mf2.withflags()
        for fn, mf2node in mf2.iteritems():
            if fn in mf1:
                if (fn not in deleted and
                    ((fn in withflags and mf1.flags(fn) != mf2.flags(fn)) or
                     (mf1[fn] != mf2node and
                      (mf2node or ctx1[fn].cmp(ctx2[fn]))))):
                    modified.append(fn)
                elif listclean:
                    clean.append(fn)
                # consume mf1 entries; leftovers below are 'removed'
                del mf1[fn]
            elif fn not in deleted:
                added.append(fn)
        removed = mf1.keys()

    if working and modified and not self.dirstate._checklink:
        # Symlink placeholders may get non-symlink-like contents
        # via user error or dereferencing by NFS or Samba servers,
        # so we filter out any placeholders that don't look like a
        # symlink
        sane = []
        for f in modified:
            if ctx2.flags(f) == 'l':
                d = ctx2[f].data()
                if len(d) >= 1024 or '\n' in d or util.binary(d):
                    self.ui.debug('ignoring suspect symlink placeholder'
                                  ' "%s"\n' % f)
                    continue
            sane.append(f)
        modified = sane

    r = modified, added, removed, deleted, unknown, ignored, clean

    if listsubrepos:
        for subpath, sub in subrepo.itersubrepos(ctx1, ctx2):
            if working:
                rev2 = None
            else:
                rev2 = ctx2.substate[subpath][1]
            try:
                submatch = matchmod.narrowmatcher(subpath, match)
                s = sub.status(rev2, match=submatch, ignored=listignored,
                               clean=listclean, unknown=listunknown,
                               listsubrepos=True)
                for rfiles, sfiles in zip(r, s):
                    rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
            except error.LookupError:
                self.ui.status(_("skipping missing subrepository: %s\n")
                               % subpath)

    for l in r:
        l.sort()
    return r
1592
1593
def heads(self, start=None):
    '''Return the repository heads as reported by the changelog
    (optional 'start' is passed straight through to changelog.heads),
    sorted by revision number, highest revision first.'''
    heads = self.changelog.heads(start)
    # sort the output in rev descending order
    return sorted(heads, key=self.changelog.rev, reverse=True)
1597
1598
def branchheads(self, branch=None, start=None, closed=False):
    '''return a (possibly filtered) list of heads for the given branch

    Heads are returned in topological order, from newest to oldest.
    If branch is None, use the dirstate branch.
    If start is not None, return only heads reachable from start.
    If closed is True, return heads that are marked as closed as well.
    '''
    if branch is None:
        branch = self[None].branch()
    branches = self.branchmap()
    if branch not in branches:
        # unknown branch: no heads at all
        return []
    # the cache returns heads ordered lowest to highest
    bheads = list(reversed(branches[branch]))
    if start is not None:
        # filter out the heads that cannot be reached from startrev
        fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
        bheads = [h for h in bheads if h in fbheads]
    if not closed:
        bheads = [h for h in bheads if not self[h].closesbranch()]
    return bheads
1620
1621
def branches(self, nodes):
    '''For each requested node, walk first parents back until a merge
    (second parent is not null) or a root (first parent is null) is
    reached, and return a list of (tip-most node, stop node, parent1,
    parent2) tuples.  Defaults to the changelog tip if no nodes given.'''
    if not nodes:
        nodes = [self.changelog.tip()]
    b = []
    for n in nodes:
        t = n
        while True:
            p = self.changelog.parents(n)
            # stop at a merge or at a root changeset
            if p[1] != nullid or p[0] == nullid:
                b.append((t, n, p[0], p[1]))
                break
            n = p[0]
    return b
1634
1635
def between(self, pairs):
    '''For each (top, bottom) pair, walk the first-parent chain from
    top towards bottom and collect nodes at exponentially increasing
    distances (1, 2, 4, ...).  Returns a list with one such node list
    per input pair.'''
    r = []

    for top, bottom in pairs:
        n, l, i = top, [], 0
        f = 1  # next sampling distance; doubles each time we record

        while n != bottom and n != nullid:
            p = self.changelog.parents(n)[0]
            if i == f:
                l.append(n)
                f = f * 2
            n = p
            i += 1

        r.append(l)

    return r
1653
1654
def pull(self, remote, heads=None, force=False):
    '''Pull changesets (optionally limited to 'heads') from 'remote'
    into this repository, then synchronize phase and obsolescence data.

    Returns the result of addchangegroup(), or 0 when nothing to fetch.
    '''
    # don't open transaction for nothing or you break future useful
    # rollback call
    tr = None
    trname = 'pull\n' + util.hidepassword(remote.url())
    lock = self.lock()
    try:
        tmp = discovery.findcommonincoming(self, remote, heads=heads,
                                           force=force)
        common, fetch, rheads = tmp
        if not fetch:
            self.ui.status(_("no changes found\n"))
            added = []
            result = 0
        else:
            tr = self.transaction(trname)
            if heads is None and list(common) == [nullid]:
                self.ui.status(_("requesting all changes\n"))
            elif heads is None and remote.capable('changegroupsubset'):
                # issue1320, avoid a race if remote changed after discovery
                heads = rheads

            if remote.capable('getbundle'):
                # TODO: get bundlecaps from remote
                cg = remote.getbundle('pull', common=common,
                                      heads=heads or rheads)
            elif heads is None:
                cg = remote.changegroup(fetch, 'pull')
            elif not remote.capable('changegroupsubset'):
                raise util.Abort(_("partial pull cannot be done because "
                                   "other repository doesn't support "
                                   "changegroupsubset."))
            else:
                cg = remote.changegroupsubset(fetch, heads, 'pull')
            # we use unfiltered changelog here because hidden revision must
            # be taken in account for phase synchronization. They may
            # becomes public and becomes visible again.
            cl = self.unfiltered().changelog
            clstart = len(cl)
            result = self.addchangegroup(cg, 'pull', remote.url())
            clend = len(cl)
            added = [cl.node(r) for r in xrange(clstart, clend)]

        # compute target subset
        if heads is None:
            # We pulled every thing possible
            # sync on everything common
            subset = common + added
        else:
            # We pulled a specific subset
            # sync on this subset
            subset = heads

        # Get remote phases data from remote
        remotephases = remote.listkeys('phases')
        publishing = bool(remotephases.get('publishing', False))
        if remotephases and not publishing:
            # remote is new and unpublishing
            pheads, _dr = phases.analyzeremotephases(self, subset,
                                                     remotephases)
            phases.advanceboundary(self, phases.public, pheads)
            phases.advanceboundary(self, phases.draft, subset)
        else:
            # Remote is old or publishing all common changesets
            # should be seen as public
            phases.advanceboundary(self, phases.public, subset)

        def gettransaction():
            # reuse the pull transaction if one was already opened
            if tr is None:
                return self.transaction(trname)
            return tr

        obstr = obsolete.syncpull(self, remote, gettransaction)
        if obstr is not None:
            tr = obstr

        if tr is not None:
            tr.close()
    finally:
        if tr is not None:
            tr.release()
        lock.release()

    return result
1737
1739
def checkpush(self, force, revs):
    """Extensions can override this function if additional checks have
    to be performed before pushing, or call it if they override push
    command.

    The base implementation intentionally does nothing and returns None.
    """
    pass
1744
1746
1745 def push(self, remote, force=False, revs=None, newbranch=False):
1747 def push(self, remote, force=False, revs=None, newbranch=False):
1746 '''Push outgoing changesets (limited by revs) from the current
1748 '''Push outgoing changesets (limited by revs) from the current
1747 repository to remote. Return an integer:
1749 repository to remote. Return an integer:
1748 - None means nothing to push
1750 - None means nothing to push
1749 - 0 means HTTP error
1751 - 0 means HTTP error
1750 - 1 means we pushed and remote head count is unchanged *or*
1752 - 1 means we pushed and remote head count is unchanged *or*
1751 we have outgoing changesets but refused to push
1753 we have outgoing changesets but refused to push
1752 - other values as described by addchangegroup()
1754 - other values as described by addchangegroup()
1753 '''
1755 '''
1754 # there are two ways to push to remote repo:
1756 # there are two ways to push to remote repo:
1755 #
1757 #
1756 # addchangegroup assumes local user can lock remote
1758 # addchangegroup assumes local user can lock remote
1757 # repo (local filesystem, old ssh servers).
1759 # repo (local filesystem, old ssh servers).
1758 #
1760 #
1759 # unbundle assumes local user cannot lock remote repo (new ssh
1761 # unbundle assumes local user cannot lock remote repo (new ssh
1760 # servers, http servers).
1762 # servers, http servers).
1761
1763
1762 if not remote.canpush():
1764 if not remote.canpush():
1763 raise util.Abort(_("destination does not support push"))
1765 raise util.Abort(_("destination does not support push"))
1764 unfi = self.unfiltered()
1766 unfi = self.unfiltered()
1765 def localphasemove(nodes, phase=phases.public):
1767 def localphasemove(nodes, phase=phases.public):
1766 """move <nodes> to <phase> in the local source repo"""
1768 """move <nodes> to <phase> in the local source repo"""
1767 if locallock is not None:
1769 if locallock is not None:
1768 phases.advanceboundary(self, phase, nodes)
1770 phases.advanceboundary(self, phase, nodes)
1769 else:
1771 else:
1770 # repo is not locked, do not change any phases!
1772 # repo is not locked, do not change any phases!
1771 # Informs the user that phases should have been moved when
1773 # Informs the user that phases should have been moved when
1772 # applicable.
1774 # applicable.
1773 actualmoves = [n for n in nodes if phase < self[n].phase()]
1775 actualmoves = [n for n in nodes if phase < self[n].phase()]
1774 phasestr = phases.phasenames[phase]
1776 phasestr = phases.phasenames[phase]
1775 if actualmoves:
1777 if actualmoves:
1776 self.ui.status(_('cannot lock source repo, skipping local'
1778 self.ui.status(_('cannot lock source repo, skipping local'
1777 ' %s phase update\n') % phasestr)
1779 ' %s phase update\n') % phasestr)
1778 # get local lock as we might write phase data
1780 # get local lock as we might write phase data
1779 locallock = None
1781 locallock = None
1780 try:
1782 try:
1781 locallock = self.lock()
1783 locallock = self.lock()
1782 except IOError, err:
1784 except IOError, err:
1783 if err.errno != errno.EACCES:
1785 if err.errno != errno.EACCES:
1784 raise
1786 raise
1785 # source repo cannot be locked.
1787 # source repo cannot be locked.
1786 # We do not abort the push, but just disable the local phase
1788 # We do not abort the push, but just disable the local phase
1787 # synchronisation.
1789 # synchronisation.
1788 msg = 'cannot lock source repository: %s\n' % err
1790 msg = 'cannot lock source repository: %s\n' % err
1789 self.ui.debug(msg)
1791 self.ui.debug(msg)
1790 try:
1792 try:
1791 self.checkpush(force, revs)
1793 self.checkpush(force, revs)
1792 lock = None
1794 lock = None
1793 unbundle = remote.capable('unbundle')
1795 unbundle = remote.capable('unbundle')
1794 if not unbundle:
1796 if not unbundle:
1795 lock = remote.lock()
1797 lock = remote.lock()
1796 try:
1798 try:
1797 # discovery
1799 # discovery
1798 fci = discovery.findcommonincoming
1800 fci = discovery.findcommonincoming
1799 commoninc = fci(unfi, remote, force=force)
1801 commoninc = fci(unfi, remote, force=force)
1800 common, inc, remoteheads = commoninc
1802 common, inc, remoteheads = commoninc
1801 fco = discovery.findcommonoutgoing
1803 fco = discovery.findcommonoutgoing
1802 outgoing = fco(unfi, remote, onlyheads=revs,
1804 outgoing = fco(unfi, remote, onlyheads=revs,
1803 commoninc=commoninc, force=force)
1805 commoninc=commoninc, force=force)
1804
1806
1805
1807
1806 if not outgoing.missing:
1808 if not outgoing.missing:
1807 # nothing to push
1809 # nothing to push
1808 scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
1810 scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
1809 ret = None
1811 ret = None
1810 else:
1812 else:
1811 # something to push
1813 # something to push
1812 if not force:
1814 if not force:
1813 # if self.obsstore == False --> no obsolete
1815 # if self.obsstore == False --> no obsolete
1814 # then, save the iteration
1816 # then, save the iteration
1815 if unfi.obsstore:
1817 if unfi.obsstore:
1816 # this message are here for 80 char limit reason
1818 # this message are here for 80 char limit reason
1817 mso = _("push includes obsolete changeset: %s!")
1819 mso = _("push includes obsolete changeset: %s!")
1818 mst = "push includes %s changeset: %s!"
1820 mst = "push includes %s changeset: %s!"
1819 # plain versions for i18n tool to detect them
1821 # plain versions for i18n tool to detect them
1820 _("push includes unstable changeset: %s!")
1822 _("push includes unstable changeset: %s!")
1821 _("push includes bumped changeset: %s!")
1823 _("push includes bumped changeset: %s!")
1822 _("push includes divergent changeset: %s!")
1824 _("push includes divergent changeset: %s!")
1823 # If we are to push if there is at least one
1825 # If we are to push if there is at least one
1824 # obsolete or unstable changeset in missing, at
1826 # obsolete or unstable changeset in missing, at
1825 # least one of the missinghead will be obsolete or
1827 # least one of the missinghead will be obsolete or
1826 # unstable. So checking heads only is ok
1828 # unstable. So checking heads only is ok
1827 for node in outgoing.missingheads:
1829 for node in outgoing.missingheads:
1828 ctx = unfi[node]
1830 ctx = unfi[node]
1829 if ctx.obsolete():
1831 if ctx.obsolete():
1830 raise util.Abort(mso % ctx)
1832 raise util.Abort(mso % ctx)
1831 elif ctx.troubled():
1833 elif ctx.troubled():
1832 raise util.Abort(_(mst)
1834 raise util.Abort(_(mst)
1833 % (ctx.troubles()[0],
1835 % (ctx.troubles()[0],
1834 ctx))
1836 ctx))
1835 discovery.checkheads(unfi, remote, outgoing,
1837 discovery.checkheads(unfi, remote, outgoing,
1836 remoteheads, newbranch,
1838 remoteheads, newbranch,
1837 bool(inc))
1839 bool(inc))
1838
1840
1841 # TODO: get bundlecaps from remote
1842 bundlecaps = None
1839 # create a changegroup from local
1843 # create a changegroup from local
1840 if revs is None and not outgoing.excluded:
1844 if revs is None and not outgoing.excluded:
1841 # push everything,
1845 # push everything,
1842 # use the fast path, no race possible on push
1846 # use the fast path, no race possible on push
1843 bundler = changegroup.bundle10()
1847 bundler = changegroup.bundle10(bundlecaps)
1844 cg = self._changegroup(outgoing.missing, bundler,
1848 cg = self._changegroup(outgoing.missing, bundler,
1845 'push')
1849 'push')
1846 else:
1850 else:
1847 cg = self.getlocalbundle('push', outgoing)
1851 cg = self.getlocalbundle('push', outgoing, bundlecaps)
1848
1852
1849 # apply changegroup to remote
1853 # apply changegroup to remote
1850 if unbundle:
1854 if unbundle:
1851 # local repo finds heads on server, finds out what
1855 # local repo finds heads on server, finds out what
1852 # revs it must push. once revs transferred, if server
1856 # revs it must push. once revs transferred, if server
1853 # finds it has different heads (someone else won
1857 # finds it has different heads (someone else won
1854 # commit/push race), server aborts.
1858 # commit/push race), server aborts.
1855 if force:
1859 if force:
1856 remoteheads = ['force']
1860 remoteheads = ['force']
1857 # ssh: return remote's addchangegroup()
1861 # ssh: return remote's addchangegroup()
1858 # http: return remote's addchangegroup() or 0 for error
1862 # http: return remote's addchangegroup() or 0 for error
1859 ret = remote.unbundle(cg, remoteheads, 'push')
1863 ret = remote.unbundle(cg, remoteheads, 'push')
1860 else:
1864 else:
1861 # we return an integer indicating remote head count
1865 # we return an integer indicating remote head count
1862 # change
1866 # change
1863 ret = remote.addchangegroup(cg, 'push', self.url())
1867 ret = remote.addchangegroup(cg, 'push', self.url())
1864
1868
1865 if ret:
1869 if ret:
1866 # push succeed, synchronize target of the push
1870 # push succeed, synchronize target of the push
1867 cheads = outgoing.missingheads
1871 cheads = outgoing.missingheads
1868 elif revs is None:
1872 elif revs is None:
1869 # All out push fails. synchronize all common
1873 # All out push fails. synchronize all common
1870 cheads = outgoing.commonheads
1874 cheads = outgoing.commonheads
1871 else:
1875 else:
1872 # I want cheads = heads(::missingheads and ::commonheads)
1876 # I want cheads = heads(::missingheads and ::commonheads)
1873 # (missingheads is revs with secret changeset filtered out)
1877 # (missingheads is revs with secret changeset filtered out)
1874 #
1878 #
1875 # This can be expressed as:
1879 # This can be expressed as:
1876 # cheads = ( (missingheads and ::commonheads)
1880 # cheads = ( (missingheads and ::commonheads)
1877 # + (commonheads and ::missingheads))"
1881 # + (commonheads and ::missingheads))"
1878 # )
1882 # )
1879 #
1883 #
1880 # while trying to push we already computed the following:
1884 # while trying to push we already computed the following:
1881 # common = (::commonheads)
1885 # common = (::commonheads)
1882 # missing = ((commonheads::missingheads) - commonheads)
1886 # missing = ((commonheads::missingheads) - commonheads)
1883 #
1887 #
1884 # We can pick:
1888 # We can pick:
1885 # * missingheads part of common (::commonheads)
1889 # * missingheads part of common (::commonheads)
1886 common = set(outgoing.common)
1890 common = set(outgoing.common)
1887 cheads = [node for node in revs if node in common]
1891 cheads = [node for node in revs if node in common]
1888 # and
1892 # and
1889 # * commonheads parents on missing
1893 # * commonheads parents on missing
1890 revset = unfi.set('%ln and parents(roots(%ln))',
1894 revset = unfi.set('%ln and parents(roots(%ln))',
1891 outgoing.commonheads,
1895 outgoing.commonheads,
1892 outgoing.missing)
1896 outgoing.missing)
1893 cheads.extend(c.node() for c in revset)
1897 cheads.extend(c.node() for c in revset)
1894 # even when we don't push, exchanging phase data is useful
1898 # even when we don't push, exchanging phase data is useful
1895 remotephases = remote.listkeys('phases')
1899 remotephases = remote.listkeys('phases')
1896 if (self.ui.configbool('ui', '_usedassubrepo', False)
1900 if (self.ui.configbool('ui', '_usedassubrepo', False)
1897 and remotephases # server supports phases
1901 and remotephases # server supports phases
1898 and ret is None # nothing was pushed
1902 and ret is None # nothing was pushed
1899 and remotephases.get('publishing', False)):
1903 and remotephases.get('publishing', False)):
1900 # When:
1904 # When:
1901 # - this is a subrepo push
1905 # - this is a subrepo push
1902 # - and remote support phase
1906 # - and remote support phase
1903 # - and no changeset was pushed
1907 # - and no changeset was pushed
1904 # - and remote is publishing
1908 # - and remote is publishing
1905 # We may be in issue 3871 case!
1909 # We may be in issue 3871 case!
1906 # We drop the possible phase synchronisation done by
1910 # We drop the possible phase synchronisation done by
1907 # courtesy to publish changesets possibly locally draft
1911 # courtesy to publish changesets possibly locally draft
1908 # on the remote.
1912 # on the remote.
1909 remotephases = {'publishing': 'True'}
1913 remotephases = {'publishing': 'True'}
1910 if not remotephases: # old server or public only repo
1914 if not remotephases: # old server or public only repo
1911 localphasemove(cheads)
1915 localphasemove(cheads)
1912 # don't push any phase data as there is nothing to push
1916 # don't push any phase data as there is nothing to push
1913 else:
1917 else:
1914 ana = phases.analyzeremotephases(self, cheads, remotephases)
1918 ana = phases.analyzeremotephases(self, cheads, remotephases)
1915 pheads, droots = ana
1919 pheads, droots = ana
1916 ### Apply remote phase on local
1920 ### Apply remote phase on local
1917 if remotephases.get('publishing', False):
1921 if remotephases.get('publishing', False):
1918 localphasemove(cheads)
1922 localphasemove(cheads)
1919 else: # publish = False
1923 else: # publish = False
1920 localphasemove(pheads)
1924 localphasemove(pheads)
1921 localphasemove(cheads, phases.draft)
1925 localphasemove(cheads, phases.draft)
1922 ### Apply local phase on remote
1926 ### Apply local phase on remote
1923
1927
1924 # Get the list of all revs draft on remote by public here.
1928 # Get the list of all revs draft on remote by public here.
1925 # XXX Beware that revset break if droots is not strictly
1929 # XXX Beware that revset break if droots is not strictly
1926 # XXX root we may want to ensure it is but it is costly
1930 # XXX root we may want to ensure it is but it is costly
1927 outdated = unfi.set('heads((%ln::%ln) and public())',
1931 outdated = unfi.set('heads((%ln::%ln) and public())',
1928 droots, cheads)
1932 droots, cheads)
1929 for newremotehead in outdated:
1933 for newremotehead in outdated:
1930 r = remote.pushkey('phases',
1934 r = remote.pushkey('phases',
1931 newremotehead.hex(),
1935 newremotehead.hex(),
1932 str(phases.draft),
1936 str(phases.draft),
1933 str(phases.public))
1937 str(phases.public))
1934 if not r:
1938 if not r:
1935 self.ui.warn(_('updating %s to public failed!\n')
1939 self.ui.warn(_('updating %s to public failed!\n')
1936 % newremotehead)
1940 % newremotehead)
1937 self.ui.debug('try to push obsolete markers to remote\n')
1941 self.ui.debug('try to push obsolete markers to remote\n')
1938 obsolete.syncpush(self, remote)
1942 obsolete.syncpush(self, remote)
1939 finally:
1943 finally:
1940 if lock is not None:
1944 if lock is not None:
1941 lock.release()
1945 lock.release()
1942 finally:
1946 finally:
1943 if locallock is not None:
1947 if locallock is not None:
1944 locallock.release()
1948 locallock.release()
1945
1949
1946 self.ui.debug("checking for updated bookmarks\n")
1950 self.ui.debug("checking for updated bookmarks\n")
1947 rb = remote.listkeys('bookmarks')
1951 rb = remote.listkeys('bookmarks')
1948 for k in rb.keys():
1952 for k in rb.keys():
1949 if k in unfi._bookmarks:
1953 if k in unfi._bookmarks:
1950 nr, nl = rb[k], hex(self._bookmarks[k])
1954 nr, nl = rb[k], hex(self._bookmarks[k])
1951 if nr in unfi:
1955 if nr in unfi:
1952 cr = unfi[nr]
1956 cr = unfi[nr]
1953 cl = unfi[nl]
1957 cl = unfi[nl]
1954 if bookmarks.validdest(unfi, cr, cl):
1958 if bookmarks.validdest(unfi, cr, cl):
1955 r = remote.pushkey('bookmarks', k, nr, nl)
1959 r = remote.pushkey('bookmarks', k, nr, nl)
1956 if r:
1960 if r:
1957 self.ui.status(_("updating bookmark %s\n") % k)
1961 self.ui.status(_("updating bookmark %s\n") % k)
1958 else:
1962 else:
1959 self.ui.warn(_('updating bookmark %s'
1963 self.ui.warn(_('updating bookmark %s'
1960 ' failed!\n') % k)
1964 ' failed!\n') % k)
1961
1965
1962 return ret
1966 return ret
1963
1967
1964 def changegroupinfo(self, nodes, source):
1968 def changegroupinfo(self, nodes, source):
1965 if self.ui.verbose or source == 'bundle':
1969 if self.ui.verbose or source == 'bundle':
1966 self.ui.status(_("%d changesets found\n") % len(nodes))
1970 self.ui.status(_("%d changesets found\n") % len(nodes))
1967 if self.ui.debugflag:
1971 if self.ui.debugflag:
1968 self.ui.debug("list of changesets:\n")
1972 self.ui.debug("list of changesets:\n")
1969 for node in nodes:
1973 for node in nodes:
1970 self.ui.debug("%s\n" % hex(node))
1974 self.ui.debug("%s\n" % hex(node))
1971
1975
1972 def changegroupsubset(self, bases, heads, source):
1976 def changegroupsubset(self, bases, heads, source):
1973 """Compute a changegroup consisting of all the nodes that are
1977 """Compute a changegroup consisting of all the nodes that are
1974 descendants of any of the bases and ancestors of any of the heads.
1978 descendants of any of the bases and ancestors of any of the heads.
1975 Return a chunkbuffer object whose read() method will return
1979 Return a chunkbuffer object whose read() method will return
1976 successive changegroup chunks.
1980 successive changegroup chunks.
1977
1981
1978 It is fairly complex as determining which filenodes and which
1982 It is fairly complex as determining which filenodes and which
1979 manifest nodes need to be included for the changeset to be complete
1983 manifest nodes need to be included for the changeset to be complete
1980 is non-trivial.
1984 is non-trivial.
1981
1985
1982 Another wrinkle is doing the reverse, figuring out which changeset in
1986 Another wrinkle is doing the reverse, figuring out which changeset in
1983 the changegroup a particular filenode or manifestnode belongs to.
1987 the changegroup a particular filenode or manifestnode belongs to.
1984 """
1988 """
1985 cl = self.changelog
1989 cl = self.changelog
1986 if not bases:
1990 if not bases:
1987 bases = [nullid]
1991 bases = [nullid]
1988 csets, bases, heads = cl.nodesbetween(bases, heads)
1992 csets, bases, heads = cl.nodesbetween(bases, heads)
1989 # We assume that all ancestors of bases are known
1993 # We assume that all ancestors of bases are known
1990 common = cl.ancestors([cl.rev(n) for n in bases])
1994 common = cl.ancestors([cl.rev(n) for n in bases])
1991 bundler = changegroup.bundle10()
1995 bundler = changegroup.bundle10()
1992 return self._changegroupsubset(common, csets, heads, bundler, source)
1996 return self._changegroupsubset(common, csets, heads, bundler, source)
1993
1997
1994 def getlocalbundle(self, source, outgoing):
1998 def getlocalbundle(self, source, outgoing, bundlecaps=None):
1995 """Like getbundle, but taking a discovery.outgoing as an argument.
1999 """Like getbundle, but taking a discovery.outgoing as an argument.
1996
2000
1997 This is only implemented for local repos and reuses potentially
2001 This is only implemented for local repos and reuses potentially
1998 precomputed sets in outgoing."""
2002 precomputed sets in outgoing."""
1999 if not outgoing.missing:
2003 if not outgoing.missing:
2000 return None
2004 return None
2001 bundler = changegroup.bundle10()
2005 bundler = changegroup.bundle10(bundlecaps)
2002 return self._changegroupsubset(outgoing.common,
2006 return self._changegroupsubset(outgoing.common,
2003 outgoing.missing,
2007 outgoing.missing,
2004 outgoing.missingheads,
2008 outgoing.missingheads,
2005 bundler,
2009 bundler,
2006 source)
2010 source)
2007
2011
2008 def getbundle(self, source, heads=None, common=None):
2012 def getbundle(self, source, heads=None, common=None, bundlecaps=None):
2009 """Like changegroupsubset, but returns the set difference between the
2013 """Like changegroupsubset, but returns the set difference between the
2010 ancestors of heads and the ancestors common.
2014 ancestors of heads and the ancestors common.
2011
2015
2012 If heads is None, use the local heads. If common is None, use [nullid].
2016 If heads is None, use the local heads. If common is None, use [nullid].
2013
2017
2014 The nodes in common might not all be known locally due to the way the
2018 The nodes in common might not all be known locally due to the way the
2015 current discovery protocol works.
2019 current discovery protocol works.
2016 """
2020 """
2017 cl = self.changelog
2021 cl = self.changelog
2018 if common:
2022 if common:
2019 hasnode = cl.hasnode
2023 hasnode = cl.hasnode
2020 common = [n for n in common if hasnode(n)]
2024 common = [n for n in common if hasnode(n)]
2021 else:
2025 else:
2022 common = [nullid]
2026 common = [nullid]
2023 if not heads:
2027 if not heads:
2024 heads = cl.heads()
2028 heads = cl.heads()
2025 return self.getlocalbundle(source,
2029 return self.getlocalbundle(source,
2026 discovery.outgoing(cl, common, heads))
2030 discovery.outgoing(cl, common, heads),
2031 bundlecaps=bundlecaps)
2027
2032
2028 @unfilteredmethod
2033 @unfilteredmethod
2029 def _changegroupsubset(self, commonrevs, csets, heads, bundler, source):
2034 def _changegroupsubset(self, commonrevs, csets, heads, bundler, source):
2030
2035
2031 cl = self.changelog
2036 cl = self.changelog
2032 mf = self.manifest
2037 mf = self.manifest
2033 mfs = {} # needed manifests
2038 mfs = {} # needed manifests
2034 fnodes = {} # needed file nodes
2039 fnodes = {} # needed file nodes
2035 changedfiles = set()
2040 changedfiles = set()
2036 fstate = ['', {}]
2041 fstate = ['', {}]
2037 count = [0, 0]
2042 count = [0, 0]
2038
2043
2039 # can we go through the fast path ?
2044 # can we go through the fast path ?
2040 heads.sort()
2045 heads.sort()
2041 if heads == sorted(self.heads()):
2046 if heads == sorted(self.heads()):
2042 return self._changegroup(csets, bundler, source)
2047 return self._changegroup(csets, bundler, source)
2043
2048
2044 # slow path
2049 # slow path
2045 self.hook('preoutgoing', throw=True, source=source)
2050 self.hook('preoutgoing', throw=True, source=source)
2046 self.changegroupinfo(csets, source)
2051 self.changegroupinfo(csets, source)
2047
2052
2048 # filter any nodes that claim to be part of the known set
2053 # filter any nodes that claim to be part of the known set
2049 def prune(revlog, missing):
2054 def prune(revlog, missing):
2050 rr, rl = revlog.rev, revlog.linkrev
2055 rr, rl = revlog.rev, revlog.linkrev
2051 return [n for n in missing
2056 return [n for n in missing
2052 if rl(rr(n)) not in commonrevs]
2057 if rl(rr(n)) not in commonrevs]
2053
2058
2054 progress = self.ui.progress
2059 progress = self.ui.progress
2055 _bundling = _('bundling')
2060 _bundling = _('bundling')
2056 _changesets = _('changesets')
2061 _changesets = _('changesets')
2057 _manifests = _('manifests')
2062 _manifests = _('manifests')
2058 _files = _('files')
2063 _files = _('files')
2059
2064
2060 def lookup(revlog, x):
2065 def lookup(revlog, x):
2061 if revlog == cl:
2066 if revlog == cl:
2062 c = cl.read(x)
2067 c = cl.read(x)
2063 changedfiles.update(c[3])
2068 changedfiles.update(c[3])
2064 mfs.setdefault(c[0], x)
2069 mfs.setdefault(c[0], x)
2065 count[0] += 1
2070 count[0] += 1
2066 progress(_bundling, count[0],
2071 progress(_bundling, count[0],
2067 unit=_changesets, total=count[1])
2072 unit=_changesets, total=count[1])
2068 return x
2073 return x
2069 elif revlog == mf:
2074 elif revlog == mf:
2070 clnode = mfs[x]
2075 clnode = mfs[x]
2071 mdata = mf.readfast(x)
2076 mdata = mf.readfast(x)
2072 for f, n in mdata.iteritems():
2077 for f, n in mdata.iteritems():
2073 if f in changedfiles:
2078 if f in changedfiles:
2074 fnodes[f].setdefault(n, clnode)
2079 fnodes[f].setdefault(n, clnode)
2075 count[0] += 1
2080 count[0] += 1
2076 progress(_bundling, count[0],
2081 progress(_bundling, count[0],
2077 unit=_manifests, total=count[1])
2082 unit=_manifests, total=count[1])
2078 return clnode
2083 return clnode
2079 else:
2084 else:
2080 progress(_bundling, count[0], item=fstate[0],
2085 progress(_bundling, count[0], item=fstate[0],
2081 unit=_files, total=count[1])
2086 unit=_files, total=count[1])
2082 return fstate[1][x]
2087 return fstate[1][x]
2083
2088
2084 bundler.start(lookup)
2089 bundler.start(lookup)
2085 reorder = self.ui.config('bundle', 'reorder', 'auto')
2090 reorder = self.ui.config('bundle', 'reorder', 'auto')
2086 if reorder == 'auto':
2091 if reorder == 'auto':
2087 reorder = None
2092 reorder = None
2088 else:
2093 else:
2089 reorder = util.parsebool(reorder)
2094 reorder = util.parsebool(reorder)
2090
2095
2091 def gengroup():
2096 def gengroup():
2092 # Create a changenode group generator that will call our functions
2097 # Create a changenode group generator that will call our functions
2093 # back to lookup the owning changenode and collect information.
2098 # back to lookup the owning changenode and collect information.
2094 count[:] = [0, len(csets)]
2099 count[:] = [0, len(csets)]
2095 for chunk in bundler.group(csets, cl, reorder=reorder):
2100 for chunk in bundler.group(csets, cl, reorder=reorder):
2096 yield chunk
2101 yield chunk
2097 progress(_bundling, None)
2102 progress(_bundling, None)
2098
2103
2099 # Create a generator for the manifestnodes that calls our lookup
2104 # Create a generator for the manifestnodes that calls our lookup
2100 # and data collection functions back.
2105 # and data collection functions back.
2101 for f in changedfiles:
2106 for f in changedfiles:
2102 fnodes[f] = {}
2107 fnodes[f] = {}
2103 count[:] = [0, len(mfs)]
2108 count[:] = [0, len(mfs)]
2104 for chunk in bundler.group(prune(mf, mfs), mf, reorder=reorder):
2109 for chunk in bundler.group(prune(mf, mfs), mf, reorder=reorder):
2105 yield chunk
2110 yield chunk
2106 progress(_bundling, None)
2111 progress(_bundling, None)
2107
2112
2108 mfs.clear()
2113 mfs.clear()
2109
2114
2110 # Go through all our files in order sorted by name.
2115 # Go through all our files in order sorted by name.
2111 count[:] = [0, len(changedfiles)]
2116 count[:] = [0, len(changedfiles)]
2112 for fname in sorted(changedfiles):
2117 for fname in sorted(changedfiles):
2113 filerevlog = self.file(fname)
2118 filerevlog = self.file(fname)
2114 if not len(filerevlog):
2119 if not len(filerevlog):
2115 raise util.Abort(_("empty or missing revlog for %s")
2120 raise util.Abort(_("empty or missing revlog for %s")
2116 % fname)
2121 % fname)
2117 fstate[0] = fname
2122 fstate[0] = fname
2118 fstate[1] = fnodes.pop(fname, {})
2123 fstate[1] = fnodes.pop(fname, {})
2119
2124
2120 nodelist = prune(filerevlog, fstate[1])
2125 nodelist = prune(filerevlog, fstate[1])
2121 if nodelist:
2126 if nodelist:
2122 count[0] += 1
2127 count[0] += 1
2123 yield bundler.fileheader(fname)
2128 yield bundler.fileheader(fname)
2124 for chunk in bundler.group(nodelist, filerevlog, reorder):
2129 for chunk in bundler.group(nodelist, filerevlog, reorder):
2125 yield chunk
2130 yield chunk
2126
2131
2127 # Signal that no more groups are left.
2132 # Signal that no more groups are left.
2128 yield bundler.close()
2133 yield bundler.close()
2129 progress(_bundling, None)
2134 progress(_bundling, None)
2130
2135
2131 if csets:
2136 if csets:
2132 self.hook('outgoing', node=hex(csets[0]), source=source)
2137 self.hook('outgoing', node=hex(csets[0]), source=source)
2133
2138
2134 return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
2139 return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
2135
2140
2136 def changegroup(self, basenodes, source):
2141 def changegroup(self, basenodes, source):
2137 # to avoid a race we use changegroupsubset() (issue1320)
2142 # to avoid a race we use changegroupsubset() (issue1320)
2138 return self.changegroupsubset(basenodes, self.heads(), source)
2143 return self.changegroupsubset(basenodes, self.heads(), source)
2139
2144
2140 @unfilteredmethod
2145 @unfilteredmethod
2141 def _changegroup(self, nodes, bundler, source):
2146 def _changegroup(self, nodes, bundler, source):
2142 """Compute the changegroup of all nodes that we have that a recipient
2147 """Compute the changegroup of all nodes that we have that a recipient
2143 doesn't. Return a chunkbuffer object whose read() method will return
2148 doesn't. Return a chunkbuffer object whose read() method will return
2144 successive changegroup chunks.
2149 successive changegroup chunks.
2145
2150
2146 This is much easier than the previous function as we can assume that
2151 This is much easier than the previous function as we can assume that
2147 the recipient has any changenode we aren't sending them.
2152 the recipient has any changenode we aren't sending them.
2148
2153
2149 nodes is the set of nodes to send"""
2154 nodes is the set of nodes to send"""
2150
2155
2151 cl = self.changelog
2156 cl = self.changelog
2152 mf = self.manifest
2157 mf = self.manifest
2153 mfs = {}
2158 mfs = {}
2154 changedfiles = set()
2159 changedfiles = set()
2155 fstate = ['']
2160 fstate = ['']
2156 count = [0, 0]
2161 count = [0, 0]
2157
2162
2158 self.hook('preoutgoing', throw=True, source=source)
2163 self.hook('preoutgoing', throw=True, source=source)
2159 self.changegroupinfo(nodes, source)
2164 self.changegroupinfo(nodes, source)
2160
2165
2161 revset = set([cl.rev(n) for n in nodes])
2166 revset = set([cl.rev(n) for n in nodes])
2162
2167
2163 def gennodelst(log):
2168 def gennodelst(log):
2164 ln, llr = log.node, log.linkrev
2169 ln, llr = log.node, log.linkrev
2165 return [ln(r) for r in log if llr(r) in revset]
2170 return [ln(r) for r in log if llr(r) in revset]
2166
2171
2167 progress = self.ui.progress
2172 progress = self.ui.progress
2168 _bundling = _('bundling')
2173 _bundling = _('bundling')
2169 _changesets = _('changesets')
2174 _changesets = _('changesets')
2170 _manifests = _('manifests')
2175 _manifests = _('manifests')
2171 _files = _('files')
2176 _files = _('files')
2172
2177
2173 def lookup(revlog, x):
2178 def lookup(revlog, x):
2174 if revlog == cl:
2179 if revlog == cl:
2175 c = cl.read(x)
2180 c = cl.read(x)
2176 changedfiles.update(c[3])
2181 changedfiles.update(c[3])
2177 mfs.setdefault(c[0], x)
2182 mfs.setdefault(c[0], x)
2178 count[0] += 1
2183 count[0] += 1
2179 progress(_bundling, count[0],
2184 progress(_bundling, count[0],
2180 unit=_changesets, total=count[1])
2185 unit=_changesets, total=count[1])
2181 return x
2186 return x
2182 elif revlog == mf:
2187 elif revlog == mf:
2183 count[0] += 1
2188 count[0] += 1
2184 progress(_bundling, count[0],
2189 progress(_bundling, count[0],
2185 unit=_manifests, total=count[1])
2190 unit=_manifests, total=count[1])
2186 return cl.node(revlog.linkrev(revlog.rev(x)))
2191 return cl.node(revlog.linkrev(revlog.rev(x)))
2187 else:
2192 else:
2188 progress(_bundling, count[0], item=fstate[0],
2193 progress(_bundling, count[0], item=fstate[0],
2189 total=count[1], unit=_files)
2194 total=count[1], unit=_files)
2190 return cl.node(revlog.linkrev(revlog.rev(x)))
2195 return cl.node(revlog.linkrev(revlog.rev(x)))
2191
2196
2192 bundler.start(lookup)
2197 bundler.start(lookup)
2193 reorder = self.ui.config('bundle', 'reorder', 'auto')
2198 reorder = self.ui.config('bundle', 'reorder', 'auto')
2194 if reorder == 'auto':
2199 if reorder == 'auto':
2195 reorder = None
2200 reorder = None
2196 else:
2201 else:
2197 reorder = util.parsebool(reorder)
2202 reorder = util.parsebool(reorder)
2198
2203
2199 def gengroup():
2204 def gengroup():
2200 '''yield a sequence of changegroup chunks (strings)'''
2205 '''yield a sequence of changegroup chunks (strings)'''
2201 # construct a list of all changed files
2206 # construct a list of all changed files
2202
2207
2203 count[:] = [0, len(nodes)]
2208 count[:] = [0, len(nodes)]
2204 for chunk in bundler.group(nodes, cl, reorder=reorder):
2209 for chunk in bundler.group(nodes, cl, reorder=reorder):
2205 yield chunk
2210 yield chunk
2206 progress(_bundling, None)
2211 progress(_bundling, None)
2207
2212
2208 count[:] = [0, len(mfs)]
2213 count[:] = [0, len(mfs)]
2209 for chunk in bundler.group(gennodelst(mf), mf, reorder=reorder):
2214 for chunk in bundler.group(gennodelst(mf), mf, reorder=reorder):
2210 yield chunk
2215 yield chunk
2211 progress(_bundling, None)
2216 progress(_bundling, None)
2212
2217
2213 count[:] = [0, len(changedfiles)]
2218 count[:] = [0, len(changedfiles)]
2214 for fname in sorted(changedfiles):
2219 for fname in sorted(changedfiles):
2215 filerevlog = self.file(fname)
2220 filerevlog = self.file(fname)
2216 if not len(filerevlog):
2221 if not len(filerevlog):
2217 raise util.Abort(_("empty or missing revlog for %s")
2222 raise util.Abort(_("empty or missing revlog for %s")
2218 % fname)
2223 % fname)
2219 fstate[0] = fname
2224 fstate[0] = fname
2220 nodelist = gennodelst(filerevlog)
2225 nodelist = gennodelst(filerevlog)
2221 if nodelist:
2226 if nodelist:
2222 count[0] += 1
2227 count[0] += 1
2223 yield bundler.fileheader(fname)
2228 yield bundler.fileheader(fname)
2224 for chunk in bundler.group(nodelist, filerevlog, reorder):
2229 for chunk in bundler.group(nodelist, filerevlog, reorder):
2225 yield chunk
2230 yield chunk
2226 yield bundler.close()
2231 yield bundler.close()
2227 progress(_bundling, None)
2232 progress(_bundling, None)
2228
2233
2229 if nodes:
2234 if nodes:
2230 self.hook('outgoing', node=hex(nodes[0]), source=source)
2235 self.hook('outgoing', node=hex(nodes[0]), source=source)
2231
2236
2232 return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
2237 return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
2233
2238
2234 @unfilteredmethod
2239 @unfilteredmethod
2235 def addchangegroup(self, source, srctype, url, emptyok=False):
2240 def addchangegroup(self, source, srctype, url, emptyok=False):
2236 """Add the changegroup returned by source.read() to this repo.
2241 """Add the changegroup returned by source.read() to this repo.
2237 srctype is a string like 'push', 'pull', or 'unbundle'. url is
2242 srctype is a string like 'push', 'pull', or 'unbundle'. url is
2238 the URL of the repo where this changegroup is coming from.
2243 the URL of the repo where this changegroup is coming from.
2239
2244
2240 Return an integer summarizing the change to this repo:
2245 Return an integer summarizing the change to this repo:
2241 - nothing changed or no source: 0
2246 - nothing changed or no source: 0
2242 - more heads than before: 1+added heads (2..n)
2247 - more heads than before: 1+added heads (2..n)
2243 - fewer heads than before: -1-removed heads (-2..-n)
2248 - fewer heads than before: -1-removed heads (-2..-n)
2244 - number of heads stays the same: 1
2249 - number of heads stays the same: 1
2245 """
2250 """
2246 def csmap(x):
2251 def csmap(x):
2247 self.ui.debug("add changeset %s\n" % short(x))
2252 self.ui.debug("add changeset %s\n" % short(x))
2248 return len(cl)
2253 return len(cl)
2249
2254
2250 def revmap(x):
2255 def revmap(x):
2251 return cl.rev(x)
2256 return cl.rev(x)
2252
2257
2253 if not source:
2258 if not source:
2254 return 0
2259 return 0
2255
2260
2256 self.hook('prechangegroup', throw=True, source=srctype, url=url)
2261 self.hook('prechangegroup', throw=True, source=srctype, url=url)
2257
2262
2258 changesets = files = revisions = 0
2263 changesets = files = revisions = 0
2259 efiles = set()
2264 efiles = set()
2260
2265
2261 # write changelog data to temp files so concurrent readers will not see
2266 # write changelog data to temp files so concurrent readers will not see
2262 # inconsistent view
2267 # inconsistent view
2263 cl = self.changelog
2268 cl = self.changelog
2264 cl.delayupdate()
2269 cl.delayupdate()
2265 oldheads = cl.heads()
2270 oldheads = cl.heads()
2266
2271
2267 tr = self.transaction("\n".join([srctype, util.hidepassword(url)]))
2272 tr = self.transaction("\n".join([srctype, util.hidepassword(url)]))
2268 try:
2273 try:
2269 trp = weakref.proxy(tr)
2274 trp = weakref.proxy(tr)
2270 # pull off the changeset group
2275 # pull off the changeset group
2271 self.ui.status(_("adding changesets\n"))
2276 self.ui.status(_("adding changesets\n"))
2272 clstart = len(cl)
2277 clstart = len(cl)
2273 class prog(object):
2278 class prog(object):
2274 step = _('changesets')
2279 step = _('changesets')
2275 count = 1
2280 count = 1
2276 ui = self.ui
2281 ui = self.ui
2277 total = None
2282 total = None
2278 def __call__(self):
2283 def __call__(self):
2279 self.ui.progress(self.step, self.count, unit=_('chunks'),
2284 self.ui.progress(self.step, self.count, unit=_('chunks'),
2280 total=self.total)
2285 total=self.total)
2281 self.count += 1
2286 self.count += 1
2282 pr = prog()
2287 pr = prog()
2283 source.callback = pr
2288 source.callback = pr
2284
2289
2285 source.changelogheader()
2290 source.changelogheader()
2286 srccontent = cl.addgroup(source, csmap, trp)
2291 srccontent = cl.addgroup(source, csmap, trp)
2287 if not (srccontent or emptyok):
2292 if not (srccontent or emptyok):
2288 raise util.Abort(_("received changelog group is empty"))
2293 raise util.Abort(_("received changelog group is empty"))
2289 clend = len(cl)
2294 clend = len(cl)
2290 changesets = clend - clstart
2295 changesets = clend - clstart
2291 for c in xrange(clstart, clend):
2296 for c in xrange(clstart, clend):
2292 efiles.update(self[c].files())
2297 efiles.update(self[c].files())
2293 efiles = len(efiles)
2298 efiles = len(efiles)
2294 self.ui.progress(_('changesets'), None)
2299 self.ui.progress(_('changesets'), None)
2295
2300
2296 # pull off the manifest group
2301 # pull off the manifest group
2297 self.ui.status(_("adding manifests\n"))
2302 self.ui.status(_("adding manifests\n"))
2298 pr.step = _('manifests')
2303 pr.step = _('manifests')
2299 pr.count = 1
2304 pr.count = 1
2300 pr.total = changesets # manifests <= changesets
2305 pr.total = changesets # manifests <= changesets
2301 # no need to check for empty manifest group here:
2306 # no need to check for empty manifest group here:
2302 # if the result of the merge of 1 and 2 is the same in 3 and 4,
2307 # if the result of the merge of 1 and 2 is the same in 3 and 4,
2303 # no new manifest will be created and the manifest group will
2308 # no new manifest will be created and the manifest group will
2304 # be empty during the pull
2309 # be empty during the pull
2305 source.manifestheader()
2310 source.manifestheader()
2306 self.manifest.addgroup(source, revmap, trp)
2311 self.manifest.addgroup(source, revmap, trp)
2307 self.ui.progress(_('manifests'), None)
2312 self.ui.progress(_('manifests'), None)
2308
2313
2309 needfiles = {}
2314 needfiles = {}
2310 if self.ui.configbool('server', 'validate', default=False):
2315 if self.ui.configbool('server', 'validate', default=False):
2311 # validate incoming csets have their manifests
2316 # validate incoming csets have their manifests
2312 for cset in xrange(clstart, clend):
2317 for cset in xrange(clstart, clend):
2313 mfest = self.changelog.read(self.changelog.node(cset))[0]
2318 mfest = self.changelog.read(self.changelog.node(cset))[0]
2314 mfest = self.manifest.readdelta(mfest)
2319 mfest = self.manifest.readdelta(mfest)
2315 # store file nodes we must see
2320 # store file nodes we must see
2316 for f, n in mfest.iteritems():
2321 for f, n in mfest.iteritems():
2317 needfiles.setdefault(f, set()).add(n)
2322 needfiles.setdefault(f, set()).add(n)
2318
2323
2319 # process the files
2324 # process the files
2320 self.ui.status(_("adding file changes\n"))
2325 self.ui.status(_("adding file changes\n"))
2321 pr.step = _('files')
2326 pr.step = _('files')
2322 pr.count = 1
2327 pr.count = 1
2323 pr.total = efiles
2328 pr.total = efiles
2324 source.callback = None
2329 source.callback = None
2325
2330
2326 while True:
2331 while True:
2327 chunkdata = source.filelogheader()
2332 chunkdata = source.filelogheader()
2328 if not chunkdata:
2333 if not chunkdata:
2329 break
2334 break
2330 f = chunkdata["filename"]
2335 f = chunkdata["filename"]
2331 self.ui.debug("adding %s revisions\n" % f)
2336 self.ui.debug("adding %s revisions\n" % f)
2332 pr()
2337 pr()
2333 fl = self.file(f)
2338 fl = self.file(f)
2334 o = len(fl)
2339 o = len(fl)
2335 if not fl.addgroup(source, revmap, trp):
2340 if not fl.addgroup(source, revmap, trp):
2336 raise util.Abort(_("received file revlog group is empty"))
2341 raise util.Abort(_("received file revlog group is empty"))
2337 revisions += len(fl) - o
2342 revisions += len(fl) - o
2338 files += 1
2343 files += 1
2339 if f in needfiles:
2344 if f in needfiles:
2340 needs = needfiles[f]
2345 needs = needfiles[f]
2341 for new in xrange(o, len(fl)):
2346 for new in xrange(o, len(fl)):
2342 n = fl.node(new)
2347 n = fl.node(new)
2343 if n in needs:
2348 if n in needs:
2344 needs.remove(n)
2349 needs.remove(n)
2345 else:
2350 else:
2346 raise util.Abort(
2351 raise util.Abort(
2347 _("received spurious file revlog entry"))
2352 _("received spurious file revlog entry"))
2348 if not needs:
2353 if not needs:
2349 del needfiles[f]
2354 del needfiles[f]
2350 self.ui.progress(_('files'), None)
2355 self.ui.progress(_('files'), None)
2351
2356
2352 for f, needs in needfiles.iteritems():
2357 for f, needs in needfiles.iteritems():
2353 fl = self.file(f)
2358 fl = self.file(f)
2354 for n in needs:
2359 for n in needs:
2355 try:
2360 try:
2356 fl.rev(n)
2361 fl.rev(n)
2357 except error.LookupError:
2362 except error.LookupError:
2358 raise util.Abort(
2363 raise util.Abort(
2359 _('missing file data for %s:%s - run hg verify') %
2364 _('missing file data for %s:%s - run hg verify') %
2360 (f, hex(n)))
2365 (f, hex(n)))
2361
2366
2362 dh = 0
2367 dh = 0
2363 if oldheads:
2368 if oldheads:
2364 heads = cl.heads()
2369 heads = cl.heads()
2365 dh = len(heads) - len(oldheads)
2370 dh = len(heads) - len(oldheads)
2366 for h in heads:
2371 for h in heads:
2367 if h not in oldheads and self[h].closesbranch():
2372 if h not in oldheads and self[h].closesbranch():
2368 dh -= 1
2373 dh -= 1
2369 htext = ""
2374 htext = ""
2370 if dh:
2375 if dh:
2371 htext = _(" (%+d heads)") % dh
2376 htext = _(" (%+d heads)") % dh
2372
2377
2373 self.ui.status(_("added %d changesets"
2378 self.ui.status(_("added %d changesets"
2374 " with %d changes to %d files%s\n")
2379 " with %d changes to %d files%s\n")
2375 % (changesets, revisions, files, htext))
2380 % (changesets, revisions, files, htext))
2376 self.invalidatevolatilesets()
2381 self.invalidatevolatilesets()
2377
2382
2378 if changesets > 0:
2383 if changesets > 0:
2379 p = lambda: cl.writepending() and self.root or ""
2384 p = lambda: cl.writepending() and self.root or ""
2380 self.hook('pretxnchangegroup', throw=True,
2385 self.hook('pretxnchangegroup', throw=True,
2381 node=hex(cl.node(clstart)), source=srctype,
2386 node=hex(cl.node(clstart)), source=srctype,
2382 url=url, pending=p)
2387 url=url, pending=p)
2383
2388
2384 added = [cl.node(r) for r in xrange(clstart, clend)]
2389 added = [cl.node(r) for r in xrange(clstart, clend)]
2385 publishing = self.ui.configbool('phases', 'publish', True)
2390 publishing = self.ui.configbool('phases', 'publish', True)
2386 if srctype == 'push':
2391 if srctype == 'push':
2387 # Old server can not push the boundary themself.
2392 # Old server can not push the boundary themself.
2388 # New server won't push the boundary if changeset already
2393 # New server won't push the boundary if changeset already
2389 # existed locally as secrete
2394 # existed locally as secrete
2390 #
2395 #
2391 # We should not use added here but the list of all change in
2396 # We should not use added here but the list of all change in
2392 # the bundle
2397 # the bundle
2393 if publishing:
2398 if publishing:
2394 phases.advanceboundary(self, phases.public, srccontent)
2399 phases.advanceboundary(self, phases.public, srccontent)
2395 else:
2400 else:
2396 phases.advanceboundary(self, phases.draft, srccontent)
2401 phases.advanceboundary(self, phases.draft, srccontent)
2397 phases.retractboundary(self, phases.draft, added)
2402 phases.retractboundary(self, phases.draft, added)
2398 elif srctype != 'strip':
2403 elif srctype != 'strip':
2399 # publishing only alter behavior during push
2404 # publishing only alter behavior during push
2400 #
2405 #
2401 # strip should not touch boundary at all
2406 # strip should not touch boundary at all
2402 phases.retractboundary(self, phases.draft, added)
2407 phases.retractboundary(self, phases.draft, added)
2403
2408
2404 # make changelog see real files again
2409 # make changelog see real files again
2405 cl.finalize(trp)
2410 cl.finalize(trp)
2406
2411
2407 tr.close()
2412 tr.close()
2408
2413
2409 if changesets > 0:
2414 if changesets > 0:
2410 if srctype != 'strip':
2415 if srctype != 'strip':
2411 # During strip, branchcache is invalid but coming call to
2416 # During strip, branchcache is invalid but coming call to
2412 # `destroyed` will repair it.
2417 # `destroyed` will repair it.
2413 # In other case we can safely update cache on disk.
2418 # In other case we can safely update cache on disk.
2414 branchmap.updatecache(self.filtered('served'))
2419 branchmap.updatecache(self.filtered('served'))
2415 def runhooks():
2420 def runhooks():
2416 # forcefully update the on-disk branch cache
2421 # forcefully update the on-disk branch cache
2417 self.ui.debug("updating the branch cache\n")
2422 self.ui.debug("updating the branch cache\n")
2418 self.hook("changegroup", node=hex(cl.node(clstart)),
2423 self.hook("changegroup", node=hex(cl.node(clstart)),
2419 source=srctype, url=url)
2424 source=srctype, url=url)
2420
2425
2421 for n in added:
2426 for n in added:
2422 self.hook("incoming", node=hex(n), source=srctype,
2427 self.hook("incoming", node=hex(n), source=srctype,
2423 url=url)
2428 url=url)
2424
2429
2425 newheads = [h for h in self.heads() if h not in oldheads]
2430 newheads = [h for h in self.heads() if h not in oldheads]
2426 self.ui.log("incoming",
2431 self.ui.log("incoming",
2427 "%s incoming changes - new heads: %s\n",
2432 "%s incoming changes - new heads: %s\n",
2428 len(added),
2433 len(added),
2429 ', '.join([hex(c[:6]) for c in newheads]))
2434 ', '.join([hex(c[:6]) for c in newheads]))
2430 self._afterlock(runhooks)
2435 self._afterlock(runhooks)
2431
2436
2432 finally:
2437 finally:
2433 tr.release()
2438 tr.release()
2434 # never return 0 here:
2439 # never return 0 here:
2435 if dh < 0:
2440 if dh < 0:
2436 return dh - 1
2441 return dh - 1
2437 else:
2442 else:
2438 return dh + 1
2443 return dh + 1
2439
2444
2440 def stream_in(self, remote, requirements):
2445 def stream_in(self, remote, requirements):
2441 lock = self.lock()
2446 lock = self.lock()
2442 try:
2447 try:
2443 # Save remote branchmap. We will use it later
2448 # Save remote branchmap. We will use it later
2444 # to speed up branchcache creation
2449 # to speed up branchcache creation
2445 rbranchmap = None
2450 rbranchmap = None
2446 if remote.capable("branchmap"):
2451 if remote.capable("branchmap"):
2447 rbranchmap = remote.branchmap()
2452 rbranchmap = remote.branchmap()
2448
2453
2449 fp = remote.stream_out()
2454 fp = remote.stream_out()
2450 l = fp.readline()
2455 l = fp.readline()
2451 try:
2456 try:
2452 resp = int(l)
2457 resp = int(l)
2453 except ValueError:
2458 except ValueError:
2454 raise error.ResponseError(
2459 raise error.ResponseError(
2455 _('unexpected response from remote server:'), l)
2460 _('unexpected response from remote server:'), l)
2456 if resp == 1:
2461 if resp == 1:
2457 raise util.Abort(_('operation forbidden by server'))
2462 raise util.Abort(_('operation forbidden by server'))
2458 elif resp == 2:
2463 elif resp == 2:
2459 raise util.Abort(_('locking the remote repository failed'))
2464 raise util.Abort(_('locking the remote repository failed'))
2460 elif resp != 0:
2465 elif resp != 0:
2461 raise util.Abort(_('the server sent an unknown error code'))
2466 raise util.Abort(_('the server sent an unknown error code'))
2462 self.ui.status(_('streaming all changes\n'))
2467 self.ui.status(_('streaming all changes\n'))
2463 l = fp.readline()
2468 l = fp.readline()
2464 try:
2469 try:
2465 total_files, total_bytes = map(int, l.split(' ', 1))
2470 total_files, total_bytes = map(int, l.split(' ', 1))
2466 except (ValueError, TypeError):
2471 except (ValueError, TypeError):
2467 raise error.ResponseError(
2472 raise error.ResponseError(
2468 _('unexpected response from remote server:'), l)
2473 _('unexpected response from remote server:'), l)
2469 self.ui.status(_('%d files to transfer, %s of data\n') %
2474 self.ui.status(_('%d files to transfer, %s of data\n') %
2470 (total_files, util.bytecount(total_bytes)))
2475 (total_files, util.bytecount(total_bytes)))
2471 handled_bytes = 0
2476 handled_bytes = 0
2472 self.ui.progress(_('clone'), 0, total=total_bytes)
2477 self.ui.progress(_('clone'), 0, total=total_bytes)
2473 start = time.time()
2478 start = time.time()
2474 for i in xrange(total_files):
2479 for i in xrange(total_files):
2475 # XXX doesn't support '\n' or '\r' in filenames
2480 # XXX doesn't support '\n' or '\r' in filenames
2476 l = fp.readline()
2481 l = fp.readline()
2477 try:
2482 try:
2478 name, size = l.split('\0', 1)
2483 name, size = l.split('\0', 1)
2479 size = int(size)
2484 size = int(size)
2480 except (ValueError, TypeError):
2485 except (ValueError, TypeError):
2481 raise error.ResponseError(
2486 raise error.ResponseError(
2482 _('unexpected response from remote server:'), l)
2487 _('unexpected response from remote server:'), l)
2483 if self.ui.debugflag:
2488 if self.ui.debugflag:
2484 self.ui.debug('adding %s (%s)\n' %
2489 self.ui.debug('adding %s (%s)\n' %
2485 (name, util.bytecount(size)))
2490 (name, util.bytecount(size)))
2486 # for backwards compat, name was partially encoded
2491 # for backwards compat, name was partially encoded
2487 ofp = self.sopener(store.decodedir(name), 'w')
2492 ofp = self.sopener(store.decodedir(name), 'w')
2488 for chunk in util.filechunkiter(fp, limit=size):
2493 for chunk in util.filechunkiter(fp, limit=size):
2489 handled_bytes += len(chunk)
2494 handled_bytes += len(chunk)
2490 self.ui.progress(_('clone'), handled_bytes,
2495 self.ui.progress(_('clone'), handled_bytes,
2491 total=total_bytes)
2496 total=total_bytes)
2492 ofp.write(chunk)
2497 ofp.write(chunk)
2493 ofp.close()
2498 ofp.close()
2494 elapsed = time.time() - start
2499 elapsed = time.time() - start
2495 if elapsed <= 0:
2500 if elapsed <= 0:
2496 elapsed = 0.001
2501 elapsed = 0.001
2497 self.ui.progress(_('clone'), None)
2502 self.ui.progress(_('clone'), None)
2498 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2503 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2499 (util.bytecount(total_bytes), elapsed,
2504 (util.bytecount(total_bytes), elapsed,
2500 util.bytecount(total_bytes / elapsed)))
2505 util.bytecount(total_bytes / elapsed)))
2501
2506
2502 # new requirements = old non-format requirements +
2507 # new requirements = old non-format requirements +
2503 # new format-related
2508 # new format-related
2504 # requirements from the streamed-in repository
2509 # requirements from the streamed-in repository
2505 requirements.update(set(self.requirements) - self.supportedformats)
2510 requirements.update(set(self.requirements) - self.supportedformats)
2506 self._applyrequirements(requirements)
2511 self._applyrequirements(requirements)
2507 self._writerequirements()
2512 self._writerequirements()
2508
2513
2509 if rbranchmap:
2514 if rbranchmap:
2510 rbheads = []
2515 rbheads = []
2511 for bheads in rbranchmap.itervalues():
2516 for bheads in rbranchmap.itervalues():
2512 rbheads.extend(bheads)
2517 rbheads.extend(bheads)
2513
2518
2514 if rbheads:
2519 if rbheads:
2515 rtiprev = max((int(self.changelog.rev(node))
2520 rtiprev = max((int(self.changelog.rev(node))
2516 for node in rbheads))
2521 for node in rbheads))
2517 cache = branchmap.branchcache(rbranchmap,
2522 cache = branchmap.branchcache(rbranchmap,
2518 self[rtiprev].node(),
2523 self[rtiprev].node(),
2519 rtiprev)
2524 rtiprev)
2520 # Try to stick it as low as possible
2525 # Try to stick it as low as possible
2521 # filter above served are unlikely to be fetch from a clone
2526 # filter above served are unlikely to be fetch from a clone
2522 for candidate in ('base', 'immutable', 'served'):
2527 for candidate in ('base', 'immutable', 'served'):
2523 rview = self.filtered(candidate)
2528 rview = self.filtered(candidate)
2524 if cache.validfor(rview):
2529 if cache.validfor(rview):
2525 self._branchcaches[candidate] = cache
2530 self._branchcaches[candidate] = cache
2526 cache.write(rview)
2531 cache.write(rview)
2527 break
2532 break
2528 self.invalidate()
2533 self.invalidate()
2529 return len(self.heads()) + 1
2534 return len(self.heads()) + 1
2530 finally:
2535 finally:
2531 lock.release()
2536 lock.release()
2532
2537
2533 def clone(self, remote, heads=[], stream=False):
2538 def clone(self, remote, heads=[], stream=False):
2534 '''clone remote repository.
2539 '''clone remote repository.
2535
2540
2536 keyword arguments:
2541 keyword arguments:
2537 heads: list of revs to clone (forces use of pull)
2542 heads: list of revs to clone (forces use of pull)
2538 stream: use streaming clone if possible'''
2543 stream: use streaming clone if possible'''
2539
2544
2540 # now, all clients that can request uncompressed clones can
2545 # now, all clients that can request uncompressed clones can
2541 # read repo formats supported by all servers that can serve
2546 # read repo formats supported by all servers that can serve
2542 # them.
2547 # them.
2543
2548
2544 # if revlog format changes, client will have to check version
2549 # if revlog format changes, client will have to check version
2545 # and format flags on "stream" capability, and use
2550 # and format flags on "stream" capability, and use
2546 # uncompressed only if compatible.
2551 # uncompressed only if compatible.
2547
2552
2548 if not stream:
2553 if not stream:
2549 # if the server explicitly prefers to stream (for fast LANs)
2554 # if the server explicitly prefers to stream (for fast LANs)
2550 stream = remote.capable('stream-preferred')
2555 stream = remote.capable('stream-preferred')
2551
2556
2552 if stream and not heads:
2557 if stream and not heads:
2553 # 'stream' means remote revlog format is revlogv1 only
2558 # 'stream' means remote revlog format is revlogv1 only
2554 if remote.capable('stream'):
2559 if remote.capable('stream'):
2555 return self.stream_in(remote, set(('revlogv1',)))
2560 return self.stream_in(remote, set(('revlogv1',)))
2556 # otherwise, 'streamreqs' contains the remote revlog format
2561 # otherwise, 'streamreqs' contains the remote revlog format
2557 streamreqs = remote.capable('streamreqs')
2562 streamreqs = remote.capable('streamreqs')
2558 if streamreqs:
2563 if streamreqs:
2559 streamreqs = set(streamreqs.split(','))
2564 streamreqs = set(streamreqs.split(','))
2560 # if we support it, stream in and adjust our requirements
2565 # if we support it, stream in and adjust our requirements
2561 if not streamreqs - self.supportedformats:
2566 if not streamreqs - self.supportedformats:
2562 return self.stream_in(remote, streamreqs)
2567 return self.stream_in(remote, streamreqs)
2563 return self.pull(remote, heads)
2568 return self.pull(remote, heads)
2564
2569
2565 def pushkey(self, namespace, key, old, new):
2570 def pushkey(self, namespace, key, old, new):
2566 self.hook('prepushkey', throw=True, namespace=namespace, key=key,
2571 self.hook('prepushkey', throw=True, namespace=namespace, key=key,
2567 old=old, new=new)
2572 old=old, new=new)
2568 self.ui.debug('pushing key for "%s:%s"\n' % (namespace, key))
2573 self.ui.debug('pushing key for "%s:%s"\n' % (namespace, key))
2569 ret = pushkey.push(self, namespace, key, old, new)
2574 ret = pushkey.push(self, namespace, key, old, new)
2570 self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
2575 self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
2571 ret=ret)
2576 ret=ret)
2572 return ret
2577 return ret
2573
2578
2574 def listkeys(self, namespace):
2579 def listkeys(self, namespace):
2575 self.hook('prelistkeys', throw=True, namespace=namespace)
2580 self.hook('prelistkeys', throw=True, namespace=namespace)
2576 self.ui.debug('listing keys for "%s"\n' % namespace)
2581 self.ui.debug('listing keys for "%s"\n' % namespace)
2577 values = pushkey.list(self, namespace)
2582 values = pushkey.list(self, namespace)
2578 self.hook('listkeys', namespace=namespace, values=values)
2583 self.hook('listkeys', namespace=namespace, values=values)
2579 return values
2584 return values
2580
2585
2581 def debugwireargs(self, one, two, three=None, four=None, five=None):
2586 def debugwireargs(self, one, two, three=None, four=None, five=None):
2582 '''used to test argument passing over the wire'''
2587 '''used to test argument passing over the wire'''
2583 return "%s %s %s %s %s" % (one, two, three, four, five)
2588 return "%s %s %s %s %s" % (one, two, three, four, five)
2584
2589
2585 def savecommitmessage(self, text):
2590 def savecommitmessage(self, text):
2586 fp = self.opener('last-message.txt', 'wb')
2591 fp = self.opener('last-message.txt', 'wb')
2587 try:
2592 try:
2588 fp.write(text)
2593 fp.write(text)
2589 finally:
2594 finally:
2590 fp.close()
2595 fp.close()
2591 return self.pathto(fp.name[len(self.root) + 1:])
2596 return self.pathto(fp.name[len(self.root) + 1:])
2592
2597
2593 # used to avoid circular references so destructors work
2598 # used to avoid circular references so destructors work
2594 def aftertrans(files):
2599 def aftertrans(files):
2595 renamefiles = [tuple(t) for t in files]
2600 renamefiles = [tuple(t) for t in files]
2596 def a():
2601 def a():
2597 for vfs, src, dest in renamefiles:
2602 for vfs, src, dest in renamefiles:
2598 try:
2603 try:
2599 vfs.rename(src, dest)
2604 vfs.rename(src, dest)
2600 except OSError: # journal file does not yet exist
2605 except OSError: # journal file does not yet exist
2601 pass
2606 pass
2602 return a
2607 return a
2603
2608
2604 def undoname(fn):
2609 def undoname(fn):
2605 base, name = os.path.split(fn)
2610 base, name = os.path.split(fn)
2606 assert name.startswith('journal')
2611 assert name.startswith('journal')
2607 return os.path.join(base, name.replace('journal', 'undo', 1))
2612 return os.path.join(base, name.replace('journal', 'undo', 1))
2608
2613
2609 def instance(ui, path, create):
2614 def instance(ui, path, create):
2610 return localrepository(ui, util.urllocalpath(path), create)
2615 return localrepository(ui, util.urllocalpath(path), create)
2611
2616
2612 def islocal(path):
2617 def islocal(path):
2613 return True
2618 return True
@@ -1,660 +1,665 b''
1 # wireproto.py - generic wire protocol support functions
1 # wireproto.py - generic wire protocol support functions
2 #
2 #
3 # Copyright 2005-2010 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2010 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 import urllib, tempfile, os, sys
8 import urllib, tempfile, os, sys
9 from i18n import _
9 from i18n import _
10 from node import bin, hex
10 from node import bin, hex
11 import changegroup as changegroupmod
11 import changegroup as changegroupmod
12 import peer, error, encoding, util, store
12 import peer, error, encoding, util, store
13
13
14 # abstract batching support
14 # abstract batching support
15
15
16 class future(object):
16 class future(object):
17 '''placeholder for a value to be set later'''
17 '''placeholder for a value to be set later'''
18 def set(self, value):
18 def set(self, value):
19 if util.safehasattr(self, 'value'):
19 if util.safehasattr(self, 'value'):
20 raise error.RepoError("future is already set")
20 raise error.RepoError("future is already set")
21 self.value = value
21 self.value = value
22
22
23 class batcher(object):
23 class batcher(object):
24 '''base class for batches of commands submittable in a single request
24 '''base class for batches of commands submittable in a single request
25
25
26 All methods invoked on instances of this class are simply queued and
26 All methods invoked on instances of this class are simply queued and
27 return a a future for the result. Once you call submit(), all the queued
27 return a a future for the result. Once you call submit(), all the queued
28 calls are performed and the results set in their respective futures.
28 calls are performed and the results set in their respective futures.
29 '''
29 '''
30 def __init__(self):
30 def __init__(self):
31 self.calls = []
31 self.calls = []
32 def __getattr__(self, name):
32 def __getattr__(self, name):
33 def call(*args, **opts):
33 def call(*args, **opts):
34 resref = future()
34 resref = future()
35 self.calls.append((name, args, opts, resref,))
35 self.calls.append((name, args, opts, resref,))
36 return resref
36 return resref
37 return call
37 return call
38 def submit(self):
38 def submit(self):
39 pass
39 pass
40
40
41 class localbatch(batcher):
41 class localbatch(batcher):
42 '''performs the queued calls directly'''
42 '''performs the queued calls directly'''
43 def __init__(self, local):
43 def __init__(self, local):
44 batcher.__init__(self)
44 batcher.__init__(self)
45 self.local = local
45 self.local = local
46 def submit(self):
46 def submit(self):
47 for name, args, opts, resref in self.calls:
47 for name, args, opts, resref in self.calls:
48 resref.set(getattr(self.local, name)(*args, **opts))
48 resref.set(getattr(self.local, name)(*args, **opts))
49
49
50 class remotebatch(batcher):
50 class remotebatch(batcher):
51 '''batches the queued calls; uses as few roundtrips as possible'''
51 '''batches the queued calls; uses as few roundtrips as possible'''
52 def __init__(self, remote):
52 def __init__(self, remote):
53 '''remote must support _submitbatch(encbatch) and
53 '''remote must support _submitbatch(encbatch) and
54 _submitone(op, encargs)'''
54 _submitone(op, encargs)'''
55 batcher.__init__(self)
55 batcher.__init__(self)
56 self.remote = remote
56 self.remote = remote
57 def submit(self):
57 def submit(self):
58 req, rsp = [], []
58 req, rsp = [], []
59 for name, args, opts, resref in self.calls:
59 for name, args, opts, resref in self.calls:
60 mtd = getattr(self.remote, name)
60 mtd = getattr(self.remote, name)
61 batchablefn = getattr(mtd, 'batchable', None)
61 batchablefn = getattr(mtd, 'batchable', None)
62 if batchablefn is not None:
62 if batchablefn is not None:
63 batchable = batchablefn(mtd.im_self, *args, **opts)
63 batchable = batchablefn(mtd.im_self, *args, **opts)
64 encargsorres, encresref = batchable.next()
64 encargsorres, encresref = batchable.next()
65 if encresref:
65 if encresref:
66 req.append((name, encargsorres,))
66 req.append((name, encargsorres,))
67 rsp.append((batchable, encresref, resref,))
67 rsp.append((batchable, encresref, resref,))
68 else:
68 else:
69 resref.set(encargsorres)
69 resref.set(encargsorres)
70 else:
70 else:
71 if req:
71 if req:
72 self._submitreq(req, rsp)
72 self._submitreq(req, rsp)
73 req, rsp = [], []
73 req, rsp = [], []
74 resref.set(mtd(*args, **opts))
74 resref.set(mtd(*args, **opts))
75 if req:
75 if req:
76 self._submitreq(req, rsp)
76 self._submitreq(req, rsp)
77 def _submitreq(self, req, rsp):
77 def _submitreq(self, req, rsp):
78 encresults = self.remote._submitbatch(req)
78 encresults = self.remote._submitbatch(req)
79 for encres, r in zip(encresults, rsp):
79 for encres, r in zip(encresults, rsp):
80 batchable, encresref, resref = r
80 batchable, encresref, resref = r
81 encresref.set(encres)
81 encresref.set(encres)
82 resref.set(batchable.next())
82 resref.set(batchable.next())
83
83
84 def batchable(f):
84 def batchable(f):
85 '''annotation for batchable methods
85 '''annotation for batchable methods
86
86
87 Such methods must implement a coroutine as follows:
87 Such methods must implement a coroutine as follows:
88
88
89 @batchable
89 @batchable
90 def sample(self, one, two=None):
90 def sample(self, one, two=None):
91 # Handle locally computable results first:
91 # Handle locally computable results first:
92 if not one:
92 if not one:
93 yield "a local result", None
93 yield "a local result", None
94 # Build list of encoded arguments suitable for your wire protocol:
94 # Build list of encoded arguments suitable for your wire protocol:
95 encargs = [('one', encode(one),), ('two', encode(two),)]
95 encargs = [('one', encode(one),), ('two', encode(two),)]
96 # Create future for injection of encoded result:
96 # Create future for injection of encoded result:
97 encresref = future()
97 encresref = future()
98 # Return encoded arguments and future:
98 # Return encoded arguments and future:
99 yield encargs, encresref
99 yield encargs, encresref
100 # Assuming the future to be filled with the result from the batched
100 # Assuming the future to be filled with the result from the batched
101 # request now. Decode it:
101 # request now. Decode it:
102 yield decode(encresref.value)
102 yield decode(encresref.value)
103
103
104 The decorator returns a function which wraps this coroutine as a plain
104 The decorator returns a function which wraps this coroutine as a plain
105 method, but adds the original method as an attribute called "batchable",
105 method, but adds the original method as an attribute called "batchable",
106 which is used by remotebatch to split the call into separate encoding and
106 which is used by remotebatch to split the call into separate encoding and
107 decoding phases.
107 decoding phases.
108 '''
108 '''
109 def plain(*args, **opts):
109 def plain(*args, **opts):
110 batchable = f(*args, **opts)
110 batchable = f(*args, **opts)
111 encargsorres, encresref = batchable.next()
111 encargsorres, encresref = batchable.next()
112 if not encresref:
112 if not encresref:
113 return encargsorres # a local result in this case
113 return encargsorres # a local result in this case
114 self = args[0]
114 self = args[0]
115 encresref.set(self._submitone(f.func_name, encargsorres))
115 encresref.set(self._submitone(f.func_name, encargsorres))
116 return batchable.next()
116 return batchable.next()
117 setattr(plain, 'batchable', f)
117 setattr(plain, 'batchable', f)
118 return plain
118 return plain
119
119
120 # list of nodes encoding / decoding
120 # list of nodes encoding / decoding
121
121
122 def decodelist(l, sep=' '):
122 def decodelist(l, sep=' '):
123 if l:
123 if l:
124 return map(bin, l.split(sep))
124 return map(bin, l.split(sep))
125 return []
125 return []
126
126
127 def encodelist(l, sep=' '):
127 def encodelist(l, sep=' '):
128 return sep.join(map(hex, l))
128 return sep.join(map(hex, l))
129
129
130 # batched call argument encoding
130 # batched call argument encoding
131
131
132 def escapearg(plain):
132 def escapearg(plain):
133 return (plain
133 return (plain
134 .replace(':', '::')
134 .replace(':', '::')
135 .replace(',', ':,')
135 .replace(',', ':,')
136 .replace(';', ':;')
136 .replace(';', ':;')
137 .replace('=', ':='))
137 .replace('=', ':='))
138
138
139 def unescapearg(escaped):
139 def unescapearg(escaped):
140 return (escaped
140 return (escaped
141 .replace(':=', '=')
141 .replace(':=', '=')
142 .replace(':;', ';')
142 .replace(':;', ';')
143 .replace(':,', ',')
143 .replace(':,', ',')
144 .replace('::', ':'))
144 .replace('::', ':'))
145
145
146 # client side
146 # client side
147
147
148 def todict(**args):
148 def todict(**args):
149 return args
149 return args
150
150
151 class wirepeer(peer.peerrepository):
151 class wirepeer(peer.peerrepository):
152
152
153 def batch(self):
153 def batch(self):
154 return remotebatch(self)
154 return remotebatch(self)
155 def _submitbatch(self, req):
155 def _submitbatch(self, req):
156 cmds = []
156 cmds = []
157 for op, argsdict in req:
157 for op, argsdict in req:
158 args = ','.join('%s=%s' % p for p in argsdict.iteritems())
158 args = ','.join('%s=%s' % p for p in argsdict.iteritems())
159 cmds.append('%s %s' % (op, args))
159 cmds.append('%s %s' % (op, args))
160 rsp = self._call("batch", cmds=';'.join(cmds))
160 rsp = self._call("batch", cmds=';'.join(cmds))
161 return rsp.split(';')
161 return rsp.split(';')
162 def _submitone(self, op, args):
162 def _submitone(self, op, args):
163 return self._call(op, **args)
163 return self._call(op, **args)
164
164
165 @batchable
165 @batchable
166 def lookup(self, key):
166 def lookup(self, key):
167 self.requirecap('lookup', _('look up remote revision'))
167 self.requirecap('lookup', _('look up remote revision'))
168 f = future()
168 f = future()
169 yield todict(key=encoding.fromlocal(key)), f
169 yield todict(key=encoding.fromlocal(key)), f
170 d = f.value
170 d = f.value
171 success, data = d[:-1].split(" ", 1)
171 success, data = d[:-1].split(" ", 1)
172 if int(success):
172 if int(success):
173 yield bin(data)
173 yield bin(data)
174 self._abort(error.RepoError(data))
174 self._abort(error.RepoError(data))
175
175
176 @batchable
176 @batchable
177 def heads(self):
177 def heads(self):
178 f = future()
178 f = future()
179 yield {}, f
179 yield {}, f
180 d = f.value
180 d = f.value
181 try:
181 try:
182 yield decodelist(d[:-1])
182 yield decodelist(d[:-1])
183 except ValueError:
183 except ValueError:
184 self._abort(error.ResponseError(_("unexpected response:"), d))
184 self._abort(error.ResponseError(_("unexpected response:"), d))
185
185
186 @batchable
186 @batchable
187 def known(self, nodes):
187 def known(self, nodes):
188 f = future()
188 f = future()
189 yield todict(nodes=encodelist(nodes)), f
189 yield todict(nodes=encodelist(nodes)), f
190 d = f.value
190 d = f.value
191 try:
191 try:
192 yield [bool(int(f)) for f in d]
192 yield [bool(int(f)) for f in d]
193 except ValueError:
193 except ValueError:
194 self._abort(error.ResponseError(_("unexpected response:"), d))
194 self._abort(error.ResponseError(_("unexpected response:"), d))
195
195
196 @batchable
196 @batchable
197 def branchmap(self):
197 def branchmap(self):
198 f = future()
198 f = future()
199 yield {}, f
199 yield {}, f
200 d = f.value
200 d = f.value
201 try:
201 try:
202 branchmap = {}
202 branchmap = {}
203 for branchpart in d.splitlines():
203 for branchpart in d.splitlines():
204 branchname, branchheads = branchpart.split(' ', 1)
204 branchname, branchheads = branchpart.split(' ', 1)
205 branchname = encoding.tolocal(urllib.unquote(branchname))
205 branchname = encoding.tolocal(urllib.unquote(branchname))
206 branchheads = decodelist(branchheads)
206 branchheads = decodelist(branchheads)
207 branchmap[branchname] = branchheads
207 branchmap[branchname] = branchheads
208 yield branchmap
208 yield branchmap
209 except TypeError:
209 except TypeError:
210 self._abort(error.ResponseError(_("unexpected response:"), d))
210 self._abort(error.ResponseError(_("unexpected response:"), d))
211
211
212 def branches(self, nodes):
212 def branches(self, nodes):
213 n = encodelist(nodes)
213 n = encodelist(nodes)
214 d = self._call("branches", nodes=n)
214 d = self._call("branches", nodes=n)
215 try:
215 try:
216 br = [tuple(decodelist(b)) for b in d.splitlines()]
216 br = [tuple(decodelist(b)) for b in d.splitlines()]
217 return br
217 return br
218 except ValueError:
218 except ValueError:
219 self._abort(error.ResponseError(_("unexpected response:"), d))
219 self._abort(error.ResponseError(_("unexpected response:"), d))
220
220
221 def between(self, pairs):
221 def between(self, pairs):
222 batch = 8 # avoid giant requests
222 batch = 8 # avoid giant requests
223 r = []
223 r = []
224 for i in xrange(0, len(pairs), batch):
224 for i in xrange(0, len(pairs), batch):
225 n = " ".join([encodelist(p, '-') for p in pairs[i:i + batch]])
225 n = " ".join([encodelist(p, '-') for p in pairs[i:i + batch]])
226 d = self._call("between", pairs=n)
226 d = self._call("between", pairs=n)
227 try:
227 try:
228 r.extend(l and decodelist(l) or [] for l in d.splitlines())
228 r.extend(l and decodelist(l) or [] for l in d.splitlines())
229 except ValueError:
229 except ValueError:
230 self._abort(error.ResponseError(_("unexpected response:"), d))
230 self._abort(error.ResponseError(_("unexpected response:"), d))
231 return r
231 return r
232
232
233 @batchable
233 @batchable
234 def pushkey(self, namespace, key, old, new):
234 def pushkey(self, namespace, key, old, new):
235 if not self.capable('pushkey'):
235 if not self.capable('pushkey'):
236 yield False, None
236 yield False, None
237 f = future()
237 f = future()
238 self.ui.debug('preparing pushkey for "%s:%s"\n' % (namespace, key))
238 self.ui.debug('preparing pushkey for "%s:%s"\n' % (namespace, key))
239 yield todict(namespace=encoding.fromlocal(namespace),
239 yield todict(namespace=encoding.fromlocal(namespace),
240 key=encoding.fromlocal(key),
240 key=encoding.fromlocal(key),
241 old=encoding.fromlocal(old),
241 old=encoding.fromlocal(old),
242 new=encoding.fromlocal(new)), f
242 new=encoding.fromlocal(new)), f
243 d = f.value
243 d = f.value
244 d, output = d.split('\n', 1)
244 d, output = d.split('\n', 1)
245 try:
245 try:
246 d = bool(int(d))
246 d = bool(int(d))
247 except ValueError:
247 except ValueError:
248 raise error.ResponseError(
248 raise error.ResponseError(
249 _('push failed (unexpected response):'), d)
249 _('push failed (unexpected response):'), d)
250 for l in output.splitlines(True):
250 for l in output.splitlines(True):
251 self.ui.status(_('remote: '), l)
251 self.ui.status(_('remote: '), l)
252 yield d
252 yield d
253
253
254 @batchable
254 @batchable
255 def listkeys(self, namespace):
255 def listkeys(self, namespace):
256 if not self.capable('pushkey'):
256 if not self.capable('pushkey'):
257 yield {}, None
257 yield {}, None
258 f = future()
258 f = future()
259 self.ui.debug('preparing listkeys for "%s"\n' % namespace)
259 self.ui.debug('preparing listkeys for "%s"\n' % namespace)
260 yield todict(namespace=encoding.fromlocal(namespace)), f
260 yield todict(namespace=encoding.fromlocal(namespace)), f
261 d = f.value
261 d = f.value
262 r = {}
262 r = {}
263 for l in d.splitlines():
263 for l in d.splitlines():
264 k, v = l.split('\t')
264 k, v = l.split('\t')
265 r[encoding.tolocal(k)] = encoding.tolocal(v)
265 r[encoding.tolocal(k)] = encoding.tolocal(v)
266 yield r
266 yield r
267
267
268 def stream_out(self):
268 def stream_out(self):
269 return self._callstream('stream_out')
269 return self._callstream('stream_out')
270
270
271 def changegroup(self, nodes, kind):
271 def changegroup(self, nodes, kind):
272 n = encodelist(nodes)
272 n = encodelist(nodes)
273 f = self._callstream("changegroup", roots=n)
273 f = self._callstream("changegroup", roots=n)
274 return changegroupmod.unbundle10(self._decompress(f), 'UN')
274 return changegroupmod.unbundle10(self._decompress(f), 'UN')
275
275
def changegroupsubset(self, bases, heads, kind):
    """Fetch the changegroup between *bases* and *heads* from the remote.

    Requires the 'changegroupsubset' capability; *kind* is accepted for
    interface compatibility but not sent on the wire.
    """
    self.requirecap('changegroupsubset', _('look up remote changes'))
    stream = self._callstream("changegroupsubset",
                              bases=encodelist(bases),
                              heads=encodelist(heads))
    return changegroupmod.unbundle10(self._decompress(stream), 'UN')
283
283
def getbundle(self, source, heads=None, common=None, bundlecaps=None):
    """Request a bundle from the remote via the 'getbundle' command.

    Only arguments that were explicitly given are put on the wire.
    *bundlecaps* (an iterable of capability strings) is transmitted as
    a single comma-separated value.
    """
    self.requirecap('getbundle', _('look up remote changes'))
    opts = {}
    for name, value in (('heads', heads), ('common', common)):
        if value is not None:
            opts[name] = encodelist(value)
    if bundlecaps is not None:
        opts['bundlecaps'] = ','.join(bundlecaps)
    stream = self._callstream("getbundle", **opts)
    return changegroupmod.unbundle10(self._decompress(stream), 'UN')
293
295
def unbundle(self, cg, heads, source):
    '''Send cg (a readable file-like object representing the
    changegroup to push, typically a chunkbuffer object) to the
    remote server as a bundle. Return an integer indicating the
    result of the push (see localrepository.addchangegroup()).'''

    # when the server supports it, send a hash of our view of its heads
    # instead of the full list; 'force' bypasses the race check entirely
    if heads != ['force'] and self.capable('unbundlehash'):
        digest = util.sha1(''.join(sorted(heads))).digest()
        wireheads = encodelist(['hashed', digest])
    else:
        wireheads = encodelist(heads)

    result, output = self._callpush("unbundle", cg, heads=wireheads)
    if result == "":
        # empty result means the push never produced a status code
        raise error.ResponseError(
            _('push failed:'), output)
    try:
        result = int(result)
    except ValueError:
        raise error.ResponseError(
            _('push failed (unexpected response):'), result)

    # relay the server's textual output line by line
    for line in output.splitlines(True):
        self.ui.status(_('remote: '), line)
    return result
319
321
def debugwireargs(self, one, two, three=None, four=None, five=None):
    """Exercise wire argument passing for debugging purposes.

    Optional arguments left at their default value are not sent; note
    that 'five' is accepted but never forwarded over the wire.
    """
    opts = {}
    for name, value in (('three', three), ('four', four)):
        if value is not None:
            opts[name] = value
    return self._call('debugwireargs', one=one, two=two, **opts)
328
330
329 # server side
331 # server side
330
332
class streamres(object):
    """Protocol response: stream the chunks produced by *gen* to the
    client."""
    def __init__(self, gen):
        self.gen = gen
334
336
class pushres(object):
    """Protocol response: integer result of an applied push."""
    def __init__(self, res):
        self.res = res
338
340
class pusherr(object):
    """Protocol response: error message aborting a push."""
    def __init__(self, res):
        self.res = res
342
344
class ooberror(object):
    """Protocol response: out-of-band error message; aborts a whole
    batch when returned by a command (see batch())."""
    def __init__(self, message):
        self.message = message
346
348
def dispatch(repo, proto, command):
    """Look up *command* in the command table and invoke its handler
    on the 'served' view of the repository."""
    repo = repo.filtered("served")
    handler, argspec = commands[command]
    arguments = proto.getargs(argspec)
    return handler(repo, proto, *arguments)
352
354
def options(cmd, keys, others):
    """Pop the entries named in *keys* out of the dict *others* and
    return them as a new dict.

    Anything left over in *others* is reported on stderr as unexpected
    arguments for *cmd*; processing still continues.
    """
    picked = {}
    for name in keys:
        if name in others:
            picked[name] = others.pop(name)
    if others:
        sys.stderr.write("abort: %s got unexpected arguments %s\n"
                         % (cmd, ",".join(others)))
    return picked
363
365
def batch(repo, proto, cmds, others):
    # Wire command 'batch': run several commands in one round-trip.
    # *cmds* is a ';'-separated list of "op arg1=v1,arg2=v2" entries
    # whose names/values were escaped on the client with escapearg().
    repo = repo.filtered("served")
    res = []
    for pair in cmds.split(';'):
        op, args = pair.split(' ', 1)
        vals = {}
        for a in args.split(','):
            if a:
                n, v = a.split('=')
                vals[n] = unescapearg(v)
        func, spec = commands[op]
        if spec:
            # build the positional argument list from the command's spec
            keys = spec.split()
            data = {}
            for k in keys:
                if k == '*':
                    # '*' soaks up every argument not named in the spec
                    star = {}
                    for key in vals.keys():
                        if key not in keys:
                            star[key] = vals[key]
                    data['*'] = star
                else:
                    data[k] = vals[k]
            result = func(repo, proto, *[data[k] for k in keys])
        else:
            result = func(repo, proto)
        if isinstance(result, ooberror):
            # out-of-band error: abort the whole batch and report it
            return result
        res.append(escapearg(result))
    return ';'.join(res)
394
396
def between(repo, proto, pairs):
    """Wire command 'between': *pairs* is a space-separated list of
    '-'-joined node pairs; answer one encoded node list per pair."""
    decoded = [decodelist(p, '-') for p in pairs.split(" ")]
    lines = [encodelist(b) + "\n" for b in repo.between(decoded)]
    return "".join(lines)
401
403
def branchmap(repo, proto):
    """Wire command 'branchmap': one line per branch, the URL-quoted
    branch name followed by its encoded head nodes."""
    lines = []
    for branch, nodes in repo.branchmap().iteritems():
        quoted = urllib.quote(encoding.fromlocal(branch))
        lines.append('%s %s' % (quoted, encodelist(nodes)))
    return '\n'.join(lines)
410
412
def branches(repo, proto, nodes):
    """Wire command 'branches': answer one encoded node list per line
    for each branch of the decoded *nodes*."""
    out = []
    for branch in repo.branches(decodelist(nodes)):
        out.append(encodelist(branch) + "\n")
    return "".join(out)
417
419
def capabilities(repo, proto):
    """Return the server's capabilities as a space-separated token
    list (wrapped by hello() for the handshake)."""
    caps = ('lookup changegroupsubset branchmap pushkey known getbundle '
            'unbundlehash batch').split()
    if _allowstream(repo.ui):
        if repo.ui.configbool('server', 'preferuncompressed', False):
            caps.append('stream-preferred')
        requiredformats = repo.requirements & repo.supportedformats
        if requiredformats - set(('revlogv1',)):
            # beyond plain revlogv1: spell out the required formats so
            # clients can tell whether they understand the stream
            caps.append('streamreqs=%s' % ','.join(requiredformats))
        else:
            # revlogs are plain revlogv1: the bare 'stream' cap suffices
            caps.append('stream')
    caps.append('unbundle=%s' % ','.join(changegroupmod.bundlepriority))
    caps.append('httpheader=1024')
    return ' '.join(caps)
434
436
def changegroup(repo, proto, roots):
    """Wire command 'changegroup': stream the changegroup rooted at
    the decoded *roots* back to the client."""
    cg = repo.changegroup(decodelist(roots), 'serve')
    return streamres(proto.groupchunks(cg))
439
441
def changegroupsubset(repo, proto, bases, heads):
    """Wire command 'changegroupsubset': stream the changegroup
    between the decoded *bases* and *heads* back to the client."""
    cg = repo.changegroupsubset(decodelist(bases), decodelist(heads),
                                'serve')
    return streamres(proto.groupchunks(cg))
445
447
def debugwireargs(repo, proto, one, two, others):
    # Wire command 'debugwireargs': server-side counterpart of the
    # client method of the same name.
    # only accept optional args from the known set
    opts = options('debugwireargs', ['three', 'four'], others)
    return repo.debugwireargs(one, two, **opts)
450
452
def getbundle(repo, proto, others):
    """Wire command 'getbundle': stream a bundle between the decoded
    'common' and 'heads' node lists; 'bundlecaps' arrives as a single
    comma-separated string and is turned back into a set."""
    opts = options('getbundle', ['heads', 'common', 'bundlecaps'], others)
    for key in opts.keys():
        if key in ('heads', 'common'):
            opts[key] = decodelist(opts[key])
        elif key == 'bundlecaps':
            opts[key] = set(opts[key].split(','))
    cg = repo.getbundle('serve', **opts)
    return streamres(proto.groupchunks(cg))
457
462
def heads(repo, proto):
    """Wire command 'heads': answer the repository heads as one
    encoded, newline-terminated node list."""
    return encodelist(repo.heads()) + "\n"
461
466
def hello(repo, proto):
    '''Report server properties in an RFC822-like "key: value" format.

    The only key currently emitted is "capabilities", whose value is
    the space-separated token list produced by capabilities(), e.g.:

        capabilities: space separated list of tokens
    '''
    return "capabilities: %s\n" % capabilities(repo, proto)
471
476
def listkeys(repo, proto, namespace):
    """Wire command 'listkeys': answer the pushkey mapping for
    *namespace* as tab-separated pairs, one per line."""
    pairs = repo.listkeys(encoding.tolocal(namespace)).items()
    return '\n'.join(['%s\t%s' % (encoding.fromlocal(k),
                                  encoding.fromlocal(v))
                      for k, v in pairs])
477
482
478 def lookup(repo, proto, key):
483 def lookup(repo, proto, key):
479 try:
484 try:
480 k = encoding.tolocal(key)
485 k = encoding.tolocal(key)
481 c = repo[k]
486 c = repo[k]
482 r = c.hex()
487 r = c.hex()
483 success = 1
488 success = 1
484 except Exception, inst:
489 except Exception, inst:
485 r = str(inst)
490 r = str(inst)
486 success = 0
491 success = 0
487 return "%s %s\n" % (success, r)
492 return "%s %s\n" % (success, r)
488
493
def known(repo, proto, nodes, others):
    """Wire command 'known': answer one '1'/'0' character per decoded
    node, telling whether the server knows it."""
    flags = repo.known(decodelist(nodes))
    return ''.join(flag and "1" or "0" for flag in flags)
491
496
def pushkey(repo, proto, namespace, key, old, new):
    # Wire command 'pushkey': set *key* to *new* (expecting current
    # value *old*) in *namespace*; answers the integer result, plus any
    # captured output when the transport supports it.
    # compatibility with pre-1.8 clients which were accidentally
    # sending raw binary nodes rather than utf-8-encoded hex
    if len(new) == 20 and new.encode('string-escape') != new:
        # looks like it could be a binary node
        try:
            new.decode('utf-8')
            new = encoding.tolocal(new) # but cleanly decodes as UTF-8
        except UnicodeDecodeError:
            pass # binary, leave unmodified
    else:
        new = encoding.tolocal(new) # normal path

    if util.safehasattr(proto, 'restore'):
        # transport can capture output: redirect it around the pushkey
        # call and ship it back appended to the result

        proto.redirect()

        try:
            r = repo.pushkey(encoding.tolocal(namespace), encoding.tolocal(key),
                             encoding.tolocal(old), new) or False
        except util.Abort:
            r = False

        output = proto.restore()

        return '%s\n%s' % (int(r), output)

    r = repo.pushkey(encoding.tolocal(namespace), encoding.tolocal(key),
                     encoding.tolocal(old), new)
    return '%s\n' % int(r)
522
527
def _allowstream(ui):
    # Streaming clones are allowed unless 'server.uncompressed' is
    # explicitly disabled; untrusted configuration is honoured here.
    return ui.configbool('server', 'uncompressed', True, untrusted=True)
525
530
def _walkstreamfiles(repo):
    # this is its own function so extensions can override it
    return repo.store.walk()
529
534
def stream(repo, proto):
    '''If the server supports streaming clone, it advertises the "stream"
    capability with a value representing the version and flags of the repo
    it is serving. Client checks to see if it understands the format.

    The format is simple: the server writes out a line with the amount
    of files, then the total amount of bytes to be transferred (separated
    by a space). Then, for each file, the server first writes the filename
    and filesize (separated by the null character), then the file contents.
    '''

    if not _allowstream(repo.ui):
        return '1\n'  # error: 1 - streaming disabled by configuration

    entries = []
    total_bytes = 0
    try:
        # get consistent snapshot of repo, lock during scan
        lock = repo.lock()
        try:
            repo.ui.debug('scanning\n')
            for name, ename, size in _walkstreamfiles(repo):
                if size:
                    entries.append((name, size))
                    total_bytes += size
        finally:
            lock.release()
    except error.LockError:
        return '2\n' # error: 2

    def streamer(repo, entries, total):
        '''stream out all metadata files in repository.'''
        yield '0\n' # success
        repo.ui.debug('%d files, %d bytes to transfer\n' %
                      (len(entries), total_bytes))
        yield '%d %d\n' % (len(entries), total_bytes)

        sopener = repo.sopener
        oldaudit = sopener.mustaudit
        debugflag = repo.ui.debugflag
        # path auditing is switched off while streaming; restored below
        sopener.mustaudit = False

        try:
            for name, size in entries:
                if debugflag:
                    repo.ui.debug('sending %s (%d bytes)\n' % (name, size))
                # partially encode name over the wire for backwards compat
                yield '%s\0%d\n' % (store.encodedir(name), size)
                if size <= 65536:
                    # small file: read it in a single call
                    fp = sopener(name)
                    try:
                        data = fp.read(size)
                    finally:
                        fp.close()
                    yield data
                else:
                    # large file: stream it chunk by chunk
                    for chunk in util.filechunkiter(sopener(name), limit=size):
                        yield chunk
        # replace with "finally:" when support for python 2.4 has been dropped
        except Exception:
            sopener.mustaudit = oldaudit
            raise
        sopener.mustaudit = oldaudit

    return streamres(streamer(repo, entries, total_bytes))
595
600
def unbundle(repo, proto, heads):
    # Wire command 'unbundle': receive a bundle pushed by the client
    # and apply it.  *heads* is the client's view of our heads (or
    # 'force', or a 'hashed' digest) and is used to detect races with
    # concurrent pushes.
    their_heads = decodelist(heads)

    def check_heads():
        # true when the client's view of our heads is still accurate
        # (or it asked to push unconditionally)
        heads = repo.heads()
        heads_hash = util.sha1(''.join(sorted(heads))).digest()
        return (their_heads == ['force'] or their_heads == heads or
                their_heads == ['hashed', heads_hash])

    proto.redirect()

    # fail early if possible
    if not check_heads():
        return pusherr('repository changed while preparing changes - '
                       'please try again')

    # write bundle data to temporary file because it can be big
    fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-')
    fp = os.fdopen(fd, 'wb+')
    r = 0
    try:
        proto.getfile(fp)
        lock = repo.lock()
        try:
            if not check_heads():
                # someone else committed/pushed/unbundled while we
                # were transferring data
                return pusherr('repository changed while uploading changes - '
                               'please try again')

            # push can proceed
            fp.seek(0)
            gen = changegroupmod.readbundle(fp, None)

            try:
                r = repo.addchangegroup(gen, 'serve', proto._client())
            except util.Abort, inst:
                sys.stderr.write("abort: %s\n" % inst)
        finally:
            lock.release()
        return pushres(r)

    finally:
        fp.close()
        os.unlink(tempname)
641
646
# wire protocol command table: name -> (handler, argument spec).  The
# spec is a space-separated list of positional argument names; '*'
# collects any remaining keyword arguments into a dict (see batch()
# and dispatch() for how specs are interpreted).
commands = {
    'batch': (batch, 'cmds *'),
    'between': (between, 'pairs'),
    'branchmap': (branchmap, ''),
    'branches': (branches, 'nodes'),
    'capabilities': (capabilities, ''),
    'changegroup': (changegroup, 'roots'),
    'changegroupsubset': (changegroupsubset, 'bases heads'),
    'debugwireargs': (debugwireargs, 'one two *'),
    'getbundle': (getbundle, '*'),
    'heads': (heads, ''),
    'hello': (hello, ''),
    'known': (known, 'nodes *'),
    'listkeys': (listkeys, 'namespace'),
    'lookup': (lookup, 'key'),
    'pushkey': (pushkey, 'namespace key old new'),
    'stream_out': (stream, ''),
    'unbundle': (unbundle, 'heads'),
}
General Comments 0
You need to be logged in to leave comments. Login now