backout dbdb777502dc (issue3077) (issue3071)...
Matt Mackall
r15381:c519cd8f stable

In each file below, the change reverts calls to util.realpath() back to os.path.realpath().
@@ -1,291 +1,291 b''
1 """reorder a revlog (the manifest by default) to save space
1 """reorder a revlog (the manifest by default) to save space
2
2
3 Specifically, this topologically sorts the revisions in the revlog so that
3 Specifically, this topologically sorts the revisions in the revlog so that
4 revisions on the same branch are adjacent as much as possible. This is a
4 revisions on the same branch are adjacent as much as possible. This is a
5 workaround for the fact that Mercurial computes deltas relative to the
5 workaround for the fact that Mercurial computes deltas relative to the
6 previous revision rather than relative to a parent revision.
6 previous revision rather than relative to a parent revision.
7
7
8 This is *not* safe to run on a changelog.
8 This is *not* safe to run on a changelog.
9 """
9 """
10
10
11 # Originally written by Benoit Boissinot <benoit.boissinot at ens-lyon.org>
11 # Originally written by Benoit Boissinot <benoit.boissinot at ens-lyon.org>
12 # as a patch to rewrite-log. Cleaned up, refactored, documented, and
12 # as a patch to rewrite-log. Cleaned up, refactored, documented, and
13 # renamed by Greg Ward <greg at gerg.ca>.
13 # renamed by Greg Ward <greg at gerg.ca>.
14
14
15 # XXX would be nice to have a way to verify the repository after shrinking,
15 # XXX would be nice to have a way to verify the repository after shrinking,
16 # e.g. by comparing "before" and "after" states of random changesets
16 # e.g. by comparing "before" and "after" states of random changesets
17 # (maybe: export before, shrink, export after, diff).
17 # (maybe: export before, shrink, export after, diff).
18
18
19 import os, tempfile, errno
19 import os, tempfile, errno
20 from mercurial import revlog, transaction, node, util, scmutil
20 from mercurial import revlog, transaction, node, util, scmutil
21 from mercurial import changegroup
21 from mercurial import changegroup
22 from mercurial.i18n import _
22 from mercurial.i18n import _
23
23
24
24
25 def postorder(start, edges):
25 def postorder(start, edges):
26 result = []
26 result = []
27 visit = list(start)
27 visit = list(start)
28 finished = set()
28 finished = set()
29
29
30 while visit:
30 while visit:
31 cur = visit[-1]
31 cur = visit[-1]
32 for p in edges[cur]:
32 for p in edges[cur]:
33 # defend against node.nullrev because it's occasionally
33 # defend against node.nullrev because it's occasionally
34 # possible for a node to have parents (null, something)
34 # possible for a node to have parents (null, something)
35 # rather than (something, null)
35 # rather than (something, null)
36 if p not in finished and p != node.nullrev:
36 if p not in finished and p != node.nullrev:
37 visit.append(p)
37 visit.append(p)
38 break
38 break
39 else:
39 else:
40 result.append(cur)
40 result.append(cur)
41 finished.add(cur)
41 finished.add(cur)
42 visit.pop()
42 visit.pop()
43
43
44 return result
44 return result
45
45
46 def toposort_reversepostorder(ui, rl):
46 def toposort_reversepostorder(ui, rl):
47 # postorder of the reverse directed graph
47 # postorder of the reverse directed graph
48
48
49 # map rev to list of parent revs (p2 first)
49 # map rev to list of parent revs (p2 first)
50 parents = {}
50 parents = {}
51 heads = set()
51 heads = set()
52 ui.status(_('reading revs\n'))
52 ui.status(_('reading revs\n'))
53 try:
53 try:
54 for rev in rl:
54 for rev in rl:
55 ui.progress(_('reading'), rev, total=len(rl))
55 ui.progress(_('reading'), rev, total=len(rl))
56 (p1, p2) = rl.parentrevs(rev)
56 (p1, p2) = rl.parentrevs(rev)
57 if p1 == p2 == node.nullrev:
57 if p1 == p2 == node.nullrev:
58 parents[rev] = () # root node
58 parents[rev] = () # root node
59 elif p1 == p2 or p2 == node.nullrev:
59 elif p1 == p2 or p2 == node.nullrev:
60 parents[rev] = (p1,) # normal node
60 parents[rev] = (p1,) # normal node
61 else:
61 else:
62 parents[rev] = (p2, p1) # merge node
62 parents[rev] = (p2, p1) # merge node
63 heads.add(rev)
63 heads.add(rev)
64 for p in parents[rev]:
64 for p in parents[rev]:
65 heads.discard(p)
65 heads.discard(p)
66 finally:
66 finally:
67 ui.progress(_('reading'), None)
67 ui.progress(_('reading'), None)
68
68
69 heads = list(heads)
69 heads = list(heads)
70 heads.sort(reverse=True)
70 heads.sort(reverse=True)
71
71
72 ui.status(_('sorting revs\n'))
72 ui.status(_('sorting revs\n'))
73 return postorder(heads, parents)
73 return postorder(heads, parents)
74
74
75 def toposort_postorderreverse(ui, rl):
75 def toposort_postorderreverse(ui, rl):
76 # reverse-postorder of the reverse directed graph
76 # reverse-postorder of the reverse directed graph
77
77
78 children = {}
78 children = {}
79 roots = set()
79 roots = set()
80 ui.status(_('reading revs\n'))
80 ui.status(_('reading revs\n'))
81 try:
81 try:
82 for rev in rl:
82 for rev in rl:
83 ui.progress(_('reading'), rev, total=len(rl))
83 ui.progress(_('reading'), rev, total=len(rl))
84 (p1, p2) = rl.parentrevs(rev)
84 (p1, p2) = rl.parentrevs(rev)
85 if p1 == p2 == node.nullrev:
85 if p1 == p2 == node.nullrev:
86 roots.add(rev)
86 roots.add(rev)
87 children[rev] = []
87 children[rev] = []
88 if p1 != node.nullrev:
88 if p1 != node.nullrev:
89 children[p1].append(rev)
89 children[p1].append(rev)
90 if p2 != node.nullrev:
90 if p2 != node.nullrev:
91 children[p2].append(rev)
91 children[p2].append(rev)
92 finally:
92 finally:
93 ui.progress(_('reading'), None)
93 ui.progress(_('reading'), None)
94
94
95 roots = list(roots)
95 roots = list(roots)
96 roots.sort()
96 roots.sort()
97
97
98 ui.status(_('sorting revs\n'))
98 ui.status(_('sorting revs\n'))
99 result = postorder(roots, children)
99 result = postorder(roots, children)
100 result.reverse()
100 result.reverse()
101 return result
101 return result
102
102
103 def writerevs(ui, r1, r2, order, tr):
103 def writerevs(ui, r1, r2, order, tr):
104
104
105 ui.status(_('writing revs\n'))
105 ui.status(_('writing revs\n'))
106
106
107
107
108 order = [r1.node(r) for r in order]
108 order = [r1.node(r) for r in order]
109
109
110 # this is a bit ugly, but it works
110 # this is a bit ugly, but it works
111 count = [0]
111 count = [0]
112 def lookup(revl, x):
112 def lookup(revl, x):
113 count[0] += 1
113 count[0] += 1
114 ui.progress(_('writing'), count[0], total=len(order))
114 ui.progress(_('writing'), count[0], total=len(order))
115 return "%020d" % revl.linkrev(revl.rev(x))
115 return "%020d" % revl.linkrev(revl.rev(x))
116
116
117 unlookup = lambda x: int(x, 10)
117 unlookup = lambda x: int(x, 10)
118
118
119 try:
119 try:
120 bundler = changegroup.bundle10(lookup)
120 bundler = changegroup.bundle10(lookup)
121 group = util.chunkbuffer(r1.group(order, bundler))
121 group = util.chunkbuffer(r1.group(order, bundler))
122 group = changegroup.unbundle10(group, "UN")
122 group = changegroup.unbundle10(group, "UN")
123 r2.addgroup(group, unlookup, tr)
123 r2.addgroup(group, unlookup, tr)
124 finally:
124 finally:
125 ui.progress(_('writing'), None)
125 ui.progress(_('writing'), None)
126
126
127 def report(ui, r1, r2):
127 def report(ui, r1, r2):
128 def getsize(r):
128 def getsize(r):
129 s = 0
129 s = 0
130 for fn in (r.indexfile, r.datafile):
130 for fn in (r.indexfile, r.datafile):
131 try:
131 try:
132 s += os.stat(fn).st_size
132 s += os.stat(fn).st_size
133 except OSError, inst:
133 except OSError, inst:
134 if inst.errno != errno.ENOENT:
134 if inst.errno != errno.ENOENT:
135 raise
135 raise
136 return s
136 return s
137
137
138 oldsize = float(getsize(r1))
138 oldsize = float(getsize(r1))
139 newsize = float(getsize(r2))
139 newsize = float(getsize(r2))
140
140
141 # argh: have to pass an int to %d, because a float >= 2^32
141 # argh: have to pass an int to %d, because a float >= 2^32
142 # blows up under Python 2.5 or earlier
142 # blows up under Python 2.5 or earlier
143 ui.write(_('old file size: %12d bytes (%6.1f MiB)\n')
143 ui.write(_('old file size: %12d bytes (%6.1f MiB)\n')
144 % (int(oldsize), oldsize / 1024 / 1024))
144 % (int(oldsize), oldsize / 1024 / 1024))
145 ui.write(_('new file size: %12d bytes (%6.1f MiB)\n')
145 ui.write(_('new file size: %12d bytes (%6.1f MiB)\n')
146 % (int(newsize), newsize / 1024 / 1024))
146 % (int(newsize), newsize / 1024 / 1024))
147
147
148 shrink_percent = (oldsize - newsize) / oldsize * 100
148 shrink_percent = (oldsize - newsize) / oldsize * 100
149 shrink_factor = oldsize / newsize
149 shrink_factor = oldsize / newsize
150 ui.write(_('shrinkage: %.1f%% (%.1fx)\n')
150 ui.write(_('shrinkage: %.1f%% (%.1fx)\n')
151 % (shrink_percent, shrink_factor))
151 % (shrink_percent, shrink_factor))
152
152
153 def shrink(ui, repo, **opts):
153 def shrink(ui, repo, **opts):
154 """shrink a revlog by reordering revisions
154 """shrink a revlog by reordering revisions
155
155
156 Rewrites all the entries in some revlog of the current repository
156 Rewrites all the entries in some revlog of the current repository
157 (by default, the manifest log) to save space.
157 (by default, the manifest log) to save space.
158
158
159 Different sort algorithms have different performance
159 Different sort algorithms have different performance
160 characteristics. Use ``--sort`` to select a sort algorithm so you
160 characteristics. Use ``--sort`` to select a sort algorithm so you
161 can determine which works best for your data.
161 can determine which works best for your data.
162 """
162 """
163
163
164 if not repo.local():
164 if not repo.local():
165 raise util.Abort(_('not a local repository: %s') % repo.root)
165 raise util.Abort(_('not a local repository: %s') % repo.root)
166
166
167 fn = opts.get('revlog')
167 fn = opts.get('revlog')
168 if not fn:
168 if not fn:
169 indexfn = repo.sjoin('00manifest.i')
169 indexfn = repo.sjoin('00manifest.i')
170 else:
170 else:
171 if not fn.endswith('.i'):
171 if not fn.endswith('.i'):
172 raise util.Abort(_('--revlog option must specify the revlog index '
172 raise util.Abort(_('--revlog option must specify the revlog index '
173 'file (*.i), not %s') % opts.get('revlog'))
173 'file (*.i), not %s') % opts.get('revlog'))
174
174
175 - indexfn = util.realpath(fn)
175 + indexfn = os.path.realpath(fn)
176 store = repo.sjoin('')
176 store = repo.sjoin('')
177 if not indexfn.startswith(store):
177 if not indexfn.startswith(store):
178 raise util.Abort(_('--revlog option must specify a revlog in %s, '
178 raise util.Abort(_('--revlog option must specify a revlog in %s, '
179 'not %s') % (store, indexfn))
179 'not %s') % (store, indexfn))
180
180
181 sortname = opts['sort']
181 sortname = opts['sort']
182 try:
182 try:
183 toposort = globals()['toposort_' + sortname]
183 toposort = globals()['toposort_' + sortname]
184 except KeyError:
184 except KeyError:
185 raise util.Abort(_('no such toposort algorithm: %s') % sortname)
185 raise util.Abort(_('no such toposort algorithm: %s') % sortname)
186
186
187 if not os.path.exists(indexfn):
187 if not os.path.exists(indexfn):
188 raise util.Abort(_('no such file: %s') % indexfn)
188 raise util.Abort(_('no such file: %s') % indexfn)
189 if '00changelog' in indexfn:
189 if '00changelog' in indexfn:
190 raise util.Abort(_('shrinking the changelog '
190 raise util.Abort(_('shrinking the changelog '
191 'will corrupt your repository'))
191 'will corrupt your repository'))
192
192
193 ui.write(_('shrinking %s\n') % indexfn)
193 ui.write(_('shrinking %s\n') % indexfn)
194 prefix = os.path.basename(indexfn)[:-1]
194 prefix = os.path.basename(indexfn)[:-1]
195 tmpindexfn = util.mktempcopy(indexfn, emptyok=True)
195 tmpindexfn = util.mktempcopy(indexfn, emptyok=True)
196
196
197 r1 = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), indexfn)
197 r1 = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), indexfn)
198 r2 = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), tmpindexfn)
198 r2 = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), tmpindexfn)
199
199
200 datafn, tmpdatafn = r1.datafile, r2.datafile
200 datafn, tmpdatafn = r1.datafile, r2.datafile
201
201
202 oldindexfn = indexfn + '.old'
202 oldindexfn = indexfn + '.old'
203 olddatafn = datafn + '.old'
203 olddatafn = datafn + '.old'
204 if os.path.exists(oldindexfn) or os.path.exists(olddatafn):
204 if os.path.exists(oldindexfn) or os.path.exists(olddatafn):
205 raise util.Abort(_('one or both of\n'
205 raise util.Abort(_('one or both of\n'
206 ' %s\n'
206 ' %s\n'
207 ' %s\n'
207 ' %s\n'
208 'exists from a previous run; please clean up '
208 'exists from a previous run; please clean up '
209 'before running again') % (oldindexfn, olddatafn))
209 'before running again') % (oldindexfn, olddatafn))
210
210
211 # Don't use repo.transaction(), because then things get hairy with
211 # Don't use repo.transaction(), because then things get hairy with
212 # paths: some need to be relative to .hg, and some need to be
212 # paths: some need to be relative to .hg, and some need to be
213 # absolute. Doing it this way keeps things simple: everything is an
213 # absolute. Doing it this way keeps things simple: everything is an
214 # absolute path.
214 # absolute path.
215 lock = repo.lock(wait=False)
215 lock = repo.lock(wait=False)
216 tr = transaction.transaction(ui.warn,
216 tr = transaction.transaction(ui.warn,
217 open,
217 open,
218 repo.sjoin('journal'))
218 repo.sjoin('journal'))
219
219
220 def ignoremissing(func):
220 def ignoremissing(func):
221 def f(*args, **kw):
221 def f(*args, **kw):
222 try:
222 try:
223 return func(*args, **kw)
223 return func(*args, **kw)
224 except OSError, inst:
224 except OSError, inst:
225 if inst.errno != errno.ENOENT:
225 if inst.errno != errno.ENOENT:
226 raise
226 raise
227 return f
227 return f
228
228
229 try:
229 try:
230 try:
230 try:
231 order = toposort(ui, r1)
231 order = toposort(ui, r1)
232
232
233 suboptimal = 0
233 suboptimal = 0
234 for i in xrange(1, len(order)):
234 for i in xrange(1, len(order)):
235 parents = [p for p in r1.parentrevs(order[i])
235 parents = [p for p in r1.parentrevs(order[i])
236 if p != node.nullrev]
236 if p != node.nullrev]
237 if parents and order[i - 1] not in parents:
237 if parents and order[i - 1] not in parents:
238 suboptimal += 1
238 suboptimal += 1
239 ui.note(_('%d suboptimal nodes\n') % suboptimal)
239 ui.note(_('%d suboptimal nodes\n') % suboptimal)
240
240
241 writerevs(ui, r1, r2, order, tr)
241 writerevs(ui, r1, r2, order, tr)
242 report(ui, r1, r2)
242 report(ui, r1, r2)
243 tr.close()
243 tr.close()
244 except:
244 except:
245 # Abort transaction first, so we truncate the files before
245 # Abort transaction first, so we truncate the files before
246 # deleting them.
246 # deleting them.
247 tr.abort()
247 tr.abort()
248 for fn in (tmpindexfn, tmpdatafn):
248 for fn in (tmpindexfn, tmpdatafn):
249 ignoremissing(os.unlink)(fn)
249 ignoremissing(os.unlink)(fn)
250 raise
250 raise
251 if not opts.get('dry_run'):
251 if not opts.get('dry_run'):
252 # racy, both files cannot be renamed atomically
252 # racy, both files cannot be renamed atomically
253 # copy files
253 # copy files
254 util.oslink(indexfn, oldindexfn)
254 util.oslink(indexfn, oldindexfn)
255 ignoremissing(util.oslink)(datafn, olddatafn)
255 ignoremissing(util.oslink)(datafn, olddatafn)
256
256
257 # rename
257 # rename
258 util.rename(tmpindexfn, indexfn)
258 util.rename(tmpindexfn, indexfn)
259 try:
259 try:
260 os.chmod(tmpdatafn, os.stat(datafn).st_mode)
260 os.chmod(tmpdatafn, os.stat(datafn).st_mode)
261 util.rename(tmpdatafn, datafn)
261 util.rename(tmpdatafn, datafn)
262 except OSError, inst:
262 except OSError, inst:
263 if inst.errno != errno.ENOENT:
263 if inst.errno != errno.ENOENT:
264 raise
264 raise
265 ignoremissing(os.unlink)(datafn)
265 ignoremissing(os.unlink)(datafn)
266 else:
266 else:
267 for fn in (tmpindexfn, tmpdatafn):
267 for fn in (tmpindexfn, tmpdatafn):
268 ignoremissing(os.unlink)(fn)
268 ignoremissing(os.unlink)(fn)
269 finally:
269 finally:
270 lock.release()
270 lock.release()
271
271
272 if not opts.get('dry_run'):
272 if not opts.get('dry_run'):
273 ui.write(_('note: old revlog saved in:\n'
273 ui.write(_('note: old revlog saved in:\n'
274 ' %s\n'
274 ' %s\n'
275 ' %s\n'
275 ' %s\n'
276 '(You can delete those files when you are satisfied that your\n'
276 '(You can delete those files when you are satisfied that your\n'
277 'repository is still sane. '
277 'repository is still sane. '
278 'Running \'hg verify\' is strongly recommended.)\n')
278 'Running \'hg verify\' is strongly recommended.)\n')
279 % (oldindexfn, olddatafn))
279 % (oldindexfn, olddatafn))
280
280
281 cmdtable = {
281 cmdtable = {
282 'shrink': (shrink,
282 'shrink': (shrink,
283 [('', 'revlog', '', _('index (.i) file of the revlog to shrink')),
283 [('', 'revlog', '', _('index (.i) file of the revlog to shrink')),
284 ('n', 'dry-run', None, _('do not shrink, simulate only')),
284 ('n', 'dry-run', None, _('do not shrink, simulate only')),
285 ('', 'sort', 'reversepostorder', 'name of sort algorithm to use'),
285 ('', 'sort', 'reversepostorder', 'name of sort algorithm to use'),
286 ],
286 ],
287 _('hg shrink [--revlog PATH]'))
287 _('hg shrink [--revlog PATH]'))
288 }
288 }
289
289
290 if __name__ == "__main__":
290 if __name__ == "__main__":
291 print "shrink-revlog.py is now an extension (see hg help extensions)"
291 print "shrink-revlog.py is now an extension (see hg help extensions)"
@@ -1,200 +1,200 b''
1 # darcs.py - darcs support for the convert extension
1 # darcs.py - darcs support for the convert extension
2 #
2 #
3 # Copyright 2007-2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2007-2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from common import NoRepo, checktool, commandline, commit, converter_source
8 from common import NoRepo, checktool, commandline, commit, converter_source
9 from mercurial.i18n import _
9 from mercurial.i18n import _
10 from mercurial import util
10 from mercurial import util
11 import os, shutil, tempfile, re
11 import os, shutil, tempfile, re
12
12
13 # The naming drift of ElementTree is fun!
13 # The naming drift of ElementTree is fun!
14
14
15 try:
15 try:
16 from xml.etree.cElementTree import ElementTree, XMLParser
16 from xml.etree.cElementTree import ElementTree, XMLParser
17 except ImportError:
17 except ImportError:
18 try:
18 try:
19 from xml.etree.ElementTree import ElementTree, XMLParser
19 from xml.etree.ElementTree import ElementTree, XMLParser
20 except ImportError:
20 except ImportError:
21 try:
21 try:
22 from elementtree.cElementTree import ElementTree, XMLParser
22 from elementtree.cElementTree import ElementTree, XMLParser
23 except ImportError:
23 except ImportError:
24 try:
24 try:
25 from elementtree.ElementTree import ElementTree, XMLParser
25 from elementtree.ElementTree import ElementTree, XMLParser
26 except ImportError:
26 except ImportError:
27 ElementTree = None
27 ElementTree = None
28
28
29 class darcs_source(converter_source, commandline):
29 class darcs_source(converter_source, commandline):
30 def __init__(self, ui, path, rev=None):
30 def __init__(self, ui, path, rev=None):
31 converter_source.__init__(self, ui, path, rev=rev)
31 converter_source.__init__(self, ui, path, rev=rev)
32 commandline.__init__(self, ui, 'darcs')
32 commandline.__init__(self, ui, 'darcs')
33
33
34 # check for _darcs, ElementTree so that we can easily skip
34 # check for _darcs, ElementTree so that we can easily skip
35 # test-convert-darcs if ElementTree is not around
35 # test-convert-darcs if ElementTree is not around
36 if not os.path.exists(os.path.join(path, '_darcs')):
36 if not os.path.exists(os.path.join(path, '_darcs')):
37 raise NoRepo(_("%s does not look like a darcs repository") % path)
37 raise NoRepo(_("%s does not look like a darcs repository") % path)
38
38
39 checktool('darcs')
39 checktool('darcs')
40 version = self.run0('--version').splitlines()[0].strip()
40 version = self.run0('--version').splitlines()[0].strip()
41 if version < '2.1':
41 if version < '2.1':
42 raise util.Abort(_('darcs version 2.1 or newer needed (found %r)') %
42 raise util.Abort(_('darcs version 2.1 or newer needed (found %r)') %
43 version)
43 version)
44
44
45 if ElementTree is None:
45 if ElementTree is None:
46 raise util.Abort(_("Python ElementTree module is not available"))
46 raise util.Abort(_("Python ElementTree module is not available"))
47
47
48 - self.path = util.realpath(path)
48 + self.path = os.path.realpath(path)
49
49
50 self.lastrev = None
50 self.lastrev = None
51 self.changes = {}
51 self.changes = {}
52 self.parents = {}
52 self.parents = {}
53 self.tags = {}
53 self.tags = {}
54
54
55 # Check darcs repository format
55 # Check darcs repository format
56 format = self.format()
56 format = self.format()
57 if format:
57 if format:
58 if format in ('darcs-1.0', 'hashed'):
58 if format in ('darcs-1.0', 'hashed'):
59 raise NoRepo(_("%s repository format is unsupported, "
59 raise NoRepo(_("%s repository format is unsupported, "
60 "please upgrade") % format)
60 "please upgrade") % format)
61 else:
61 else:
62 self.ui.warn(_('failed to detect repository format!'))
62 self.ui.warn(_('failed to detect repository format!'))
63
63
64 def before(self):
64 def before(self):
65 self.tmppath = tempfile.mkdtemp(
65 self.tmppath = tempfile.mkdtemp(
66 prefix='convert-' + os.path.basename(self.path) + '-')
66 prefix='convert-' + os.path.basename(self.path) + '-')
67 output, status = self.run('init', repodir=self.tmppath)
67 output, status = self.run('init', repodir=self.tmppath)
68 self.checkexit(status)
68 self.checkexit(status)
69
69
70 tree = self.xml('changes', xml_output=True, summary=True,
70 tree = self.xml('changes', xml_output=True, summary=True,
71 repodir=self.path)
71 repodir=self.path)
72 tagname = None
72 tagname = None
73 child = None
73 child = None
74 for elt in tree.findall('patch'):
74 for elt in tree.findall('patch'):
75 node = elt.get('hash')
75 node = elt.get('hash')
76 name = elt.findtext('name', '')
76 name = elt.findtext('name', '')
77 if name.startswith('TAG '):
77 if name.startswith('TAG '):
78 tagname = name[4:].strip()
78 tagname = name[4:].strip()
79 elif tagname is not None:
79 elif tagname is not None:
80 self.tags[tagname] = node
80 self.tags[tagname] = node
81 tagname = None
81 tagname = None
82 self.changes[node] = elt
82 self.changes[node] = elt
83 self.parents[child] = [node]
83 self.parents[child] = [node]
84 child = node
84 child = node
85 self.parents[child] = []
85 self.parents[child] = []
86
86
87 def after(self):
87 def after(self):
88 self.ui.debug('cleaning up %s\n' % self.tmppath)
88 self.ui.debug('cleaning up %s\n' % self.tmppath)
89 shutil.rmtree(self.tmppath, ignore_errors=True)
89 shutil.rmtree(self.tmppath, ignore_errors=True)
90
90
91 def recode(self, s, encoding=None):
91 def recode(self, s, encoding=None):
92 if isinstance(s, unicode):
92 if isinstance(s, unicode):
93 # XMLParser returns unicode objects for anything it can't
93 # XMLParser returns unicode objects for anything it can't
94 # encode into ASCII. We convert them back to str to get
94 # encode into ASCII. We convert them back to str to get
95 # recode's normal conversion behavior.
95 # recode's normal conversion behavior.
96 s = s.encode('latin-1')
96 s = s.encode('latin-1')
97 return super(darcs_source, self).recode(s, encoding)
97 return super(darcs_source, self).recode(s, encoding)
98
98
99 def xml(self, cmd, **kwargs):
99 def xml(self, cmd, **kwargs):
100 # NOTE: darcs is currently encoding agnostic and will print
100 # NOTE: darcs is currently encoding agnostic and will print
101 # patch metadata byte-for-byte, even in the XML changelog.
101 # patch metadata byte-for-byte, even in the XML changelog.
102 etree = ElementTree()
102 etree = ElementTree()
103 # While we are decoding the XML as latin-1 to be as liberal as
103 # While we are decoding the XML as latin-1 to be as liberal as
104 # possible, etree will still raise an exception if any
104 # possible, etree will still raise an exception if any
105 # non-printable characters are in the XML changelog.
105 # non-printable characters are in the XML changelog.
106 parser = XMLParser(encoding='latin-1')
106 parser = XMLParser(encoding='latin-1')
107 fp = self._run(cmd, **kwargs)
107 fp = self._run(cmd, **kwargs)
108 etree.parse(fp, parser=parser)
108 etree.parse(fp, parser=parser)
109 self.checkexit(fp.close())
109 self.checkexit(fp.close())
110 return etree.getroot()
110 return etree.getroot()
111
111
112 def format(self):
112 def format(self):
113 output, status = self.run('show', 'repo', no_files=True,
113 output, status = self.run('show', 'repo', no_files=True,
114 repodir=self.path)
114 repodir=self.path)
115 self.checkexit(status)
115 self.checkexit(status)
116 m = re.search(r'^\s*Format:\s*(.*)$', output, re.MULTILINE)
116 m = re.search(r'^\s*Format:\s*(.*)$', output, re.MULTILINE)
117 if not m:
117 if not m:
118 return None
118 return None
119 return ','.join(sorted(f.strip() for f in m.group(1).split(',')))
119 return ','.join(sorted(f.strip() for f in m.group(1).split(',')))
120
120
121 def manifest(self):
121 def manifest(self):
122 man = []
122 man = []
123 output, status = self.run('show', 'files', no_directories=True,
123 output, status = self.run('show', 'files', no_directories=True,
124 repodir=self.tmppath)
124 repodir=self.tmppath)
125 self.checkexit(status)
125 self.checkexit(status)
126 for line in output.split('\n'):
126 for line in output.split('\n'):
127 path = line[2:]
127 path = line[2:]
128 if path:
128 if path:
129 man.append(path)
129 man.append(path)
130 return man
130 return man
131
131
132 def getheads(self):
132 def getheads(self):
133 return self.parents[None]
133 return self.parents[None]
134
134
135 def getcommit(self, rev):
135 def getcommit(self, rev):
136 elt = self.changes[rev]
136 elt = self.changes[rev]
137 date = util.strdate(elt.get('local_date'), '%a %b %d %H:%M:%S %Z %Y')
137 date = util.strdate(elt.get('local_date'), '%a %b %d %H:%M:%S %Z %Y')
138 desc = elt.findtext('name') + '\n' + elt.findtext('comment', '')
138 desc = elt.findtext('name') + '\n' + elt.findtext('comment', '')
139 # etree can return unicode objects for name, comment, and author,
139 # etree can return unicode objects for name, comment, and author,
140 # so recode() is used to ensure str objects are emitted.
140 # so recode() is used to ensure str objects are emitted.
141 return commit(author=self.recode(elt.get('author')),
141 return commit(author=self.recode(elt.get('author')),
142 date=util.datestr(date),
142 date=util.datestr(date),
143 desc=self.recode(desc).strip(),
143 desc=self.recode(desc).strip(),
144 parents=self.parents[rev])
144 parents=self.parents[rev])
145
145
146 def pull(self, rev):
146 def pull(self, rev):
147 output, status = self.run('pull', self.path, all=True,
147 output, status = self.run('pull', self.path, all=True,
148 match='hash %s' % rev,
148 match='hash %s' % rev,
149 no_test=True, no_posthook=True,
149 no_test=True, no_posthook=True,
150 external_merge='/bin/false',
150 external_merge='/bin/false',
151 repodir=self.tmppath)
151 repodir=self.tmppath)
152 if status:
152 if status:
153 if output.find('We have conflicts in') == -1:
153 if output.find('We have conflicts in') == -1:
154 self.checkexit(status, output)
154 self.checkexit(status, output)
155 output, status = self.run('revert', all=True, repodir=self.tmppath)
155 output, status = self.run('revert', all=True, repodir=self.tmppath)
156 self.checkexit(status, output)
156 self.checkexit(status, output)
157
157
158 def getchanges(self, rev):
158 def getchanges(self, rev):
159 copies = {}
159 copies = {}
160 changes = []
160 changes = []
161 man = None
161 man = None
162 for elt in self.changes[rev].find('summary').getchildren():
162 for elt in self.changes[rev].find('summary').getchildren():
163 if elt.tag in ('add_directory', 'remove_directory'):
163 if elt.tag in ('add_directory', 'remove_directory'):
164 continue
164 continue
165 if elt.tag == 'move':
165 if elt.tag == 'move':
166 if man is None:
166 if man is None:
167 man = self.manifest()
167 man = self.manifest()
168 source, dest = elt.get('from'), elt.get('to')
168 source, dest = elt.get('from'), elt.get('to')
169 if source in man:
169 if source in man:
170 # File move
170 # File move
171 changes.append((source, rev))
171 changes.append((source, rev))
172 changes.append((dest, rev))
172 changes.append((dest, rev))
173 copies[dest] = source
173 copies[dest] = source
174 else:
174 else:
175 # Directory move, deduce file moves from manifest
175 # Directory move, deduce file moves from manifest
176 source = source + '/'
176 source = source + '/'
177 for f in man:
177 for f in man:
178 if not f.startswith(source):
178 if not f.startswith(source):
179 continue
179 continue
180 fdest = dest + '/' + f[len(source):]
180 fdest = dest + '/' + f[len(source):]
181 changes.append((f, rev))
181 changes.append((f, rev))
182 changes.append((fdest, rev))
182 changes.append((fdest, rev))
183 copies[fdest] = f
183 copies[fdest] = f
184 else:
184 else:
185 changes.append((elt.text.strip(), rev))
185 changes.append((elt.text.strip(), rev))
186 self.pull(rev)
186 self.pull(rev)
187 self.lastrev = rev
187 self.lastrev = rev
188 return sorted(changes), copies
188 return sorted(changes), copies
189
189
190 def getfile(self, name, rev):
190 def getfile(self, name, rev):
191 if rev != self.lastrev:
191 if rev != self.lastrev:
192 raise util.Abort(_('internal calling inconsistency'))
192 raise util.Abort(_('internal calling inconsistency'))
193 path = os.path.join(self.tmppath, name)
193 path = os.path.join(self.tmppath, name)
194 data = util.readfile(path)
194 data = util.readfile(path)
195 mode = os.lstat(path).st_mode
195 mode = os.lstat(path).st_mode
196 mode = (mode & 0111) and 'x' or ''
196 mode = (mode & 0111) and 'x' or ''
197 return data, mode
197 return data, mode
198
198
199 def gettags(self):
199 def gettags(self):
200 return self.tags
200 return self.tags
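
A note on the xml() helper above: decoding as latin-1 works because latin-1 assigns a character to every byte value, so byte-for-byte patch metadata cannot trip the decoder the way malformed UTF-8 would (though non-printable characters still make etree raise). A rough standalone sketch follows; the changelog string and hashes are invented, and it assumes Python 2.7's xml.etree.

    from xml.etree.ElementTree import XMLParser, fromstring

    # Shape loosely follows `darcs changes --xml-output`.
    sample = ("<changelog>"
              "<patch author='bob@example.com' hash='h2'><name>TAG 1.0</name></patch>"
              "<patch author='bob@example.com' hash='h1'><name>initial import</name></patch>"
              "</changelog>")
    # Parsing with a latin-1 XMLParser keeps arbitrary bytes in patch
    # metadata from raising a decode error.
    root = fromstring(sample, parser=XMLParser(encoding='latin-1'))
    assert [p.get('hash') for p in root.findall('patch')] == ['h2', 'h1']
    assert root.find('patch').findtext('name', '').startswith('TAG ')
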
@@ -1,338 +1,338 b''
1 # gnuarch.py - GNU Arch support for the convert extension
1 # gnuarch.py - GNU Arch support for the convert extension
2 #
2 #
3 # Copyright 2008, 2009 Aleix Conchillo Flaque <aleix@member.fsf.org>
3 # Copyright 2008, 2009 Aleix Conchillo Flaque <aleix@member.fsf.org>
4 # and others
4 # and others
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 from common import NoRepo, commandline, commit, converter_source
9 from common import NoRepo, commandline, commit, converter_source
10 from mercurial.i18n import _
10 from mercurial.i18n import _
11 from mercurial import encoding, util
11 from mercurial import encoding, util
12 import os, shutil, tempfile, stat
12 import os, shutil, tempfile, stat
13 from email.Parser import Parser
13 from email.Parser import Parser
14
14
15 class gnuarch_source(converter_source, commandline):
15 class gnuarch_source(converter_source, commandline):
16
16
17 class gnuarch_rev(object):
17 class gnuarch_rev(object):
18 def __init__(self, rev):
18 def __init__(self, rev):
19 self.rev = rev
19 self.rev = rev
20 self.summary = ''
20 self.summary = ''
21 self.date = None
21 self.date = None
22 self.author = ''
22 self.author = ''
23 self.continuationof = None
23 self.continuationof = None
24 self.add_files = []
24 self.add_files = []
25 self.mod_files = []
25 self.mod_files = []
26 self.del_files = []
26 self.del_files = []
27 self.ren_files = {}
27 self.ren_files = {}
28 self.ren_dirs = {}
28 self.ren_dirs = {}
29
29
30 def __init__(self, ui, path, rev=None):
30 def __init__(self, ui, path, rev=None):
31 super(gnuarch_source, self).__init__(ui, path, rev=rev)
31 super(gnuarch_source, self).__init__(ui, path, rev=rev)
32
32
33 if not os.path.exists(os.path.join(path, '{arch}')):
33 if not os.path.exists(os.path.join(path, '{arch}')):
34 raise NoRepo(_("%s does not look like a GNU Arch repository")
34 raise NoRepo(_("%s does not look like a GNU Arch repository")
35 % path)
35 % path)
36
36
37 # Could use checktool, but we want to check for baz or tla.
37 # Could use checktool, but we want to check for baz or tla.
38 self.execmd = None
38 self.execmd = None
39 if util.findexe('baz'):
39 if util.findexe('baz'):
40 self.execmd = 'baz'
40 self.execmd = 'baz'
41 else:
41 else:
42 if util.findexe('tla'):
42 if util.findexe('tla'):
43 self.execmd = 'tla'
43 self.execmd = 'tla'
44 else:
44 else:
45 raise util.Abort(_('cannot find a GNU Arch tool'))
45 raise util.Abort(_('cannot find a GNU Arch tool'))
46
46
47 commandline.__init__(self, ui, self.execmd)
47 commandline.__init__(self, ui, self.execmd)
48
48
49 - self.path = util.realpath(path)
49 + self.path = os.path.realpath(path)
50 self.tmppath = None
50 self.tmppath = None
51
51
52 self.treeversion = None
52 self.treeversion = None
53 self.lastrev = None
53 self.lastrev = None
54 self.changes = {}
54 self.changes = {}
55 self.parents = {}
55 self.parents = {}
56 self.tags = {}
56 self.tags = {}
57 self.catlogparser = Parser()
57 self.catlogparser = Parser()
58 self.encoding = encoding.encoding
58 self.encoding = encoding.encoding
59 self.archives = []
59 self.archives = []
60
60
61 def before(self):
61 def before(self):
62 # Get registered archives
62 # Get registered archives
63 self.archives = [i.rstrip('\n')
63 self.archives = [i.rstrip('\n')
64 for i in self.runlines0('archives', '-n')]
64 for i in self.runlines0('archives', '-n')]
65
65
66 if self.execmd == 'tla':
66 if self.execmd == 'tla':
67 output = self.run0('tree-version', self.path)
67 output = self.run0('tree-version', self.path)
68 else:
68 else:
69 output = self.run0('tree-version', '-d', self.path)
69 output = self.run0('tree-version', '-d', self.path)
70 self.treeversion = output.strip()
70 self.treeversion = output.strip()
71
71
72 # Get name of temporary directory
72 # Get name of temporary directory
73 version = self.treeversion.split('/')
73 version = self.treeversion.split('/')
74 self.tmppath = os.path.join(tempfile.gettempdir(),
74 self.tmppath = os.path.join(tempfile.gettempdir(),
75 'hg-%s' % version[1])
75 'hg-%s' % version[1])
76
76
77 # Generate parents dictionary
77 # Generate parents dictionary
78 self.parents[None] = []
78 self.parents[None] = []
79 treeversion = self.treeversion
79 treeversion = self.treeversion
80 child = None
80 child = None
81 while treeversion:
81 while treeversion:
82 self.ui.status(_('analyzing tree version %s...\n') % treeversion)
82 self.ui.status(_('analyzing tree version %s...\n') % treeversion)
83
83
84 archive = treeversion.split('/')[0]
84 archive = treeversion.split('/')[0]
85 if archive not in self.archives:
85 if archive not in self.archives:
86 self.ui.status(_('tree analysis stopped because it points to '
86 self.ui.status(_('tree analysis stopped because it points to '
87 'an unregistered archive %s...\n') % archive)
87 'an unregistered archive %s...\n') % archive)
88 break
88 break
89
89
90 # Get the complete list of revisions for that tree version
90 # Get the complete list of revisions for that tree version
91 output, status = self.runlines('revisions', '-r', '-f', treeversion)
91 output, status = self.runlines('revisions', '-r', '-f', treeversion)
92 self.checkexit(status, 'failed retrieving revisions for %s'
92 self.checkexit(status, 'failed retrieving revisions for %s'
93 % treeversion)
93 % treeversion)
94
94
95 # No new iteration unless a revision has a continuation-of header
95 # No new iteration unless a revision has a continuation-of header
96 treeversion = None
96 treeversion = None
97
97
98 for l in output:
98 for l in output:
99 rev = l.strip()
99 rev = l.strip()
100 self.changes[rev] = self.gnuarch_rev(rev)
100 self.changes[rev] = self.gnuarch_rev(rev)
101 self.parents[rev] = []
101 self.parents[rev] = []
102
102
103 # Read author, date and summary
103 # Read author, date and summary
104 catlog, status = self.run('cat-log', '-d', self.path, rev)
104 catlog, status = self.run('cat-log', '-d', self.path, rev)
105 if status:
105 if status:
106 catlog = self.run0('cat-archive-log', rev)
106 catlog = self.run0('cat-archive-log', rev)
107 self._parsecatlog(catlog, rev)
107 self._parsecatlog(catlog, rev)
108
108
109 # Populate the parents map
109 # Populate the parents map
110 self.parents[child].append(rev)
110 self.parents[child].append(rev)
111
111
112 # Keep track of the current revision as the child of the next
112 # Keep track of the current revision as the child of the next
113 # revision scanned
113 # revision scanned
114 child = rev
114 child = rev
115
115
116 # Check if we have to follow the usual incremental history
116 # Check if we have to follow the usual incremental history
117 # or if we have to 'jump' to a different treeversion given
117 # or if we have to 'jump' to a different treeversion given
118 # by the continuation-of header.
118 # by the continuation-of header.
119 if self.changes[rev].continuationof:
119 if self.changes[rev].continuationof:
120 treeversion = '--'.join(
120 treeversion = '--'.join(
121 self.changes[rev].continuationof.split('--')[:-1])
121 self.changes[rev].continuationof.split('--')[:-1])
122 break
122 break
123
123
124 # If we reached a base-0 revision w/o any continuation-of
124 # If we reached a base-0 revision w/o any continuation-of
125 # header, it means the tree history ends here.
125 # header, it means the tree history ends here.
126 if rev[-6:] == 'base-0':
126 if rev[-6:] == 'base-0':
127 break
127 break
128
128
129 def after(self):
129 def after(self):
130 self.ui.debug('cleaning up %s\n' % self.tmppath)
130 self.ui.debug('cleaning up %s\n' % self.tmppath)
131 shutil.rmtree(self.tmppath, ignore_errors=True)
131 shutil.rmtree(self.tmppath, ignore_errors=True)
132
132
133 def getheads(self):
133 def getheads(self):
134 return self.parents[None]
134 return self.parents[None]
135
135
136 def getfile(self, name, rev):
136 def getfile(self, name, rev):
137 if rev != self.lastrev:
137 if rev != self.lastrev:
138 raise util.Abort(_('internal calling inconsistency'))
138 raise util.Abort(_('internal calling inconsistency'))
139
139
140 # Raise IOError if necessary (i.e. deleted files).
140 # Raise IOError if necessary (i.e. deleted files).
141 if not os.path.lexists(os.path.join(self.tmppath, name)):
141 if not os.path.lexists(os.path.join(self.tmppath, name)):
142 raise IOError
142 raise IOError
143
143
144 return self._getfile(name, rev)
144 return self._getfile(name, rev)
145
145
146 def getchanges(self, rev):
146 def getchanges(self, rev):
147 self._update(rev)
147 self._update(rev)
148 changes = []
148 changes = []
149 copies = {}
149 copies = {}
150
150
151 for f in self.changes[rev].add_files:
151 for f in self.changes[rev].add_files:
152 changes.append((f, rev))
152 changes.append((f, rev))
153
153
154 for f in self.changes[rev].mod_files:
154 for f in self.changes[rev].mod_files:
155 changes.append((f, rev))
155 changes.append((f, rev))
156
156
157 for f in self.changes[rev].del_files:
157 for f in self.changes[rev].del_files:
158 changes.append((f, rev))
158 changes.append((f, rev))
159
159
160 for src in self.changes[rev].ren_files:
160 for src in self.changes[rev].ren_files:
161 to = self.changes[rev].ren_files[src]
161 to = self.changes[rev].ren_files[src]
162 changes.append((src, rev))
162 changes.append((src, rev))
163 changes.append((to, rev))
163 changes.append((to, rev))
164 copies[to] = src
164 copies[to] = src
165
165
166 for src in self.changes[rev].ren_dirs:
166 for src in self.changes[rev].ren_dirs:
167 to = self.changes[rev].ren_dirs[src]
167 to = self.changes[rev].ren_dirs[src]
168 chgs, cps = self._rendirchanges(src, to)
168 chgs, cps = self._rendirchanges(src, to)
169 changes += [(f, rev) for f in chgs]
169 changes += [(f, rev) for f in chgs]
170 copies.update(cps)
170 copies.update(cps)
171
171
172 self.lastrev = rev
172 self.lastrev = rev
173 return sorted(set(changes)), copies
173 return sorted(set(changes)), copies
174
174
175 def getcommit(self, rev):
175 def getcommit(self, rev):
176 changes = self.changes[rev]
176 changes = self.changes[rev]
177 return commit(author=changes.author, date=changes.date,
177 return commit(author=changes.author, date=changes.date,
178 desc=changes.summary, parents=self.parents[rev], rev=rev)
178 desc=changes.summary, parents=self.parents[rev], rev=rev)
179
179
180 def gettags(self):
180 def gettags(self):
181 return self.tags
181 return self.tags
182
182
183 def _execute(self, cmd, *args, **kwargs):
183 def _execute(self, cmd, *args, **kwargs):
184 cmdline = [self.execmd, cmd]
184 cmdline = [self.execmd, cmd]
185 cmdline += args
185 cmdline += args
186 cmdline = [util.shellquote(arg) for arg in cmdline]
186 cmdline = [util.shellquote(arg) for arg in cmdline]
187 cmdline += ['>', util.nulldev, '2>', util.nulldev]
187 cmdline += ['>', util.nulldev, '2>', util.nulldev]
188 cmdline = util.quotecommand(' '.join(cmdline))
188 cmdline = util.quotecommand(' '.join(cmdline))
189 self.ui.debug(cmdline, '\n')
189 self.ui.debug(cmdline, '\n')
190 return os.system(cmdline)
190 return os.system(cmdline)
191
191
192 def _update(self, rev):
192 def _update(self, rev):
193 self.ui.debug('applying revision %s...\n' % rev)
193 self.ui.debug('applying revision %s...\n' % rev)
194 changeset, status = self.runlines('replay', '-d', self.tmppath,
194 changeset, status = self.runlines('replay', '-d', self.tmppath,
195 rev)
195 rev)
196 if status:
196 if status:
197 # Something went wrong while merging (baz or tla
197 # Something went wrong while merging (baz or tla
198 # issue?), get latest revision and try from there
198 # issue?), get latest revision and try from there
199 shutil.rmtree(self.tmppath, ignore_errors=True)
199 shutil.rmtree(self.tmppath, ignore_errors=True)
200 self._obtainrevision(rev)
200 self._obtainrevision(rev)
201 else:
201 else:
202 old_rev = self.parents[rev][0]
202 old_rev = self.parents[rev][0]
203 self.ui.debug('computing changeset between %s and %s...\n'
203 self.ui.debug('computing changeset between %s and %s...\n'
204 % (old_rev, rev))
204 % (old_rev, rev))
205 self._parsechangeset(changeset, rev)
205 self._parsechangeset(changeset, rev)
206
206
207 def _getfile(self, name, rev):
207 def _getfile(self, name, rev):
208 mode = os.lstat(os.path.join(self.tmppath, name)).st_mode
208 mode = os.lstat(os.path.join(self.tmppath, name)).st_mode
209 if stat.S_ISLNK(mode):
209 if stat.S_ISLNK(mode):
210 data = os.readlink(os.path.join(self.tmppath, name))
210 data = os.readlink(os.path.join(self.tmppath, name))
211 mode = mode and 'l' or ''
211 mode = mode and 'l' or ''
212 else:
212 else:
213 data = open(os.path.join(self.tmppath, name), 'rb').read()
213 data = open(os.path.join(self.tmppath, name), 'rb').read()
214 mode = (mode & 0111) and 'x' or ''
214 mode = (mode & 0111) and 'x' or ''
215 return data, mode
215 return data, mode
216
216
217 def _exclude(self, name):
217 def _exclude(self, name):
218 exclude = ['{arch}', '.arch-ids', '.arch-inventory']
218 exclude = ['{arch}', '.arch-ids', '.arch-inventory']
219 for exc in exclude:
219 for exc in exclude:
220 if name.find(exc) != -1:
220 if name.find(exc) != -1:
221 return True
221 return True
222 return False
222 return False
223
223
224 def _readcontents(self, path):
224 def _readcontents(self, path):
225 files = []
225 files = []
226 contents = os.listdir(path)
226 contents = os.listdir(path)
227 while len(contents) > 0:
227 while len(contents) > 0:
228 c = contents.pop()
228 c = contents.pop()
229 p = os.path.join(path, c)
229 p = os.path.join(path, c)
230 # os.walk could be used, but here we avoid internal GNU
230 # os.walk could be used, but here we avoid internal GNU
231 # Arch files and directories, thus saving a lot of time.
231 # Arch files and directories, thus saving a lot of time.
232 if not self._exclude(p):
232 if not self._exclude(p):
233 if os.path.isdir(p):
233 if os.path.isdir(p):
234 contents += [os.path.join(c, f) for f in os.listdir(p)]
234 contents += [os.path.join(c, f) for f in os.listdir(p)]
235 else:
235 else:
236 files.append(c)
236 files.append(c)
237 return files
237 return files
238
238
239 def _rendirchanges(self, src, dest):
239 def _rendirchanges(self, src, dest):
240 changes = []
240 changes = []
241 copies = {}
241 copies = {}
242 files = self._readcontents(os.path.join(self.tmppath, dest))
242 files = self._readcontents(os.path.join(self.tmppath, dest))
243 for f in files:
243 for f in files:
244 s = os.path.join(src, f)
244 s = os.path.join(src, f)
245 d = os.path.join(dest, f)
245 d = os.path.join(dest, f)
246 changes.append(s)
246 changes.append(s)
247 changes.append(d)
247 changes.append(d)
248 copies[d] = s
248 copies[d] = s
249 return changes, copies
249 return changes, copies
250
250
251 def _obtainrevision(self, rev):
251 def _obtainrevision(self, rev):
252 self.ui.debug('obtaining revision %s...\n' % rev)
252 self.ui.debug('obtaining revision %s...\n' % rev)
253 output = self._execute('get', rev, self.tmppath)
253 output = self._execute('get', rev, self.tmppath)
254 self.checkexit(output)
254 self.checkexit(output)
255 self.ui.debug('analyzing revision %s...\n' % rev)
255 self.ui.debug('analyzing revision %s...\n' % rev)
256 files = self._readcontents(self.tmppath)
256 files = self._readcontents(self.tmppath)
257 self.changes[rev].add_files += files
257 self.changes[rev].add_files += files
258
258
259 def _stripbasepath(self, path):
259 def _stripbasepath(self, path):
260 if path.startswith('./'):
260 if path.startswith('./'):
261 return path[2:]
261 return path[2:]
262 return path
262 return path
263
263
264 def _parsecatlog(self, data, rev):
264 def _parsecatlog(self, data, rev):
265 try:
265 try:
266 catlog = self.catlogparser.parsestr(data)
266 catlog = self.catlogparser.parsestr(data)
267
267
268 # Commit date
268 # Commit date
269 self.changes[rev].date = util.datestr(
269 self.changes[rev].date = util.datestr(
270 util.strdate(catlog['Standard-date'],
270 util.strdate(catlog['Standard-date'],
271 '%Y-%m-%d %H:%M:%S'))
271 '%Y-%m-%d %H:%M:%S'))
272
272
273 # Commit author
273 # Commit author
274 self.changes[rev].author = self.recode(catlog['Creator'])
274 self.changes[rev].author = self.recode(catlog['Creator'])
275
275
276 # Commit description
276 # Commit description
277 self.changes[rev].summary = '\n\n'.join((catlog['Summary'],
277 self.changes[rev].summary = '\n\n'.join((catlog['Summary'],
278 catlog.get_payload()))
278 catlog.get_payload()))
279 self.changes[rev].summary = self.recode(self.changes[rev].summary)
279 self.changes[rev].summary = self.recode(self.changes[rev].summary)
280
280
281 # Commit revision origin when dealing with a branch or tag
281 # Commit revision origin when dealing with a branch or tag
282 if 'Continuation-of' in catlog:
282 if 'Continuation-of' in catlog:
283 self.changes[rev].continuationof = self.recode(
283 self.changes[rev].continuationof = self.recode(
284 catlog['Continuation-of'])
284 catlog['Continuation-of'])
285 except Exception:
285 except Exception:
286 raise util.Abort(_('could not parse cat-log of %s') % rev)
286 raise util.Abort(_('could not parse cat-log of %s') % rev)
287
287
288 def _parsechangeset(self, data, rev):
288 def _parsechangeset(self, data, rev):
289 for l in data:
289 for l in data:
290 l = l.strip()
290 l = l.strip()
291 # Added file (ignore added directory)
291 # Added file (ignore added directory)
292 if l.startswith('A') and not l.startswith('A/'):
292 if l.startswith('A') and not l.startswith('A/'):
293 file = self._stripbasepath(l[1:].strip())
293 file = self._stripbasepath(l[1:].strip())
294 if not self._exclude(file):
294 if not self._exclude(file):
295 self.changes[rev].add_files.append(file)
295 self.changes[rev].add_files.append(file)
296 # Deleted file (ignore deleted directory)
296 # Deleted file (ignore deleted directory)
297 elif l.startswith('D') and not l.startswith('D/'):
297 elif l.startswith('D') and not l.startswith('D/'):
298 file = self._stripbasepath(l[1:].strip())
298 file = self._stripbasepath(l[1:].strip())
299 if not self._exclude(file):
299 if not self._exclude(file):
300 self.changes[rev].del_files.append(file)
300 self.changes[rev].del_files.append(file)
301 # Modified binary file
301 # Modified binary file
302 elif l.startswith('Mb'):
302 elif l.startswith('Mb'):
303 file = self._stripbasepath(l[2:].strip())
303 file = self._stripbasepath(l[2:].strip())
304 if not self._exclude(file):
304 if not self._exclude(file):
305 self.changes[rev].mod_files.append(file)
305 self.changes[rev].mod_files.append(file)
306 # Modified link
306 # Modified link
307 elif l.startswith('M->'):
307 elif l.startswith('M->'):
308 file = self._stripbasepath(l[3:].strip())
308 file = self._stripbasepath(l[3:].strip())
309 if not self._exclude(file):
309 if not self._exclude(file):
310 self.changes[rev].mod_files.append(file)
310 self.changes[rev].mod_files.append(file)
311 # Modified file
311 # Modified file
312 elif l.startswith('M'):
312 elif l.startswith('M'):
313 file = self._stripbasepath(l[1:].strip())
313 file = self._stripbasepath(l[1:].strip())
314 if not self._exclude(file):
314 if not self._exclude(file):
315 self.changes[rev].mod_files.append(file)
315 self.changes[rev].mod_files.append(file)
316 # Renamed file (or link)
316 # Renamed file (or link)
317 elif l.startswith('=>'):
317 elif l.startswith('=>'):
318 files = l[2:].strip().split(' ')
318 files = l[2:].strip().split(' ')
319 if len(files) == 1:
319 if len(files) == 1:
320 files = l[2:].strip().split('\t')
320 files = l[2:].strip().split('\t')
321 src = self._stripbasepath(files[0])
321 src = self._stripbasepath(files[0])
322 dst = self._stripbasepath(files[1])
322 dst = self._stripbasepath(files[1])
323 if not self._exclude(src) and not self._exclude(dst):
323 if not self._exclude(src) and not self._exclude(dst):
324 self.changes[rev].ren_files[src] = dst
324 self.changes[rev].ren_files[src] = dst
325 # Conversion from file to link or from link to file (modified)
325 # Conversion from file to link or from link to file (modified)
326 elif l.startswith('ch'):
326 elif l.startswith('ch'):
327 file = self._stripbasepath(l[2:].strip())
327 file = self._stripbasepath(l[2:].strip())
328 if not self._exclude(file):
328 if not self._exclude(file):
329 self.changes[rev].mod_files.append(file)
329 self.changes[rev].mod_files.append(file)
330 # Renamed directory
330 # Renamed directory
331 elif l.startswith('/>'):
331 elif l.startswith('/>'):
332 dirs = l[2:].strip().split(' ')
332 dirs = l[2:].strip().split(' ')
333 if len(dirs) == 1:
333 if len(dirs) == 1:
334 dirs = l[2:].strip().split('\t')
334 dirs = l[2:].strip().split('\t')
335 src = self._stripbasepath(dirs[0])
335 src = self._stripbasepath(dirs[0])
336 dst = self._stripbasepath(dirs[1])
336 dst = self._stripbasepath(dirs[1])
337 if not self._exclude(src) and not self._exclude(dst):
337 if not self._exclude(src) and not self._exclude(dst):
338 self.changes[rev].ren_dirs[src] = dst
338 self.changes[rev].ren_dirs[src] = dst
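
The _parsecatlog helper above can lean on the stdlib email parser because a GNU Arch cat-log entry is shaped like an RFC 2822 message: headers such as Standard-date, Creator and Summary, then a free-form body. A small illustrative sketch with an invented log entry (not real tla/baz output):

    from email.Parser import Parser

    catlog_text = (
        "Standard-date: 2009-07-01 12:34:56\n"
        "Creator: Aleix <aleix@example.org>\n"
        "Summary: add feature X\n"
        "\n"
        "Longer description of the change.\n")
    catlog = Parser().parsestr(catlog_text)
    assert 'aleix@example.org' in catlog['Creator']
    assert 'Continuation-of' not in catlog   # ordinary commit, not a branch/tag
    # Same join as _parsecatlog: summary header plus the message body.
    description = '\n\n'.join((catlog['Summary'], catlog.get_payload()))
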
@@ -1,1175 +1,1175 b''
1 # Subversion 1.4/1.5 Python API backend
1 # Subversion 1.4/1.5 Python API backend
2 #
2 #
3 # Copyright(C) 2007 Daniel Holth et al
3 # Copyright(C) 2007 Daniel Holth et al
4
4
5 import os
5 import os
6 import re
6 import re
7 import sys
7 import sys
8 import cPickle as pickle
8 import cPickle as pickle
9 import tempfile
9 import tempfile
10 import urllib
10 import urllib
11 import urllib2
11 import urllib2
12
12
13 from mercurial import strutil, scmutil, util, encoding
13 from mercurial import strutil, scmutil, util, encoding
14 from mercurial.i18n import _
14 from mercurial.i18n import _
15
15
16 # Subversion stuff. Works best with very recent Python SVN bindings
16 # Subversion stuff. Works best with very recent Python SVN bindings
17 # e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing
17 # e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing
18 # these bindings.
18 # these bindings.
19
19
20 from cStringIO import StringIO
20 from cStringIO import StringIO
21
21
22 from common import NoRepo, MissingTool, commit, encodeargs, decodeargs
22 from common import NoRepo, MissingTool, commit, encodeargs, decodeargs
23 from common import commandline, converter_source, converter_sink, mapfile
23 from common import commandline, converter_source, converter_sink, mapfile
24
24
25 try:
25 try:
26 from svn.core import SubversionException, Pool
26 from svn.core import SubversionException, Pool
27 import svn
27 import svn
28 import svn.client
28 import svn.client
29 import svn.core
29 import svn.core
30 import svn.ra
30 import svn.ra
31 import svn.delta
31 import svn.delta
32 import transport
32 import transport
33 import warnings
33 import warnings
34 warnings.filterwarnings('ignore',
34 warnings.filterwarnings('ignore',
35 module='svn.core',
35 module='svn.core',
36 category=DeprecationWarning)
36 category=DeprecationWarning)
37
37
38 except ImportError:
38 except ImportError:
39 svn = None
39 svn = None
40
40
41 class SvnPathNotFound(Exception):
41 class SvnPathNotFound(Exception):
42 pass
42 pass
43
43
44 def revsplit(rev):
44 def revsplit(rev):
45 """Parse a revision string and return (uuid, path, revnum)."""
45 """Parse a revision string and return (uuid, path, revnum)."""
46 url, revnum = rev.rsplit('@', 1)
46 url, revnum = rev.rsplit('@', 1)
47 parts = url.split('/', 1)
47 parts = url.split('/', 1)
48 mod = ''
48 mod = ''
49 if len(parts) > 1:
49 if len(parts) > 1:
50 mod = '/' + parts[1]
50 mod = '/' + parts[1]
51 return parts[0][4:], mod, int(revnum)
51 return parts[0][4:], mod, int(revnum)
52
52
53 def geturl(path):
53 def geturl(path):
54 try:
54 try:
55 return svn.client.url_from_path(svn.core.svn_path_canonicalize(path))
55 return svn.client.url_from_path(svn.core.svn_path_canonicalize(path))
56 except SubversionException:
56 except SubversionException:
57 pass
57 pass
58 if os.path.isdir(path):
58 if os.path.isdir(path):
59 path = os.path.normpath(os.path.abspath(path))
59 path = os.path.normpath(os.path.abspath(path))
60 if os.name == 'nt':
60 if os.name == 'nt':
61 path = '/' + util.normpath(path)
61 path = '/' + util.normpath(path)
62 # Module URL is later compared with the repository URL returned
62 # Module URL is later compared with the repository URL returned
63 # by svn API, which is UTF-8.
63 # by svn API, which is UTF-8.
64 path = encoding.tolocal(path)
64 path = encoding.tolocal(path)
65 return 'file://%s' % urllib.quote(path)
65 return 'file://%s' % urllib.quote(path)
66 return path
66 return path
67
67
def optrev(number):
    optrev = svn.core.svn_opt_revision_t()
    optrev.kind = svn.core.svn_opt_revision_number
    optrev.value.number = number
    return optrev

class changedpath(object):
    def __init__(self, p):
        self.copyfrom_path = p.copyfrom_path
        self.copyfrom_rev = p.copyfrom_rev
        self.action = p.action

def get_log_child(fp, url, paths, start, end, limit=0, discover_changed_paths=True,
                  strict_node_history=False):
    protocol = -1
    def receiver(orig_paths, revnum, author, date, message, pool):
        if orig_paths is not None:
            for k, v in orig_paths.iteritems():
                orig_paths[k] = changedpath(v)
        pickle.dump((orig_paths, revnum, author, date, message),
                    fp, protocol)

    try:
        # Use an ra of our own so that our parent can consume
        # our results without confusing the server.
        t = transport.SvnRaTransport(url=url)
        svn.ra.get_log(t.ra, paths, start, end, limit,
                       discover_changed_paths,
                       strict_node_history,
                       receiver)
    except SubversionException, (inst, num):
        pickle.dump(num, fp, protocol)
    except IOError:
        # Caller may interrupt the iteration
        pickle.dump(None, fp, protocol)
    else:
        pickle.dump(None, fp, protocol)
    fp.close()
    # With large history, cleanup process goes crazy and suddenly
    # consumes *huge* amount of memory. The output file being closed,
    # there is no need for clean termination.
    os._exit(0)

def debugsvnlog(ui, **opts):
    """Fetch the SVN log in a subprocess and channel it back to the parent
    to avoid memory collection issues.
    """
    util.setbinary(sys.stdin)
    util.setbinary(sys.stdout)
    args = decodeargs(sys.stdin.read())
    get_log_child(sys.stdout, *args)

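# Log retrieval pipeline: _getlog() (defined on svn_source below) runs
# 'hg debugsvnlog' in a child process; debugsvnlog() unpickles its arguments
# from stdin and calls get_log_child(), which streams pickled log entries back
# on stdout. logstream below unpickles those entries lazily, which is what
# keeps the parent's memory usage bounded, as noted above.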
class logstream(object):
    """Interruptible revision log iterator."""
    def __init__(self, stdout):
        self._stdout = stdout

    def __iter__(self):
        while True:
            try:
                entry = pickle.load(self._stdout)
            except EOFError:
                raise util.Abort(_('Mercurial failed to run itself, check'
                                   ' hg executable is in PATH'))
            try:
                orig_paths, revnum, author, date, message = entry
            except:
                if entry is None:
                    break
                raise SubversionException("child raised exception", entry)
            yield entry

    def close(self):
        if self._stdout:
            self._stdout.close()
            self._stdout = None


# Check to see if the given path is a local Subversion repo. Verify this by
# looking for several svn-specific files and directories in the given
# directory.
def filecheck(ui, path, proto):
    for x in ('locks', 'hooks', 'format', 'db'):
        if not os.path.exists(os.path.join(path, x)):
            return False
    return True

# Check to see if a given path is the root of an svn repo over http. We verify
# this by requesting a version-controlled URL we know can't exist and looking
# for the svn-specific "not found" XML.
def httpcheck(ui, path, proto):
    try:
        opener = urllib2.build_opener()
        rsp = opener.open('%s://%s/!svn/ver/0/.svn' % (proto, path))
        data = rsp.read()
    except urllib2.HTTPError, inst:
        if inst.code != 404:
            # Except for 404 we cannot know for sure this is not an svn repo
            ui.warn(_('svn: cannot probe remote repository, assume it could '
                      'be a subversion repository. Use --source-type if you '
                      'know better.\n'))
            return True
        data = inst.fp.read()
    except:
        # Could be urllib2.URLError if the URL is invalid or anything else.
        return False
    return '<m:human-readable errcode="160013">' in data

protomap = {'http': httpcheck,
            'https': httpcheck,
            'file': filecheck,
            }
def issvnurl(ui, url):
    try:
        proto, path = url.split('://', 1)
        if proto == 'file':
            path = urllib.url2pathname(path)
    except ValueError:
        proto = 'file'
        path = os.path.abspath(url)
    if proto == 'file':
        path = path.replace(os.sep, '/')
    check = protomap.get(proto, lambda *args: False)
    while '/' in path:
        if check(ui, path, proto):
            return True
        path = path.rsplit('/', 1)[0]
    return False

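# issvnurl() above probes each path prefix in turn; e.g. for a hypothetical
# 'http://host/svn/project/trunk' it would try 'host/svn/project/trunk',
# then 'host/svn/project', then 'host/svn', until httpcheck() recognizes a
# repository root or no slash is left.
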
# SVN conversion code stolen from bzr-svn and tailor
#
# Subversion looks like a versioned filesystem; branch structures are
# defined by convention and not enforced by the tool. First, we define
# the potential branches (modules) as the "trunk" and "branches" child
# directories. Revisions are then identified by their module and
# revision number (and a repository identifier).
#
# The revision graph is really a tree (or a forest). By default, a
# revision's parent is the previous revision in the same module. If the
# module directory is copied/moved from another module then the
# revision is the module root and its parent is the source revision in
# the parent module. A revision has at most one parent.
#
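# For a conventional (hypothetical) layout such as:
#   /trunk
#   /branches/feature-x
#   /tags/1.0
# getheads() below reports the trunk head plus one head per branches/* child,
# while gettags() maps copies made into tags/* onto Mercurial tags.
#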
class svn_source(converter_source):
    def __init__(self, ui, url, rev=None):
        super(svn_source, self).__init__(ui, url, rev=rev)

        if not (url.startswith('svn://') or url.startswith('svn+ssh://') or
                (os.path.exists(url) and
                 os.path.exists(os.path.join(url, '.svn'))) or
                issvnurl(ui, url)):
            raise NoRepo(_("%s does not look like a Subversion repository")
                         % url)
        if svn is None:
            raise MissingTool(_('Could not load Subversion python bindings'))

        try:
            version = svn.core.SVN_VER_MAJOR, svn.core.SVN_VER_MINOR
            if version < (1, 4):
                raise MissingTool(_('Subversion python bindings %d.%d found, '
                                    '1.4 or later required') % version)
        except AttributeError:
            raise MissingTool(_('Subversion python bindings are too old, 1.4 '
                                'or later required'))

        self.lastrevs = {}

        latest = None
        try:
            # Support file://path@rev syntax. Useful e.g. to convert
            # deleted branches.
            at = url.rfind('@')
            if at >= 0:
                latest = int(url[at + 1:])
                url = url[:at]
        except ValueError:
            pass
        self.url = geturl(url)
        self.encoding = 'UTF-8' # Subversion is always nominal UTF-8
        try:
            self.transport = transport.SvnRaTransport(url=self.url)
            self.ra = self.transport.ra
            self.ctx = self.transport.client
            self.baseurl = svn.ra.get_repos_root(self.ra)
            # Module is either empty or a repository path starting with
            # a slash and not ending with a slash.
            self.module = urllib.unquote(self.url[len(self.baseurl):])
            self.prevmodule = None
            self.rootmodule = self.module
            self.commits = {}
            self.paths = {}
            self.uuid = svn.ra.get_uuid(self.ra)
        except SubversionException:
            ui.traceback()
            raise NoRepo(_("%s does not look like a Subversion repository")
                         % self.url)

        if rev:
            try:
                latest = int(rev)
            except ValueError:
                raise util.Abort(_('svn: revision %s is not an integer') % rev)

        self.trunkname = self.ui.config('convert', 'svn.trunk', 'trunk').strip('/')
        self.startrev = self.ui.config('convert', 'svn.startrev', default=0)
        try:
            self.startrev = int(self.startrev)
            if self.startrev < 0:
                self.startrev = 0
        except ValueError:
            raise util.Abort(_('svn: start revision %s is not an integer')
                             % self.startrev)

        try:
            self.head = self.latest(self.module, latest)
        except SvnPathNotFound:
            self.head = None
        if not self.head:
            raise util.Abort(_('no revision found in module %s')
                             % self.module)
        self.last_changed = self.revnum(self.head)

        self._changescache = None

        if os.path.exists(os.path.join(url, '.svn/entries')):
            self.wc = url
        else:
            self.wc = None
        self.convertfp = None

    def setrevmap(self, revmap):
        lastrevs = {}
        for revid in revmap.iterkeys():
            uuid, module, revnum = revsplit(revid)
            lastrevnum = lastrevs.setdefault(module, revnum)
            if revnum > lastrevnum:
                lastrevs[module] = revnum
        self.lastrevs = lastrevs

    def exists(self, path, optrev):
        try:
            svn.client.ls(self.url.rstrip('/') + '/' + urllib.quote(path),
                          optrev, False, self.ctx)
            return True
        except SubversionException:
            return False

    def getheads(self):

        def isdir(path, revnum):
            kind = self._checkpath(path, revnum)
            return kind == svn.core.svn_node_dir

        def getcfgpath(name, rev):
            cfgpath = self.ui.config('convert', 'svn.' + name)
            if cfgpath is not None and cfgpath.strip() == '':
                return None
            path = (cfgpath or name).strip('/')
            if not self.exists(path, rev):
                if self.module.endswith(path) and name == 'trunk':
                    # we are converting from inside this directory
                    return None
                if cfgpath:
                    raise util.Abort(_('expected %s to be at %r, but not found')
                                     % (name, path))
                return None
            self.ui.note(_('found %s at %r\n') % (name, path))
            return path

        rev = optrev(self.last_changed)
        oldmodule = ''
        trunk = getcfgpath('trunk', rev)
        self.tags = getcfgpath('tags', rev)
        branches = getcfgpath('branches', rev)

        # If the project has a trunk or branches, we will extract heads
        # from them. We keep the project root otherwise.
        if trunk:
            oldmodule = self.module or ''
            self.module += '/' + trunk
            self.head = self.latest(self.module, self.last_changed)
            if not self.head:
                raise util.Abort(_('no revision found in module %s')
                                 % self.module)

        # First head in the list is the module's head
        self.heads = [self.head]
        if self.tags is not None:
            self.tags = '%s/%s' % (oldmodule, (self.tags or 'tags'))

        # Check if branches bring a few more heads to the list
        if branches:
            rpath = self.url.strip('/')
            branchnames = svn.client.ls(rpath + '/' + urllib.quote(branches),
                                        rev, False, self.ctx)
            for branch in branchnames.keys():
                module = '%s/%s/%s' % (oldmodule, branches, branch)
                if not isdir(module, self.last_changed):
                    continue
                brevid = self.latest(module, self.last_changed)
                if not brevid:
                    self.ui.note(_('ignoring empty branch %s\n') % branch)
                    continue
                self.ui.note(_('found branch %s at %d\n') %
                             (branch, self.revnum(brevid)))
                self.heads.append(brevid)

        if self.startrev and self.heads:
            if len(self.heads) > 1:
                raise util.Abort(_('svn: start revision is not supported '
                                   'with more than one branch'))
            revnum = self.revnum(self.heads[0])
            if revnum < self.startrev:
                raise util.Abort(
                    _('svn: no revision found after start revision %d')
                    % self.startrev)

        return self.heads

    def getchanges(self, rev):
        if self._changescache and self._changescache[0] == rev:
            return self._changescache[1]
        self._changescache = None
        (paths, parents) = self.paths[rev]
        if parents:
            files, self.removed, copies = self.expandpaths(rev, paths, parents)
        else:
            # Perform a full checkout on roots
            uuid, module, revnum = revsplit(rev)
            entries = svn.client.ls(self.baseurl + urllib.quote(module),
                                    optrev(revnum), True, self.ctx)
            files = [n for n, e in entries.iteritems()
                     if e.kind == svn.core.svn_node_file]
            copies = {}
            self.removed = set()

        files.sort()
        files = zip(files, [rev] * len(files))

        # caller caches the result, so free it here to release memory
        del self.paths[rev]
        return (files, copies)

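    # Note on the shape returned by getchanges() above: 'files' is a list of
    # (path, rev) pairs, every entry pinned to the same revision id, and
    # 'copies' maps destination paths to their copy sources.
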
    def getchangedfiles(self, rev, i):
        changes = self.getchanges(rev)
        self._changescache = (rev, changes)
        return [f[0] for f in changes[0]]

    def getcommit(self, rev):
        if rev not in self.commits:
            uuid, module, revnum = revsplit(rev)
            self.module = module
            self.reparent(module)
            # We assume that:
            # - requests for revisions after "stop" come from the
            # revision graph backward traversal. Cache all of them
            # down to stop, they will be used eventually.
            # - requests for revisions before "stop" come to get
            # isolated branches' parents. Just fetch what is needed.
            stop = self.lastrevs.get(module, 0)
            if revnum < stop:
                stop = revnum + 1
            self._fetch_revisions(revnum, stop)
        commit = self.commits[rev]
        # caller caches the result, so free it here to release memory
        del self.commits[rev]
        return commit

    def gettags(self):
        tags = {}
        if self.tags is None:
            return tags

        # svn tags are just a convention: project branches left in a
        # 'tags' directory. There is no other relationship than
        # ancestry, which is expensive to discover and makes them hard
        # to update incrementally. Worse, past revisions may be
        # referenced by tags far away in the future, requiring a deep
        # history traversal on every calculation. Current code
        # performs a single backward traversal, tracking moves within
        # the tags directory (tag renaming) and recording a new tag
        # every time a project is copied from outside the tags
        # directory. It also lists deleted tags; this behaviour may
        # change in the future.
        pendings = []
        tagspath = self.tags
        start = svn.ra.get_latest_revnum(self.ra)
        stream = self._getlog([self.tags], start, self.startrev)
        try:
            for entry in stream:
                origpaths, revnum, author, date, message = entry
                copies = [(e.copyfrom_path, e.copyfrom_rev, p) for p, e
                          in origpaths.iteritems() if e.copyfrom_path]
                # Apply moves/copies from more specific to general
                copies.sort(reverse=True)

                srctagspath = tagspath
                if copies and copies[-1][2] == tagspath:
                    # Track tags directory moves
                    srctagspath = copies.pop()[0]

                for source, sourcerev, dest in copies:
                    if not dest.startswith(tagspath + '/'):
                        continue
                    for tag in pendings:
                        if tag[0].startswith(dest):
                            tagpath = source + tag[0][len(dest):]
                            tag[:2] = [tagpath, sourcerev]
                            break
                    else:
                        pendings.append([source, sourcerev, dest])

                # Filter out tags with children coming from different
                # parts of the repository like:
                # /tags/tag.1 (from /trunk:10)
                # /tags/tag.1/foo (from /branches/foo:12)
                # Here /tags/tag.1 is discarded as well as its children.
                # It happens with tools like cvs2svn. Such tags cannot
                # be represented in mercurial.
                addeds = dict((p, e.copyfrom_path) for p, e
                              in origpaths.iteritems()
                              if e.action == 'A' and e.copyfrom_path)
                badroots = set()
                for destroot in addeds:
                    for source, sourcerev, dest in pendings:
                        if (not dest.startswith(destroot + '/')
                            or source.startswith(addeds[destroot] + '/')):
                            continue
                        badroots.add(destroot)
                        break

                for badroot in badroots:
                    pendings = [p for p in pendings if p[2] != badroot
                                and not p[2].startswith(badroot + '/')]

                # Tell tag renamings from tag creations
                renamings = []
                for source, sourcerev, dest in pendings:
                    tagname = dest.split('/')[-1]
                    if source.startswith(srctagspath):
                        renamings.append([source, sourcerev, tagname])
                        continue
                    if tagname in tags:
                        # Keep the latest tag value
                        continue
                    # From revision may be fake, get one with changes
                    try:
                        tagid = self.latest(source, sourcerev)
                        if tagid and tagname not in tags:
                            tags[tagname] = tagid
                    except SvnPathNotFound:
                        # It happens when we are following directories
                        # we assumed were copied with their parents
                        # but were really created in the tag
                        # directory.
                        pass
                pendings = renamings
                tagspath = srctagspath
        finally:
            stream.close()
        return tags

    def converted(self, rev, destrev):
        if not self.wc:
            return
        if self.convertfp is None:
            self.convertfp = open(os.path.join(self.wc, '.svn', 'hg-shamap'),
                                  'a')
        self.convertfp.write('%s %d\n' % (destrev, self.revnum(rev)))
        self.convertfp.flush()

    def revid(self, revnum, module=None):
        return 'svn:%s%s@%s' % (self.uuid, module or self.module, revnum)

    def revnum(self, rev):
        return int(rev.split('@')[-1])

    def latest(self, path, stop=0):
        """Find the latest revid affecting path, up to stop. It may return
        a revision in a different module, since a branch may be moved without
        a change being reported. Return None if the computed module does not
        belong to the rootmodule subtree.
        """
        if not path.startswith(self.rootmodule):
            # Requests on foreign branches may be forbidden at server level
            self.ui.debug('ignoring foreign branch %r\n' % path)
            return None

        if not stop:
            stop = svn.ra.get_latest_revnum(self.ra)
        try:
            prevmodule = self.reparent('')
            dirent = svn.ra.stat(self.ra, path.strip('/'), stop)
            self.reparent(prevmodule)
        except SubversionException:
            dirent = None
        if not dirent:
            raise SvnPathNotFound(_('%s not found up to revision %d')
                                  % (path, stop))

        # stat() gives us the previous revision on this line of
        # development, but it might be in *another module*. Fetch the
        # log and detect renames down to the latest revision.
        stream = self._getlog([path], stop, dirent.created_rev)
        try:
            for entry in stream:
                paths, revnum, author, date, message = entry
                if revnum <= dirent.created_rev:
                    break

                for p in paths:
                    if not path.startswith(p) or not paths[p].copyfrom_path:
                        continue
                    newpath = paths[p].copyfrom_path + path[len(p):]
                    self.ui.debug("branch renamed from %s to %s at %d\n" %
                                  (path, newpath, revnum))
                    path = newpath
                    break
        finally:
            stream.close()

        if not path.startswith(self.rootmodule):
            self.ui.debug('ignoring foreign branch %r\n' % path)
            return None
        return self.revid(dirent.created_rev, path)

    def reparent(self, module):
        """Reparent the svn transport and return the previous parent."""
        if self.prevmodule == module:
            return module
        svnurl = self.baseurl + urllib.quote(module)
        prevmodule = self.prevmodule
        if prevmodule is None:
            prevmodule = ''
        self.ui.debug("reparent to %s\n" % svnurl)
        svn.ra.reparent(self.ra, svnurl)
        self.prevmodule = module
        return prevmodule

    def expandpaths(self, rev, paths, parents):
        changed, removed = set(), set()
        copies = {}

        new_module, revnum = revsplit(rev)[1:]
        if new_module != self.module:
            self.module = new_module
            self.reparent(self.module)

        for i, (path, ent) in enumerate(paths):
            self.ui.progress(_('scanning paths'), i, item=path,
                             total=len(paths))
            entrypath = self.getrelpath(path)

            kind = self._checkpath(entrypath, revnum)
            if kind == svn.core.svn_node_file:
                changed.add(self.recode(entrypath))
                if not ent.copyfrom_path or not parents:
                    continue
                # Copy sources not in parent revisions cannot be
                # represented, ignore their origin for now
                pmodule, prevnum = revsplit(parents[0])[1:]
                if ent.copyfrom_rev < prevnum:
                    continue
                copyfrom_path = self.getrelpath(ent.copyfrom_path, pmodule)
                if not copyfrom_path:
                    continue
                self.ui.debug("copied to %s from %s@%s\n" %
                              (entrypath, copyfrom_path, ent.copyfrom_rev))
                copies[self.recode(entrypath)] = self.recode(copyfrom_path)
            elif kind == 0: # gone, but had better be a deleted *file*
                self.ui.debug("gone from %s\n" % ent.copyfrom_rev)
                pmodule, prevnum = revsplit(parents[0])[1:]
                parentpath = pmodule + "/" + entrypath
                fromkind = self._checkpath(entrypath, prevnum, pmodule)

                if fromkind == svn.core.svn_node_file:
                    removed.add(self.recode(entrypath))
                elif fromkind == svn.core.svn_node_dir:
                    oroot = parentpath.strip('/')
                    nroot = path.strip('/')
                    children = self._iterfiles(oroot, prevnum)
                    for childpath in children:
                        childpath = childpath.replace(oroot, nroot)
                        childpath = self.getrelpath("/" + childpath, pmodule)
                        if childpath:
                            removed.add(self.recode(childpath))
                else:
                    self.ui.debug('unknown path in revision %d: %s\n' % \
                                  (revnum, path))
            elif kind == svn.core.svn_node_dir:
                if ent.action == 'M':
                    # If the directory just had a prop change,
                    # then we shouldn't need to look for its children.
                    continue
                if ent.action == 'R' and parents:
                    # If a directory is replacing a file, mark the previous
                    # file as deleted
                    pmodule, prevnum = revsplit(parents[0])[1:]
                    pkind = self._checkpath(entrypath, prevnum, pmodule)
                    if pkind == svn.core.svn_node_file:
                        removed.add(self.recode(entrypath))
                    elif pkind == svn.core.svn_node_dir:
                        # We do not know what files were kept or removed,
                        # mark them all as changed.
                        for childpath in self._iterfiles(pmodule, prevnum):
                            childpath = self.getrelpath("/" + childpath)
                            if childpath:
                                changed.add(self.recode(childpath))

                for childpath in self._iterfiles(path, revnum):
                    childpath = self.getrelpath("/" + childpath)
                    if childpath:
                        changed.add(self.recode(childpath))

                # Handle directory copies
                if not ent.copyfrom_path or not parents:
                    continue
                # Copy sources not in parent revisions cannot be
                # represented, ignore their origin for now
                pmodule, prevnum = revsplit(parents[0])[1:]
                if ent.copyfrom_rev < prevnum:
                    continue
                copyfrompath = self.getrelpath(ent.copyfrom_path, pmodule)
                if not copyfrompath:
                    continue
                self.ui.debug("mark %s came from %s:%d\n"
                              % (path, copyfrompath, ent.copyfrom_rev))
                children = self._iterfiles(ent.copyfrom_path, ent.copyfrom_rev)
                for childpath in children:
                    childpath = self.getrelpath("/" + childpath, pmodule)
                    if not childpath:
                        continue
                    copytopath = path + childpath[len(copyfrompath):]
                    copytopath = self.getrelpath(copytopath)
                    copies[self.recode(copytopath)] = self.recode(childpath)

        self.ui.progress(_('scanning paths'), None)
        changed.update(removed)
        return (list(changed), removed, copies)

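    # expandpaths() above returns (files, removed, copies): 'files' lists every
    # touched path including deletions (changed.update(removed) just before the
    # return), 'removed' is the set of deleted paths, and 'copies' maps new
    # paths to the paths they were copied from.
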
    def _fetch_revisions(self, from_revnum, to_revnum):
        if from_revnum < to_revnum:
            from_revnum, to_revnum = to_revnum, from_revnum

        self.child_cset = None

        def parselogentry(orig_paths, revnum, author, date, message):
            """Return the parsed commit object or None, and True if
            the revision is a branch root.
            """
            self.ui.debug("parsing revision %d (%d changes)\n" %
                          (revnum, len(orig_paths)))

            branched = False
            rev = self.revid(revnum)
            # branch log might return entries for a parent we already have

            if rev in self.commits or revnum < to_revnum:
                return None, branched

            parents = []
            # check whether this revision is the start of a branch or part
            # of a branch renaming
            orig_paths = sorted(orig_paths.iteritems())
            root_paths = [(p, e) for p, e in orig_paths
                          if self.module.startswith(p)]
            if root_paths:
                path, ent = root_paths[-1]
                if ent.copyfrom_path:
                    branched = True
                    newpath = ent.copyfrom_path + self.module[len(path):]
                    # ent.copyfrom_rev may not be the actual last revision
                    previd = self.latest(newpath, ent.copyfrom_rev)
                    if previd is not None:
                        prevmodule, prevnum = revsplit(previd)[1:]
                        if prevnum >= self.startrev:
                            parents = [previd]
                            self.ui.note(
                                _('found parent of branch %s at %d: %s\n') %
                                (self.module, prevnum, prevmodule))
                else:
                    self.ui.debug("no copyfrom path, don't know what to do.\n")

            paths = []
            # filter out unrelated paths
            for path, ent in orig_paths:
                if self.getrelpath(path) is None:
                    continue
                paths.append((path, ent))

            # Example SVN datetime. Includes microseconds.
            # ISO-8601 conformant
            # '2007-01-04T17:35:00.902377Z'
            date = util.parsedate(date[:19] + " UTC", ["%Y-%m-%dT%H:%M:%S"])

            log = message and self.recode(message) or ''
            author = author and self.recode(author) or ''
            try:
                branch = self.module.split("/")[-1]
                if branch == self.trunkname:
                    branch = None
            except IndexError:
                branch = None

            cset = commit(author=author,
                          date=util.datestr(date),
                          desc=log,
                          parents=parents,
                          branch=branch,
                          rev=rev)

            self.commits[rev] = cset
            # The parents list is *shared* among self.paths and the
            # commit object. Both will be updated below.
            self.paths[rev] = (paths, cset.parents)
            if self.child_cset and not self.child_cset.parents:
                self.child_cset.parents[:] = [rev]
            self.child_cset = cset
            return cset, branched

        self.ui.note(_('fetching revision log for "%s" from %d to %d\n') %
                     (self.module, from_revnum, to_revnum))

        try:
            firstcset = None
            lastonbranch = False
            stream = self._getlog([self.module], from_revnum, to_revnum)
            try:
                for entry in stream:
                    paths, revnum, author, date, message = entry
                    if revnum < self.startrev:
                        lastonbranch = True
                        break
                    if not paths:
                        self.ui.debug('revision %d has no entries\n' % revnum)
                        # If we ever leave the loop on an empty
                        # revision, do not try to get a parent branch
                        lastonbranch = lastonbranch or revnum == 0
                        continue
                    cset, lastonbranch = parselogentry(paths, revnum, author,
                                                       date, message)
                    if cset:
                        firstcset = cset
                    if lastonbranch:
                        break
            finally:
                stream.close()

            if not lastonbranch and firstcset and not firstcset.parents:
                # The first revision of the sequence (the last fetched one)
                # has invalid parents if not a branch root. Find the parent
                # revision now, if any.
                try:
                    firstrevnum = self.revnum(firstcset.rev)
                    if firstrevnum > 1:
                        latest = self.latest(self.module, firstrevnum - 1)
                        if latest:
                            firstcset.parents.append(latest)
                except SvnPathNotFound:
                    pass
        except SubversionException, (inst, num):
            if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
                raise util.Abort(_('svn: branch has no revision %s') % to_revnum)
            raise

    def getfile(self, file, rev):
        # TODO: ra.get_file transmits the whole file instead of diffs.
        if file in self.removed:
            raise IOError()
        mode = ''
        try:
            new_module, revnum = revsplit(rev)[1:]
            if self.module != new_module:
                self.module = new_module
                self.reparent(self.module)
            io = StringIO()
            info = svn.ra.get_file(self.ra, file, revnum, io)
            data = io.getvalue()
            # ra.get_files() seems to keep a reference on the input buffer
            # preventing collection. Release it explicitly.
            io.close()
            if isinstance(info, list):
                info = info[-1]
            mode = ("svn:executable" in info) and 'x' or ''
            mode = ("svn:special" in info) and 'l' or mode
        except SubversionException, e:
            notfound = (svn.core.SVN_ERR_FS_NOT_FOUND,
                        svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND)
            if e.apr_err in notfound: # File not found
                raise IOError()
            raise
        if mode == 'l':
            link_prefix = "link "
            if data.startswith(link_prefix):
                data = data[len(link_prefix):]
        return data, mode

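    # getfile() above returns (data, mode): mode is 'x' for executable files,
    # 'l' for symlinks (with the leading "link " marker stripped from data) and
    # '' otherwise; a path listed in self.removed raises IOError.
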
    def _iterfiles(self, path, revnum):
        """Enumerate all files in path at revnum, recursively."""
        path = path.strip('/')
        pool = Pool()
        rpath = '/'.join([self.baseurl, urllib.quote(path)]).strip('/')
        entries = svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool)
        if path:
            path += '/'
        return ((path + p) for p, e in entries.iteritems()
                if e.kind == svn.core.svn_node_file)

    def getrelpath(self, path, module=None):
        if module is None:
            module = self.module
        # Given the repository url of this wc, say
        # "http://server/plone/CMFPlone/branches/Plone-2_0-branch"
        # extract the "entry" portion (a relative path) from what
        # svn log --xml says, ie
        # "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
        # that is to say "tests/PloneTestCase.py"
        if path.startswith(module):
            relative = path.rstrip('/')[len(module):]
            if relative.startswith('/'):
                return relative[1:]
            elif relative == '':
                return relative

        # The path is outside our tracked tree...
        self.ui.debug('%r is not under %r, ignoring\n' % (path, module))
        return None

    def _checkpath(self, path, revnum, module=None):
        if module is not None:
            prevmodule = self.reparent('')
            path = module + '/' + path
        try:
            # ra.check_path does not like leading slashes very much; it leads
            # to PROPFIND subversion errors
            return svn.ra.check_path(self.ra, path.strip('/'), revnum)
        finally:
            if module is not None:
                self.reparent(prevmodule)

    def _getlog(self, paths, start, end, limit=0, discover_changed_paths=True,
                strict_node_history=False):
        # Normalize path names, svn >= 1.5 only wants paths relative to
        # supplied URL
        relpaths = []
        for p in paths:
            if not p.startswith('/'):
                p = self.module + '/' + p
            relpaths.append(p.strip('/'))
        args = [self.baseurl, relpaths, start, end, limit, discover_changed_paths,
                strict_node_history]
        arg = encodeargs(args)
        hgexe = util.hgexecutable()
        cmd = '%s debugsvnlog' % util.shellquote(hgexe)
        stdin, stdout = util.popen2(util.quotecommand(cmd))
        stdin.write(arg)
        try:
            stdin.close()
        except IOError:
            raise util.Abort(_('Mercurial failed to run itself, check'
                               ' hg executable is in PATH'))
        return logstream(stdout)

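    # _getlog() above is the parent half of the debugsvnlog pipeline described
    # earlier; callers such as _fetch_revisions() pass start >= end, so entries
    # arrive newest first and iteration can stop as soon as the revision of
    # interest has been seen.
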
pre_revprop_change = '''#!/bin/sh

REPOS="$1"
REV="$2"
USER="$3"
PROPNAME="$4"
ACTION="$5"

if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi
if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-branch" ]; then exit 0; fi
if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-rev" ]; then exit 0; fi

echo "Changing prohibited revision property" >&2
exit 1
'''

947 class svn_sink(converter_sink, commandline):
947 class svn_sink(converter_sink, commandline):
948 commit_re = re.compile(r'Committed revision (\d+).', re.M)
948 commit_re = re.compile(r'Committed revision (\d+).', re.M)
949 uuid_re = re.compile(r'Repository UUID:\s*(\S+)', re.M)
949 uuid_re = re.compile(r'Repository UUID:\s*(\S+)', re.M)
950
950
951 def prerun(self):
951 def prerun(self):
952 if self.wc:
952 if self.wc:
953 os.chdir(self.wc)
953 os.chdir(self.wc)
954
954
955 def postrun(self):
955 def postrun(self):
956 if self.wc:
956 if self.wc:
957 os.chdir(self.cwd)
957 os.chdir(self.cwd)
958
958
959 def join(self, name):
959 def join(self, name):
960 return os.path.join(self.wc, '.svn', name)
960 return os.path.join(self.wc, '.svn', name)
961
961
962 def revmapfile(self):
962 def revmapfile(self):
963 return self.join('hg-shamap')
963 return self.join('hg-shamap')
964
964
965 def authorfile(self):
965 def authorfile(self):
966 return self.join('hg-authormap')
966 return self.join('hg-authormap')
967
967
968 def __init__(self, ui, path):
968 def __init__(self, ui, path):
969
969
970 converter_sink.__init__(self, ui, path)
970 converter_sink.__init__(self, ui, path)
971 commandline.__init__(self, ui, 'svn')
971 commandline.__init__(self, ui, 'svn')
972 self.delete = []
972 self.delete = []
973 self.setexec = []
973 self.setexec = []
974 self.delexec = []
974 self.delexec = []
975 self.copies = []
975 self.copies = []
976 self.wc = None
976 self.wc = None
977 self.cwd = os.getcwd()
977 self.cwd = os.getcwd()
978
978
979 path = util.realpath(path)
979 path = os.path.realpath(path)
980
980
981 created = False
981 created = False
982 if os.path.isfile(os.path.join(path, '.svn', 'entries')):
982 if os.path.isfile(os.path.join(path, '.svn', 'entries')):
983 self.wc = path
983 self.wc = path
984 self.run0('update')
984 self.run0('update')
985 else:
985 else:
986 wcpath = os.path.join(os.getcwd(), os.path.basename(path) + '-wc')
986 wcpath = os.path.join(os.getcwd(), os.path.basename(path) + '-wc')
987
987
988 if os.path.isdir(os.path.dirname(path)):
988 if os.path.isdir(os.path.dirname(path)):
989 if not os.path.exists(os.path.join(path, 'db', 'fs-type')):
989 if not os.path.exists(os.path.join(path, 'db', 'fs-type')):
990 ui.status(_('initializing svn repository %r\n') %
990 ui.status(_('initializing svn repository %r\n') %
991 os.path.basename(path))
991 os.path.basename(path))
992 commandline(ui, 'svnadmin').run0('create', path)
992 commandline(ui, 'svnadmin').run0('create', path)
993 created = path
993 created = path
994 path = util.normpath(path)
994 path = util.normpath(path)
995 if not path.startswith('/'):
995 if not path.startswith('/'):
996 path = '/' + path
996 path = '/' + path
997 path = 'file://' + path
997 path = 'file://' + path
998
998
999 ui.status(_('initializing svn working copy %r\n')
999 ui.status(_('initializing svn working copy %r\n')
1000 % os.path.basename(wcpath))
1000 % os.path.basename(wcpath))
1001 self.run0('checkout', path, wcpath)
1001 self.run0('checkout', path, wcpath)
1002
1002
1003 self.wc = wcpath
1003 self.wc = wcpath
1004 self.opener = scmutil.opener(self.wc)
1004 self.opener = scmutil.opener(self.wc)
1005 self.wopener = scmutil.opener(self.wc)
1005 self.wopener = scmutil.opener(self.wc)
1006 self.childmap = mapfile(ui, self.join('hg-childmap'))
1006 self.childmap = mapfile(ui, self.join('hg-childmap'))
1007 self.is_exec = util.checkexec(self.wc) and util.isexec or None
1007 self.is_exec = util.checkexec(self.wc) and util.isexec or None
1008
1008
1009 if created:
1009 if created:
1010 hook = os.path.join(created, 'hooks', 'pre-revprop-change')
1010 hook = os.path.join(created, 'hooks', 'pre-revprop-change')
1011 fp = open(hook, 'w')
1011 fp = open(hook, 'w')
1012 fp.write(pre_revprop_change)
1012 fp.write(pre_revprop_change)
1013 fp.close()
1013 fp.close()
1014 util.setflags(hook, False, True)
1014 util.setflags(hook, False, True)
1015
1015
1016 output = self.run0('info')
1016 output = self.run0('info')
1017 self.uuid = self.uuid_re.search(output).group(1).strip()
1017 self.uuid = self.uuid_re.search(output).group(1).strip()
1018
1018
1019 def wjoin(self, *names):
1019 def wjoin(self, *names):
1020 return os.path.join(self.wc, *names)
1020 return os.path.join(self.wc, *names)
1021
1021
1022 def putfile(self, filename, flags, data):
1022 def putfile(self, filename, flags, data):
1023 if 'l' in flags:
1023 if 'l' in flags:
1024 self.wopener.symlink(data, filename)
1024 self.wopener.symlink(data, filename)
1025 else:
1025 else:
1026 try:
1026 try:
1027 if os.path.islink(self.wjoin(filename)):
1027 if os.path.islink(self.wjoin(filename)):
1028 os.unlink(filename)
1028 os.unlink(filename)
1029 except OSError:
1029 except OSError:
1030 pass
1030 pass
1031 self.wopener.write(filename, data)
1031 self.wopener.write(filename, data)
1032
1032
1033 if self.is_exec:
1033 if self.is_exec:
1034 was_exec = self.is_exec(self.wjoin(filename))
1034 was_exec = self.is_exec(self.wjoin(filename))
1035 else:
1035 else:
1036 # On filesystems not supporting the execute bit, there is no way
1036 # On filesystems not supporting the execute bit, there is no way
1037 # to know whether it is set short of asking subversion. Setting it
1037 # to know whether it is set short of asking subversion. Setting it
1038 # systematically is just as expensive and much simpler.
1038 # systematically is just as expensive and much simpler.
1039 was_exec = 'x' not in flags
1039 was_exec = 'x' not in flags
1040
1040
1041 util.setflags(self.wjoin(filename), False, 'x' in flags)
1041 util.setflags(self.wjoin(filename), False, 'x' in flags)
1042 if was_exec:
1042 if was_exec:
1043 if 'x' not in flags:
1043 if 'x' not in flags:
1044 self.delexec.append(filename)
1044 self.delexec.append(filename)
1045 else:
1045 else:
1046 if 'x' in flags:
1046 if 'x' in flags:
1047 self.setexec.append(filename)
1047 self.setexec.append(filename)
1048
1048
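# [Editor's illustrative sketch -- not part of this changeset.] putfile() above
# falls back to setting the executable bit unconditionally when the working
# copy's filesystem cannot report it (self.is_exec is None). A minimal,
# standard-library-only capability probe might look like the sketch below; it
# is an assumption about the technique, not Mercurial's util.checkexec().
import os, stat, tempfile

def supports_exec_bit(directory):
    # Create a scratch file, try to flip its exec bit, and check whether the
    # filesystem actually kept it (FAT-style filesystems silently drop it).
    fd, name = tempfile.mkstemp(dir=directory, prefix='.exec-probe-')
    try:
        os.close(fd)
        os.chmod(name, 0o755)
        return bool(os.stat(name).st_mode & stat.S_IXUSR)
    finally:
        os.unlink(name)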
1049 def _copyfile(self, source, dest):
1049 def _copyfile(self, source, dest):
1050 # SVN's copy command pukes if the destination file exists, but
1050 # SVN's copy command pukes if the destination file exists, but
1051 # our copyfile method expects to record a copy that has
1051 # our copyfile method expects to record a copy that has
1052 # already occurred. Cross the semantic gap.
1052 # already occurred. Cross the semantic gap.
1053 wdest = self.wjoin(dest)
1053 wdest = self.wjoin(dest)
1054 exists = os.path.lexists(wdest)
1054 exists = os.path.lexists(wdest)
1055 if exists:
1055 if exists:
1056 fd, tempname = tempfile.mkstemp(
1056 fd, tempname = tempfile.mkstemp(
1057 prefix='hg-copy-', dir=os.path.dirname(wdest))
1057 prefix='hg-copy-', dir=os.path.dirname(wdest))
1058 os.close(fd)
1058 os.close(fd)
1059 os.unlink(tempname)
1059 os.unlink(tempname)
1060 os.rename(wdest, tempname)
1060 os.rename(wdest, tempname)
1061 try:
1061 try:
1062 self.run0('copy', source, dest)
1062 self.run0('copy', source, dest)
1063 finally:
1063 finally:
1064 if exists:
1064 if exists:
1065 try:
1065 try:
1066 os.unlink(wdest)
1066 os.unlink(wdest)
1067 except OSError:
1067 except OSError:
1068 pass
1068 pass
1069 os.rename(tempname, wdest)
1069 os.rename(tempname, wdest)
1070
1070
1071 def dirs_of(self, files):
1071 def dirs_of(self, files):
1072 dirs = set()
1072 dirs = set()
1073 for f in files:
1073 for f in files:
1074 if os.path.isdir(self.wjoin(f)):
1074 if os.path.isdir(self.wjoin(f)):
1075 dirs.add(f)
1075 dirs.add(f)
1076 for i in strutil.rfindall(f, '/'):
1076 for i in strutil.rfindall(f, '/'):
1077 dirs.add(f[:i])
1077 dirs.add(f[:i])
1078 return dirs
1078 return dirs
1079
1079
1080 def add_dirs(self, files):
1080 def add_dirs(self, files):
1081 add_dirs = [d for d in sorted(self.dirs_of(files))
1081 add_dirs = [d for d in sorted(self.dirs_of(files))
1082 if not os.path.exists(self.wjoin(d, '.svn', 'entries'))]
1082 if not os.path.exists(self.wjoin(d, '.svn', 'entries'))]
1083 if add_dirs:
1083 if add_dirs:
1084 self.xargs(add_dirs, 'add', non_recursive=True, quiet=True)
1084 self.xargs(add_dirs, 'add', non_recursive=True, quiet=True)
1085 return add_dirs
1085 return add_dirs
1086
1086
1087 def add_files(self, files):
1087 def add_files(self, files):
1088 if files:
1088 if files:
1089 self.xargs(files, 'add', quiet=True)
1089 self.xargs(files, 'add', quiet=True)
1090 return files
1090 return files
1091
1091
1092 def tidy_dirs(self, names):
1092 def tidy_dirs(self, names):
1093 deleted = []
1093 deleted = []
1094 for d in sorted(self.dirs_of(names), reverse=True):
1094 for d in sorted(self.dirs_of(names), reverse=True):
1095 wd = self.wjoin(d)
1095 wd = self.wjoin(d)
1096 if os.listdir(wd) == ['.svn']:
1096 if os.listdir(wd) == ['.svn']:
1097 self.run0('delete', d)
1097 self.run0('delete', d)
1098 deleted.append(d)
1098 deleted.append(d)
1099 return deleted
1099 return deleted
1100
1100
1101 def addchild(self, parent, child):
1101 def addchild(self, parent, child):
1102 self.childmap[parent] = child
1102 self.childmap[parent] = child
1103
1103
1104 def revid(self, rev):
1104 def revid(self, rev):
1105 return u"svn:%s@%s" % (self.uuid, rev)
1105 return u"svn:%s@%s" % (self.uuid, rev)
1106
1106
1107 def putcommit(self, files, copies, parents, commit, source, revmap):
1107 def putcommit(self, files, copies, parents, commit, source, revmap):
1108 # Apply changes to working copy
1108 # Apply changes to working copy
1109 for f, v in files:
1109 for f, v in files:
1110 try:
1110 try:
1111 data, mode = source.getfile(f, v)
1111 data, mode = source.getfile(f, v)
1112 except IOError:
1112 except IOError:
1113 self.delete.append(f)
1113 self.delete.append(f)
1114 else:
1114 else:
1115 self.putfile(f, mode, data)
1115 self.putfile(f, mode, data)
1116 if f in copies:
1116 if f in copies:
1117 self.copies.append([copies[f], f])
1117 self.copies.append([copies[f], f])
1118 files = [f[0] for f in files]
1118 files = [f[0] for f in files]
1119
1119
1120 for parent in parents:
1120 for parent in parents:
1121 try:
1121 try:
1122 return self.revid(self.childmap[parent])
1122 return self.revid(self.childmap[parent])
1123 except KeyError:
1123 except KeyError:
1124 pass
1124 pass
1125 entries = set(self.delete)
1125 entries = set(self.delete)
1126 files = frozenset(files)
1126 files = frozenset(files)
1127 entries.update(self.add_dirs(files.difference(entries)))
1127 entries.update(self.add_dirs(files.difference(entries)))
1128 if self.copies:
1128 if self.copies:
1129 for s, d in self.copies:
1129 for s, d in self.copies:
1130 self._copyfile(s, d)
1130 self._copyfile(s, d)
1131 self.copies = []
1131 self.copies = []
1132 if self.delete:
1132 if self.delete:
1133 self.xargs(self.delete, 'delete')
1133 self.xargs(self.delete, 'delete')
1134 self.delete = []
1134 self.delete = []
1135 entries.update(self.add_files(files.difference(entries)))
1135 entries.update(self.add_files(files.difference(entries)))
1136 entries.update(self.tidy_dirs(entries))
1136 entries.update(self.tidy_dirs(entries))
1137 if self.delexec:
1137 if self.delexec:
1138 self.xargs(self.delexec, 'propdel', 'svn:executable')
1138 self.xargs(self.delexec, 'propdel', 'svn:executable')
1139 self.delexec = []
1139 self.delexec = []
1140 if self.setexec:
1140 if self.setexec:
1141 self.xargs(self.setexec, 'propset', 'svn:executable', '*')
1141 self.xargs(self.setexec, 'propset', 'svn:executable', '*')
1142 self.setexec = []
1142 self.setexec = []
1143
1143
1144 fd, messagefile = tempfile.mkstemp(prefix='hg-convert-')
1144 fd, messagefile = tempfile.mkstemp(prefix='hg-convert-')
1145 fp = os.fdopen(fd, 'w')
1145 fp = os.fdopen(fd, 'w')
1146 fp.write(commit.desc)
1146 fp.write(commit.desc)
1147 fp.close()
1147 fp.close()
1148 try:
1148 try:
1149 output = self.run0('commit',
1149 output = self.run0('commit',
1150 username=util.shortuser(commit.author),
1150 username=util.shortuser(commit.author),
1151 file=messagefile,
1151 file=messagefile,
1152 encoding='utf-8')
1152 encoding='utf-8')
1153 try:
1153 try:
1154 rev = self.commit_re.search(output).group(1)
1154 rev = self.commit_re.search(output).group(1)
1155 except AttributeError:
1155 except AttributeError:
1156 if not files:
1156 if not files:
1157 return parents[0]
1157 return parents[0]
1158 self.ui.warn(_('unexpected svn output:\n'))
1158 self.ui.warn(_('unexpected svn output:\n'))
1159 self.ui.warn(output)
1159 self.ui.warn(output)
1160 raise util.Abort(_('unable to cope with svn output'))
1160 raise util.Abort(_('unable to cope with svn output'))
1161 if commit.rev:
1161 if commit.rev:
1162 self.run('propset', 'hg:convert-rev', commit.rev,
1162 self.run('propset', 'hg:convert-rev', commit.rev,
1163 revprop=True, revision=rev)
1163 revprop=True, revision=rev)
1164 if commit.branch and commit.branch != 'default':
1164 if commit.branch and commit.branch != 'default':
1165 self.run('propset', 'hg:convert-branch', commit.branch,
1165 self.run('propset', 'hg:convert-branch', commit.branch,
1166 revprop=True, revision=rev)
1166 revprop=True, revision=rev)
1167 for parent in parents:
1167 for parent in parents:
1168 self.addchild(parent, rev)
1168 self.addchild(parent, rev)
1169 return self.revid(rev)
1169 return self.revid(rev)
1170 finally:
1170 finally:
1171 os.unlink(messagefile)
1171 os.unlink(messagefile)
1172
1172
1173 def puttags(self, tags):
1173 def puttags(self, tags):
1174 self.ui.warn(_('writing Subversion tags is not yet implemented\n'))
1174 self.ui.warn(_('writing Subversion tags is not yet implemented\n'))
1175 return None, None
1175 return None, None
@@ -1,5643 +1,5643 b''
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import hex, bin, nullid, nullrev, short
8 from node import hex, bin, nullid, nullrev, short
9 from lock import release
9 from lock import release
10 from i18n import _, gettext
10 from i18n import _, gettext
11 import os, re, difflib, time, tempfile, errno
11 import os, re, difflib, time, tempfile, errno
12 import hg, scmutil, util, revlog, extensions, copies, error, bookmarks
12 import hg, scmutil, util, revlog, extensions, copies, error, bookmarks
13 import patch, help, url, encoding, templatekw, discovery
13 import patch, help, url, encoding, templatekw, discovery
14 import archival, changegroup, cmdutil, hbisect
14 import archival, changegroup, cmdutil, hbisect
15 import sshserver, hgweb, hgweb.server, commandserver
15 import sshserver, hgweb, hgweb.server, commandserver
16 import merge as mergemod
16 import merge as mergemod
17 import minirst, revset, fileset
17 import minirst, revset, fileset
18 import dagparser, context, simplemerge
18 import dagparser, context, simplemerge
19 import random, setdiscovery, treediscovery, dagutil
19 import random, setdiscovery, treediscovery, dagutil
20
20
21 table = {}
21 table = {}
22
22
23 command = cmdutil.command(table)
23 command = cmdutil.command(table)
24
24
25 # common command options
25 # common command options
26
26
27 globalopts = [
27 globalopts = [
28 ('R', 'repository', '',
28 ('R', 'repository', '',
29 _('repository root directory or name of overlay bundle file'),
29 _('repository root directory or name of overlay bundle file'),
30 _('REPO')),
30 _('REPO')),
31 ('', 'cwd', '',
31 ('', 'cwd', '',
32 _('change working directory'), _('DIR')),
32 _('change working directory'), _('DIR')),
33 ('y', 'noninteractive', None,
33 ('y', 'noninteractive', None,
34 _('do not prompt, automatically pick the first choice for all prompts')),
34 _('do not prompt, automatically pick the first choice for all prompts')),
35 ('q', 'quiet', None, _('suppress output')),
35 ('q', 'quiet', None, _('suppress output')),
36 ('v', 'verbose', None, _('enable additional output')),
36 ('v', 'verbose', None, _('enable additional output')),
37 ('', 'config', [],
37 ('', 'config', [],
38 _('set/override config option (use \'section.name=value\')'),
38 _('set/override config option (use \'section.name=value\')'),
39 _('CONFIG')),
39 _('CONFIG')),
40 ('', 'debug', None, _('enable debugging output')),
40 ('', 'debug', None, _('enable debugging output')),
41 ('', 'debugger', None, _('start debugger')),
41 ('', 'debugger', None, _('start debugger')),
42 ('', 'encoding', encoding.encoding, _('set the charset encoding'),
42 ('', 'encoding', encoding.encoding, _('set the charset encoding'),
43 _('ENCODE')),
43 _('ENCODE')),
44 ('', 'encodingmode', encoding.encodingmode,
44 ('', 'encodingmode', encoding.encodingmode,
45 _('set the charset encoding mode'), _('MODE')),
45 _('set the charset encoding mode'), _('MODE')),
46 ('', 'traceback', None, _('always print a traceback on exception')),
46 ('', 'traceback', None, _('always print a traceback on exception')),
47 ('', 'time', None, _('time how long the command takes')),
47 ('', 'time', None, _('time how long the command takes')),
48 ('', 'profile', None, _('print command execution profile')),
48 ('', 'profile', None, _('print command execution profile')),
49 ('', 'version', None, _('output version information and exit')),
49 ('', 'version', None, _('output version information and exit')),
50 ('h', 'help', None, _('display help and exit')),
50 ('h', 'help', None, _('display help and exit')),
51 ]
51 ]
52
52
53 dryrunopts = [('n', 'dry-run', None,
53 dryrunopts = [('n', 'dry-run', None,
54 _('do not perform actions, just print output'))]
54 _('do not perform actions, just print output'))]
55
55
56 remoteopts = [
56 remoteopts = [
57 ('e', 'ssh', '',
57 ('e', 'ssh', '',
58 _('specify ssh command to use'), _('CMD')),
58 _('specify ssh command to use'), _('CMD')),
59 ('', 'remotecmd', '',
59 ('', 'remotecmd', '',
60 _('specify hg command to run on the remote side'), _('CMD')),
60 _('specify hg command to run on the remote side'), _('CMD')),
61 ('', 'insecure', None,
61 ('', 'insecure', None,
62 _('do not verify server certificate (ignoring web.cacerts config)')),
62 _('do not verify server certificate (ignoring web.cacerts config)')),
63 ]
63 ]
64
64
65 walkopts = [
65 walkopts = [
66 ('I', 'include', [],
66 ('I', 'include', [],
67 _('include names matching the given patterns'), _('PATTERN')),
67 _('include names matching the given patterns'), _('PATTERN')),
68 ('X', 'exclude', [],
68 ('X', 'exclude', [],
69 _('exclude names matching the given patterns'), _('PATTERN')),
69 _('exclude names matching the given patterns'), _('PATTERN')),
70 ]
70 ]
71
71
72 commitopts = [
72 commitopts = [
73 ('m', 'message', '',
73 ('m', 'message', '',
74 _('use text as commit message'), _('TEXT')),
74 _('use text as commit message'), _('TEXT')),
75 ('l', 'logfile', '',
75 ('l', 'logfile', '',
76 _('read commit message from file'), _('FILE')),
76 _('read commit message from file'), _('FILE')),
77 ]
77 ]
78
78
79 commitopts2 = [
79 commitopts2 = [
80 ('d', 'date', '',
80 ('d', 'date', '',
81 _('record the specified date as commit date'), _('DATE')),
81 _('record the specified date as commit date'), _('DATE')),
82 ('u', 'user', '',
82 ('u', 'user', '',
83 _('record the specified user as committer'), _('USER')),
83 _('record the specified user as committer'), _('USER')),
84 ]
84 ]
85
85
86 templateopts = [
86 templateopts = [
87 ('', 'style', '',
87 ('', 'style', '',
88 _('display using template map file'), _('STYLE')),
88 _('display using template map file'), _('STYLE')),
89 ('', 'template', '',
89 ('', 'template', '',
90 _('display with template'), _('TEMPLATE')),
90 _('display with template'), _('TEMPLATE')),
91 ]
91 ]
92
92
93 logopts = [
93 logopts = [
94 ('p', 'patch', None, _('show patch')),
94 ('p', 'patch', None, _('show patch')),
95 ('g', 'git', None, _('use git extended diff format')),
95 ('g', 'git', None, _('use git extended diff format')),
96 ('l', 'limit', '',
96 ('l', 'limit', '',
97 _('limit number of changes displayed'), _('NUM')),
97 _('limit number of changes displayed'), _('NUM')),
98 ('M', 'no-merges', None, _('do not show merges')),
98 ('M', 'no-merges', None, _('do not show merges')),
99 ('', 'stat', None, _('output diffstat-style summary of changes')),
99 ('', 'stat', None, _('output diffstat-style summary of changes')),
100 ] + templateopts
100 ] + templateopts
101
101
102 diffopts = [
102 diffopts = [
103 ('a', 'text', None, _('treat all files as text')),
103 ('a', 'text', None, _('treat all files as text')),
104 ('g', 'git', None, _('use git extended diff format')),
104 ('g', 'git', None, _('use git extended diff format')),
105 ('', 'nodates', None, _('omit dates from diff headers'))
105 ('', 'nodates', None, _('omit dates from diff headers'))
106 ]
106 ]
107
107
108 diffopts2 = [
108 diffopts2 = [
109 ('p', 'show-function', None, _('show which function each change is in')),
109 ('p', 'show-function', None, _('show which function each change is in')),
110 ('', 'reverse', None, _('produce a diff that undoes the changes')),
110 ('', 'reverse', None, _('produce a diff that undoes the changes')),
111 ('w', 'ignore-all-space', None,
111 ('w', 'ignore-all-space', None,
112 _('ignore white space when comparing lines')),
112 _('ignore white space when comparing lines')),
113 ('b', 'ignore-space-change', None,
113 ('b', 'ignore-space-change', None,
114 _('ignore changes in the amount of white space')),
114 _('ignore changes in the amount of white space')),
115 ('B', 'ignore-blank-lines', None,
115 ('B', 'ignore-blank-lines', None,
116 _('ignore changes whose lines are all blank')),
116 _('ignore changes whose lines are all blank')),
117 ('U', 'unified', '',
117 ('U', 'unified', '',
118 _('number of lines of context to show'), _('NUM')),
118 _('number of lines of context to show'), _('NUM')),
119 ('', 'stat', None, _('output diffstat-style summary of changes')),
119 ('', 'stat', None, _('output diffstat-style summary of changes')),
120 ]
120 ]
121
121
122 mergetoolopts = [
122 mergetoolopts = [
123 ('t', 'tool', '', _('specify merge tool')),
123 ('t', 'tool', '', _('specify merge tool')),
124 ]
124 ]
125
125
126 similarityopts = [
126 similarityopts = [
127 ('s', 'similarity', '',
127 ('s', 'similarity', '',
128 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
128 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
129 ]
129 ]
130
130
131 subrepoopts = [
131 subrepoopts = [
132 ('S', 'subrepos', None,
132 ('S', 'subrepos', None,
133 _('recurse into subrepositories'))
133 _('recurse into subrepositories'))
134 ]
134 ]
135
135
136 # Commands start here, listed alphabetically
136 # Commands start here, listed alphabetically
137
137
138 @command('^add',
138 @command('^add',
139 walkopts + subrepoopts + dryrunopts,
139 walkopts + subrepoopts + dryrunopts,
140 _('[OPTION]... [FILE]...'))
140 _('[OPTION]... [FILE]...'))
141 def add(ui, repo, *pats, **opts):
141 def add(ui, repo, *pats, **opts):
142 """add the specified files on the next commit
142 """add the specified files on the next commit
143
143
144 Schedule files to be version controlled and added to the
144 Schedule files to be version controlled and added to the
145 repository.
145 repository.
146
146
147 The files will be added to the repository at the next commit. To
147 The files will be added to the repository at the next commit. To
148 undo an add before that, see :hg:`forget`.
148 undo an add before that, see :hg:`forget`.
149
149
150 If no names are given, add all files to the repository.
150 If no names are given, add all files to the repository.
151
151
152 .. container:: verbose
152 .. container:: verbose
153
153
154 An example showing how new (unknown) files are added
154 An example showing how new (unknown) files are added
155 automatically by :hg:`add`::
155 automatically by :hg:`add`::
156
156
157 $ ls
157 $ ls
158 foo.c
158 foo.c
159 $ hg status
159 $ hg status
160 ? foo.c
160 ? foo.c
161 $ hg add
161 $ hg add
162 adding foo.c
162 adding foo.c
163 $ hg status
163 $ hg status
164 A foo.c
164 A foo.c
165
165
166 Returns 0 if all files are successfully added.
166 Returns 0 if all files are successfully added.
167 """
167 """
168
168
169 m = scmutil.match(repo[None], pats, opts)
169 m = scmutil.match(repo[None], pats, opts)
170 rejected = cmdutil.add(ui, repo, m, opts.get('dry_run'),
170 rejected = cmdutil.add(ui, repo, m, opts.get('dry_run'),
171 opts.get('subrepos'), prefix="")
171 opts.get('subrepos'), prefix="")
172 return rejected and 1 or 0
172 return rejected and 1 or 0
173
173
174 @command('addremove',
174 @command('addremove',
175 similarityopts + walkopts + dryrunopts,
175 similarityopts + walkopts + dryrunopts,
176 _('[OPTION]... [FILE]...'))
176 _('[OPTION]... [FILE]...'))
177 def addremove(ui, repo, *pats, **opts):
177 def addremove(ui, repo, *pats, **opts):
178 """add all new files, delete all missing files
178 """add all new files, delete all missing files
179
179
180 Add all new files and remove all missing files from the
180 Add all new files and remove all missing files from the
181 repository.
181 repository.
182
182
183 New files are ignored if they match any of the patterns in
183 New files are ignored if they match any of the patterns in
184 ``.hgignore``. As with add, these changes take effect at the next
184 ``.hgignore``. As with add, these changes take effect at the next
185 commit.
185 commit.
186
186
187 Use the -s/--similarity option to detect renamed files. With a
187 Use the -s/--similarity option to detect renamed files. With a
188 parameter greater than 0, this compares every removed file with
188 parameter greater than 0, this compares every removed file with
189 every added file and records those similar enough as renames. This
189 every added file and records those similar enough as renames. This
190 option takes a percentage between 0 (disabled) and 100 (files must
190 option takes a percentage between 0 (disabled) and 100 (files must
191 be identical) as its parameter. Detecting renamed files this way
191 be identical) as its parameter. Detecting renamed files this way
192 can be expensive. After using this option, :hg:`status -C` can be
192 can be expensive. After using this option, :hg:`status -C` can be
193 used to check which files were identified as moved or renamed.
193 used to check which files were identified as moved or renamed.
194
194
195 Returns 0 if all files are successfully added.
195 Returns 0 if all files are successfully added.
196 """
196 """
197 try:
197 try:
198 sim = float(opts.get('similarity') or 100)
198 sim = float(opts.get('similarity') or 100)
199 except ValueError:
199 except ValueError:
200 raise util.Abort(_('similarity must be a number'))
200 raise util.Abort(_('similarity must be a number'))
201 if sim < 0 or sim > 100:
201 if sim < 0 or sim > 100:
202 raise util.Abort(_('similarity must be between 0 and 100'))
202 raise util.Abort(_('similarity must be between 0 and 100'))
203 return scmutil.addremove(repo, pats, opts, similarity=sim / 100.0)
203 return scmutil.addremove(repo, pats, opts, similarity=sim / 100.0)
204
204
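# [Editor's illustrative sketch -- not part of this changeset.] The addremove
# docstring above describes comparing every removed file against every added
# file and treating pairs whose similarity reaches the 0-100 threshold as
# renames. The sketch below illustrates that idea with difflib; it is not the
# algorithm scmutil.addremove() actually uses, and the helper names are
# hypothetical.
import difflib

def similarity_score(old_data, new_data):
    # 0 means completely different contents, 100 means identical contents.
    return int(difflib.SequenceMatcher(None, old_data, new_data).ratio() * 100)

def guess_renames(removed, added, threshold):
    # removed/added map filename -> file contents; pair each removed file with
    # the first added file whose similarity reaches the threshold.
    renames = {}
    for oldname, olddata in removed.items():
        for newname, newdata in added.items():
            if similarity_score(olddata, newdata) >= threshold:
                renames[oldname] = newname
                break
    return renames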
205 @command('^annotate|blame',
205 @command('^annotate|blame',
206 [('r', 'rev', '', _('annotate the specified revision'), _('REV')),
206 [('r', 'rev', '', _('annotate the specified revision'), _('REV')),
207 ('', 'follow', None,
207 ('', 'follow', None,
208 _('follow copies/renames and list the filename (DEPRECATED)')),
208 _('follow copies/renames and list the filename (DEPRECATED)')),
209 ('', 'no-follow', None, _("don't follow copies and renames")),
209 ('', 'no-follow', None, _("don't follow copies and renames")),
210 ('a', 'text', None, _('treat all files as text')),
210 ('a', 'text', None, _('treat all files as text')),
211 ('u', 'user', None, _('list the author (long with -v)')),
211 ('u', 'user', None, _('list the author (long with -v)')),
212 ('f', 'file', None, _('list the filename')),
212 ('f', 'file', None, _('list the filename')),
213 ('d', 'date', None, _('list the date (short with -q)')),
213 ('d', 'date', None, _('list the date (short with -q)')),
214 ('n', 'number', None, _('list the revision number (default)')),
214 ('n', 'number', None, _('list the revision number (default)')),
215 ('c', 'changeset', None, _('list the changeset')),
215 ('c', 'changeset', None, _('list the changeset')),
216 ('l', 'line-number', None, _('show line number at the first appearance'))
216 ('l', 'line-number', None, _('show line number at the first appearance'))
217 ] + walkopts,
217 ] + walkopts,
218 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'))
218 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'))
219 def annotate(ui, repo, *pats, **opts):
219 def annotate(ui, repo, *pats, **opts):
220 """show changeset information by line for each file
220 """show changeset information by line for each file
221
221
222 List changes in files, showing the revision id responsible for
222 List changes in files, showing the revision id responsible for
223 each line.
223 each line.
224
224
225 This command is useful for discovering when a change was made and
225 This command is useful for discovering when a change was made and
226 by whom.
226 by whom.
227
227
228 Without the -a/--text option, annotate will avoid processing files
228 Without the -a/--text option, annotate will avoid processing files
229 it detects as binary. With -a, annotate will annotate the file
229 it detects as binary. With -a, annotate will annotate the file
230 anyway, although the results will probably be neither useful
230 anyway, although the results will probably be neither useful
231 nor desirable.
231 nor desirable.
232
232
233 Returns 0 on success.
233 Returns 0 on success.
234 """
234 """
235 if opts.get('follow'):
235 if opts.get('follow'):
236 # --follow is deprecated and now just an alias for -f/--file
236 # --follow is deprecated and now just an alias for -f/--file
237 # to mimic the behavior of Mercurial before version 1.5
237 # to mimic the behavior of Mercurial before version 1.5
238 opts['file'] = True
238 opts['file'] = True
239
239
240 datefunc = ui.quiet and util.shortdate or util.datestr
240 datefunc = ui.quiet and util.shortdate or util.datestr
241 getdate = util.cachefunc(lambda x: datefunc(x[0].date()))
241 getdate = util.cachefunc(lambda x: datefunc(x[0].date()))
242
242
243 if not pats:
243 if not pats:
244 raise util.Abort(_('at least one filename or pattern is required'))
244 raise util.Abort(_('at least one filename or pattern is required'))
245
245
246 opmap = [('user', ' ', lambda x: ui.shortuser(x[0].user())),
246 opmap = [('user', ' ', lambda x: ui.shortuser(x[0].user())),
247 ('number', ' ', lambda x: str(x[0].rev())),
247 ('number', ' ', lambda x: str(x[0].rev())),
248 ('changeset', ' ', lambda x: short(x[0].node())),
248 ('changeset', ' ', lambda x: short(x[0].node())),
249 ('date', ' ', getdate),
249 ('date', ' ', getdate),
250 ('file', ' ', lambda x: x[0].path()),
250 ('file', ' ', lambda x: x[0].path()),
251 ('line_number', ':', lambda x: str(x[1])),
251 ('line_number', ':', lambda x: str(x[1])),
252 ]
252 ]
253
253
254 if (not opts.get('user') and not opts.get('changeset')
254 if (not opts.get('user') and not opts.get('changeset')
255 and not opts.get('date') and not opts.get('file')):
255 and not opts.get('date') and not opts.get('file')):
256 opts['number'] = True
256 opts['number'] = True
257
257
258 linenumber = opts.get('line_number') is not None
258 linenumber = opts.get('line_number') is not None
259 if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
259 if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
260 raise util.Abort(_('at least one of -n/-c is required for -l'))
260 raise util.Abort(_('at least one of -n/-c is required for -l'))
261
261
262 funcmap = [(func, sep) for op, sep, func in opmap if opts.get(op)]
262 funcmap = [(func, sep) for op, sep, func in opmap if opts.get(op)]
263 funcmap[0] = (funcmap[0][0], '') # no separator in front of first column
263 funcmap[0] = (funcmap[0][0], '') # no separator in front of first column
264
264
265 def bad(x, y):
265 def bad(x, y):
266 raise util.Abort("%s: %s" % (x, y))
266 raise util.Abort("%s: %s" % (x, y))
267
267
268 ctx = scmutil.revsingle(repo, opts.get('rev'))
268 ctx = scmutil.revsingle(repo, opts.get('rev'))
269 m = scmutil.match(ctx, pats, opts)
269 m = scmutil.match(ctx, pats, opts)
270 m.bad = bad
270 m.bad = bad
271 follow = not opts.get('no_follow')
271 follow = not opts.get('no_follow')
272 for abs in ctx.walk(m):
272 for abs in ctx.walk(m):
273 fctx = ctx[abs]
273 fctx = ctx[abs]
274 if not opts.get('text') and util.binary(fctx.data()):
274 if not opts.get('text') and util.binary(fctx.data()):
275 ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
275 ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
276 continue
276 continue
277
277
278 lines = fctx.annotate(follow=follow, linenumber=linenumber)
278 lines = fctx.annotate(follow=follow, linenumber=linenumber)
279 pieces = []
279 pieces = []
280
280
281 for f, sep in funcmap:
281 for f, sep in funcmap:
282 l = [f(n) for n, dummy in lines]
282 l = [f(n) for n, dummy in lines]
283 if l:
283 if l:
284 sized = [(x, encoding.colwidth(x)) for x in l]
284 sized = [(x, encoding.colwidth(x)) for x in l]
285 ml = max([w for x, w in sized])
285 ml = max([w for x, w in sized])
286 pieces.append(["%s%s%s" % (sep, ' ' * (ml - w), x)
286 pieces.append(["%s%s%s" % (sep, ' ' * (ml - w), x)
287 for x, w in sized])
287 for x, w in sized])
288
288
289 if pieces:
289 if pieces:
290 for p, l in zip(zip(*pieces), lines):
290 for p, l in zip(zip(*pieces), lines):
291 ui.write("%s: %s" % ("".join(p), l[1]))
291 ui.write("%s: %s" % ("".join(p), l[1]))
292
292
293 @command('archive',
293 @command('archive',
294 [('', 'no-decode', None, _('do not pass files through decoders')),
294 [('', 'no-decode', None, _('do not pass files through decoders')),
295 ('p', 'prefix', '', _('directory prefix for files in archive'),
295 ('p', 'prefix', '', _('directory prefix for files in archive'),
296 _('PREFIX')),
296 _('PREFIX')),
297 ('r', 'rev', '', _('revision to distribute'), _('REV')),
297 ('r', 'rev', '', _('revision to distribute'), _('REV')),
298 ('t', 'type', '', _('type of distribution to create'), _('TYPE')),
298 ('t', 'type', '', _('type of distribution to create'), _('TYPE')),
299 ] + subrepoopts + walkopts,
299 ] + subrepoopts + walkopts,
300 _('[OPTION]... DEST'))
300 _('[OPTION]... DEST'))
301 def archive(ui, repo, dest, **opts):
301 def archive(ui, repo, dest, **opts):
302 '''create an unversioned archive of a repository revision
302 '''create an unversioned archive of a repository revision
303
303
304 By default, the revision used is the parent of the working
304 By default, the revision used is the parent of the working
305 directory; use -r/--rev to specify a different revision.
305 directory; use -r/--rev to specify a different revision.
306
306
307 The archive type is automatically detected based on file
307 The archive type is automatically detected based on file
308 extension (or override using -t/--type).
308 extension (or override using -t/--type).
309
309
310 .. container:: verbose
310 .. container:: verbose
311
311
312 Examples:
312 Examples:
313
313
314 - create a zip file containing the 1.0 release::
314 - create a zip file containing the 1.0 release::
315
315
316 hg archive -r 1.0 project-1.0.zip
316 hg archive -r 1.0 project-1.0.zip
317
317
318 - create a tarball excluding .hg files::
318 - create a tarball excluding .hg files::
319
319
320 hg archive project.tar.gz -X ".hg*"
320 hg archive project.tar.gz -X ".hg*"
321
321
322 Valid types are:
322 Valid types are:
323
323
324 :``files``: a directory full of files (default)
324 :``files``: a directory full of files (default)
325 :``tar``: tar archive, uncompressed
325 :``tar``: tar archive, uncompressed
326 :``tbz2``: tar archive, compressed using bzip2
326 :``tbz2``: tar archive, compressed using bzip2
327 :``tgz``: tar archive, compressed using gzip
327 :``tgz``: tar archive, compressed using gzip
328 :``uzip``: zip archive, uncompressed
328 :``uzip``: zip archive, uncompressed
329 :``zip``: zip archive, compressed using deflate
329 :``zip``: zip archive, compressed using deflate
330
330
331 The exact name of the destination archive or directory is given
331 The exact name of the destination archive or directory is given
332 using a format string; see :hg:`help export` for details.
332 using a format string; see :hg:`help export` for details.
333
333
334 Each member added to an archive file has a directory prefix
334 Each member added to an archive file has a directory prefix
335 prepended. Use -p/--prefix to specify a format string for the
335 prepended. Use -p/--prefix to specify a format string for the
336 prefix. The default is the basename of the archive, with suffixes
336 prefix. The default is the basename of the archive, with suffixes
337 removed.
337 removed.
338
338
339 Returns 0 on success.
339 Returns 0 on success.
340 '''
340 '''
341
341
342 ctx = scmutil.revsingle(repo, opts.get('rev'))
342 ctx = scmutil.revsingle(repo, opts.get('rev'))
343 if not ctx:
343 if not ctx:
344 raise util.Abort(_('no working directory: please specify a revision'))
344 raise util.Abort(_('no working directory: please specify a revision'))
345 node = ctx.node()
345 node = ctx.node()
346 dest = cmdutil.makefilename(repo, dest, node)
346 dest = cmdutil.makefilename(repo, dest, node)
347 if util.realpath(dest) == repo.root:
347 if os.path.realpath(dest) == repo.root:
348 raise util.Abort(_('repository root cannot be destination'))
348 raise util.Abort(_('repository root cannot be destination'))
349
349
350 kind = opts.get('type') or archival.guesskind(dest) or 'files'
350 kind = opts.get('type') or archival.guesskind(dest) or 'files'
351 prefix = opts.get('prefix')
351 prefix = opts.get('prefix')
352
352
353 if dest == '-':
353 if dest == '-':
354 if kind == 'files':
354 if kind == 'files':
355 raise util.Abort(_('cannot archive plain files to stdout'))
355 raise util.Abort(_('cannot archive plain files to stdout'))
356 dest = cmdutil.makefileobj(repo, dest)
356 dest = cmdutil.makefileobj(repo, dest)
357 if not prefix:
357 if not prefix:
358 prefix = os.path.basename(repo.root) + '-%h'
358 prefix = os.path.basename(repo.root) + '-%h'
359
359
360 prefix = cmdutil.makefilename(repo, prefix, node)
360 prefix = cmdutil.makefilename(repo, prefix, node)
361 matchfn = scmutil.match(ctx, [], opts)
361 matchfn = scmutil.match(ctx, [], opts)
362 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
362 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
363 matchfn, prefix, subrepos=opts.get('subrepos'))
363 matchfn, prefix, subrepos=opts.get('subrepos'))
364
364
365 @command('backout',
365 @command('backout',
366 [('', 'merge', None, _('merge with old dirstate parent after backout')),
366 [('', 'merge', None, _('merge with old dirstate parent after backout')),
367 ('', 'parent', '',
367 ('', 'parent', '',
368 _('parent to choose when backing out merge (DEPRECATED)'), _('REV')),
368 _('parent to choose when backing out merge (DEPRECATED)'), _('REV')),
369 ('r', 'rev', '', _('revision to backout'), _('REV')),
369 ('r', 'rev', '', _('revision to backout'), _('REV')),
370 ] + mergetoolopts + walkopts + commitopts + commitopts2,
370 ] + mergetoolopts + walkopts + commitopts + commitopts2,
371 _('[OPTION]... [-r] REV'))
371 _('[OPTION]... [-r] REV'))
372 def backout(ui, repo, node=None, rev=None, **opts):
372 def backout(ui, repo, node=None, rev=None, **opts):
373 '''reverse effect of earlier changeset
373 '''reverse effect of earlier changeset
374
374
375 Prepare a new changeset with the effect of REV undone in the
375 Prepare a new changeset with the effect of REV undone in the
376 current working directory.
376 current working directory.
377
377
378 If REV is the parent of the working directory, then this new changeset
378 If REV is the parent of the working directory, then this new changeset
379 is committed automatically. Otherwise, hg needs to merge the
379 is committed automatically. Otherwise, hg needs to merge the
380 changes and the merged result is left uncommitted.
380 changes and the merged result is left uncommitted.
381
381
382 .. note::
382 .. note::
383 backout cannot be used to fix either an unwanted or
383 backout cannot be used to fix either an unwanted or
384 incorrect merge.
384 incorrect merge.
385
385
386 .. container:: verbose
386 .. container:: verbose
387
387
388 By default, the pending changeset will have one parent,
388 By default, the pending changeset will have one parent,
389 maintaining a linear history. With --merge, the pending
389 maintaining a linear history. With --merge, the pending
390 changeset will instead have two parents: the old parent of the
390 changeset will instead have two parents: the old parent of the
391 working directory and a new child of REV that simply undoes REV.
391 working directory and a new child of REV that simply undoes REV.
392
392
393 Before version 1.7, the behavior without --merge was equivalent
393 Before version 1.7, the behavior without --merge was equivalent
394 to specifying --merge followed by :hg:`update --clean .` to
394 to specifying --merge followed by :hg:`update --clean .` to
395 cancel the merge and leave the child of REV as a head to be
395 cancel the merge and leave the child of REV as a head to be
396 merged separately.
396 merged separately.
397
397
398 See :hg:`help dates` for a list of formats valid for -d/--date.
398 See :hg:`help dates` for a list of formats valid for -d/--date.
399
399
400 Returns 0 on success.
400 Returns 0 on success.
401 '''
401 '''
402 if rev and node:
402 if rev and node:
403 raise util.Abort(_("please specify just one revision"))
403 raise util.Abort(_("please specify just one revision"))
404
404
405 if not rev:
405 if not rev:
406 rev = node
406 rev = node
407
407
408 if not rev:
408 if not rev:
409 raise util.Abort(_("please specify a revision to backout"))
409 raise util.Abort(_("please specify a revision to backout"))
410
410
411 date = opts.get('date')
411 date = opts.get('date')
412 if date:
412 if date:
413 opts['date'] = util.parsedate(date)
413 opts['date'] = util.parsedate(date)
414
414
415 cmdutil.bailifchanged(repo)
415 cmdutil.bailifchanged(repo)
416 node = scmutil.revsingle(repo, rev).node()
416 node = scmutil.revsingle(repo, rev).node()
417
417
418 op1, op2 = repo.dirstate.parents()
418 op1, op2 = repo.dirstate.parents()
419 a = repo.changelog.ancestor(op1, node)
419 a = repo.changelog.ancestor(op1, node)
420 if a != node:
420 if a != node:
421 raise util.Abort(_('cannot backout change on a different branch'))
421 raise util.Abort(_('cannot backout change on a different branch'))
422
422
423 p1, p2 = repo.changelog.parents(node)
423 p1, p2 = repo.changelog.parents(node)
424 if p1 == nullid:
424 if p1 == nullid:
425 raise util.Abort(_('cannot backout a change with no parents'))
425 raise util.Abort(_('cannot backout a change with no parents'))
426 if p2 != nullid:
426 if p2 != nullid:
427 if not opts.get('parent'):
427 if not opts.get('parent'):
428 raise util.Abort(_('cannot backout a merge changeset'))
428 raise util.Abort(_('cannot backout a merge changeset'))
429 p = repo.lookup(opts['parent'])
429 p = repo.lookup(opts['parent'])
430 if p not in (p1, p2):
430 if p not in (p1, p2):
431 raise util.Abort(_('%s is not a parent of %s') %
431 raise util.Abort(_('%s is not a parent of %s') %
432 (short(p), short(node)))
432 (short(p), short(node)))
433 parent = p
433 parent = p
434 else:
434 else:
435 if opts.get('parent'):
435 if opts.get('parent'):
436 raise util.Abort(_('cannot use --parent on non-merge changeset'))
436 raise util.Abort(_('cannot use --parent on non-merge changeset'))
437 parent = p1
437 parent = p1
438
438
439 # the backout should appear on the same branch
439 # the backout should appear on the same branch
440 branch = repo.dirstate.branch()
440 branch = repo.dirstate.branch()
441 hg.clean(repo, node, show_stats=False)
441 hg.clean(repo, node, show_stats=False)
442 repo.dirstate.setbranch(branch)
442 repo.dirstate.setbranch(branch)
443 revert_opts = opts.copy()
443 revert_opts = opts.copy()
444 revert_opts['date'] = None
444 revert_opts['date'] = None
445 revert_opts['all'] = True
445 revert_opts['all'] = True
446 revert_opts['rev'] = hex(parent)
446 revert_opts['rev'] = hex(parent)
447 revert_opts['no_backup'] = None
447 revert_opts['no_backup'] = None
448 revert(ui, repo, **revert_opts)
448 revert(ui, repo, **revert_opts)
449 if not opts.get('merge') and op1 != node:
449 if not opts.get('merge') and op1 != node:
450 try:
450 try:
451 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
451 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
452 return hg.update(repo, op1)
452 return hg.update(repo, op1)
453 finally:
453 finally:
454 ui.setconfig('ui', 'forcemerge', '')
454 ui.setconfig('ui', 'forcemerge', '')
455
455
456 commit_opts = opts.copy()
456 commit_opts = opts.copy()
457 commit_opts['addremove'] = False
457 commit_opts['addremove'] = False
458 if not commit_opts['message'] and not commit_opts['logfile']:
458 if not commit_opts['message'] and not commit_opts['logfile']:
459 # we don't translate commit messages
459 # we don't translate commit messages
460 commit_opts['message'] = "Backed out changeset %s" % short(node)
460 commit_opts['message'] = "Backed out changeset %s" % short(node)
461 commit_opts['force_editor'] = True
461 commit_opts['force_editor'] = True
462 commit(ui, repo, **commit_opts)
462 commit(ui, repo, **commit_opts)
463 def nice(node):
463 def nice(node):
464 return '%d:%s' % (repo.changelog.rev(node), short(node))
464 return '%d:%s' % (repo.changelog.rev(node), short(node))
465 ui.status(_('changeset %s backs out changeset %s\n') %
465 ui.status(_('changeset %s backs out changeset %s\n') %
466 (nice(repo.changelog.tip()), nice(node)))
466 (nice(repo.changelog.tip()), nice(node)))
467 if opts.get('merge') and op1 != node:
467 if opts.get('merge') and op1 != node:
468 hg.clean(repo, op1, show_stats=False)
468 hg.clean(repo, op1, show_stats=False)
469 ui.status(_('merging with changeset %s\n')
469 ui.status(_('merging with changeset %s\n')
470 % nice(repo.changelog.tip()))
470 % nice(repo.changelog.tip()))
471 try:
471 try:
472 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
472 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
473 return hg.merge(repo, hex(repo.changelog.tip()))
473 return hg.merge(repo, hex(repo.changelog.tip()))
474 finally:
474 finally:
475 ui.setconfig('ui', 'forcemerge', '')
475 ui.setconfig('ui', 'forcemerge', '')
476 return 0
476 return 0
477
477
478 @command('bisect',
478 @command('bisect',
479 [('r', 'reset', False, _('reset bisect state')),
479 [('r', 'reset', False, _('reset bisect state')),
480 ('g', 'good', False, _('mark changeset good')),
480 ('g', 'good', False, _('mark changeset good')),
481 ('b', 'bad', False, _('mark changeset bad')),
481 ('b', 'bad', False, _('mark changeset bad')),
482 ('s', 'skip', False, _('skip testing changeset')),
482 ('s', 'skip', False, _('skip testing changeset')),
483 ('e', 'extend', False, _('extend the bisect range')),
483 ('e', 'extend', False, _('extend the bisect range')),
484 ('c', 'command', '', _('use command to check changeset state'), _('CMD')),
484 ('c', 'command', '', _('use command to check changeset state'), _('CMD')),
485 ('U', 'noupdate', False, _('do not update to target'))],
485 ('U', 'noupdate', False, _('do not update to target'))],
486 _("[-gbsr] [-U] [-c CMD] [REV]"))
486 _("[-gbsr] [-U] [-c CMD] [REV]"))
487 def bisect(ui, repo, rev=None, extra=None, command=None,
487 def bisect(ui, repo, rev=None, extra=None, command=None,
488 reset=None, good=None, bad=None, skip=None, extend=None,
488 reset=None, good=None, bad=None, skip=None, extend=None,
489 noupdate=None):
489 noupdate=None):
490 """subdivision search of changesets
490 """subdivision search of changesets
491
491
492 This command helps to find changesets which introduce problems. To
492 This command helps to find changesets which introduce problems. To
493 use, mark the earliest changeset you know exhibits the problem as
493 use, mark the earliest changeset you know exhibits the problem as
494 bad, then mark the latest changeset which is free from the problem
494 bad, then mark the latest changeset which is free from the problem
495 as good. Bisect will update your working directory to a revision
495 as good. Bisect will update your working directory to a revision
496 for testing (unless the -U/--noupdate option is specified). Once
496 for testing (unless the -U/--noupdate option is specified). Once
497 you have performed tests, mark the working directory as good or
497 you have performed tests, mark the working directory as good or
498 bad, and bisect will either update to another candidate changeset
498 bad, and bisect will either update to another candidate changeset
499 or announce that it has found the bad revision.
499 or announce that it has found the bad revision.
500
500
501 As a shortcut, you can also use the revision argument to mark a
501 As a shortcut, you can also use the revision argument to mark a
502 revision as good or bad without checking it out first.
502 revision as good or bad without checking it out first.
503
503
504 If you supply a command, it will be used for automatic bisection.
504 If you supply a command, it will be used for automatic bisection.
505 Its exit status will be used to mark revisions as good or bad:
505 Its exit status will be used to mark revisions as good or bad:
506 status 0 means good, 125 means to skip the revision, 127
506 status 0 means good, 125 means to skip the revision, 127
507 (command not found) will abort the bisection, and any other
507 (command not found) will abort the bisection, and any other
508 non-zero exit status means the revision is bad.
508 non-zero exit status means the revision is bad.
509
509
510 .. container:: verbose
510 .. container:: verbose
511
511
512 Some examples:
512 Some examples:
513
513
514 - start a bisection with known bad revision 12, and good revision 34::
514 - start a bisection with known bad revision 12, and good revision 34::
515
515
516 hg bisect --bad 34
516 hg bisect --bad 34
517 hg bisect --good 12
517 hg bisect --good 12
518
518
519 - advance the current bisection by marking the current revision as good or
519 - advance the current bisection by marking the current revision as good or
520 bad::
520 bad::
521
521
522 hg bisect --good
522 hg bisect --good
523 hg bisect --bad
523 hg bisect --bad
524
524
525 - mark the current revision, or a known revision, to be skipped (eg. if
525 - mark the current revision, or a known revision, to be skipped (eg. if
526 that revision is not usable because of another issue)::
526 that revision is not usable because of another issue)::
527
527
528 hg bisect --skip
528 hg bisect --skip
529 hg bisect --skip 23
529 hg bisect --skip 23
530
530
531 - forget the current bisection::
531 - forget the current bisection::
532
532
533 hg bisect --reset
533 hg bisect --reset
534
534
535 - use 'make && make tests' to automatically find the first broken
535 - use 'make && make tests' to automatically find the first broken
536 revision::
536 revision::
537
537
538 hg bisect --reset
538 hg bisect --reset
539 hg bisect --bad 34
539 hg bisect --bad 34
540 hg bisect --good 12
540 hg bisect --good 12
541 hg bisect --command 'make && make tests'
541 hg bisect --command 'make && make tests'
542
542
543 - see all changesets whose states are already known in the current
543 - see all changesets whose states are already known in the current
544 bisection::
544 bisection::
545
545
546 hg log -r "bisect(pruned)"
546 hg log -r "bisect(pruned)"
547
547
548 - see all changesets that took part in the current bisection::
548 - see all changesets that took part in the current bisection::
549
549
550 hg log -r "bisect(range)"
550 hg log -r "bisect(range)"
551
551
552 - with the graphlog extension, you can even get a nice graph::
552 - with the graphlog extension, you can even get a nice graph::
553
553
554 hg log --graph -r "bisect(range)"
554 hg log --graph -r "bisect(range)"
555
555
556 See :hg:`help revsets` for more about the `bisect()` keyword.
556 See :hg:`help revsets` for more about the `bisect()` keyword.
557
557
558 Returns 0 on success.
558 Returns 0 on success.
559 """
559 """
560 def extendbisectrange(nodes, good):
560 def extendbisectrange(nodes, good):
561 # bisect is incomplete when it ends on a merge node and
561 # bisect is incomplete when it ends on a merge node and
562 # one of the parent was not checked.
562 # one of the parent was not checked.
563 parents = repo[nodes[0]].parents()
563 parents = repo[nodes[0]].parents()
564 if len(parents) > 1:
564 if len(parents) > 1:
565 side = good and state['bad'] or state['good']
565 side = good and state['bad'] or state['good']
566 num = len(set(i.node() for i in parents) & set(side))
566 num = len(set(i.node() for i in parents) & set(side))
567 if num == 1:
567 if num == 1:
568 return parents[0].ancestor(parents[1])
568 return parents[0].ancestor(parents[1])
569 return None
569 return None
570
570
571 def print_result(nodes, good):
571 def print_result(nodes, good):
572 displayer = cmdutil.show_changeset(ui, repo, {})
572 displayer = cmdutil.show_changeset(ui, repo, {})
573 if len(nodes) == 1:
573 if len(nodes) == 1:
574 # narrowed it down to a single revision
574 # narrowed it down to a single revision
575 if good:
575 if good:
576 ui.write(_("The first good revision is:\n"))
576 ui.write(_("The first good revision is:\n"))
577 else:
577 else:
578 ui.write(_("The first bad revision is:\n"))
578 ui.write(_("The first bad revision is:\n"))
579 displayer.show(repo[nodes[0]])
579 displayer.show(repo[nodes[0]])
580 extendnode = extendbisectrange(nodes, good)
580 extendnode = extendbisectrange(nodes, good)
581 if extendnode is not None:
581 if extendnode is not None:
582 ui.write(_('Not all ancestors of this changeset have been'
582 ui.write(_('Not all ancestors of this changeset have been'
583 ' checked.\nUse bisect --extend to continue the '
583 ' checked.\nUse bisect --extend to continue the '
584 'bisection from\nthe common ancestor, %s.\n')
584 'bisection from\nthe common ancestor, %s.\n')
585 % extendnode)
585 % extendnode)
586 else:
586 else:
587 # multiple possible revisions
587 # multiple possible revisions
588 if good:
588 if good:
589 ui.write(_("Due to skipped revisions, the first "
589 ui.write(_("Due to skipped revisions, the first "
590 "good revision could be any of:\n"))
590 "good revision could be any of:\n"))
591 else:
591 else:
592 ui.write(_("Due to skipped revisions, the first "
592 ui.write(_("Due to skipped revisions, the first "
593 "bad revision could be any of:\n"))
593 "bad revision could be any of:\n"))
594 for n in nodes:
594 for n in nodes:
595 displayer.show(repo[n])
595 displayer.show(repo[n])
596 displayer.close()
596 displayer.close()
597
597
598 def check_state(state, interactive=True):
598 def check_state(state, interactive=True):
599 if not state['good'] or not state['bad']:
599 if not state['good'] or not state['bad']:
600 if (good or bad or skip or reset) and interactive:
600 if (good or bad or skip or reset) and interactive:
601 return
601 return
602 if not state['good']:
602 if not state['good']:
603 raise util.Abort(_('cannot bisect (no known good revisions)'))
603 raise util.Abort(_('cannot bisect (no known good revisions)'))
604 else:
604 else:
605 raise util.Abort(_('cannot bisect (no known bad revisions)'))
605 raise util.Abort(_('cannot bisect (no known bad revisions)'))
606 return True
606 return True
607
607
608 # backward compatibility
608 # backward compatibility
609 if rev in "good bad reset init".split():
609 if rev in "good bad reset init".split():
610 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
610 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
611 cmd, rev, extra = rev, extra, None
611 cmd, rev, extra = rev, extra, None
612 if cmd == "good":
612 if cmd == "good":
613 good = True
613 good = True
614 elif cmd == "bad":
614 elif cmd == "bad":
615 bad = True
615 bad = True
616 else:
616 else:
617 reset = True
617 reset = True
618 elif extra or good + bad + skip + reset + extend + bool(command) > 1:
618 elif extra or good + bad + skip + reset + extend + bool(command) > 1:
619 raise util.Abort(_('incompatible arguments'))
619 raise util.Abort(_('incompatible arguments'))
620
620
621 if reset:
621 if reset:
622 p = repo.join("bisect.state")
622 p = repo.join("bisect.state")
623 if os.path.exists(p):
623 if os.path.exists(p):
624 os.unlink(p)
624 os.unlink(p)
625 return
625 return
626
626
627 state = hbisect.load_state(repo)
627 state = hbisect.load_state(repo)
628
628
629 if command:
629 if command:
630 changesets = 1
630 changesets = 1
631 try:
631 try:
632 while changesets:
632 while changesets:
633 # update state
633 # update state
634 status = util.system(command, out=ui.fout)
634 status = util.system(command, out=ui.fout)
635 if status == 125:
635 if status == 125:
636 transition = "skip"
636 transition = "skip"
637 elif status == 0:
637 elif status == 0:
638 transition = "good"
638 transition = "good"
639 # status < 0 means process was killed
639 # status < 0 means process was killed
640 elif status == 127:
640 elif status == 127:
641 raise util.Abort(_("failed to execute %s") % command)
641 raise util.Abort(_("failed to execute %s") % command)
642 elif status < 0:
642 elif status < 0:
643 raise util.Abort(_("%s killed") % command)
643 raise util.Abort(_("%s killed") % command)
644 else:
644 else:
645 transition = "bad"
645 transition = "bad"
646 ctx = scmutil.revsingle(repo, rev)
646 ctx = scmutil.revsingle(repo, rev)
647 rev = None # clear for future iterations
647 rev = None # clear for future iterations
648 state[transition].append(ctx.node())
648 state[transition].append(ctx.node())
649 ui.status(_('Changeset %d:%s: %s\n') % (ctx, ctx, transition))
649 ui.status(_('Changeset %d:%s: %s\n') % (ctx, ctx, transition))
650 check_state(state, interactive=False)
650 check_state(state, interactive=False)
651 # bisect
651 # bisect
652 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
652 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
653 # update to next check
653 # update to next check
654 cmdutil.bailifchanged(repo)
654 cmdutil.bailifchanged(repo)
655 hg.clean(repo, nodes[0], show_stats=False)
655 hg.clean(repo, nodes[0], show_stats=False)
656 finally:
656 finally:
657 hbisect.save_state(repo, state)
657 hbisect.save_state(repo, state)
658 print_result(nodes, good)
658 print_result(nodes, good)
659 return
659 return
660
660
661 # update state
661 # update state
662
662
663 if rev:
663 if rev:
664 nodes = [repo.lookup(i) for i in scmutil.revrange(repo, [rev])]
664 nodes = [repo.lookup(i) for i in scmutil.revrange(repo, [rev])]
665 else:
665 else:
666 nodes = [repo.lookup('.')]
666 nodes = [repo.lookup('.')]
667
667
668 if good or bad or skip:
668 if good or bad or skip:
669 if good:
669 if good:
670 state['good'] += nodes
670 state['good'] += nodes
671 elif bad:
671 elif bad:
672 state['bad'] += nodes
672 state['bad'] += nodes
673 elif skip:
673 elif skip:
674 state['skip'] += nodes
674 state['skip'] += nodes
675 hbisect.save_state(repo, state)
675 hbisect.save_state(repo, state)
676
676
677 if not check_state(state):
677 if not check_state(state):
678 return
678 return
679
679
680 # actually bisect
680 # actually bisect
681 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
681 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
682 if extend:
682 if extend:
683 if not changesets:
683 if not changesets:
684 extendnode = extendbisectrange(nodes, good)
684 extendnode = extendbisectrange(nodes, good)
685 if extendnode is not None:
685 if extendnode is not None:
686 ui.write(_("Extending search to changeset %d:%s\n"
686 ui.write(_("Extending search to changeset %d:%s\n"
687 % (extendnode.rev(), extendnode)))
687 % (extendnode.rev(), extendnode)))
688 if noupdate:
688 if noupdate:
689 return
689 return
690 cmdutil.bailifchanged(repo)
690 cmdutil.bailifchanged(repo)
691 return hg.clean(repo, extendnode.node())
691 return hg.clean(repo, extendnode.node())
692 raise util.Abort(_("nothing to extend"))
692 raise util.Abort(_("nothing to extend"))
693
693
694 if changesets == 0:
694 if changesets == 0:
695 print_result(nodes, good)
695 print_result(nodes, good)
696 else:
696 else:
697 assert len(nodes) == 1 # only a single node can be tested next
697 assert len(nodes) == 1 # only a single node can be tested next
698 node = nodes[0]
698 node = nodes[0]
699 # compute the approximate number of remaining tests
699 # compute the approximate number of remaining tests
700 tests, size = 0, 2
700 tests, size = 0, 2
701 while size <= changesets:
701 while size <= changesets:
702 tests, size = tests + 1, size * 2
702 tests, size = tests + 1, size * 2
703 rev = repo.changelog.rev(node)
703 rev = repo.changelog.rev(node)
704 ui.write(_("Testing changeset %d:%s "
704 ui.write(_("Testing changeset %d:%s "
705 "(%d changesets remaining, ~%d tests)\n")
705 "(%d changesets remaining, ~%d tests)\n")
706 % (rev, short(node), changesets, tests))
706 % (rev, short(node), changesets, tests))
707 if not noupdate:
707 if not noupdate:
708 cmdutil.bailifchanged(repo)
708 cmdutil.bailifchanged(repo)
709 return hg.clean(repo, node)
709 return hg.clean(repo, node)
710
710
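# Illustration only (not part of Mercurial's command table): the --command
# loop and the test estimate above boil down to two small computations. A
# minimal sketch of both, assuming plain integers as input; the helper names
# are made up for this example.
def _bisecttransition(status):
    # mirrors the exit-status handling in the --command loop: 125 skips the
    # revision, 127 and negative statuses abort, zero is good, anything else
    # is bad
    if status == 125:
        return "skip"
    if status == 127:
        raise ValueError("failed to execute command")
    if status < 0:
        raise ValueError("command was killed")
    return status == 0 and "good" or "bad"

def _bisectremainingtests(changesets):
    # mirrors the "~%d tests" estimate: roughly log2 of the remaining
    # changesets
    tests, size = 0, 2
    while size <= changesets:
        tests, size = tests + 1, size * 2
    return tests
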
711 @command('bookmarks',
711 @command('bookmarks',
712 [('f', 'force', False, _('force')),
712 [('f', 'force', False, _('force')),
713 ('r', 'rev', '', _('revision'), _('REV')),
713 ('r', 'rev', '', _('revision'), _('REV')),
714 ('d', 'delete', False, _('delete a given bookmark')),
714 ('d', 'delete', False, _('delete a given bookmark')),
715 ('m', 'rename', '', _('rename a given bookmark'), _('NAME')),
715 ('m', 'rename', '', _('rename a given bookmark'), _('NAME')),
716 ('i', 'inactive', False, _('do not mark a new bookmark active'))],
716 ('i', 'inactive', False, _('do not mark a new bookmark active'))],
717 _('hg bookmarks [-f] [-d] [-i] [-m NAME] [-r REV] [NAME]'))
717 _('hg bookmarks [-f] [-d] [-i] [-m NAME] [-r REV] [NAME]'))
718 def bookmark(ui, repo, mark=None, rev=None, force=False, delete=False,
718 def bookmark(ui, repo, mark=None, rev=None, force=False, delete=False,
719 rename=None, inactive=False):
719 rename=None, inactive=False):
720 '''track a line of development with movable markers
720 '''track a line of development with movable markers
721
721
722 Bookmarks are pointers to certain commits that move when
722 Bookmarks are pointers to certain commits that move when
723 committing. Bookmarks are local. They can be renamed, copied and
723 committing. Bookmarks are local. They can be renamed, copied and
724 deleted. It is possible to use bookmark names in :hg:`merge` and
724 deleted. It is possible to use bookmark names in :hg:`merge` and
725 :hg:`update` to merge and update respectively to a given bookmark.
725 :hg:`update` to merge and update respectively to a given bookmark.
726
726
727 You can use :hg:`bookmark NAME` to set a bookmark on the working
727 You can use :hg:`bookmark NAME` to set a bookmark on the working
728 directory's parent revision with the given name. If you specify
728 directory's parent revision with the given name. If you specify
729 a revision using -r REV (where REV may be an existing bookmark),
729 a revision using -r REV (where REV may be an existing bookmark),
730 the bookmark is assigned to that revision.
730 the bookmark is assigned to that revision.
731
731
732 Bookmarks can be pushed and pulled between repositories (see :hg:`help
732 Bookmarks can be pushed and pulled between repositories (see :hg:`help
733 push` and :hg:`help pull`). This requires both the local and remote
733 push` and :hg:`help pull`). This requires both the local and remote
734 repositories to support bookmarks. For versions prior to 1.8, this means
734 repositories to support bookmarks. For versions prior to 1.8, this means
735 the bookmarks extension must be enabled.
735 the bookmarks extension must be enabled.
736 '''
736 '''
737 hexfn = ui.debugflag and hex or short
737 hexfn = ui.debugflag and hex or short
738 marks = repo._bookmarks
738 marks = repo._bookmarks
739 cur = repo.changectx('.').node()
739 cur = repo.changectx('.').node()
740
740
741 if rename:
741 if rename:
742 if rename not in marks:
742 if rename not in marks:
743 raise util.Abort(_("bookmark '%s' does not exist") % rename)
743 raise util.Abort(_("bookmark '%s' does not exist") % rename)
744 if mark in marks and not force:
744 if mark in marks and not force:
745 raise util.Abort(_("bookmark '%s' already exists "
745 raise util.Abort(_("bookmark '%s' already exists "
746 "(use -f to force)") % mark)
746 "(use -f to force)") % mark)
747 if mark is None:
747 if mark is None:
748 raise util.Abort(_("new bookmark name required"))
748 raise util.Abort(_("new bookmark name required"))
749 marks[mark] = marks[rename]
749 marks[mark] = marks[rename]
750 if repo._bookmarkcurrent == rename and not inactive:
750 if repo._bookmarkcurrent == rename and not inactive:
751 bookmarks.setcurrent(repo, mark)
751 bookmarks.setcurrent(repo, mark)
752 del marks[rename]
752 del marks[rename]
753 bookmarks.write(repo)
753 bookmarks.write(repo)
754 return
754 return
755
755
756 if delete:
756 if delete:
757 if mark is None:
757 if mark is None:
758 raise util.Abort(_("bookmark name required"))
758 raise util.Abort(_("bookmark name required"))
759 if mark not in marks:
759 if mark not in marks:
760 raise util.Abort(_("bookmark '%s' does not exist") % mark)
760 raise util.Abort(_("bookmark '%s' does not exist") % mark)
761 if mark == repo._bookmarkcurrent:
761 if mark == repo._bookmarkcurrent:
762 bookmarks.setcurrent(repo, None)
762 bookmarks.setcurrent(repo, None)
763 del marks[mark]
763 del marks[mark]
764 bookmarks.write(repo)
764 bookmarks.write(repo)
765 return
765 return
766
766
767 if mark is not None:
767 if mark is not None:
768 if "\n" in mark:
768 if "\n" in mark:
769 raise util.Abort(_("bookmark name cannot contain newlines"))
769 raise util.Abort(_("bookmark name cannot contain newlines"))
770 mark = mark.strip()
770 mark = mark.strip()
771 if not mark:
771 if not mark:
772 raise util.Abort(_("bookmark names cannot consist entirely of "
772 raise util.Abort(_("bookmark names cannot consist entirely of "
773 "whitespace"))
773 "whitespace"))
774 if inactive and mark == repo._bookmarkcurrent:
774 if inactive and mark == repo._bookmarkcurrent:
775 bookmarks.setcurrent(repo, None)
775 bookmarks.setcurrent(repo, None)
776 return
776 return
777 if mark in marks and not force:
777 if mark in marks and not force:
778 raise util.Abort(_("bookmark '%s' already exists "
778 raise util.Abort(_("bookmark '%s' already exists "
779 "(use -f to force)") % mark)
779 "(use -f to force)") % mark)
780 if ((mark in repo.branchtags() or mark == repo.dirstate.branch())
780 if ((mark in repo.branchtags() or mark == repo.dirstate.branch())
781 and not force):
781 and not force):
782 raise util.Abort(
782 raise util.Abort(
783 _("a bookmark cannot have the name of an existing branch"))
783 _("a bookmark cannot have the name of an existing branch"))
784 if rev:
784 if rev:
785 marks[mark] = repo.lookup(rev)
785 marks[mark] = repo.lookup(rev)
786 else:
786 else:
787 marks[mark] = repo.changectx('.').node()
787 marks[mark] = repo.changectx('.').node()
788 if not inactive and repo.changectx('.').node() == marks[mark]:
788 if not inactive and repo.changectx('.').node() == marks[mark]:
789 bookmarks.setcurrent(repo, mark)
789 bookmarks.setcurrent(repo, mark)
790 bookmarks.write(repo)
790 bookmarks.write(repo)
791 return
791 return
792
792
793 if mark is None:
793 if mark is None:
794 if rev:
794 if rev:
795 raise util.Abort(_("bookmark name required"))
795 raise util.Abort(_("bookmark name required"))
796 if len(marks) == 0:
796 if len(marks) == 0:
797 ui.status(_("no bookmarks set\n"))
797 ui.status(_("no bookmarks set\n"))
798 else:
798 else:
799 for bmark, n in sorted(marks.iteritems()):
799 for bmark, n in sorted(marks.iteritems()):
800 current = repo._bookmarkcurrent
800 current = repo._bookmarkcurrent
801 if bmark == current and n == cur:
801 if bmark == current and n == cur:
802 prefix, label = '*', 'bookmarks.current'
802 prefix, label = '*', 'bookmarks.current'
803 else:
803 else:
804 prefix, label = ' ', ''
804 prefix, label = ' ', ''
805
805
806 if ui.quiet:
806 if ui.quiet:
807 ui.write("%s\n" % bmark, label=label)
807 ui.write("%s\n" % bmark, label=label)
808 else:
808 else:
809 ui.write(" %s %-25s %d:%s\n" % (
809 ui.write(" %s %-25s %d:%s\n" % (
810 prefix, bmark, repo.changelog.rev(n), hexfn(n)),
810 prefix, bmark, repo.changelog.rev(n), hexfn(n)),
811 label=label)
811 label=label)
812 return
812 return
813
813
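# Illustration only: the non-quiet listing in bookmark() prints one row per
# bookmark with a marker column, a name padded to 25 columns and the
# revision. A small sketch of that formatting, with a made-up helper name:
def _bookmarkrow(prefix, name, rev, shorthash):
    # prefix is '*' for the bookmark that is current on the working
    # directory parent, a space otherwise
    return " %s %-25s %d:%s" % (prefix, name, rev, shorthash)

# e.g. _bookmarkrow('*', 'feature-x', 42, '1f0dee641bb7')
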
814 @command('branch',
814 @command('branch',
815 [('f', 'force', None,
815 [('f', 'force', None,
816 _('set branch name even if it shadows an existing branch')),
816 _('set branch name even if it shadows an existing branch')),
817 ('C', 'clean', None, _('reset branch name to parent branch name'))],
817 ('C', 'clean', None, _('reset branch name to parent branch name'))],
818 _('[-fC] [NAME]'))
818 _('[-fC] [NAME]'))
819 def branch(ui, repo, label=None, **opts):
819 def branch(ui, repo, label=None, **opts):
820 """set or show the current branch name
820 """set or show the current branch name
821
821
822 With no argument, show the current branch name. With one argument,
822 With no argument, show the current branch name. With one argument,
823 set the working directory branch name (the branch will not exist
823 set the working directory branch name (the branch will not exist
824 in the repository until the next commit). Standard practice
824 in the repository until the next commit). Standard practice
825 recommends that primary development take place on the 'default'
825 recommends that primary development take place on the 'default'
826 branch.
826 branch.
827
827
828 Unless -f/--force is specified, branch will not let you set a
828 Unless -f/--force is specified, branch will not let you set a
829 branch name that already exists, even if it's inactive.
829 branch name that already exists, even if it's inactive.
830
830
831 Use -C/--clean to reset the working directory branch to that of
831 Use -C/--clean to reset the working directory branch to that of
832 the parent of the working directory, negating a previous branch
832 the parent of the working directory, negating a previous branch
833 change.
833 change.
834
834
835 Use the command :hg:`update` to switch to an existing branch. Use
835 Use the command :hg:`update` to switch to an existing branch. Use
836 :hg:`commit --close-branch` to mark this branch as closed.
836 :hg:`commit --close-branch` to mark this branch as closed.
837
837
838 .. note::
838 .. note::
839 Branch names are permanent. Use :hg:`bookmark` to create a
839 Branch names are permanent. Use :hg:`bookmark` to create a
840 light-weight bookmark instead. See :hg:`help glossary` for more
840 light-weight bookmark instead. See :hg:`help glossary` for more
841 information about named branches and bookmarks.
841 information about named branches and bookmarks.
842
842
843 Returns 0 on success.
843 Returns 0 on success.
844 """
844 """
845
845
846 if opts.get('clean'):
846 if opts.get('clean'):
847 label = repo[None].p1().branch()
847 label = repo[None].p1().branch()
848 repo.dirstate.setbranch(label)
848 repo.dirstate.setbranch(label)
849 ui.status(_('reset working directory to branch %s\n') % label)
849 ui.status(_('reset working directory to branch %s\n') % label)
850 elif label:
850 elif label:
851 if not opts.get('force') and label in repo.branchtags():
851 if not opts.get('force') and label in repo.branchtags():
852 if label not in [p.branch() for p in repo.parents()]:
852 if label not in [p.branch() for p in repo.parents()]:
853 raise util.Abort(_('a branch of the same name already exists'),
853 raise util.Abort(_('a branch of the same name already exists'),
854 # i18n: "it" refers to an existing branch
854 # i18n: "it" refers to an existing branch
855 hint=_("use 'hg update' to switch to it"))
855 hint=_("use 'hg update' to switch to it"))
856 repo.dirstate.setbranch(label)
856 repo.dirstate.setbranch(label)
857 ui.status(_('marked working directory as branch %s\n') % label)
857 ui.status(_('marked working directory as branch %s\n') % label)
858 else:
858 else:
859 ui.write("%s\n" % repo.dirstate.branch())
859 ui.write("%s\n" % repo.dirstate.branch())
860
860
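# Illustration only: branch() refuses to reuse an existing branch name unless
# --force is given or a parent of the working directory is already on that
# branch. A hedged sketch of that rule over plain containers; the helper name
# is made up for this example.
def _branchnameallowed(label, existingbranches, parentbranches, force=False):
    if force or label not in existingbranches:
        return True
    return label in parentbranches
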
861 @command('branches',
861 @command('branches',
862 [('a', 'active', False, _('show only branches that have unmerged heads')),
862 [('a', 'active', False, _('show only branches that have unmerged heads')),
863 ('c', 'closed', False, _('show normal and closed branches'))],
863 ('c', 'closed', False, _('show normal and closed branches'))],
864 _('[-ac]'))
864 _('[-ac]'))
865 def branches(ui, repo, active=False, closed=False):
865 def branches(ui, repo, active=False, closed=False):
866 """list repository named branches
866 """list repository named branches
867
867
868 List the repository's named branches, indicating which ones are
868 List the repository's named branches, indicating which ones are
869 inactive. If -c/--closed is specified, also list branches which have
869 inactive. If -c/--closed is specified, also list branches which have
870 been marked closed (see :hg:`commit --close-branch`).
870 been marked closed (see :hg:`commit --close-branch`).
871
871
872 If -a/--active is specified, only show active branches. A branch
872 If -a/--active is specified, only show active branches. A branch
873 is considered active if it contains repository heads.
873 is considered active if it contains repository heads.
874
874
875 Use the command :hg:`update` to switch to an existing branch.
875 Use the command :hg:`update` to switch to an existing branch.
876
876
877 Returns 0.
877 Returns 0.
878 """
878 """
879
879
880 hexfunc = ui.debugflag and hex or short
880 hexfunc = ui.debugflag and hex or short
881 activebranches = [repo[n].branch() for n in repo.heads()]
881 activebranches = [repo[n].branch() for n in repo.heads()]
882 def testactive(tag, node):
882 def testactive(tag, node):
883 realhead = tag in activebranches
883 realhead = tag in activebranches
884 open = node in repo.branchheads(tag, closed=False)
884 open = node in repo.branchheads(tag, closed=False)
885 return realhead and open
885 return realhead and open
886 branches = sorted([(testactive(tag, node), repo.changelog.rev(node), tag)
886 branches = sorted([(testactive(tag, node), repo.changelog.rev(node), tag)
887 for tag, node in repo.branchtags().items()],
887 for tag, node in repo.branchtags().items()],
888 reverse=True)
888 reverse=True)
889
889
890 for isactive, node, tag in branches:
890 for isactive, node, tag in branches:
891 if (not active) or isactive:
891 if (not active) or isactive:
892 if ui.quiet:
892 if ui.quiet:
893 ui.write("%s\n" % tag)
893 ui.write("%s\n" % tag)
894 else:
894 else:
895 hn = repo.lookup(node)
895 hn = repo.lookup(node)
896 if isactive:
896 if isactive:
897 label = 'branches.active'
897 label = 'branches.active'
898 notice = ''
898 notice = ''
899 elif hn not in repo.branchheads(tag, closed=False):
899 elif hn not in repo.branchheads(tag, closed=False):
900 if not closed:
900 if not closed:
901 continue
901 continue
902 label = 'branches.closed'
902 label = 'branches.closed'
903 notice = _(' (closed)')
903 notice = _(' (closed)')
904 else:
904 else:
905 label = 'branches.inactive'
905 label = 'branches.inactive'
906 notice = _(' (inactive)')
906 notice = _(' (inactive)')
907 if tag == repo.dirstate.branch():
907 if tag == repo.dirstate.branch():
908 label = 'branches.current'
908 label = 'branches.current'
909 rev = str(node).rjust(31 - encoding.colwidth(tag))
909 rev = str(node).rjust(31 - encoding.colwidth(tag))
910 rev = ui.label('%s:%s' % (rev, hexfunc(hn)), 'log.changeset')
910 rev = ui.label('%s:%s' % (rev, hexfunc(hn)), 'log.changeset')
911 tag = ui.label(tag, label)
911 tag = ui.label(tag, label)
912 ui.write("%s %s%s\n" % (tag, rev, notice))
912 ui.write("%s %s%s\n" % (tag, rev, notice))
913
913
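# Illustration only: branches() builds (isactive, rev, tag) tuples and sorts
# them in reverse, so active branches come first and, within each group,
# higher revisions come first. A trivial sketch of that ordering:
def _orderbranches(rows):
    # rows are (isactive, rev, tag) tuples as built in branches() above
    return sorted(rows, reverse=True)

# e.g. _orderbranches([(False, 3, 'old'), (True, 7, 'default')])
# -> [(True, 7, 'default'), (False, 3, 'old')]
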
914 @command('bundle',
914 @command('bundle',
915 [('f', 'force', None, _('run even when the destination is unrelated')),
915 [('f', 'force', None, _('run even when the destination is unrelated')),
916 ('r', 'rev', [], _('a changeset intended to be added to the destination'),
916 ('r', 'rev', [], _('a changeset intended to be added to the destination'),
917 _('REV')),
917 _('REV')),
918 ('b', 'branch', [], _('a specific branch you would like to bundle'),
918 ('b', 'branch', [], _('a specific branch you would like to bundle'),
919 _('BRANCH')),
919 _('BRANCH')),
920 ('', 'base', [],
920 ('', 'base', [],
921 _('a base changeset assumed to be available at the destination'),
921 _('a base changeset assumed to be available at the destination'),
922 _('REV')),
922 _('REV')),
923 ('a', 'all', None, _('bundle all changesets in the repository')),
923 ('a', 'all', None, _('bundle all changesets in the repository')),
924 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE')),
924 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE')),
925 ] + remoteopts,
925 ] + remoteopts,
926 _('[-f] [-t TYPE] [-a] [-r REV]... [--base REV]... FILE [DEST]'))
926 _('[-f] [-t TYPE] [-a] [-r REV]... [--base REV]... FILE [DEST]'))
927 def bundle(ui, repo, fname, dest=None, **opts):
927 def bundle(ui, repo, fname, dest=None, **opts):
928 """create a changegroup file
928 """create a changegroup file
929
929
930 Generate a compressed changegroup file collecting changesets not
930 Generate a compressed changegroup file collecting changesets not
931 known to be in another repository.
931 known to be in another repository.
932
932
933 If you omit the destination repository, then hg assumes the
933 If you omit the destination repository, then hg assumes the
934 destination will have all the nodes you specify with --base
934 destination will have all the nodes you specify with --base
935 parameters. To create a bundle containing all changesets, use
935 parameters. To create a bundle containing all changesets, use
936 -a/--all (or --base null).
936 -a/--all (or --base null).
937
937
938 You can change compression method with the -t/--type option.
938 You can change compression method with the -t/--type option.
939 The available compression methods are: none, bzip2, and
939 The available compression methods are: none, bzip2, and
940 gzip (by default, bundles are compressed using bzip2).
940 gzip (by default, bundles are compressed using bzip2).
941
941
942 The bundle file can then be transferred using conventional means
942 The bundle file can then be transferred using conventional means
943 and applied to another repository with the unbundle or pull
943 and applied to another repository with the unbundle or pull
944 command. This is useful when direct push and pull are not
944 command. This is useful when direct push and pull are not
945 available or when exporting an entire repository is undesirable.
945 available or when exporting an entire repository is undesirable.
946
946
947 Applying bundles preserves all changeset contents including
947 Applying bundles preserves all changeset contents including
948 permissions, copy/rename information, and revision history.
948 permissions, copy/rename information, and revision history.
949
949
950 Returns 0 on success, 1 if no changes found.
950 Returns 0 on success, 1 if no changes found.
951 """
951 """
952 revs = None
952 revs = None
953 if 'rev' in opts:
953 if 'rev' in opts:
954 revs = scmutil.revrange(repo, opts['rev'])
954 revs = scmutil.revrange(repo, opts['rev'])
955
955
956 if opts.get('all'):
956 if opts.get('all'):
957 base = ['null']
957 base = ['null']
958 else:
958 else:
959 base = scmutil.revrange(repo, opts.get('base'))
959 base = scmutil.revrange(repo, opts.get('base'))
960 if base:
960 if base:
961 if dest:
961 if dest:
962 raise util.Abort(_("--base is incompatible with specifying "
962 raise util.Abort(_("--base is incompatible with specifying "
963 "a destination"))
963 "a destination"))
964 common = [repo.lookup(rev) for rev in base]
964 common = [repo.lookup(rev) for rev in base]
965 heads = revs and map(repo.lookup, revs) or revs
965 heads = revs and map(repo.lookup, revs) or revs
966 else:
966 else:
967 dest = ui.expandpath(dest or 'default-push', dest or 'default')
967 dest = ui.expandpath(dest or 'default-push', dest or 'default')
968 dest, branches = hg.parseurl(dest, opts.get('branch'))
968 dest, branches = hg.parseurl(dest, opts.get('branch'))
969 other = hg.peer(repo, opts, dest)
969 other = hg.peer(repo, opts, dest)
970 revs, checkout = hg.addbranchrevs(repo, other, branches, revs)
970 revs, checkout = hg.addbranchrevs(repo, other, branches, revs)
971 heads = revs and map(repo.lookup, revs) or revs
971 heads = revs and map(repo.lookup, revs) or revs
972 common, outheads = discovery.findcommonoutgoing(repo, other,
972 common, outheads = discovery.findcommonoutgoing(repo, other,
973 onlyheads=heads,
973 onlyheads=heads,
974 force=opts.get('force'))
974 force=opts.get('force'))
975
975
976 cg = repo.getbundle('bundle', common=common, heads=heads)
976 cg = repo.getbundle('bundle', common=common, heads=heads)
977 if not cg:
977 if not cg:
978 ui.status(_("no changes found\n"))
978 ui.status(_("no changes found\n"))
979 return 1
979 return 1
980
980
981 bundletype = opts.get('type', 'bzip2').lower()
981 bundletype = opts.get('type', 'bzip2').lower()
982 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
982 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
983 bundletype = btypes.get(bundletype)
983 bundletype = btypes.get(bundletype)
984 if bundletype not in changegroup.bundletypes:
984 if bundletype not in changegroup.bundletypes:
985 raise util.Abort(_('unknown bundle type specified with --type'))
985 raise util.Abort(_('unknown bundle type specified with --type'))
986
986
987 changegroup.writebundle(cg, fname, bundletype)
987 changegroup.writebundle(cg, fname, bundletype)
988
988
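# Illustration only: bundle() maps the user-facing --type names onto internal
# bundle headers and rejects anything else. A minimal sketch of that lookup,
# with a made-up helper name:
def _bundleheader(name):
    btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
    header = btypes.get(name.lower())
    if header is None:
        raise ValueError('unknown bundle type specified with --type')
    return header
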
989 @command('cat',
989 @command('cat',
990 [('o', 'output', '',
990 [('o', 'output', '',
991 _('print output to file with formatted name'), _('FORMAT')),
991 _('print output to file with formatted name'), _('FORMAT')),
992 ('r', 'rev', '', _('print the given revision'), _('REV')),
992 ('r', 'rev', '', _('print the given revision'), _('REV')),
993 ('', 'decode', None, _('apply any matching decode filter')),
993 ('', 'decode', None, _('apply any matching decode filter')),
994 ] + walkopts,
994 ] + walkopts,
995 _('[OPTION]... FILE...'))
995 _('[OPTION]... FILE...'))
996 def cat(ui, repo, file1, *pats, **opts):
996 def cat(ui, repo, file1, *pats, **opts):
997 """output the current or given revision of files
997 """output the current or given revision of files
998
998
999 Print the specified files as they were at the given revision. If
999 Print the specified files as they were at the given revision. If
1000 no revision is given, the parent of the working directory is used,
1000 no revision is given, the parent of the working directory is used,
1001 or tip if no revision is checked out.
1001 or tip if no revision is checked out.
1002
1002
1003 Output may be to a file, in which case the name of the file is
1003 Output may be to a file, in which case the name of the file is
1004 given using a format string. The formatting rules are the same as
1004 given using a format string. The formatting rules are the same as
1005 for the export command, with the following additions:
1005 for the export command, with the following additions:
1006
1006
1007 :``%s``: basename of file being printed
1007 :``%s``: basename of file being printed
1008 :``%d``: dirname of file being printed, or '.' if in repository root
1008 :``%d``: dirname of file being printed, or '.' if in repository root
1009 :``%p``: root-relative path name of file being printed
1009 :``%p``: root-relative path name of file being printed
1010
1010
1011 Returns 0 on success.
1011 Returns 0 on success.
1012 """
1012 """
1013 ctx = scmutil.revsingle(repo, opts.get('rev'))
1013 ctx = scmutil.revsingle(repo, opts.get('rev'))
1014 err = 1
1014 err = 1
1015 m = scmutil.match(ctx, (file1,) + pats, opts)
1015 m = scmutil.match(ctx, (file1,) + pats, opts)
1016 for abs in ctx.walk(m):
1016 for abs in ctx.walk(m):
1017 fp = cmdutil.makefileobj(repo, opts.get('output'), ctx.node(),
1017 fp = cmdutil.makefileobj(repo, opts.get('output'), ctx.node(),
1018 pathname=abs)
1018 pathname=abs)
1019 data = ctx[abs].data()
1019 data = ctx[abs].data()
1020 if opts.get('decode'):
1020 if opts.get('decode'):
1021 data = repo.wwritedata(abs, data)
1021 data = repo.wwritedata(abs, data)
1022 fp.write(data)
1022 fp.write(data)
1023 fp.close()
1023 fp.close()
1024 err = 0
1024 err = 0
1025 return err
1025 return err
1026
1026
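# Illustration only: cat()'s -o FORMAT names are expanded by
# cmdutil.makefileobj; this hypothetical helper merely shows what the three
# additions documented above (%s, %d, %p) stand for.
import posixpath

def _expandcatname(fmt, repopath):
    basename = posixpath.basename(repopath)
    dirname = posixpath.dirname(repopath) or '.'
    return (fmt.replace('%s', basename)
               .replace('%d', dirname)
               .replace('%p', repopath))

# e.g. _expandcatname('out/%d/%s', 'src/module.py') -> 'out/src/module.py'
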
1027 @command('^clone',
1027 @command('^clone',
1028 [('U', 'noupdate', None,
1028 [('U', 'noupdate', None,
1029 _('the clone will include an empty working copy (only a repository)')),
1029 _('the clone will include an empty working copy (only a repository)')),
1030 ('u', 'updaterev', '', _('revision, tag or branch to check out'), _('REV')),
1030 ('u', 'updaterev', '', _('revision, tag or branch to check out'), _('REV')),
1031 ('r', 'rev', [], _('include the specified changeset'), _('REV')),
1031 ('r', 'rev', [], _('include the specified changeset'), _('REV')),
1032 ('b', 'branch', [], _('clone only the specified branch'), _('BRANCH')),
1032 ('b', 'branch', [], _('clone only the specified branch'), _('BRANCH')),
1033 ('', 'pull', None, _('use pull protocol to copy metadata')),
1033 ('', 'pull', None, _('use pull protocol to copy metadata')),
1034 ('', 'uncompressed', None, _('use uncompressed transfer (fast over LAN)')),
1034 ('', 'uncompressed', None, _('use uncompressed transfer (fast over LAN)')),
1035 ] + remoteopts,
1035 ] + remoteopts,
1036 _('[OPTION]... SOURCE [DEST]'))
1036 _('[OPTION]... SOURCE [DEST]'))
1037 def clone(ui, source, dest=None, **opts):
1037 def clone(ui, source, dest=None, **opts):
1038 """make a copy of an existing repository
1038 """make a copy of an existing repository
1039
1039
1040 Create a copy of an existing repository in a new directory.
1040 Create a copy of an existing repository in a new directory.
1041
1041
1042 If no destination directory name is specified, it defaults to the
1042 If no destination directory name is specified, it defaults to the
1043 basename of the source.
1043 basename of the source.
1044
1044
1045 The location of the source is added to the new repository's
1045 The location of the source is added to the new repository's
1046 ``.hg/hgrc`` file, as the default to be used for future pulls.
1046 ``.hg/hgrc`` file, as the default to be used for future pulls.
1047
1047
1048 Only local paths and ``ssh://`` URLs are supported as
1048 Only local paths and ``ssh://`` URLs are supported as
1049 destinations. For ``ssh://`` destinations, no working directory or
1049 destinations. For ``ssh://`` destinations, no working directory or
1050 ``.hg/hgrc`` will be created on the remote side.
1050 ``.hg/hgrc`` will be created on the remote side.
1051
1051
1052 To pull only a subset of changesets, specify one or more revision
1053 identifiers with -r/--rev or branches with -b/--branch. The
1054 resulting clone will contain only the specified changesets and
1054 resulting clone will contain only the specified changesets and
1055 their ancestors. These options (or 'clone src#rev dest') imply
1055 their ancestors. These options (or 'clone src#rev dest') imply
1056 --pull, even for local source repositories. Note that specifying a
1056 --pull, even for local source repositories. Note that specifying a
1057 tag will include the tagged changeset but not the changeset
1057 tag will include the tagged changeset but not the changeset
1058 containing the tag.
1058 containing the tag.
1059
1059
1060 To check out a particular version, use -u/--update, or
1060 To check out a particular version, use -u/--update, or
1061 -U/--noupdate to create a clone with no working directory.
1061 -U/--noupdate to create a clone with no working directory.
1062
1062
1063 .. container:: verbose
1063 .. container:: verbose
1064
1064
1065 For efficiency, hardlinks are used for cloning whenever the
1065 For efficiency, hardlinks are used for cloning whenever the
1066 source and destination are on the same filesystem (note this
1066 source and destination are on the same filesystem (note this
1067 applies only to the repository data, not to the working
1067 applies only to the repository data, not to the working
1068 directory). Some filesystems, such as AFS, implement hardlinking
1068 directory). Some filesystems, such as AFS, implement hardlinking
1069 incorrectly, but do not report errors. In these cases, use the
1069 incorrectly, but do not report errors. In these cases, use the
1070 --pull option to avoid hardlinking.
1070 --pull option to avoid hardlinking.
1071
1071
1072 In some cases, you can clone repositories and the working
1072 In some cases, you can clone repositories and the working
1073 directory using full hardlinks with ::
1073 directory using full hardlinks with ::
1074
1074
1075 $ cp -al REPO REPOCLONE
1075 $ cp -al REPO REPOCLONE
1076
1076
1077 This is the fastest way to clone, but it is not always safe. The
1077 This is the fastest way to clone, but it is not always safe. The
1078 operation is not atomic (making sure REPO is not modified during
1078 operation is not atomic (making sure REPO is not modified during
1079 the operation is up to you) and you have to make sure your
1079 the operation is up to you) and you have to make sure your
1080 editor breaks hardlinks (Emacs and most Linux Kernel tools do
1080 editor breaks hardlinks (Emacs and most Linux Kernel tools do
1081 so). Also, this is not compatible with certain extensions that
1081 so). Also, this is not compatible with certain extensions that
1082 place their metadata under the .hg directory, such as mq.
1082 place their metadata under the .hg directory, such as mq.
1083
1083
1084 Mercurial will update the working directory to the first applicable
1084 Mercurial will update the working directory to the first applicable
1085 revision from this list:
1085 revision from this list:
1086
1086
1087 a) null if -U or the source repository has no changesets
1087 a) null if -U or the source repository has no changesets
1088 b) if -u . and the source repository is local, the first parent of
1088 b) if -u . and the source repository is local, the first parent of
1089 the source repository's working directory
1089 the source repository's working directory
1090 c) the changeset specified with -u (if a branch name, this means the
1090 c) the changeset specified with -u (if a branch name, this means the
1091 latest head of that branch)
1091 latest head of that branch)
1092 d) the changeset specified with -r
1092 d) the changeset specified with -r
1093 e) the tipmost head specified with -b
1093 e) the tipmost head specified with -b
1094 f) the tipmost head specified with the url#branch source syntax
1094 f) the tipmost head specified with the url#branch source syntax
1095 g) the tipmost head of the default branch
1095 g) the tipmost head of the default branch
1096 h) tip
1096 h) tip
1097
1097
1098 Examples:
1098 Examples:
1099
1099
1100 - clone a remote repository to a new directory named hg/::
1100 - clone a remote repository to a new directory named hg/::
1101
1101
1102 hg clone http://selenic.com/hg
1102 hg clone http://selenic.com/hg
1103
1103
1104 - create a lightweight local clone::
1104 - create a lightweight local clone::
1105
1105
1106 hg clone project/ project-feature/
1106 hg clone project/ project-feature/
1107
1107
1108 - clone from an absolute path on an ssh server (note double-slash)::
1108 - clone from an absolute path on an ssh server (note double-slash)::
1109
1109
1110 hg clone ssh://user@server//home/projects/alpha/
1110 hg clone ssh://user@server//home/projects/alpha/
1111
1111
1112 - do a high-speed clone over a LAN while checking out a
1112 - do a high-speed clone over a LAN while checking out a
1113 specified version::
1113 specified version::
1114
1114
1115 hg clone --uncompressed http://server/repo -u 1.5
1115 hg clone --uncompressed http://server/repo -u 1.5
1116
1116
1117 - create a repository without changesets after a particular revision::
1117 - create a repository without changesets after a particular revision::
1118
1118
1119 hg clone -r 04e544 experimental/ good/
1119 hg clone -r 04e544 experimental/ good/
1120
1120
1121 - clone (and track) a particular named branch::
1121 - clone (and track) a particular named branch::
1122
1122
1123 hg clone http://selenic.com/hg#stable
1123 hg clone http://selenic.com/hg#stable
1124
1124
1125 See :hg:`help urls` for details on specifying URLs.
1125 See :hg:`help urls` for details on specifying URLs.
1126
1126
1127 Returns 0 on success.
1127 Returns 0 on success.
1128 """
1128 """
1129 if opts.get('noupdate') and opts.get('updaterev'):
1129 if opts.get('noupdate') and opts.get('updaterev'):
1130 raise util.Abort(_("cannot specify both --noupdate and --updaterev"))
1130 raise util.Abort(_("cannot specify both --noupdate and --updaterev"))
1131
1131
1132 r = hg.clone(ui, opts, source, dest,
1132 r = hg.clone(ui, opts, source, dest,
1133 pull=opts.get('pull'),
1133 pull=opts.get('pull'),
1134 stream=opts.get('uncompressed'),
1134 stream=opts.get('uncompressed'),
1135 rev=opts.get('rev'),
1135 rev=opts.get('rev'),
1136 update=opts.get('updaterev') or not opts.get('noupdate'),
1136 update=opts.get('updaterev') or not opts.get('noupdate'),
1137 branch=opts.get('branch'))
1137 branch=opts.get('branch'))
1138
1138
1139 return r is None
1139 return r is None
1140
1140
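# Illustration only: a hypothetical, simplified restatement of the priority
# list (a)-(h) in clone()'s help text. The real selection happens inside
# hg.clone(); the helper name and arguments are made up for this sketch.
def _cloneupdaterev(noupdate, updaterev, rev, branch, urlbranch, haschangesets):
    if noupdate or not haschangesets:
        return 'null'                          # (a)
    for candidate in (updaterev,               # (b)/(c)
                      rev and rev[-1],         # (d)
                      branch and branch[-1],   # (e)
                      urlbranch):              # (f)
        if candidate:
            return candidate
    return 'default'                           # (g), falling back to tip (h)
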
1141 @command('^commit|ci',
1141 @command('^commit|ci',
1142 [('A', 'addremove', None,
1142 [('A', 'addremove', None,
1143 _('mark new/missing files as added/removed before committing')),
1143 _('mark new/missing files as added/removed before committing')),
1144 ('', 'close-branch', None,
1144 ('', 'close-branch', None,
1145 _('mark a branch as closed, hiding it from the branch list')),
1145 _('mark a branch as closed, hiding it from the branch list')),
1146 ] + walkopts + commitopts + commitopts2 + subrepoopts,
1146 ] + walkopts + commitopts + commitopts2 + subrepoopts,
1147 _('[OPTION]... [FILE]...'))
1147 _('[OPTION]... [FILE]...'))
1148 def commit(ui, repo, *pats, **opts):
1148 def commit(ui, repo, *pats, **opts):
1149 """commit the specified files or all outstanding changes
1149 """commit the specified files or all outstanding changes
1150
1150
1151 Commit changes to the given files into the repository. Unlike a
1152 centralized SCM, this operation is local. See :hg:`push` for a way
1153 to actively distribute your changes.
1154
1154
1155 If a list of files is omitted, all changes reported by :hg:`status`
1155 If a list of files is omitted, all changes reported by :hg:`status`
1156 will be committed.
1156 will be committed.
1157
1157
1158 If you are committing the result of a merge, do not provide any
1158 If you are committing the result of a merge, do not provide any
1159 filenames or -I/-X filters.
1159 filenames or -I/-X filters.
1160
1160
1161 If no commit message is specified, Mercurial starts your
1161 If no commit message is specified, Mercurial starts your
1162 configured editor where you can enter a message. In case your
1162 configured editor where you can enter a message. In case your
1163 commit fails, you will find a backup of your message in
1163 commit fails, you will find a backup of your message in
1164 ``.hg/last-message.txt``.
1164 ``.hg/last-message.txt``.
1165
1165
1166 See :hg:`help dates` for a list of formats valid for -d/--date.
1166 See :hg:`help dates` for a list of formats valid for -d/--date.
1167
1167
1168 Returns 0 on success, 1 if nothing changed.
1168 Returns 0 on success, 1 if nothing changed.
1169 """
1169 """
1170 if opts.get('subrepos'):
1170 if opts.get('subrepos'):
1171 # Let --subrepos on the command line override config setting.
1172 ui.setconfig('ui', 'commitsubrepos', True)
1172 ui.setconfig('ui', 'commitsubrepos', True)
1173
1173
1174 extra = {}
1174 extra = {}
1175 if opts.get('close_branch'):
1175 if opts.get('close_branch'):
1176 if repo['.'].node() not in repo.branchheads():
1176 if repo['.'].node() not in repo.branchheads():
1177 # The topo heads set is included in the branch heads set of the
1177 # The topo heads set is included in the branch heads set of the
1178 # current branch, so it's sufficient to test branchheads
1178 # current branch, so it's sufficient to test branchheads
1179 raise util.Abort(_('can only close branch heads'))
1179 raise util.Abort(_('can only close branch heads'))
1180 extra['close'] = 1
1180 extra['close'] = 1
1181 e = cmdutil.commiteditor
1181 e = cmdutil.commiteditor
1182 if opts.get('force_editor'):
1182 if opts.get('force_editor'):
1183 e = cmdutil.commitforceeditor
1183 e = cmdutil.commitforceeditor
1184
1184
1185 def commitfunc(ui, repo, message, match, opts):
1185 def commitfunc(ui, repo, message, match, opts):
1186 return repo.commit(message, opts.get('user'), opts.get('date'), match,
1186 return repo.commit(message, opts.get('user'), opts.get('date'), match,
1187 editor=e, extra=extra)
1187 editor=e, extra=extra)
1188
1188
1189 branch = repo[None].branch()
1189 branch = repo[None].branch()
1190 bheads = repo.branchheads(branch)
1190 bheads = repo.branchheads(branch)
1191
1191
1192 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
1192 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
1193 if not node:
1193 if not node:
1194 stat = repo.status(match=scmutil.match(repo[None], pats, opts))
1194 stat = repo.status(match=scmutil.match(repo[None], pats, opts))
1195 if stat[3]:
1195 if stat[3]:
1196 ui.status(_("nothing changed (%d missing files, see 'hg status')\n")
1196 ui.status(_("nothing changed (%d missing files, see 'hg status')\n")
1197 % len(stat[3]))
1197 % len(stat[3]))
1198 else:
1198 else:
1199 ui.status(_("nothing changed\n"))
1199 ui.status(_("nothing changed\n"))
1200 return 1
1200 return 1
1201
1201
1202 ctx = repo[node]
1202 ctx = repo[node]
1203 parents = ctx.parents()
1203 parents = ctx.parents()
1204
1204
1205 if (bheads and node not in bheads and not
1205 if (bheads and node not in bheads and not
1206 [x for x in parents if x.node() in bheads and x.branch() == branch]):
1206 [x for x in parents if x.node() in bheads and x.branch() == branch]):
1207 ui.status(_('created new head\n'))
1207 ui.status(_('created new head\n'))
1208 # The message is not printed for initial roots. For the other
1208 # The message is not printed for initial roots. For the other
1209 # changesets, it is printed in the following situations:
1209 # changesets, it is printed in the following situations:
1210 #
1210 #
1211 # Par column: for the 2 parents with ...
1211 # Par column: for the 2 parents with ...
1212 # N: null or no parent
1212 # N: null or no parent
1213 # B: parent is on another named branch
1213 # B: parent is on another named branch
1214 # C: parent is a regular non head changeset
1214 # C: parent is a regular non head changeset
1215 # H: parent was a branch head of the current branch
1215 # H: parent was a branch head of the current branch
1216 # Msg column: whether we print "created new head" message
1216 # Msg column: whether we print "created new head" message
1217 # In the following, it is assumed that there already exists some
1217 # In the following, it is assumed that there already exists some
1218 # initial branch heads of the current branch, otherwise nothing is
1218 # initial branch heads of the current branch, otherwise nothing is
1219 # printed anyway.
1219 # printed anyway.
1220 #
1220 #
1221 # Par Msg Comment
1221 # Par Msg Comment
1222 # NN y additional topo root
1222 # NN y additional topo root
1223 #
1223 #
1224 # BN y additional branch root
1224 # BN y additional branch root
1225 # CN y additional topo head
1225 # CN y additional topo head
1226 # HN n usual case
1226 # HN n usual case
1227 #
1227 #
1228 # BB y weird additional branch root
1228 # BB y weird additional branch root
1229 # CB y branch merge
1229 # CB y branch merge
1230 # HB n merge with named branch
1230 # HB n merge with named branch
1231 #
1231 #
1232 # CC y additional head from merge
1232 # CC y additional head from merge
1233 # CH n merge with a head
1233 # CH n merge with a head
1234 #
1234 #
1235 # HH n head merge: head count decreases
1235 # HH n head merge: head count decreases
1236
1236
1237 if not opts.get('close_branch'):
1237 if not opts.get('close_branch'):
1238 for r in parents:
1238 for r in parents:
1239 if r.extra().get('close') and r.branch() == branch:
1239 if r.extra().get('close') and r.branch() == branch:
1240 ui.status(_('reopening closed branch head %d\n') % r)
1240 ui.status(_('reopening closed branch head %d\n') % r)
1241
1241
1242 if ui.debugflag:
1242 if ui.debugflag:
1243 ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
1243 ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
1244 elif ui.verbose:
1244 elif ui.verbose:
1245 ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
1245 ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
1246
1246
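# Illustration only: the 'created new head' message above is guarded by a
# small predicate. A sketch over plain data, where parents is a list of
# (node, branch) pairs and bheads holds the branch heads before the commit:
def _createsnewhead(node, branch, bheads, parents):
    if not bheads or node in bheads:
        return False
    return not [p for p, b in parents if p in bheads and b == branch]
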
1247 @command('copy|cp',
1247 @command('copy|cp',
1248 [('A', 'after', None, _('record a copy that has already occurred')),
1248 [('A', 'after', None, _('record a copy that has already occurred')),
1249 ('f', 'force', None, _('forcibly copy over an existing managed file')),
1249 ('f', 'force', None, _('forcibly copy over an existing managed file')),
1250 ] + walkopts + dryrunopts,
1250 ] + walkopts + dryrunopts,
1251 _('[OPTION]... [SOURCE]... DEST'))
1251 _('[OPTION]... [SOURCE]... DEST'))
1252 def copy(ui, repo, *pats, **opts):
1252 def copy(ui, repo, *pats, **opts):
1253 """mark files as copied for the next commit
1253 """mark files as copied for the next commit
1254
1254
1255 Mark dest as having copies of source files. If dest is a
1255 Mark dest as having copies of source files. If dest is a
1256 directory, copies are put in that directory. If dest is a file,
1256 directory, copies are put in that directory. If dest is a file,
1257 the source must be a single file.
1257 the source must be a single file.
1258
1258
1259 By default, this command copies the contents of files as they
1259 By default, this command copies the contents of files as they
1260 exist in the working directory. If invoked with -A/--after, the
1260 exist in the working directory. If invoked with -A/--after, the
1261 operation is recorded, but no copying is performed.
1261 operation is recorded, but no copying is performed.
1262
1262
1263 This command takes effect with the next commit. To undo a copy
1263 This command takes effect with the next commit. To undo a copy
1264 before that, see :hg:`revert`.
1264 before that, see :hg:`revert`.
1265
1265
1266 Returns 0 on success, 1 if errors are encountered.
1266 Returns 0 on success, 1 if errors are encountered.
1267 """
1267 """
1268 wlock = repo.wlock(False)
1268 wlock = repo.wlock(False)
1269 try:
1269 try:
1270 return cmdutil.copy(ui, repo, pats, opts)
1270 return cmdutil.copy(ui, repo, pats, opts)
1271 finally:
1271 finally:
1272 wlock.release()
1272 wlock.release()
1273
1273
1274 @command('debugancestor', [], _('[INDEX] REV1 REV2'))
1274 @command('debugancestor', [], _('[INDEX] REV1 REV2'))
1275 def debugancestor(ui, repo, *args):
1275 def debugancestor(ui, repo, *args):
1276 """find the ancestor revision of two revisions in a given index"""
1276 """find the ancestor revision of two revisions in a given index"""
1277 if len(args) == 3:
1277 if len(args) == 3:
1278 index, rev1, rev2 = args
1278 index, rev1, rev2 = args
1279 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), index)
1279 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), index)
1280 lookup = r.lookup
1280 lookup = r.lookup
1281 elif len(args) == 2:
1281 elif len(args) == 2:
1282 if not repo:
1282 if not repo:
1283 raise util.Abort(_("there is no Mercurial repository here "
1283 raise util.Abort(_("there is no Mercurial repository here "
1284 "(.hg not found)"))
1284 "(.hg not found)"))
1285 rev1, rev2 = args
1285 rev1, rev2 = args
1286 r = repo.changelog
1286 r = repo.changelog
1287 lookup = repo.lookup
1287 lookup = repo.lookup
1288 else:
1288 else:
1289 raise util.Abort(_('either two or three arguments required'))
1289 raise util.Abort(_('either two or three arguments required'))
1290 a = r.ancestor(lookup(rev1), lookup(rev2))
1290 a = r.ancestor(lookup(rev1), lookup(rev2))
1291 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1291 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1292
1292
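# Illustration only: 'ancestor' here means a revision reachable from both
# inputs by following parents. A simplified, pure-Python stand-in for the
# revlog ancestor computation, over a {rev: [parent revs]} map:
def _ancestorset(parentmap, rev):
    seen, stack = set(), [rev]
    while stack:
        r = stack.pop()
        if r not in seen:
            seen.add(r)
            stack.extend(parentmap.get(r, []))
    return seen

def _commonancestor(parentmap, rev1, rev2):
    common = _ancestorset(parentmap, rev1) & _ancestorset(parentmap, rev2)
    if common:
        return max(common)
    return None
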
1293 @command('debugbuilddag',
1293 @command('debugbuilddag',
1294 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
1294 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
1295 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
1295 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
1296 ('n', 'new-file', None, _('add new file at each rev'))],
1296 ('n', 'new-file', None, _('add new file at each rev'))],
1297 _('[OPTION]... [TEXT]'))
1297 _('[OPTION]... [TEXT]'))
1298 def debugbuilddag(ui, repo, text=None,
1298 def debugbuilddag(ui, repo, text=None,
1299 mergeable_file=False,
1299 mergeable_file=False,
1300 overwritten_file=False,
1300 overwritten_file=False,
1301 new_file=False):
1301 new_file=False):
1302 """builds a repo with a given DAG from scratch in the current empty repo
1302 """builds a repo with a given DAG from scratch in the current empty repo
1303
1303
1304 The description of the DAG is read from stdin if not given on the
1304 The description of the DAG is read from stdin if not given on the
1305 command line.
1305 command line.
1306
1306
1307 Elements:
1307 Elements:
1308
1308
1309 - "+n" is a linear run of n nodes based on the current default parent
1309 - "+n" is a linear run of n nodes based on the current default parent
1310 - "." is a single node based on the current default parent
1310 - "." is a single node based on the current default parent
1311 - "$" resets the default parent to null (implied at the start);
1311 - "$" resets the default parent to null (implied at the start);
1312 otherwise the default parent is always the last node created
1312 otherwise the default parent is always the last node created
1313 - "<p" sets the default parent to the backref p
1313 - "<p" sets the default parent to the backref p
1314 - "*p" is a fork at parent p, which is a backref
1314 - "*p" is a fork at parent p, which is a backref
1315 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
1315 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
1316 - "/p2" is a merge of the preceding node and p2
1316 - "/p2" is a merge of the preceding node and p2
1317 - ":tag" defines a local tag for the preceding node
1317 - ":tag" defines a local tag for the preceding node
1318 - "@branch" sets the named branch for subsequent nodes
1318 - "@branch" sets the named branch for subsequent nodes
1319 - "#...\\n" is a comment up to the end of the line
1319 - "#...\\n" is a comment up to the end of the line
1320
1320
1321 Whitespace between the above elements is ignored.
1321 Whitespace between the above elements is ignored.
1322
1322
1323 A backref is either
1323 A backref is either
1324
1324
1325 - a number n, which references the node curr-n, where curr is the current
1325 - a number n, which references the node curr-n, where curr is the current
1326 node, or
1326 node, or
1327 - the name of a local tag you placed earlier using ":tag", or
1327 - the name of a local tag you placed earlier using ":tag", or
1328 - empty to denote the default parent.
1328 - empty to denote the default parent.
1329
1329
1330 All string-valued elements are either strictly alphanumeric, or must
1331 be enclosed in double quotes ("..."), with "\\" as escape character.
1332 """
1332 """
1333
1333
1334 if text is None:
1334 if text is None:
1335 ui.status(_("reading DAG from stdin\n"))
1335 ui.status(_("reading DAG from stdin\n"))
1336 text = ui.fin.read()
1336 text = ui.fin.read()
1337
1337
1338 cl = repo.changelog
1338 cl = repo.changelog
1339 if len(cl) > 0:
1339 if len(cl) > 0:
1340 raise util.Abort(_('repository is not empty'))
1340 raise util.Abort(_('repository is not empty'))
1341
1341
1342 # determine number of revs in DAG
1342 # determine number of revs in DAG
1343 total = 0
1343 total = 0
1344 for type, data in dagparser.parsedag(text):
1344 for type, data in dagparser.parsedag(text):
1345 if type == 'n':
1345 if type == 'n':
1346 total += 1
1346 total += 1
1347
1347
1348 if mergeable_file:
1348 if mergeable_file:
1349 linesperrev = 2
1349 linesperrev = 2
1350 # make a file with k lines per rev
1350 # make a file with k lines per rev
1351 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
    initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
    initialmergedlines.append("")

    tags = []

    tr = repo.transaction("builddag")
    try:

        at = -1
        atbranch = 'default'
        nodeids = []
        ui.progress(_('building'), 0, unit=_('revisions'), total=total)
        for type, data in dagparser.parsedag(text):
            if type == 'n':
                ui.note('node %s\n' % str(data))
                id, ps = data

                files = []
                fctxs = {}

                p2 = None
                if mergeable_file:
                    fn = "mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [x[fn].data() for x in pa, p1, p2]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append("")
                    elif at > 0:
                        ml = p1[fn].data().split("\n")
                    else:
                        ml = initialmergedlines
                    ml[id * linesperrev] += " r%i" % id
                    mergedtext = "\n".join(ml)
                    files.append(fn)
                    fctxs[fn] = context.memfilectx(fn, mergedtext)

                if overwritten_file:
                    fn = "of"
                    files.append(fn)
                    fctxs[fn] = context.memfilectx(fn, "r%i\n" % id)

                if new_file:
                    fn = "nf%i" % id
                    files.append(fn)
                    fctxs[fn] = context.memfilectx(fn, "r%i\n" % id)
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith("nf"):
                                files.append(fn)
                                fctxs[fn] = p2[fn]

                def fctxfn(repo, cx, path):
                    return fctxs.get(path)

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
                                    date=(id, 0),
                                    user="debugbuilddag",
                                    extra={'branch': atbranch})
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == 'l':
                id, name = data
                ui.note('tag %s\n' % name)
                tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == 'a':
                ui.note('branch %s\n' % data)
                atbranch = data
            ui.progress(_('building'), id, unit=_('revisions'), total=total)
        tr.close()
    finally:
        ui.progress(_('building'), None)
        tr.release()

    if tags:
        repo.opener.write("localtags", "".join(tags))
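# Note on the loop above: 'n' events become real memctx commits (optionally
# carrying the mergeable "mf" file, the overwritten "of" file and a fresh
# "nf<i>" file per revision), 'l' events collect local tags written out at the
# end, and 'a' events switch the named branch used for subsequent nodes. When
# mergeable_file is set, merge nodes run a genuine three-way merge of "mf"
# through simplemerge, so synthetic histories contain realistic merges.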

@command('debugbundle', [('a', 'all', None, _('show all details'))], _('FILE'))
def debugbundle(ui, bundlepath, all=None, **opts):
    """lists the contents of a bundle"""
    f = url.open(ui, bundlepath)
    try:
        gen = changegroup.readbundle(f, bundlepath)
        if all:
            ui.write("format: id, p1, p2, cset, delta base, len(delta)\n")

            def showchunks(named):
                ui.write("\n%s\n" % named)
                chain = None
                while True:
                    chunkdata = gen.deltachunk(chain)
                    if not chunkdata:
                        break
                    node = chunkdata['node']
                    p1 = chunkdata['p1']
                    p2 = chunkdata['p2']
                    cs = chunkdata['cs']
                    deltabase = chunkdata['deltabase']
                    delta = chunkdata['delta']
                    ui.write("%s %s %s %s %s %s\n" %
                             (hex(node), hex(p1), hex(p2),
                              hex(cs), hex(deltabase), len(delta)))
                    chain = node

            chunkdata = gen.changelogheader()
            showchunks("changelog")
            chunkdata = gen.manifestheader()
            showchunks("manifest")
            while True:
                chunkdata = gen.filelogheader()
                if not chunkdata:
                    break
                fname = chunkdata['filename']
                showchunks(fname)
        else:
            chunkdata = gen.changelogheader()
            chain = None
            while True:
                chunkdata = gen.deltachunk(chain)
                if not chunkdata:
                    break
                node = chunkdata['node']
                ui.write("%s\n" % hex(node))
                chain = node
    finally:
        f.close()
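# Illustrative use of the command above (the bundle file name is a placeholder):
#   hg debugbundle changes.hg          # one changelog node id per chunk
#   hg debugbundle --all changes.hg    # id, p1, p2, cset, delta base, len(delta)
#                                      # for changelog, manifest and filelogs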

@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        error = _(".hg/dirstate inconsistent with current parent's manifest")
        raise util.Abort(error)

@command('debugcommands', [], _('[COMMAND]'))
def debugcommands(ui, cmd='', *args):
    """list all available commands and options"""
    for cmd, vals in sorted(table.iteritems()):
        cmd = cmd.split('|')[0].strip('^')
        opts = ', '.join([i[1] for i in vals[1]])
        ui.write('%s: %s\n' % (cmd, opts))

@command('debugcomplete',
    [('o', 'options', None, _('show the command options'))],
    _('[-o] CMD'))
def debugcomplete(ui, cmd='', **opts):
    """returns the completion list associated with the given command"""

    if opts.get('options'):
        options = []
        otables = [globalopts]
        if cmd:
            aliases, entry = cmdutil.findcmd(cmd, table, False)
            otables.append(entry[1])
        for t in otables:
            for o in t:
                if "(DEPRECATED)" in o[3]:
                    continue
                if o[0]:
                    options.append('-%s' % o[0])
                options.append('--%s' % o[1])
        ui.write("%s\n" % "\n".join(options))
        return

    cmdlist = cmdutil.findpossible(cmd, table)
    if ui.verbose:
        cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
    ui.write("%s\n" % "\n".join(sorted(cmdlist)))
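# For example, "hg debugcomplete d" lists command names starting with "d",
# while "hg debugcomplete --options diff" lists diff's short and long options
# (plus the global ones), skipping anything marked (DEPRECATED).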

@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
    ('b', 'branches', None, _('annotate with branch names')),
    ('', 'dots', None, _('use dots for runs')),
    ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'))
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labelled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        rlog = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_)
        revs = set((int(r) for r in revs))
        def events():
            for r in rlog:
                yield 'n', (r, list(set(p for p in rlog.parentrevs(r) if p != -1)))
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)
        def events():
            b = "default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        yield 'a', newb
                        b = newb
                yield 'n', (r, list(set(p for p in cl.parentrevs(r) if p != -1)))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise util.Abort(_('need repo for changelog dag'))

    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
        ui.write("\n")
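# For example, "hg debugdag -t -b" emits the changelog DAG with local tags as
# labels and branch annotations; passing an index path instead (for instance
# ".hg/store/00manifest.i", shown here only as an illustration) dumps that
# revlog's DAG directly.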

@command('debugdata',
    [('c', 'changelog', False, _('open changelog')),
    ('m', 'manifest', False, _('open manifest'))],
    _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev = None, **opts):
    """dump the contents of a data file revision"""
    if opts.get('changelog') or opts.get('manifest'):
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
    try:
        ui.write(r.revision(r.lookup(rev)))
    except KeyError:
        raise util.Abort(_('invalid revision identifier %s') % rev)

@command('debugdate',
    [('e', 'extended', None, _('try extended date formats'))],
    _('[-e] DATE [RANGE]'))
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        d = util.parsedate(date, util.extendeddateformats)
    else:
        d = util.parsedate(date)
    ui.write("internal: %s %s\n" % d)
    ui.write("standard: %s\n" % util.datestr(d))
    if range:
        m = util.matchdate(range)
        ui.write("match: %s\n" % m(d[0]))
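# Example: "hg debugdate '2011-10-01 14:30'" prints the internal
# (unixtime, offset) pair and its normalized rendering; with -e/--extended the
# looser formats from util.extendeddateformats are also accepted, and an
# optional RANGE argument is matched against the parsed date.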

@command('debugdiscovery',
    [('', 'old', None, _('use old-style discovery')),
    ('', 'nonheads', None,
     _('use old-style discovery with non-heads included')),
    ] + remoteopts,
    _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl), opts.get('branch'))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(12323)

    def doit(localheads, remoteheads):
        if opts.get('old'):
            if localheads:
                raise util.Abort('cannot use localheads with old style discovery')
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                ui.write("unpruned common: %s\n" % " ".join([short(n)
                                                             for n in common]))
                dag = dagutil.revlogdag(repo.changelog)
                all = dag.ancestorset(dag.internalizeall(common))
                common = dag.externalizeall(dag.headsetofconnecteds(all))
        else:
            common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
        common = set(common)
        rheads = set(hds)
        lheads = set(repo.heads())
        ui.write("common heads: %s\n" % " ".join([short(n) for n in common]))
        if lheads <= common:
            ui.write("local is subset\n")
        elif rheads <= common:
            ui.write("remote is subset\n")

    serverlogs = opts.get('serverlog')
    if serverlogs:
        for filename in serverlogs:
            logfile = open(filename, 'r')
            try:
                line = logfile.readline()
                while line:
                    parts = line.strip().split(';')
                    op = parts[1]
                    if op == 'cg':
                        pass
                    elif op == 'cgss':
                        doit(parts[2].split(' '), parts[3].split(' '))
                    elif op == 'unb':
                        doit(parts[3].split(' '), parts[2].split(' '))
                    line = logfile.readline()
            finally:
                logfile.close()

    else:
        remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
                                                 opts.get('remote_head'))
        localrevs = opts.get('local_head')
        doit(localrevs, remoterevs)

@command('debugfileset', [], ('REVSPEC'))
def debugfileset(ui, repo, expr):
    '''parse and apply a fileset specification'''
    if ui.verbose:
        tree = fileset.parse(expr)[0]
        ui.note(tree, "\n")

    for f in fileset.getfileset(repo[None], expr):
        ui.write("%s\n" % f)

@command('debugfsinfo', [], _('[PATH]'))
def debugfsinfo(ui, path = "."):
    """show information detected about current filesystem"""
    util.writefile('.debugfsinfo', '')
    ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
    ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
    ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
                                       and 'yes' or 'no'))
    os.unlink('.debugfsinfo')

@command('debuggetbundle',
    [('H', 'head', [], _('id of head node'), _('ID')),
    ('C', 'common', [], _('id of common node'), _('ID')),
    ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
    _('REPO FILE [-H|-C ID]...'))
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable('getbundle'):
        raise util.Abort("getbundle() not supported by target repository")
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    bundle = repo.getbundle('debug', **args)

    bundletype = opts.get('type', 'bzip2').lower()
    btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
    bundletype = btypes.get(bundletype)
    if bundletype not in changegroup.bundletypes:
        raise util.Abort(_('unknown bundle type specified with --type'))
    changegroup.writebundle(bundle, bundlepath, bundletype)
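# Sketch of an invocation (URL, output file and node id are placeholders, and
# the id must be a full 40-hex string):
#   hg debuggetbundle http://example.com/repo out.hg -H <40-hex-node> -t gzip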

@command('debugignore', [], '')
def debugignore(ui, repo, *values, **opts):
    """display the combined ignore pattern"""
    ignore = repo.dirstate._ignore
    includepat = getattr(ignore, 'includepat', None)
    if includepat is not None:
        ui.write("%s\n" % includepat)
    else:
        raise util.Abort(_("no ignore patterns found"))

@command('debugindex',
    [('c', 'changelog', False, _('open changelog')),
    ('m', 'manifest', False, _('open manifest')),
    ('f', 'format', 0, _('revlog format'), _('FORMAT'))],
    _('[-f FORMAT] -c|-m|FILE'))
def debugindex(ui, repo, file_ = None, **opts):
    """dump the contents of an index file"""
    r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
    format = opts.get('format', 0)
    if format not in (0, 1):
        raise util.Abort(_("unknown format %d") % format)

    generaldelta = r.version & revlog.REVLOGGENERALDELTA
    if generaldelta:
        basehdr = ' delta'
    else:
        basehdr = ' base'

    if format == 0:
        ui.write(" rev offset length " + basehdr + " linkrev"
                 " nodeid p1 p2\n")
    elif format == 1:
        ui.write(" rev flag offset length"
                 " size " + basehdr + " link p1 p2 nodeid\n")

    for i in r:
        node = r.node(i)
        if generaldelta:
            base = r.deltaparent(i)
        else:
            base = r.chainbase(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except:
                pp = [nullid, nullid]
            ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                    i, r.start(i), r.length(i), base, r.linkrev(i),
                    short(node), short(pp[0]), short(pp[1])))
        elif format == 1:
            pr = r.parentrevs(i)
            ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
                    base, r.linkrev(i), pr[0], pr[1], short(node)))
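# "hg debugindex -c" walks the changelog index row by row; with --format 1
# each row additionally carries the flag bits, uncompressed size and both
# parent revisions. The "delta" vs. "base" column header reflects whether the
# revlog uses generaldelta.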

@command('debugindexdot', [], _('FILE'))
def debugindexdot(ui, repo, file_):
    """dump an index DAG as a graphviz dot file"""
    r = None
    if repo:
        filelog = repo.file(file_)
        if len(filelog):
            r = filelog
    if not r:
        r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_)
    ui.write("digraph G {\n")
    for i in r:
        node = r.node(i)
        pp = r.parents(node)
        ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
        if pp[1] != nullid:
            ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
    ui.write("}\n")

@command('debuginstall', [], '')
def debuginstall(ui):
    '''test Mercurial installation

    Returns 0 on success.
    '''

    def writetemp(contents):
        (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
        f = os.fdopen(fd, "wb")
        f.write(contents)
        f.close()
        return name

    problems = 0

    # encoding
    ui.status(_("Checking encoding (%s)...\n") % encoding.encoding)
    try:
        encoding.fromlocal("test")
    except util.Abort, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" (check that your locale is properly set)\n"))
        problems += 1

    # compiled modules
    ui.status(_("Checking installed modules (%s)...\n")
              % os.path.dirname(__file__))
    try:
        import bdiff, mpatch, base85, osutil
        dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
    except Exception, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" One or more extensions could not be found"))
        ui.write(_(" (check that you compiled the extensions)\n"))
        problems += 1

    # templates
    import templater
    p = templater.templatepath()
    ui.status(_("Checking templates (%s)...\n") % ' '.join(p))
    try:
        templater.templater(templater.templatepath("map-cmdline.default"))
    except Exception, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" (templates seem to have been installed incorrectly)\n"))
        problems += 1

    # editor
    ui.status(_("Checking commit editor...\n"))
    editor = ui.geteditor()
    cmdpath = util.findexe(editor) or util.findexe(editor.split()[0])
    if not cmdpath:
        if editor == 'vi':
            ui.write(_(" No commit editor set and can't find vi in PATH\n"))
            ui.write(_(" (specify a commit editor in your configuration"
                       " file)\n"))
        else:
            ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
            ui.write(_(" (specify a commit editor in your configuration"
                       " file)\n"))
        problems += 1

    # check username
    ui.status(_("Checking username...\n"))
    try:
        ui.username()
    except util.Abort, e:
        ui.write(" %s\n" % e)
        ui.write(_(" (specify a username in your configuration file)\n"))
        problems += 1

    if not problems:
        ui.status(_("No problems detected\n"))
    else:
        ui.write(_("%s problems detected,"
                   " please check your install!\n") % problems)

    return problems

@command('debugknown', [], _('REPO ID...'))
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s and 1s
    indicating unknown/known.
    """
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable('known'):
        raise util.Abort("known() not supported by target repository")
    flags = repo.known([bin(s) for s in ids])
    ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
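# Example: "hg debugknown http://example.com/repo <40-hex-node> ..." prints a
# string of 0/1 flags, one per queried id (the URL and ids here are
# placeholders for illustration).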

@command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'))
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    target = hg.peer(ui, {}, repopath)
    if keyinfo:
        key, old, new = keyinfo
        r = target.pushkey(namespace, key, old, new)
        ui.status(str(r) + '\n')
        return not r
    else:
        for k, v in target.listkeys(namespace).iteritems():
            ui.write("%s\t%s\n" % (k.encode('string-escape'),
                                   v.encode('string-escape')))
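# The two-argument form lists a namespace, e.g. "hg debugpushkey . bookmarks";
# the five-argument form attempts the conditional key update and reports
# whether it succeeded.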

@command('debugrebuildstate',
    [('r', 'rev', '', _('revision to rebuild to'), _('REV'))],
    _('[-r REV] [REV]'))
def debugrebuildstate(ui, repo, rev="tip"):
    """rebuild the dirstate as it would look like for the given revision"""
    ctx = scmutil.revsingle(repo, rev)
    wlock = repo.wlock()
    try:
        repo.dirstate.rebuild(ctx.node(), ctx.manifest())
    finally:
        wlock.release()

@command('debugrename',
    [('r', 'rev', '', _('revision to debug'), _('REV'))],
    _('[-r REV] FILE'))
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    ctx = scmutil.revsingle(repo, opts.get('rev'))
    m = scmutil.match(ctx, (file1,) + pats, opts)
    for abs in ctx.walk(m):
        fctx = ctx[abs]
        o = fctx.filelog().renamed(fctx.filenode())
        rel = m.rel(abs)
        if o:
            ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
        else:
            ui.write(_("%s not renamed\n") % rel)

@command('debugrevlog',
    [('c', 'changelog', False, _('open changelog')),
    ('m', 'manifest', False, _('open manifest')),
    ('d', 'dump', False, _('dump index data'))],
    _('-c|-m|FILE'))
def debugrevlog(ui, repo, file_ = None, **opts):
    """show data and statistics about a revlog"""
    r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)

    if opts.get("dump"):
        numrevs = len(r)
        ui.write("# rev p1rev p2rev start end deltastart base p1 p2"
                 " rawsize totalsize compression heads\n")
        ts = 0
        heads = set()
        for rev in xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                dbase = rev
            cbase = r.chainbase(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            ui.write("%d %d %d %d %d %d %d %d %d %d %d %d %d\n" %
                     (rev, p1, p2, r.start(rev), r.end(rev),
                      r.start(dbase), r.start(cbase),
                      r.start(p1), r.start(p2),
                      rs, ts, ts / r.end(rev), len(heads)))
        return 0

    v = r.version
    format = v & 0xFFFF
    flags = []
    gdelta = False
    if v & revlog.REVLOGNGINLINEDATA:
        flags.append('inline')
    if v & revlog.REVLOGGENERALDELTA:
        gdelta = True
        flags.append('generaldelta')
    if not flags:
        flags = ['(none)']

    nummerges = 0
    numfull = 0
    numprev = 0
    nump1 = 0
    nump2 = 0
    numother = 0
    nump1prev = 0
    nump2prev = 0
    chainlengths = []

    datasize = [None, 0, 0L]
    fullsize = [None, 0, 0L]
    deltasize = [None, 0, 0L]

    def addsize(size, l):
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            chainlengths.append(0)
            numfull += 1
            addsize(size, fullsize)
        else:
            chainlengths.append(chainlengths[delta] + 1)
            addsize(size, deltasize)
            if delta == rev - 1:
                numprev += 1
                if delta == p1:
                    nump1prev += 1
                elif delta == p2:
                    nump2prev += 1
            elif delta == p1:
                nump1 += 1
            elif delta == p2:
                nump2 += 1
            elif delta != nullrev:
                numother += 1

    numdeltas = numrevs - numfull
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    fullsize[2] /= numfull
    deltatotal = deltasize[2]
    deltasize[2] /= numrevs - numfull
    totalsize = fulltotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    compratio = totalrawsize / totalsize

    basedfmtstr = '%%%dd\n'
    basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        return basedfmtstr % len(str(max))
    def pcfmtstr(max, padding=0):
        return basepcfmtstr % (len(str(max)), ' ' * padding)

    def pcfmt(value, total):
        return (value, 100 * float(value) / total)

    ui.write('format : %d\n' % format)
    ui.write('flags : %s\n' % ', '.join(flags))

    ui.write('\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.write('revisions : ' + fmt2 % numrevs)
    ui.write(' merges : ' + fmt % pcfmt(nummerges, numrevs))
    ui.write(' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs))
    ui.write('revisions : ' + fmt2 % numrevs)
    ui.write(' full : ' + fmt % pcfmt(numfull, numrevs))
    ui.write(' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
    ui.write('revision size : ' + fmt2 % totalsize)
    ui.write(' full : ' + fmt % pcfmt(fulltotal, totalsize))
    ui.write(' deltas : ' + fmt % pcfmt(deltatotal, totalsize))

    ui.write('\n')
    fmt = dfmtstr(max(avgchainlen, compratio))
    ui.write('avg chain length : ' + fmt % avgchainlen)
    ui.write('compression ratio : ' + fmt % compratio)

    if format > 0:
        ui.write('\n')
        ui.write('uncompressed data size (min/max/avg) : %d / %d / %d\n'
                 % tuple(datasize))
        ui.write('full revision size (min/max/avg) : %d / %d / %d\n'
                 % tuple(fullsize))
        ui.write('delta size (min/max/avg) : %d / %d / %d\n'
                 % tuple(deltasize))

    if numdeltas > 0:
        ui.write('\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.write('deltas against prev : ' + fmt % pcfmt(numprev, numdeltas))
        if numprev > 0:
            ui.write(' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev))
            ui.write(' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev))
            ui.write(' other : ' + fmt2 % pcfmt(numoprev, numprev))
        if gdelta:
            ui.write('deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas))
            ui.write('deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas))
        ui.write('deltas against other : ' + fmt % pcfmt(numother, numdeltas))
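# "hg debugrevlog -m" summarizes the manifest revlog: format flags, full vs.
# delta revision counts and sizes, average delta chain length and the overall
# compression ratio; --dump instead emits one raw line of index data per
# revision.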

@command('debugrevspec', [], ('REVSPEC'))
def debugrevspec(ui, repo, expr):
    '''parse and apply a revision specification'''
    if ui.verbose:
        tree = revset.parse(expr)[0]
        ui.note(tree, "\n")
        newtree = revset.findaliases(ui, tree)
        if newtree != tree:
            ui.note(newtree, "\n")
    func = revset.match(ui, expr)
    for c in func(repo, range(len(repo))):
        ui.write("%s\n" % c)

@command('debugsetparents', [], _('REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care.

    Returns 0 on success.
    """

    r1 = scmutil.revsingle(repo, rev1).node()
    r2 = scmutil.revsingle(repo, rev2, 'null').node()

    wlock = repo.wlock()
    try:
        repo.dirstate.setparents(r1, r2)
    finally:
        wlock.release()

@command('debugstate',
    [('', 'nodates', None, _('do not display the saved mtime')),
    ('', 'datesort', None, _('sort by saved mtime'))],
    _('[OPTION]...'))
def debugstate(ui, repo, nodates=None, datesort=None):
    """show the contents of the current dirstate"""
    timestr = ""
    showdate = not nodates
    if datesort:
        keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
    else:
        keyfunc = None # sort by filename
    for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
        if showdate:
            if ent[3] == -1:
                # Pad or slice to locale representation
                locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ",
                                               time.localtime(0)))
                timestr = 'unset'
                timestr = (timestr[:locale_len] +
                           ' ' * (locale_len - len(timestr)))
            else:
                timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
                                        time.localtime(ent[3]))
        if ent[1] & 020000:
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0777)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
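# Typical output is one line per dirstate entry: status character, mode, size
# and mtime followed by the filename (for instance a line resembling
# "n 644 12 2011-10-01 12:00:00 foo"; the sample values are illustrative),
# then any recorded copy sources.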

@command('debugsub',
    [('r', 'rev', '',
     _('revision to check'), _('REV'))],
    _('[-r REV] [REV]'))
def debugsub(ui, repo, rev=None):
    ctx = scmutil.revsingle(repo, rev, None)
    for k, v in sorted(ctx.substate.items()):
        ui.write('path %s\n' % k)
        ui.write(' source %s\n' % v[0])
        ui.write(' revision %s\n' % v[1])

@command('debugwalk', walkopts, _('[OPTION]... [FILE]...'))
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    m = scmutil.match(repo[None], pats, opts)
    items = list(repo.walk(m))
    if not items:
        return
    fmt = 'f %%-%ds %%-%ds %%s' % (
        max([len(abs) for abs in items]),
        max([len(m.rel(abs)) for abs in items]))
    for abs in items:
        line = fmt % (abs, m.rel(abs), m.exact(abs) and 'exact' or '')
        ui.write("%s\n" % line.rstrip())

@command('debugwireargs',
    [('', 'three', '', 'three'),
    ('', 'four', '', 'four'),
    ('', 'five', '', 'five'),
    ] + remoteopts,
    _('REPO [OPTIONS]... [ONE [TWO]]'))
def debugwireargs(ui, repopath, *vals, **opts):
    repo = hg.peer(ui, opts, repopath)
    for opt in remoteopts:
        del opts[opt[1]]
    args = {}
    for k, v in opts.iteritems():
        if v:
            args[k] = v
    # run twice to check that we don't mess up the stream for the next command
    res1 = repo.debugwireargs(*vals, **args)
    res2 = repo.debugwireargs(*vals, **args)
    ui.write("%s\n" % res1)
    if res1 != res2:
        ui.warn("%s\n" % res2)

@command('^diff',
    [('r', 'rev', [], _('revision'), _('REV')),
    ('c', 'change', '', _('change made by revision'), _('REV'))
    ] + diffopts + diffopts2 + walkopts + subrepoopts,
    _('[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...'))
def diff(ui, repo, *pats, **opts):
    """diff repository (or selected files)

    Show differences between revisions for the specified files.

    Differences between files are shown using the unified diff format.

    .. note::
       diff may generate unexpected results for merges, as it will
       default to comparing against the working directory's first
       parent changeset if no revisions are specified.

    When two revision arguments are given, then changes are shown
    between those revisions. If only one revision is specified then
    that revision is compared to the working directory, and, when no
    revisions are specified, the working directory files are compared
    to its parent.

    Alternatively you can specify -c/--change with a revision to see
    the changes in that changeset relative to its first parent.

    Without the -a/--text option, diff will avoid generating diffs of
    files it detects as binary. With -a, diff will generate a diff
    anyway, probably with undesirable results.

    Use the -g/--git option to generate diffs in the git extended diff
    format. For more information, read :hg:`help diffs`.

    .. container:: verbose

      Examples:

      - compare a file in the current working directory to its parent::

          hg diff foo.c

      - compare two historical versions of a directory, with rename info::

          hg diff --git -r 1.0:1.2 lib/

      - get change stats relative to the last change on some date::

          hg diff --stat -r "date('may 2')"

      - diff all newly-added files that contain a keyword::

          hg diff "set:added() and grep(GNU)"

      - compare a revision and its parents::

          hg diff -c 9353       # compare against first parent
          hg diff -r 9353^:9353 # same using revset syntax
          hg diff -r 9353^2:9353 # compare against the second parent

    Returns 0 on success.
    """

    revs = opts.get('rev')
    change = opts.get('change')
    stat = opts.get('stat')
    reverse = opts.get('reverse')

    if revs and change:
        msg = _('cannot specify --rev and --change at the same time')
        raise util.Abort(msg)
    elif change:
        node2 = scmutil.revsingle(repo, change, None).node()
        node1 = repo[node2].p1().node()
    else:
        node1, node2 = scmutil.revpair(repo, revs)

    if reverse:
        node1, node2 = node2, node1

    diffopts = patch.diffopts(ui, opts)
    m = scmutil.match(repo[node2], pats, opts)
    cmdutil.diffordiffstat(ui, repo, diffopts, node1, node2, m, stat=stat,
                           listsubrepos=opts.get('subrepos'))
2326
2326
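# A rough sketch of how the options above are resolved into the (node1, node2)
# pair handed to cmdutil.diffordiffstat; the revision numbers are made up for
# illustration:
#
#   hg diff                        -> node1 = first parent of the working dir,
#                                     node2 = None (the working directory)
#   hg diff -r 10 -r 12            -> node1 = rev 10, node2 = rev 12
#   hg diff -c 12                  -> node1 = p1(12), node2 = rev 12
#   hg diff --reverse -r 10 -r 12  -> the resulting pair is swapped to (12, 10)
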
@command('^export',
    [('o', 'output', '',
     _('print output to file with formatted name'), _('FORMAT')),
    ('', 'switch-parent', None, _('diff against the second parent')),
    ('r', 'rev', [], _('revisions to export'), _('REV')),
    ] + diffopts,
    _('[OPTION]... [-o OUTFILESPEC] REV...'))
def export(ui, repo, *changesets, **opts):
    """dump the header and diffs for one or more changesets

    Print the changeset header and diffs for one or more revisions.

    The information shown in the changeset header is: author, date,
    branch name (if non-default), changeset hash, parent(s) and commit
    comment.

    .. note::
       export may generate unexpected diff output for merge
       changesets, as it will compare the merge changeset against its
       first parent only.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are as follows:

    :``%%``: literal "%" character
    :``%H``: changeset hash (40 hexadecimal digits)
    :``%N``: number of patches being generated
    :``%R``: changeset revision number
    :``%b``: basename of the exporting repository
    :``%h``: short-form changeset hash (12 hexadecimal digits)
    :``%m``: first line of the commit message (only alphanumeric characters)
    :``%n``: zero-padded sequence number, starting at 1
    :``%r``: zero-padded changeset revision number

    Without the -a/--text option, export will avoid generating diffs
    of files it detects as binary. With -a, export will generate a
    diff anyway, probably with undesirable results.

    Use the -g/--git option to generate diffs in the git extended diff
    format. See :hg:`help diffs` for more information.

    With the --switch-parent option, the diff will be against the
    second parent. It can be useful to review a merge.

    .. container:: verbose

      Examples:

      - use export and import to transplant a bugfix to the current
        branch::

          hg export -r 9353 | hg import -

      - export all the changesets between two revisions to a file with
        rename information::

          hg export --git -r 123:150 > changes.txt

      - split outgoing changes into a series of patches with
        descriptive names::

          hg export -r "outgoing()" -o "%n-%m.patch"

    Returns 0 on success.
    """
    changesets += tuple(opts.get('rev', []))
    if not changesets:
        raise util.Abort(_("export requires at least one changeset"))
    revs = scmutil.revrange(repo, changesets)
    if len(revs) > 1:
        ui.note(_('exporting patches:\n'))
    else:
        ui.note(_('exporting patch:\n'))
    cmdutil.export(repo, revs, template=opts.get('output'),
                   switch_parent=opts.get('switch_parent'),
                   opts=patch.diffopts(ui, opts))

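# A hypothetical expansion of the output format keys documented above (the
# changeset is made up): running
#
#   hg export -r "outgoing()" -o "%n-%m.patch"
#
# with a dozen outgoing changesets writes files named 01-<message>.patch
# through 12-<message>.patch, where %n is the sequence number zero-padded to
# the width of the total count and %m is derived from the first line of each
# commit message; a changeset whose first line is simply "fixbug" would
# produce 01-fixbug.patch.
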
@command('^forget', walkopts, _('[OPTION]... FILE...'))
def forget(ui, repo, *pats, **opts):
    """forget the specified files on the next commit

    Mark the specified files so they will no longer be tracked
    after the next commit.

    This only removes files from the current branch, not from the
    entire project history, and it does not delete them from the
    working directory.

    To undo a forget before the next commit, see :hg:`add`.

    .. container:: verbose

      Examples:

      - forget newly-added binary files::

          hg forget "set:added() and binary()"

      - forget files that would be excluded by .hgignore::

          hg forget "set:hgignore()"

    Returns 0 on success.
    """

    if not pats:
        raise util.Abort(_('no files specified'))

    m = scmutil.match(repo[None], pats, opts)
    s = repo.status(match=m, clean=True)
    forget = sorted(s[0] + s[1] + s[3] + s[6])
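    # repo.status() returns a tuple of file lists in the order (modified,
    # added, removed, deleted, unknown, ignored, clean), so the indices 0, 1,
    # 3 and 6 above select the modified, added, deleted and clean files that
    # match the given patterns.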
    errs = 0

    for f in m.files():
        if f not in repo.dirstate and not os.path.isdir(m.rel(f)):
            if os.path.exists(m.rel(f)):
                ui.warn(_('not removing %s: file is already untracked\n')
                        % m.rel(f))
            errs = 1

    for f in forget:
        if ui.verbose or not m.exact(f):
            ui.status(_('removing %s\n') % m.rel(f))

    repo[None].forget(forget)
    return errs

@command(
    'graft',
    [('c', 'continue', False, _('resume interrupted graft')),
     ('e', 'edit', False, _('invoke editor on commit messages')),
     ('D', 'currentdate', False,
      _('record the current date as commit date')),
     ('U', 'currentuser', False,
      _('record the current user as committer'))]
    + commitopts2 + mergetoolopts,
    _('[OPTION]... REVISION...'))
def graft(ui, repo, *revs, **opts):
    '''copy changes from other branches onto the current branch

    This command uses Mercurial's merge logic to copy individual
    changes from other branches without merging branches in the
    history graph. This is sometimes known as 'backporting' or
    'cherry-picking'. By default, graft will copy user, date, and
    description from the source changesets.

    Changesets that are ancestors of the current revision, that have
    already been grafted, or that are merges will be skipped.

    If a graft merge results in conflicts, the graft process is
    aborted so that the current merge can be manually resolved. Once
    all conflicts are addressed, the graft process can be continued
    with the -c/--continue option.

    .. note::
       The -c/--continue option does not reapply earlier options.

    .. container:: verbose

      Examples:

      - copy a single change to the stable branch and edit its description::

          hg update stable
          hg graft --edit 9393

      - graft a range of changesets with one exception, updating dates::

          hg graft -D "2085::2093 and not 2091"

      - continue a graft after resolving conflicts::

          hg graft -c

      - show the source of a grafted changeset::

          hg log --debug -r tip

    Returns 0 on successful completion.
    '''

    if not opts.get('user') and opts.get('currentuser'):
        opts['user'] = ui.username()
    if not opts.get('date') and opts.get('currentdate'):
        opts['date'] = "%d %d" % util.makedate()

    editor = None
    if opts.get('edit'):
        editor = cmdutil.commitforceeditor

    cont = False
    if opts['continue']:
        cont = True
        if revs:
            raise util.Abort(_("can't specify --continue and revisions"))
        # read in unfinished revisions
        try:
            nodes = repo.opener.read('graftstate').splitlines()
            revs = [repo[node].rev() for node in nodes]
        except IOError, inst:
            if inst.errno != errno.ENOENT:
                raise
            raise util.Abort(_("no graft state found, can't continue"))
    else:
        cmdutil.bailifchanged(repo)
        if not revs:
            raise util.Abort(_('no revisions specified'))
        revs = scmutil.revrange(repo, revs)

    # check for merges
    for ctx in repo.set('%ld and merge()', revs):
        ui.warn(_('skipping ungraftable merge revision %s\n') % ctx.rev())
        revs.remove(ctx.rev())
    if not revs:
        return -1

    # check for ancestors of dest branch
    for ctx in repo.set('::. and %ld', revs):
        ui.warn(_('skipping ancestor revision %s\n') % ctx.rev())
        revs.remove(ctx.rev())
    if not revs:
        return -1

    # check ancestors for earlier grafts
    ui.debug('scanning for duplicate grafts\n')
    for ctx in repo.set("::. - ::%ld", revs):
        n = ctx.extra().get('source')
        if n and n in repo:
            r = repo[n].rev()
            if r in revs:
                ui.warn(_('skipping already grafted revision %s\n') % r)
                revs.remove(r)
    if not revs:
        return -1

    for pos, ctx in enumerate(repo.set("%ld", revs)):
        current = repo['.']
        ui.status(_('grafting revision %s\n') % ctx.rev())

        # we don't merge the first commit when continuing
        if not cont:
            # perform the graft merge with p1(rev) as 'ancestor'
            try:
                # ui.forcemerge is an internal variable, do not document
                repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
                stats = mergemod.update(repo, ctx.node(), True, True, False,
                                        ctx.p1().node())
            finally:
                ui.setconfig('ui', 'forcemerge', '')
            # drop the second merge parent
            repo.dirstate.setparents(current.node(), nullid)
            repo.dirstate.write()
            # fix up dirstate for copies and renames
            cmdutil.duplicatecopies(repo, ctx.rev(), current.node(), nullid)
            # report any conflicts
            if stats and stats[3] > 0:
                # write out state for --continue
                nodelines = [repo[rev].hex() + "\n" for rev in revs[pos:]]
                repo.opener.write('graftstate', ''.join(nodelines))
                raise util.Abort(
                    _("unresolved conflicts, can't continue"),
                    hint=_('use hg resolve and hg graft --continue'))
        else:
            cont = False

        # commit
        extra = {'source': ctx.hex()}
        user = ctx.user()
        if opts.get('user'):
            user = opts['user']
        date = ctx.date()
        if opts.get('date'):
            date = opts['date']
        repo.commit(text=ctx.description(), user=user,
                    date=date, extra=extra, editor=editor)

    # remove state when we complete successfully
    if os.path.exists(repo.join('graftstate')):
        util.unlinkpath(repo.join('graftstate'))

    return 0

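# Implementation note (inferred from the code above, not separate Mercurial
# documentation): .hg/graftstate simply lists the full 40-character hex node
# of every revision that still remains to be grafted, one per line, so that
# "hg graft --continue" can resume where the interrupted run stopped; the
# file is removed again once the loop completes successfully.
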
@command('grep',
    [('0', 'print0', None, _('end fields with NUL')),
    ('', 'all', None, _('print all revisions that match')),
    ('a', 'text', None, _('treat all files as text')),
    ('f', 'follow', None,
     _('follow changeset history,'
       ' or file history across copies and renames')),
    ('i', 'ignore-case', None, _('ignore case when matching')),
    ('l', 'files-with-matches', None,
     _('print only filenames and revisions that match')),
    ('n', 'line-number', None, _('print matching line numbers')),
    ('r', 'rev', [],
     _('only search files changed within revision range'), _('REV')),
    ('u', 'user', None, _('list the author (long with -v)')),
    ('d', 'date', None, _('list the date (short with -q)')),
    ] + walkopts,
    _('[OPTION]... PATTERN [FILE]...'))
def grep(ui, repo, pattern, *pats, **opts):
    """search for a pattern in specified files and revisions

    Search revisions of files for a regular expression.

    This command behaves differently than Unix grep. It only accepts
    Python/Perl regexps. It searches repository history, not the
    working directory. It always prints the revision number in which a
    match appears.

    By default, grep only prints output for the first revision of a
    file in which it finds a match. To get it to print every revision
    that contains a change in match status ("-" for a match that
    becomes a non-match, or "+" for a non-match that becomes a match),
    use the --all flag.

    Returns 0 if a match is found, 1 otherwise.
    """
    reflags = 0
    if opts.get('ignore_case'):
        reflags |= re.I
    try:
        regexp = re.compile(pattern, reflags)
    except re.error, inst:
        ui.warn(_("grep: invalid match pattern: %s\n") % inst)
        return 1
    sep, eol = ':', '\n'
    if opts.get('print0'):
        sep = eol = '\0'

    getfile = util.lrucachefunc(repo.file)

    def matchlines(body):
        begin = 0
        linenum = 0
        while True:
            match = regexp.search(body, begin)
            if not match:
                break
            mstart, mend = match.span()
            linenum += body.count('\n', begin, mstart) + 1
            lstart = body.rfind('\n', begin, mstart) + 1 or begin
            begin = body.find('\n', mend) + 1 or len(body) + 1
            lend = begin - 1
            yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]

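    # A made-up example of what the generator above yields: for a body of
    # "foo\nbar baz\n" and a pattern of "bar", it produces one tuple,
    # (2, 0, 3, 'bar baz') -- the 1-based line number, the start and end
    # columns of the match within that line, and the text of the matching
    # line itself.
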
    class linestate(object):
        def __init__(self, line, linenum, colstart, colend):
            self.line = line
            self.linenum = linenum
            self.colstart = colstart
            self.colend = colend

        def __hash__(self):
            return hash((self.linenum, self.line))

        def __eq__(self, other):
            return self.line == other.line

    matches = {}
    copies = {}
    def grepbody(fn, rev, body):
        matches[rev].setdefault(fn, [])
        m = matches[rev][fn]
        for lnum, cstart, cend, line in matchlines(body):
            s = linestate(line, lnum, cstart, cend)
            m.append(s)

    def difflinestates(a, b):
        sm = difflib.SequenceMatcher(None, a, b)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag == 'insert':
                for i in xrange(blo, bhi):
                    yield ('+', b[i])
            elif tag == 'delete':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
            elif tag == 'replace':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
                for i in xrange(blo, bhi):
                    yield ('+', b[i])

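    # Sketch of the intent of difflinestates (a reading of the code, not an
    # exhaustive spec): given the matching linestates of the parent revision
    # and of the child revision, it yields ('-', state) for matching lines
    # that disappeared and ('+', state) for matching lines that appeared,
    # which is what --all reports as the change column.
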
    def display(fn, ctx, pstates, states):
        rev = ctx.rev()
        datefunc = ui.quiet and util.shortdate or util.datestr
        found = False
        filerevmatches = {}
        def binary():
            flog = getfile(fn)
            return util.binary(flog.read(ctx.filenode(fn)))

        if opts.get('all'):
            iter = difflinestates(pstates, states)
        else:
            iter = [('', l) for l in states]
        for change, l in iter:
            cols = [fn, str(rev)]
            before, match, after = None, None, None
            if opts.get('line_number'):
                cols.append(str(l.linenum))
            if opts.get('all'):
                cols.append(change)
            if opts.get('user'):
                cols.append(ui.shortuser(ctx.user()))
            if opts.get('date'):
                cols.append(datefunc(ctx.date()))
            if opts.get('files_with_matches'):
                c = (fn, rev)
                if c in filerevmatches:
                    continue
                filerevmatches[c] = 1
            else:
                before = l.line[:l.colstart]
                match = l.line[l.colstart:l.colend]
                after = l.line[l.colend:]
            ui.write(sep.join(cols))
            if before is not None:
                if not opts.get('text') and binary():
                    ui.write(sep + " Binary file matches")
                else:
                    ui.write(sep + before)
                    ui.write(match, label='grep.match')
                    ui.write(after)
            ui.write(eol)
            found = True
        return found

    skip = {}
    revfiles = {}
    matchfn = scmutil.match(repo[None], pats, opts)
    found = False
    follow = opts.get('follow')

    def prep(ctx, fns):
        rev = ctx.rev()
        pctx = ctx.p1()
        parent = pctx.rev()
        matches.setdefault(rev, {})
        matches.setdefault(parent, {})
        files = revfiles.setdefault(rev, [])
        for fn in fns:
            flog = getfile(fn)
            try:
                fnode = ctx.filenode(fn)
            except error.LookupError:
                continue

            copied = flog.renamed(fnode)
            copy = follow and copied and copied[0]
            if copy:
                copies.setdefault(rev, {})[fn] = copy
            if fn in skip:
                if copy:
                    skip[copy] = True
                continue
            files.append(fn)

            if fn not in matches[rev]:
                grepbody(fn, rev, flog.read(fnode))

            pfn = copy or fn
            if pfn not in matches[parent]:
                try:
                    fnode = pctx.filenode(pfn)
                    grepbody(pfn, parent, flog.read(fnode))
                except error.LookupError:
                    pass

    for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
        rev = ctx.rev()
        parent = ctx.p1().rev()
        for fn in sorted(revfiles.get(rev, [])):
            states = matches[rev][fn]
            copy = copies.get(rev, {}).get(fn)
            if fn in skip:
                if copy:
                    skip[copy] = True
                continue
            pstates = matches.get(parent, {}).get(copy or fn, [])
            if pstates or states:
                r = display(fn, ctx, pstates, states)
                found = found or r
                if r and not opts.get('all'):
                    skip[fn] = True
                    if copy:
                        skip[copy] = True
        del matches[rev]
        del revfiles[rev]

    return not found

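# For reference, a hypothetical line of grep output as assembled by display()
# above with -n and --all in effect (file name, revision, line number and
# change marker joined by the separator, followed by the matching line):
#
#   commands.py:2630:5:+:Python/Perl regexps
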
@command('heads',
    [('r', 'rev', '',
     _('show only heads which are descendants of STARTREV'), _('STARTREV')),
    ('t', 'topo', False, _('show topological heads only')),
    ('a', 'active', False, _('show active branchheads only (DEPRECATED)')),
    ('c', 'closed', False, _('show normal and closed branch heads')),
    ] + templateopts,
    _('[-ac] [-r STARTREV] [REV]...'))
def heads(ui, repo, *branchrevs, **opts):
    """show current repository heads or show branch heads

    With no arguments, show all repository branch heads.

    Repository "heads" are changesets with no child changesets. They are
    where development generally takes place and are the usual targets
    for update and merge operations. Branch heads are changesets that have
    no child changeset on the same branch.

    If one or more REVs are given, only branch heads on the branches
    associated with the specified changesets are shown. This means
    that you can use :hg:`heads foo` to see the heads on a branch
    named ``foo``.

    If -c/--closed is specified, also show branch heads marked closed
    (see :hg:`commit --close-branch`).

    If STARTREV is specified, only those heads that are descendants of
    STARTREV will be displayed.

    If -t/--topo is specified, named branch mechanics will be ignored and only
    changesets without children will be shown.

    Returns 0 if matching heads are found, 1 if not.
    """

    start = None
    if 'rev' in opts:
        start = scmutil.revsingle(repo, opts['rev'], None).node()

    if opts.get('topo'):
        heads = [repo[h] for h in repo.heads(start)]
    else:
        heads = []
        for branch in repo.branchmap():
            heads += repo.branchheads(branch, start, opts.get('closed'))
        heads = [repo[h] for h in heads]

    if branchrevs:
        branches = set(repo[br].branch() for br in branchrevs)
        heads = [h for h in heads if h.branch() in branches]

    if opts.get('active') and branchrevs:
        dagheads = repo.heads(start)
        heads = [h for h in heads if h.node() in dagheads]

    if branchrevs:
        haveheads = set(h.branch() for h in heads)
        if branches - haveheads:
            headless = ', '.join(b for b in branches - haveheads)
            msg = _('no open branch heads found on branches %s')
            if opts.get('rev'):
                msg += _(' (started at %s)') % opts['rev']
            ui.warn((msg + '\n') % headless)

    if not heads:
        return 1

    heads = sorted(heads, key=lambda x: -x.rev())
    displayer = cmdutil.show_changeset(ui, repo, opts)
    for ctx in heads:
        displayer.show(ctx)
    displayer.close()

@command('help',
    [('e', 'extension', None, _('show only help for extensions')),
     ('c', 'command', None, _('show only help for commands'))],
    _('[-ec] [TOPIC]'))
def help_(ui, name=None, unknowncmd=False, full=True, **opts):
    """show help for a given topic or a help overview

    With no arguments, print a list of commands with short help messages.

    Given a topic, extension, or command name, print help for that
    topic.

    Returns 0 if successful.
    """

    textwidth = min(ui.termwidth(), 80) - 2

    def optrst(options):
        data = []
        multioccur = False
        for option in options:
            if len(option) == 5:
                shortopt, longopt, default, desc, optlabel = option
            else:
                shortopt, longopt, default, desc = option
                optlabel = _("VALUE") # default label

            if _("DEPRECATED") in desc and not ui.verbose:
                continue

            so = ''
            if shortopt:
                so = '-' + shortopt
            lo = '--' + longopt
            if default:
                desc += _(" (default: %s)") % default

            if isinstance(default, list):
                lo += " %s [+]" % optlabel
                multioccur = True
            elif (default is not None) and not isinstance(default, bool):
                lo += " %s" % optlabel

            data.append((so, lo, desc))

        rst = minirst.maketable(data, 1)

        if multioccur:
            rst += _("\n[+] marked option can be specified multiple times\n")

        return rst

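    # A made-up illustration of what optrst() builds: for an option tuple
    # such as ('r', 'rev', [], _('revision'), _('REV')) it appends the table
    # row ('-r', '--rev REV [+]', 'revision'), and minirst.maketable then
    # renders the collected rows as a reStructuredText table; the "[+]"
    # footnote line is added only when at least one option may be repeated.
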
    # list all option lists
    def opttext(optlist, width):
        rst = ''
        if not optlist:
            return ''

        for title, options in optlist:
            rst += '\n%s\n' % title
            if options:
                rst += "\n"
                rst += optrst(options)
            rst += '\n'

        return '\n' + minirst.format(rst, width)

    def addglobalopts(optlist, aliases):
        if ui.quiet:
            return []

        if ui.verbose:
            optlist.append((_("global options:"), globalopts))
            if name == 'shortlist':
                optlist.append((_('use "hg help" for the full list '
                                  'of commands'), ()))
        else:
            if name == 'shortlist':
                msg = _('use "hg help" for the full list of commands '
                        'or "hg -v" for details')
            elif name and not full:
                msg = _('use "hg help %s" to show the full help text') % name
            elif aliases:
                msg = _('use "hg -v help%s" to show builtin aliases and '
                        'global options') % (name and " " + name or "")
            else:
                msg = _('use "hg -v help %s" to show more info') % name
            optlist.append((msg, ()))

    def helpcmd(name):
        try:
            aliases, entry = cmdutil.findcmd(name, table, strict=unknowncmd)
        except error.AmbiguousCommand, inst:
            # py3k fix: except vars can't be used outside the scope of the
            # except block, nor can be used inside a lambda. python issue4617
            prefix = inst.args[0]
            select = lambda c: c.lstrip('^').startswith(prefix)
            helplist(select)
            return

        # check if it's an invalid alias and display its error if it is
        if getattr(entry[0], 'badalias', False):
            if not unknowncmd:
                entry[0](ui)
            return

        rst = ""

        # synopsis
        if len(entry) > 2:
            if entry[2].startswith('hg'):
                rst += "%s\n" % entry[2]
            else:
                rst += 'hg %s %s\n' % (aliases[0], entry[2])
        else:
            rst += 'hg %s\n' % aliases[0]

        # aliases
        if full and not ui.quiet and len(aliases) > 1:
            rst += _("\naliases: %s\n") % ', '.join(aliases[1:])

        # description
        doc = gettext(entry[0].__doc__)
        if not doc:
            doc = _("(no help text available)")
        if util.safehasattr(entry[0], 'definition'): # aliased command
            if entry[0].definition.startswith('!'): # shell alias
                doc = _('shell alias for::\n\n %s') % entry[0].definition[1:]
            else:
                doc = _('alias for: hg %s\n\n%s') % (entry[0].definition, doc)
        if ui.quiet or not full:
            doc = doc.splitlines()[0]
        rst += "\n" + doc + "\n"

        # check if this command shadows a non-trivial (multi-line)
        # extension help text
        try:
            mod = extensions.find(name)
            doc = gettext(mod.__doc__) or ''
            if '\n' in doc.strip():
                msg = _('use "hg help -e %s" to show help for '
                        'the %s extension') % (name, name)
                rst += '\n%s\n' % msg
        except KeyError:
            pass

        # options
        if not ui.quiet and entry[1]:
            rst += '\noptions:\n\n'
            rst += optrst(entry[1])

        if ui.verbose:
            rst += '\nglobal options:\n\n'
            rst += optrst(globalopts)

        keep = ui.verbose and ['verbose'] or []
        formatted, pruned = minirst.format(rst, textwidth, keep=keep)
        ui.write(formatted)

        if not ui.verbose:
            if not full:
                ui.write(_('\nuse "hg help %s" to show the full help text\n')
                         % name)
            elif not ui.quiet:
                ui.write(_('\nuse "hg -v help %s" to show more info\n') % name)


    def helplist(select=None):
        # list of commands
        if name == "shortlist":
            header = _('basic commands:\n\n')
        else:
            header = _('list of commands:\n\n')

        h = {}
        cmds = {}
        for c, e in table.iteritems():
            f = c.split("|", 1)[0]
            if select and not select(f):
                continue
            if (not select and name != 'shortlist' and
                e[0].__module__ != __name__):
                continue
            if name == "shortlist" and not f.startswith("^"):
                continue
            f = f.lstrip("^")
            if not ui.debugflag and f.startswith("debug"):
                continue
            doc = e[0].__doc__
            if doc and 'DEPRECATED' in doc and not ui.verbose:
                continue
            doc = gettext(doc)
            if not doc:
                doc = _("(no help text available)")
            h[f] = doc.splitlines()[0].rstrip()
            cmds[f] = c.lstrip("^")

        if not h:
            ui.status(_('no commands defined\n'))
            return

        ui.status(header)
        fns = sorted(h)
        m = max(map(len, fns))
        for f in fns:
            if ui.verbose:
                commands = cmds[f].replace("|",", ")
                ui.write(" %s:\n %s\n" % (commands, h[f]))
            else:
                ui.write('%s\n' % (util.wrap(h[f], textwidth,
                                             initindent=' %-*s ' % (m, f),
                                             hangindent=' ' * (m + 4))))

        if not name:
            text = help.listexts(_('enabled extensions:'), extensions.enabled())
            if text:
                ui.write("\n%s" % minirst.format(text, textwidth))

            ui.write(_("\nadditional help topics:\n\n"))
            topics = []
            for names, header, doc in help.helptable:
                topics.append((sorted(names, key=len, reverse=True)[0], header))
            topics_len = max([len(s[0]) for s in topics])
            for t, desc in topics:
                ui.write(" %-*s %s\n" % (topics_len, t, desc))

        optlist = []
        addglobalopts(optlist, True)
        ui.write(opttext(optlist, textwidth))

3121 def helptopic(name):
3121 def helptopic(name):
3122 for names, header, doc in help.helptable:
3122 for names, header, doc in help.helptable:
3123 if name in names:
3123 if name in names:
3124 break
3124 break
3125 else:
3125 else:
3126 raise error.UnknownCommand(name)
3126 raise error.UnknownCommand(name)
3127
3127
3128 # description
3128 # description
3129 if not doc:
3129 if not doc:
3130 doc = _("(no help text available)")
3130 doc = _("(no help text available)")
3131 if util.safehasattr(doc, '__call__'):
3131 if util.safehasattr(doc, '__call__'):
3132 doc = doc()
3132 doc = doc()
3133
3133
3134 ui.write("%s\n\n" % header)
3134 ui.write("%s\n\n" % header)
3135 ui.write("%s" % minirst.format(doc, textwidth, indent=4))
3135 ui.write("%s" % minirst.format(doc, textwidth, indent=4))
3136 try:
3136 try:
3137 cmdutil.findcmd(name, table)
3137 cmdutil.findcmd(name, table)
3138 ui.write(_('\nuse "hg help -c %s" to see help for '
3138 ui.write(_('\nuse "hg help -c %s" to see help for '
3139 'the %s command\n') % (name, name))
3139 'the %s command\n') % (name, name))
3140 except error.UnknownCommand:
3140 except error.UnknownCommand:
3141 pass
3141 pass
3142
3142
3143 def helpext(name):
3143 def helpext(name):
3144 try:
3144 try:
3145 mod = extensions.find(name)
3145 mod = extensions.find(name)
3146 doc = gettext(mod.__doc__) or _('no help text available')
3146 doc = gettext(mod.__doc__) or _('no help text available')
3147 except KeyError:
3147 except KeyError:
3148 mod = None
3148 mod = None
3149 doc = extensions.disabledext(name)
3149 doc = extensions.disabledext(name)
3150 if not doc:
3150 if not doc:
3151 raise error.UnknownCommand(name)
3151 raise error.UnknownCommand(name)
3152
3152
3153 if '\n' not in doc:
3153 if '\n' not in doc:
3154 head, tail = doc, ""
3154 head, tail = doc, ""
3155 else:
3155 else:
3156 head, tail = doc.split('\n', 1)
3156 head, tail = doc.split('\n', 1)
3157 ui.write(_('%s extension - %s\n\n') % (name.split('.')[-1], head))
3157 ui.write(_('%s extension - %s\n\n') % (name.split('.')[-1], head))
3158 if tail:
3158 if tail:
3159 ui.write(minirst.format(tail, textwidth))
3159 ui.write(minirst.format(tail, textwidth))
3160 ui.status('\n')
3160 ui.status('\n')
3161
3161
3162 if mod:
3162 if mod:
3163 try:
3163 try:
3164 ct = mod.cmdtable
3164 ct = mod.cmdtable
3165 except AttributeError:
3165 except AttributeError:
3166 ct = {}
3166 ct = {}
3167 modcmds = set([c.split('|', 1)[0] for c in ct])
3167 modcmds = set([c.split('|', 1)[0] for c in ct])
3168 helplist(modcmds.__contains__)
3168 helplist(modcmds.__contains__)
3169 else:
3169 else:
3170 ui.write(_('use "hg help extensions" for information on enabling '
3170 ui.write(_('use "hg help extensions" for information on enabling '
3171 'extensions\n'))
3171 'extensions\n'))
3172
3172
3173 def helpextcmd(name):
3173 def helpextcmd(name):
3174 cmd, ext, mod = extensions.disabledcmd(ui, name, ui.config('ui', 'strict'))
3174 cmd, ext, mod = extensions.disabledcmd(ui, name, ui.config('ui', 'strict'))
3175 doc = gettext(mod.__doc__).splitlines()[0]
3175 doc = gettext(mod.__doc__).splitlines()[0]
3176
3176
3177 msg = help.listexts(_("'%s' is provided by the following "
3177 msg = help.listexts(_("'%s' is provided by the following "
3178 "extension:") % cmd, {ext: doc}, indent=4)
3178 "extension:") % cmd, {ext: doc}, indent=4)
3179 ui.write(minirst.format(msg, textwidth))
3179 ui.write(minirst.format(msg, textwidth))
3180 ui.write('\n')
3180 ui.write('\n')
3181 ui.write(_('use "hg help extensions" for information on enabling '
3181 ui.write(_('use "hg help extensions" for information on enabling '
3182 'extensions\n'))
3182 'extensions\n'))
3183
3183
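    # The dispatch below tries each matching helper in turn for the given
    # name; by default the order is topic, then command, then extension,
    # then disabled extension command, and the last UnknownCommand raised
    # is re-raised if every helper fails.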
    if name and name != 'shortlist':
        i = None
        if unknowncmd:
            queries = (helpextcmd,)
        elif opts.get('extension'):
            queries = (helpext,)
        elif opts.get('command'):
            queries = (helpcmd,)
        else:
            queries = (helptopic, helpcmd, helpext, helpextcmd)
        for f in queries:
            try:
                f(name)
                i = None
                break
            except error.UnknownCommand, inst:
                i = inst
        if i:
            raise i
    else:
        # program name
        ui.status(_("Mercurial Distributed SCM\n"))
        ui.status('\n')
        helplist()


@command('identify|id',
    [('r', 'rev', '',
      _('identify the specified revision'), _('REV')),
     ('n', 'num', None, _('show local revision number')),
     ('i', 'id', None, _('show global revision id')),
     ('b', 'branch', None, _('show branch')),
     ('t', 'tags', None, _('show tags')),
     ('B', 'bookmarks', None, _('show bookmarks'))],
    _('[-nibtB] [-r REV] [SOURCE]'))
def identify(ui, repo, source=None, rev=None,
             num=None, id=None, branch=None, tags=None, bookmarks=None):
    """identify the working copy or specified revision

    Print a summary identifying the repository state at REV using one or
    two parent hash identifiers, followed by a "+" if the working
    directory has uncommitted changes, the branch name (if not default),
    a list of tags, and a list of bookmarks.

    When REV is not given, print a summary of the current state of the
    repository.

    Specifying a path to a repository root or Mercurial bundle will
    cause lookup to operate on that repository/bundle.

    .. container:: verbose

      Examples:

      - generate a build identifier for the working directory::

          hg id --id > build-id.dat

      - find the revision corresponding to a tag::

          hg id -n -r 1.3

      - check the most recent revision of a remote repository::

          hg id -r tip http://selenic.com/hg/

    Returns 0 if successful.
    """

    if not repo and not source:
        raise util.Abort(_("there is no Mercurial repository here "
                           "(.hg not found)"))

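    # hexfunc picks the hash formatter: full 40-character node ids under
    # --debug, the short 12-character form otherwise (Python 2 and/or idiom).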
    hexfunc = ui.debugflag and hex or short
    default = not (num or id or branch or tags or bookmarks)
    output = []
    revs = []

    if source:
        source, branches = hg.parseurl(ui.expandpath(source))
        repo = hg.peer(ui, {}, source)
        revs, checkout = hg.addbranchrevs(repo, repo, branches, None)

    if not repo.local():
        if num or branch or tags:
            raise util.Abort(
                _("can't query remote revision number, branch, or tags"))
        if not rev and revs:
            rev = revs[0]
        if not rev:
            rev = "tip"

        remoterev = repo.lookup(rev)
        if default or id:
            output = [hexfunc(remoterev)]

        def getbms():
            bms = []

            if 'bookmarks' in repo.listkeys('namespaces'):
                hexremoterev = hex(remoterev)
                bms = [bm for bm, bmr in repo.listkeys('bookmarks').iteritems()
                       if bmr == hexremoterev]

            return bms

        if bookmarks:
            output.extend(getbms())
        elif default and not ui.quiet:
            # multiple bookmarks for a single parent separated by '/'
            bm = '/'.join(getbms())
            if bm:
                output.append(bm)
    else:
        if not rev:
            ctx = repo[None]
            parents = ctx.parents()
            changed = ""
            if default or id or num:
                changed = util.any(repo.status()) and "+" or ""
            if default or id:
                output = ["%s%s" %
                  ('+'.join([hexfunc(p.node()) for p in parents]), changed)]
            if num:
                output.append("%s%s" %
                  ('+'.join([str(p.rev()) for p in parents]), changed))
        else:
            ctx = scmutil.revsingle(repo, rev)
            if default or id:
                output = [hexfunc(ctx.node())]
            if num:
                output.append(str(ctx.rev()))

        if default and not ui.quiet:
            b = ctx.branch()
            if b != 'default':
                output.append("(%s)" % b)

            # multiple tags for a single parent separated by '/'
            t = '/'.join(ctx.tags())
            if t:
                output.append(t)

            # multiple bookmarks for a single parent separated by '/'
            bm = '/'.join(ctx.bookmarks())
            if bm:
                output.append(bm)
        else:
            if branch:
                output.append(ctx.branch())

            if tags:
                output.extend(ctx.tags())

            if bookmarks:
                output.extend(ctx.bookmarks())

    ui.write("%s\n" % ' '.join(output))

@command('import|patch',
    [('p', 'strip', 1,
      _('directory strip option for patch. This has the same '
        'meaning as the corresponding patch option'), _('NUM')),
     ('b', 'base', '', _('base path (DEPRECATED)'), _('PATH')),
     ('e', 'edit', False, _('invoke editor on commit messages')),
     ('f', 'force', None, _('skip check for outstanding uncommitted changes')),
     ('', 'no-commit', None,
      _("don't commit, just update the working directory")),
     ('', 'bypass', None,
      _("apply patch without touching the working directory")),
     ('', 'exact', None,
      _('apply patch to the nodes from which it was generated')),
     ('', 'import-branch', None,
      _('use any branch information in patch (implied by --exact)'))] +
    commitopts + commitopts2 + similarityopts,
    _('[OPTION]... PATCH...'))
def import_(ui, repo, patch1=None, *patches, **opts):
    """import an ordered set of patches

    Import a list of patches and commit them individually (unless
    --no-commit is specified).

    If there are outstanding changes in the working directory, import
    will abort unless given the -f/--force flag.

    You can import a patch straight from a mail message. Even patches
    as attachments work (to use the body part, it must have type
    text/plain or text/x-patch). The From and Subject headers of the
    email message are used as the default committer and commit message.
    All text/plain body parts before the first diff are added to the
    commit message.

    If the imported patch was generated by :hg:`export`, user and
    description from patch override values from message headers and
    body. Values given on command line with -m/--message and -u/--user
    override these.

    If --exact is specified, import will set the working directory to
    the parent of each patch before applying it, and will abort if the
    resulting changeset has a different ID than the one recorded in
    the patch. This may happen due to character set problems or other
    deficiencies in the text patch format.

    Use --bypass to apply and commit patches directly to the
    repository, not touching the working directory. Without --exact,
    patches will be applied on top of the working directory parent
    revision.

    With -s/--similarity, hg will attempt to discover renames and
    copies in the patch in the same way as 'addremove'.

    To read a patch from standard input, use "-" as the patch name. If
    a URL is specified, the patch will be downloaded from it.
    See :hg:`help dates` for a list of formats valid for -d/--date.

    .. container:: verbose

      Examples:

      - import a traditional patch from a website and detect renames::

          hg import -s 80 http://example.com/bugfix.patch

      - import a changeset from an hgweb server::

          hg import http://www.selenic.com/hg/rev/5ca8c111e9aa

      - import all the patches in a Unix-style mbox::

          hg import incoming-patches.mbox

      - attempt to exactly restore an exported changeset (not always
        possible)::

          hg import --exact proposed-fix.patch

    Returns 0 on success.
    """

    if not patch1:
        raise util.Abort(_('need at least one patch to import'))

    patches = (patch1,) + patches

    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)

    editor = cmdutil.commiteditor
    if opts.get('edit'):
        editor = cmdutil.commitforceeditor

    update = not opts.get('bypass')
    if not update and opts.get('no_commit'):
        raise util.Abort(_('cannot use --no-commit with --bypass'))
    try:
        sim = float(opts.get('similarity') or 0)
    except ValueError:
        raise util.Abort(_('similarity must be a number'))
    if sim < 0 or sim > 100:
        raise util.Abort(_('similarity must be between 0 and 100'))
    if sim and not update:
        raise util.Abort(_('cannot use --similarity with --bypass'))

    if (opts.get('exact') or not opts.get('force')) and update:
        cmdutil.bailifchanged(repo)

    base = opts["base"]
    strip = opts["strip"]
    wlock = lock = tr = None
    msgs = []

    def checkexact(repo, n, nodeid):
        if opts.get('exact') and hex(n) != nodeid:
            repo.rollback()
            raise util.Abort(_('patch is damaged or loses information'))

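    # tryone applies a single hunk from the patch stream: with --bypass it
    # builds and commits an in-memory context, otherwise it patches the
    # working directory and commits unless --no-commit was given.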
    def tryone(ui, hunk, parents):
        tmpname, message, user, date, branch, nodeid, p1, p2 = \
            patch.extract(ui, hunk)

        if not tmpname:
            return (None, None)
        msg = _('applied to working directory')

        try:
            cmdline_message = cmdutil.logmessage(ui, opts)
            if cmdline_message:
                # pickup the cmdline msg
                message = cmdline_message
            elif message:
                # pickup the patch msg
                message = message.strip()
            else:
                # launch the editor
                message = None
            ui.debug('message:\n%s\n' % message)

            if len(parents) == 1:
                parents.append(repo[nullid])
            if opts.get('exact'):
                if not nodeid or not p1:
                    raise util.Abort(_('not a Mercurial patch'))
                p1 = repo[p1]
                p2 = repo[p2 or nullid]
            elif p2:
                try:
                    p1 = repo[p1]
                    p2 = repo[p2]
                except error.RepoError:
                    p1, p2 = parents
            else:
                p1, p2 = parents

            n = None
            if update:
                if opts.get('exact') and p1 != parents[0]:
                    hg.clean(repo, p1.node())
                if p1 != parents[0] and p2 != parents[1]:
                    repo.dirstate.setparents(p1.node(), p2.node())

                if opts.get('exact') or opts.get('import_branch'):
                    repo.dirstate.setbranch(branch or 'default')

                files = set()
                patch.patch(ui, repo, tmpname, strip=strip, files=files,
                            eolmode=None, similarity=sim / 100.0)
                files = list(files)
                if opts.get('no_commit'):
                    if message:
                        msgs.append(message)
                else:
                    if opts.get('exact'):
                        m = None
                    else:
                        m = scmutil.matchfiles(repo, files or [])
                    n = repo.commit(message, opts.get('user') or user,
                                    opts.get('date') or date, match=m,
                                    editor=editor)
                    checkexact(repo, n, nodeid)
            else:
                if opts.get('exact') or opts.get('import_branch'):
                    branch = branch or 'default'
                else:
                    branch = p1.branch()
                store = patch.filestore()
                try:
                    files = set()
                    try:
                        patch.patchrepo(ui, repo, p1, store, tmpname, strip,
                                        files, eolmode=None)
                    except patch.PatchError, e:
                        raise util.Abort(str(e))
                    memctx = patch.makememctx(repo, (p1.node(), p2.node()),
                                              message,
                                              opts.get('user') or user,
                                              opts.get('date') or date,
                                              branch, files, store,
                                              editor=cmdutil.commiteditor)
                    repo.savecommitmessage(memctx.description())
                    n = memctx.commit()
                    checkexact(repo, n, nodeid)
                finally:
                    store.close()
            if n:
                # i18n: refers to a short changeset id
                msg = _('created %s') % short(n)
            return (msg, n)
        finally:
            os.unlink(tmpname)

    try:
        try:
            wlock = repo.wlock()
            lock = repo.lock()
            tr = repo.transaction('import')
            parents = repo.parents()
            for patchurl in patches:
                if patchurl == '-':
                    ui.status(_('applying patch from stdin\n'))
                    patchfile = ui.fin
                    patchurl = 'stdin'  # for error message
                else:
                    patchurl = os.path.join(base, patchurl)
                    ui.status(_('applying %s\n') % patchurl)
                    patchfile = url.open(ui, patchurl)

                haspatch = False
                for hunk in patch.split(patchfile):
                    (msg, node) = tryone(ui, hunk, parents)
                    if msg:
                        haspatch = True
                        ui.note(msg + '\n')
                    if update or opts.get('exact'):
                        parents = repo.parents()
                    else:
                        parents = [repo[node]]

                if not haspatch:
                    raise util.Abort(_('%s: no diffs found') % patchurl)

            tr.close()
            if msgs:
                repo.savecommitmessage('\n* * *\n'.join(msgs))
        except:
            # wlock.release() indirectly calls dirstate.write(): since
            # we're crashing, we do not want to change the working dir
            # parent after all, so make sure it writes nothing
            repo.dirstate.invalidate()
            raise
    finally:
        if tr:
            tr.release()
        release(lock, wlock)

@command('incoming|in',
    [('f', 'force', None,
      _('run even if remote repository is unrelated')),
     ('n', 'newest-first', None, _('show newest record first')),
     ('', 'bundle', '',
      _('file to store the bundles into'), _('FILE')),
     ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
     ('B', 'bookmarks', False, _("compare bookmarks")),
     ('b', 'branch', [],
      _('a specific branch you would like to pull'), _('BRANCH')),
    ] + logopts + remoteopts + subrepoopts,
    _('[-p] [-n] [-M] [-f] [-r REV]... [--bundle FILENAME] [SOURCE]'))
def incoming(ui, repo, source="default", **opts):
    """show new changesets found in source

    Show new changesets found in the specified path/URL or the default
    pull location. These are the changesets that would have been pulled
    if a pull had been requested at the time you issued this command.

    For remote repositories, using --bundle avoids downloading the
    changesets twice if the incoming is followed by a pull.

    See pull for valid source format details.

    Returns 0 if there are incoming changes, 1 otherwise.
    """
    if opts.get('bundle') and opts.get('subrepos'):
        raise util.Abort(_('cannot combine --bundle and --subrepos'))

    if opts.get('bookmarks'):
        source, branches = hg.parseurl(ui.expandpath(source),
                                       opts.get('branch'))
        other = hg.peer(repo, opts, source)
        if 'bookmarks' not in other.listkeys('namespaces'):
            ui.warn(_("remote doesn't support bookmarks\n"))
            return 0
        ui.status(_('comparing with %s\n') % util.hidepassword(source))
        return bookmarks.diff(ui, repo, other)

    repo._subtoppath = ui.expandpath(source)
    try:
        return hg.incoming(ui, repo, source, opts)
    finally:
        del repo._subtoppath


@command('^init', remoteopts, _('[-e CMD] [--remotecmd CMD] [DEST]'))
def init(ui, dest=".", **opts):
    """create a new repository in the given directory

    Initialize a new repository in the given directory. If the given
    directory does not exist, it will be created.

    If no directory is given, the current directory is used.

    It is possible to specify an ``ssh://`` URL as the destination.
    See :hg:`help urls` for more information.

    Returns 0 on success.
    """
    hg.peer(ui, opts, ui.expandpath(dest), create=True)

@command('locate',
    [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
     ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
     ('f', 'fullpath', None, _('print complete paths from the filesystem root')),
    ] + walkopts,
    _('[OPTION]... [PATTERN]...'))
def locate(ui, repo, *pats, **opts):
    """locate files matching specific patterns

    Print files under Mercurial control in the working directory whose
    names match the given patterns.

    By default, this command searches all directories in the working
    directory. To search just the current directory and its
    subdirectories, use "--include .".

    If no patterns are given to match, this command prints the names
    of all files under Mercurial control in the working directory.

    If you want to feed the output of this command into the "xargs"
    command, use the -0 option to both this command and "xargs". This
    will avoid the problem of "xargs" treating single filenames that
    contain whitespace as multiple filenames.

    Returns 0 if a match is found, 1 otherwise.
    """
    end = opts.get('print0') and '\0' or '\n'
    rev = scmutil.revsingle(repo, opts.get('rev'), None).node()

    ret = 1
    m = scmutil.match(repo[rev], pats, opts, default='relglob')
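    # silence the matcher's bad-file callback so patterns that match nothing
    # simply produce no output instead of a warning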
    m.bad = lambda x, y: False
    for abs in repo[rev].walk(m):
        if not rev and abs not in repo.dirstate:
            continue
        if opts.get('fullpath'):
            ui.write(repo.wjoin(abs), end)
        else:
            ui.write(((pats and m.rel(abs)) or abs), end)
        ret = 0

    return ret

@command('^log|history',
    [('f', 'follow', None,
      _('follow changeset history, or file history across copies and renames')),
     ('', 'follow-first', None,
      _('only follow the first parent of merge changesets')),
     ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
     ('C', 'copies', None, _('show copied files')),
     ('k', 'keyword', [],
      _('do case-insensitive search for a given text'), _('TEXT')),
     ('r', 'rev', [], _('show the specified revision or range'), _('REV')),
     ('', 'removed', None, _('include revisions where files were removed')),
     ('m', 'only-merges', None, _('show only merges')),
     ('u', 'user', [], _('revisions committed by user'), _('USER')),
     ('', 'only-branch', [],
      _('show only changesets within the given named branch (DEPRECATED)'),
      _('BRANCH')),
     ('b', 'branch', [],
      _('show changesets within the given named branch'), _('BRANCH')),
     ('P', 'prune', [],
      _('do not display revision or any of its ancestors'), _('REV')),
     ('', 'hidden', False, _('show hidden changesets')),
    ] + logopts + walkopts,
    _('[OPTION]... [FILE]'))
def log(ui, repo, *pats, **opts):
    """show revision history of entire repository or files

    Print the revision history of the specified files or the entire
    project.

    If no revision range is specified, the default is ``tip:0`` unless
    --follow is set, in which case the working directory parent is
    used as the starting revision.

    File history is shown without following rename or copy history of
    files. Use -f/--follow with a filename to follow history across
    renames and copies. --follow without a filename will only show
    ancestors or descendants of the starting revision.

    By default this command prints revision number and changeset id,
    tags, non-trivial parents, user, date and time, and a summary for
    each commit. When the -v/--verbose switch is used, the list of
    changed files and full commit message are shown.

    .. note::
       log -p/--patch may generate unexpected diff output for merge
       changesets, as it will only compare the merge changeset against
       its first parent. Also, only files different from BOTH parents
       will appear in files:.

    .. note::
       for performance reasons, log FILE may omit duplicate changes
       made on branches and will not show deletions. To see all
       changes including duplicates and deletions, use the --removed
       switch.

    .. container:: verbose

      Some examples:

      - changesets with full descriptions and file lists::

          hg log -v

      - changesets ancestral to the working directory::

          hg log -f

      - last 10 commits on the current branch::

          hg log -l 10 -b .

      - changesets showing all modifications of a file, including removals::

          hg log --removed file.c

      - all changesets that touch a directory, with diffs, excluding merges::

          hg log -Mp lib/

      - all revision numbers that match a keyword::

          hg log -k bug --template "{rev}\\n"

      - check if a given changeset is included in a tagged release::

          hg log -r "a21ccf and ancestor(1.9)"

      - find all changesets by some user in a date range::

          hg log -k alice -d "may 2008 to jul 2008"

      - summary of all changesets after the last tag::

          hg log -r "last(tagged())::" --template "{desc|firstline}\\n"

    See :hg:`help dates` for a list of formats valid for -d/--date.

    See :hg:`help revisions` and :hg:`help revsets` for more about
    specifying revisions.

    Returns 0 on success.
    """

    matchfn = scmutil.match(repo[None], pats, opts)
    limit = cmdutil.loglimit(opts)
    count = 0

    endrev = None
    if opts.get('copies') and opts.get('rev'):
        endrev = max(scmutil.revrange(repo, opts.get('rev'))) + 1

    df = False
    if opts["date"]:
        df = util.matchdate(opts["date"])

    branches = opts.get('branch', []) + opts.get('only_branch', [])
    opts['branch'] = [repo.lookupbranch(b) for b in branches]

    displayer = cmdutil.show_changeset(ui, repo, opts, True)
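    # prep is called back by walkchangerevs for each candidate changeset; it
    # applies the merge/branch/date/user/keyword filters and hands matching
    # changesets to the displayer.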
    def prep(ctx, fns):
        rev = ctx.rev()
        parents = [p for p in repo.changelog.parentrevs(rev)
                   if p != nullrev]
        if opts.get('no_merges') and len(parents) == 2:
            return
        if opts.get('only_merges') and len(parents) != 2:
            return
        if opts.get('branch') and ctx.branch() not in opts['branch']:
            return
        if not opts.get('hidden') and ctx.hidden():
            return
        if df and not df(ctx.date()[0]):
            return
        if opts['user'] and not [k for k in opts['user']
                                 if k.lower() in ctx.user().lower()]:
            return
        if opts.get('keyword'):
            for k in [kw.lower() for kw in opts['keyword']]:
                if (k in ctx.user().lower() or
                    k in ctx.description().lower() or
                    k in " ".join(ctx.files()).lower()):
                    break
            else:
                return

        copies = None
        if opts.get('copies') and rev:
            copies = []
            getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
            for fn in ctx.files():
                rename = getrenamed(fn, rev)
                if rename:
                    copies.append((fn, rename[0]))

        revmatchfn = None
        if opts.get('patch') or opts.get('stat'):
            if opts.get('follow') or opts.get('follow_first'):
                # note: this might be wrong when following through merges
                revmatchfn = scmutil.match(repo[None], fns, default='path')
            else:
                revmatchfn = matchfn

        displayer.show(ctx, copies=copies, matchfn=revmatchfn)

    for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
        if count == limit:
            break
        if displayer.flush(ctx.rev()):
            count += 1
    displayer.close()

@command('manifest',
    [('r', 'rev', '', _('revision to display'), _('REV')),
     ('', 'all', False, _("list files from all revisions"))],
    _('[-r REV]'))
def manifest(ui, repo, node=None, rev=None, **opts):
    """output the current or given revision of the project manifest

    Print a list of version controlled files for the given revision.
    If no revision is given, the first parent of the working directory
    is used, or the null revision if no revision is checked out.

    With -v, print file permissions, symlink and executable bits.
    With --debug, print file revision hashes.

    If option --all is specified, the list of all files from all revisions
    is printed. This includes deleted and renamed files.

    Returns 0 on success.
    """
    if opts.get('all'):
        if rev or node:
            raise util.Abort(_("can't specify a revision with --all"))

        res = []
        prefix = "data/"
        suffix = ".i"
        plen = len(prefix)
        slen = len(suffix)
        lock = repo.lock()
        try:
            for fn, b, size in repo.store.datafiles():
                if size != 0 and fn[-slen:] == suffix and fn[:plen] == prefix:
                    res.append(fn[plen:-slen])
        finally:
            lock.release()
        for f in sorted(res):
            ui.write("%s\n" % f)
        return

    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    if not node:
        node = rev

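    # flag decorations for -v output: 'l' marks symlinks (@), 'x' marks
    # executables (*); plain files are shown as mode 644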
    decor = {'l':'644 @ ', 'x':'755 * ', '':'644 '}
    ctx = scmutil.revsingle(repo, node)
    for f in ctx:
        if ui.debugflag:
            ui.write("%40s " % hex(ctx.manifest()[f]))
        if ui.verbose:
            ui.write(decor[ctx.flags(f)])
        ui.write("%s\n" % f)

@command('^merge',
    [('f', 'force', None, _('force a merge with outstanding changes')),
     ('r', 'rev', '', _('revision to merge'), _('REV')),
     ('P', 'preview', None,
      _('review revisions to merge (no merge is performed)'))
    ] + mergetoolopts,
    _('[-P] [-f] [[-r] REV]'))
def merge(ui, repo, node=None, **opts):
    """merge working directory with another revision

    The current working directory is updated with all changes made in
    the requested revision since the last common predecessor revision.

    Files that changed between either parent are marked as changed for
    the next commit and a commit must be performed before any further
    updates to the repository are allowed. The next commit will have
    two parents.

    ``--tool`` can be used to specify the merge tool used for file
    merges. It overrides the HGMERGE environment variable and your
    configuration files. See :hg:`help merge-tools` for options.

    If no revision is specified, the working directory's parent is a
    head revision, and the current branch contains exactly one other
    head, the other head is merged with by default. Otherwise, an
    explicit revision with which to merge must be provided.

    :hg:`resolve` must be used to resolve unresolved files.

    To undo an uncommitted merge, use :hg:`update --clean .` which
    will check out a clean copy of the original merge parent, losing
    all changes.

    Returns 0 on success, 1 if there are unresolved files.
    """

    if opts.get('rev') and node:
        raise util.Abort(_("please specify just one revision"))
    if not node:
        node = opts.get('rev')

    if not node:
        branch = repo[None].branch()
        bheads = repo.branchheads(branch)
        if len(bheads) > 2:
            raise util.Abort(_("branch '%s' has %d heads - "
                               "please merge with an explicit rev")
                             % (branch, len(bheads)),
                             hint=_("run 'hg heads .' to see heads"))

        parent = repo.dirstate.p1()
        if len(bheads) == 1:
            if len(repo.heads()) > 1:
                raise util.Abort(_("branch '%s' has one head - "
                                   "please merge with an explicit rev")
                                 % branch,
                                 hint=_("run 'hg heads' to see all heads"))
            msg = _('there is nothing to merge')
            if parent != repo.lookup(repo[None].branch()):
                msg = _('%s - use "hg update" instead') % msg
            raise util.Abort(msg)

        if parent not in bheads:
            raise util.Abort(_('working directory not at a head revision'),
                             hint=_("use 'hg update' or merge with an "
                                    "explicit revision"))
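        # exactly two branch heads are left at this point, and the working
        # directory parent is one of them; pick the other as the default
        # merge target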
        node = parent == bheads[0] and bheads[-1] or bheads[0]
    else:
        node = scmutil.revsingle(repo, node).node()

    if opts.get('preview'):
        # find nodes that are ancestors of p2 but not of p1
        p1 = repo.lookup('.')
        p2 = repo.lookup(node)
        nodes = repo.changelog.findmissing(common=[p1], heads=[p2])

        displayer = cmdutil.show_changeset(ui, repo, opts)
        for node in nodes:
            displayer.show(repo[node])
        displayer.close()
        return 0

    try:
        # ui.forcemerge is an internal variable, do not document
        repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
        return hg.merge(repo, node, force=opts.get('force'))
    finally:
        ui.setconfig('ui', 'forcemerge', '')

@command('outgoing|out',
    [('f', 'force', None, _('run even when the destination is unrelated')),
     ('r', 'rev', [],
      _('a changeset intended to be included in the destination'), _('REV')),
     ('n', 'newest-first', None, _('show newest record first')),
     ('B', 'bookmarks', False, _('compare bookmarks')),
     ('b', 'branch', [], _('a specific branch you would like to push'),
      _('BRANCH')),
    ] + logopts + remoteopts + subrepoopts,
    _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]'))
def outgoing(ui, repo, dest=None, **opts):
    """show changesets not found in the destination

    Show changesets not found in the specified destination repository
    or the default push location. These are the changesets that would
    be pushed if a push was requested.

    See pull for details of valid destination formats.

    Returns 0 if there are outgoing changes, 1 otherwise.
    """

4040 if opts.get('bookmarks'):
4040 if opts.get('bookmarks'):
4041 dest = ui.expandpath(dest or 'default-push', dest or 'default')
4041 dest = ui.expandpath(dest or 'default-push', dest or 'default')
4042 dest, branches = hg.parseurl(dest, opts.get('branch'))
4042 dest, branches = hg.parseurl(dest, opts.get('branch'))
4043 other = hg.peer(repo, opts, dest)
4043 other = hg.peer(repo, opts, dest)
4044 if 'bookmarks' not in other.listkeys('namespaces'):
4044 if 'bookmarks' not in other.listkeys('namespaces'):
4045 ui.warn(_("remote doesn't support bookmarks\n"))
4045 ui.warn(_("remote doesn't support bookmarks\n"))
4046 return 0
4046 return 0
4047 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
4047 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
4048 return bookmarks.diff(ui, other, repo)
4048 return bookmarks.diff(ui, other, repo)
4049
4049
4050 repo._subtoppath = ui.expandpath(dest or 'default-push', dest or 'default')
4050 repo._subtoppath = ui.expandpath(dest or 'default-push', dest or 'default')
4051 try:
4051 try:
4052 return hg.outgoing(ui, repo, dest, opts)
4052 return hg.outgoing(ui, repo, dest, opts)
4053 finally:
4053 finally:
4054 del repo._subtoppath
4054 del repo._subtoppath
4055
4055
@command('parents',
    [('r', 'rev', '', _('show parents of the specified revision'), _('REV')),
    ] + templateopts,
    _('[-r REV] [FILE]'))
def parents(ui, repo, file_=None, **opts):
    """show the parents of the working directory or revision

    Print the working directory's parent revisions. If a revision is
    given via -r/--rev, the parent of that revision will be printed.
    If a file argument is given, the revision in which the file was
    last changed (before the working directory revision or the
    argument to --rev if given) is printed.

    Returns 0 on success.
    """

    ctx = scmutil.revsingle(repo, opts.get('rev'), None)

    if file_:
        m = scmutil.match(ctx, (file_,), opts)
        if m.anypats() or len(m.files()) != 1:
            raise util.Abort(_('can only specify an explicit filename'))
        file_ = m.files()[0]
        filenodes = []
        for cp in ctx.parents():
            if not cp:
                continue
            try:
                filenodes.append(cp.filenode(file_))
            except error.LookupError:
                pass
        if not filenodes:
            raise util.Abort(_("'%s' not found in manifest!") % file_)
        fl = repo.file(file_)
        p = [repo.lookup(fl.linkrev(fl.rev(fn))) for fn in filenodes]
    else:
        p = [cp.node() for cp in ctx.parents()]

    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in p:
        if n != nullid:
            displayer.show(repo[n])
    displayer.close()

@command('paths', [], _('[NAME]'))
def paths(ui, repo, search=None):
    """show aliases for remote repositories

    Show definition of symbolic path name NAME. If no name is given,
    show definition of all available names.

    Option -q/--quiet suppresses all output when searching for NAME
    and shows only the path names when listing all definitions.

    Path names are defined in the [paths] section of your
    configuration file and in ``/etc/mercurial/hgrc``. If run inside a
    repository, ``.hg/hgrc`` is used, too.

    The path names ``default`` and ``default-push`` have a special
    meaning. When performing a push or pull operation, they are used
    as fallbacks if no location is specified on the command-line.
    When ``default-push`` is set, it will be used for push and
    ``default`` will be used for pull; otherwise ``default`` is used
    as the fallback for both. When cloning a repository, the clone
    source is written as ``default`` in ``.hg/hgrc``. Note that
    ``default`` and ``default-push`` apply to all inbound (e.g.
    :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email` and
    :hg:`bundle`) operations.

    See :hg:`help urls` for more information.

    Returns 0 on success.
    """
    if search:
        for name, path in ui.configitems("paths"):
            if name == search:
                ui.status("%s\n" % util.hidepassword(path))
                return
        if not ui.quiet:
            ui.warn(_("not found!\n"))
        return 1
    else:
        for name, path in ui.configitems("paths"):
            if ui.quiet:
                ui.write("%s\n" % name)
            else:
                ui.write("%s = %s\n" % (name, util.hidepassword(path)))

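# Illustrative example, not part of commands.py: the [paths] section the
# command above reports typically looks like this (URLs are invented):
#
#     [paths]
#     default = https://hg.example.com/project
#     default-push = ssh://hg.example.com//srv/project
#
# The default/default-push fallback described in the docstring amounts to
# the following lookup (a minimal sketch; _pickpath is a made-up name):

def _pickpath(paths, pushing):
    """Return the path to use, honouring the default-push fallback.

    >>> _pickpath({'default': 'https://hg.example.com/project'}, pushing=True)
    'https://hg.example.com/project'
    >>> _pickpath({'default': 'u1', 'default-push': 'u2'}, pushing=True)
    'u2'
    """
    if pushing and 'default-push' in paths:
        return paths['default-push']
    return paths.get('default')
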
def postincoming(ui, repo, modheads, optupdate, checkout):
    if modheads == 0:
        return
    if optupdate:
        try:
            return hg.update(repo, checkout)
        except util.Abort, inst:
            ui.warn(_("not updating: %s\n") % str(inst))
            return 0
    if modheads > 1:
        currentbranchheads = len(repo.branchheads())
        if currentbranchheads == modheads:
            ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
        elif currentbranchheads > 1:
            ui.status(_("(run 'hg heads .' to see heads, 'hg merge' to merge)\n"))
        else:
            ui.status(_("(run 'hg heads' to see heads)\n"))
    else:
        ui.status(_("(run 'hg update' to get a working copy)\n"))

@command('^pull',
    [('u', 'update', None,
     _('update to new branch head if changesets were pulled')),
    ('f', 'force', None, _('run even when remote repository is unrelated')),
    ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
    ('B', 'bookmark', [], _("bookmark to pull"), _('BOOKMARK')),
    ('b', 'branch', [], _('a specific branch you would like to pull'),
     _('BRANCH')),
    ] + remoteopts,
    _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]'))
def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source

    Pull changes from a remote repository to a local one.

    This finds all changes from the repository at the specified path
    or URL and adds them to a local repository (the current one unless
    -R is specified). By default, this does not update the copy of the
    project in the working directory.

    Use :hg:`incoming` if you want to see what would have been added
    by a pull at the time you issued this command. If you then decide
    to add those changes to the repository, you should use :hg:`pull
    -r X` where ``X`` is the last changeset listed by :hg:`incoming`.

    If SOURCE is omitted, the 'default' path will be used.
    See :hg:`help urls` for more information.

    Returns 0 on success, 1 if an update had unresolved files.
    """
    source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
    other = hg.peer(repo, opts, source)
    ui.status(_('pulling from %s\n') % util.hidepassword(source))
    revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))

    if opts.get('bookmark'):
        if not revs:
            revs = []
        rb = other.listkeys('bookmarks')
        for b in opts['bookmark']:
            if b not in rb:
                raise util.Abort(_('remote bookmark %s not found!') % b)
            revs.append(rb[b])

    if revs:
        try:
            revs = [other.lookup(rev) for rev in revs]
        except error.CapabilityError:
            err = _("other repository doesn't support revision lookup, "
                    "so a rev cannot be specified.")
            raise util.Abort(err)

    modheads = repo.pull(other, heads=revs, force=opts.get('force'))
    bookmarks.updatefromremote(ui, repo, other)
    if checkout:
        checkout = str(repo.changelog.rev(other.lookup(checkout)))
    repo._subtoppath = source
    try:
        ret = postincoming(ui, repo, modheads, opts.get('update'), checkout)

    finally:
        del repo._subtoppath

    # update specified bookmarks
    if opts.get('bookmark'):
        for b in opts['bookmark']:
            # explicit pull overrides local bookmark if any
            ui.status(_("importing bookmark %s\n") % b)
            repo._bookmarks[b] = repo[rb[b]].node()
        bookmarks.write(repo)

    return ret

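# Illustrative sketch, not part of commands.py: "hg pull -B NAME" above
# resolves each requested bookmark against the remote's 'bookmarks' pushkey
# namespace (other.listkeys('bookmarks')) and aborts on unknown names. The
# same lookup over a plain {bookmark: node} dict (_bookmarkrevs is made up):

def _bookmarkrevs(requested, remotemarks):
    """Map requested bookmark names to remote nodes.

    >>> _bookmarkrevs(['stable'], {'stable': 'abc123', 'tip': 'def456'})
    ['abc123']
    """
    revs = []
    for b in requested:
        if b not in remotemarks:
            raise KeyError('remote bookmark %s not found!' % b)
        revs.append(remotemarks[b])
    return revs
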
@command('^push',
    [('f', 'force', None, _('force push')),
    ('r', 'rev', [],
     _('a changeset intended to be included in the destination'),
     _('REV')),
    ('B', 'bookmark', [], _("bookmark to push"), _('BOOKMARK')),
    ('b', 'branch', [],
     _('a specific branch you would like to push'), _('BRANCH')),
    ('', 'new-branch', False, _('allow pushing a new branch')),
    ] + remoteopts,
    _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]'))
def push(ui, repo, dest=None, **opts):
    """push changes to the specified destination

    Push changesets from the local repository to the specified
    destination.

    This operation is symmetrical to pull: it is identical to a pull
    in the destination repository from the current one.

    By default, push will not allow creation of new heads at the
    destination, since multiple heads would make it unclear which head
    to use. In this situation, it is recommended to pull and merge
    before pushing.

    Use --new-branch if you want to allow push to create a new named
    branch that is not present at the destination. This allows you to
    only create a new branch without forcing other changes.

    Use -f/--force to override the default behavior and push all
    changesets on all branches.

    If -r/--rev is used, the specified revision and all its ancestors
    will be pushed to the remote repository.

    Please see :hg:`help urls` for important details about ``ssh://``
    URLs. If DESTINATION is omitted, a default path will be used.

    Returns 0 if push was successful, 1 if nothing to push.
    """

    if opts.get('bookmark'):
        for b in opts['bookmark']:
            # translate -B options to -r so changesets get pushed
            if b in repo._bookmarks:
                opts.setdefault('rev', []).append(b)
            else:
                # if we try to push a deleted bookmark, translate it to null
                # this lets simultaneous -r, -b options continue working
                opts.setdefault('rev', []).append("null")

    dest = ui.expandpath(dest or 'default-push', dest or 'default')
    dest, branches = hg.parseurl(dest, opts.get('branch'))
    ui.status(_('pushing to %s\n') % util.hidepassword(dest))
    revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
    other = hg.peer(repo, opts, dest)
    if revs:
        revs = [repo.lookup(rev) for rev in revs]

    repo._subtoppath = dest
    try:
        # push subrepos depth-first for coherent ordering
        c = repo['']
        subs = c.substate # only repos that are committed
        for s in sorted(subs):
            if not c.sub(s).push(opts.get('force')):
                return False
    finally:
        del repo._subtoppath
    result = repo.push(other, opts.get('force'), revs=revs,
                       newbranch=opts.get('new_branch'))

    result = (result == 0)

    if opts.get('bookmark'):
        rb = other.listkeys('bookmarks')
        for b in opts['bookmark']:
            # explicit push overrides remote bookmark if any
            if b in repo._bookmarks:
                ui.status(_("exporting bookmark %s\n") % b)
                new = repo[b].hex()
            elif b in rb:
                ui.status(_("deleting remote bookmark %s\n") % b)
                new = '' # delete
            else:
                ui.warn(_('bookmark %s does not exist on the local '
                          'or remote repository!\n') % b)
                return 2
            old = rb.get(b, '')
            r = other.pushkey('bookmarks', b, old, new)
            if not r:
                ui.warn(_('updating bookmark %s failed!\n') % b)
                if not result:
                    result = 2

    return result

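# Illustrative sketch, not part of commands.py: push -B is folded into -r
# above -- a bookmark known locally contributes its own name as a rev to
# push, while a locally deleted bookmark contributes "null" so the deletion
# still reaches the remote. A standalone rendering (_bookmarktorevs is a
# made-up name):

def _bookmarktorevs(requested, localmarks):
    """Return the revs that -B options would add to the push.

    >>> _bookmarktorevs(['keep', 'gone'], {'keep': 'abc123'})
    ['keep', 'null']
    """
    revs = []
    for b in requested:
        revs.append(b if b in localmarks else 'null')
    return revs
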
@command('recover', [])
def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an
    interrupted operation. It should only be necessary when Mercurial
    suggests it.

    Returns 0 if successful, 1 if nothing to recover or verify fails.
    """
    if repo.recover():
        return hg.verify(repo)
    return 1

@command('^remove|rm',
    [('A', 'after', None, _('record delete for missing files')),
    ('f', 'force', None,
     _('remove (and delete) file even if added or modified')),
    ] + walkopts,
    _('[OPTION]... FILE...'))
def remove(ui, repo, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the current branch.

    This command schedules the files to be removed at the next commit.
    To undo a remove before that, see :hg:`revert`. To undo added
    files, see :hg:`forget`.

    .. container:: verbose

      -A/--after can be used to remove only files that have already
      been deleted, -f/--force can be used to force deletion, and -Af
      can be used to remove files from the next revision without
      deleting them from the working directory.

      The following table details the behavior of remove for different
      file states (columns) and option combinations (rows). The file
      states are Added [A], Clean [C], Modified [M] and Missing [!]
      (as reported by :hg:`status`). The actions are Warn, Remove
      (from branch) and Delete (from disk):

      ======= == == == ==
              A  C  M  !
      ======= == == == ==
      none    W  RD W  R
      -f      R  RD RD R
      -A      W  W  W  R
      -Af     R  R  R  R
      ======= == == == ==

      Note that remove never deletes files in Added [A] state from the
      working directory, not even if option --force is specified.

    Returns 0 on success, 1 if any warnings encountered.
    """

    ret = 0
    after, force = opts.get('after'), opts.get('force')
    if not pats and not after:
        raise util.Abort(_('no files specified'))

    m = scmutil.match(repo[None], pats, opts)
    s = repo.status(match=m, clean=True)
    modified, added, deleted, clean = s[0], s[1], s[3], s[6]

    for f in m.files():
        if f not in repo.dirstate and not os.path.isdir(m.rel(f)):
            if os.path.exists(m.rel(f)):
                ui.warn(_('not removing %s: file is untracked\n') % m.rel(f))
            ret = 1

    if force:
        list = modified + deleted + clean + added
    elif after:
        list = deleted
        for f in modified + added + clean:
            ui.warn(_('not removing %s: file still exists (use -f'
                      ' to force removal)\n') % m.rel(f))
            ret = 1
    else:
        list = deleted + clean
        for f in modified:
            ui.warn(_('not removing %s: file is modified (use -f'
                      ' to force removal)\n') % m.rel(f))
            ret = 1
        for f in added:
            ui.warn(_('not removing %s: file has been marked for add'
                      ' (use forget to undo)\n') % m.rel(f))
            ret = 1

    for f in sorted(list):
        if ui.verbose or not m.exact(f):
            ui.status(_('removing %s\n') % m.rel(f))

    wlock = repo.wlock()
    try:
        if not after:
            for f in list:
                if f in added:
                    continue # we never unlink added files on remove
                try:
                    util.unlinkpath(repo.wjoin(f))
                except OSError, inst:
                    if inst.errno != errno.ENOENT:
                        raise
        repo[None].forget(list)
    finally:
        wlock.release()

    return ret

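# Illustrative sketch, not part of commands.py: the docstring table above is
# a lookup from (option row, file state column) to the Warn/Remove/Delete
# actions. A direct encoding of that table (_REMOVE_TABLE and _removeaction
# are made-up names):

_REMOVE_TABLE = {
    #        A     C     M     !
    'none': ('W',  'RD', 'W',  'R'),
    '-f':   ('R',  'RD', 'RD', 'R'),
    '-A':   ('W',  'W',  'W',  'R'),
    '-Af':  ('R',  'R',  'R',  'R'),
}

def _removeaction(row, state):
    """Return the action for an option row and a file state letter.

    >>> _removeaction('none', 'C')
    'RD'
    >>> _removeaction('-A', 'M')
    'W'
    """
    return _REMOVE_TABLE[row]['ACM!'.index(state)]
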
@command('rename|move|mv',
    [('A', 'after', None, _('record a rename that has already occurred')),
    ('f', 'force', None, _('forcibly copy over an existing managed file')),
    ] + walkopts + dryrunopts,
    _('[OPTION]... SOURCE... DEST'))
def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion. If dest
    is a directory, copies are put in that directory. If dest is a
    file, there can only be one source.

    By default, this command copies the contents of files as they
    exist in the working directory. If invoked with -A/--after, the
    operation is recorded, but no copying is performed.

    This command takes effect at the next commit. To undo a rename
    before that, see :hg:`revert`.

    Returns 0 on success, 1 if errors are encountered.
    """
    wlock = repo.wlock(False)
    try:
        return cmdutil.copy(ui, repo, pats, opts, rename=True)
    finally:
        wlock.release()

@command('resolve',
    [('a', 'all', None, _('select all unresolved files')),
    ('l', 'list', None, _('list state of files needing merge')),
    ('m', 'mark', None, _('mark files as resolved')),
    ('u', 'unmark', None, _('mark files as unresolved')),
    ('n', 'no-status', None, _('hide status prefix'))]
    + mergetoolopts + walkopts,
    _('[OPTION]... [FILE]...'))
def resolve(ui, repo, *pats, **opts):
    """redo merges or set/view the merge status of files

    Merges with unresolved conflicts are often the result of
    non-interactive merging using the ``internal:merge`` configuration
    setting, or a command-line merge tool like ``diff3``. The resolve
    command is used to manage the files involved in a merge, after
    :hg:`merge` has been run, and before :hg:`commit` is run (i.e. the
    working directory must have two parents).

    The resolve command can be used in the following ways:

    - :hg:`resolve [--tool TOOL] FILE...`: attempt to re-merge the specified
      files, discarding any previous merge attempts. Re-merging is not
      performed for files already marked as resolved. Use ``--all/-a``
      to select all unresolved files. ``--tool`` can be used to specify
      the merge tool used for the given files. It overrides the HGMERGE
      environment variable and your configuration files. Previous file
      contents are saved with a ``.orig`` suffix.

    - :hg:`resolve -m [FILE]`: mark a file as having been resolved
      (e.g. after having manually fixed-up the files). The default is
      to mark all unresolved files.

    - :hg:`resolve -u [FILE]...`: mark a file as unresolved. The
      default is to mark all resolved files.

    - :hg:`resolve -l`: list files which had or still have conflicts.
      In the printed list, ``U`` = unresolved and ``R`` = resolved.

    Note that Mercurial will not let you commit files with unresolved
    merge conflicts. You must use :hg:`resolve -m ...` before you can
    commit after a conflicting merge.

    Returns 0 on success, 1 if any files fail a resolve attempt.
    """

    all, mark, unmark, show, nostatus = \
        [opts.get(o) for o in 'all mark unmark list no_status'.split()]

    if (show and (mark or unmark)) or (mark and unmark):
        raise util.Abort(_("too many options specified"))
    if pats and all:
        raise util.Abort(_("can't specify --all and patterns"))
    if not (all or pats or show or mark or unmark):
        raise util.Abort(_('no files or directories specified; '
                           'use --all to remerge all files'))

    ms = mergemod.mergestate(repo)
    m = scmutil.match(repo[None], pats, opts)
    ret = 0

    for f in ms:
        if m(f):
            if show:
                if nostatus:
                    ui.write("%s\n" % f)
                else:
                    ui.write("%s %s\n" % (ms[f].upper(), f),
                             label='resolve.' +
                             {'u': 'unresolved', 'r': 'resolved'}[ms[f]])
            elif mark:
                ms.mark(f, "r")
            elif unmark:
                ms.mark(f, "u")
            else:
                wctx = repo[None]
                mctx = wctx.parents()[-1]

                # backup pre-resolve (merge uses .orig for its own purposes)
                a = repo.wjoin(f)
                util.copyfile(a, a + ".resolve")

                try:
                    # resolve file
                    ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
                    if ms.resolve(f, wctx, mctx):
                        ret = 1
                finally:
                    ui.setconfig('ui', 'forcemerge', '')

                # replace filemerge's .orig file with our resolve file
                util.rename(a + ".resolve", a + ".orig")

    ms.commit()
    return ret

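# Illustrative sketch, not part of commands.py: re-merging above protects
# the user's pre-resolve file by copying it to FILE.resolve first, letting
# the merge tool write its own FILE.orig, then renaming FILE.resolve over
# FILE.orig so the pre-resolve contents win as the backup. A standalone
# version of that dance (names are made up; remergefn does the merge):

import os, shutil

def _remergewithbackup(path, remergefn):
    backup = path + '.resolve'
    shutil.copyfile(path, backup)      # save pre-resolve contents
    unresolved = remergefn(path)       # merge tool may rewrite path and path.orig
    os.rename(backup, path + '.orig')  # keep our copy as the .orig backup
    return unresolved
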
@command('revert',
    [('a', 'all', None, _('revert all changes when no arguments given')),
    ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
    ('r', 'rev', '', _('revert to the specified revision'), _('REV')),
    ('C', 'no-backup', None, _('do not save backup copies of files')),
    ] + walkopts + dryrunopts,
    _('[OPTION]... [-r REV] [NAME]...'))
def revert(ui, repo, *pats, **opts):
    """restore files to their checkout state

    .. note::
       To check out earlier revisions, you should use :hg:`update REV`.
       To cancel a merge (and lose your changes), use :hg:`update --clean .`.

    With no revision specified, revert the specified files or directories
    to the contents they had in the parent of the working directory.
    This restores the contents of files to an unmodified
    state and unschedules adds, removes, copies, and renames. If the
    working directory has two parents, you must explicitly specify a
    revision.

    Using the -r/--rev or -d/--date options, revert the given files or
    directories to their states as of a specific revision. Because
    revert does not change the working directory parents, this will
    cause these files to appear modified. This can be helpful to "back
    out" some or all of an earlier change. See :hg:`backout` for a
    related method.

    Modified files are saved with a .orig suffix before reverting.
    To disable these backups, use --no-backup.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    Returns 0 on success.
    """

    if opts.get("date"):
        if opts.get("rev"):
            raise util.Abort(_("you can't specify a revision and a date"))
        opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])

    parent, p2 = repo.dirstate.parents()
    if not opts.get('rev') and p2 != nullid:
        # revert after merge is a trap for new users (issue2915)
        raise util.Abort(_('uncommitted merge with no revision specified'),
                         hint=_('use "hg update" or see "hg help revert"'))

    ctx = scmutil.revsingle(repo, opts.get('rev'))
    node = ctx.node()

    if not pats and not opts.get('all'):
        msg = _("no files or directories specified")
        if p2 != nullid:
            hint = _("uncommitted merge, use --all to discard all changes,"
                     " or 'hg update -C .' to abort the merge")
            raise util.Abort(msg, hint=hint)
        dirty = util.any(repo.status())
        if node != parent:
            if dirty:
                hint = _("uncommitted changes, use --all to discard all"
                         " changes, or 'hg update %s' to update") % ctx.rev()
            else:
                hint = _("use --all to revert all files,"
                         " or 'hg update %s' to update") % ctx.rev()
        elif dirty:
            hint = _("uncommitted changes, use --all to discard all changes")
        else:
            hint = _("use --all to revert all files")
        raise util.Abort(msg, hint=hint)

    mf = ctx.manifest()
    if node == parent:
        pmf = mf
    else:
        pmf = None

    # need all matching names in dirstate and manifest of target rev,
    # so have to walk both. do not print errors if files exist in one
    # but not other.

    names = {}

    wlock = repo.wlock()
    try:
        # walk dirstate.

        m = scmutil.match(repo[None], pats, opts)
        m.bad = lambda x, y: False
        for abs in repo.walk(m):
            names[abs] = m.rel(abs), m.exact(abs)

        # walk target manifest.

        def badfn(path, msg):
            if path in names:
                return
            if path in repo[node].substate:
                ui.warn("%s: %s\n" % (m.rel(path),
                        'reverting subrepos is unsupported'))
                return
            path_ = path + '/'
            for f in names:
                if f.startswith(path_):
                    return
            ui.warn("%s: %s\n" % (m.rel(path), msg))

        m = scmutil.match(repo[node], pats, opts)
        m.bad = badfn
        for abs in repo[node].walk(m):
            if abs not in names:
                names[abs] = m.rel(abs), m.exact(abs)

        m = scmutil.matchfiles(repo, names)
        changes = repo.status(match=m)[:4]
        modified, added, removed, deleted = map(set, changes)

        # if f is a rename, also revert the source
        cwd = repo.getcwd()
        for f in added:
            src = repo.dirstate.copied(f)
            if src and src not in names and repo.dirstate[src] == 'r':
                removed.add(src)
                names[src] = (repo.pathto(src, cwd), True)

        def removeforget(abs):
            if repo.dirstate[abs] == 'a':
                return _('forgetting %s\n')
            return _('removing %s\n')

        revert = ([], _('reverting %s\n'))
        add = ([], _('adding %s\n'))
        remove = ([], removeforget)
        undelete = ([], _('undeleting %s\n'))

        disptable = (
            # dispatch table:
            #   file state
            #   action if in target manifest
            #   action if not in target manifest
            #   make backup if in target manifest
            #   make backup if not in target manifest
            (modified, revert, remove, True, True),
            (added, revert, remove, True, False),
            (removed, undelete, None, False, False),
            (deleted, revert, remove, False, False),
            )

        for abs, (rel, exact) in sorted(names.items()):
            mfentry = mf.get(abs)
            target = repo.wjoin(abs)
            def handle(xlist, dobackup):
                xlist[0].append(abs)
                if (dobackup and not opts.get('no_backup') and
                    os.path.lexists(target)):
                    bakname = "%s.orig" % rel
                    ui.note(_('saving current version of %s as %s\n') %
                            (rel, bakname))
                    if not opts.get('dry_run'):
                        util.rename(target, bakname)
                if ui.verbose or not exact:
                    msg = xlist[1]
                    if not isinstance(msg, basestring):
                        msg = msg(abs)
                    ui.status(msg % rel)
            for table, hitlist, misslist, backuphit, backupmiss in disptable:
                if abs not in table:
                    continue
                # file has changed in dirstate
                if mfentry:
                    handle(hitlist, backuphit)
                elif misslist is not None:
                    handle(misslist, backupmiss)
                break
            else:
                if abs not in repo.dirstate:
                    if mfentry:
                        handle(add, True)
                    elif exact:
                        ui.warn(_('file not managed: %s\n') % rel)
                    continue
                # file has not changed in dirstate
                if node == parent:
                    if exact:
                        ui.warn(_('no changes needed to %s\n') % rel)
                    continue
                if pmf is None:
                    # only need parent manifest in this unlikely case,
                    # so do not read by default
                    pmf = repo[parent].manifest()
                if abs in pmf and mfentry:
                    # if version of file is same in parent and target
                    # manifests, do nothing
                    if (pmf[abs] != mfentry or
                        pmf.flags(abs) != mf.flags(abs)):
                        handle(revert, False)
                else:
                    handle(remove, False)

        if not opts.get('dry_run'):
            def checkout(f):
                fc = ctx[f]
                repo.wwrite(f, fc.data(), fc.flags())

            audit_path = scmutil.pathauditor(repo.root)
            for f in remove[0]:
                if repo.dirstate[f] == 'a':
                    repo.dirstate.drop(f)
                    continue
                audit_path(f)
                try:
                    util.unlinkpath(repo.wjoin(f))
                except OSError:
                    pass
                repo.dirstate.remove(f)

            normal = None
            if node == parent:
                # We're reverting to our parent. If possible, we'd like status
                # to report the file as clean. We have to use normallookup for
                # merges to avoid losing information about merged/dirty files.
                if p2 != nullid:
                    normal = repo.dirstate.normallookup
                else:
                    normal = repo.dirstate.normal
            for f in revert[0]:
                checkout(f)
                if normal:
                    normal(f)

            for f in add[0]:
                checkout(f)
                repo.dirstate.add(f)

            normal = repo.dirstate.normallookup
            if node == parent and p2 == nullid:
                normal = repo.dirstate.normal
            for f in undelete[0]:
                checkout(f)
                normal(f)

    finally:
        wlock.release()

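# Illustrative sketch, not part of commands.py: disptable above maps a
# file's status bucket to (action if the file exists in the target
# manifest, action if it does not, backup flags). A stripped-down,
# standalone rendering of the same dispatch-table pattern, ignoring
# backups (_revertaction is a made-up name):

def _revertaction(status, intarget):
    """Return what revert would do for a file in the given status bucket.

    >>> _revertaction('modified', True)
    'revert'
    >>> _revertaction('removed', False)
    'noop'
    """
    disptable = {
        # status:   (if in target manifest, if not in target manifest)
        'modified': ('revert',   'remove'),
        'added':    ('revert',   'remove'),
        'removed':  ('undelete', 'noop'),
        'deleted':  ('revert',   'remove'),
    }
    hit, miss = disptable[status]
    return hit if intarget else miss
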
4813 @command('rollback', dryrunopts +
4813 @command('rollback', dryrunopts +
4814 [('f', 'force', False, _('ignore safety measures'))])
4814 [('f', 'force', False, _('ignore safety measures'))])
4815 def rollback(ui, repo, **opts):
4815 def rollback(ui, repo, **opts):
4816 """roll back the last transaction (dangerous)
4816 """roll back the last transaction (dangerous)
4817
4817
4818 This command should be used with care. There is only one level of
4818 This command should be used with care. There is only one level of
4819 rollback, and there is no way to undo a rollback. It will also
4819 rollback, and there is no way to undo a rollback. It will also
4820 restore the dirstate at the time of the last transaction, losing
4820 restore the dirstate at the time of the last transaction, losing
4821 any dirstate changes since that time. This command does not alter
4821 any dirstate changes since that time. This command does not alter
4822 the working directory.
4822 the working directory.
4823
4823
4824 Transactions are used to encapsulate the effects of all commands
4824 Transactions are used to encapsulate the effects of all commands
4825 that create new changesets or propagate existing changesets into a
4825 that create new changesets or propagate existing changesets into a
4826 repository. For example, the following commands are transactional,
4826 repository. For example, the following commands are transactional,
4827 and their effects can be rolled back:
4827 and their effects can be rolled back:
4828
4828
4829 - commit
4829 - commit
4830 - import
4830 - import
4831 - pull
4831 - pull
4832 - push (with this repository as the destination)
4832 - push (with this repository as the destination)
4833 - unbundle
4833 - unbundle
4834
4834
4835 It's possible to lose data with rollback: commit, update back to
4835 It's possible to lose data with rollback: commit, update back to
4836 an older changeset, and then rollback. The update removes the
4836 an older changeset, and then rollback. The update removes the
4837 changes you committed from the working directory, and rollback
4837 changes you committed from the working directory, and rollback
4838 removes them from history. To avoid data loss, you must pass
4838 removes them from history. To avoid data loss, you must pass
4839 --force in this case.
4839 --force in this case.
4840
4840
4841 This command is not intended for use on public repositories. Once
4841 This command is not intended for use on public repositories. Once
4842 changes are visible for pull by other users, rolling a transaction
4842 changes are visible for pull by other users, rolling a transaction
4843 back locally is ineffective (someone else may already have pulled
4843 back locally is ineffective (someone else may already have pulled
4844 the changes). Furthermore, a race is possible with readers of the
4844 the changes). Furthermore, a race is possible with readers of the
4845 repository; for example an in-progress pull from the repository
4845 repository; for example an in-progress pull from the repository
4846 may fail if a rollback is performed.
4846 may fail if a rollback is performed.
4847
4847
4848 Returns 0 on success, 1 if no rollback data is available.
4848 Returns 0 on success, 1 if no rollback data is available.
4849 """
4849 """
4850 return repo.rollback(dryrun=opts.get('dry_run'),
4850 return repo.rollback(dryrun=opts.get('dry_run'),
4851 force=opts.get('force'))
4851 force=opts.get('force'))
4852
4852
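# --- illustrative sketch (not part of commands.py) --------------------------
# The rollback command above is a thin wrapper around repo.rollback(); a dry
# run reports what would be undone without modifying the repository.  `repo`
# is assumed to be an already-open local repository object.
def preview_rollback(repo):
    # 0 on success, 1 if no rollback data is available (same contract as above)
    return repo.rollback(dryrun=True, force=False)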
4853 @command('root', [])
4853 @command('root', [])
4854 def root(ui, repo):
4854 def root(ui, repo):
4855 """print the root (top) of the current working directory
4855 """print the root (top) of the current working directory
4856
4856
4857 Print the root directory of the current repository.
4857 Print the root directory of the current repository.
4858
4858
4859 Returns 0 on success.
4859 Returns 0 on success.
4860 """
4860 """
4861 ui.write(repo.root + "\n")
4861 ui.write(repo.root + "\n")
4862
4862
4863 @command('^serve',
4863 @command('^serve',
4864 [('A', 'accesslog', '', _('name of access log file to write to'),
4864 [('A', 'accesslog', '', _('name of access log file to write to'),
4865 _('FILE')),
4865 _('FILE')),
4866 ('d', 'daemon', None, _('run server in background')),
4866 ('d', 'daemon', None, _('run server in background')),
4867 ('', 'daemon-pipefds', '', _('used internally by daemon mode'), _('NUM')),
4867 ('', 'daemon-pipefds', '', _('used internally by daemon mode'), _('NUM')),
4868 ('E', 'errorlog', '', _('name of error log file to write to'), _('FILE')),
4868 ('E', 'errorlog', '', _('name of error log file to write to'), _('FILE')),
4869 # use string type, then we can check if something was passed
4869 # use string type, then we can check if something was passed
4870 ('p', 'port', '', _('port to listen on (default: 8000)'), _('PORT')),
4870 ('p', 'port', '', _('port to listen on (default: 8000)'), _('PORT')),
4871 ('a', 'address', '', _('address to listen on (default: all interfaces)'),
4871 ('a', 'address', '', _('address to listen on (default: all interfaces)'),
4872 _('ADDR')),
4872 _('ADDR')),
4873 ('', 'prefix', '', _('prefix path to serve from (default: server root)'),
4873 ('', 'prefix', '', _('prefix path to serve from (default: server root)'),
4874 _('PREFIX')),
4874 _('PREFIX')),
4875 ('n', 'name', '',
4875 ('n', 'name', '',
4876 _('name to show in web pages (default: working directory)'), _('NAME')),
4876 _('name to show in web pages (default: working directory)'), _('NAME')),
4877 ('', 'web-conf', '',
4877 ('', 'web-conf', '',
4878 _('name of the hgweb config file (see "hg help hgweb")'), _('FILE')),
4878 _('name of the hgweb config file (see "hg help hgweb")'), _('FILE')),
4879 ('', 'webdir-conf', '', _('name of the hgweb config file (DEPRECATED)'),
4879 ('', 'webdir-conf', '', _('name of the hgweb config file (DEPRECATED)'),
4880 _('FILE')),
4880 _('FILE')),
4881 ('', 'pid-file', '', _('name of file to write process ID to'), _('FILE')),
4881 ('', 'pid-file', '', _('name of file to write process ID to'), _('FILE')),
4882 ('', 'stdio', None, _('for remote clients')),
4882 ('', 'stdio', None, _('for remote clients')),
4883 ('', 'cmdserver', '', _('for remote clients'), _('MODE')),
4883 ('', 'cmdserver', '', _('for remote clients'), _('MODE')),
4884 ('t', 'templates', '', _('web templates to use'), _('TEMPLATE')),
4884 ('t', 'templates', '', _('web templates to use'), _('TEMPLATE')),
4885 ('', 'style', '', _('template style to use'), _('STYLE')),
4885 ('', 'style', '', _('template style to use'), _('STYLE')),
4886 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
4886 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
4887 ('', 'certificate', '', _('SSL certificate file'), _('FILE'))],
4887 ('', 'certificate', '', _('SSL certificate file'), _('FILE'))],
4888 _('[OPTION]...'))
4888 _('[OPTION]...'))
4889 def serve(ui, repo, **opts):
4889 def serve(ui, repo, **opts):
4890 """start stand-alone webserver
4890 """start stand-alone webserver
4891
4891
4892 Start a local HTTP repository browser and pull server. You can use
4892 Start a local HTTP repository browser and pull server. You can use
4893 this for ad-hoc sharing and browsing of repositories. It is
4893 this for ad-hoc sharing and browsing of repositories. It is
4894 recommended to use a real web server to serve a repository for
4894 recommended to use a real web server to serve a repository for
4895 longer periods of time.
4895 longer periods of time.
4896
4896
4897 Please note that the server does not implement access control.
4897 Please note that the server does not implement access control.
4898 This means that, by default, anybody can read from the server and
4898 This means that, by default, anybody can read from the server and
4899 nobody can write to it. Set the ``web.allow_push``
4899 nobody can write to it. Set the ``web.allow_push``
4900 option to ``*`` to allow everybody to push to the server. You
4900 option to ``*`` to allow everybody to push to the server. You
4901 should use a real web server if you need to authenticate users.
4901 should use a real web server if you need to authenticate users.
4902
4902
4903 By default, the server logs accesses to stdout and errors to
4903 By default, the server logs accesses to stdout and errors to
4904 stderr. Use the -A/--accesslog and -E/--errorlog options to log to
4904 stderr. Use the -A/--accesslog and -E/--errorlog options to log to
4905 files.
4905 files.
4906
4906
4907 To have the server choose a free port number to listen on, specify
4907 To have the server choose a free port number to listen on, specify
4908 a port number of 0; in this case, the server will print the port
4908 a port number of 0; in this case, the server will print the port
4909 number it uses.
4909 number it uses.
4910
4910
4911 Returns 0 on success.
4911 Returns 0 on success.
4912 """
4912 """
4913
4913
4914 if opts["stdio"] and opts["cmdserver"]:
4914 if opts["stdio"] and opts["cmdserver"]:
4915 raise util.Abort(_("cannot use --stdio with --cmdserver"))
4915 raise util.Abort(_("cannot use --stdio with --cmdserver"))
4916
4916
4917 def checkrepo():
4917 def checkrepo():
4918 if repo is None:
4918 if repo is None:
4919 raise error.RepoError(_("There is no Mercurial repository here"
4919 raise error.RepoError(_("There is no Mercurial repository here"
4920 " (.hg not found)"))
4920 " (.hg not found)"))
4921
4921
4922 if opts["stdio"]:
4922 if opts["stdio"]:
4923 checkrepo()
4923 checkrepo()
4924 s = sshserver.sshserver(ui, repo)
4924 s = sshserver.sshserver(ui, repo)
4925 s.serve_forever()
4925 s.serve_forever()
4926
4926
4927 if opts["cmdserver"]:
4927 if opts["cmdserver"]:
4928 checkrepo()
4928 checkrepo()
4929 s = commandserver.server(ui, repo, opts["cmdserver"])
4929 s = commandserver.server(ui, repo, opts["cmdserver"])
4930 return s.serve()
4930 return s.serve()
4931
4931
4932 # this way we can check if something was given in the command-line
4932 # this way we can check if something was given in the command-line
4933 if opts.get('port'):
4933 if opts.get('port'):
4934 opts['port'] = util.getport(opts.get('port'))
4934 opts['port'] = util.getport(opts.get('port'))
4935
4935
4936 baseui = repo and repo.baseui or ui
4936 baseui = repo and repo.baseui or ui
4937 optlist = ("name templates style address port prefix ipv6"
4937 optlist = ("name templates style address port prefix ipv6"
4938 " accesslog errorlog certificate encoding")
4938 " accesslog errorlog certificate encoding")
4939 for o in optlist.split():
4939 for o in optlist.split():
4940 val = opts.get(o, '')
4940 val = opts.get(o, '')
4941 if val in (None, ''): # should check against default options instead
4941 if val in (None, ''): # should check against default options instead
4942 continue
4942 continue
4943 baseui.setconfig("web", o, val)
4943 baseui.setconfig("web", o, val)
4944 if repo and repo.ui != baseui:
4944 if repo and repo.ui != baseui:
4945 repo.ui.setconfig("web", o, val)
4945 repo.ui.setconfig("web", o, val)
4946
4946
4947 o = opts.get('web_conf') or opts.get('webdir_conf')
4947 o = opts.get('web_conf') or opts.get('webdir_conf')
4948 if not o:
4948 if not o:
4949 if not repo:
4949 if not repo:
4950 raise error.RepoError(_("There is no Mercurial repository"
4950 raise error.RepoError(_("There is no Mercurial repository"
4951 " here (.hg not found)"))
4951 " here (.hg not found)"))
4952 o = repo.root
4952 o = repo.root
4953
4953
4954 app = hgweb.hgweb(o, baseui=ui)
4954 app = hgweb.hgweb(o, baseui=ui)
4955
4955
4956 class service(object):
4956 class service(object):
4957 def init(self):
4957 def init(self):
4958 util.setsignalhandler()
4958 util.setsignalhandler()
4959 self.httpd = hgweb.server.create_server(ui, app)
4959 self.httpd = hgweb.server.create_server(ui, app)
4960
4960
4961 if opts['port'] and not ui.verbose:
4961 if opts['port'] and not ui.verbose:
4962 return
4962 return
4963
4963
4964 if self.httpd.prefix:
4964 if self.httpd.prefix:
4965 prefix = self.httpd.prefix.strip('/') + '/'
4965 prefix = self.httpd.prefix.strip('/') + '/'
4966 else:
4966 else:
4967 prefix = ''
4967 prefix = ''
4968
4968
4969 port = ':%d' % self.httpd.port
4969 port = ':%d' % self.httpd.port
4970 if port == ':80':
4970 if port == ':80':
4971 port = ''
4971 port = ''
4972
4972
4973 bindaddr = self.httpd.addr
4973 bindaddr = self.httpd.addr
4974 if bindaddr == '0.0.0.0':
4974 if bindaddr == '0.0.0.0':
4975 bindaddr = '*'
4975 bindaddr = '*'
4976 elif ':' in bindaddr: # IPv6
4976 elif ':' in bindaddr: # IPv6
4977 bindaddr = '[%s]' % bindaddr
4977 bindaddr = '[%s]' % bindaddr
4978
4978
4979 fqaddr = self.httpd.fqaddr
4979 fqaddr = self.httpd.fqaddr
4980 if ':' in fqaddr:
4980 if ':' in fqaddr:
4981 fqaddr = '[%s]' % fqaddr
4981 fqaddr = '[%s]' % fqaddr
4982 if opts['port']:
4982 if opts['port']:
4983 write = ui.status
4983 write = ui.status
4984 else:
4984 else:
4985 write = ui.write
4985 write = ui.write
4986 write(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
4986 write(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
4987 (fqaddr, port, prefix, bindaddr, self.httpd.port))
4987 (fqaddr, port, prefix, bindaddr, self.httpd.port))
4988
4988
4989 def run(self):
4989 def run(self):
4990 self.httpd.serve_forever()
4990 self.httpd.serve_forever()
4991
4991
4992 service = service()
4992 service = service()
4993
4993
4994 cmdutil.service(opts, initfn=service.init, runfn=service.run)
4994 cmdutil.service(opts, initfn=service.init, runfn=service.run)
4995
4995
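# --- illustrative sketch (not part of commands.py) --------------------------
# The optlist loop above copies selected command-line options into the [web]
# configuration section so hgweb picks them up.  A trimmed-down rendering of
# that mapping, with `myui` standing in for a ui instance:
def apply_web_opts(myui, opts):
    for o in ("name", "port", "prefix", "accesslog", "errorlog"):
        val = opts.get(o, '')
        if val in (None, ''):   # unset options keep their configured defaults
            continue
        myui.setconfig("web", o, val)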
4996 @command('showconfig|debugconfig',
4996 @command('showconfig|debugconfig',
4997 [('u', 'untrusted', None, _('show untrusted configuration options'))],
4997 [('u', 'untrusted', None, _('show untrusted configuration options'))],
4998 _('[-u] [NAME]...'))
4998 _('[-u] [NAME]...'))
4999 def showconfig(ui, repo, *values, **opts):
4999 def showconfig(ui, repo, *values, **opts):
5000 """show combined config settings from all hgrc files
5000 """show combined config settings from all hgrc files
5001
5001
5002 With no arguments, print names and values of all config items.
5002 With no arguments, print names and values of all config items.
5003
5003
5004 With one argument of the form section.name, print just the value
5004 With one argument of the form section.name, print just the value
5005 of that config item.
5005 of that config item.
5006
5006
5007 With multiple arguments, print names and values of all config
5007 With multiple arguments, print names and values of all config
5008 items with matching section names.
5008 items with matching section names.
5009
5009
5010 With --debug, the source (filename and line number) is printed
5010 With --debug, the source (filename and line number) is printed
5011 for each config item.
5011 for each config item.
5012
5012
5013 Returns 0 on success.
5013 Returns 0 on success.
5014 """
5014 """
5015
5015
5016 for f in scmutil.rcpath():
5016 for f in scmutil.rcpath():
5017 ui.debug('read config from: %s\n' % f)
5017 ui.debug('read config from: %s\n' % f)
5018 untrusted = bool(opts.get('untrusted'))
5018 untrusted = bool(opts.get('untrusted'))
5019 if values:
5019 if values:
5020 sections = [v for v in values if '.' not in v]
5020 sections = [v for v in values if '.' not in v]
5021 items = [v for v in values if '.' in v]
5021 items = [v for v in values if '.' in v]
5022 if len(items) > 1 or items and sections:
5022 if len(items) > 1 or items and sections:
5023 raise util.Abort(_('only one config item permitted'))
5023 raise util.Abort(_('only one config item permitted'))
5024 for section, name, value in ui.walkconfig(untrusted=untrusted):
5024 for section, name, value in ui.walkconfig(untrusted=untrusted):
5025 value = str(value).replace('\n', '\\n')
5025 value = str(value).replace('\n', '\\n')
5026 sectname = section + '.' + name
5026 sectname = section + '.' + name
5027 if values:
5027 if values:
5028 for v in values:
5028 for v in values:
5029 if v == section:
5029 if v == section:
5030 ui.debug('%s: ' %
5030 ui.debug('%s: ' %
5031 ui.configsource(section, name, untrusted))
5031 ui.configsource(section, name, untrusted))
5032 ui.write('%s=%s\n' % (sectname, value))
5032 ui.write('%s=%s\n' % (sectname, value))
5033 elif v == sectname:
5033 elif v == sectname:
5034 ui.debug('%s: ' %
5034 ui.debug('%s: ' %
5035 ui.configsource(section, name, untrusted))
5035 ui.configsource(section, name, untrusted))
5036 ui.write(value, '\n')
5036 ui.write(value, '\n')
5037 else:
5037 else:
5038 ui.debug('%s: ' %
5038 ui.debug('%s: ' %
5039 ui.configsource(section, name, untrusted))
5039 ui.configsource(section, name, untrusted))
5040 ui.write('%s=%s\n' % (sectname, value))
5040 ui.write('%s=%s\n' % (sectname, value))
5041
5041
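# --- illustrative sketch (not part of commands.py) --------------------------
# Argument handling above, condensed: names containing a dot are section.name
# items (at most one is allowed), bare names select whole sections.
# ValueError stands in for util.Abort to keep the sketch self-contained.
def split_config_args(values):
    sections = [v for v in values if '.' not in v]
    items = [v for v in values if '.' in v]
    if len(items) > 1 or (items and sections):
        raise ValueError('only one config item permitted')
    return sections, items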
5042 @command('^status|st',
5042 @command('^status|st',
5043 [('A', 'all', None, _('show status of all files')),
5043 [('A', 'all', None, _('show status of all files')),
5044 ('m', 'modified', None, _('show only modified files')),
5044 ('m', 'modified', None, _('show only modified files')),
5045 ('a', 'added', None, _('show only added files')),
5045 ('a', 'added', None, _('show only added files')),
5046 ('r', 'removed', None, _('show only removed files')),
5046 ('r', 'removed', None, _('show only removed files')),
5047 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
5047 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
5048 ('c', 'clean', None, _('show only files without changes')),
5048 ('c', 'clean', None, _('show only files without changes')),
5049 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
5049 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
5050 ('i', 'ignored', None, _('show only ignored files')),
5050 ('i', 'ignored', None, _('show only ignored files')),
5051 ('n', 'no-status', None, _('hide status prefix')),
5051 ('n', 'no-status', None, _('hide status prefix')),
5052 ('C', 'copies', None, _('show source of copied files')),
5052 ('C', 'copies', None, _('show source of copied files')),
5053 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
5053 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
5054 ('', 'rev', [], _('show difference from revision'), _('REV')),
5054 ('', 'rev', [], _('show difference from revision'), _('REV')),
5055 ('', 'change', '', _('list the changed files of a revision'), _('REV')),
5055 ('', 'change', '', _('list the changed files of a revision'), _('REV')),
5056 ] + walkopts + subrepoopts,
5056 ] + walkopts + subrepoopts,
5057 _('[OPTION]... [FILE]...'))
5057 _('[OPTION]... [FILE]...'))
5058 def status(ui, repo, *pats, **opts):
5058 def status(ui, repo, *pats, **opts):
5059 """show changed files in the working directory
5059 """show changed files in the working directory
5060
5060
5061 Show status of files in the repository. If names are given, only
5061 Show status of files in the repository. If names are given, only
5062 files that match are shown. Files that are clean or ignored or
5062 files that match are shown. Files that are clean or ignored or
5063 the source of a copy/move operation are not listed unless
5063 the source of a copy/move operation are not listed unless
5064 -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
5064 -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
5065 Unless options described with "show only ..." are given, the
5065 Unless options described with "show only ..." are given, the
5066 options -mardu are used.
5066 options -mardu are used.
5067
5067
5068 Option -q/--quiet hides untracked (unknown and ignored) files
5068 Option -q/--quiet hides untracked (unknown and ignored) files
5069 unless explicitly requested with -u/--unknown or -i/--ignored.
5069 unless explicitly requested with -u/--unknown or -i/--ignored.
5070
5070
5071 .. note::
5071 .. note::
5072 status may appear to disagree with diff if permissions have
5072 status may appear to disagree with diff if permissions have
5073 changed or a merge has occurred. The standard diff format does
5073 changed or a merge has occurred. The standard diff format does
5074 not report permission changes and diff only reports changes
5074 not report permission changes and diff only reports changes
5075 relative to one merge parent.
5075 relative to one merge parent.
5076
5076
5077 If one revision is given, it is used as the base revision.
5077 If one revision is given, it is used as the base revision.
5078 If two revisions are given, the differences between them are
5078 If two revisions are given, the differences between them are
5079 shown. The --change option can also be used as a shortcut to list
5079 shown. The --change option can also be used as a shortcut to list
5080 the changed files of a revision from its first parent.
5080 the changed files of a revision from its first parent.
5081
5081
5082 The codes used to show the status of files are::
5082 The codes used to show the status of files are::
5083
5083
5084 M = modified
5084 M = modified
5085 A = added
5085 A = added
5086 R = removed
5086 R = removed
5087 C = clean
5087 C = clean
5088 ! = missing (deleted by non-hg command, but still tracked)
5088 ! = missing (deleted by non-hg command, but still tracked)
5089 ? = not tracked
5089 ? = not tracked
5090 I = ignored
5090 I = ignored
5091 = origin of the previous file listed as A (added)
5091 = origin of the previous file listed as A (added)
5092
5092
5093 .. container:: verbose
5093 .. container:: verbose
5094
5094
5095 Examples:
5095 Examples:
5096
5096
5097 - show changes in the working directory relative to a changeset::
5097 - show changes in the working directory relative to a changeset::
5098
5098
5099 hg status --rev 9353
5099 hg status --rev 9353
5100
5100
5101 - show all changes including copies in an existing changeset::
5101 - show all changes including copies in an existing changeset::
5102
5102
5103 hg status --copies --change 9353
5103 hg status --copies --change 9353
5104
5104
5105 - get a NUL separated list of added files, suitable for xargs::
5105 - get a NUL separated list of added files, suitable for xargs::
5106
5106
5107 hg status -an0
5107 hg status -an0
5108
5108
5109 Returns 0 on success.
5109 Returns 0 on success.
5110 """
5110 """
5111
5111
5112 revs = opts.get('rev')
5112 revs = opts.get('rev')
5113 change = opts.get('change')
5113 change = opts.get('change')
5114
5114
5115 if revs and change:
5115 if revs and change:
5116 msg = _('cannot specify --rev and --change at the same time')
5116 msg = _('cannot specify --rev and --change at the same time')
5117 raise util.Abort(msg)
5117 raise util.Abort(msg)
5118 elif change:
5118 elif change:
5119 node2 = repo.lookup(change)
5119 node2 = repo.lookup(change)
5120 node1 = repo[node2].p1().node()
5120 node1 = repo[node2].p1().node()
5121 else:
5121 else:
5122 node1, node2 = scmutil.revpair(repo, revs)
5122 node1, node2 = scmutil.revpair(repo, revs)
5123
5123
5124 cwd = (pats and repo.getcwd()) or ''
5124 cwd = (pats and repo.getcwd()) or ''
5125 end = opts.get('print0') and '\0' or '\n'
5125 end = opts.get('print0') and '\0' or '\n'
5126 copy = {}
5126 copy = {}
5127 states = 'modified added removed deleted unknown ignored clean'.split()
5127 states = 'modified added removed deleted unknown ignored clean'.split()
5128 show = [k for k in states if opts.get(k)]
5128 show = [k for k in states if opts.get(k)]
5129 if opts.get('all'):
5129 if opts.get('all'):
5130 show += ui.quiet and (states[:4] + ['clean']) or states
5130 show += ui.quiet and (states[:4] + ['clean']) or states
5131 if not show:
5131 if not show:
5132 show = ui.quiet and states[:4] or states[:5]
5132 show = ui.quiet and states[:4] or states[:5]
5133
5133
5134 stat = repo.status(node1, node2, scmutil.match(repo[node2], pats, opts),
5134 stat = repo.status(node1, node2, scmutil.match(repo[node2], pats, opts),
5135 'ignored' in show, 'clean' in show, 'unknown' in show,
5135 'ignored' in show, 'clean' in show, 'unknown' in show,
5136 opts.get('subrepos'))
5136 opts.get('subrepos'))
5137 changestates = zip(states, 'MAR!?IC', stat)
5137 changestates = zip(states, 'MAR!?IC', stat)
5138
5138
5139 if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
5139 if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
5140 ctxn = repo[nullid]
5140 ctxn = repo[nullid]
5141 ctx1 = repo[node1]
5141 ctx1 = repo[node1]
5142 ctx2 = repo[node2]
5142 ctx2 = repo[node2]
5143 added = stat[1]
5143 added = stat[1]
5144 if node2 is None:
5144 if node2 is None:
5145 added = stat[0] + stat[1] # merged?
5145 added = stat[0] + stat[1] # merged?
5146
5146
5147 for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].iteritems():
5147 for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].iteritems():
5148 if k in added:
5148 if k in added:
5149 copy[k] = v
5149 copy[k] = v
5150 elif v in added:
5150 elif v in added:
5151 copy[v] = k
5151 copy[v] = k
5152
5152
5153 for state, char, files in changestates:
5153 for state, char, files in changestates:
5154 if state in show:
5154 if state in show:
5155 format = "%s %%s%s" % (char, end)
5155 format = "%s %%s%s" % (char, end)
5156 if opts.get('no_status'):
5156 if opts.get('no_status'):
5157 format = "%%s%s" % end
5157 format = "%%s%s" % end
5158
5158
5159 for f in files:
5159 for f in files:
5160 ui.write(format % repo.pathto(f, cwd),
5160 ui.write(format % repo.pathto(f, cwd),
5161 label='status.' + state)
5161 label='status.' + state)
5162 if f in copy:
5162 if f in copy:
5163 ui.write(' %s%s' % (repo.pathto(copy[f], cwd), end),
5163 ui.write(' %s%s' % (repo.pathto(copy[f], cwd), end),
5164 label='status.copied')
5164 label='status.copied')
5165
5165
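# --- illustrative sketch (not part of commands.py) --------------------------
# How the set of states to display is chosen above: explicit flags win,
# --all shows everything, and the default is -mardu (or -mard with --quiet,
# which hides unknown files).  `opts` mirrors the command's option dict.
STATES = 'modified added removed deleted unknown ignored clean'.split()

def states_to_show(opts, quiet=False):
    show = [k for k in STATES if opts.get(k)]
    if opts.get('all'):
        show += quiet and (STATES[:4] + ['clean']) or STATES
    if not show:
        show = quiet and STATES[:4] or STATES[:5]
    return show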
5166 @command('^summary|sum',
5166 @command('^summary|sum',
5167 [('', 'remote', None, _('check for push and pull'))], '[--remote]')
5167 [('', 'remote', None, _('check for push and pull'))], '[--remote]')
5168 def summary(ui, repo, **opts):
5168 def summary(ui, repo, **opts):
5169 """summarize working directory state
5169 """summarize working directory state
5170
5170
5171 This generates a brief summary of the working directory state,
5171 This generates a brief summary of the working directory state,
5172 including parents, branch, commit status, and available updates.
5172 including parents, branch, commit status, and available updates.
5173
5173
5174 With the --remote option, this will check the default paths for
5174 With the --remote option, this will check the default paths for
5175 incoming and outgoing changes. This can be time-consuming.
5175 incoming and outgoing changes. This can be time-consuming.
5176
5176
5177 Returns 0 on success.
5177 Returns 0 on success.
5178 """
5178 """
5179
5179
5180 ctx = repo[None]
5180 ctx = repo[None]
5181 parents = ctx.parents()
5181 parents = ctx.parents()
5182 pnode = parents[0].node()
5182 pnode = parents[0].node()
5183 marks = []
5183 marks = []
5184
5184
5185 for p in parents:
5185 for p in parents:
5186 # label with log.changeset (instead of log.parent) since this
5186 # label with log.changeset (instead of log.parent) since this
5187 # shows a working directory parent *changeset*:
5187 # shows a working directory parent *changeset*:
5188 ui.write(_('parent: %d:%s ') % (p.rev(), str(p)),
5188 ui.write(_('parent: %d:%s ') % (p.rev(), str(p)),
5189 label='log.changeset')
5189 label='log.changeset')
5190 ui.write(' '.join(p.tags()), label='log.tag')
5190 ui.write(' '.join(p.tags()), label='log.tag')
5191 if p.bookmarks():
5191 if p.bookmarks():
5192 marks.extend(p.bookmarks())
5192 marks.extend(p.bookmarks())
5193 if p.rev() == -1:
5193 if p.rev() == -1:
5194 if not len(repo):
5194 if not len(repo):
5195 ui.write(_(' (empty repository)'))
5195 ui.write(_(' (empty repository)'))
5196 else:
5196 else:
5197 ui.write(_(' (no revision checked out)'))
5197 ui.write(_(' (no revision checked out)'))
5198 ui.write('\n')
5198 ui.write('\n')
5199 if p.description():
5199 if p.description():
5200 ui.status(' ' + p.description().splitlines()[0].strip() + '\n',
5200 ui.status(' ' + p.description().splitlines()[0].strip() + '\n',
5201 label='log.summary')
5201 label='log.summary')
5202
5202
5203 branch = ctx.branch()
5203 branch = ctx.branch()
5204 bheads = repo.branchheads(branch)
5204 bheads = repo.branchheads(branch)
5205 m = _('branch: %s\n') % branch
5205 m = _('branch: %s\n') % branch
5206 if branch != 'default':
5206 if branch != 'default':
5207 ui.write(m, label='log.branch')
5207 ui.write(m, label='log.branch')
5208 else:
5208 else:
5209 ui.status(m, label='log.branch')
5209 ui.status(m, label='log.branch')
5210
5210
5211 if marks:
5211 if marks:
5212 current = repo._bookmarkcurrent
5212 current = repo._bookmarkcurrent
5213 ui.write(_('bookmarks:'), label='log.bookmark')
5213 ui.write(_('bookmarks:'), label='log.bookmark')
5214 if current is not None:
5214 if current is not None:
5215 try:
5215 try:
5216 marks.remove(current)
5216 marks.remove(current)
5217 ui.write(' *' + current, label='bookmarks.current')
5217 ui.write(' *' + current, label='bookmarks.current')
5218 except ValueError:
5218 except ValueError:
5219 # current bookmark not in parent ctx marks
5219 # current bookmark not in parent ctx marks
5220 pass
5220 pass
5221 for m in marks:
5221 for m in marks:
5222 ui.write(' ' + m, label='log.bookmark')
5222 ui.write(' ' + m, label='log.bookmark')
5223 ui.write('\n', label='log.bookmark')
5223 ui.write('\n', label='log.bookmark')
5224
5224
5225 st = list(repo.status(unknown=True))[:6]
5225 st = list(repo.status(unknown=True))[:6]
5226
5226
5227 c = repo.dirstate.copies()
5227 c = repo.dirstate.copies()
5228 copied, renamed = [], []
5228 copied, renamed = [], []
5229 for d, s in c.iteritems():
5229 for d, s in c.iteritems():
5230 if s in st[2]:
5230 if s in st[2]:
5231 st[2].remove(s)
5231 st[2].remove(s)
5232 renamed.append(d)
5232 renamed.append(d)
5233 else:
5233 else:
5234 copied.append(d)
5234 copied.append(d)
5235 if d in st[1]:
5235 if d in st[1]:
5236 st[1].remove(d)
5236 st[1].remove(d)
5237 st.insert(3, renamed)
5237 st.insert(3, renamed)
5238 st.insert(4, copied)
5238 st.insert(4, copied)
5239
5239
5240 ms = mergemod.mergestate(repo)
5240 ms = mergemod.mergestate(repo)
5241 st.append([f for f in ms if ms[f] == 'u'])
5241 st.append([f for f in ms if ms[f] == 'u'])
5242
5242
5243 subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
5243 subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
5244 st.append(subs)
5244 st.append(subs)
5245
5245
5246 labels = [ui.label(_('%d modified'), 'status.modified'),
5246 labels = [ui.label(_('%d modified'), 'status.modified'),
5247 ui.label(_('%d added'), 'status.added'),
5247 ui.label(_('%d added'), 'status.added'),
5248 ui.label(_('%d removed'), 'status.removed'),
5248 ui.label(_('%d removed'), 'status.removed'),
5249 ui.label(_('%d renamed'), 'status.copied'),
5249 ui.label(_('%d renamed'), 'status.copied'),
5250 ui.label(_('%d copied'), 'status.copied'),
5250 ui.label(_('%d copied'), 'status.copied'),
5251 ui.label(_('%d deleted'), 'status.deleted'),
5251 ui.label(_('%d deleted'), 'status.deleted'),
5252 ui.label(_('%d unknown'), 'status.unknown'),
5252 ui.label(_('%d unknown'), 'status.unknown'),
5253 ui.label(_('%d ignored'), 'status.ignored'),
5253 ui.label(_('%d ignored'), 'status.ignored'),
5254 ui.label(_('%d unresolved'), 'resolve.unresolved'),
5254 ui.label(_('%d unresolved'), 'resolve.unresolved'),
5255 ui.label(_('%d subrepos'), 'status.modified')]
5255 ui.label(_('%d subrepos'), 'status.modified')]
5256 t = []
5256 t = []
5257 for s, l in zip(st, labels):
5257 for s, l in zip(st, labels):
5258 if s:
5258 if s:
5259 t.append(l % len(s))
5259 t.append(l % len(s))
5260
5260
5261 t = ', '.join(t)
5261 t = ', '.join(t)
5262 cleanworkdir = False
5262 cleanworkdir = False
5263
5263
5264 if len(parents) > 1:
5264 if len(parents) > 1:
5265 t += _(' (merge)')
5265 t += _(' (merge)')
5266 elif branch != parents[0].branch():
5266 elif branch != parents[0].branch():
5267 t += _(' (new branch)')
5267 t += _(' (new branch)')
5268 elif (parents[0].extra().get('close') and
5268 elif (parents[0].extra().get('close') and
5269 pnode in repo.branchheads(branch, closed=True)):
5269 pnode in repo.branchheads(branch, closed=True)):
5270 t += _(' (head closed)')
5270 t += _(' (head closed)')
5271 elif not (st[0] or st[1] or st[2] or st[3] or st[4] or st[9]):
5271 elif not (st[0] or st[1] or st[2] or st[3] or st[4] or st[9]):
5272 t += _(' (clean)')
5272 t += _(' (clean)')
5273 cleanworkdir = True
5273 cleanworkdir = True
5274 elif pnode not in bheads:
5274 elif pnode not in bheads:
5275 t += _(' (new branch head)')
5275 t += _(' (new branch head)')
5276
5276
5277 if cleanworkdir:
5277 if cleanworkdir:
5278 ui.status(_('commit: %s\n') % t.strip())
5278 ui.status(_('commit: %s\n') % t.strip())
5279 else:
5279 else:
5280 ui.write(_('commit: %s\n') % t.strip())
5280 ui.write(_('commit: %s\n') % t.strip())
5281
5281
5282 # all ancestors of branch heads - all ancestors of parent = new csets
5282 # all ancestors of branch heads - all ancestors of parent = new csets
5283 new = [0] * len(repo)
5283 new = [0] * len(repo)
5284 cl = repo.changelog
5284 cl = repo.changelog
5285 for a in [cl.rev(n) for n in bheads]:
5285 for a in [cl.rev(n) for n in bheads]:
5286 new[a] = 1
5286 new[a] = 1
5287 for a in cl.ancestors(*[cl.rev(n) for n in bheads]):
5287 for a in cl.ancestors(*[cl.rev(n) for n in bheads]):
5288 new[a] = 1
5288 new[a] = 1
5289 for a in [p.rev() for p in parents]:
5289 for a in [p.rev() for p in parents]:
5290 if a >= 0:
5290 if a >= 0:
5291 new[a] = 0
5291 new[a] = 0
5292 for a in cl.ancestors(*[p.rev() for p in parents]):
5292 for a in cl.ancestors(*[p.rev() for p in parents]):
5293 new[a] = 0
5293 new[a] = 0
5294 new = sum(new)
5294 new = sum(new)
5295
5295
5296 if new == 0:
5296 if new == 0:
5297 ui.status(_('update: (current)\n'))
5297 ui.status(_('update: (current)\n'))
5298 elif pnode not in bheads:
5298 elif pnode not in bheads:
5299 ui.write(_('update: %d new changesets (update)\n') % new)
5299 ui.write(_('update: %d new changesets (update)\n') % new)
5300 else:
5300 else:
5301 ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
5301 ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
5302 (new, len(bheads)))
5302 (new, len(bheads)))
5303
5303
5304 if opts.get('remote'):
5304 if opts.get('remote'):
5305 t = []
5305 t = []
5306 source, branches = hg.parseurl(ui.expandpath('default'))
5306 source, branches = hg.parseurl(ui.expandpath('default'))
5307 other = hg.peer(repo, {}, source)
5307 other = hg.peer(repo, {}, source)
5308 revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))
5308 revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))
5309 ui.debug('comparing with %s\n' % util.hidepassword(source))
5309 ui.debug('comparing with %s\n' % util.hidepassword(source))
5310 repo.ui.pushbuffer()
5310 repo.ui.pushbuffer()
5311 commoninc = discovery.findcommonincoming(repo, other)
5311 commoninc = discovery.findcommonincoming(repo, other)
5312 _common, incoming, _rheads = commoninc
5312 _common, incoming, _rheads = commoninc
5313 repo.ui.popbuffer()
5313 repo.ui.popbuffer()
5314 if incoming:
5314 if incoming:
5315 t.append(_('1 or more incoming'))
5315 t.append(_('1 or more incoming'))
5316
5316
5317 dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
5317 dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
5318 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
5318 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
5319 if source != dest:
5319 if source != dest:
5320 other = hg.peer(repo, {}, dest)
5320 other = hg.peer(repo, {}, dest)
5321 commoninc = None
5321 commoninc = None
5322 ui.debug('comparing with %s\n' % util.hidepassword(dest))
5322 ui.debug('comparing with %s\n' % util.hidepassword(dest))
5323 repo.ui.pushbuffer()
5323 repo.ui.pushbuffer()
5324 common, outheads = discovery.findcommonoutgoing(repo, other,
5324 common, outheads = discovery.findcommonoutgoing(repo, other,
5325 commoninc=commoninc)
5325 commoninc=commoninc)
5326 repo.ui.popbuffer()
5326 repo.ui.popbuffer()
5327 o = repo.changelog.findmissing(common=common, heads=outheads)
5327 o = repo.changelog.findmissing(common=common, heads=outheads)
5328 if o:
5328 if o:
5329 t.append(_('%d outgoing') % len(o))
5329 t.append(_('%d outgoing') % len(o))
5330 if 'bookmarks' in other.listkeys('namespaces'):
5330 if 'bookmarks' in other.listkeys('namespaces'):
5331 lmarks = repo.listkeys('bookmarks')
5331 lmarks = repo.listkeys('bookmarks')
5332 rmarks = other.listkeys('bookmarks')
5332 rmarks = other.listkeys('bookmarks')
5333 diff = set(rmarks) - set(lmarks)
5333 diff = set(rmarks) - set(lmarks)
5334 if len(diff) > 0:
5334 if len(diff) > 0:
5335 t.append(_('%d incoming bookmarks') % len(diff))
5335 t.append(_('%d incoming bookmarks') % len(diff))
5336 diff = set(lmarks) - set(rmarks)
5336 diff = set(lmarks) - set(rmarks)
5337 if len(diff) > 0:
5337 if len(diff) > 0:
5338 t.append(_('%d outgoing bookmarks') % len(diff))
5338 t.append(_('%d outgoing bookmarks') % len(diff))
5339
5339
5340 if t:
5340 if t:
5341 ui.write(_('remote: %s\n') % (', '.join(t)))
5341 ui.write(_('remote: %s\n') % (', '.join(t)))
5342 else:
5342 else:
5343 ui.status(_('remote: (synced)\n'))
5343 ui.status(_('remote: (synced)\n'))
5344
5344
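# --- illustrative sketch (not part of commands.py) --------------------------
# The "update:" line above counts new changesets as a set difference:
# ancestors(branch heads) - ancestors(working directory parents).
# The same idea expressed over plain revision-number sets:
def count_new_csets(head_ancestors, parent_ancestors):
    return len(set(head_ancestors) - set(parent_ancestors))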
5345 @command('tag',
5345 @command('tag',
5346 [('f', 'force', None, _('force tag')),
5346 [('f', 'force', None, _('force tag')),
5347 ('l', 'local', None, _('make the tag local')),
5347 ('l', 'local', None, _('make the tag local')),
5348 ('r', 'rev', '', _('revision to tag'), _('REV')),
5348 ('r', 'rev', '', _('revision to tag'), _('REV')),
5349 ('', 'remove', None, _('remove a tag')),
5349 ('', 'remove', None, _('remove a tag')),
5350 # -l/--local is already there, commitopts cannot be used
5350 # -l/--local is already there, commitopts cannot be used
5351 ('e', 'edit', None, _('edit commit message')),
5351 ('e', 'edit', None, _('edit commit message')),
5352 ('m', 'message', '', _('use <text> as commit message'), _('TEXT')),
5352 ('m', 'message', '', _('use <text> as commit message'), _('TEXT')),
5353 ] + commitopts2,
5353 ] + commitopts2,
5354 _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...'))
5354 _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...'))
5355 def tag(ui, repo, name1, *names, **opts):
5355 def tag(ui, repo, name1, *names, **opts):
5356 """add one or more tags for the current or given revision
5356 """add one or more tags for the current or given revision
5357
5357
5358 Name a particular revision using <name>.
5358 Name a particular revision using <name>.
5359
5359
5360 Tags are used to name particular revisions of the repository and are
5360 Tags are used to name particular revisions of the repository and are
5361 very useful to compare different revisions, to go back to significant
5361 very useful to compare different revisions, to go back to significant
5362 earlier versions or to mark branch points as releases, etc. Changing
5362 earlier versions or to mark branch points as releases, etc. Changing
5363 an existing tag is normally disallowed; use -f/--force to override.
5363 an existing tag is normally disallowed; use -f/--force to override.
5364
5364
5365 If no revision is given, the parent of the working directory is
5365 If no revision is given, the parent of the working directory is
5366 used, or tip if no revision is checked out.
5366 used, or tip if no revision is checked out.
5367
5367
5368 To facilitate version control, distribution, and merging of tags,
5368 To facilitate version control, distribution, and merging of tags,
5369 they are stored as a file named ".hgtags" which is managed similarly
5369 they are stored as a file named ".hgtags" which is managed similarly
5370 to other project files and can be hand-edited if necessary. This
5370 to other project files and can be hand-edited if necessary. This
5371 also means that tagging creates a new commit. The file
5371 also means that tagging creates a new commit. The file
5372 ".hg/localtags" is used for local tags (not shared among
5372 ".hg/localtags" is used for local tags (not shared among
5373 repositories).
5373 repositories).
5374
5374
5375 Tag commits are usually made at the head of a branch. If the parent
5375 Tag commits are usually made at the head of a branch. If the parent
5376 of the working directory is not a branch head, :hg:`tag` aborts; use
5376 of the working directory is not a branch head, :hg:`tag` aborts; use
5377 -f/--force to force the tag commit to be based on a non-head
5377 -f/--force to force the tag commit to be based on a non-head
5378 changeset.
5378 changeset.
5379
5379
5380 See :hg:`help dates` for a list of formats valid for -d/--date.
5380 See :hg:`help dates` for a list of formats valid for -d/--date.
5381
5381
5382 Since tag names have priority over branch names during revision
5382 Since tag names have priority over branch names during revision
5383 lookup, using an existing branch name as a tag name is discouraged.
5383 lookup, using an existing branch name as a tag name is discouraged.
5384
5384
5385 Returns 0 on success.
5385 Returns 0 on success.
5386 """
5386 """
5387
5387
5388 rev_ = "."
5388 rev_ = "."
5389 names = [t.strip() for t in (name1,) + names]
5389 names = [t.strip() for t in (name1,) + names]
5390 if len(names) != len(set(names)):
5390 if len(names) != len(set(names)):
5391 raise util.Abort(_('tag names must be unique'))
5391 raise util.Abort(_('tag names must be unique'))
5392 for n in names:
5392 for n in names:
5393 if n in ['tip', '.', 'null']:
5393 if n in ['tip', '.', 'null']:
5394 raise util.Abort(_("the name '%s' is reserved") % n)
5394 raise util.Abort(_("the name '%s' is reserved") % n)
5395 if not n:
5395 if not n:
5396 raise util.Abort(_('tag names cannot consist entirely of whitespace'))
5396 raise util.Abort(_('tag names cannot consist entirely of whitespace'))
5397 if opts.get('rev') and opts.get('remove'):
5397 if opts.get('rev') and opts.get('remove'):
5398 raise util.Abort(_("--rev and --remove are incompatible"))
5398 raise util.Abort(_("--rev and --remove are incompatible"))
5399 if opts.get('rev'):
5399 if opts.get('rev'):
5400 rev_ = opts['rev']
5400 rev_ = opts['rev']
5401 message = opts.get('message')
5401 message = opts.get('message')
5402 if opts.get('remove'):
5402 if opts.get('remove'):
5403 expectedtype = opts.get('local') and 'local' or 'global'
5403 expectedtype = opts.get('local') and 'local' or 'global'
5404 for n in names:
5404 for n in names:
5405 if not repo.tagtype(n):
5405 if not repo.tagtype(n):
5406 raise util.Abort(_("tag '%s' does not exist") % n)
5406 raise util.Abort(_("tag '%s' does not exist") % n)
5407 if repo.tagtype(n) != expectedtype:
5407 if repo.tagtype(n) != expectedtype:
5408 if expectedtype == 'global':
5408 if expectedtype == 'global':
5409 raise util.Abort(_("tag '%s' is not a global tag") % n)
5409 raise util.Abort(_("tag '%s' is not a global tag") % n)
5410 else:
5410 else:
5411 raise util.Abort(_("tag '%s' is not a local tag") % n)
5411 raise util.Abort(_("tag '%s' is not a local tag") % n)
5412 rev_ = nullid
5412 rev_ = nullid
5413 if not message:
5413 if not message:
5414 # we don't translate commit messages
5414 # we don't translate commit messages
5415 message = 'Removed tag %s' % ', '.join(names)
5415 message = 'Removed tag %s' % ', '.join(names)
5416 elif not opts.get('force'):
5416 elif not opts.get('force'):
5417 for n in names:
5417 for n in names:
5418 if n in repo.tags():
5418 if n in repo.tags():
5419 raise util.Abort(_("tag '%s' already exists "
5419 raise util.Abort(_("tag '%s' already exists "
5420 "(use -f to force)") % n)
5420 "(use -f to force)") % n)
5421 if not opts.get('local'):
5421 if not opts.get('local'):
5422 p1, p2 = repo.dirstate.parents()
5422 p1, p2 = repo.dirstate.parents()
5423 if p2 != nullid:
5423 if p2 != nullid:
5424 raise util.Abort(_('uncommitted merge'))
5424 raise util.Abort(_('uncommitted merge'))
5425 bheads = repo.branchheads()
5425 bheads = repo.branchheads()
5426 if not opts.get('force') and bheads and p1 not in bheads:
5426 if not opts.get('force') and bheads and p1 not in bheads:
5427 raise util.Abort(_('not at a branch head (use -f to force)'))
5427 raise util.Abort(_('not at a branch head (use -f to force)'))
5428 r = scmutil.revsingle(repo, rev_).node()
5428 r = scmutil.revsingle(repo, rev_).node()
5429
5429
5430 if not message:
5430 if not message:
5431 # we don't translate commit messages
5431 # we don't translate commit messages
5432 message = ('Added tag %s for changeset %s' %
5432 message = ('Added tag %s for changeset %s' %
5433 (', '.join(names), short(r)))
5433 (', '.join(names), short(r)))
5434
5434
5435 date = opts.get('date')
5435 date = opts.get('date')
5436 if date:
5436 if date:
5437 date = util.parsedate(date)
5437 date = util.parsedate(date)
5438
5438
5439 if opts.get('edit'):
5439 if opts.get('edit'):
5440 message = ui.edit(message, ui.username())
5440 message = ui.edit(message, ui.username())
5441
5441
5442 repo.tag(names, r, message, opts.get('local'), opts.get('user'), date)
5442 repo.tag(names, r, message, opts.get('local'), opts.get('user'), date)
5443
5443
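# --- illustrative sketch (not part of commands.py) --------------------------
# The tag-name validation performed above, condensed; ValueError stands in
# for util.Abort so the sketch has no Mercurial dependencies.
def check_tag_names(name1, *names):
    names = [t.strip() for t in (name1,) + names]
    if len(names) != len(set(names)):
        raise ValueError('tag names must be unique')
    for n in names:
        if n in ('tip', '.', 'null'):
            raise ValueError("the name '%s' is reserved" % n)
        if not n:
            raise ValueError('tag names cannot consist entirely of whitespace')
    return names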
5444 @command('tags', [], '')
5444 @command('tags', [], '')
5445 def tags(ui, repo):
5445 def tags(ui, repo):
5446 """list repository tags
5446 """list repository tags
5447
5447
5448 This lists both regular and local tags. When the -v/--verbose
5448 This lists both regular and local tags. When the -v/--verbose
5449 switch is used, a third column "local" is printed for local tags.
5449 switch is used, a third column "local" is printed for local tags.
5450
5450
5451 Returns 0 on success.
5451 Returns 0 on success.
5452 """
5452 """
5453
5453
5454 hexfunc = ui.debugflag and hex or short
5454 hexfunc = ui.debugflag and hex or short
5455 tagtype = ""
5455 tagtype = ""
5456
5456
5457 for t, n in reversed(repo.tagslist()):
5457 for t, n in reversed(repo.tagslist()):
5458 if ui.quiet:
5458 if ui.quiet:
5459 ui.write("%s\n" % t, label='tags.normal')
5459 ui.write("%s\n" % t, label='tags.normal')
5460 continue
5460 continue
5461
5461
5462 hn = hexfunc(n)
5462 hn = hexfunc(n)
5463 r = "%5d:%s" % (repo.changelog.rev(n), hn)
5463 r = "%5d:%s" % (repo.changelog.rev(n), hn)
5464 rev = ui.label(r, 'log.changeset')
5464 rev = ui.label(r, 'log.changeset')
5465 spaces = " " * (30 - encoding.colwidth(t))
5465 spaces = " " * (30 - encoding.colwidth(t))
5466
5466
5467 tag = ui.label(t, 'tags.normal')
5467 tag = ui.label(t, 'tags.normal')
5468 if ui.verbose:
5468 if ui.verbose:
5469 if repo.tagtype(t) == 'local':
5469 if repo.tagtype(t) == 'local':
5470 tagtype = " local"
5470 tagtype = " local"
5471 tag = ui.label(t, 'tags.local')
5471 tag = ui.label(t, 'tags.local')
5472 else:
5472 else:
5473 tagtype = ""
5473 tagtype = ""
5474 ui.write("%s%s %s%s\n" % (tag, spaces, rev, tagtype))
5474 ui.write("%s%s %s%s\n" % (tag, spaces, rev, tagtype))
5475
5475
5476 @command('tip',
5476 @command('tip',
5477 [('p', 'patch', None, _('show patch')),
5477 [('p', 'patch', None, _('show patch')),
5478 ('g', 'git', None, _('use git extended diff format')),
5478 ('g', 'git', None, _('use git extended diff format')),
5479 ] + templateopts,
5479 ] + templateopts,
5480 _('[-p] [-g]'))
5480 _('[-p] [-g]'))
5481 def tip(ui, repo, **opts):
5481 def tip(ui, repo, **opts):
5482 """show the tip revision
5482 """show the tip revision
5483
5483
5484 The tip revision (usually just called the tip) is the changeset
5484 The tip revision (usually just called the tip) is the changeset
5485 most recently added to the repository (and therefore the most
5485 most recently added to the repository (and therefore the most
5486 recently changed head).
5486 recently changed head).
5487
5487
5488 If you have just made a commit, that commit will be the tip. If
5488 If you have just made a commit, that commit will be the tip. If
5489 you have just pulled changes from another repository, the tip of
5489 you have just pulled changes from another repository, the tip of
5490 that repository becomes the current tip. The "tip" tag is special
5490 that repository becomes the current tip. The "tip" tag is special
5491 and cannot be renamed or assigned to a different changeset.
5491 and cannot be renamed or assigned to a different changeset.
5492
5492
5493 Returns 0 on success.
5493 Returns 0 on success.
5494 """
5494 """
5495 displayer = cmdutil.show_changeset(ui, repo, opts)
5495 displayer = cmdutil.show_changeset(ui, repo, opts)
5496 displayer.show(repo[len(repo) - 1])
5496 displayer.show(repo[len(repo) - 1])
5497 displayer.close()
5497 displayer.close()
5498
5498
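# --- illustrative note (not part of commands.py) -----------------------------
# repo[len(repo) - 1] above selects the highest-numbered revision, i.e. the
# changeset most recently added to this repository; a helper might read:
def tipctx(repo):
    # the tip changeset, the same revision the built-in "tip" tag points at
    return repo[len(repo) - 1]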
5499 @command('unbundle',
5499 @command('unbundle',
5500 [('u', 'update', None,
5500 [('u', 'update', None,
5501 _('update to new branch head if changesets were unbundled'))],
5501 _('update to new branch head if changesets were unbundled'))],
5502 _('[-u] FILE...'))
5502 _('[-u] FILE...'))
5503 def unbundle(ui, repo, fname1, *fnames, **opts):
5503 def unbundle(ui, repo, fname1, *fnames, **opts):
5504 """apply one or more changegroup files
5504 """apply one or more changegroup files
5505
5505
5506 Apply one or more compressed changegroup files generated by the
5506 Apply one or more compressed changegroup files generated by the
5507 bundle command.
5507 bundle command.
5508
5508
5509 Returns 0 on success, 1 if an update has unresolved files.
5509 Returns 0 on success, 1 if an update has unresolved files.
5510 """
5510 """
5511 fnames = (fname1,) + fnames
5511 fnames = (fname1,) + fnames
5512
5512
5513 lock = repo.lock()
5513 lock = repo.lock()
5514 wc = repo['.']
5514 wc = repo['.']
5515 try:
5515 try:
5516 for fname in fnames:
5516 for fname in fnames:
5517 f = url.open(ui, fname)
5517 f = url.open(ui, fname)
5518 gen = changegroup.readbundle(f, fname)
5518 gen = changegroup.readbundle(f, fname)
5519 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname,
5519 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname,
5520 lock=lock)
5520 lock=lock)
5521 bookmarks.updatecurrentbookmark(repo, wc.node(), wc.branch())
5521 bookmarks.updatecurrentbookmark(repo, wc.node(), wc.branch())
5522 finally:
5522 finally:
5523 lock.release()
5523 lock.release()
5524 return postincoming(ui, repo, modheads, opts.get('update'), None)
5524 return postincoming(ui, repo, modheads, opts.get('update'), None)
5525
5525
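# --- illustrative sketch (not part of commands.py) --------------------------
# Each FILE argument above is opened, parsed as a changegroup bundle and
# applied while holding the repository lock; a minimal rendering of that loop:
def apply_bundles(ui, repo, fnames):
    lock = repo.lock()
    try:
        for fname in fnames:
            f = url.open(ui, fname)
            gen = changegroup.readbundle(f, fname)
            repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname, lock=lock)
    finally:
        lock.release()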
5526 @command('^update|up|checkout|co',
5526 @command('^update|up|checkout|co',
5527 [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
5527 [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
5528 ('c', 'check', None,
5528 ('c', 'check', None,
5529 _('update across branches if no uncommitted changes')),
5529 _('update across branches if no uncommitted changes')),
5530 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
5530 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
5531 ('r', 'rev', '', _('revision'), _('REV'))],
5531 ('r', 'rev', '', _('revision'), _('REV'))],
5532 _('[-c] [-C] [-d DATE] [[-r] REV]'))
5532 _('[-c] [-C] [-d DATE] [[-r] REV]'))
5533 def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False):
5533 def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False):
5534 """update working directory (or switch revisions)
5534 """update working directory (or switch revisions)
5535
5535
5536 Update the repository's working directory to the specified
5536 Update the repository's working directory to the specified
5537 changeset. If no changeset is specified, update to the tip of the
5537 changeset. If no changeset is specified, update to the tip of the
5538 current named branch.
5538 current named branch.
5539
5539
5540 If the changeset is not a descendant of the working directory's
5540 If the changeset is not a descendant of the working directory's
5541 parent, the update is aborted. With the -c/--check option, the
5541 parent, the update is aborted. With the -c/--check option, the
5542 working directory is checked for uncommitted changes; if none are
5542 working directory is checked for uncommitted changes; if none are
5543 found, the working directory is updated to the specified
5543 found, the working directory is updated to the specified
5544 changeset.
5544 changeset.
5545
5545
5546 Update sets the working directory's parent revision to the specified
5546 Update sets the working directory's parent revision to the specified
5547 changeset (see :hg:`help parents`).
5547 changeset (see :hg:`help parents`).
5548
5548
5549 The following rules apply when the working directory contains
5549 The following rules apply when the working directory contains
5550 uncommitted changes:
5550 uncommitted changes:
5551
5551
5552 1. If neither -c/--check nor -C/--clean is specified, and if
5552 1. If neither -c/--check nor -C/--clean is specified, and if
5553 the requested changeset is an ancestor or descendant of
5553 the requested changeset is an ancestor or descendant of
5554 the working directory's parent, the uncommitted changes
5554 the working directory's parent, the uncommitted changes
5555 are merged into the requested changeset and the merged
5555 are merged into the requested changeset and the merged
5556 result is left uncommitted. If the requested changeset is
5556 result is left uncommitted. If the requested changeset is
5557 not an ancestor or descendant (that is, it is on another
5557 not an ancestor or descendant (that is, it is on another
5558 branch), the update is aborted and the uncommitted changes
5558 branch), the update is aborted and the uncommitted changes
5559 are preserved.
5559 are preserved.
5560
5560
5561 2. With the -c/--check option, the update is aborted and the
5561 2. With the -c/--check option, the update is aborted and the
5562 uncommitted changes are preserved.
5562 uncommitted changes are preserved.
5563
5563
5564 3. With the -C/--clean option, uncommitted changes are discarded and
5564 3. With the -C/--clean option, uncommitted changes are discarded and
5565 the working directory is updated to the requested changeset.
5565 the working directory is updated to the requested changeset.
5566
5566
5567 Use null as the changeset to remove the working directory (like
5567 Use null as the changeset to remove the working directory (like
5568 :hg:`clone -U`).
5568 :hg:`clone -U`).
5569
5569
5570 If you want to revert just one file to an older revision, use
5570 If you want to revert just one file to an older revision, use
5571 :hg:`revert [-r REV] NAME`.
5571 :hg:`revert [-r REV] NAME`.
5572
5572
5573 See :hg:`help dates` for a list of formats valid for -d/--date.
5573 See :hg:`help dates` for a list of formats valid for -d/--date.
5574
5574
5575 Returns 0 on success, 1 if there are unresolved files.
5575 Returns 0 on success, 1 if there are unresolved files.
5576 """
5576 """
5577 if rev and node:
5577 if rev and node:
5578 raise util.Abort(_("please specify just one revision"))
5578 raise util.Abort(_("please specify just one revision"))
5579
5579
5580 if rev is None or rev == '':
5580 if rev is None or rev == '':
5581 rev = node
5581 rev = node
5582
5582
5583 # if we defined a bookmark, we have to remember the original bookmark name
5583 # if we defined a bookmark, we have to remember the original bookmark name
5584 brev = rev
5584 brev = rev
5585 rev = scmutil.revsingle(repo, rev, rev).rev()
5585 rev = scmutil.revsingle(repo, rev, rev).rev()
5586
5586
5587 if check and clean:
5587 if check and clean:
5588 raise util.Abort(_("cannot specify both -c/--check and -C/--clean"))
5588 raise util.Abort(_("cannot specify both -c/--check and -C/--clean"))
5589
5589
5590 if check:
5590 if check:
5591 # we could use dirty() but we can ignore merge and branch trivia
5591 # we could use dirty() but we can ignore merge and branch trivia
5592 c = repo[None]
5592 c = repo[None]
5593 if c.modified() or c.added() or c.removed():
5593 if c.modified() or c.added() or c.removed():
5594 raise util.Abort(_("uncommitted local changes"))
5594 raise util.Abort(_("uncommitted local changes"))
5595
5595
5596 if date:
5596 if date:
5597 if rev is not None:
5597 if rev is not None:
5598 raise util.Abort(_("you can't specify a revision and a date"))
5598 raise util.Abort(_("you can't specify a revision and a date"))
5599 rev = cmdutil.finddate(ui, repo, date)
5599 rev = cmdutil.finddate(ui, repo, date)
5600
5600
5601 if clean or check:
5601 if clean or check:
5602 ret = hg.clean(repo, rev)
5602 ret = hg.clean(repo, rev)
5603 else:
5603 else:
5604 ret = hg.update(repo, rev)
5604 ret = hg.update(repo, rev)
5605
5605
5606 if brev in repo._bookmarks:
5606 if brev in repo._bookmarks:
5607 bookmarks.setcurrent(repo, brev)
5607 bookmarks.setcurrent(repo, brev)
5608
5608
5609 return ret
5609 return ret
5610
5610
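# --- illustrative sketch (not part of commands.py) --------------------------
# The -c/--check vs -C/--clean dispatch above, reduced to its core: --check
# refuses to proceed when the working directory is dirty, --clean discards
# local changes, and the two options are mutually exclusive.
def choose_update(repo, rev, clean=False, check=False):
    if check and clean:
        raise ValueError("cannot specify both -c/--check and -C/--clean")
    if clean or check:
        return hg.clean(repo, rev)
    return hg.update(repo, rev)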
5611 @command('verify', [])
5611 @command('verify', [])
5612 def verify(ui, repo):
5612 def verify(ui, repo):
5613 """verify the integrity of the repository
5613 """verify the integrity of the repository
5614
5614
5615 Verify the integrity of the current repository.
5615 Verify the integrity of the current repository.
5616
5616
5617 This will perform an extensive check of the repository's
5617 This will perform an extensive check of the repository's
5618 integrity, validating the hashes and checksums of each entry in
5618 integrity, validating the hashes and checksums of each entry in
5619 the changelog, manifest, and tracked files, as well as the
5619 the changelog, manifest, and tracked files, as well as the
5620 integrity of their crosslinks and indices.
5620 integrity of their crosslinks and indices.
5621
5621
5622 Returns 0 on success, 1 if errors are encountered.
5622 Returns 0 on success, 1 if errors are encountered.
5623 """
5623 """
5624 return hg.verify(repo)
5624 return hg.verify(repo)
5625
5625
5626 @command('version', [])
5626 @command('version', [])
5627 def version_(ui):
5627 def version_(ui):
5628 """output version and copyright information"""
5628 """output version and copyright information"""
5629 ui.write(_("Mercurial Distributed SCM (version %s)\n")
5629 ui.write(_("Mercurial Distributed SCM (version %s)\n")
5630 % util.version())
5630 % util.version())
5631 ui.status(_(
5631 ui.status(_(
5632 "(see http://mercurial.selenic.com for more information)\n"
5632 "(see http://mercurial.selenic.com for more information)\n"
5633 "\nCopyright (C) 2005-2011 Matt Mackall and others\n"
5633 "\nCopyright (C) 2005-2011 Matt Mackall and others\n"
5634 "This is free software; see the source for copying conditions. "
5634 "This is free software; see the source for copying conditions. "
5635 "There is NO\nwarranty; "
5635 "There is NO\nwarranty; "
5636 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
5636 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
5637 ))
5637 ))
5638
5638
5639 norepo = ("clone init version help debugcommands debugcomplete"
5639 norepo = ("clone init version help debugcommands debugcomplete"
5640 " debugdate debuginstall debugfsinfo debugpushkey debugwireargs"
5640 " debugdate debuginstall debugfsinfo debugpushkey debugwireargs"
5641 " debugknown debuggetbundle debugbundle")
5641 " debugknown debuggetbundle debugbundle")
5642 optionalrepo = ("identify paths serve showconfig debugancestor debugdag"
5642 optionalrepo = ("identify paths serve showconfig debugancestor debugdag"
5643 " debugdata debugindex debugindexdot debugrevlog")
5643 " debugdata debugindex debugindexdot debugrevlog")
@@ -1,578 +1,578 b''
1 # hg.py - repository classes for mercurial
1 # hg.py - repository classes for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 from i18n import _
9 from i18n import _
10 from lock import release
10 from lock import release
11 from node import hex, nullid
11 from node import hex, nullid
12 import localrepo, bundlerepo, httprepo, sshrepo, statichttprepo, bookmarks
12 import localrepo, bundlerepo, httprepo, sshrepo, statichttprepo, bookmarks
13 import lock, util, extensions, error, node
13 import lock, util, extensions, error, node
14 import cmdutil, discovery
14 import cmdutil, discovery
15 import merge as mergemod
15 import merge as mergemod
16 import verify as verifymod
16 import verify as verifymod
17 import errno, os, shutil
17 import errno, os, shutil
18
18
19 def _local(path):
19 def _local(path):
20 path = util.expandpath(util.urllocalpath(path))
20 path = util.expandpath(util.urllocalpath(path))
21 return (os.path.isfile(path) and bundlerepo or localrepo)
21 return (os.path.isfile(path) and bundlerepo or localrepo)
22
22
23 def addbranchrevs(lrepo, repo, branches, revs):
23 def addbranchrevs(lrepo, repo, branches, revs):
24 hashbranch, branches = branches
24 hashbranch, branches = branches
25 if not hashbranch and not branches:
25 if not hashbranch and not branches:
26 return revs or None, revs and revs[0] or None
26 return revs or None, revs and revs[0] or None
27 revs = revs and list(revs) or []
27 revs = revs and list(revs) or []
28 if not repo.capable('branchmap'):
28 if not repo.capable('branchmap'):
29 if branches:
29 if branches:
30 raise util.Abort(_("remote branch lookup not supported"))
30 raise util.Abort(_("remote branch lookup not supported"))
31 revs.append(hashbranch)
31 revs.append(hashbranch)
32 return revs, revs[0]
32 return revs, revs[0]
33 branchmap = repo.branchmap()
33 branchmap = repo.branchmap()
34
34
35 def primary(branch):
35 def primary(branch):
36 if branch == '.':
36 if branch == '.':
37 if not lrepo or not lrepo.local():
37 if not lrepo or not lrepo.local():
38 raise util.Abort(_("dirstate branch not accessible"))
38 raise util.Abort(_("dirstate branch not accessible"))
39 branch = lrepo.dirstate.branch()
39 branch = lrepo.dirstate.branch()
40 if branch in branchmap:
40 if branch in branchmap:
41 revs.extend(node.hex(r) for r in reversed(branchmap[branch]))
41 revs.extend(node.hex(r) for r in reversed(branchmap[branch]))
42 return True
42 return True
43 else:
43 else:
44 return False
44 return False
45
45
46 for branch in branches:
46 for branch in branches:
47 if not primary(branch):
47 if not primary(branch):
48 raise error.RepoLookupError(_("unknown branch '%s'") % branch)
48 raise error.RepoLookupError(_("unknown branch '%s'") % branch)
49 if hashbranch:
49 if hashbranch:
50 if not primary(hashbranch):
50 if not primary(hashbranch):
51 revs.append(hashbranch)
51 revs.append(hashbranch)
52 return revs, revs[0]
52 return revs, revs[0]
53
53
54 def parseurl(path, branches=None):
54 def parseurl(path, branches=None):
55 '''parse url#branch, returning (url, (branch, branches))'''
55 '''parse url#branch, returning (url, (branch, branches))'''
56
56
57 u = util.url(path)
57 u = util.url(path)
58 branch = None
58 branch = None
59 if u.fragment:
59 if u.fragment:
60 branch = u.fragment
60 branch = u.fragment
61 u.fragment = None
61 u.fragment = None
62 return str(u), (branch, branches or [])
62 return str(u), (branch, branches or [])
63
63
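# Usage sketch (hypothetical URL; assumes this file is importable as
# mercurial.hg): parseurl() splits an optional '#branch' fragment off a
# repository URL and returns it alongside any explicitly requested branches.
from mercurial import hg

url, (branch, branches) = hg.parseurl('http://example.com/repo#stable')
print(url)       # expected: http://example.com/repo
print(branch)    # expected: stable
print(branches)  # expected: []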
64 schemes = {
64 schemes = {
65 'bundle': bundlerepo,
65 'bundle': bundlerepo,
66 'file': _local,
66 'file': _local,
67 'http': httprepo,
67 'http': httprepo,
68 'https': httprepo,
68 'https': httprepo,
69 'ssh': sshrepo,
69 'ssh': sshrepo,
70 'static-http': statichttprepo,
70 'static-http': statichttprepo,
71 }
71 }
72
72
73 def _peerlookup(path):
73 def _peerlookup(path):
74 u = util.url(path)
74 u = util.url(path)
75 scheme = u.scheme or 'file'
75 scheme = u.scheme or 'file'
76 thing = schemes.get(scheme) or schemes['file']
76 thing = schemes.get(scheme) or schemes['file']
77 try:
77 try:
78 return thing(path)
78 return thing(path)
79 except TypeError:
79 except TypeError:
80 return thing
80 return thing
81
81
82 def islocal(repo):
82 def islocal(repo):
83 '''return true if repo or path is local'''
83 '''return true if repo or path is local'''
84 if isinstance(repo, str):
84 if isinstance(repo, str):
85 try:
85 try:
86 return _peerlookup(repo).islocal(repo)
86 return _peerlookup(repo).islocal(repo)
87 except AttributeError:
87 except AttributeError:
88 return False
88 return False
89 return repo.local()
89 return repo.local()
90
90
91 def repository(ui, path='', create=False):
91 def repository(ui, path='', create=False):
92 """return a repository object for the specified path"""
92 """return a repository object for the specified path"""
93 repo = _peerlookup(path).instance(ui, path, create)
93 repo = _peerlookup(path).instance(ui, path, create)
94 ui = getattr(repo, "ui", ui)
94 ui = getattr(repo, "ui", ui)
95 for name, module in extensions.extensions():
95 for name, module in extensions.extensions():
96 hook = getattr(module, 'reposetup', None)
96 hook = getattr(module, 'reposetup', None)
97 if hook:
97 if hook:
98 hook(ui, repo)
98 hook(ui, repo)
99 return repo
99 return repo
100
100
101 def peer(uiorrepo, opts, path, create=False):
101 def peer(uiorrepo, opts, path, create=False):
102 '''return a repository peer for the specified path'''
102 '''return a repository peer for the specified path'''
103 rui = remoteui(uiorrepo, opts)
103 rui = remoteui(uiorrepo, opts)
104 return repository(rui, path, create)
104 return repository(rui, path, create)
105
105
106 def defaultdest(source):
106 def defaultdest(source):
107 '''return default destination of clone if none is given'''
107 '''return default destination of clone if none is given'''
108 return os.path.basename(os.path.normpath(source))
108 return os.path.basename(os.path.normpath(source))
109
109
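# Behavioural sketch of defaultdest() above, using only the stdlib calls it
# wraps (paths are hypothetical): the clone destination defaults to the last
# path component of the source.
import os

for src in ('/srv/repos/project', '/srv/repos/project/'):
    print(os.path.basename(os.path.normpath(src)))   # expected: project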
110 def share(ui, source, dest=None, update=True):
110 def share(ui, source, dest=None, update=True):
111 '''create a shared repository'''
111 '''create a shared repository'''
112
112
113 if not islocal(source):
113 if not islocal(source):
114 raise util.Abort(_('can only share local repositories'))
114 raise util.Abort(_('can only share local repositories'))
115
115
116 if not dest:
116 if not dest:
117 dest = defaultdest(source)
117 dest = defaultdest(source)
118 else:
118 else:
119 dest = ui.expandpath(dest)
119 dest = ui.expandpath(dest)
120
120
121 if isinstance(source, str):
121 if isinstance(source, str):
122 origsource = ui.expandpath(source)
122 origsource = ui.expandpath(source)
123 source, branches = parseurl(origsource)
123 source, branches = parseurl(origsource)
124 srcrepo = repository(ui, source)
124 srcrepo = repository(ui, source)
125 rev, checkout = addbranchrevs(srcrepo, srcrepo, branches, None)
125 rev, checkout = addbranchrevs(srcrepo, srcrepo, branches, None)
126 else:
126 else:
127 srcrepo = source
127 srcrepo = source
128 origsource = source = srcrepo.url()
128 origsource = source = srcrepo.url()
129 checkout = None
129 checkout = None
130
130
131 sharedpath = srcrepo.sharedpath # if our source is already sharing
131 sharedpath = srcrepo.sharedpath # if our source is already sharing
132
132
133 - root = util.realpath(dest)
133 + root = os.path.realpath(dest)
134 roothg = os.path.join(root, '.hg')
134 roothg = os.path.join(root, '.hg')
135
135
136 if os.path.exists(roothg):
136 if os.path.exists(roothg):
137 raise util.Abort(_('destination already exists'))
137 raise util.Abort(_('destination already exists'))
138
138
139 if not os.path.isdir(root):
139 if not os.path.isdir(root):
140 os.mkdir(root)
140 os.mkdir(root)
141 util.makedir(roothg, notindexed=True)
141 util.makedir(roothg, notindexed=True)
142
142
143 requirements = ''
143 requirements = ''
144 try:
144 try:
145 requirements = srcrepo.opener.read('requires')
145 requirements = srcrepo.opener.read('requires')
146 except IOError, inst:
146 except IOError, inst:
147 if inst.errno != errno.ENOENT:
147 if inst.errno != errno.ENOENT:
148 raise
148 raise
149
149
150 requirements += 'shared\n'
150 requirements += 'shared\n'
151 util.writefile(os.path.join(roothg, 'requires'), requirements)
151 util.writefile(os.path.join(roothg, 'requires'), requirements)
152 util.writefile(os.path.join(roothg, 'sharedpath'), sharedpath)
152 util.writefile(os.path.join(roothg, 'sharedpath'), sharedpath)
153
153
154 r = repository(ui, root)
154 r = repository(ui, root)
155
155
156 default = srcrepo.ui.config('paths', 'default')
156 default = srcrepo.ui.config('paths', 'default')
157 if default:
157 if default:
158 fp = r.opener("hgrc", "w", text=True)
158 fp = r.opener("hgrc", "w", text=True)
159 fp.write("[paths]\n")
159 fp.write("[paths]\n")
160 fp.write("default = %s\n" % default)
160 fp.write("default = %s\n" % default)
161 fp.close()
161 fp.close()
162
162
163 if update:
163 if update:
164 r.ui.status(_("updating working directory\n"))
164 r.ui.status(_("updating working directory\n"))
165 if update is not True:
165 if update is not True:
166 checkout = update
166 checkout = update
167 for test in (checkout, 'default', 'tip'):
167 for test in (checkout, 'default', 'tip'):
168 if test is None:
168 if test is None:
169 continue
169 continue
170 try:
170 try:
171 uprev = r.lookup(test)
171 uprev = r.lookup(test)
172 break
172 break
173 except error.RepoLookupError:
173 except error.RepoLookupError:
174 continue
174 continue
175 _update(r, uprev)
175 _update(r, uprev)
176
176
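# Usage sketch (hypothetical paths; assumes a Mercurial installation providing
# mercurial.ui and mercurial.hg as above): share() creates a checkout whose
# .hg/sharedpath points at the source repository's store instead of copying it.
from mercurial import ui as uimod, hg

hg.share(uimod.ui(), '/srv/repos/project', '/home/alice/project-share',
         update=True)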
177 def copystore(ui, srcrepo, destpath):
177 def copystore(ui, srcrepo, destpath):
178 '''copy files from the store of srcrepo into destpath
178 '''copy files from the store of srcrepo into destpath
179
179
180 returns destlock
180 returns destlock
181 '''
181 '''
182 destlock = None
182 destlock = None
183 try:
183 try:
184 hardlink = None
184 hardlink = None
185 num = 0
185 num = 0
186 for f in srcrepo.store.copylist():
186 for f in srcrepo.store.copylist():
187 src = os.path.join(srcrepo.sharedpath, f)
187 src = os.path.join(srcrepo.sharedpath, f)
188 dst = os.path.join(destpath, f)
188 dst = os.path.join(destpath, f)
189 dstbase = os.path.dirname(dst)
189 dstbase = os.path.dirname(dst)
190 if dstbase and not os.path.exists(dstbase):
190 if dstbase and not os.path.exists(dstbase):
191 os.mkdir(dstbase)
191 os.mkdir(dstbase)
192 if os.path.exists(src):
192 if os.path.exists(src):
193 if dst.endswith('data'):
193 if dst.endswith('data'):
194 # lock to avoid premature writing to the target
194 # lock to avoid premature writing to the target
195 destlock = lock.lock(os.path.join(dstbase, "lock"))
195 destlock = lock.lock(os.path.join(dstbase, "lock"))
196 hardlink, n = util.copyfiles(src, dst, hardlink)
196 hardlink, n = util.copyfiles(src, dst, hardlink)
197 num += n
197 num += n
198 if hardlink:
198 if hardlink:
199 ui.debug("linked %d files\n" % num)
199 ui.debug("linked %d files\n" % num)
200 else:
200 else:
201 ui.debug("copied %d files\n" % num)
201 ui.debug("copied %d files\n" % num)
202 return destlock
202 return destlock
203 except:
203 except:
204 release(destlock)
204 release(destlock)
205 raise
205 raise
206
206
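# Standalone sketch of the copy loop in copystore() above (hypothetical store
# layout; shutil stands in for util.copyfiles, and the locking and hardlink
# handling are omitted): create each destination directory on demand and
# count what was copied.
import os, shutil

def copy_store_files(srcstore, deststore, names):
    copied = 0
    for name in names:
        src = os.path.join(srcstore, name)
        dst = os.path.join(deststore, name)
        dstdir = os.path.dirname(dst)
        if dstdir and not os.path.exists(dstdir):
            os.makedirs(dstdir)
        if os.path.isdir(src):
            shutil.copytree(src, dst)
            copied += 1
        elif os.path.exists(src):
            shutil.copy2(src, dst)
            copied += 1
    return copied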
207 def clone(ui, peeropts, source, dest=None, pull=False, rev=None,
207 def clone(ui, peeropts, source, dest=None, pull=False, rev=None,
208 update=True, stream=False, branch=None):
208 update=True, stream=False, branch=None):
209 """Make a copy of an existing repository.
209 """Make a copy of an existing repository.
210
210
211 Create a copy of an existing repository in a new directory. The
211 Create a copy of an existing repository in a new directory. The
212 source and destination are URLs, as passed to the repository
212 source and destination are URLs, as passed to the repository
213 function. Returns a pair of repository objects, the source and
213 function. Returns a pair of repository objects, the source and
214 newly created destination.
214 newly created destination.
215
215
216 The location of the source is added to the new repository's
216 The location of the source is added to the new repository's
217 .hg/hgrc file, as the default to be used for future pulls and
217 .hg/hgrc file, as the default to be used for future pulls and
218 pushes.
218 pushes.
219
219
220 If an exception is raised, the partly cloned/updated destination
220 If an exception is raised, the partly cloned/updated destination
221 repository will be deleted.
221 repository will be deleted.
222
222
223 Arguments:
223 Arguments:
224
224
225 source: repository object or URL
225 source: repository object or URL
226
226
227 dest: URL of destination repository to create (defaults to base
227 dest: URL of destination repository to create (defaults to base
228 name of source repository)
228 name of source repository)
229
229
230 pull: always pull from source repository, even in local case
230 pull: always pull from source repository, even in local case
231
231
232 stream: stream raw data uncompressed from repository (fast over
232 stream: stream raw data uncompressed from repository (fast over
233 LAN, slow over WAN)
233 LAN, slow over WAN)
234
234
235 rev: revision to clone up to (implies pull=True)
235 rev: revision to clone up to (implies pull=True)
236
236
237 update: update working directory after clone completes, if
237 update: update working directory after clone completes, if
238 destination is local repository (True means update to default rev,
238 destination is local repository (True means update to default rev,
239 anything else is treated as a revision)
239 anything else is treated as a revision)
240
240
241 branch: branches to clone
241 branch: branches to clone
242 """
242 """
243
243
244 if isinstance(source, str):
244 if isinstance(source, str):
245 origsource = ui.expandpath(source)
245 origsource = ui.expandpath(source)
246 source, branch = parseurl(origsource, branch)
246 source, branch = parseurl(origsource, branch)
247 srcrepo = repository(remoteui(ui, peeropts), source)
247 srcrepo = repository(remoteui(ui, peeropts), source)
248 else:
248 else:
249 srcrepo = source
249 srcrepo = source
250 branch = (None, branch or [])
250 branch = (None, branch or [])
251 origsource = source = srcrepo.url()
251 origsource = source = srcrepo.url()
252 rev, checkout = addbranchrevs(srcrepo, srcrepo, branch, rev)
252 rev, checkout = addbranchrevs(srcrepo, srcrepo, branch, rev)
253
253
254 if dest is None:
254 if dest is None:
255 dest = defaultdest(source)
255 dest = defaultdest(source)
256 ui.status(_("destination directory: %s\n") % dest)
256 ui.status(_("destination directory: %s\n") % dest)
257 else:
257 else:
258 dest = ui.expandpath(dest)
258 dest = ui.expandpath(dest)
259
259
260 dest = util.urllocalpath(dest)
260 dest = util.urllocalpath(dest)
261 source = util.urllocalpath(source)
261 source = util.urllocalpath(source)
262
262
263 if os.path.exists(dest):
263 if os.path.exists(dest):
264 if not os.path.isdir(dest):
264 if not os.path.isdir(dest):
265 raise util.Abort(_("destination '%s' already exists") % dest)
265 raise util.Abort(_("destination '%s' already exists") % dest)
266 elif os.listdir(dest):
266 elif os.listdir(dest):
267 raise util.Abort(_("destination '%s' is not empty") % dest)
267 raise util.Abort(_("destination '%s' is not empty") % dest)
268
268
269 class DirCleanup(object):
269 class DirCleanup(object):
270 def __init__(self, dir_):
270 def __init__(self, dir_):
271 self.rmtree = shutil.rmtree
271 self.rmtree = shutil.rmtree
272 self.dir_ = dir_
272 self.dir_ = dir_
273 def close(self):
273 def close(self):
274 self.dir_ = None
274 self.dir_ = None
275 def cleanup(self):
275 def cleanup(self):
276 if self.dir_:
276 if self.dir_:
277 self.rmtree(self.dir_, True)
277 self.rmtree(self.dir_, True)
278
278
279 srclock = destlock = dircleanup = None
279 srclock = destlock = dircleanup = None
280 try:
280 try:
281 abspath = origsource
281 abspath = origsource
282 if islocal(origsource):
282 if islocal(origsource):
283 abspath = os.path.abspath(util.urllocalpath(origsource))
283 abspath = os.path.abspath(util.urllocalpath(origsource))
284
284
285 if islocal(dest):
285 if islocal(dest):
286 dircleanup = DirCleanup(dest)
286 dircleanup = DirCleanup(dest)
287
287
288 copy = False
288 copy = False
289 if srcrepo.cancopy() and islocal(dest):
289 if srcrepo.cancopy() and islocal(dest):
290 copy = not pull and not rev
290 copy = not pull and not rev
291
291
292 if copy:
292 if copy:
293 try:
293 try:
294 # we use a lock here because if we race with commit, we
294 # we use a lock here because if we race with commit, we
295 # can end up with extra data in the cloned revlogs that's
295 # can end up with extra data in the cloned revlogs that's
296 # not pointed to by changesets, thus causing verify to
296 # not pointed to by changesets, thus causing verify to
297 # fail
297 # fail
298 srclock = srcrepo.lock(wait=False)
298 srclock = srcrepo.lock(wait=False)
299 except error.LockError:
299 except error.LockError:
300 copy = False
300 copy = False
301
301
302 if copy:
302 if copy:
303 srcrepo.hook('preoutgoing', throw=True, source='clone')
303 srcrepo.hook('preoutgoing', throw=True, source='clone')
304 - hgdir = util.realpath(os.path.join(dest, ".hg"))
304 + hgdir = os.path.realpath(os.path.join(dest, ".hg"))
305 if not os.path.exists(dest):
305 if not os.path.exists(dest):
306 os.mkdir(dest)
306 os.mkdir(dest)
307 else:
307 else:
308 # only clean up directories we create ourselves
308 # only clean up directories we create ourselves
309 dircleanup.dir_ = hgdir
309 dircleanup.dir_ = hgdir
310 try:
310 try:
311 destpath = hgdir
311 destpath = hgdir
312 util.makedir(destpath, notindexed=True)
312 util.makedir(destpath, notindexed=True)
313 except OSError, inst:
313 except OSError, inst:
314 if inst.errno == errno.EEXIST:
314 if inst.errno == errno.EEXIST:
315 dircleanup.close()
315 dircleanup.close()
316 raise util.Abort(_("destination '%s' already exists")
316 raise util.Abort(_("destination '%s' already exists")
317 % dest)
317 % dest)
318 raise
318 raise
319
319
320 destlock = copystore(ui, srcrepo, destpath)
320 destlock = copystore(ui, srcrepo, destpath)
321
321
322 # we need to re-init the repo after manually copying the data
322 # we need to re-init the repo after manually copying the data
323 # into it
323 # into it
324 destrepo = repository(remoteui(ui, peeropts), dest)
324 destrepo = repository(remoteui(ui, peeropts), dest)
325 srcrepo.hook('outgoing', source='clone',
325 srcrepo.hook('outgoing', source='clone',
326 node=node.hex(node.nullid))
326 node=node.hex(node.nullid))
327 else:
327 else:
328 try:
328 try:
329 destrepo = repository(remoteui(ui, peeropts), dest,
329 destrepo = repository(remoteui(ui, peeropts), dest,
330 create=True)
330 create=True)
331 except OSError, inst:
331 except OSError, inst:
332 if inst.errno == errno.EEXIST:
332 if inst.errno == errno.EEXIST:
333 dircleanup.close()
333 dircleanup.close()
334 raise util.Abort(_("destination '%s' already exists")
334 raise util.Abort(_("destination '%s' already exists")
335 % dest)
335 % dest)
336 raise
336 raise
337
337
338 revs = None
338 revs = None
339 if rev:
339 if rev:
340 if not srcrepo.capable('lookup'):
340 if not srcrepo.capable('lookup'):
341 raise util.Abort(_("src repository does not support "
341 raise util.Abort(_("src repository does not support "
342 "revision lookup and so doesn't "
342 "revision lookup and so doesn't "
343 "support clone by revision"))
343 "support clone by revision"))
344 revs = [srcrepo.lookup(r) for r in rev]
344 revs = [srcrepo.lookup(r) for r in rev]
345 checkout = revs[0]
345 checkout = revs[0]
346 if destrepo.local():
346 if destrepo.local():
347 destrepo.clone(srcrepo, heads=revs, stream=stream)
347 destrepo.clone(srcrepo, heads=revs, stream=stream)
348 elif srcrepo.local():
348 elif srcrepo.local():
349 srcrepo.push(destrepo, revs=revs)
349 srcrepo.push(destrepo, revs=revs)
350 else:
350 else:
351 raise util.Abort(_("clone from remote to remote not supported"))
351 raise util.Abort(_("clone from remote to remote not supported"))
352
352
353 if dircleanup:
353 if dircleanup:
354 dircleanup.close()
354 dircleanup.close()
355
355
356 if destrepo.local():
356 if destrepo.local():
357 fp = destrepo.opener("hgrc", "w", text=True)
357 fp = destrepo.opener("hgrc", "w", text=True)
358 fp.write("[paths]\n")
358 fp.write("[paths]\n")
359 fp.write("default = %s\n" % abspath)
359 fp.write("default = %s\n" % abspath)
360 fp.close()
360 fp.close()
361
361
362 destrepo.ui.setconfig('paths', 'default', abspath)
362 destrepo.ui.setconfig('paths', 'default', abspath)
363
363
364 if update:
364 if update:
365 if update is not True:
365 if update is not True:
366 checkout = update
366 checkout = update
367 if srcrepo.local():
367 if srcrepo.local():
368 checkout = srcrepo.lookup(update)
368 checkout = srcrepo.lookup(update)
369 for test in (checkout, 'default', 'tip'):
369 for test in (checkout, 'default', 'tip'):
370 if test is None:
370 if test is None:
371 continue
371 continue
372 try:
372 try:
373 uprev = destrepo.lookup(test)
373 uprev = destrepo.lookup(test)
374 break
374 break
375 except error.RepoLookupError:
375 except error.RepoLookupError:
376 continue
376 continue
377 bn = destrepo[uprev].branch()
377 bn = destrepo[uprev].branch()
378 destrepo.ui.status(_("updating to branch %s\n") % bn)
378 destrepo.ui.status(_("updating to branch %s\n") % bn)
379 _update(destrepo, uprev)
379 _update(destrepo, uprev)
380
380
381 # clone all bookmarks
381 # clone all bookmarks
382 if destrepo.local() and srcrepo.capable("pushkey"):
382 if destrepo.local() and srcrepo.capable("pushkey"):
383 rb = srcrepo.listkeys('bookmarks')
383 rb = srcrepo.listkeys('bookmarks')
384 for k, n in rb.iteritems():
384 for k, n in rb.iteritems():
385 try:
385 try:
386 m = destrepo.lookup(n)
386 m = destrepo.lookup(n)
387 destrepo._bookmarks[k] = m
387 destrepo._bookmarks[k] = m
388 except error.RepoLookupError:
388 except error.RepoLookupError:
389 pass
389 pass
390 if rb:
390 if rb:
391 bookmarks.write(destrepo)
391 bookmarks.write(destrepo)
392 elif srcrepo.local() and destrepo.capable("pushkey"):
392 elif srcrepo.local() and destrepo.capable("pushkey"):
393 for k, n in srcrepo._bookmarks.iteritems():
393 for k, n in srcrepo._bookmarks.iteritems():
394 destrepo.pushkey('bookmarks', k, '', hex(n))
394 destrepo.pushkey('bookmarks', k, '', hex(n))
395
395
396 return srcrepo, destrepo
396 return srcrepo, destrepo
397 finally:
397 finally:
398 release(srclock, destlock)
398 release(srclock, destlock)
399 if dircleanup is not None:
399 if dircleanup is not None:
400 dircleanup.cleanup()
400 dircleanup.cleanup()
401
401
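# Usage sketch (hypothetical URL and destination; assumes mercurial.ui and
# mercurial.hg as above): clone() returns the pair of repository objects
# described in its docstring.
from mercurial import ui as uimod, hg

srcrepo, destrepo = hg.clone(uimod.ui(), {}, 'http://example.com/repo',
                             dest='/tmp/repo-clone')
print(destrepo.root)   # expected: the real path of /tmp/repo-clone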
402 def _showstats(repo, stats):
402 def _showstats(repo, stats):
403 repo.ui.status(_("%d files updated, %d files merged, "
403 repo.ui.status(_("%d files updated, %d files merged, "
404 "%d files removed, %d files unresolved\n") % stats)
404 "%d files removed, %d files unresolved\n") % stats)
405
405
406 def update(repo, node):
406 def update(repo, node):
407 """update the working directory to node, merging linear changes"""
407 """update the working directory to node, merging linear changes"""
408 stats = mergemod.update(repo, node, False, False, None)
408 stats = mergemod.update(repo, node, False, False, None)
409 _showstats(repo, stats)
409 _showstats(repo, stats)
410 if stats[3]:
410 if stats[3]:
411 repo.ui.status(_("use 'hg resolve' to retry unresolved file merges\n"))
411 repo.ui.status(_("use 'hg resolve' to retry unresolved file merges\n"))
412 return stats[3] > 0
412 return stats[3] > 0
413
413
414 # naming conflict in clone()
414 # naming conflict in clone()
415 _update = update
415 _update = update
416
416
417 def clean(repo, node, show_stats=True):
417 def clean(repo, node, show_stats=True):
418 """forcibly switch the working directory to node, clobbering changes"""
418 """forcibly switch the working directory to node, clobbering changes"""
419 stats = mergemod.update(repo, node, False, True, None)
419 stats = mergemod.update(repo, node, False, True, None)
420 if show_stats:
420 if show_stats:
421 _showstats(repo, stats)
421 _showstats(repo, stats)
422 return stats[3] > 0
422 return stats[3] > 0
423
423
424 def merge(repo, node, force=None, remind=True):
424 def merge(repo, node, force=None, remind=True):
425 """Branch merge with node, resolving changes. Return true if any
425 """Branch merge with node, resolving changes. Return true if any
426 unresolved conflicts remain."""
426 unresolved conflicts remain."""
427 stats = mergemod.update(repo, node, True, force, False)
427 stats = mergemod.update(repo, node, True, force, False)
428 _showstats(repo, stats)
428 _showstats(repo, stats)
429 if stats[3]:
429 if stats[3]:
430 repo.ui.status(_("use 'hg resolve' to retry unresolved file merges "
430 repo.ui.status(_("use 'hg resolve' to retry unresolved file merges "
431 "or 'hg update -C .' to abandon\n"))
431 "or 'hg update -C .' to abandon\n"))
432 elif remind:
432 elif remind:
433 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
433 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
434 return stats[3] > 0
434 return stats[3] > 0
435
435
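# Standalone sketch of the return convention shared by update(), clean() and
# merge() above: mergemod.update() reports (updated, merged, removed,
# unresolved) counts, and the wrappers return True when anything is unresolved.
def has_conflicts(stats):
    updated, merged, removed, unresolved = stats
    return unresolved > 0

print(has_conflicts((3, 1, 0, 0)))   # expected: False (clean update)
print(has_conflicts((0, 2, 0, 1)))   # expected: True ('hg resolve' needed)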
436 def _incoming(displaychlist, subreporecurse, ui, repo, source,
436 def _incoming(displaychlist, subreporecurse, ui, repo, source,
437 opts, buffered=False):
437 opts, buffered=False):
438 """
438 """
439 Helper for incoming / gincoming.
439 Helper for incoming / gincoming.
440 displaychlist gets called with
440 displaychlist gets called with
441 (remoterepo, incomingchangesetlist, displayer) parameters,
441 (remoterepo, incomingchangesetlist, displayer) parameters,
442 and is supposed to contain only code that can't be unified.
442 and is supposed to contain only code that can't be unified.
443 """
443 """
444 source, branches = parseurl(ui.expandpath(source), opts.get('branch'))
444 source, branches = parseurl(ui.expandpath(source), opts.get('branch'))
445 other = peer(repo, opts, source)
445 other = peer(repo, opts, source)
446 ui.status(_('comparing with %s\n') % util.hidepassword(source))
446 ui.status(_('comparing with %s\n') % util.hidepassword(source))
447 revs, checkout = addbranchrevs(repo, other, branches, opts.get('rev'))
447 revs, checkout = addbranchrevs(repo, other, branches, opts.get('rev'))
448
448
449 if revs:
449 if revs:
450 revs = [other.lookup(rev) for rev in revs]
450 revs = [other.lookup(rev) for rev in revs]
451 other, chlist, cleanupfn = bundlerepo.getremotechanges(ui, repo, other,
451 other, chlist, cleanupfn = bundlerepo.getremotechanges(ui, repo, other,
452 revs, opts["bundle"], opts["force"])
452 revs, opts["bundle"], opts["force"])
453 try:
453 try:
454 if not chlist:
454 if not chlist:
455 ui.status(_("no changes found\n"))
455 ui.status(_("no changes found\n"))
456 return subreporecurse()
456 return subreporecurse()
457
457
458 displayer = cmdutil.show_changeset(ui, other, opts, buffered)
458 displayer = cmdutil.show_changeset(ui, other, opts, buffered)
459
459
460 # XXX once graphlog extension makes it into core,
460 # XXX once graphlog extension makes it into core,
461 # should be replaced by a if graph/else
461 # should be replaced by a if graph/else
462 displaychlist(other, chlist, displayer)
462 displaychlist(other, chlist, displayer)
463
463
464 displayer.close()
464 displayer.close()
465 finally:
465 finally:
466 cleanupfn()
466 cleanupfn()
467 subreporecurse()
467 subreporecurse()
468 return 0 # exit code is zero since we found incoming changes
468 return 0 # exit code is zero since we found incoming changes
469
469
470 def incoming(ui, repo, source, opts):
470 def incoming(ui, repo, source, opts):
471 def subreporecurse():
471 def subreporecurse():
472 ret = 1
472 ret = 1
473 if opts.get('subrepos'):
473 if opts.get('subrepos'):
474 ctx = repo[None]
474 ctx = repo[None]
475 for subpath in sorted(ctx.substate):
475 for subpath in sorted(ctx.substate):
476 sub = ctx.sub(subpath)
476 sub = ctx.sub(subpath)
477 ret = min(ret, sub.incoming(ui, source, opts))
477 ret = min(ret, sub.incoming(ui, source, opts))
478 return ret
478 return ret
479
479
480 def display(other, chlist, displayer):
480 def display(other, chlist, displayer):
481 limit = cmdutil.loglimit(opts)
481 limit = cmdutil.loglimit(opts)
482 if opts.get('newest_first'):
482 if opts.get('newest_first'):
483 chlist.reverse()
483 chlist.reverse()
484 count = 0
484 count = 0
485 for n in chlist:
485 for n in chlist:
486 if limit is not None and count >= limit:
486 if limit is not None and count >= limit:
487 break
487 break
488 parents = [p for p in other.changelog.parents(n) if p != nullid]
488 parents = [p for p in other.changelog.parents(n) if p != nullid]
489 if opts.get('no_merges') and len(parents) == 2:
489 if opts.get('no_merges') and len(parents) == 2:
490 continue
490 continue
491 count += 1
491 count += 1
492 displayer.show(other[n])
492 displayer.show(other[n])
493 return _incoming(display, subreporecurse, ui, repo, source, opts)
493 return _incoming(display, subreporecurse, ui, repo, source, opts)
494
494
495 def _outgoing(ui, repo, dest, opts):
495 def _outgoing(ui, repo, dest, opts):
496 dest = ui.expandpath(dest or 'default-push', dest or 'default')
496 dest = ui.expandpath(dest or 'default-push', dest or 'default')
497 dest, branches = parseurl(dest, opts.get('branch'))
497 dest, branches = parseurl(dest, opts.get('branch'))
498 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
498 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
499 revs, checkout = addbranchrevs(repo, repo, branches, opts.get('rev'))
499 revs, checkout = addbranchrevs(repo, repo, branches, opts.get('rev'))
500 if revs:
500 if revs:
501 revs = [repo.lookup(rev) for rev in revs]
501 revs = [repo.lookup(rev) for rev in revs]
502
502
503 other = peer(repo, opts, dest)
503 other = peer(repo, opts, dest)
504 common, outheads = discovery.findcommonoutgoing(repo, other, revs,
504 common, outheads = discovery.findcommonoutgoing(repo, other, revs,
505 force=opts.get('force'))
505 force=opts.get('force'))
506 o = repo.changelog.findmissing(common, outheads)
506 o = repo.changelog.findmissing(common, outheads)
507 if not o:
507 if not o:
508 ui.status(_("no changes found\n"))
508 ui.status(_("no changes found\n"))
509 return None
509 return None
510 return o
510 return o
511
511
512 def outgoing(ui, repo, dest, opts):
512 def outgoing(ui, repo, dest, opts):
513 def recurse():
513 def recurse():
514 ret = 1
514 ret = 1
515 if opts.get('subrepos'):
515 if opts.get('subrepos'):
516 ctx = repo[None]
516 ctx = repo[None]
517 for subpath in sorted(ctx.substate):
517 for subpath in sorted(ctx.substate):
518 sub = ctx.sub(subpath)
518 sub = ctx.sub(subpath)
519 ret = min(ret, sub.outgoing(ui, dest, opts))
519 ret = min(ret, sub.outgoing(ui, dest, opts))
520 return ret
520 return ret
521
521
522 limit = cmdutil.loglimit(opts)
522 limit = cmdutil.loglimit(opts)
523 o = _outgoing(ui, repo, dest, opts)
523 o = _outgoing(ui, repo, dest, opts)
524 if o is None:
524 if o is None:
525 return recurse()
525 return recurse()
526
526
527 if opts.get('newest_first'):
527 if opts.get('newest_first'):
528 o.reverse()
528 o.reverse()
529 displayer = cmdutil.show_changeset(ui, repo, opts)
529 displayer = cmdutil.show_changeset(ui, repo, opts)
530 count = 0
530 count = 0
531 for n in o:
531 for n in o:
532 if limit is not None and count >= limit:
532 if limit is not None and count >= limit:
533 break
533 break
534 parents = [p for p in repo.changelog.parents(n) if p != nullid]
534 parents = [p for p in repo.changelog.parents(n) if p != nullid]
535 if opts.get('no_merges') and len(parents) == 2:
535 if opts.get('no_merges') and len(parents) == 2:
536 continue
536 continue
537 count += 1
537 count += 1
538 displayer.show(repo[n])
538 displayer.show(repo[n])
539 displayer.close()
539 displayer.close()
540 recurse()
540 recurse()
541 return 0 # exit code is zero since we found outgoing changes
541 return 0 # exit code is zero since we found outgoing changes
542
542
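# Standalone sketch of the display loop shared by incoming() and outgoing()
# above: honour a --limit cap and skip merge changesets when --no-merges is
# set (the parents_of callback stands in for changelog.parents()).
def filtered(nodes, parents_of, limit=None, no_merges=False):
    shown = []
    for n in nodes:
        if limit is not None and len(shown) >= limit:
            break
        if no_merges and len(parents_of(n)) == 2:
            continue
        shown.append(n)
    return shown

print(filtered(range(10), lambda n: [], limit=3))   # expected: [0, 1, 2]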
543 def revert(repo, node, choose):
543 def revert(repo, node, choose):
544 """revert changes to revision in node without updating dirstate"""
544 """revert changes to revision in node without updating dirstate"""
545 return mergemod.update(repo, node, False, True, choose)[3] > 0
545 return mergemod.update(repo, node, False, True, choose)[3] > 0
546
546
547 def verify(repo):
547 def verify(repo):
548 """verify the consistency of a repository"""
548 """verify the consistency of a repository"""
549 return verifymod.verify(repo)
549 return verifymod.verify(repo)
550
550
551 def remoteui(src, opts):
551 def remoteui(src, opts):
552 'build a remote ui from ui or repo and opts'
552 'build a remote ui from ui or repo and opts'
553 if util.safehasattr(src, 'baseui'): # looks like a repository
553 if util.safehasattr(src, 'baseui'): # looks like a repository
554 dst = src.baseui.copy() # drop repo-specific config
554 dst = src.baseui.copy() # drop repo-specific config
555 src = src.ui # copy target options from repo
555 src = src.ui # copy target options from repo
556 else: # assume it's a global ui object
556 else: # assume it's a global ui object
557 dst = src.copy() # keep all global options
557 dst = src.copy() # keep all global options
558
558
559 # copy ssh-specific options
559 # copy ssh-specific options
560 for o in 'ssh', 'remotecmd':
560 for o in 'ssh', 'remotecmd':
561 v = opts.get(o) or src.config('ui', o)
561 v = opts.get(o) or src.config('ui', o)
562 if v:
562 if v:
563 dst.setconfig("ui", o, v)
563 dst.setconfig("ui", o, v)
564
564
565 # copy bundle-specific options
565 # copy bundle-specific options
566 r = src.config('bundle', 'mainreporoot')
566 r = src.config('bundle', 'mainreporoot')
567 if r:
567 if r:
568 dst.setconfig('bundle', 'mainreporoot', r)
568 dst.setconfig('bundle', 'mainreporoot', r)
569
569
570 # copy selected local settings to the remote ui
570 # copy selected local settings to the remote ui
571 for sect in ('auth', 'hostfingerprints', 'http_proxy'):
571 for sect in ('auth', 'hostfingerprints', 'http_proxy'):
572 for key, val in src.configitems(sect):
572 for key, val in src.configitems(sect):
573 dst.setconfig(sect, key, val)
573 dst.setconfig(sect, key, val)
574 v = src.config('web', 'cacerts')
574 v = src.config('web', 'cacerts')
575 if v:
575 if v:
576 dst.setconfig('web', 'cacerts', util.expandpath(v))
576 dst.setconfig('web', 'cacerts', util.expandpath(v))
577
577
578 return dst
578 return dst
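# Usage sketch (hypothetical ssh command; assumes mercurial.ui and
# mercurial.hg as above): remoteui() copies only the ssh/bundle/auth-related
# settings onto a fresh ui so repo-local configuration does not leak to peers.
from mercurial import ui as uimod, hg

rui = hg.remoteui(uimod.ui(), {'ssh': 'ssh -C', 'remotecmd': None})
print(rui.config('ui', 'ssh'))   # expected: ssh -C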
@@ -1,2101 +1,2101 @@
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import bin, hex, nullid, nullrev, short
8 from node import bin, hex, nullid, nullrev, short
9 from i18n import _
9 from i18n import _
10 import repo, changegroup, subrepo, discovery, pushkey
10 import repo, changegroup, subrepo, discovery, pushkey
11 import changelog, dirstate, filelog, manifest, context, bookmarks
11 import changelog, dirstate, filelog, manifest, context, bookmarks
12 import lock, transaction, store, encoding
12 import lock, transaction, store, encoding
13 import scmutil, util, extensions, hook, error, revset
13 import scmutil, util, extensions, hook, error, revset
14 import match as matchmod
14 import match as matchmod
15 import merge as mergemod
15 import merge as mergemod
16 import tags as tagsmod
16 import tags as tagsmod
17 from lock import release
17 from lock import release
18 import weakref, errno, os, time, inspect
18 import weakref, errno, os, time, inspect
19 propertycache = util.propertycache
19 propertycache = util.propertycache
20 filecache = scmutil.filecache
20 filecache = scmutil.filecache
21
21
22 class localrepository(repo.repository):
22 class localrepository(repo.repository):
23 capabilities = set(('lookup', 'changegroupsubset', 'branchmap', 'pushkey',
23 capabilities = set(('lookup', 'changegroupsubset', 'branchmap', 'pushkey',
24 'known', 'getbundle'))
24 'known', 'getbundle'))
25 supportedformats = set(('revlogv1', 'generaldelta'))
25 supportedformats = set(('revlogv1', 'generaldelta'))
26 supported = supportedformats | set(('store', 'fncache', 'shared',
26 supported = supportedformats | set(('store', 'fncache', 'shared',
27 'dotencode'))
27 'dotencode'))
28
28
29 def __init__(self, baseui, path=None, create=False):
29 def __init__(self, baseui, path=None, create=False):
30 repo.repository.__init__(self)
30 repo.repository.__init__(self)
31 - self.root = util.realpath(util.expandpath(path))
31 + self.root = os.path.realpath(util.expandpath(path))
32 self.path = os.path.join(self.root, ".hg")
32 self.path = os.path.join(self.root, ".hg")
33 self.origroot = path
33 self.origroot = path
34 self.auditor = scmutil.pathauditor(self.root, self._checknested)
34 self.auditor = scmutil.pathauditor(self.root, self._checknested)
35 self.opener = scmutil.opener(self.path)
35 self.opener = scmutil.opener(self.path)
36 self.wopener = scmutil.opener(self.root)
36 self.wopener = scmutil.opener(self.root)
37 self.baseui = baseui
37 self.baseui = baseui
38 self.ui = baseui.copy()
38 self.ui = baseui.copy()
39
39
40 try:
40 try:
41 self.ui.readconfig(self.join("hgrc"), self.root)
41 self.ui.readconfig(self.join("hgrc"), self.root)
42 extensions.loadall(self.ui)
42 extensions.loadall(self.ui)
43 except IOError:
43 except IOError:
44 pass
44 pass
45
45
46 if not os.path.isdir(self.path):
46 if not os.path.isdir(self.path):
47 if create:
47 if create:
48 if not os.path.exists(path):
48 if not os.path.exists(path):
49 util.makedirs(path)
49 util.makedirs(path)
50 util.makedir(self.path, notindexed=True)
50 util.makedir(self.path, notindexed=True)
51 requirements = ["revlogv1"]
51 requirements = ["revlogv1"]
52 if self.ui.configbool('format', 'usestore', True):
52 if self.ui.configbool('format', 'usestore', True):
53 os.mkdir(os.path.join(self.path, "store"))
53 os.mkdir(os.path.join(self.path, "store"))
54 requirements.append("store")
54 requirements.append("store")
55 if self.ui.configbool('format', 'usefncache', True):
55 if self.ui.configbool('format', 'usefncache', True):
56 requirements.append("fncache")
56 requirements.append("fncache")
57 if self.ui.configbool('format', 'dotencode', True):
57 if self.ui.configbool('format', 'dotencode', True):
58 requirements.append('dotencode')
58 requirements.append('dotencode')
59 # create an invalid changelog
59 # create an invalid changelog
60 self.opener.append(
60 self.opener.append(
61 "00changelog.i",
61 "00changelog.i",
62 '\0\0\0\2' # represents revlogv2
62 '\0\0\0\2' # represents revlogv2
63 ' dummy changelog to prevent using the old repo layout'
63 ' dummy changelog to prevent using the old repo layout'
64 )
64 )
65 if self.ui.configbool('format', 'generaldelta', False):
65 if self.ui.configbool('format', 'generaldelta', False):
66 requirements.append("generaldelta")
66 requirements.append("generaldelta")
67 requirements = set(requirements)
67 requirements = set(requirements)
68 else:
68 else:
69 raise error.RepoError(_("repository %s not found") % path)
69 raise error.RepoError(_("repository %s not found") % path)
70 elif create:
70 elif create:
71 raise error.RepoError(_("repository %s already exists") % path)
71 raise error.RepoError(_("repository %s already exists") % path)
72 else:
72 else:
73 try:
73 try:
74 requirements = scmutil.readrequires(self.opener, self.supported)
74 requirements = scmutil.readrequires(self.opener, self.supported)
75 except IOError, inst:
75 except IOError, inst:
76 if inst.errno != errno.ENOENT:
76 if inst.errno != errno.ENOENT:
77 raise
77 raise
78 requirements = set()
78 requirements = set()
79
79
80 self.sharedpath = self.path
80 self.sharedpath = self.path
81 try:
81 try:
82 - s = util.realpath(self.opener.read("sharedpath").rstrip('\n'))
82 + s = os.path.realpath(self.opener.read("sharedpath").rstrip('\n'))
83 if not os.path.exists(s):
83 if not os.path.exists(s):
84 raise error.RepoError(
84 raise error.RepoError(
85 _('.hg/sharedpath points to nonexistent directory %s') % s)
85 _('.hg/sharedpath points to nonexistent directory %s') % s)
86 self.sharedpath = s
86 self.sharedpath = s
87 except IOError, inst:
87 except IOError, inst:
88 if inst.errno != errno.ENOENT:
88 if inst.errno != errno.ENOENT:
89 raise
89 raise
90
90
91 self.store = store.store(requirements, self.sharedpath, scmutil.opener)
91 self.store = store.store(requirements, self.sharedpath, scmutil.opener)
92 self.spath = self.store.path
92 self.spath = self.store.path
93 self.sopener = self.store.opener
93 self.sopener = self.store.opener
94 self.sjoin = self.store.join
94 self.sjoin = self.store.join
95 self.opener.createmode = self.store.createmode
95 self.opener.createmode = self.store.createmode
96 self._applyrequirements(requirements)
96 self._applyrequirements(requirements)
97 if create:
97 if create:
98 self._writerequirements()
98 self._writerequirements()
99
99
100
100
101 self._branchcache = None
101 self._branchcache = None
102 self._branchcachetip = None
102 self._branchcachetip = None
103 self.filterpats = {}
103 self.filterpats = {}
104 self._datafilters = {}
104 self._datafilters = {}
105 self._transref = self._lockref = self._wlockref = None
105 self._transref = self._lockref = self._wlockref = None
106
106
107 # A cache for various files under .hg/ that tracks file changes,
107 # A cache for various files under .hg/ that tracks file changes,
108 # (used by the filecache decorator)
108 # (used by the filecache decorator)
109 #
109 #
110 # Maps a property name to its util.filecacheentry
110 # Maps a property name to its util.filecacheentry
111 self._filecache = {}
111 self._filecache = {}
112
112
113 def _applyrequirements(self, requirements):
113 def _applyrequirements(self, requirements):
114 self.requirements = requirements
114 self.requirements = requirements
115 openerreqs = set(('revlogv1', 'generaldelta'))
115 openerreqs = set(('revlogv1', 'generaldelta'))
116 self.sopener.options = dict((r, 1) for r in requirements
116 self.sopener.options = dict((r, 1) for r in requirements
117 if r in openerreqs)
117 if r in openerreqs)
118
118
119 def _writerequirements(self):
119 def _writerequirements(self):
120 reqfile = self.opener("requires", "w")
120 reqfile = self.opener("requires", "w")
121 for r in self.requirements:
121 for r in self.requirements:
122 reqfile.write("%s\n" % r)
122 reqfile.write("%s\n" % r)
123 reqfile.close()
123 reqfile.close()
124
124
125 def _checknested(self, path):
125 def _checknested(self, path):
126 """Determine if path is a legal nested repository."""
126 """Determine if path is a legal nested repository."""
127 if not path.startswith(self.root):
127 if not path.startswith(self.root):
128 return False
128 return False
129 subpath = path[len(self.root) + 1:]
129 subpath = path[len(self.root) + 1:]
130
130
131 # XXX: Checking against the current working copy is wrong in
131 # XXX: Checking against the current working copy is wrong in
132 # the sense that it can reject things like
132 # the sense that it can reject things like
133 #
133 #
134 # $ hg cat -r 10 sub/x.txt
134 # $ hg cat -r 10 sub/x.txt
135 #
135 #
136 # if sub/ is no longer a subrepository in the working copy
136 # if sub/ is no longer a subrepository in the working copy
137 # parent revision.
137 # parent revision.
138 #
138 #
139 # However, it can of course also allow things that would have
139 # However, it can of course also allow things that would have
140 # been rejected before, such as the above cat command if sub/
140 # been rejected before, such as the above cat command if sub/
141 # is a subrepository now, but was a normal directory before.
141 # is a subrepository now, but was a normal directory before.
142 # The old path auditor would have rejected by mistake since it
142 # The old path auditor would have rejected by mistake since it
143 # panics when it sees sub/.hg/.
143 # panics when it sees sub/.hg/.
144 #
144 #
145 # All in all, checking against the working copy seems sensible
145 # All in all, checking against the working copy seems sensible
146 # since we want to prevent access to nested repositories on
146 # since we want to prevent access to nested repositories on
147 # the filesystem *now*.
147 # the filesystem *now*.
148 ctx = self[None]
148 ctx = self[None]
149 parts = util.splitpath(subpath)
149 parts = util.splitpath(subpath)
150 while parts:
150 while parts:
151 prefix = os.sep.join(parts)
151 prefix = os.sep.join(parts)
152 if prefix in ctx.substate:
152 if prefix in ctx.substate:
153 if prefix == subpath:
153 if prefix == subpath:
154 return True
154 return True
155 else:
155 else:
156 sub = ctx.sub(prefix)
156 sub = ctx.sub(prefix)
157 return sub.checknested(subpath[len(prefix) + 1:])
157 return sub.checknested(subpath[len(prefix) + 1:])
158 else:
158 else:
159 parts.pop()
159 parts.pop()
160 return False
160 return False
161
161
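# Standalone sketch of the prefix walk in _checknested() above: try the full
# path relative to the repository root first, then progressively shorter
# ancestor directories, until one matches a registered subrepository (only the
# keys of substate matter here).
import os

def find_subrepo_prefix(subpath, substate):
    parts = subpath.split(os.sep)
    while parts:
        prefix = os.sep.join(parts)
        if prefix in substate:
            return prefix
        parts.pop()
    return None

print(find_subrepo_prefix(os.path.join('vendor', 'lib', 'x.c'),
                          {'vendor': None}))   # expected: vendor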
162 @filecache('bookmarks')
162 @filecache('bookmarks')
163 def _bookmarks(self):
163 def _bookmarks(self):
164 return bookmarks.read(self)
164 return bookmarks.read(self)
165
165
166 @filecache('bookmarks.current')
166 @filecache('bookmarks.current')
167 def _bookmarkcurrent(self):
167 def _bookmarkcurrent(self):
168 return bookmarks.readcurrent(self)
168 return bookmarks.readcurrent(self)
169
169
170 def _writebookmarks(self, marks):
170 def _writebookmarks(self, marks):
171 bookmarks.write(self)
171 bookmarks.write(self)
172
172
173 @filecache('00changelog.i', True)
173 @filecache('00changelog.i', True)
174 def changelog(self):
174 def changelog(self):
175 c = changelog.changelog(self.sopener)
175 c = changelog.changelog(self.sopener)
176 if 'HG_PENDING' in os.environ:
176 if 'HG_PENDING' in os.environ:
177 p = os.environ['HG_PENDING']
177 p = os.environ['HG_PENDING']
178 if p.startswith(self.root):
178 if p.startswith(self.root):
179 c.readpending('00changelog.i.a')
179 c.readpending('00changelog.i.a')
180 return c
180 return c
181
181
182 @filecache('00manifest.i', True)
182 @filecache('00manifest.i', True)
183 def manifest(self):
183 def manifest(self):
184 return manifest.manifest(self.sopener)
184 return manifest.manifest(self.sopener)
185
185
186 @filecache('dirstate')
186 @filecache('dirstate')
187 def dirstate(self):
187 def dirstate(self):
188 warned = [0]
188 warned = [0]
189 def validate(node):
189 def validate(node):
190 try:
190 try:
191 self.changelog.rev(node)
191 self.changelog.rev(node)
192 return node
192 return node
193 except error.LookupError:
193 except error.LookupError:
194 if not warned[0]:
194 if not warned[0]:
195 warned[0] = True
195 warned[0] = True
196 self.ui.warn(_("warning: ignoring unknown"
196 self.ui.warn(_("warning: ignoring unknown"
197 " working parent %s!\n") % short(node))
197 " working parent %s!\n") % short(node))
198 return nullid
198 return nullid
199
199
200 return dirstate.dirstate(self.opener, self.ui, self.root, validate)
200 return dirstate.dirstate(self.opener, self.ui, self.root, validate)
201
201
202 def __getitem__(self, changeid):
202 def __getitem__(self, changeid):
203 if changeid is None:
203 if changeid is None:
204 return context.workingctx(self)
204 return context.workingctx(self)
205 return context.changectx(self, changeid)
205 return context.changectx(self, changeid)
206
206
207 def __contains__(self, changeid):
207 def __contains__(self, changeid):
208 try:
208 try:
209 return bool(self.lookup(changeid))
209 return bool(self.lookup(changeid))
210 except error.RepoLookupError:
210 except error.RepoLookupError:
211 return False
211 return False
212
212
213 def __nonzero__(self):
213 def __nonzero__(self):
214 return True
214 return True
215
215
216 def __len__(self):
216 def __len__(self):
217 return len(self.changelog)
217 return len(self.changelog)
218
218
219 def __iter__(self):
219 def __iter__(self):
220 for i in xrange(len(self)):
220 for i in xrange(len(self)):
221 yield i
221 yield i
222
222
223 def set(self, expr, *args):
223 def set(self, expr, *args):
224 '''
224 '''
225 Yield a context for each matching revision, after doing arg
225 Yield a context for each matching revision, after doing arg
226 replacement via revset.formatspec
226 replacement via revset.formatspec
227 '''
227 '''
228
228
229 expr = revset.formatspec(expr, *args)
229 expr = revset.formatspec(expr, *args)
230 m = revset.match(None, expr)
230 m = revset.match(None, expr)
231 for r in m(self, range(len(self))):
231 for r in m(self, range(len(self))):
232 yield self[r]
232 yield self[r]
233
233
234 def url(self):
234 def url(self):
235 return 'file:' + self.root
235 return 'file:' + self.root
236
236
237 def hook(self, name, throw=False, **args):
237 def hook(self, name, throw=False, **args):
238 return hook.hook(self.ui, self, name, throw, **args)
238 return hook.hook(self.ui, self, name, throw, **args)
239
239
240 tag_disallowed = ':\r\n'
240 tag_disallowed = ':\r\n'
241
241
242 def _tag(self, names, node, message, local, user, date, extra={}):
242 def _tag(self, names, node, message, local, user, date, extra={}):
243 if isinstance(names, str):
243 if isinstance(names, str):
244 allchars = names
244 allchars = names
245 names = (names,)
245 names = (names,)
246 else:
246 else:
247 allchars = ''.join(names)
247 allchars = ''.join(names)
248 for c in self.tag_disallowed:
248 for c in self.tag_disallowed:
249 if c in allchars:
249 if c in allchars:
250 raise util.Abort(_('%r cannot be used in a tag name') % c)
250 raise util.Abort(_('%r cannot be used in a tag name') % c)
251
251
252 branches = self.branchmap()
252 branches = self.branchmap()
253 for name in names:
253 for name in names:
254 self.hook('pretag', throw=True, node=hex(node), tag=name,
254 self.hook('pretag', throw=True, node=hex(node), tag=name,
255 local=local)
255 local=local)
256 if name in branches:
256 if name in branches:
257 self.ui.warn(_("warning: tag %s conflicts with existing"
257 self.ui.warn(_("warning: tag %s conflicts with existing"
258 " branch name\n") % name)
258 " branch name\n") % name)
259
259
260 def writetags(fp, names, munge, prevtags):
260 def writetags(fp, names, munge, prevtags):
261 fp.seek(0, 2)
261 fp.seek(0, 2)
262 if prevtags and prevtags[-1] != '\n':
262 if prevtags and prevtags[-1] != '\n':
263 fp.write('\n')
263 fp.write('\n')
264 for name in names:
264 for name in names:
265 m = munge and munge(name) or name
265 m = munge and munge(name) or name
266 if self._tagscache.tagtypes and name in self._tagscache.tagtypes:
266 if self._tagscache.tagtypes and name in self._tagscache.tagtypes:
267 old = self.tags().get(name, nullid)
267 old = self.tags().get(name, nullid)
268 fp.write('%s %s\n' % (hex(old), m))
268 fp.write('%s %s\n' % (hex(old), m))
269 fp.write('%s %s\n' % (hex(node), m))
269 fp.write('%s %s\n' % (hex(node), m))
270 fp.close()
270 fp.close()
271
271
272 prevtags = ''
272 prevtags = ''
273 if local:
273 if local:
274 try:
274 try:
275 fp = self.opener('localtags', 'r+')
275 fp = self.opener('localtags', 'r+')
276 except IOError:
276 except IOError:
277 fp = self.opener('localtags', 'a')
277 fp = self.opener('localtags', 'a')
278 else:
278 else:
279 prevtags = fp.read()
279 prevtags = fp.read()
280
280
281 # local tags are stored in the current charset
281 # local tags are stored in the current charset
282 writetags(fp, names, None, prevtags)
282 writetags(fp, names, None, prevtags)
283 for name in names:
283 for name in names:
284 self.hook('tag', node=hex(node), tag=name, local=local)
284 self.hook('tag', node=hex(node), tag=name, local=local)
285 return
285 return
286
286
287 try:
287 try:
288 fp = self.wfile('.hgtags', 'rb+')
288 fp = self.wfile('.hgtags', 'rb+')
289 except IOError, e:
289 except IOError, e:
290 if e.errno != errno.ENOENT:
290 if e.errno != errno.ENOENT:
291 raise
291 raise
292 fp = self.wfile('.hgtags', 'ab')
292 fp = self.wfile('.hgtags', 'ab')
293 else:
293 else:
294 prevtags = fp.read()
294 prevtags = fp.read()
295
295
296 # committed tags are stored in UTF-8
296 # committed tags are stored in UTF-8
297 writetags(fp, names, encoding.fromlocal, prevtags)
297 writetags(fp, names, encoding.fromlocal, prevtags)
298
298
299 fp.close()
299 fp.close()
300
300
301 if '.hgtags' not in self.dirstate:
301 if '.hgtags' not in self.dirstate:
302 self[None].add(['.hgtags'])
302 self[None].add(['.hgtags'])
303
303
304 m = matchmod.exact(self.root, '', ['.hgtags'])
304 m = matchmod.exact(self.root, '', ['.hgtags'])
305 tagnode = self.commit(message, user, date, extra=extra, match=m)
305 tagnode = self.commit(message, user, date, extra=extra, match=m)
306
306
307 for name in names:
307 for name in names:
308 self.hook('tag', node=hex(node), tag=name, local=local)
308 self.hook('tag', node=hex(node), tag=name, local=local)
309
309
310 return tagnode
310 return tagnode
311
311
312 def tag(self, names, node, message, local, user, date):
312 def tag(self, names, node, message, local, user, date):
313 '''tag a revision with one or more symbolic names.
313 '''tag a revision with one or more symbolic names.
314
314
315 names is a list of strings or, when adding a single tag, names may be a
315 names is a list of strings or, when adding a single tag, names may be a
316 string.
316 string.
317
317
318 if local is True, the tags are stored in a per-repository file.
318 if local is True, the tags are stored in a per-repository file.
319 otherwise, they are stored in the .hgtags file, and a new
319 otherwise, they are stored in the .hgtags file, and a new
320 changeset is committed with the change.
320 changeset is committed with the change.
321
321
322 keyword arguments:
322 keyword arguments:
323
323
324 local: whether to store tags in non-version-controlled file
324 local: whether to store tags in non-version-controlled file
325 (default False)
325 (default False)
326
326
327 message: commit message to use if committing
327 message: commit message to use if committing
328
328
329 user: name of user to use if committing
329 user: name of user to use if committing
330
330
331 date: date tuple to use if committing'''
331 date: date tuple to use if committing'''
332
332
333 if not local:
333 if not local:
334 for x in self.status()[:5]:
334 for x in self.status()[:5]:
335 if '.hgtags' in x:
335 if '.hgtags' in x:
336 raise util.Abort(_('working copy of .hgtags is changed '
336 raise util.Abort(_('working copy of .hgtags is changed '
337 '(please commit .hgtags manually)'))
337 '(please commit .hgtags manually)'))
338
338
339 self.tags() # instantiate the cache
339 self.tags() # instantiate the cache
340 self._tag(names, node, message, local, user, date)
340 self._tag(names, node, message, local, user, date)
341
341
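# Usage sketch (hypothetical path, tag name and user; assumes mercurial.ui and
# mercurial.hg as in the earlier file): tag() records symbolic names for a
# changeset, either locally or as a committed .hgtags change.
from mercurial import ui as uimod, hg

repo = hg.repository(uimod.ui(), '/srv/repos/project')
node = repo['tip'].node()
repo.tag(['v1.0'], node, 'Added tag v1.0 for changeset tip', False,
         'Alice <alice@example.com>', None)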
342 @propertycache
342 @propertycache
343 def _tagscache(self):
343 def _tagscache(self):
344 '''Returns a tagscache object that contains various tags related caches.'''
344 '''Returns a tagscache object that contains various tags related caches.'''
345
345
346 # This simplifies its cache management by having one decorated
346 # This simplifies its cache management by having one decorated
347 # function (this one) and the rest simply fetch things from it.
347 # function (this one) and the rest simply fetch things from it.
348 class tagscache(object):
348 class tagscache(object):
349 def __init__(self):
349 def __init__(self):
350 # These two define the set of tags for this repository. tags
350 # These two define the set of tags for this repository. tags
351 # maps tag name to node; tagtypes maps tag name to 'global' or
351 # maps tag name to node; tagtypes maps tag name to 'global' or
352 # 'local'. (Global tags are defined by .hgtags across all
352 # 'local'. (Global tags are defined by .hgtags across all
353 # heads, and local tags are defined in .hg/localtags.)
353 # heads, and local tags are defined in .hg/localtags.)
354 # They constitute the in-memory cache of tags.
354 # They constitute the in-memory cache of tags.
355 self.tags = self.tagtypes = None
355 self.tags = self.tagtypes = None
356
356
357 self.nodetagscache = self.tagslist = None
357 self.nodetagscache = self.tagslist = None
358
358
359 cache = tagscache()
359 cache = tagscache()
360 cache.tags, cache.tagtypes = self._findtags()
360 cache.tags, cache.tagtypes = self._findtags()
361
361
362 return cache
362 return cache
363
363
364 def tags(self):
364 def tags(self):
365 '''return a mapping of tag to node'''
365 '''return a mapping of tag to node'''
366 return self._tagscache.tags
366 return self._tagscache.tags
367
367
368 def _findtags(self):
368 def _findtags(self):
369 '''Do the hard work of finding tags. Return a pair of dicts
369 '''Do the hard work of finding tags. Return a pair of dicts
370 (tags, tagtypes) where tags maps tag name to node, and tagtypes
370 (tags, tagtypes) where tags maps tag name to node, and tagtypes
371 maps tag name to a string like \'global\' or \'local\'.
371 maps tag name to a string like \'global\' or \'local\'.
372 Subclasses or extensions are free to add their own tags, but
372 Subclasses or extensions are free to add their own tags, but
373 should be aware that the returned dicts will be retained for the
373 should be aware that the returned dicts will be retained for the
374 duration of the localrepo object.'''
374 duration of the localrepo object.'''
375
375
376 # XXX what tagtype should subclasses/extensions use? Currently
376 # XXX what tagtype should subclasses/extensions use? Currently
377 # mq and bookmarks add tags, but do not set the tagtype at all.
377 # mq and bookmarks add tags, but do not set the tagtype at all.
378 # Should each extension invent its own tag type? Should there
378 # Should each extension invent its own tag type? Should there
379 # be one tagtype for all such "virtual" tags? Or is the status
379 # be one tagtype for all such "virtual" tags? Or is the status
380 # quo fine?
380 # quo fine?
381
381
382 alltags = {} # map tag name to (node, hist)
382 alltags = {} # map tag name to (node, hist)
383 tagtypes = {}
383 tagtypes = {}
384
384
385 tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
385 tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
386 tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)
386 tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)
387
387
388 # Build the return dicts. Have to re-encode tag names because
388 # Build the return dicts. Have to re-encode tag names because
389 # the tags module always uses UTF-8 (in order not to lose info
389 # the tags module always uses UTF-8 (in order not to lose info
390 # writing to the cache), but the rest of Mercurial wants them in
390 # writing to the cache), but the rest of Mercurial wants them in
391 # local encoding.
391 # local encoding.
392 tags = {}
392 tags = {}
393 for (name, (node, hist)) in alltags.iteritems():
393 for (name, (node, hist)) in alltags.iteritems():
394 if node != nullid:
394 if node != nullid:
395 try:
395 try:
396 # ignore tags to unknown nodes
396 # ignore tags to unknown nodes
397 self.changelog.lookup(node)
397 self.changelog.lookup(node)
398 tags[encoding.tolocal(name)] = node
398 tags[encoding.tolocal(name)] = node
399 except error.LookupError:
399 except error.LookupError:
400 pass
400 pass
401 tags['tip'] = self.changelog.tip()
401 tags['tip'] = self.changelog.tip()
402 tagtypes = dict([(encoding.tolocal(name), value)
402 tagtypes = dict([(encoding.tolocal(name), value)
403 for (name, value) in tagtypes.iteritems()])
403 for (name, value) in tagtypes.iteritems()])
404 return (tags, tagtypes)
404 return (tags, tagtypes)
405
405
406 def tagtype(self, tagname):
406 def tagtype(self, tagname):
407 '''
407 '''
408 return the type of the given tag. result can be:
408 return the type of the given tag. result can be:
409
409
410 'local' : a local tag
410 'local' : a local tag
411 'global' : a global tag
411 'global' : a global tag
412 None : tag does not exist
412 None : tag does not exist
413 '''
413 '''
414
414
415 return self._tagscache.tagtypes.get(tagname)
415 return self._tagscache.tagtypes.get(tagname)
416
416
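# [editor's note] Hedged usage sketch for the tag APIs above (illustrative,
# not part of localrepo.py; assumes this era's API, Python 2, and a script run
# from inside a repository):
#
#     from mercurial import ui as uimod, hg
#     from mercurial.node import hex
#
#     repo = hg.repository(uimod.ui(), '.')
#     for name, node in repo.tags().iteritems():
#         # tagtype() returns 'global', 'local', or None for unknown names
#         print name, hex(node), repo.tagtype(name)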
417 def tagslist(self):
417 def tagslist(self):
418 '''return a list of tags ordered by revision'''
418 '''return a list of tags ordered by revision'''
419 if not self._tagscache.tagslist:
419 if not self._tagscache.tagslist:
420 l = []
420 l = []
421 for t, n in self.tags().iteritems():
421 for t, n in self.tags().iteritems():
422 r = self.changelog.rev(n)
422 r = self.changelog.rev(n)
423 l.append((r, t, n))
423 l.append((r, t, n))
424 self._tagscache.tagslist = [(t, n) for r, t, n in sorted(l)]
424 self._tagscache.tagslist = [(t, n) for r, t, n in sorted(l)]
425
425
426 return self._tagscache.tagslist
426 return self._tagscache.tagslist
427
427
428 def nodetags(self, node):
428 def nodetags(self, node):
429 '''return the tags associated with a node'''
429 '''return the tags associated with a node'''
430 if not self._tagscache.nodetagscache:
430 if not self._tagscache.nodetagscache:
431 nodetagscache = {}
431 nodetagscache = {}
432 for t, n in self.tags().iteritems():
432 for t, n in self.tags().iteritems():
433 nodetagscache.setdefault(n, []).append(t)
433 nodetagscache.setdefault(n, []).append(t)
434 for tags in nodetagscache.itervalues():
434 for tags in nodetagscache.itervalues():
435 tags.sort()
435 tags.sort()
436 self._tagscache.nodetagscache = nodetagscache
436 self._tagscache.nodetagscache = nodetagscache
437 return self._tagscache.nodetagscache.get(node, [])
437 return self._tagscache.nodetagscache.get(node, [])
438
438
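# [editor's note] Continuing the sketch above: tagslist() yields (name, node)
# pairs ordered by revision, and nodetags() inverts the mapping for a single
# node (illustrative only; reuses the repo object from the earlier sketch):
#
#     for name, node in repo.tagslist():
#         print repo.changelog.rev(node), name
#     print repo.nodetags(repo.changelog.tip())   # e.g. ['tip']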
439 def nodebookmarks(self, node):
439 def nodebookmarks(self, node):
440 marks = []
440 marks = []
441 for bookmark, n in self._bookmarks.iteritems():
441 for bookmark, n in self._bookmarks.iteritems():
442 if n == node:
442 if n == node:
443 marks.append(bookmark)
443 marks.append(bookmark)
444 return sorted(marks)
444 return sorted(marks)
445
445
446 def _branchtags(self, partial, lrev):
446 def _branchtags(self, partial, lrev):
447 # TODO: rename this function?
447 # TODO: rename this function?
448 tiprev = len(self) - 1
448 tiprev = len(self) - 1
449 if lrev != tiprev:
449 if lrev != tiprev:
450 ctxgen = (self[r] for r in xrange(lrev + 1, tiprev + 1))
450 ctxgen = (self[r] for r in xrange(lrev + 1, tiprev + 1))
451 self._updatebranchcache(partial, ctxgen)
451 self._updatebranchcache(partial, ctxgen)
452 self._writebranchcache(partial, self.changelog.tip(), tiprev)
452 self._writebranchcache(partial, self.changelog.tip(), tiprev)
453
453
454 return partial
454 return partial
455
455
456 def updatebranchcache(self):
456 def updatebranchcache(self):
457 tip = self.changelog.tip()
457 tip = self.changelog.tip()
458 if self._branchcache is not None and self._branchcachetip == tip:
458 if self._branchcache is not None and self._branchcachetip == tip:
459 return self._branchcache
459 return self._branchcache
460
460
461 oldtip = self._branchcachetip
461 oldtip = self._branchcachetip
462 self._branchcachetip = tip
462 self._branchcachetip = tip
463 if oldtip is None or oldtip not in self.changelog.nodemap:
463 if oldtip is None or oldtip not in self.changelog.nodemap:
464 partial, last, lrev = self._readbranchcache()
464 partial, last, lrev = self._readbranchcache()
465 else:
465 else:
466 lrev = self.changelog.rev(oldtip)
466 lrev = self.changelog.rev(oldtip)
467 partial = self._branchcache
467 partial = self._branchcache
468
468
469 self._branchtags(partial, lrev)
469 self._branchtags(partial, lrev)
470 # this private cache holds all heads (not just tips)
470 # this private cache holds all heads (not just tips)
471 self._branchcache = partial
471 self._branchcache = partial
472
472
473 def branchmap(self):
473 def branchmap(self):
474 '''returns a dictionary {branch: [branchheads]}'''
474 '''returns a dictionary {branch: [branchheads]}'''
475 self.updatebranchcache()
475 self.updatebranchcache()
476 return self._branchcache
476 return self._branchcache
477
477
478 def branchtags(self):
478 def branchtags(self):
479 '''return a dict where branch names map to the tipmost head of
479 '''return a dict where branch names map to the tipmost head of
480 the branch; open heads come before closed'''
480 the branch; open heads come before closed'''
481 bt = {}
481 bt = {}
482 for bn, heads in self.branchmap().iteritems():
482 for bn, heads in self.branchmap().iteritems():
483 tip = heads[-1]
483 tip = heads[-1]
484 for h in reversed(heads):
484 for h in reversed(heads):
485 if 'close' not in self.changelog.read(h)[5]:
485 if 'close' not in self.changelog.read(h)[5]:
486 tip = h
486 tip = h
487 break
487 break
488 bt[bn] = tip
488 bt[bn] = tip
489 return bt
489 return bt
490
490
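# [editor's note] Hedged sketch of the two branch views above: branchmap()
# returns every head per branch, branchtags() only the tip-most open head
# (illustrative; reuses the repo object from the earlier sketch):
#
#     from mercurial.node import hex
#     heads_by_branch = repo.branchmap()     # {branch: [head nodes]}
#     tip_by_branch = repo.branchtags()      # {branch: tip-most open head}
#     for branch, heads in heads_by_branch.iteritems():
#         print branch, len(heads), hex(tip_by_branch[branch])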
491 def _readbranchcache(self):
491 def _readbranchcache(self):
492 partial = {}
492 partial = {}
493 try:
493 try:
494 f = self.opener("cache/branchheads")
494 f = self.opener("cache/branchheads")
495 lines = f.read().split('\n')
495 lines = f.read().split('\n')
496 f.close()
496 f.close()
497 except (IOError, OSError):
497 except (IOError, OSError):
498 return {}, nullid, nullrev
498 return {}, nullid, nullrev
499
499
500 try:
500 try:
501 last, lrev = lines.pop(0).split(" ", 1)
501 last, lrev = lines.pop(0).split(" ", 1)
502 last, lrev = bin(last), int(lrev)
502 last, lrev = bin(last), int(lrev)
503 if lrev >= len(self) or self[lrev].node() != last:
503 if lrev >= len(self) or self[lrev].node() != last:
504 # invalidate the cache
504 # invalidate the cache
505 raise ValueError('invalidating branch cache (tip differs)')
505 raise ValueError('invalidating branch cache (tip differs)')
506 for l in lines:
506 for l in lines:
507 if not l:
507 if not l:
508 continue
508 continue
509 node, label = l.split(" ", 1)
509 node, label = l.split(" ", 1)
510 label = encoding.tolocal(label.strip())
510 label = encoding.tolocal(label.strip())
511 partial.setdefault(label, []).append(bin(node))
511 partial.setdefault(label, []).append(bin(node))
512 except KeyboardInterrupt:
512 except KeyboardInterrupt:
513 raise
513 raise
514 except Exception, inst:
514 except Exception, inst:
515 if self.ui.debugflag:
515 if self.ui.debugflag:
516 self.ui.warn(str(inst), '\n')
516 self.ui.warn(str(inst), '\n')
517 partial, last, lrev = {}, nullid, nullrev
517 partial, last, lrev = {}, nullid, nullrev
518 return partial, last, lrev
518 return partial, last, lrev
519
519
520 def _writebranchcache(self, branches, tip, tiprev):
520 def _writebranchcache(self, branches, tip, tiprev):
521 try:
521 try:
522 f = self.opener("cache/branchheads", "w", atomictemp=True)
522 f = self.opener("cache/branchheads", "w", atomictemp=True)
523 f.write("%s %s\n" % (hex(tip), tiprev))
523 f.write("%s %s\n" % (hex(tip), tiprev))
524 for label, nodes in branches.iteritems():
524 for label, nodes in branches.iteritems():
525 for node in nodes:
525 for node in nodes:
526 f.write("%s %s\n" % (hex(node), encoding.fromlocal(label)))
526 f.write("%s %s\n" % (hex(node), encoding.fromlocal(label)))
527 f.close()
527 f.close()
528 except (IOError, OSError):
528 except (IOError, OSError):
529 pass
529 pass
530
530
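# [editor's note] For orientation, the cache file handled by the two methods
# above (.hg/cache/branchheads) is plain text: one header line with the cached
# tip node and revision, then one "<node> <branch>" line per head. A hedged
# illustration of its shape (hashes and branch names below are made up):
#
#     1f0dee641bb7258c56bd60e93edfa2405381c41e 42
#     1f0dee641bb7258c56bd60e93edfa2405381c41e default
#     a7c1559b7f4a7b8d1f5e9c3b2d4e5f6a7b8c9d0e stable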
531 def _updatebranchcache(self, partial, ctxgen):
531 def _updatebranchcache(self, partial, ctxgen):
532 # collect new branch entries
532 # collect new branch entries
533 newbranches = {}
533 newbranches = {}
534 for c in ctxgen:
534 for c in ctxgen:
535 newbranches.setdefault(c.branch(), []).append(c.node())
535 newbranches.setdefault(c.branch(), []).append(c.node())
536 # if older branchheads are reachable from new ones, they aren't
536 # if older branchheads are reachable from new ones, they aren't
537 # really branchheads. Note checking parents is insufficient:
537 # really branchheads. Note checking parents is insufficient:
538 # 1 (branch a) -> 2 (branch b) -> 3 (branch a)
538 # 1 (branch a) -> 2 (branch b) -> 3 (branch a)
539 for branch, newnodes in newbranches.iteritems():
539 for branch, newnodes in newbranches.iteritems():
540 bheads = partial.setdefault(branch, [])
540 bheads = partial.setdefault(branch, [])
541 bheads.extend(newnodes)
541 bheads.extend(newnodes)
542 if len(bheads) <= 1:
542 if len(bheads) <= 1:
543 continue
543 continue
544 bheads = sorted(bheads, key=lambda x: self[x].rev())
544 bheads = sorted(bheads, key=lambda x: self[x].rev())
545 # starting from tip means fewer passes over reachable
545 # starting from tip means fewer passes over reachable
546 while newnodes:
546 while newnodes:
547 latest = newnodes.pop()
547 latest = newnodes.pop()
548 if latest not in bheads:
548 if latest not in bheads:
549 continue
549 continue
550 minbhrev = self[bheads[0]].node()
550 minbhrev = self[bheads[0]].node()
551 reachable = self.changelog.reachable(latest, minbhrev)
551 reachable = self.changelog.reachable(latest, minbhrev)
552 reachable.remove(latest)
552 reachable.remove(latest)
553 if reachable:
553 if reachable:
554 bheads = [b for b in bheads if b not in reachable]
554 bheads = [b for b in bheads if b not in reachable]
555 partial[branch] = bheads
555 partial[branch] = bheads
556
556
557 def lookup(self, key):
557 def lookup(self, key):
558 if isinstance(key, int):
558 if isinstance(key, int):
559 return self.changelog.node(key)
559 return self.changelog.node(key)
560 elif key == '.':
560 elif key == '.':
561 return self.dirstate.p1()
561 return self.dirstate.p1()
562 elif key == 'null':
562 elif key == 'null':
563 return nullid
563 return nullid
564 elif key == 'tip':
564 elif key == 'tip':
565 return self.changelog.tip()
565 return self.changelog.tip()
566 n = self.changelog._match(key)
566 n = self.changelog._match(key)
567 if n:
567 if n:
568 return n
568 return n
569 if key in self._bookmarks:
569 if key in self._bookmarks:
570 return self._bookmarks[key]
570 return self._bookmarks[key]
571 if key in self.tags():
571 if key in self.tags():
572 return self.tags()[key]
572 return self.tags()[key]
573 if key in self.branchtags():
573 if key in self.branchtags():
574 return self.branchtags()[key]
574 return self.branchtags()[key]
575 n = self.changelog._partialmatch(key)
575 n = self.changelog._partialmatch(key)
576 if n:
576 if n:
577 return n
577 return n
578
578
579 # can't find key, check if it might have come from damaged dirstate
579 # can't find key, check if it might have come from damaged dirstate
580 if key in self.dirstate.parents():
580 if key in self.dirstate.parents():
581 raise error.Abort(_("working directory has unknown parent '%s'!")
581 raise error.Abort(_("working directory has unknown parent '%s'!")
582 % short(key))
582 % short(key))
583 try:
583 try:
584 if len(key) == 20:
584 if len(key) == 20:
585 key = hex(key)
585 key = hex(key)
586 except TypeError:
586 except TypeError:
587 pass
587 pass
588 raise error.RepoLookupError(_("unknown revision '%s'") % key)
588 raise error.RepoLookupError(_("unknown revision '%s'") % key)
589
589
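# [editor's note] Hedged sketch of lookup(): it resolves a key to a binary
# changeset node, trying integer revs, '.', 'null', 'tip', exact node ids,
# bookmarks, tags, branch names and finally unambiguous node prefixes, and
# raises RepoLookupError otherwise (illustrative; reuses the repo object from
# the earlier sketch):
#
#     from mercurial import error
#     from mercurial.node import hex
#
#     print hex(repo.lookup('tip'))
#     try:
#         repo.lookup('no-such-rev')
#     except error.RepoLookupError:
#         print 'unknown revision'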
590 def lookupbranch(self, key, remote=None):
590 def lookupbranch(self, key, remote=None):
591 repo = remote or self
591 repo = remote or self
592 if key in repo.branchmap():
592 if key in repo.branchmap():
593 return key
593 return key
594
594
595 repo = (remote and remote.local()) and remote or self
595 repo = (remote and remote.local()) and remote or self
596 return repo[key].branch()
596 return repo[key].branch()
597
597
598 def known(self, nodes):
598 def known(self, nodes):
599 nm = self.changelog.nodemap
599 nm = self.changelog.nodemap
600 return [(n in nm) for n in nodes]
600 return [(n in nm) for n in nodes]
601
601
602 def local(self):
602 def local(self):
603 return self
603 return self
604
604
605 def join(self, f):
605 def join(self, f):
606 return os.path.join(self.path, f)
606 return os.path.join(self.path, f)
607
607
608 def wjoin(self, f):
608 def wjoin(self, f):
609 return os.path.join(self.root, f)
609 return os.path.join(self.root, f)
610
610
611 def file(self, f):
611 def file(self, f):
612 if f[0] == '/':
612 if f[0] == '/':
613 f = f[1:]
613 f = f[1:]
614 return filelog.filelog(self.sopener, f)
614 return filelog.filelog(self.sopener, f)
615
615
616 def changectx(self, changeid):
616 def changectx(self, changeid):
617 return self[changeid]
617 return self[changeid]
618
618
619 def parents(self, changeid=None):
619 def parents(self, changeid=None):
620 '''get list of changectxs for parents of changeid'''
620 '''get list of changectxs for parents of changeid'''
621 return self[changeid].parents()
621 return self[changeid].parents()
622
622
623 def filectx(self, path, changeid=None, fileid=None):
623 def filectx(self, path, changeid=None, fileid=None):
624 """changeid can be a changeset revision, node, or tag.
624 """changeid can be a changeset revision, node, or tag.
625 fileid can be a file revision or node."""
625 fileid can be a file revision or node."""
626 return context.filectx(self, path, changeid, fileid)
626 return context.filectx(self, path, changeid, fileid)
627
627
628 def getcwd(self):
628 def getcwd(self):
629 return self.dirstate.getcwd()
629 return self.dirstate.getcwd()
630
630
631 def pathto(self, f, cwd=None):
631 def pathto(self, f, cwd=None):
632 return self.dirstate.pathto(f, cwd)
632 return self.dirstate.pathto(f, cwd)
633
633
634 def wfile(self, f, mode='r'):
634 def wfile(self, f, mode='r'):
635 return self.wopener(f, mode)
635 return self.wopener(f, mode)
636
636
637 def _link(self, f):
637 def _link(self, f):
638 return os.path.islink(self.wjoin(f))
638 return os.path.islink(self.wjoin(f))
639
639
640 def _loadfilter(self, filter):
640 def _loadfilter(self, filter):
641 if filter not in self.filterpats:
641 if filter not in self.filterpats:
642 l = []
642 l = []
643 for pat, cmd in self.ui.configitems(filter):
643 for pat, cmd in self.ui.configitems(filter):
644 if cmd == '!':
644 if cmd == '!':
645 continue
645 continue
646 mf = matchmod.match(self.root, '', [pat])
646 mf = matchmod.match(self.root, '', [pat])
647 fn = None
647 fn = None
648 params = cmd
648 params = cmd
649 for name, filterfn in self._datafilters.iteritems():
649 for name, filterfn in self._datafilters.iteritems():
650 if cmd.startswith(name):
650 if cmd.startswith(name):
651 fn = filterfn
651 fn = filterfn
652 params = cmd[len(name):].lstrip()
652 params = cmd[len(name):].lstrip()
653 break
653 break
654 if not fn:
654 if not fn:
655 fn = lambda s, c, **kwargs: util.filter(s, c)
655 fn = lambda s, c, **kwargs: util.filter(s, c)
656 # Wrap old filters not supporting keyword arguments
656 # Wrap old filters not supporting keyword arguments
657 if not inspect.getargspec(fn)[2]:
657 if not inspect.getargspec(fn)[2]:
658 oldfn = fn
658 oldfn = fn
659 fn = lambda s, c, **kwargs: oldfn(s, c)
659 fn = lambda s, c, **kwargs: oldfn(s, c)
660 l.append((mf, fn, params))
660 l.append((mf, fn, params))
661 self.filterpats[filter] = l
661 self.filterpats[filter] = l
662 return self.filterpats[filter]
662 return self.filterpats[filter]
663
663
664 def _filter(self, filterpats, filename, data):
664 def _filter(self, filterpats, filename, data):
665 for mf, fn, cmd in filterpats:
665 for mf, fn, cmd in filterpats:
666 if mf(filename):
666 if mf(filename):
667 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
667 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
668 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
668 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
669 break
669 break
670
670
671 return data
671 return data
672
672
673 @propertycache
673 @propertycache
674 def _encodefilterpats(self):
674 def _encodefilterpats(self):
675 return self._loadfilter('encode')
675 return self._loadfilter('encode')
676
676
677 @propertycache
677 @propertycache
678 def _decodefilterpats(self):
678 def _decodefilterpats(self):
679 return self._loadfilter('decode')
679 return self._loadfilter('decode')
680
680
681 def adddatafilter(self, name, filter):
681 def adddatafilter(self, name, filter):
682 self._datafilters[name] = filter
682 self._datafilters[name] = filter
683
683
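# [editor's note] Hedged sketch of how an extension can plug into the
# encode/decode machinery above via adddatafilter() (the pattern used by
# extensions such as win32text; the filter name and behaviour here are made
# up for illustration):
#
#     def reposetup(ui, repo):
#         if not repo.local():
#             return
#         def upperfilter(s, params, **kwargs):
#             return s.upper()
#         repo.adddatafilter('upper:', upperfilter)
#
# combined with a matching hgrc entry (e.g. "[encode]" / "**.txt = upper:")
# so that _loadfilter() maps the configured pattern to the registered filter.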
684 def wread(self, filename):
684 def wread(self, filename):
685 if self._link(filename):
685 if self._link(filename):
686 data = os.readlink(self.wjoin(filename))
686 data = os.readlink(self.wjoin(filename))
687 else:
687 else:
688 data = self.wopener.read(filename)
688 data = self.wopener.read(filename)
689 return self._filter(self._encodefilterpats, filename, data)
689 return self._filter(self._encodefilterpats, filename, data)
690
690
691 def wwrite(self, filename, data, flags):
691 def wwrite(self, filename, data, flags):
692 data = self._filter(self._decodefilterpats, filename, data)
692 data = self._filter(self._decodefilterpats, filename, data)
693 if 'l' in flags:
693 if 'l' in flags:
694 self.wopener.symlink(data, filename)
694 self.wopener.symlink(data, filename)
695 else:
695 else:
696 self.wopener.write(filename, data)
696 self.wopener.write(filename, data)
697 if 'x' in flags:
697 if 'x' in flags:
698 util.setflags(self.wjoin(filename), False, True)
698 util.setflags(self.wjoin(filename), False, True)
699
699
700 def wwritedata(self, filename, data):
700 def wwritedata(self, filename, data):
701 return self._filter(self._decodefilterpats, filename, data)
701 return self._filter(self._decodefilterpats, filename, data)
702
702
703 def transaction(self, desc):
703 def transaction(self, desc):
704 tr = self._transref and self._transref() or None
704 tr = self._transref and self._transref() or None
705 if tr and tr.running():
705 if tr and tr.running():
706 return tr.nest()
706 return tr.nest()
707
707
708 # abort here if the journal already exists
708 # abort here if the journal already exists
709 if os.path.exists(self.sjoin("journal")):
709 if os.path.exists(self.sjoin("journal")):
710 raise error.RepoError(
710 raise error.RepoError(
711 _("abandoned transaction found - run hg recover"))
711 _("abandoned transaction found - run hg recover"))
712
712
713 journalfiles = self._writejournal(desc)
713 journalfiles = self._writejournal(desc)
714 renames = [(x, undoname(x)) for x in journalfiles]
714 renames = [(x, undoname(x)) for x in journalfiles]
715
715
716 tr = transaction.transaction(self.ui.warn, self.sopener,
716 tr = transaction.transaction(self.ui.warn, self.sopener,
717 self.sjoin("journal"),
717 self.sjoin("journal"),
718 aftertrans(renames),
718 aftertrans(renames),
719 self.store.createmode)
719 self.store.createmode)
720 self._transref = weakref.ref(tr)
720 self._transref = weakref.ref(tr)
721 return tr
721 return tr
722
722
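# [editor's note] Hedged sketch of the transaction protocol used throughout
# this class (compare commitctx() further down): take the store lock, open a
# transaction, close it on success, and always release both in reverse order:
#
#     lock = repo.lock()
#     try:
#         tr = repo.transaction('my-operation')
#         try:
#             # ... append to revlogs through tr ...
#             tr.close()
#         finally:
#             tr.release()
#     finally:
#         lock.release()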
723 def _writejournal(self, desc):
723 def _writejournal(self, desc):
724 # save dirstate for rollback
724 # save dirstate for rollback
725 try:
725 try:
726 ds = self.opener.read("dirstate")
726 ds = self.opener.read("dirstate")
727 except IOError:
727 except IOError:
728 ds = ""
728 ds = ""
729 self.opener.write("journal.dirstate", ds)
729 self.opener.write("journal.dirstate", ds)
730 self.opener.write("journal.branch",
730 self.opener.write("journal.branch",
731 encoding.fromlocal(self.dirstate.branch()))
731 encoding.fromlocal(self.dirstate.branch()))
732 self.opener.write("journal.desc",
732 self.opener.write("journal.desc",
733 "%d\n%s\n" % (len(self), desc))
733 "%d\n%s\n" % (len(self), desc))
734
734
735 bkname = self.join('bookmarks')
735 bkname = self.join('bookmarks')
736 if os.path.exists(bkname):
736 if os.path.exists(bkname):
737 util.copyfile(bkname, self.join('journal.bookmarks'))
737 util.copyfile(bkname, self.join('journal.bookmarks'))
738 else:
738 else:
739 self.opener.write('journal.bookmarks', '')
739 self.opener.write('journal.bookmarks', '')
740
740
741 return (self.sjoin('journal'), self.join('journal.dirstate'),
741 return (self.sjoin('journal'), self.join('journal.dirstate'),
742 self.join('journal.branch'), self.join('journal.desc'),
742 self.join('journal.branch'), self.join('journal.desc'),
743 self.join('journal.bookmarks'))
743 self.join('journal.bookmarks'))
744
744
745 def recover(self):
745 def recover(self):
746 lock = self.lock()
746 lock = self.lock()
747 try:
747 try:
748 if os.path.exists(self.sjoin("journal")):
748 if os.path.exists(self.sjoin("journal")):
749 self.ui.status(_("rolling back interrupted transaction\n"))
749 self.ui.status(_("rolling back interrupted transaction\n"))
750 transaction.rollback(self.sopener, self.sjoin("journal"),
750 transaction.rollback(self.sopener, self.sjoin("journal"),
751 self.ui.warn)
751 self.ui.warn)
752 self.invalidate()
752 self.invalidate()
753 return True
753 return True
754 else:
754 else:
755 self.ui.warn(_("no interrupted transaction available\n"))
755 self.ui.warn(_("no interrupted transaction available\n"))
756 return False
756 return False
757 finally:
757 finally:
758 lock.release()
758 lock.release()
759
759
760 def rollback(self, dryrun=False, force=False):
760 def rollback(self, dryrun=False, force=False):
761 wlock = lock = None
761 wlock = lock = None
762 try:
762 try:
763 wlock = self.wlock()
763 wlock = self.wlock()
764 lock = self.lock()
764 lock = self.lock()
765 if os.path.exists(self.sjoin("undo")):
765 if os.path.exists(self.sjoin("undo")):
766 return self._rollback(dryrun, force)
766 return self._rollback(dryrun, force)
767 else:
767 else:
768 self.ui.warn(_("no rollback information available\n"))
768 self.ui.warn(_("no rollback information available\n"))
769 return 1
769 return 1
770 finally:
770 finally:
771 release(lock, wlock)
771 release(lock, wlock)
772
772
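# [editor's note] Hedged usage note: callers such as the "hg rollback" command
# invoke this as repo.rollback(dryrun=True) to only report what would be
# undone, or repo.rollback(force=True) to skip the "may lose data" safety
# check in _rollback() below; a return value of 0 means success.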
773 def _rollback(self, dryrun, force):
773 def _rollback(self, dryrun, force):
774 ui = self.ui
774 ui = self.ui
775 try:
775 try:
776 args = self.opener.read('undo.desc').splitlines()
776 args = self.opener.read('undo.desc').splitlines()
777 (oldlen, desc, detail) = (int(args[0]), args[1], None)
777 (oldlen, desc, detail) = (int(args[0]), args[1], None)
778 if len(args) >= 3:
778 if len(args) >= 3:
779 detail = args[2]
779 detail = args[2]
780 oldtip = oldlen - 1
780 oldtip = oldlen - 1
781
781
782 if detail and ui.verbose:
782 if detail and ui.verbose:
783 msg = (_('repository tip rolled back to revision %s'
783 msg = (_('repository tip rolled back to revision %s'
784 ' (undo %s: %s)\n')
784 ' (undo %s: %s)\n')
785 % (oldtip, desc, detail))
785 % (oldtip, desc, detail))
786 else:
786 else:
787 msg = (_('repository tip rolled back to revision %s'
787 msg = (_('repository tip rolled back to revision %s'
788 ' (undo %s)\n')
788 ' (undo %s)\n')
789 % (oldtip, desc))
789 % (oldtip, desc))
790 except IOError:
790 except IOError:
791 msg = _('rolling back unknown transaction\n')
791 msg = _('rolling back unknown transaction\n')
792 desc = None
792 desc = None
793
793
794 if not force and self['.'] != self['tip'] and desc == 'commit':
794 if not force and self['.'] != self['tip'] and desc == 'commit':
795 raise util.Abort(
795 raise util.Abort(
796 _('rollback of last commit while not checked out '
796 _('rollback of last commit while not checked out '
797 'may lose data'), hint=_('use -f to force'))
797 'may lose data'), hint=_('use -f to force'))
798
798
799 ui.status(msg)
799 ui.status(msg)
800 if dryrun:
800 if dryrun:
801 return 0
801 return 0
802
802
803 parents = self.dirstate.parents()
803 parents = self.dirstate.parents()
804 transaction.rollback(self.sopener, self.sjoin('undo'), ui.warn)
804 transaction.rollback(self.sopener, self.sjoin('undo'), ui.warn)
805 if os.path.exists(self.join('undo.bookmarks')):
805 if os.path.exists(self.join('undo.bookmarks')):
806 util.rename(self.join('undo.bookmarks'),
806 util.rename(self.join('undo.bookmarks'),
807 self.join('bookmarks'))
807 self.join('bookmarks'))
808 self.invalidate()
808 self.invalidate()
809
809
810 parentgone = (parents[0] not in self.changelog.nodemap or
810 parentgone = (parents[0] not in self.changelog.nodemap or
811 parents[1] not in self.changelog.nodemap)
811 parents[1] not in self.changelog.nodemap)
812 if parentgone:
812 if parentgone:
813 util.rename(self.join('undo.dirstate'), self.join('dirstate'))
813 util.rename(self.join('undo.dirstate'), self.join('dirstate'))
814 try:
814 try:
815 branch = self.opener.read('undo.branch')
815 branch = self.opener.read('undo.branch')
816 self.dirstate.setbranch(branch)
816 self.dirstate.setbranch(branch)
817 except IOError:
817 except IOError:
818 ui.warn(_('named branch could not be reset: '
818 ui.warn(_('named branch could not be reset: '
819 'current branch is still \'%s\'\n')
819 'current branch is still \'%s\'\n')
820 % self.dirstate.branch())
820 % self.dirstate.branch())
821
821
822 self.dirstate.invalidate()
822 self.dirstate.invalidate()
823 self.destroyed()
823 self.destroyed()
824 parents = tuple([p.rev() for p in self.parents()])
824 parents = tuple([p.rev() for p in self.parents()])
825 if len(parents) > 1:
825 if len(parents) > 1:
826 ui.status(_('working directory now based on '
826 ui.status(_('working directory now based on '
827 'revisions %d and %d\n') % parents)
827 'revisions %d and %d\n') % parents)
828 else:
828 else:
829 ui.status(_('working directory now based on '
829 ui.status(_('working directory now based on '
830 'revision %d\n') % parents)
830 'revision %d\n') % parents)
831 return 0
831 return 0
832
832
833 def invalidatecaches(self):
833 def invalidatecaches(self):
834 try:
834 try:
835 delattr(self, '_tagscache')
835 delattr(self, '_tagscache')
836 except AttributeError:
836 except AttributeError:
837 pass
837 pass
838
838
839 self._branchcache = None # in UTF-8
839 self._branchcache = None # in UTF-8
840 self._branchcachetip = None
840 self._branchcachetip = None
841
841
842 def invalidatedirstate(self):
842 def invalidatedirstate(self):
843 '''Invalidates the dirstate, causing the next call to dirstate
843 '''Invalidates the dirstate, causing the next call to dirstate
844 to check if it was modified since the last time it was read,
844 to check if it was modified since the last time it was read,
845 rereading it if it has.
845 rereading it if it has.
846
846
847 This is different from dirstate.invalidate() in that it does not
847 This is different from dirstate.invalidate() in that it does not
848 always reread the dirstate. Use dirstate.invalidate() if you want to
848 always reread the dirstate. Use dirstate.invalidate() if you want to
849 explicitly read the dirstate again (i.e. restoring it to a previous
849 explicitly read the dirstate again (i.e. restoring it to a previous
850 known good state).'''
850 known good state).'''
851 try:
851 try:
852 delattr(self, 'dirstate')
852 delattr(self, 'dirstate')
853 except AttributeError:
853 except AttributeError:
854 pass
854 pass
855
855
856 def invalidate(self):
856 def invalidate(self):
857 for k in self._filecache:
857 for k in self._filecache:
858 # dirstate is invalidated separately in invalidatedirstate()
858 # dirstate is invalidated separately in invalidatedirstate()
859 if k == 'dirstate':
859 if k == 'dirstate':
860 continue
860 continue
861
861
862 try:
862 try:
863 delattr(self, k)
863 delattr(self, k)
864 except AttributeError:
864 except AttributeError:
865 pass
865 pass
866 self.invalidatecaches()
866 self.invalidatecaches()
867
867
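# [editor's note] The invalidation above works because @propertycache stores
# the computed value directly in the instance __dict__, so delattr() is enough
# to force recomputation on the next access. A minimal self-contained sketch
# of that pattern (names are illustrative, not Mercurial's implementation):
#
#     class propertycachedemo(object):
#         def __init__(self, func):
#             self.func = func
#         def __get__(self, obj, type=None):
#             value = self.func(obj)
#             obj.__dict__[self.func.__name__] = value   # cache on instance
#             return value
#
#     class repoish(object):
#         @propertycachedemo
#         def _tagscache(self):
#             return {}          # pretend this is an expensive tag scan
#
#     r = repoish()
#     r._tagscache               # computed and cached
#     delattr(r, '_tagscache')   # what invalidatecaches() does
#     r._tagscache               # recomputed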
868 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
868 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
869 try:
869 try:
870 l = lock.lock(lockname, 0, releasefn, desc=desc)
870 l = lock.lock(lockname, 0, releasefn, desc=desc)
871 except error.LockHeld, inst:
871 except error.LockHeld, inst:
872 if not wait:
872 if not wait:
873 raise
873 raise
874 self.ui.warn(_("waiting for lock on %s held by %r\n") %
874 self.ui.warn(_("waiting for lock on %s held by %r\n") %
875 (desc, inst.locker))
875 (desc, inst.locker))
876 # default to 600 seconds timeout
876 # default to 600 seconds timeout
877 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
877 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
878 releasefn, desc=desc)
878 releasefn, desc=desc)
879 if acquirefn:
879 if acquirefn:
880 acquirefn()
880 acquirefn()
881 return l
881 return l
882
882
883 def lock(self, wait=True):
883 def lock(self, wait=True):
884 '''Lock the repository store (.hg/store) and return a weak reference
884 '''Lock the repository store (.hg/store) and return a weak reference
885 to the lock. Use this before modifying the store (e.g. committing or
885 to the lock. Use this before modifying the store (e.g. committing or
886 stripping). If you are opening a transaction, get a lock as well.'''
886 stripping). If you are opening a transaction, get a lock as well.'''
887 l = self._lockref and self._lockref()
887 l = self._lockref and self._lockref()
888 if l is not None and l.held:
888 if l is not None and l.held:
889 l.lock()
889 l.lock()
890 return l
890 return l
891
891
892 def unlock():
892 def unlock():
893 self.store.write()
893 self.store.write()
894 for k, ce in self._filecache.items():
894 for k, ce in self._filecache.items():
895 if k == 'dirstate':
895 if k == 'dirstate':
896 continue
896 continue
897 ce.refresh()
897 ce.refresh()
898
898
899 l = self._lock(self.sjoin("lock"), wait, unlock,
899 l = self._lock(self.sjoin("lock"), wait, unlock,
900 self.invalidate, _('repository %s') % self.origroot)
900 self.invalidate, _('repository %s') % self.origroot)
901 self._lockref = weakref.ref(l)
901 self._lockref = weakref.ref(l)
902 return l
902 return l
903
903
904 def wlock(self, wait=True):
904 def wlock(self, wait=True):
905 '''Lock the non-store parts of the repository (everything under
905 '''Lock the non-store parts of the repository (everything under
906 .hg except .hg/store) and return a weak reference to the lock.
906 .hg except .hg/store) and return a weak reference to the lock.
907 Use this before modifying files in .hg.'''
907 Use this before modifying files in .hg.'''
908 l = self._wlockref and self._wlockref()
908 l = self._wlockref and self._wlockref()
909 if l is not None and l.held:
909 if l is not None and l.held:
910 l.lock()
910 l.lock()
911 return l
911 return l
912
912
913 def unlock():
913 def unlock():
914 self.dirstate.write()
914 self.dirstate.write()
915 ce = self._filecache.get('dirstate')
915 ce = self._filecache.get('dirstate')
916 if ce:
916 if ce:
917 ce.refresh()
917 ce.refresh()
918
918
919 l = self._lock(self.join("wlock"), wait, unlock,
919 l = self._lock(self.join("wlock"), wait, unlock,
920 self.invalidatedirstate, _('working directory of %s') %
920 self.invalidatedirstate, _('working directory of %s') %
921 self.origroot)
921 self.origroot)
922 self._wlockref = weakref.ref(l)
922 self._wlockref = weakref.ref(l)
923 return l
923 return l
924
924
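# [editor's note] Hedged sketch of the lock ordering used in this file (as
# rollback() above and commit() below do): acquire wlock() before lock() and
# release them in reverse order:
#
#     wlock = repo.wlock()
#     try:
#         lock = repo.lock()
#         try:
#             pass   # mutate working-copy metadata and the store
#         finally:
#             lock.release()
#     finally:
#         wlock.release()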
925 def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
925 def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
926 """
926 """
927 commit an individual file as part of a larger transaction
927 commit an individual file as part of a larger transaction
928 """
928 """
929
929
930 fname = fctx.path()
930 fname = fctx.path()
931 text = fctx.data()
931 text = fctx.data()
932 flog = self.file(fname)
932 flog = self.file(fname)
933 fparent1 = manifest1.get(fname, nullid)
933 fparent1 = manifest1.get(fname, nullid)
934 fparent2 = fparent2o = manifest2.get(fname, nullid)
934 fparent2 = fparent2o = manifest2.get(fname, nullid)
935
935
936 meta = {}
936 meta = {}
937 copy = fctx.renamed()
937 copy = fctx.renamed()
938 if copy and copy[0] != fname:
938 if copy and copy[0] != fname:
939 # Mark the new revision of this file as a copy of another
939 # Mark the new revision of this file as a copy of another
940 # file. This copy data will effectively act as a parent
940 # file. This copy data will effectively act as a parent
941 # of this new revision. If this is a merge, the first
941 # of this new revision. If this is a merge, the first
942 # parent will be the nullid (meaning "look up the copy data")
942 # parent will be the nullid (meaning "look up the copy data")
943 # and the second one will be the other parent. For example:
943 # and the second one will be the other parent. For example:
944 #
944 #
945 # 0 --- 1 --- 3 rev1 changes file foo
945 # 0 --- 1 --- 3 rev1 changes file foo
946 # \ / rev2 renames foo to bar and changes it
946 # \ / rev2 renames foo to bar and changes it
947 # \- 2 -/ rev3 should have bar with all changes and
947 # \- 2 -/ rev3 should have bar with all changes and
948 # should record that bar descends from
948 # should record that bar descends from
949 # bar in rev2 and foo in rev1
949 # bar in rev2 and foo in rev1
950 #
950 #
951 # this allows this merge to succeed:
951 # this allows this merge to succeed:
952 #
952 #
953 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
953 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
954 # \ / merging rev3 and rev4 should use bar@rev2
954 # \ / merging rev3 and rev4 should use bar@rev2
955 # \- 2 --- 4 as the merge base
955 # \- 2 --- 4 as the merge base
956 #
956 #
957
957
958 cfname = copy[0]
958 cfname = copy[0]
959 crev = manifest1.get(cfname)
959 crev = manifest1.get(cfname)
960 newfparent = fparent2
960 newfparent = fparent2
961
961
962 if manifest2: # branch merge
962 if manifest2: # branch merge
963 if fparent2 == nullid or crev is None: # copied on remote side
963 if fparent2 == nullid or crev is None: # copied on remote side
964 if cfname in manifest2:
964 if cfname in manifest2:
965 crev = manifest2[cfname]
965 crev = manifest2[cfname]
966 newfparent = fparent1
966 newfparent = fparent1
967
967
968 # find source in nearest ancestor if we've lost track
968 # find source in nearest ancestor if we've lost track
969 if not crev:
969 if not crev:
970 self.ui.debug(" %s: searching for copy revision for %s\n" %
970 self.ui.debug(" %s: searching for copy revision for %s\n" %
971 (fname, cfname))
971 (fname, cfname))
972 for ancestor in self[None].ancestors():
972 for ancestor in self[None].ancestors():
973 if cfname in ancestor:
973 if cfname in ancestor:
974 crev = ancestor[cfname].filenode()
974 crev = ancestor[cfname].filenode()
975 break
975 break
976
976
977 if crev:
977 if crev:
978 self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
978 self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
979 meta["copy"] = cfname
979 meta["copy"] = cfname
980 meta["copyrev"] = hex(crev)
980 meta["copyrev"] = hex(crev)
981 fparent1, fparent2 = nullid, newfparent
981 fparent1, fparent2 = nullid, newfparent
982 else:
982 else:
983 self.ui.warn(_("warning: can't find ancestor for '%s' "
983 self.ui.warn(_("warning: can't find ancestor for '%s' "
984 "copied from '%s'!\n") % (fname, cfname))
984 "copied from '%s'!\n") % (fname, cfname))
985
985
986 elif fparent2 != nullid:
986 elif fparent2 != nullid:
987 # is one parent an ancestor of the other?
987 # is one parent an ancestor of the other?
988 fparentancestor = flog.ancestor(fparent1, fparent2)
988 fparentancestor = flog.ancestor(fparent1, fparent2)
989 if fparentancestor == fparent1:
989 if fparentancestor == fparent1:
990 fparent1, fparent2 = fparent2, nullid
990 fparent1, fparent2 = fparent2, nullid
991 elif fparentancestor == fparent2:
991 elif fparentancestor == fparent2:
992 fparent2 = nullid
992 fparent2 = nullid
993
993
994 # is the file changed?
994 # is the file changed?
995 if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
995 if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
996 changelist.append(fname)
996 changelist.append(fname)
997 return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
997 return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
998
998
999 # are just the flags changed during merge?
999 # are just the flags changed during merge?
1000 if fparent1 != fparent2o and manifest1.flags(fname) != fctx.flags():
1000 if fparent1 != fparent2o and manifest1.flags(fname) != fctx.flags():
1001 changelist.append(fname)
1001 changelist.append(fname)
1002
1002
1003 return fparent1
1003 return fparent1
1004
1004
1005 def commit(self, text="", user=None, date=None, match=None, force=False,
1005 def commit(self, text="", user=None, date=None, match=None, force=False,
1006 editor=False, extra={}):
1006 editor=False, extra={}):
1007 """Add a new revision to current repository.
1007 """Add a new revision to current repository.
1008
1008
1009 Revision information is gathered from the working directory;
1009 Revision information is gathered from the working directory;
1010 match can be used to filter the committed files. If editor is
1010 match can be used to filter the committed files. If editor is
1011 supplied, it is called to get a commit message.
1011 supplied, it is called to get a commit message.
1012 """
1012 """
1013
1013
1014 def fail(f, msg):
1014 def fail(f, msg):
1015 raise util.Abort('%s: %s' % (f, msg))
1015 raise util.Abort('%s: %s' % (f, msg))
1016
1016
1017 if not match:
1017 if not match:
1018 match = matchmod.always(self.root, '')
1018 match = matchmod.always(self.root, '')
1019
1019
1020 if not force:
1020 if not force:
1021 vdirs = []
1021 vdirs = []
1022 match.dir = vdirs.append
1022 match.dir = vdirs.append
1023 match.bad = fail
1023 match.bad = fail
1024
1024
1025 wlock = self.wlock()
1025 wlock = self.wlock()
1026 try:
1026 try:
1027 wctx = self[None]
1027 wctx = self[None]
1028 merge = len(wctx.parents()) > 1
1028 merge = len(wctx.parents()) > 1
1029
1029
1030 if (not force and merge and match and
1030 if (not force and merge and match and
1031 (match.files() or match.anypats())):
1031 (match.files() or match.anypats())):
1032 raise util.Abort(_('cannot partially commit a merge '
1032 raise util.Abort(_('cannot partially commit a merge '
1033 '(do not specify files or patterns)'))
1033 '(do not specify files or patterns)'))
1034
1034
1035 changes = self.status(match=match, clean=force)
1035 changes = self.status(match=match, clean=force)
1036 if force:
1036 if force:
1037 changes[0].extend(changes[6]) # mq may commit unchanged files
1037 changes[0].extend(changes[6]) # mq may commit unchanged files
1038
1038
1039 # check subrepos
1039 # check subrepos
1040 subs = []
1040 subs = []
1041 removedsubs = set()
1041 removedsubs = set()
1042 if '.hgsub' in wctx:
1042 if '.hgsub' in wctx:
1043 # only manage subrepos and .hgsubstate if .hgsub is present
1043 # only manage subrepos and .hgsubstate if .hgsub is present
1044 for p in wctx.parents():
1044 for p in wctx.parents():
1045 removedsubs.update(s for s in p.substate if match(s))
1045 removedsubs.update(s for s in p.substate if match(s))
1046 for s in wctx.substate:
1046 for s in wctx.substate:
1047 removedsubs.discard(s)
1047 removedsubs.discard(s)
1048 if match(s) and wctx.sub(s).dirty():
1048 if match(s) and wctx.sub(s).dirty():
1049 subs.append(s)
1049 subs.append(s)
1050 if (subs or removedsubs):
1050 if (subs or removedsubs):
1051 if (not match('.hgsub') and
1051 if (not match('.hgsub') and
1052 '.hgsub' in (wctx.modified() + wctx.added())):
1052 '.hgsub' in (wctx.modified() + wctx.added())):
1053 raise util.Abort(
1053 raise util.Abort(
1054 _("can't commit subrepos without .hgsub"))
1054 _("can't commit subrepos without .hgsub"))
1055 if '.hgsubstate' not in changes[0]:
1055 if '.hgsubstate' not in changes[0]:
1056 changes[0].insert(0, '.hgsubstate')
1056 changes[0].insert(0, '.hgsubstate')
1057 if '.hgsubstate' in changes[2]:
1057 if '.hgsubstate' in changes[2]:
1058 changes[2].remove('.hgsubstate')
1058 changes[2].remove('.hgsubstate')
1059 elif '.hgsub' in changes[2]:
1059 elif '.hgsub' in changes[2]:
1060 # clean up .hgsubstate when .hgsub is removed
1060 # clean up .hgsubstate when .hgsub is removed
1061 if ('.hgsubstate' in wctx and
1061 if ('.hgsubstate' in wctx and
1062 '.hgsubstate' not in changes[0] + changes[1] + changes[2]):
1062 '.hgsubstate' not in changes[0] + changes[1] + changes[2]):
1063 changes[2].insert(0, '.hgsubstate')
1063 changes[2].insert(0, '.hgsubstate')
1064
1064
1065 if subs and not self.ui.configbool('ui', 'commitsubrepos', False):
1065 if subs and not self.ui.configbool('ui', 'commitsubrepos', False):
1066 changedsubs = [s for s in subs if wctx.sub(s).dirty(True)]
1066 changedsubs = [s for s in subs if wctx.sub(s).dirty(True)]
1067 if changedsubs:
1067 if changedsubs:
1068 raise util.Abort(_("uncommitted changes in subrepo %s")
1068 raise util.Abort(_("uncommitted changes in subrepo %s")
1069 % changedsubs[0],
1069 % changedsubs[0],
1070 hint=_("use --subrepos for recursive commit"))
1070 hint=_("use --subrepos for recursive commit"))
1071
1071
1072 # make sure all explicit patterns are matched
1072 # make sure all explicit patterns are matched
1073 if not force and match.files():
1073 if not force and match.files():
1074 matched = set(changes[0] + changes[1] + changes[2])
1074 matched = set(changes[0] + changes[1] + changes[2])
1075
1075
1076 for f in match.files():
1076 for f in match.files():
1077 if f == '.' or f in matched or f in wctx.substate:
1077 if f == '.' or f in matched or f in wctx.substate:
1078 continue
1078 continue
1079 if f in changes[3]: # missing
1079 if f in changes[3]: # missing
1080 fail(f, _('file not found!'))
1080 fail(f, _('file not found!'))
1081 if f in vdirs: # visited directory
1081 if f in vdirs: # visited directory
1082 d = f + '/'
1082 d = f + '/'
1083 for mf in matched:
1083 for mf in matched:
1084 if mf.startswith(d):
1084 if mf.startswith(d):
1085 break
1085 break
1086 else:
1086 else:
1087 fail(f, _("no match under directory!"))
1087 fail(f, _("no match under directory!"))
1088 elif f not in self.dirstate:
1088 elif f not in self.dirstate:
1089 fail(f, _("file not tracked!"))
1089 fail(f, _("file not tracked!"))
1090
1090
1091 if (not force and not extra.get("close") and not merge
1091 if (not force and not extra.get("close") and not merge
1092 and not (changes[0] or changes[1] or changes[2])
1092 and not (changes[0] or changes[1] or changes[2])
1093 and wctx.branch() == wctx.p1().branch()):
1093 and wctx.branch() == wctx.p1().branch()):
1094 return None
1094 return None
1095
1095
1096 ms = mergemod.mergestate(self)
1096 ms = mergemod.mergestate(self)
1097 for f in changes[0]:
1097 for f in changes[0]:
1098 if f in ms and ms[f] == 'u':
1098 if f in ms and ms[f] == 'u':
1099 raise util.Abort(_("unresolved merge conflicts "
1099 raise util.Abort(_("unresolved merge conflicts "
1100 "(see hg help resolve)"))
1100 "(see hg help resolve)"))
1101
1101
1102 cctx = context.workingctx(self, text, user, date, extra, changes)
1102 cctx = context.workingctx(self, text, user, date, extra, changes)
1103 if editor:
1103 if editor:
1104 cctx._text = editor(self, cctx, subs)
1104 cctx._text = editor(self, cctx, subs)
1105 edited = (text != cctx._text)
1105 edited = (text != cctx._text)
1106
1106
1107 # commit subs
1107 # commit subs
1108 if subs or removedsubs:
1108 if subs or removedsubs:
1109 state = wctx.substate.copy()
1109 state = wctx.substate.copy()
1110 for s in sorted(subs):
1110 for s in sorted(subs):
1111 sub = wctx.sub(s)
1111 sub = wctx.sub(s)
1112 self.ui.status(_('committing subrepository %s\n') %
1112 self.ui.status(_('committing subrepository %s\n') %
1113 subrepo.subrelpath(sub))
1113 subrepo.subrelpath(sub))
1114 sr = sub.commit(cctx._text, user, date)
1114 sr = sub.commit(cctx._text, user, date)
1115 state[s] = (state[s][0], sr)
1115 state[s] = (state[s][0], sr)
1116 subrepo.writestate(self, state)
1116 subrepo.writestate(self, state)
1117
1117
1118 # Save commit message in case this transaction gets rolled back
1118 # Save commit message in case this transaction gets rolled back
1119 # (e.g. by a pretxncommit hook). Leave the content alone on
1119 # (e.g. by a pretxncommit hook). Leave the content alone on
1120 # the assumption that the user will use the same editor again.
1120 # the assumption that the user will use the same editor again.
1121 msgfn = self.savecommitmessage(cctx._text)
1121 msgfn = self.savecommitmessage(cctx._text)
1122
1122
1123 p1, p2 = self.dirstate.parents()
1123 p1, p2 = self.dirstate.parents()
1124 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
1124 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
1125 try:
1125 try:
1126 self.hook("precommit", throw=True, parent1=hookp1, parent2=hookp2)
1126 self.hook("precommit", throw=True, parent1=hookp1, parent2=hookp2)
1127 ret = self.commitctx(cctx, True)
1127 ret = self.commitctx(cctx, True)
1128 except:
1128 except:
1129 if edited:
1129 if edited:
1130 self.ui.write(
1130 self.ui.write(
1131 _('note: commit message saved in %s\n') % msgfn)
1131 _('note: commit message saved in %s\n') % msgfn)
1132 raise
1132 raise
1133
1133
1134 # update bookmarks, dirstate and mergestate
1134 # update bookmarks, dirstate and mergestate
1135 bookmarks.update(self, p1, ret)
1135 bookmarks.update(self, p1, ret)
1136 for f in changes[0] + changes[1]:
1136 for f in changes[0] + changes[1]:
1137 self.dirstate.normal(f)
1137 self.dirstate.normal(f)
1138 for f in changes[2]:
1138 for f in changes[2]:
1139 self.dirstate.drop(f)
1139 self.dirstate.drop(f)
1140 self.dirstate.setparents(ret)
1140 self.dirstate.setparents(ret)
1141 ms.reset()
1141 ms.reset()
1142 finally:
1142 finally:
1143 wlock.release()
1143 wlock.release()
1144
1144
1145 self.hook("commit", node=hex(ret), parent1=hookp1, parent2=hookp2)
1145 self.hook("commit", node=hex(ret), parent1=hookp1, parent2=hookp2)
1146 return ret
1146 return ret
1147
1147
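# [editor's note] Hedged sketch of a programmatic commit through the method
# above (illustrative; reuses the repo object from the earlier sketches and
# commits whatever changes are pending in the working directory):
#
#     from mercurial.node import hex
#
#     node = repo.commit(text='automated commit',
#                        user='Example <me@example.com>')
#     if node is None:
#         print 'nothing changed'
#     else:
#         print 'created', hex(node)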
1148 def commitctx(self, ctx, error=False):
1148 def commitctx(self, ctx, error=False):
1149 """Add a new revision to current repository.
1149 """Add a new revision to current repository.
1150 Revision information is passed via the context argument.
1150 Revision information is passed via the context argument.
1151 """
1151 """
1152
1152
1153 tr = lock = None
1153 tr = lock = None
1154 removed = list(ctx.removed())
1154 removed = list(ctx.removed())
1155 p1, p2 = ctx.p1(), ctx.p2()
1155 p1, p2 = ctx.p1(), ctx.p2()
1156 user = ctx.user()
1156 user = ctx.user()
1157
1157
1158 lock = self.lock()
1158 lock = self.lock()
1159 try:
1159 try:
1160 tr = self.transaction("commit")
1160 tr = self.transaction("commit")
1161 trp = weakref.proxy(tr)
1161 trp = weakref.proxy(tr)
1162
1162
1163 if ctx.files():
1163 if ctx.files():
1164 m1 = p1.manifest().copy()
1164 m1 = p1.manifest().copy()
1165 m2 = p2.manifest()
1165 m2 = p2.manifest()
1166
1166
1167 # check in files
1167 # check in files
1168 new = {}
1168 new = {}
1169 changed = []
1169 changed = []
1170 linkrev = len(self)
1170 linkrev = len(self)
1171 for f in sorted(ctx.modified() + ctx.added()):
1171 for f in sorted(ctx.modified() + ctx.added()):
1172 self.ui.note(f + "\n")
1172 self.ui.note(f + "\n")
1173 try:
1173 try:
1174 fctx = ctx[f]
1174 fctx = ctx[f]
1175 new[f] = self._filecommit(fctx, m1, m2, linkrev, trp,
1175 new[f] = self._filecommit(fctx, m1, m2, linkrev, trp,
1176 changed)
1176 changed)
1177 m1.set(f, fctx.flags())
1177 m1.set(f, fctx.flags())
1178 except OSError, inst:
1178 except OSError, inst:
1179 self.ui.warn(_("trouble committing %s!\n") % f)
1179 self.ui.warn(_("trouble committing %s!\n") % f)
1180 raise
1180 raise
1181 except IOError, inst:
1181 except IOError, inst:
1182 errcode = getattr(inst, 'errno', errno.ENOENT)
1182 errcode = getattr(inst, 'errno', errno.ENOENT)
1183 if error or errcode and errcode != errno.ENOENT:
1183 if error or errcode and errcode != errno.ENOENT:
1184 self.ui.warn(_("trouble committing %s!\n") % f)
1184 self.ui.warn(_("trouble committing %s!\n") % f)
1185 raise
1185 raise
1186 else:
1186 else:
1187 removed.append(f)
1187 removed.append(f)
1188
1188
1189 # update manifest
1189 # update manifest
1190 m1.update(new)
1190 m1.update(new)
1191 removed = [f for f in sorted(removed) if f in m1 or f in m2]
1191 removed = [f for f in sorted(removed) if f in m1 or f in m2]
1192 drop = [f for f in removed if f in m1]
1192 drop = [f for f in removed if f in m1]
1193 for f in drop:
1193 for f in drop:
1194 del m1[f]
1194 del m1[f]
1195 mn = self.manifest.add(m1, trp, linkrev, p1.manifestnode(),
1195 mn = self.manifest.add(m1, trp, linkrev, p1.manifestnode(),
1196 p2.manifestnode(), (new, drop))
1196 p2.manifestnode(), (new, drop))
1197 files = changed + removed
1197 files = changed + removed
1198 else:
1198 else:
1199 mn = p1.manifestnode()
1199 mn = p1.manifestnode()
1200 files = []
1200 files = []
1201
1201
1202 # update changelog
1202 # update changelog
1203 self.changelog.delayupdate()
1203 self.changelog.delayupdate()
1204 n = self.changelog.add(mn, files, ctx.description(),
1204 n = self.changelog.add(mn, files, ctx.description(),
1205 trp, p1.node(), p2.node(),
1205 trp, p1.node(), p2.node(),
1206 user, ctx.date(), ctx.extra().copy())
1206 user, ctx.date(), ctx.extra().copy())
1207 p = lambda: self.changelog.writepending() and self.root or ""
1207 p = lambda: self.changelog.writepending() and self.root or ""
1208 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
1208 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
1209 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
1209 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
1210 parent2=xp2, pending=p)
1210 parent2=xp2, pending=p)
1211 self.changelog.finalize(trp)
1211 self.changelog.finalize(trp)
1212 tr.close()
1212 tr.close()
1213
1213
1214 if self._branchcache:
1214 if self._branchcache:
1215 self.updatebranchcache()
1215 self.updatebranchcache()
1216 return n
1216 return n
1217 finally:
1217 finally:
1218 if tr:
1218 if tr:
1219 tr.release()
1219 tr.release()
1220 lock.release()
1220 lock.release()
1221
1221
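# [editor's note] commitctx() is also the entry point for building changesets
# without touching the working directory, via an in-memory context. A hedged
# sketch using context.memctx as it existed in this era (the file name and
# contents below are made up):
#
#     from mercurial import context
#
#     def getfilectx(repo, memctx, path):
#         return context.memfilectx(path, 'contents of %s\n' % path)
#
#     p1 = repo['tip'].node()
#     ctx = context.memctx(repo, (p1, None), 'synthesized commit',
#                          ['generated.txt'], getfilectx,
#                          user='Example <me@example.com>')
#     newnode = repo.commitctx(ctx)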
1222 def destroyed(self):
1222 def destroyed(self):
1223 '''Inform the repository that nodes have been destroyed.
1223 '''Inform the repository that nodes have been destroyed.
1224 Intended for use by strip and rollback, so there's a common
1224 Intended for use by strip and rollback, so there's a common
1225 place for anything that has to be done after destroying history.'''
1225 place for anything that has to be done after destroying history.'''
1226 # XXX it might be nice if we could take the list of destroyed
1226 # XXX it might be nice if we could take the list of destroyed
1227 # nodes, but I don't see an easy way for rollback() to do that
1227 # nodes, but I don't see an easy way for rollback() to do that
1228
1228
1229 # Ensure the persistent tag cache is updated. Doing it now
1229 # Ensure the persistent tag cache is updated. Doing it now
1230 # means that the tag cache only has to worry about destroyed
1230 # means that the tag cache only has to worry about destroyed
1231 # heads immediately after a strip/rollback. That in turn
1231 # heads immediately after a strip/rollback. That in turn
1232 # guarantees that "cachetip == currenttip" (comparing both rev
1232 # guarantees that "cachetip == currenttip" (comparing both rev
1233 # and node) always means no nodes have been added or destroyed.
1233 # and node) always means no nodes have been added or destroyed.
1234
1234
1235 # XXX this is suboptimal when qrefresh'ing: we strip the current
1235 # XXX this is suboptimal when qrefresh'ing: we strip the current
1236 # head, refresh the tag cache, then immediately add a new head.
1236 # head, refresh the tag cache, then immediately add a new head.
1237 # But I think doing it this way is necessary for the "instant
1237 # But I think doing it this way is necessary for the "instant
1238 # tag cache retrieval" case to work.
1238 # tag cache retrieval" case to work.
1239 self.invalidatecaches()
1239 self.invalidatecaches()
1240
1240
1241 def walk(self, match, node=None):
1241 def walk(self, match, node=None):
1242 '''
1242 '''
1243 walk recursively through the directory tree or a given
1243 walk recursively through the directory tree or a given
1244 changeset, finding all files matched by the match
1244 changeset, finding all files matched by the match
1245 function
1245 function
1246 '''
1246 '''
1247 return self[node].walk(match)
1247 return self[node].walk(match)
1248
1248
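# [editor's note] Hedged sketch of walk() with a match object: node=None walks
# the working directory, any other node walks that changeset (pattern syntax
# as accepted by this era's match module):
#
#     from mercurial import match as matchmod
#
#     m = matchmod.match(repo.root, repo.getcwd(), ['glob:**.py'])
#     for f in repo.walk(m):
#         print f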
1249 def status(self, node1='.', node2=None, match=None,
1249 def status(self, node1='.', node2=None, match=None,
1250 ignored=False, clean=False, unknown=False,
1250 ignored=False, clean=False, unknown=False,
1251 listsubrepos=False):
1251 listsubrepos=False):
1252 """return status of files between two nodes or node and working directory
1252 """return status of files between two nodes or node and working directory
1253
1253
1254 If node1 is None, use the first dirstate parent instead.
1254 If node1 is None, use the first dirstate parent instead.
1255 If node2 is None, compare node1 with working directory.
1255 If node2 is None, compare node1 with working directory.
1256 """
1256 """
1257
1257
1258 def mfmatches(ctx):
1258 def mfmatches(ctx):
1259 mf = ctx.manifest().copy()
1259 mf = ctx.manifest().copy()
1260 for fn in mf.keys():
1260 for fn in mf.keys():
1261 if not match(fn):
1261 if not match(fn):
1262 del mf[fn]
1262 del mf[fn]
1263 return mf
1263 return mf
1264
1264
1265 if isinstance(node1, context.changectx):
1265 if isinstance(node1, context.changectx):
1266 ctx1 = node1
1266 ctx1 = node1
1267 else:
1267 else:
1268 ctx1 = self[node1]
1268 ctx1 = self[node1]
1269 if isinstance(node2, context.changectx):
1269 if isinstance(node2, context.changectx):
1270 ctx2 = node2
1270 ctx2 = node2
1271 else:
1271 else:
1272 ctx2 = self[node2]
1272 ctx2 = self[node2]
1273
1273
1274 working = ctx2.rev() is None
1274 working = ctx2.rev() is None
1275 parentworking = working and ctx1 == self['.']
1275 parentworking = working and ctx1 == self['.']
1276 match = match or matchmod.always(self.root, self.getcwd())
1276 match = match or matchmod.always(self.root, self.getcwd())
1277 listignored, listclean, listunknown = ignored, clean, unknown
1277 listignored, listclean, listunknown = ignored, clean, unknown
1278
1278
1279 # load earliest manifest first for caching reasons
1279 # load earliest manifest first for caching reasons
1280 if not working and ctx2.rev() < ctx1.rev():
1280 if not working and ctx2.rev() < ctx1.rev():
1281 ctx2.manifest()
1281 ctx2.manifest()
1282
1282
1283 if not parentworking:
1283 if not parentworking:
1284 def bad(f, msg):
1284 def bad(f, msg):
1285 if f not in ctx1:
1285 if f not in ctx1:
1286 self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
1286 self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
1287 match.bad = bad
1287 match.bad = bad
1288
1288
1289 if working: # we need to scan the working dir
1289 if working: # we need to scan the working dir
1290 subrepos = []
1290 subrepos = []
1291 if '.hgsub' in self.dirstate:
1291 if '.hgsub' in self.dirstate:
1292 subrepos = ctx2.substate.keys()
1292 subrepos = ctx2.substate.keys()
1293 s = self.dirstate.status(match, subrepos, listignored,
1293 s = self.dirstate.status(match, subrepos, listignored,
1294 listclean, listunknown)
1294 listclean, listunknown)
1295 cmp, modified, added, removed, deleted, unknown, ignored, clean = s
1295 cmp, modified, added, removed, deleted, unknown, ignored, clean = s
1296
1296
1297 # check for any possibly clean files
1297 # check for any possibly clean files
1298 if parentworking and cmp:
1298 if parentworking and cmp:
1299 fixup = []
1299 fixup = []
1300 # do a full compare of any files that might have changed
1300 # do a full compare of any files that might have changed
1301 for f in sorted(cmp):
1301 for f in sorted(cmp):
1302 if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
1302 if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
1303 or ctx1[f].cmp(ctx2[f])):
1303 or ctx1[f].cmp(ctx2[f])):
1304 modified.append(f)
1304 modified.append(f)
1305 else:
1305 else:
1306 fixup.append(f)
1306 fixup.append(f)
1307
1307
1308 # update dirstate for files that are actually clean
1308 # update dirstate for files that are actually clean
1309 if fixup:
1309 if fixup:
1310 if listclean:
1310 if listclean:
1311 clean += fixup
1311 clean += fixup
1312
1312
1313 try:
1313 try:
1314 # updating the dirstate is optional
1314 # updating the dirstate is optional
1315 # so we don't wait on the lock
1315 # so we don't wait on the lock
1316 wlock = self.wlock(False)
1316 wlock = self.wlock(False)
1317 try:
1317 try:
1318 for f in fixup:
1318 for f in fixup:
1319 self.dirstate.normal(f)
1319 self.dirstate.normal(f)
1320 finally:
1320 finally:
1321 wlock.release()
1321 wlock.release()
1322 except error.LockError:
1322 except error.LockError:
1323 pass
1323 pass
1324
1324
1325 if not parentworking:
1325 if not parentworking:
1326 mf1 = mfmatches(ctx1)
1326 mf1 = mfmatches(ctx1)
1327 if working:
1327 if working:
1328 # we are comparing working dir against non-parent
1328 # we are comparing working dir against non-parent
1329 # generate a pseudo-manifest for the working dir
1329 # generate a pseudo-manifest for the working dir
1330 mf2 = mfmatches(self['.'])
1330 mf2 = mfmatches(self['.'])
1331 for f in cmp + modified + added:
1331 for f in cmp + modified + added:
1332 mf2[f] = None
1332 mf2[f] = None
1333 mf2.set(f, ctx2.flags(f))
1333 mf2.set(f, ctx2.flags(f))
1334 for f in removed:
1334 for f in removed:
1335 if f in mf2:
1335 if f in mf2:
1336 del mf2[f]
1336 del mf2[f]
1337 else:
1337 else:
1338 # we are comparing two revisions
1338 # we are comparing two revisions
1339 deleted, unknown, ignored = [], [], []
1339 deleted, unknown, ignored = [], [], []
1340 mf2 = mfmatches(ctx2)
1340 mf2 = mfmatches(ctx2)
1341
1341
1342 modified, added, clean = [], [], []
1342 modified, added, clean = [], [], []
1343 for fn in mf2:
1343 for fn in mf2:
1344 if fn in mf1:
1344 if fn in mf1:
1345 if (fn not in deleted and
1345 if (fn not in deleted and
1346 (mf1.flags(fn) != mf2.flags(fn) or
1346 (mf1.flags(fn) != mf2.flags(fn) or
1347 (mf1[fn] != mf2[fn] and
1347 (mf1[fn] != mf2[fn] and
1348 (mf2[fn] or ctx1[fn].cmp(ctx2[fn]))))):
1348 (mf2[fn] or ctx1[fn].cmp(ctx2[fn]))))):
1349 modified.append(fn)
1349 modified.append(fn)
1350 elif listclean:
1350 elif listclean:
1351 clean.append(fn)
1351 clean.append(fn)
1352 del mf1[fn]
1352 del mf1[fn]
1353 elif fn not in deleted:
1353 elif fn not in deleted:
1354 added.append(fn)
1354 added.append(fn)
1355 removed = mf1.keys()
1355 removed = mf1.keys()
1356
1356
1357 if working and modified and not self.dirstate._checklink:
1357 if working and modified and not self.dirstate._checklink:
1358 # Symlink placeholders may get non-symlink-like contents
1358 # Symlink placeholders may get non-symlink-like contents
1359 # via user error or dereferencing by NFS or Samba servers,
1359 # via user error or dereferencing by NFS or Samba servers,
1360 # so we filter out any placeholders that don't look like a
1360 # so we filter out any placeholders that don't look like a
1361 # symlink
1361 # symlink
1362 sane = []
1362 sane = []
1363 for f in modified:
1363 for f in modified:
1364 if ctx2.flags(f) == 'l':
1364 if ctx2.flags(f) == 'l':
1365 d = ctx2[f].data()
1365 d = ctx2[f].data()
1366 if len(d) >= 1024 or '\n' in d or util.binary(d):
1366 if len(d) >= 1024 or '\n' in d or util.binary(d):
1367 self.ui.debug('ignoring suspect symlink placeholder'
1367 self.ui.debug('ignoring suspect symlink placeholder'
1368 ' "%s"\n' % f)
1368 ' "%s"\n' % f)
1369 continue
1369 continue
1370 sane.append(f)
1370 sane.append(f)
1371 modified = sane
1371 modified = sane
1372
1372
1373 r = modified, added, removed, deleted, unknown, ignored, clean
1373 r = modified, added, removed, deleted, unknown, ignored, clean
1374
1374
1375 if listsubrepos:
1375 if listsubrepos:
1376 for subpath, sub in subrepo.itersubrepos(ctx1, ctx2):
1376 for subpath, sub in subrepo.itersubrepos(ctx1, ctx2):
1377 if working:
1377 if working:
1378 rev2 = None
1378 rev2 = None
1379 else:
1379 else:
1380 rev2 = ctx2.substate[subpath][1]
1380 rev2 = ctx2.substate[subpath][1]
1381 try:
1381 try:
1382 submatch = matchmod.narrowmatcher(subpath, match)
1382 submatch = matchmod.narrowmatcher(subpath, match)
1383 s = sub.status(rev2, match=submatch, ignored=listignored,
1383 s = sub.status(rev2, match=submatch, ignored=listignored,
1384 clean=listclean, unknown=listunknown,
1384 clean=listclean, unknown=listunknown,
1385 listsubrepos=True)
1385 listsubrepos=True)
1386 for rfiles, sfiles in zip(r, s):
1386 for rfiles, sfiles in zip(r, s):
1387 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
1387 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
1388 except error.LookupError:
1388 except error.LookupError:
1389 self.ui.status(_("skipping missing subrepository: %s\n")
1389 self.ui.status(_("skipping missing subrepository: %s\n")
1390 % subpath)
1390 % subpath)
1391
1391
1392 for l in r:
1392 for l in r:
1393 l.sort()
1393 l.sort()
1394 return r
1394 return r
1395
1395
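# Illustrative usage sketch (not part of this module): status() returns the
# seven lists in exactly the order assembled into ``r`` above. Assumes an
# existing repository at "." and the 1.9/2.0-era API used throughout this file.
#
#   from mercurial import ui as uimod, hg
#   repo = hg.repository(uimod.ui(), '.')
#   modified, added, removed, deleted, unknown, ignored, clean = \
#       repo.status(ignored=True, clean=True, unknown=True)
#   for f in modified:
#       print 'M', f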
1396 def heads(self, start=None):
1396 def heads(self, start=None):
1397 heads = self.changelog.heads(start)
1397 heads = self.changelog.heads(start)
1398 # sort the output in rev descending order
1398 # sort the output in rev descending order
1399 return sorted(heads, key=self.changelog.rev, reverse=True)
1399 return sorted(heads, key=self.changelog.rev, reverse=True)
1400
1400
1401 def branchheads(self, branch=None, start=None, closed=False):
1401 def branchheads(self, branch=None, start=None, closed=False):
1402 '''return a (possibly filtered) list of heads for the given branch
1402 '''return a (possibly filtered) list of heads for the given branch
1403
1403
1404 Heads are returned in topological order, from newest to oldest.
1404 Heads are returned in topological order, from newest to oldest.
1405 If branch is None, use the dirstate branch.
1405 If branch is None, use the dirstate branch.
1406 If start is not None, return only heads reachable from start.
1406 If start is not None, return only heads reachable from start.
1407 If closed is True, return heads that are marked as closed as well.
1407 If closed is True, return heads that are marked as closed as well.
1408 '''
1408 '''
1409 if branch is None:
1409 if branch is None:
1410 branch = self[None].branch()
1410 branch = self[None].branch()
1411 branches = self.branchmap()
1411 branches = self.branchmap()
1412 if branch not in branches:
1412 if branch not in branches:
1413 return []
1413 return []
1414 # the cache returns heads ordered lowest to highest
1414 # the cache returns heads ordered lowest to highest
1415 bheads = list(reversed(branches[branch]))
1415 bheads = list(reversed(branches[branch]))
1416 if start is not None:
1416 if start is not None:
1417 # filter out the heads that cannot be reached from startrev
1417 # filter out the heads that cannot be reached from startrev
1418 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1418 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1419 bheads = [h for h in bheads if h in fbheads]
1419 bheads = [h for h in bheads if h in fbheads]
1420 if not closed:
1420 if not closed:
1421 bheads = [h for h in bheads if
1421 bheads = [h for h in bheads if
1422 ('close' not in self.changelog.read(h)[5])]
1422 ('close' not in self.changelog.read(h)[5])]
1423 return bheads
1423 return bheads
1424
1424
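# Illustrative sketch (not part of this module): heads() and branchheads()
# both return binary node ids, newest first. The branch name below is only an
# example; a repository without it simply yields an empty list.
#
#   from mercurial import ui as uimod, hg
#   from mercurial.node import short
#   repo = hg.repository(uimod.ui(), '.')
#   for h in repo.branchheads('default', closed=True):
#       print short(h)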
1425 def branches(self, nodes):
1425 def branches(self, nodes):
1426 if not nodes:
1426 if not nodes:
1427 nodes = [self.changelog.tip()]
1427 nodes = [self.changelog.tip()]
1428 b = []
1428 b = []
1429 for n in nodes:
1429 for n in nodes:
1430 t = n
1430 t = n
1431 while True:
1431 while True:
1432 p = self.changelog.parents(n)
1432 p = self.changelog.parents(n)
1433 if p[1] != nullid or p[0] == nullid:
1433 if p[1] != nullid or p[0] == nullid:
1434 b.append((t, n, p[0], p[1]))
1434 b.append((t, n, p[0], p[1]))
1435 break
1435 break
1436 n = p[0]
1436 n = p[0]
1437 return b
1437 return b
1438
1438
1439 def between(self, pairs):
1439 def between(self, pairs):
1440 r = []
1440 r = []
1441
1441
1442 for top, bottom in pairs:
1442 for top, bottom in pairs:
1443 n, l, i = top, [], 0
1443 n, l, i = top, [], 0
1444 f = 1
1444 f = 1
1445
1445
1446 while n != bottom and n != nullid:
1446 while n != bottom and n != nullid:
1447 p = self.changelog.parents(n)[0]
1447 p = self.changelog.parents(n)[0]
1448 if i == f:
1448 if i == f:
1449 l.append(n)
1449 l.append(n)
1450 f = f * 2
1450 f = f * 2
1451 n = p
1451 n = p
1452 i += 1
1452 i += 1
1453
1453
1454 r.append(l)
1454 r.append(l)
1455
1455
1456 return r
1456 return r
1457
1457
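# between() walks the first-parent chain from each ``top`` towards ``bottom``
# and, via the ``i == f`` / ``f = f * 2`` bookkeeping above, records the nodes
# that lie 1, 2, 4, 8, ... steps below ``top``. The result is a
# logarithmically spaced sample of each range, as served by the legacy
# ``between`` wire-protocol command.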
1458 def pull(self, remote, heads=None, force=False):
1458 def pull(self, remote, heads=None, force=False):
1459 lock = self.lock()
1459 lock = self.lock()
1460 try:
1460 try:
1461 tmp = discovery.findcommonincoming(self, remote, heads=heads,
1461 tmp = discovery.findcommonincoming(self, remote, heads=heads,
1462 force=force)
1462 force=force)
1463 common, fetch, rheads = tmp
1463 common, fetch, rheads = tmp
1464 if not fetch:
1464 if not fetch:
1465 self.ui.status(_("no changes found\n"))
1465 self.ui.status(_("no changes found\n"))
1466 result = 0
1466 result = 0
1467 else:
1467 else:
1468 if heads is None and list(common) == [nullid]:
1468 if heads is None and list(common) == [nullid]:
1469 self.ui.status(_("requesting all changes\n"))
1469 self.ui.status(_("requesting all changes\n"))
1470 elif heads is None and remote.capable('changegroupsubset'):
1470 elif heads is None and remote.capable('changegroupsubset'):
1471 # issue1320, avoid a race if remote changed after discovery
1471 # issue1320, avoid a race if remote changed after discovery
1472 heads = rheads
1472 heads = rheads
1473
1473
1474 if remote.capable('getbundle'):
1474 if remote.capable('getbundle'):
1475 cg = remote.getbundle('pull', common=common,
1475 cg = remote.getbundle('pull', common=common,
1476 heads=heads or rheads)
1476 heads=heads or rheads)
1477 elif heads is None:
1477 elif heads is None:
1478 cg = remote.changegroup(fetch, 'pull')
1478 cg = remote.changegroup(fetch, 'pull')
1479 elif not remote.capable('changegroupsubset'):
1479 elif not remote.capable('changegroupsubset'):
1480 raise util.Abort(_("partial pull cannot be done because "
1480 raise util.Abort(_("partial pull cannot be done because "
1481 "other repository doesn't support "
1481 "other repository doesn't support "
1482 "changegroupsubset."))
1482 "changegroupsubset."))
1483 else:
1483 else:
1484 cg = remote.changegroupsubset(fetch, heads, 'pull')
1484 cg = remote.changegroupsubset(fetch, heads, 'pull')
1485 result = self.addchangegroup(cg, 'pull', remote.url(),
1485 result = self.addchangegroup(cg, 'pull', remote.url(),
1486 lock=lock)
1486 lock=lock)
1487 finally:
1487 finally:
1488 lock.release()
1488 lock.release()
1489
1489
1490 return result
1490 return result
1491
1491
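# Illustrative sketch (not part of this module): pulling from a remote peer.
# The URL is a placeholder; in this era hg.repository() also returns http/ssh
# repository objects that support the capability checks used above.
#
#   from mercurial import ui as uimod, hg
#   repo = hg.repository(uimod.ui(), '.')
#   other = hg.repository(uimod.ui(), 'http://example.com/repo')
#   repo.pull(other)          # or repo.pull(other, heads=[...], force=True)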
1492 def checkpush(self, force, revs):
1492 def checkpush(self, force, revs):
1493 """Extensions can override this function if additional checks have
1493 """Extensions can override this function if additional checks have
1494 to be performed before pushing, or call it if they override push
1494 to be performed before pushing, or call it if they override push
1495 command.
1495 command.
1496 """
1496 """
1497 pass
1497 pass
1498
1498
1499 def push(self, remote, force=False, revs=None, newbranch=False):
1499 def push(self, remote, force=False, revs=None, newbranch=False):
1500 '''Push outgoing changesets (limited by revs) from the current
1500 '''Push outgoing changesets (limited by revs) from the current
1501 repository to remote. Return an integer:
1501 repository to remote. Return an integer:
1502 - 0 means HTTP error *or* nothing to push
1502 - 0 means HTTP error *or* nothing to push
1503 - 1 means we pushed and remote head count is unchanged *or*
1503 - 1 means we pushed and remote head count is unchanged *or*
1504 we have outgoing changesets but refused to push
1504 we have outgoing changesets but refused to push
1505 - other values as described by addchangegroup()
1505 - other values as described by addchangegroup()
1506 '''
1506 '''
1507 # there are two ways to push to remote repo:
1507 # there are two ways to push to remote repo:
1508 #
1508 #
1509 # addchangegroup assumes local user can lock remote
1509 # addchangegroup assumes local user can lock remote
1510 # repo (local filesystem, old ssh servers).
1510 # repo (local filesystem, old ssh servers).
1511 #
1511 #
1512 # unbundle assumes local user cannot lock remote repo (new ssh
1512 # unbundle assumes local user cannot lock remote repo (new ssh
1513 # servers, http servers).
1513 # servers, http servers).
1514
1514
1515 self.checkpush(force, revs)
1515 self.checkpush(force, revs)
1516 lock = None
1516 lock = None
1517 unbundle = remote.capable('unbundle')
1517 unbundle = remote.capable('unbundle')
1518 if not unbundle:
1518 if not unbundle:
1519 lock = remote.lock()
1519 lock = remote.lock()
1520 try:
1520 try:
1521 cg, remote_heads = discovery.prepush(self, remote, force, revs,
1521 cg, remote_heads = discovery.prepush(self, remote, force, revs,
1522 newbranch)
1522 newbranch)
1523 ret = remote_heads
1523 ret = remote_heads
1524 if cg is not None:
1524 if cg is not None:
1525 if unbundle:
1525 if unbundle:
1526 # local repo finds heads on server, finds out what
1526 # local repo finds heads on server, finds out what
1527 # revs it must push. once revs transferred, if server
1527 # revs it must push. once revs transferred, if server
1528 # finds it has different heads (someone else won
1528 # finds it has different heads (someone else won
1529 # commit/push race), server aborts.
1529 # commit/push race), server aborts.
1530 if force:
1530 if force:
1531 remote_heads = ['force']
1531 remote_heads = ['force']
1532 # ssh: return remote's addchangegroup()
1532 # ssh: return remote's addchangegroup()
1533 # http: return remote's addchangegroup() or 0 for error
1533 # http: return remote's addchangegroup() or 0 for error
1534 ret = remote.unbundle(cg, remote_heads, 'push')
1534 ret = remote.unbundle(cg, remote_heads, 'push')
1535 else:
1535 else:
1536 # we return an integer indicating remote head count change
1536 # we return an integer indicating remote head count change
1537 ret = remote.addchangegroup(cg, 'push', self.url(),
1537 ret = remote.addchangegroup(cg, 'push', self.url(),
1538 lock=lock)
1538 lock=lock)
1539 finally:
1539 finally:
1540 if lock is not None:
1540 if lock is not None:
1541 lock.release()
1541 lock.release()
1542
1542
1543 self.ui.debug("checking for updated bookmarks\n")
1543 self.ui.debug("checking for updated bookmarks\n")
1544 rb = remote.listkeys('bookmarks')
1544 rb = remote.listkeys('bookmarks')
1545 for k in rb.keys():
1545 for k in rb.keys():
1546 if k in self._bookmarks:
1546 if k in self._bookmarks:
1547 nr, nl = rb[k], hex(self._bookmarks[k])
1547 nr, nl = rb[k], hex(self._bookmarks[k])
1548 if nr in self:
1548 if nr in self:
1549 cr = self[nr]
1549 cr = self[nr]
1550 cl = self[nl]
1550 cl = self[nl]
1551 if cl in cr.descendants():
1551 if cl in cr.descendants():
1552 r = remote.pushkey('bookmarks', k, nr, nl)
1552 r = remote.pushkey('bookmarks', k, nr, nl)
1553 if r:
1553 if r:
1554 self.ui.status(_("updating bookmark %s\n") % k)
1554 self.ui.status(_("updating bookmark %s\n") % k)
1555 else:
1555 else:
1556 self.ui.warn(_('updating bookmark %s'
1556 self.ui.warn(_('updating bookmark %s'
1557 ' failed!\n') % k)
1557 ' failed!\n') % k)
1558
1558
1559 return ret
1559 return ret
1560
1560
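# Decoding the push() return value described in the docstring above
# (illustrative, not part of this module):
#
#   ret = repo.push(other)
#   if ret == 0:
#       print 'push failed or there was nothing to push'
#   elif ret == 1:
#       print 'pushed; remote head count unchanged (or push was refused)'
#   else:
#       print 'pushed; value follows the addchangegroup() head-count encoding'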
1561 def changegroupinfo(self, nodes, source):
1561 def changegroupinfo(self, nodes, source):
1562 if self.ui.verbose or source == 'bundle':
1562 if self.ui.verbose or source == 'bundle':
1563 self.ui.status(_("%d changesets found\n") % len(nodes))
1563 self.ui.status(_("%d changesets found\n") % len(nodes))
1564 if self.ui.debugflag:
1564 if self.ui.debugflag:
1565 self.ui.debug("list of changesets:\n")
1565 self.ui.debug("list of changesets:\n")
1566 for node in nodes:
1566 for node in nodes:
1567 self.ui.debug("%s\n" % hex(node))
1567 self.ui.debug("%s\n" % hex(node))
1568
1568
1569 def changegroupsubset(self, bases, heads, source):
1569 def changegroupsubset(self, bases, heads, source):
1570 """Compute a changegroup consisting of all the nodes that are
1570 """Compute a changegroup consisting of all the nodes that are
1571 descendants of any of the bases and ancestors of any of the heads.
1571 descendants of any of the bases and ancestors of any of the heads.
1572 Return a chunkbuffer object whose read() method will return
1572 Return a chunkbuffer object whose read() method will return
1573 successive changegroup chunks.
1573 successive changegroup chunks.
1574
1574
1575 It is fairly complex as determining which filenodes and which
1575 It is fairly complex as determining which filenodes and which
1576 manifest nodes need to be included for the changeset to be complete
1576 manifest nodes need to be included for the changeset to be complete
1577 is non-trivial.
1577 is non-trivial.
1578
1578
1579 Another wrinkle is doing the reverse, figuring out which changeset in
1579 Another wrinkle is doing the reverse, figuring out which changeset in
1580 the changegroup a particular filenode or manifestnode belongs to.
1580 the changegroup a particular filenode or manifestnode belongs to.
1581 """
1581 """
1582 cl = self.changelog
1582 cl = self.changelog
1583 if not bases:
1583 if not bases:
1584 bases = [nullid]
1584 bases = [nullid]
1585 csets, bases, heads = cl.nodesbetween(bases, heads)
1585 csets, bases, heads = cl.nodesbetween(bases, heads)
1586 # We assume that all ancestors of bases are known
1586 # We assume that all ancestors of bases are known
1587 common = set(cl.ancestors(*[cl.rev(n) for n in bases]))
1587 common = set(cl.ancestors(*[cl.rev(n) for n in bases]))
1588 return self._changegroupsubset(common, csets, heads, source)
1588 return self._changegroupsubset(common, csets, heads, source)
1589
1589
1590 def getbundle(self, source, heads=None, common=None):
1590 def getbundle(self, source, heads=None, common=None):
1591 """Like changegroupsubset, but returns the set difference between the
1591 """Like changegroupsubset, but returns the set difference between the
1592 ancestors of heads and the ancestors common.
1592 ancestors of heads and the ancestors common.
1593
1593
1594 If heads is None, use the local heads. If common is None, use [nullid].
1594 If heads is None, use the local heads. If common is None, use [nullid].
1595
1595
1596 The nodes in common might not all be known locally due to the way the
1596 The nodes in common might not all be known locally due to the way the
1597 current discovery protocol works.
1597 current discovery protocol works.
1598 """
1598 """
1599 cl = self.changelog
1599 cl = self.changelog
1600 if common:
1600 if common:
1601 nm = cl.nodemap
1601 nm = cl.nodemap
1602 common = [n for n in common if n in nm]
1602 common = [n for n in common if n in nm]
1603 else:
1603 else:
1604 common = [nullid]
1604 common = [nullid]
1605 if not heads:
1605 if not heads:
1606 heads = cl.heads()
1606 heads = cl.heads()
1607 common, missing = cl.findcommonmissing(common, heads)
1607 common, missing = cl.findcommonmissing(common, heads)
1608 if not missing:
1608 if not missing:
1609 return None
1609 return None
1610 return self._changegroupsubset(common, missing, heads, source)
1610 return self._changegroupsubset(common, missing, heads, source)
1611
1611
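# In set terms, getbundle() backs the ``getbundle`` wire command used by
# pull() above: it bundles the ancestors of ``heads`` that are not ancestors
# of ``common`` and returns None when that set is empty.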
1612 def _changegroupsubset(self, commonrevs, csets, heads, source):
1612 def _changegroupsubset(self, commonrevs, csets, heads, source):
1613
1613
1614 cl = self.changelog
1614 cl = self.changelog
1615 mf = self.manifest
1615 mf = self.manifest
1616 mfs = {} # needed manifests
1616 mfs = {} # needed manifests
1617 fnodes = {} # needed file nodes
1617 fnodes = {} # needed file nodes
1618 changedfiles = set()
1618 changedfiles = set()
1619 fstate = ['', {}]
1619 fstate = ['', {}]
1620 count = [0]
1620 count = [0]
1621
1621
1622 # can we go through the fast path?
1622 # can we go through the fast path?
1623 heads.sort()
1623 heads.sort()
1624 if heads == sorted(self.heads()):
1624 if heads == sorted(self.heads()):
1625 return self._changegroup(csets, source)
1625 return self._changegroup(csets, source)
1626
1626
1627 # slow path
1627 # slow path
1628 self.hook('preoutgoing', throw=True, source=source)
1628 self.hook('preoutgoing', throw=True, source=source)
1629 self.changegroupinfo(csets, source)
1629 self.changegroupinfo(csets, source)
1630
1630
1631 # filter any nodes that claim to be part of the known set
1631 # filter any nodes that claim to be part of the known set
1632 def prune(revlog, missing):
1632 def prune(revlog, missing):
1633 return [n for n in missing
1633 return [n for n in missing
1634 if revlog.linkrev(revlog.rev(n)) not in commonrevs]
1634 if revlog.linkrev(revlog.rev(n)) not in commonrevs]
1635
1635
1636 def lookup(revlog, x):
1636 def lookup(revlog, x):
1637 if revlog == cl:
1637 if revlog == cl:
1638 c = cl.read(x)
1638 c = cl.read(x)
1639 changedfiles.update(c[3])
1639 changedfiles.update(c[3])
1640 mfs.setdefault(c[0], x)
1640 mfs.setdefault(c[0], x)
1641 count[0] += 1
1641 count[0] += 1
1642 self.ui.progress(_('bundling'), count[0],
1642 self.ui.progress(_('bundling'), count[0],
1643 unit=_('changesets'), total=len(csets))
1643 unit=_('changesets'), total=len(csets))
1644 return x
1644 return x
1645 elif revlog == mf:
1645 elif revlog == mf:
1646 clnode = mfs[x]
1646 clnode = mfs[x]
1647 mdata = mf.readfast(x)
1647 mdata = mf.readfast(x)
1648 for f in changedfiles:
1648 for f in changedfiles:
1649 if f in mdata:
1649 if f in mdata:
1650 fnodes.setdefault(f, {}).setdefault(mdata[f], clnode)
1650 fnodes.setdefault(f, {}).setdefault(mdata[f], clnode)
1651 count[0] += 1
1651 count[0] += 1
1652 self.ui.progress(_('bundling'), count[0],
1652 self.ui.progress(_('bundling'), count[0],
1653 unit=_('manifests'), total=len(mfs))
1653 unit=_('manifests'), total=len(mfs))
1654 return mfs[x]
1654 return mfs[x]
1655 else:
1655 else:
1656 self.ui.progress(
1656 self.ui.progress(
1657 _('bundling'), count[0], item=fstate[0],
1657 _('bundling'), count[0], item=fstate[0],
1658 unit=_('files'), total=len(changedfiles))
1658 unit=_('files'), total=len(changedfiles))
1659 return fstate[1][x]
1659 return fstate[1][x]
1660
1660
1661 bundler = changegroup.bundle10(lookup)
1661 bundler = changegroup.bundle10(lookup)
1662 reorder = self.ui.config('bundle', 'reorder', 'auto')
1662 reorder = self.ui.config('bundle', 'reorder', 'auto')
1663 if reorder == 'auto':
1663 if reorder == 'auto':
1664 reorder = None
1664 reorder = None
1665 else:
1665 else:
1666 reorder = util.parsebool(reorder)
1666 reorder = util.parsebool(reorder)
1667
1667
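# The reorder knob read above comes from the hgrc; 'auto' (the default) leaves
# the decision to the revlog layer, while an explicit boolean forces it on or
# off via util.parsebool(). Example configuration (illustrative):
#
#   [bundle]
#   reorder = auto        ; or "true" / "false"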
1668 def gengroup():
1668 def gengroup():
1669 # Create a changenode group generator that will call our functions
1669 # Create a changenode group generator that will call our functions
1670 # back to lookup the owning changenode and collect information.
1670 # back to lookup the owning changenode and collect information.
1671 for chunk in cl.group(csets, bundler, reorder=reorder):
1671 for chunk in cl.group(csets, bundler, reorder=reorder):
1672 yield chunk
1672 yield chunk
1673 self.ui.progress(_('bundling'), None)
1673 self.ui.progress(_('bundling'), None)
1674
1674
1675 # Create a generator for the manifestnodes that calls our lookup
1675 # Create a generator for the manifestnodes that calls our lookup
1676 # and data collection functions back.
1676 # and data collection functions back.
1677 count[0] = 0
1677 count[0] = 0
1678 for chunk in mf.group(prune(mf, mfs), bundler, reorder=reorder):
1678 for chunk in mf.group(prune(mf, mfs), bundler, reorder=reorder):
1679 yield chunk
1679 yield chunk
1680 self.ui.progress(_('bundling'), None)
1680 self.ui.progress(_('bundling'), None)
1681
1681
1682 mfs.clear()
1682 mfs.clear()
1683
1683
1684 # Go through all our files in order sorted by name.
1684 # Go through all our files in order sorted by name.
1685 count[0] = 0
1685 count[0] = 0
1686 for fname in sorted(changedfiles):
1686 for fname in sorted(changedfiles):
1687 filerevlog = self.file(fname)
1687 filerevlog = self.file(fname)
1688 if not len(filerevlog):
1688 if not len(filerevlog):
1689 raise util.Abort(_("empty or missing revlog for %s") % fname)
1689 raise util.Abort(_("empty or missing revlog for %s") % fname)
1690 fstate[0] = fname
1690 fstate[0] = fname
1691 fstate[1] = fnodes.pop(fname, {})
1691 fstate[1] = fnodes.pop(fname, {})
1692
1692
1693 nodelist = prune(filerevlog, fstate[1])
1693 nodelist = prune(filerevlog, fstate[1])
1694 if nodelist:
1694 if nodelist:
1695 count[0] += 1
1695 count[0] += 1
1696 yield bundler.fileheader(fname)
1696 yield bundler.fileheader(fname)
1697 for chunk in filerevlog.group(nodelist, bundler, reorder):
1697 for chunk in filerevlog.group(nodelist, bundler, reorder):
1698 yield chunk
1698 yield chunk
1699
1699
1700 # Signal that no more groups are left.
1700 # Signal that no more groups are left.
1701 yield bundler.close()
1701 yield bundler.close()
1702 self.ui.progress(_('bundling'), None)
1702 self.ui.progress(_('bundling'), None)
1703
1703
1704 if csets:
1704 if csets:
1705 self.hook('outgoing', node=hex(csets[0]), source=source)
1705 self.hook('outgoing', node=hex(csets[0]), source=source)
1706
1706
1707 return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
1707 return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
1708
1708
1709 def changegroup(self, basenodes, source):
1709 def changegroup(self, basenodes, source):
1710 # to avoid a race we use changegroupsubset() (issue1320)
1710 # to avoid a race we use changegroupsubset() (issue1320)
1711 return self.changegroupsubset(basenodes, self.heads(), source)
1711 return self.changegroupsubset(basenodes, self.heads(), source)
1712
1712
1713 def _changegroup(self, nodes, source):
1713 def _changegroup(self, nodes, source):
1714 """Compute the changegroup of all nodes that we have that a recipient
1714 """Compute the changegroup of all nodes that we have that a recipient
1715 doesn't. Return a chunkbuffer object whose read() method will return
1715 doesn't. Return a chunkbuffer object whose read() method will return
1716 successive changegroup chunks.
1716 successive changegroup chunks.
1717
1717
1718 This is much easier than the previous function as we can assume that
1718 This is much easier than the previous function as we can assume that
1719 the recipient has any changenode we aren't sending them.
1719 the recipient has any changenode we aren't sending them.
1720
1720
1721 nodes is the set of nodes to send"""
1721 nodes is the set of nodes to send"""
1722
1722
1723 cl = self.changelog
1723 cl = self.changelog
1724 mf = self.manifest
1724 mf = self.manifest
1725 mfs = {}
1725 mfs = {}
1726 changedfiles = set()
1726 changedfiles = set()
1727 fstate = ['']
1727 fstate = ['']
1728 count = [0]
1728 count = [0]
1729
1729
1730 self.hook('preoutgoing', throw=True, source=source)
1730 self.hook('preoutgoing', throw=True, source=source)
1731 self.changegroupinfo(nodes, source)
1731 self.changegroupinfo(nodes, source)
1732
1732
1733 revset = set([cl.rev(n) for n in nodes])
1733 revset = set([cl.rev(n) for n in nodes])
1734
1734
1735 def gennodelst(log):
1735 def gennodelst(log):
1736 return [log.node(r) for r in log if log.linkrev(r) in revset]
1736 return [log.node(r) for r in log if log.linkrev(r) in revset]
1737
1737
1738 def lookup(revlog, x):
1738 def lookup(revlog, x):
1739 if revlog == cl:
1739 if revlog == cl:
1740 c = cl.read(x)
1740 c = cl.read(x)
1741 changedfiles.update(c[3])
1741 changedfiles.update(c[3])
1742 mfs.setdefault(c[0], x)
1742 mfs.setdefault(c[0], x)
1743 count[0] += 1
1743 count[0] += 1
1744 self.ui.progress(_('bundling'), count[0],
1744 self.ui.progress(_('bundling'), count[0],
1745 unit=_('changesets'), total=len(nodes))
1745 unit=_('changesets'), total=len(nodes))
1746 return x
1746 return x
1747 elif revlog == mf:
1747 elif revlog == mf:
1748 count[0] += 1
1748 count[0] += 1
1749 self.ui.progress(_('bundling'), count[0],
1749 self.ui.progress(_('bundling'), count[0],
1750 unit=_('manifests'), total=len(mfs))
1750 unit=_('manifests'), total=len(mfs))
1751 return cl.node(revlog.linkrev(revlog.rev(x)))
1751 return cl.node(revlog.linkrev(revlog.rev(x)))
1752 else:
1752 else:
1753 self.ui.progress(
1753 self.ui.progress(
1754 _('bundling'), count[0], item=fstate[0],
1754 _('bundling'), count[0], item=fstate[0],
1755 total=len(changedfiles), unit=_('files'))
1755 total=len(changedfiles), unit=_('files'))
1756 return cl.node(revlog.linkrev(revlog.rev(x)))
1756 return cl.node(revlog.linkrev(revlog.rev(x)))
1757
1757
1758 bundler = changegroup.bundle10(lookup)
1758 bundler = changegroup.bundle10(lookup)
1759 reorder = self.ui.config('bundle', 'reorder', 'auto')
1759 reorder = self.ui.config('bundle', 'reorder', 'auto')
1760 if reorder == 'auto':
1760 if reorder == 'auto':
1761 reorder = None
1761 reorder = None
1762 else:
1762 else:
1763 reorder = util.parsebool(reorder)
1763 reorder = util.parsebool(reorder)
1764
1764
1765 def gengroup():
1765 def gengroup():
1766 '''yield a sequence of changegroup chunks (strings)'''
1766 '''yield a sequence of changegroup chunks (strings)'''
1767 # construct a list of all changed files
1767 # construct a list of all changed files
1768
1768
1769 for chunk in cl.group(nodes, bundler, reorder=reorder):
1769 for chunk in cl.group(nodes, bundler, reorder=reorder):
1770 yield chunk
1770 yield chunk
1771 self.ui.progress(_('bundling'), None)
1771 self.ui.progress(_('bundling'), None)
1772
1772
1773 count[0] = 0
1773 count[0] = 0
1774 for chunk in mf.group(gennodelst(mf), bundler, reorder=reorder):
1774 for chunk in mf.group(gennodelst(mf), bundler, reorder=reorder):
1775 yield chunk
1775 yield chunk
1776 self.ui.progress(_('bundling'), None)
1776 self.ui.progress(_('bundling'), None)
1777
1777
1778 count[0] = 0
1778 count[0] = 0
1779 for fname in sorted(changedfiles):
1779 for fname in sorted(changedfiles):
1780 filerevlog = self.file(fname)
1780 filerevlog = self.file(fname)
1781 if not len(filerevlog):
1781 if not len(filerevlog):
1782 raise util.Abort(_("empty or missing revlog for %s") % fname)
1782 raise util.Abort(_("empty or missing revlog for %s") % fname)
1783 fstate[0] = fname
1783 fstate[0] = fname
1784 nodelist = gennodelst(filerevlog)
1784 nodelist = gennodelst(filerevlog)
1785 if nodelist:
1785 if nodelist:
1786 count[0] += 1
1786 count[0] += 1
1787 yield bundler.fileheader(fname)
1787 yield bundler.fileheader(fname)
1788 for chunk in filerevlog.group(nodelist, bundler, reorder):
1788 for chunk in filerevlog.group(nodelist, bundler, reorder):
1789 yield chunk
1789 yield chunk
1790 yield bundler.close()
1790 yield bundler.close()
1791 self.ui.progress(_('bundling'), None)
1791 self.ui.progress(_('bundling'), None)
1792
1792
1793 if nodes:
1793 if nodes:
1794 self.hook('outgoing', node=hex(nodes[0]), source=source)
1794 self.hook('outgoing', node=hex(nodes[0]), source=source)
1795
1795
1796 return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
1796 return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
1797
1797
1798 def addchangegroup(self, source, srctype, url, emptyok=False, lock=None):
1798 def addchangegroup(self, source, srctype, url, emptyok=False, lock=None):
1799 """Add the changegroup returned by source.read() to this repo.
1799 """Add the changegroup returned by source.read() to this repo.
1800 srctype is a string like 'push', 'pull', or 'unbundle'. url is
1800 srctype is a string like 'push', 'pull', or 'unbundle'. url is
1801 the URL of the repo where this changegroup is coming from.
1801 the URL of the repo where this changegroup is coming from.
1802 If lock is not None, the function takes ownership of the lock
1802 If lock is not None, the function takes ownership of the lock
1803 and releases it after the changegroup is added.
1803 and releases it after the changegroup is added.
1804
1804
1805 Return an integer summarizing the change to this repo:
1805 Return an integer summarizing the change to this repo:
1806 - nothing changed or no source: 0
1806 - nothing changed or no source: 0
1807 - more heads than before: 1+added heads (2..n)
1807 - more heads than before: 1+added heads (2..n)
1808 - fewer heads than before: -1-removed heads (-2..-n)
1808 - fewer heads than before: -1-removed heads (-2..-n)
1809 - number of heads stays the same: 1
1809 - number of heads stays the same: 1
1810 """
1810 """
1811 def csmap(x):
1811 def csmap(x):
1812 self.ui.debug("add changeset %s\n" % short(x))
1812 self.ui.debug("add changeset %s\n" % short(x))
1813 return len(cl)
1813 return len(cl)
1814
1814
1815 def revmap(x):
1815 def revmap(x):
1816 return cl.rev(x)
1816 return cl.rev(x)
1817
1817
1818 if not source:
1818 if not source:
1819 return 0
1819 return 0
1820
1820
1821 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1821 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1822
1822
1823 changesets = files = revisions = 0
1823 changesets = files = revisions = 0
1824 efiles = set()
1824 efiles = set()
1825
1825
1826 # write changelog data to temp files so concurrent readers will not see
1826 # write changelog data to temp files so concurrent readers will not see
1827 # an inconsistent view
1827 # an inconsistent view
1828 cl = self.changelog
1828 cl = self.changelog
1829 cl.delayupdate()
1829 cl.delayupdate()
1830 oldheads = cl.heads()
1830 oldheads = cl.heads()
1831
1831
1832 tr = self.transaction("\n".join([srctype, util.hidepassword(url)]))
1832 tr = self.transaction("\n".join([srctype, util.hidepassword(url)]))
1833 try:
1833 try:
1834 trp = weakref.proxy(tr)
1834 trp = weakref.proxy(tr)
1835 # pull off the changeset group
1835 # pull off the changeset group
1836 self.ui.status(_("adding changesets\n"))
1836 self.ui.status(_("adding changesets\n"))
1837 clstart = len(cl)
1837 clstart = len(cl)
1838 class prog(object):
1838 class prog(object):
1839 step = _('changesets')
1839 step = _('changesets')
1840 count = 1
1840 count = 1
1841 ui = self.ui
1841 ui = self.ui
1842 total = None
1842 total = None
1843 def __call__(self):
1843 def __call__(self):
1844 self.ui.progress(self.step, self.count, unit=_('chunks'),
1844 self.ui.progress(self.step, self.count, unit=_('chunks'),
1845 total=self.total)
1845 total=self.total)
1846 self.count += 1
1846 self.count += 1
1847 pr = prog()
1847 pr = prog()
1848 source.callback = pr
1848 source.callback = pr
1849
1849
1850 source.changelogheader()
1850 source.changelogheader()
1851 if (cl.addgroup(source, csmap, trp) is None
1851 if (cl.addgroup(source, csmap, trp) is None
1852 and not emptyok):
1852 and not emptyok):
1853 raise util.Abort(_("received changelog group is empty"))
1853 raise util.Abort(_("received changelog group is empty"))
1854 clend = len(cl)
1854 clend = len(cl)
1855 changesets = clend - clstart
1855 changesets = clend - clstart
1856 for c in xrange(clstart, clend):
1856 for c in xrange(clstart, clend):
1857 efiles.update(self[c].files())
1857 efiles.update(self[c].files())
1858 efiles = len(efiles)
1858 efiles = len(efiles)
1859 self.ui.progress(_('changesets'), None)
1859 self.ui.progress(_('changesets'), None)
1860
1860
1861 # pull off the manifest group
1861 # pull off the manifest group
1862 self.ui.status(_("adding manifests\n"))
1862 self.ui.status(_("adding manifests\n"))
1863 pr.step = _('manifests')
1863 pr.step = _('manifests')
1864 pr.count = 1
1864 pr.count = 1
1865 pr.total = changesets # manifests <= changesets
1865 pr.total = changesets # manifests <= changesets
1866 # no need to check for empty manifest group here:
1866 # no need to check for empty manifest group here:
1867 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1867 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1868 # no new manifest will be created and the manifest group will
1868 # no new manifest will be created and the manifest group will
1869 # be empty during the pull
1869 # be empty during the pull
1870 source.manifestheader()
1870 source.manifestheader()
1871 self.manifest.addgroup(source, revmap, trp)
1871 self.manifest.addgroup(source, revmap, trp)
1872 self.ui.progress(_('manifests'), None)
1872 self.ui.progress(_('manifests'), None)
1873
1873
1874 needfiles = {}
1874 needfiles = {}
1875 if self.ui.configbool('server', 'validate', default=False):
1875 if self.ui.configbool('server', 'validate', default=False):
1876 # validate incoming csets have their manifests
1876 # validate incoming csets have their manifests
1877 for cset in xrange(clstart, clend):
1877 for cset in xrange(clstart, clend):
1878 mfest = self.changelog.read(self.changelog.node(cset))[0]
1878 mfest = self.changelog.read(self.changelog.node(cset))[0]
1879 mfest = self.manifest.readdelta(mfest)
1879 mfest = self.manifest.readdelta(mfest)
1880 # store file nodes we must see
1880 # store file nodes we must see
1881 for f, n in mfest.iteritems():
1881 for f, n in mfest.iteritems():
1882 needfiles.setdefault(f, set()).add(n)
1882 needfiles.setdefault(f, set()).add(n)
1883
1883
1884 # process the files
1884 # process the files
1885 self.ui.status(_("adding file changes\n"))
1885 self.ui.status(_("adding file changes\n"))
1886 pr.step = _('files')
1886 pr.step = _('files')
1887 pr.count = 1
1887 pr.count = 1
1888 pr.total = efiles
1888 pr.total = efiles
1889 source.callback = None
1889 source.callback = None
1890
1890
1891 while True:
1891 while True:
1892 chunkdata = source.filelogheader()
1892 chunkdata = source.filelogheader()
1893 if not chunkdata:
1893 if not chunkdata:
1894 break
1894 break
1895 f = chunkdata["filename"]
1895 f = chunkdata["filename"]
1896 self.ui.debug("adding %s revisions\n" % f)
1896 self.ui.debug("adding %s revisions\n" % f)
1897 pr()
1897 pr()
1898 fl = self.file(f)
1898 fl = self.file(f)
1899 o = len(fl)
1899 o = len(fl)
1900 if fl.addgroup(source, revmap, trp) is None:
1900 if fl.addgroup(source, revmap, trp) is None:
1901 raise util.Abort(_("received file revlog group is empty"))
1901 raise util.Abort(_("received file revlog group is empty"))
1902 revisions += len(fl) - o
1902 revisions += len(fl) - o
1903 files += 1
1903 files += 1
1904 if f in needfiles:
1904 if f in needfiles:
1905 needs = needfiles[f]
1905 needs = needfiles[f]
1906 for new in xrange(o, len(fl)):
1906 for new in xrange(o, len(fl)):
1907 n = fl.node(new)
1907 n = fl.node(new)
1908 if n in needs:
1908 if n in needs:
1909 needs.remove(n)
1909 needs.remove(n)
1910 if not needs:
1910 if not needs:
1911 del needfiles[f]
1911 del needfiles[f]
1912 self.ui.progress(_('files'), None)
1912 self.ui.progress(_('files'), None)
1913
1913
1914 for f, needs in needfiles.iteritems():
1914 for f, needs in needfiles.iteritems():
1915 fl = self.file(f)
1915 fl = self.file(f)
1916 for n in needs:
1916 for n in needs:
1917 try:
1917 try:
1918 fl.rev(n)
1918 fl.rev(n)
1919 except error.LookupError:
1919 except error.LookupError:
1920 raise util.Abort(
1920 raise util.Abort(
1921 _('missing file data for %s:%s - run hg verify') %
1921 _('missing file data for %s:%s - run hg verify') %
1922 (f, hex(n)))
1922 (f, hex(n)))
1923
1923
1924 dh = 0
1924 dh = 0
1925 if oldheads:
1925 if oldheads:
1926 heads = cl.heads()
1926 heads = cl.heads()
1927 dh = len(heads) - len(oldheads)
1927 dh = len(heads) - len(oldheads)
1928 for h in heads:
1928 for h in heads:
1929 if h not in oldheads and 'close' in self[h].extra():
1929 if h not in oldheads and 'close' in self[h].extra():
1930 dh -= 1
1930 dh -= 1
1931 htext = ""
1931 htext = ""
1932 if dh:
1932 if dh:
1933 htext = _(" (%+d heads)") % dh
1933 htext = _(" (%+d heads)") % dh
1934
1934
1935 self.ui.status(_("added %d changesets"
1935 self.ui.status(_("added %d changesets"
1936 " with %d changes to %d files%s\n")
1936 " with %d changes to %d files%s\n")
1937 % (changesets, revisions, files, htext))
1937 % (changesets, revisions, files, htext))
1938
1938
1939 if changesets > 0:
1939 if changesets > 0:
1940 p = lambda: cl.writepending() and self.root or ""
1940 p = lambda: cl.writepending() and self.root or ""
1941 self.hook('pretxnchangegroup', throw=True,
1941 self.hook('pretxnchangegroup', throw=True,
1942 node=hex(cl.node(clstart)), source=srctype,
1942 node=hex(cl.node(clstart)), source=srctype,
1943 url=url, pending=p)
1943 url=url, pending=p)
1944
1944
1945 # make changelog see real files again
1945 # make changelog see real files again
1946 cl.finalize(trp)
1946 cl.finalize(trp)
1947
1947
1948 tr.close()
1948 tr.close()
1949 finally:
1949 finally:
1950 tr.release()
1950 tr.release()
1951 if lock:
1951 if lock:
1952 lock.release()
1952 lock.release()
1953
1953
1954 if changesets > 0:
1954 if changesets > 0:
1955 # forcefully update the on-disk branch cache
1955 # forcefully update the on-disk branch cache
1956 self.ui.debug("updating the branch cache\n")
1956 self.ui.debug("updating the branch cache\n")
1957 self.updatebranchcache()
1957 self.updatebranchcache()
1958 self.hook("changegroup", node=hex(cl.node(clstart)),
1958 self.hook("changegroup", node=hex(cl.node(clstart)),
1959 source=srctype, url=url)
1959 source=srctype, url=url)
1960
1960
1961 for i in xrange(clstart, clend):
1961 for i in xrange(clstart, clend):
1962 self.hook("incoming", node=hex(cl.node(i)),
1962 self.hook("incoming", node=hex(cl.node(i)),
1963 source=srctype, url=url)
1963 source=srctype, url=url)
1964
1964
1965 # never return 0 here:
1965 # never return 0 here:
1966 if dh < 0:
1966 if dh < 0:
1967 return dh - 1
1967 return dh - 1
1968 else:
1968 else:
1969 return dh + 1
1969 return dh + 1
1970
1970
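# The return value encodes the head-count change and is never 0 on success:
# ret == dh + 1 when dh >= 0, and ret == dh - 1 when dh < 0. An illustrative
# decoder (not part of this module):
#
#   def headcountdelta(ret):
#       if ret > 0:
#           return ret - 1    # heads added (0 means the count was unchanged)
#       return ret + 1        # ret <= -2: heads removed; ret == 0 is an error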
1971 def stream_in(self, remote, requirements):
1971 def stream_in(self, remote, requirements):
1972 lock = self.lock()
1972 lock = self.lock()
1973 try:
1973 try:
1974 fp = remote.stream_out()
1974 fp = remote.stream_out()
1975 l = fp.readline()
1975 l = fp.readline()
1976 try:
1976 try:
1977 resp = int(l)
1977 resp = int(l)
1978 except ValueError:
1978 except ValueError:
1979 raise error.ResponseError(
1979 raise error.ResponseError(
1980 _('Unexpected response from remote server:'), l)
1980 _('Unexpected response from remote server:'), l)
1981 if resp == 1:
1981 if resp == 1:
1982 raise util.Abort(_('operation forbidden by server'))
1982 raise util.Abort(_('operation forbidden by server'))
1983 elif resp == 2:
1983 elif resp == 2:
1984 raise util.Abort(_('locking the remote repository failed'))
1984 raise util.Abort(_('locking the remote repository failed'))
1985 elif resp != 0:
1985 elif resp != 0:
1986 raise util.Abort(_('the server sent an unknown error code'))
1986 raise util.Abort(_('the server sent an unknown error code'))
1987 self.ui.status(_('streaming all changes\n'))
1987 self.ui.status(_('streaming all changes\n'))
1988 l = fp.readline()
1988 l = fp.readline()
1989 try:
1989 try:
1990 total_files, total_bytes = map(int, l.split(' ', 1))
1990 total_files, total_bytes = map(int, l.split(' ', 1))
1991 except (ValueError, TypeError):
1991 except (ValueError, TypeError):
1992 raise error.ResponseError(
1992 raise error.ResponseError(
1993 _('Unexpected response from remote server:'), l)
1993 _('Unexpected response from remote server:'), l)
1994 self.ui.status(_('%d files to transfer, %s of data\n') %
1994 self.ui.status(_('%d files to transfer, %s of data\n') %
1995 (total_files, util.bytecount(total_bytes)))
1995 (total_files, util.bytecount(total_bytes)))
1996 start = time.time()
1996 start = time.time()
1997 for i in xrange(total_files):
1997 for i in xrange(total_files):
1998 # XXX doesn't support '\n' or '\r' in filenames
1998 # XXX doesn't support '\n' or '\r' in filenames
1999 l = fp.readline()
1999 l = fp.readline()
2000 try:
2000 try:
2001 name, size = l.split('\0', 1)
2001 name, size = l.split('\0', 1)
2002 size = int(size)
2002 size = int(size)
2003 except (ValueError, TypeError):
2003 except (ValueError, TypeError):
2004 raise error.ResponseError(
2004 raise error.ResponseError(
2005 _('Unexpected response from remote server:'), l)
2005 _('Unexpected response from remote server:'), l)
2006 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
2006 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
2007 # for backwards compat, name was partially encoded
2007 # for backwards compat, name was partially encoded
2008 ofp = self.sopener(store.decodedir(name), 'w')
2008 ofp = self.sopener(store.decodedir(name), 'w')
2009 for chunk in util.filechunkiter(fp, limit=size):
2009 for chunk in util.filechunkiter(fp, limit=size):
2010 ofp.write(chunk)
2010 ofp.write(chunk)
2011 ofp.close()
2011 ofp.close()
2012 elapsed = time.time() - start
2012 elapsed = time.time() - start
2013 if elapsed <= 0:
2013 if elapsed <= 0:
2014 elapsed = 0.001
2014 elapsed = 0.001
2015 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2015 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2016 (util.bytecount(total_bytes), elapsed,
2016 (util.bytecount(total_bytes), elapsed,
2017 util.bytecount(total_bytes / elapsed)))
2017 util.bytecount(total_bytes / elapsed)))
2018
2018
2019 # new requirements = old non-format requirements + new format-related
2019 # new requirements = old non-format requirements + new format-related
2020 # requirements from the streamed-in repository
2020 # requirements from the streamed-in repository
2021 requirements.update(set(self.requirements) - self.supportedformats)
2021 requirements.update(set(self.requirements) - self.supportedformats)
2022 self._applyrequirements(requirements)
2022 self._applyrequirements(requirements)
2023 self._writerequirements()
2023 self._writerequirements()
2024
2024
2025 self.invalidate()
2025 self.invalidate()
2026 return len(self.heads()) + 1
2026 return len(self.heads()) + 1
2027 finally:
2027 finally:
2028 lock.release()
2028 lock.release()
2029
2029
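# Wire format consumed by stream_in() above, shown here for reference as it
# is parsed by the code:
#
#   <status>\n                    0 = ok, 1 = forbidden, 2 = remote lock failed
#   <total_files> <total_bytes>\n
#   then, for each file:
#   <store path>\0<size>\n        followed by exactly <size> bytes of raw
#                                 revlog data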
2030 def clone(self, remote, heads=[], stream=False):
2030 def clone(self, remote, heads=[], stream=False):
2031 '''clone remote repository.
2031 '''clone remote repository.
2032
2032
2033 keyword arguments:
2033 keyword arguments:
2034 heads: list of revs to clone (forces use of pull)
2034 heads: list of revs to clone (forces use of pull)
2035 stream: use streaming clone if possible'''
2035 stream: use streaming clone if possible'''
2036
2036
2037 # now, all clients that can request uncompressed clones can
2037 # now, all clients that can request uncompressed clones can
2038 # read repo formats supported by all servers that can serve
2038 # read repo formats supported by all servers that can serve
2039 # them.
2039 # them.
2040
2040
2041 # if revlog format changes, client will have to check version
2041 # if revlog format changes, client will have to check version
2042 # and format flags on "stream" capability, and use
2042 # and format flags on "stream" capability, and use
2043 # uncompressed only if compatible.
2043 # uncompressed only if compatible.
2044
2044
2045 if stream and not heads:
2045 if stream and not heads:
2046 # 'stream' means remote revlog format is revlogv1 only
2046 # 'stream' means remote revlog format is revlogv1 only
2047 if remote.capable('stream'):
2047 if remote.capable('stream'):
2048 return self.stream_in(remote, set(('revlogv1',)))
2048 return self.stream_in(remote, set(('revlogv1',)))
2049 # otherwise, 'streamreqs' contains the remote revlog format
2049 # otherwise, 'streamreqs' contains the remote revlog format
2050 streamreqs = remote.capable('streamreqs')
2050 streamreqs = remote.capable('streamreqs')
2051 if streamreqs:
2051 if streamreqs:
2052 streamreqs = set(streamreqs.split(','))
2052 streamreqs = set(streamreqs.split(','))
2053 # if we support it, stream in and adjust our requirements
2053 # if we support it, stream in and adjust our requirements
2054 if not streamreqs - self.supportedformats:
2054 if not streamreqs - self.supportedformats:
2055 return self.stream_in(remote, streamreqs)
2055 return self.stream_in(remote, streamreqs)
2056 return self.pull(remote, heads)
2056 return self.pull(remote, heads)
2057
2057
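# A streaming clone is only attempted when no specific heads are requested;
# "hg clone --uncompressed" is the usual way stream=True reaches this method.
# When the remote advertises neither 'stream' nor a compatible 'streamreqs',
# the method falls back to a regular pull().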
2058 def pushkey(self, namespace, key, old, new):
2058 def pushkey(self, namespace, key, old, new):
2059 self.hook('prepushkey', throw=True, namespace=namespace, key=key,
2059 self.hook('prepushkey', throw=True, namespace=namespace, key=key,
2060 old=old, new=new)
2060 old=old, new=new)
2061 ret = pushkey.push(self, namespace, key, old, new)
2061 ret = pushkey.push(self, namespace, key, old, new)
2062 self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
2062 self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
2063 ret=ret)
2063 ret=ret)
2064 return ret
2064 return ret
2065
2065
2066 def listkeys(self, namespace):
2066 def listkeys(self, namespace):
2067 self.hook('prelistkeys', throw=True, namespace=namespace)
2067 self.hook('prelistkeys', throw=True, namespace=namespace)
2068 values = pushkey.list(self, namespace)
2068 values = pushkey.list(self, namespace)
2069 self.hook('listkeys', namespace=namespace, values=values)
2069 self.hook('listkeys', namespace=namespace, values=values)
2070 return values
2070 return values
2071
2071
2072 def debugwireargs(self, one, two, three=None, four=None, five=None):
2072 def debugwireargs(self, one, two, three=None, four=None, five=None):
2073 '''used to test argument passing over the wire'''
2073 '''used to test argument passing over the wire'''
2074 return "%s %s %s %s %s" % (one, two, three, four, five)
2074 return "%s %s %s %s %s" % (one, two, three, four, five)
2075
2075
2076 def savecommitmessage(self, text):
2076 def savecommitmessage(self, text):
2077 fp = self.opener('last-message.txt', 'wb')
2077 fp = self.opener('last-message.txt', 'wb')
2078 try:
2078 try:
2079 fp.write(text)
2079 fp.write(text)
2080 finally:
2080 finally:
2081 fp.close()
2081 fp.close()
2082 return self.pathto(fp.name[len(self.root)+1:])
2082 return self.pathto(fp.name[len(self.root)+1:])
2083
2083
2084 # used to avoid circular references so destructors work
2084 # used to avoid circular references so destructors work
2085 def aftertrans(files):
2085 def aftertrans(files):
2086 renamefiles = [tuple(t) for t in files]
2086 renamefiles = [tuple(t) for t in files]
2087 def a():
2087 def a():
2088 for src, dest in renamefiles:
2088 for src, dest in renamefiles:
2089 util.rename(src, dest)
2089 util.rename(src, dest)
2090 return a
2090 return a
2091
2091
2092 def undoname(fn):
2092 def undoname(fn):
2093 base, name = os.path.split(fn)
2093 base, name = os.path.split(fn)
2094 assert name.startswith('journal')
2094 assert name.startswith('journal')
2095 return os.path.join(base, name.replace('journal', 'undo', 1))
2095 return os.path.join(base, name.replace('journal', 'undo', 1))
2096
2096
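# For example (illustrative): undoname('.hg/store/journal') returns
# '.hg/store/undo', matching the journal -> undo renames scheduled through
# aftertrans() when a transaction closes successfully.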
2097 def instance(ui, path, create):
2097 def instance(ui, path, create):
2098 return localrepository(ui, util.urllocalpath(path), create)
2098 return localrepository(ui, util.urllocalpath(path), create)
2099
2099
2100 def islocal(path):
2100 def islocal(path):
2101 return True
2101 return True
@@ -1,453 +1,453 b''
1 # Copyright (C) 2004, 2005 Canonical Ltd
1 # Copyright (C) 2004, 2005 Canonical Ltd
2 #
2 #
3 # This program is free software; you can redistribute it and/or modify
3 # This program is free software; you can redistribute it and/or modify
4 # it under the terms of the GNU General Public License as published by
4 # it under the terms of the GNU General Public License as published by
5 # the Free Software Foundation; either version 2 of the License, or
5 # the Free Software Foundation; either version 2 of the License, or
6 # (at your option) any later version.
6 # (at your option) any later version.
7 #
7 #
8 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
11 # GNU General Public License for more details.
12 #
12 #
13 # You should have received a copy of the GNU General Public License
13 # You should have received a copy of the GNU General Public License
14 # along with this program; if not, write to the Free Software
14 # along with this program; if not, write to the Free Software
15 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
15 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
16
16
17 # mbp: "you know that thing where cvs gives you conflict markers?"
17 # mbp: "you know that thing where cvs gives you conflict markers?"
18 # s: "i hate that."
18 # s: "i hate that."
19
19
20 from i18n import _
20 from i18n import _
21 import scmutil, util, mdiff
21 import scmutil, util, mdiff
22 import sys, os
22 import sys, os
23
23
24 class CantReprocessAndShowBase(Exception):
24 class CantReprocessAndShowBase(Exception):
25 pass
25 pass
26
26
27 def intersect(ra, rb):
27 def intersect(ra, rb):
28 """Given two ranges return the range where they intersect or None.
28 """Given two ranges return the range where they intersect or None.
29
29
30 >>> intersect((0, 10), (0, 6))
30 >>> intersect((0, 10), (0, 6))
31 (0, 6)
31 (0, 6)
32 >>> intersect((0, 10), (5, 15))
32 >>> intersect((0, 10), (5, 15))
33 (5, 10)
33 (5, 10)
34 >>> intersect((0, 10), (10, 15))
34 >>> intersect((0, 10), (10, 15))
35 >>> intersect((0, 9), (10, 15))
35 >>> intersect((0, 9), (10, 15))
36 >>> intersect((0, 9), (7, 15))
36 >>> intersect((0, 9), (7, 15))
37 (7, 9)
37 (7, 9)
38 """
38 """
39 assert ra[0] <= ra[1]
39 assert ra[0] <= ra[1]
40 assert rb[0] <= rb[1]
40 assert rb[0] <= rb[1]
41
41
42 sa = max(ra[0], rb[0])
42 sa = max(ra[0], rb[0])
43 sb = min(ra[1], rb[1])
43 sb = min(ra[1], rb[1])
44 if sa < sb:
44 if sa < sb:
45 return sa, sb
45 return sa, sb
46 else:
46 else:
47 return None
47 return None
48
48
49 def compare_range(a, astart, aend, b, bstart, bend):
49 def compare_range(a, astart, aend, b, bstart, bend):
50 """Compare a[astart:aend] == b[bstart:bend], without slicing.
50 """Compare a[astart:aend] == b[bstart:bend], without slicing.
51 """
51 """
52 if (aend - astart) != (bend - bstart):
52 if (aend - astart) != (bend - bstart):
53 return False
53 return False
54 for ia, ib in zip(xrange(astart, aend), xrange(bstart, bend)):
54 for ia, ib in zip(xrange(astart, aend), xrange(bstart, bend)):
55 if a[ia] != b[ib]:
55 if a[ia] != b[ib]:
56 return False
56 return False
57 else:
57 else:
58 return True
58 return True
59
59
60 class Merge3Text(object):
60 class Merge3Text(object):
61 """3-way merge of texts.
61 """3-way merge of texts.
62
62
63 Given strings BASE, OTHER, THIS, tries to produce a combined text
63 Given strings BASE, OTHER, THIS, tries to produce a combined text
64 incorporating the changes from both BASE->OTHER and BASE->THIS."""
64 incorporating the changes from both BASE->OTHER and BASE->THIS."""
65 def __init__(self, basetext, atext, btext, base=None, a=None, b=None):
65 def __init__(self, basetext, atext, btext, base=None, a=None, b=None):
66 self.basetext = basetext
66 self.basetext = basetext
67 self.atext = atext
67 self.atext = atext
68 self.btext = btext
68 self.btext = btext
69 if base is None:
69 if base is None:
70 base = mdiff.splitnewlines(basetext)
70 base = mdiff.splitnewlines(basetext)
71 if a is None:
71 if a is None:
72 a = mdiff.splitnewlines(atext)
72 a = mdiff.splitnewlines(atext)
73 if b is None:
73 if b is None:
74 b = mdiff.splitnewlines(btext)
74 b = mdiff.splitnewlines(btext)
75 self.base = base
75 self.base = base
76 self.a = a
76 self.a = a
77 self.b = b
77 self.b = b
78
78
79 def merge_lines(self,
79 def merge_lines(self,
80 name_a=None,
80 name_a=None,
81 name_b=None,
81 name_b=None,
82 name_base=None,
82 name_base=None,
83 start_marker='<<<<<<<',
83 start_marker='<<<<<<<',
84 mid_marker='=======',
84 mid_marker='=======',
85 end_marker='>>>>>>>',
85 end_marker='>>>>>>>',
86 base_marker=None,
86 base_marker=None,
87 reprocess=False):
87 reprocess=False):
88 """Return merge in cvs-like form.
88 """Return merge in cvs-like form.
89 """
89 """
90 self.conflicts = False
90 self.conflicts = False
91 newline = '\n'
91 newline = '\n'
92 if len(self.a) > 0:
92 if len(self.a) > 0:
93 if self.a[0].endswith('\r\n'):
93 if self.a[0].endswith('\r\n'):
94 newline = '\r\n'
94 newline = '\r\n'
95 elif self.a[0].endswith('\r'):
95 elif self.a[0].endswith('\r'):
96 newline = '\r'
96 newline = '\r'
97 if base_marker and reprocess:
97 if base_marker and reprocess:
98 raise CantReprocessAndShowBase()
98 raise CantReprocessAndShowBase()
99 if name_a:
99 if name_a:
100 start_marker = start_marker + ' ' + name_a
100 start_marker = start_marker + ' ' + name_a
101 if name_b:
101 if name_b:
102 end_marker = end_marker + ' ' + name_b
102 end_marker = end_marker + ' ' + name_b
103 if name_base and base_marker:
103 if name_base and base_marker:
104 base_marker = base_marker + ' ' + name_base
104 base_marker = base_marker + ' ' + name_base
105 merge_regions = self.merge_regions()
105 merge_regions = self.merge_regions()
106 if reprocess is True:
106 if reprocess is True:
107 merge_regions = self.reprocess_merge_regions(merge_regions)
107 merge_regions = self.reprocess_merge_regions(merge_regions)
108 for t in merge_regions:
108 for t in merge_regions:
109 what = t[0]
109 what = t[0]
110 if what == 'unchanged':
110 if what == 'unchanged':
111 for i in range(t[1], t[2]):
111 for i in range(t[1], t[2]):
112 yield self.base[i]
112 yield self.base[i]
113 elif what == 'a' or what == 'same':
113 elif what == 'a' or what == 'same':
114 for i in range(t[1], t[2]):
114 for i in range(t[1], t[2]):
115 yield self.a[i]
115 yield self.a[i]
116 elif what == 'b':
116 elif what == 'b':
117 for i in range(t[1], t[2]):
117 for i in range(t[1], t[2]):
118 yield self.b[i]
118 yield self.b[i]
119 elif what == 'conflict':
119 elif what == 'conflict':
120 self.conflicts = True
120 self.conflicts = True
121 yield start_marker + newline
121 yield start_marker + newline
122 for i in range(t[3], t[4]):
122 for i in range(t[3], t[4]):
123 yield self.a[i]
123 yield self.a[i]
124 if base_marker is not None:
124 if base_marker is not None:
125 yield base_marker + newline
125 yield base_marker + newline
126 for i in range(t[1], t[2]):
126 for i in range(t[1], t[2]):
127 yield self.base[i]
127 yield self.base[i]
128 yield mid_marker + newline
128 yield mid_marker + newline
129 for i in range(t[5], t[6]):
129 for i in range(t[5], t[6]):
130 yield self.b[i]
130 yield self.b[i]
131 yield end_marker + newline
131 yield end_marker + newline
132 else:
132 else:
133 raise ValueError(what)
133 raise ValueError(what)
134
134
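The merge_lines() generator above is easiest to follow on a concrete input. The sketch below is illustrative only and is not part of this changeset; it assumes the file is importable as mercurial.simplemerge and uses made-up three-line texts.

# Illustrative sketch (hypothetical input, not part of this diff):
# a one-line conflict rendered with cvs-style markers.
from mercurial.simplemerge import Merge3Text

base  = 'apple\nbanana\ncherry\n'
local = 'apple\nblueberry\ncherry\n'    # BASE->THIS edit
other = 'apple\nblackberry\ncherry\n'   # BASE->OTHER edit

m3 = Merge3Text(base, local, other)
print ''.join(m3.merge_lines(name_a='local', name_b='other'))
# apple
# <<<<<<< local
# blueberry
# =======
# blackberry
# >>>>>>> other
# cherry
# m3.conflicts is now True, since both sides changed the same line.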
135 def merge_annotated(self):
135 def merge_annotated(self):
136 """Return merge with conflicts, showing origin of lines.
136 """Return merge with conflicts, showing origin of lines.
137
137
138 Most useful for debugging merge.
138 Most useful for debugging merge.
139 """
139 """
140 for t in self.merge_regions():
140 for t in self.merge_regions():
141 what = t[0]
141 what = t[0]
142 if what == 'unchanged':
142 if what == 'unchanged':
143 for i in range(t[1], t[2]):
143 for i in range(t[1], t[2]):
144 yield 'u | ' + self.base[i]
144 yield 'u | ' + self.base[i]
145 elif what == 'a' or what == 'same':
145 elif what == 'a' or what == 'same':
146 for i in range(t[1], t[2]):
146 for i in range(t[1], t[2]):
147 yield what[0] + ' | ' + self.a[i]
147 yield what[0] + ' | ' + self.a[i]
148 elif what == 'b':
148 elif what == 'b':
149 for i in range(t[1], t[2]):
149 for i in range(t[1], t[2]):
150 yield 'b | ' + self.b[i]
150 yield 'b | ' + self.b[i]
151 elif what == 'conflict':
151 elif what == 'conflict':
152 yield '<<<<\n'
152 yield '<<<<\n'
153 for i in range(t[3], t[4]):
153 for i in range(t[3], t[4]):
154 yield 'A | ' + self.a[i]
154 yield 'A | ' + self.a[i]
155 yield '----\n'
155 yield '----\n'
156 for i in range(t[5], t[6]):
156 for i in range(t[5], t[6]):
157 yield 'B | ' + self.b[i]
157 yield 'B | ' + self.b[i]
158 yield '>>>>\n'
158 yield '>>>>\n'
159 else:
159 else:
160 raise ValueError(what)
160 raise ValueError(what)
161
161
162 def merge_groups(self):
162 def merge_groups(self):
163 """Yield sequence of line groups. Each one is a tuple:
163 """Yield sequence of line groups. Each one is a tuple:
164
164
165 'unchanged', lines
165 'unchanged', lines
166 Lines unchanged from base
166 Lines unchanged from base
167
167
168 'a', lines
168 'a', lines
169 Lines taken from a
169 Lines taken from a
170
170
171 'same', lines
171 'same', lines
172 Lines taken from a (and equal to b)
172 Lines taken from a (and equal to b)
173
173
174 'b', lines
174 'b', lines
175 Lines taken from b
175 Lines taken from b
176
176
177 'conflict', base_lines, a_lines, b_lines
177 'conflict', base_lines, a_lines, b_lines
178 Lines from base were changed to either a or b and conflict.
178 Lines from base were changed to either a or b and conflict.
179 """
179 """
180 for t in self.merge_regions():
180 for t in self.merge_regions():
181 what = t[0]
181 what = t[0]
182 if what == 'unchanged':
182 if what == 'unchanged':
183 yield what, self.base[t[1]:t[2]]
183 yield what, self.base[t[1]:t[2]]
184 elif what == 'a' or what == 'same':
184 elif what == 'a' or what == 'same':
185 yield what, self.a[t[1]:t[2]]
185 yield what, self.a[t[1]:t[2]]
186 elif what == 'b':
186 elif what == 'b':
187 yield what, self.b[t[1]:t[2]]
187 yield what, self.b[t[1]:t[2]]
188 elif what == 'conflict':
188 elif what == 'conflict':
189 yield (what,
189 yield (what,
190 self.base[t[1]:t[2]],
190 self.base[t[1]:t[2]],
191 self.a[t[3]:t[4]],
191 self.a[t[3]:t[4]],
192 self.b[t[5]:t[6]])
192 self.b[t[5]:t[6]])
193 else:
193 else:
194 raise ValueError(what)
194 raise ValueError(what)
195
195
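Continuing the hypothetical three-line example from the earlier sketch, merge_groups() wraps the same regions as line lists rather than index tuples:

# Illustrative only (same made-up texts as above):
# list(m3.merge_groups()) ==
#   [('unchanged', ['apple\n']),
#    ('conflict', ['banana\n'], ['blueberry\n'], ['blackberry\n']),
#    ('unchanged', ['cherry\n'])]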
196 def merge_regions(self):
196 def merge_regions(self):
197 """Return sequences of matching and conflicting regions.
197 """Return sequences of matching and conflicting regions.
198
198
199 This returns tuples, where the first value says what kind we
199 This returns tuples, where the first value says what kind we
200 have:
200 have:
201
201
202 'unchanged', start, end
202 'unchanged', start, end
203 Take a region of base[start:end]
203 Take a region of base[start:end]
204
204
205 'same', astart, aend
205 'same', astart, aend
206 b and a are different from base but give the same result
206 b and a are different from base but give the same result
207
207
208 'a', start, end
208 'a', start, end
209 Non-clashing insertion from a[start:end]
209 Non-clashing insertion from a[start:end]
210
210
211 Method is as follows:
211 Method is as follows:
212
212
213 The two sequences align only on regions which match the base
213 The two sequences align only on regions which match the base
214 and both descendants. These are found by doing a two-way diff
214 and both descendants. These are found by doing a two-way diff
215 of each one against the base, and then finding the
215 of each one against the base, and then finding the
216 intersections between those regions. These "sync regions"
216 intersections between those regions. These "sync regions"
217 are by definition unchanged in both and easily dealt with.
217 are by definition unchanged in both and easily dealt with.
218
218
219 The regions in between can be in any of three cases:
219 The regions in between can be in any of three cases:
220 conflicted, changed only in a, or changed only in b.
220 conflicted, changed only in a, or changed only in b.
221 """
221 """
222
222
223 # section a[0:ia] has been disposed of, etc
223 # section a[0:ia] has been disposed of, etc
224 iz = ia = ib = 0
224 iz = ia = ib = 0
225
225
226 for zmatch, zend, amatch, aend, bmatch, bend in self.find_sync_regions():
226 for zmatch, zend, amatch, aend, bmatch, bend in self.find_sync_regions():
227 #print 'match base [%d:%d]' % (zmatch, zend)
227 #print 'match base [%d:%d]' % (zmatch, zend)
228
228
229 matchlen = zend - zmatch
229 matchlen = zend - zmatch
230 assert matchlen >= 0
230 assert matchlen >= 0
231 assert matchlen == (aend - amatch)
231 assert matchlen == (aend - amatch)
232 assert matchlen == (bend - bmatch)
232 assert matchlen == (bend - bmatch)
233
233
234 len_a = amatch - ia
234 len_a = amatch - ia
235 len_b = bmatch - ib
235 len_b = bmatch - ib
236 len_base = zmatch - iz
236 len_base = zmatch - iz
237 assert len_a >= 0
237 assert len_a >= 0
238 assert len_b >= 0
238 assert len_b >= 0
239 assert len_base >= 0
239 assert len_base >= 0
240
240
241 #print 'unmatched a=%d, b=%d' % (len_a, len_b)
241 #print 'unmatched a=%d, b=%d' % (len_a, len_b)
242
242
243 if len_a or len_b:
243 if len_a or len_b:
244 # try to avoid actually slicing the lists
244 # try to avoid actually slicing the lists
245 equal_a = compare_range(self.a, ia, amatch,
245 equal_a = compare_range(self.a, ia, amatch,
246 self.base, iz, zmatch)
246 self.base, iz, zmatch)
247 equal_b = compare_range(self.b, ib, bmatch,
247 equal_b = compare_range(self.b, ib, bmatch,
248 self.base, iz, zmatch)
248 self.base, iz, zmatch)
249 same = compare_range(self.a, ia, amatch,
249 same = compare_range(self.a, ia, amatch,
250 self.b, ib, bmatch)
250 self.b, ib, bmatch)
251
251
252 if same:
252 if same:
253 yield 'same', ia, amatch
253 yield 'same', ia, amatch
254 elif equal_a and not equal_b:
254 elif equal_a and not equal_b:
255 yield 'b', ib, bmatch
255 yield 'b', ib, bmatch
256 elif equal_b and not equal_a:
256 elif equal_b and not equal_a:
257 yield 'a', ia, amatch
257 yield 'a', ia, amatch
258 elif not equal_a and not equal_b:
258 elif not equal_a and not equal_b:
259 yield 'conflict', iz, zmatch, ia, amatch, ib, bmatch
259 yield 'conflict', iz, zmatch, ia, amatch, ib, bmatch
260 else:
260 else:
261 raise AssertionError("can't handle a=b=base but unmatched")
261 raise AssertionError("can't handle a=b=base but unmatched")
262
262
263 ia = amatch
263 ia = amatch
264 ib = bmatch
264 ib = bmatch
265 iz = zmatch
265 iz = zmatch
266
266
267 # if the same part of the base was deleted on both sides
267 # if the same part of the base was deleted on both sides
268 # that's OK, we can just skip it.
268 # that's OK, we can just skip it.
269
269
270
270
271 if matchlen > 0:
271 if matchlen > 0:
272 assert ia == amatch
272 assert ia == amatch
273 assert ib == bmatch
273 assert ib == bmatch
274 assert iz == zmatch
274 assert iz == zmatch
275
275
276 yield 'unchanged', zmatch, zend
276 yield 'unchanged', zmatch, zend
277 iz = zend
277 iz = zend
278 ia = aend
278 ia = aend
279 ib = bend
279 ib = bend
280
280
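For the same hypothetical inputs as the earlier sketch, merge_regions() yields index tuples into base, a and b; the conflict tuple carries all six offsets consumed by merge_lines() and merge_groups():

# Illustrative only (same made-up texts as above):
# list(m3.merge_regions()) ==
#   [('unchanged', 0, 1),
#    ('conflict', 1, 2, 1, 2, 1, 2),
#    ('unchanged', 2, 3)]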
281 def reprocess_merge_regions(self, merge_regions):
281 def reprocess_merge_regions(self, merge_regions):
282 """Where there are conflict regions, remove the agreed lines.
282 """Where there are conflict regions, remove the agreed lines.
283
283
284 Lines where both A and B have made the same changes are
284 Lines where both A and B have made the same changes are
285 eliminated.
285 eliminated.
286 """
286 """
287 for region in merge_regions:
287 for region in merge_regions:
288 if region[0] != "conflict":
288 if region[0] != "conflict":
289 yield region
289 yield region
290 continue
290 continue
291 type, iz, zmatch, ia, amatch, ib, bmatch = region
291 type, iz, zmatch, ia, amatch, ib, bmatch = region
292 a_region = self.a[ia:amatch]
292 a_region = self.a[ia:amatch]
293 b_region = self.b[ib:bmatch]
293 b_region = self.b[ib:bmatch]
294 matches = mdiff.get_matching_blocks(''.join(a_region),
294 matches = mdiff.get_matching_blocks(''.join(a_region),
295 ''.join(b_region))
295 ''.join(b_region))
296 next_a = ia
296 next_a = ia
297 next_b = ib
297 next_b = ib
298 for region_ia, region_ib, region_len in matches[:-1]:
298 for region_ia, region_ib, region_len in matches[:-1]:
299 region_ia += ia
299 region_ia += ia
300 region_ib += ib
300 region_ib += ib
301 reg = self.mismatch_region(next_a, region_ia, next_b,
301 reg = self.mismatch_region(next_a, region_ia, next_b,
302 region_ib)
302 region_ib)
303 if reg is not None:
303 if reg is not None:
304 yield reg
304 yield reg
305 yield 'same', region_ia, region_len + region_ia
305 yield 'same', region_ia, region_len + region_ia
306 next_a = region_ia + region_len
306 next_a = region_ia + region_len
307 next_b = region_ib + region_len
307 next_b = region_ib + region_len
308 reg = self.mismatch_region(next_a, amatch, next_b, bmatch)
308 reg = self.mismatch_region(next_a, amatch, next_b, bmatch)
309 if reg is not None:
309 if reg is not None:
310 yield reg
310 yield reg
311
311
312 def mismatch_region(next_a, region_ia, next_b, region_ib):
312 def mismatch_region(next_a, region_ia, next_b, region_ib):
313 if next_a < region_ia or next_b < region_ib:
313 if next_a < region_ia or next_b < region_ib:
314 return 'conflict', None, None, next_a, region_ia, next_b, region_ib
314 return 'conflict', None, None, next_a, region_ia, next_b, region_ib
315 mismatch_region = staticmethod(mismatch_region)
315 mismatch_region = staticmethod(mismatch_region)
316
316
317 def find_sync_regions(self):
317 def find_sync_regions(self):
318 """Return a list of sync regions, where both descendants match the base.
318 """Return a list of sync regions, where both descendants match the base.
319
319
320 Generates a list of (base1, base2, a1, a2, b1, b2). There is
320 Generates a list of (base1, base2, a1, a2, b1, b2). There is
321 always a zero-length sync region at the end of all the files.
321 always a zero-length sync region at the end of all the files.
322 """
322 """
323
323
324 ia = ib = 0
324 ia = ib = 0
325 amatches = mdiff.get_matching_blocks(self.basetext, self.atext)
325 amatches = mdiff.get_matching_blocks(self.basetext, self.atext)
326 bmatches = mdiff.get_matching_blocks(self.basetext, self.btext)
326 bmatches = mdiff.get_matching_blocks(self.basetext, self.btext)
327 len_a = len(amatches)
327 len_a = len(amatches)
328 len_b = len(bmatches)
328 len_b = len(bmatches)
329
329
330 sl = []
330 sl = []
331
331
332 while ia < len_a and ib < len_b:
332 while ia < len_a and ib < len_b:
333 abase, amatch, alen = amatches[ia]
333 abase, amatch, alen = amatches[ia]
334 bbase, bmatch, blen = bmatches[ib]
334 bbase, bmatch, blen = bmatches[ib]
335
335
336 # there is an unconflicted block at i; how long does it
336 # there is an unconflicted block at i; how long does it
337 # extend? until whichever one ends earlier.
337 # extend? until whichever one ends earlier.
338 i = intersect((abase, abase + alen), (bbase, bbase + blen))
338 i = intersect((abase, abase + alen), (bbase, bbase + blen))
339 if i:
339 if i:
340 intbase = i[0]
340 intbase = i[0]
341 intend = i[1]
341 intend = i[1]
342 intlen = intend - intbase
342 intlen = intend - intbase
343
343
344 # found a match of base[i[0], i[1]]; this may be less than
344 # found a match of base[i[0], i[1]]; this may be less than
345 # the region that matches in either one
345 # the region that matches in either one
346 assert intlen <= alen
346 assert intlen <= alen
347 assert intlen <= blen
347 assert intlen <= blen
348 assert abase <= intbase
348 assert abase <= intbase
349 assert bbase <= intbase
349 assert bbase <= intbase
350
350
351 asub = amatch + (intbase - abase)
351 asub = amatch + (intbase - abase)
352 bsub = bmatch + (intbase - bbase)
352 bsub = bmatch + (intbase - bbase)
353 aend = asub + intlen
353 aend = asub + intlen
354 bend = bsub + intlen
354 bend = bsub + intlen
355
355
356 assert self.base[intbase:intend] == self.a[asub:aend], \
356 assert self.base[intbase:intend] == self.a[asub:aend], \
357 (self.base[intbase:intend], self.a[asub:aend])
357 (self.base[intbase:intend], self.a[asub:aend])
358
358
359 assert self.base[intbase:intend] == self.b[bsub:bend]
359 assert self.base[intbase:intend] == self.b[bsub:bend]
360
360
361 sl.append((intbase, intend,
361 sl.append((intbase, intend,
362 asub, aend,
362 asub, aend,
363 bsub, bend))
363 bsub, bend))
364
364
365 # advance whichever one ends first in the base text
365 # advance whichever one ends first in the base text
366 if (abase + alen) < (bbase + blen):
366 if (abase + alen) < (bbase + blen):
367 ia += 1
367 ia += 1
368 else:
368 else:
369 ib += 1
369 ib += 1
370
370
371 intbase = len(self.base)
371 intbase = len(self.base)
372 abase = len(self.a)
372 abase = len(self.a)
373 bbase = len(self.b)
373 bbase = len(self.b)
374 sl.append((intbase, intbase, abase, abase, bbase, bbase))
374 sl.append((intbase, intbase, abase, abase, bbase, bbase))
375
375
376 return sl
376 return sl
377
377
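With the same hypothetical inputs, the sync regions are the 'apple' line, the 'cherry' line, and the zero-length sentinel that is always appended at the end of all three texts:

# Illustrative only (same made-up texts as above):
# m3.find_sync_regions() ==
#   [(0, 1, 0, 1, 0, 1),   # base/a/b all match on line 0
#    (2, 3, 2, 3, 2, 3),   # ... and on line 2
#    (3, 3, 3, 3, 3, 3)]   # zero-length terminator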
378 def find_unconflicted(self):
378 def find_unconflicted(self):
379 """Return a list of ranges in base that are not conflicted."""
379 """Return a list of ranges in base that are not conflicted."""
380 am = mdiff.get_matching_blocks(self.basetext, self.atext)
380 am = mdiff.get_matching_blocks(self.basetext, self.atext)
381 bm = mdiff.get_matching_blocks(self.basetext, self.btext)
381 bm = mdiff.get_matching_blocks(self.basetext, self.btext)
382
382
383 unc = []
383 unc = []
384
384
385 while am and bm:
385 while am and bm:
386 # there is an unconflicted block at i; how long does it
386 # there is an unconflicted block at i; how long does it
387 # extend? until whichever one ends earlier.
387 # extend? until whichever one ends earlier.
388 a1 = am[0][0]
388 a1 = am[0][0]
389 a2 = a1 + am[0][2]
389 a2 = a1 + am[0][2]
390 b1 = bm[0][0]
390 b1 = bm[0][0]
391 b2 = b1 + bm[0][2]
391 b2 = b1 + bm[0][2]
392 i = intersect((a1, a2), (b1, b2))
392 i = intersect((a1, a2), (b1, b2))
393 if i:
393 if i:
394 unc.append(i)
394 unc.append(i)
395
395
396 if a2 < b2:
396 if a2 < b2:
397 del am[0]
397 del am[0]
398 else:
398 else:
399 del bm[0]
399 del bm[0]
400
400
401 return unc
401 return unc
402
402
403 def simplemerge(ui, local, base, other, **opts):
403 def simplemerge(ui, local, base, other, **opts):
404 def readfile(filename):
404 def readfile(filename):
405 f = open(filename, "rb")
405 f = open(filename, "rb")
406 text = f.read()
406 text = f.read()
407 f.close()
407 f.close()
408 if util.binary(text):
408 if util.binary(text):
409 msg = _("%s looks like a binary file.") % filename
409 msg = _("%s looks like a binary file.") % filename
410 if not opts.get('quiet'):
410 if not opts.get('quiet'):
411 ui.warn(_('warning: %s\n') % msg)
411 ui.warn(_('warning: %s\n') % msg)
412 if not opts.get('text'):
412 if not opts.get('text'):
413 raise util.Abort(msg)
413 raise util.Abort(msg)
414 return text
414 return text
415
415
416 name_a = local
416 name_a = local
417 name_b = other
417 name_b = other
418 labels = opts.get('label', [])
418 labels = opts.get('label', [])
419 if labels:
419 if labels:
420 name_a = labels.pop(0)
420 name_a = labels.pop(0)
421 if labels:
421 if labels:
422 name_b = labels.pop(0)
422 name_b = labels.pop(0)
423 if labels:
423 if labels:
424 raise util.Abort(_("can only specify two labels."))
424 raise util.Abort(_("can only specify two labels."))
425
425
426 try:
426 try:
427 localtext = readfile(local)
427 localtext = readfile(local)
428 basetext = readfile(base)
428 basetext = readfile(base)
429 othertext = readfile(other)
429 othertext = readfile(other)
430 except util.Abort:
430 except util.Abort:
431 return 1
431 return 1
432
432
433 local = util.realpath(local)
433 local = os.path.realpath(local)
434 if not opts.get('print'):
434 if not opts.get('print'):
435 opener = scmutil.opener(os.path.dirname(local))
435 opener = scmutil.opener(os.path.dirname(local))
436 out = opener(os.path.basename(local), "w", atomictemp=True)
436 out = opener(os.path.basename(local), "w", atomictemp=True)
437 else:
437 else:
438 out = sys.stdout
438 out = sys.stdout
439
439
440 reprocess = not opts.get('no_minimal')
440 reprocess = not opts.get('no_minimal')
441
441
442 m3 = Merge3Text(basetext, localtext, othertext)
442 m3 = Merge3Text(basetext, localtext, othertext)
443 for line in m3.merge_lines(name_a=name_a, name_b=name_b,
443 for line in m3.merge_lines(name_a=name_a, name_b=name_b,
444 reprocess=reprocess):
444 reprocess=reprocess):
445 out.write(line)
445 out.write(line)
446
446
447 if not opts.get('print'):
447 if not opts.get('print'):
448 out.close()
448 out.close()
449
449
450 if m3.conflicts:
450 if m3.conflicts:
451 if not opts.get('quiet'):
451 if not opts.get('quiet'):
452 ui.warn(_("warning: conflicts during merge.\n"))
452 ui.warn(_("warning: conflicts during merge.\n"))
453 return 1
453 return 1
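simplemerge() is the file-level driver for the class above: it reads the three files, refuses binary input unless the text option is set, and either rewrites the local file atomically or, when the print option is set, writes the merged text to stdout. A minimal sketch of calling it directly, assuming hypothetical file names (this is not how the changeset itself invokes it):

# Hypothetical direct call, not part of this changeset. opts mirrors the
# command-line flags; 'print' is passed through a dict because it is a
# Python 2 keyword.
from mercurial import ui
from mercurial.simplemerge import simplemerge

opts = {'print': True, 'label': ['local', 'other']}
ret = simplemerge(ui.ui(), 'local.txt', 'base.txt', 'other.txt', **opts)
# ret is 1 when conflict markers were emitted; on a clean merge the
# function falls off the end and returns None.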
@@ -1,40 +1,40 b''
1 import os
1 import os
2 from mercurial import hg, ui, util
2 from mercurial import hg, ui
3 from mercurial.hgweb.hgwebdir_mod import hgwebdir
3 from mercurial.hgweb.hgwebdir_mod import hgwebdir
4
4
5 os.mkdir('webdir')
5 os.mkdir('webdir')
6 os.chdir('webdir')
6 os.chdir('webdir')
7
7
8 webdir = util.realpath('.')
8 webdir = os.path.realpath('.')
9
9
10 u = ui.ui()
10 u = ui.ui()
11 hg.repository(u, 'a', create=1)
11 hg.repository(u, 'a', create=1)
12 hg.repository(u, 'b', create=1)
12 hg.repository(u, 'b', create=1)
13 os.chdir('b')
13 os.chdir('b')
14 hg.repository(u, 'd', create=1)
14 hg.repository(u, 'd', create=1)
15 os.chdir('..')
15 os.chdir('..')
16 hg.repository(u, 'c', create=1)
16 hg.repository(u, 'c', create=1)
17 os.chdir('..')
17 os.chdir('..')
18
18
19 paths = {'t/a/': '%s/a' % webdir,
19 paths = {'t/a/': '%s/a' % webdir,
20 'b': '%s/b' % webdir,
20 'b': '%s/b' % webdir,
21 'coll': '%s/*' % webdir,
21 'coll': '%s/*' % webdir,
22 'rcoll': '%s/**' % webdir}
22 'rcoll': '%s/**' % webdir}
23
23
24 config = os.path.join(webdir, 'hgwebdir.conf')
24 config = os.path.join(webdir, 'hgwebdir.conf')
25 configfile = open(config, 'w')
25 configfile = open(config, 'w')
26 configfile.write('[paths]\n')
26 configfile.write('[paths]\n')
27 for k, v in paths.items():
27 for k, v in paths.items():
28 configfile.write('%s = %s\n' % (k, v))
28 configfile.write('%s = %s\n' % (k, v))
29 configfile.close()
29 configfile.close()
30
30
31 confwd = hgwebdir(config)
31 confwd = hgwebdir(config)
32 dictwd = hgwebdir(paths)
32 dictwd = hgwebdir(paths)
33
33
34 assert len(confwd.repos) == len(dictwd.repos), 'different numbers'
34 assert len(confwd.repos) == len(dictwd.repos), 'different numbers'
35 assert len(confwd.repos) == 9, 'expected 9 repos, found %d' % len(confwd.repos)
35 assert len(confwd.repos) == 9, 'expected 9 repos, found %d' % len(confwd.repos)
36
36
37 found = dict(confwd.repos)
37 found = dict(confwd.repos)
38 for key, path in dictwd.repos:
38 for key, path in dictwd.repos:
39 assert key in found, 'repository %s was not found' % key
39 assert key in found, 'repository %s was not found' % key
40 assert found[key] == path, 'different paths for repo %s' % key
40 assert found[key] == path, 'different paths for repo %s' % key
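The expected count of nine repositories in the assertions above follows from the four path entries: 't/a/' and 'b' each map one repository, the 'coll' glob '%s/*' picks up a, b and c one level below webdir, and the 'rcoll' glob '%s/**' recurses to find a, b, b/d and c, so 2 + 3 + 4 = 9.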