merge with abuehl
Matt Mackall
r14238:d466d592 merge default

The requested changes are too big and the content was truncated; the diff below is partial.

@@ -1,25 +1,25 @@
 #!/usr/bin/env python
 # Dump revlogs as raw data stream
 # $ find .hg/store/ -name "*.i" | xargs dumprevlog > repo.dump

 import sys
 from mercurial import revlog, node, util

 for fp in (sys.stdin, sys.stdout, sys.stderr):
-    util.set_binary(fp)
+    util.setbinary(fp)

 for f in sys.argv[1:]:
     binopen = lambda fn: open(fn, 'rb')
     r = revlog.revlog(binopen, f)
     print "file:", f
     for i in r:
         n = r.node(i)
         p = r.parents(n)
         d = r.revision(n)
         print "node:", node.hex(n)
         print "linkrev:", r.linkrev(i)
         print "parents:", node.hex(p[0]), node.hex(p[1])
         print "length:", len(d)
         print "-start-"
         print d
         print "-end-"
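Editor's note, not part of the merge: dumprevlog above emits one plain-text record per revision, using the "file:", "node:", "linkrev:", "parents:" and "length:" prefixes plus "-start-"/"-end-" markers around the raw data. A minimal sketch of a reader for that stream (the readrecord helper is invented here for illustration; the field widths come from the prints above, and undumprevlog further down does the real equivalent against a revlog)::

    def readrecord(fp):
        """Collect prefixed fields until one revision's data has been read.

        Returns a dict for the next record in a dumprevlog stream, or
        None at end of file.
        """
        rec = {}
        while True:
            line = fp.readline()
            if not line:
                return None
            if line.startswith("file:"):
                rec['file'] = line[6:-1]
            elif line.startswith("node:"):
                rec['node'] = line[6:-1]
            elif line.startswith("linkrev:"):
                rec['linkrev'] = int(line[9:-1])
            elif line.startswith("parents:"):
                rec['parents'] = line[9:-1].split()
            elif line.startswith("length:"):
                length = int(line[8:-1])
                fp.readline()            # "-start-" marker
                rec['data'] = fp.read(length)
                fp.readline()            # newline appended by "print d"
                fp.readline()            # "-end-" marker
                return rec

    # e.g. rec = readrecord(open('repo.dump', 'rb'))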
@@ -1,291 +1,291 @@
 """reorder a revlog (the manifest by default) to save space

 Specifically, this topologically sorts the revisions in the revlog so that
 revisions on the same branch are adjacent as much as possible. This is a
 workaround for the fact that Mercurial computes deltas relative to the
 previous revision rather than relative to a parent revision.

 This is *not* safe to run on a changelog.
 """

 # Originally written by Benoit Boissinot <benoit.boissinot at ens-lyon.org>
 # as a patch to rewrite-log. Cleaned up, refactored, documented, and
 # renamed by Greg Ward <greg at gerg.ca>.

 # XXX would be nice to have a way to verify the repository after shrinking,
 # e.g. by comparing "before" and "after" states of random changesets
 # (maybe: export before, shrink, export after, diff).

 import os, tempfile, errno
 from mercurial import revlog, transaction, node, util, scmutil
 from mercurial import changegroup
 from mercurial.i18n import _


 def postorder(start, edges):
     result = []
     visit = list(start)
     finished = set()

     while visit:
         cur = visit[-1]
         for p in edges[cur]:
             # defend against node.nullrev because it's occasionally
             # possible for a node to have parents (null, something)
             # rather than (something, null)
             if p not in finished and p != node.nullrev:
                 visit.append(p)
                 break
         else:
             result.append(cur)
             finished.add(cur)
             visit.pop()

     return result

 def toposort_reversepostorder(ui, rl):
     # postorder of the reverse directed graph

     # map rev to list of parent revs (p2 first)
     parents = {}
     heads = set()
     ui.status(_('reading revs\n'))
     try:
         for rev in rl:
             ui.progress(_('reading'), rev, total=len(rl))
             (p1, p2) = rl.parentrevs(rev)
             if p1 == p2 == node.nullrev:
                 parents[rev] = () # root node
             elif p1 == p2 or p2 == node.nullrev:
                 parents[rev] = (p1,) # normal node
             else:
                 parents[rev] = (p2, p1) # merge node
             heads.add(rev)
             for p in parents[rev]:
                 heads.discard(p)
     finally:
         ui.progress(_('reading'), None)

     heads = list(heads)
     heads.sort(reverse=True)

     ui.status(_('sorting revs\n'))
     return postorder(heads, parents)

 def toposort_postorderreverse(ui, rl):
     # reverse-postorder of the reverse directed graph

     children = {}
     roots = set()
     ui.status(_('reading revs\n'))
     try:
         for rev in rl:
             ui.progress(_('reading'), rev, total=len(rl))
             (p1, p2) = rl.parentrevs(rev)
             if p1 == p2 == node.nullrev:
                 roots.add(rev)
             children[rev] = []
             if p1 != node.nullrev:
                 children[p1].append(rev)
             if p2 != node.nullrev:
                 children[p2].append(rev)
     finally:
         ui.progress(_('reading'), None)

     roots = list(roots)
     roots.sort()

     ui.status(_('sorting revs\n'))
     result = postorder(roots, children)
     result.reverse()
     return result

 def writerevs(ui, r1, r2, order, tr):

     ui.status(_('writing revs\n'))


     order = [r1.node(r) for r in order]

     # this is a bit ugly, but it works
     count = [0]
     def lookup(revl, x):
         count[0] += 1
         ui.progress(_('writing'), count[0], total=len(order))
         return "%020d" % revl.linkrev(revl.rev(x))

     unlookup = lambda x: int(x, 10)

     try:
         bundler = changegroup.bundle10(lookup)
         group = util.chunkbuffer(r1.group(order, bundler))
         group = changegroup.unbundle10(group, "UN")
         r2.addgroup(group, unlookup, tr)
     finally:
         ui.progress(_('writing'), None)

 def report(ui, r1, r2):
     def getsize(r):
         s = 0
         for fn in (r.indexfile, r.datafile):
             try:
                 s += os.stat(fn).st_size
             except OSError, inst:
                 if inst.errno != errno.ENOENT:
                     raise
         return s

     oldsize = float(getsize(r1))
     newsize = float(getsize(r2))

     # argh: have to pass an int to %d, because a float >= 2^32
     # blows up under Python 2.5 or earlier
     ui.write(_('old file size: %12d bytes (%6.1f MiB)\n')
              % (int(oldsize), oldsize / 1024 / 1024))
     ui.write(_('new file size: %12d bytes (%6.1f MiB)\n')
              % (int(newsize), newsize / 1024 / 1024))

     shrink_percent = (oldsize - newsize) / oldsize * 100
     shrink_factor = oldsize / newsize
     ui.write(_('shrinkage: %.1f%% (%.1fx)\n')
              % (shrink_percent, shrink_factor))

 def shrink(ui, repo, **opts):
     """shrink a revlog by reordering revisions

     Rewrites all the entries in some revlog of the current repository
     (by default, the manifest log) to save space.

     Different sort algorithms have different performance
     characteristics. Use ``--sort`` to select a sort algorithm so you
     can determine which works best for your data.
     """

     if not repo.local():
         raise util.Abort(_('not a local repository: %s') % repo.root)

     fn = opts.get('revlog')
     if not fn:
         indexfn = repo.sjoin('00manifest.i')
     else:
         if not fn.endswith('.i'):
             raise util.Abort(_('--revlog option must specify the revlog index '
                                'file (*.i), not %s') % opts.get('revlog'))

         indexfn = os.path.realpath(fn)
         store = repo.sjoin('')
         if not indexfn.startswith(store):
             raise util.Abort(_('--revlog option must specify a revlog in %s, '
                                'not %s') % (store, indexfn))

     sortname = opts['sort']
     try:
         toposort = globals()['toposort_' + sortname]
     except KeyError:
         raise util.Abort(_('no such toposort algorithm: %s') % sortname)

     if not os.path.exists(indexfn):
         raise util.Abort(_('no such file: %s') % indexfn)
     if '00changelog' in indexfn:
         raise util.Abort(_('shrinking the changelog '
                            'will corrupt your repository'))

     ui.write(_('shrinking %s\n') % indexfn)
     prefix = os.path.basename(indexfn)[:-1]
     tmpindexfn = util.mktempcopy(indexfn, emptyok=True)

     r1 = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), indexfn)
     r2 = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), tmpindexfn)

     datafn, tmpdatafn = r1.datafile, r2.datafile

     oldindexfn = indexfn + '.old'
     olddatafn = datafn + '.old'
     if os.path.exists(oldindexfn) or os.path.exists(olddatafn):
         raise util.Abort(_('one or both of\n'
                            ' %s\n'
                            ' %s\n'
                            'exists from a previous run; please clean up '
                            'before running again') % (oldindexfn, olddatafn))

     # Don't use repo.transaction(), because then things get hairy with
     # paths: some need to be relative to .hg, and some need to be
     # absolute. Doing it this way keeps things simple: everything is an
     # absolute path.
     lock = repo.lock(wait=False)
     tr = transaction.transaction(ui.warn,
                                  open,
                                  repo.sjoin('journal'))

     def ignoremissing(func):
         def f(*args, **kw):
             try:
                 return func(*args, **kw)
             except OSError, inst:
                 if inst.errno != errno.ENOENT:
                     raise
         return f

     try:
         try:
             order = toposort(ui, r1)

             suboptimal = 0
             for i in xrange(1, len(order)):
                 parents = [p for p in r1.parentrevs(order[i])
                            if p != node.nullrev]
                 if parents and order[i - 1] not in parents:
                     suboptimal += 1
             ui.note(_('%d suboptimal nodes\n') % suboptimal)

             writerevs(ui, r1, r2, order, tr)
             report(ui, r1, r2)
             tr.close()
         except:
             # Abort transaction first, so we truncate the files before
             # deleting them.
             tr.abort()
             for fn in (tmpindexfn, tmpdatafn):
                 ignoremissing(os.unlink)(fn)
             raise
         if not opts.get('dry_run'):
             # racy, both files cannot be renamed atomically
             # copy files
-            util.os_link(indexfn, oldindexfn)
-            ignoremissing(util.os_link)(datafn, olddatafn)
+            util.oslink(indexfn, oldindexfn)
+            ignoremissing(util.oslink)(datafn, olddatafn)

             # rename
             util.rename(tmpindexfn, indexfn)
             try:
                 os.chmod(tmpdatafn, os.stat(datafn).st_mode)
                 util.rename(tmpdatafn, datafn)
             except OSError, inst:
                 if inst.errno != errno.ENOENT:
                     raise
                 ignoremissing(os.unlink)(datafn)
         else:
             for fn in (tmpindexfn, tmpdatafn):
                 ignoremissing(os.unlink)(fn)
     finally:
         lock.release()

     if not opts.get('dry_run'):
         ui.write(_('note: old revlog saved in:\n'
                    ' %s\n'
                    ' %s\n'
                    '(You can delete those files when you are satisfied that your\n'
                    'repository is still sane. '
                    'Running \'hg verify\' is strongly recommended.)\n')
                  % (oldindexfn, olddatafn))

 cmdtable = {
     'shrink': (shrink,
                [('', 'revlog', '', _('index (.i) file of the revlog to shrink')),
                 ('n', 'dry-run', None, _('do not shrink, simulate only')),
                 ('', 'sort', 'reversepostorder', 'name of sort algorithm to use'),
                 ],
                _('hg shrink [--revlog PATH]'))
 }

 if __name__ == "__main__":
     print "shrink-revlog.py is now an extension (see hg help extensions)"
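Editor's aside, not part of the diff: the iterative postorder() helper above underlies both sort algorithms; it only emits a revision once all of its parents have been emitted, which is what keeps branches contiguous. A toy run on a hand-made parent map (a simplified copy of the helper, with the nullrev guard dropped)::

    def postorder(start, edges):
        # simplified copy of the helper above: a node is emitted only
        # after every node reachable through edges[node] has been emitted
        result, visit, finished = [], list(start), set()
        while visit:
            cur = visit[-1]
            for p in edges[cur]:
                if p not in finished:
                    visit.append(p)
                    break
            else:
                result.append(cur)
                finished.add(cur)
                visit.pop()
        return result

    # rev 3 merges revs 1 and 2, which both descend from rev 0
    parents = {0: (), 1: (0,), 2: (0,), 3: (2, 1)}
    print postorder([3], parents)   # [0, 2, 1, 3]: parents precede children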
@@ -1,67 +1,67 @@
 #!/usr/bin/env python

 from mercurial import demandimport
 demandimport.enable()

 import os, sys
 from mercurial.i18n import _
 from mercurial import simplemerge, fancyopts, util, ui

 options = [('L', 'label', [], _('labels to use on conflict markers')),
            ('a', 'text', None, _('treat all files as text')),
            ('p', 'print', None,
             _('print results instead of overwriting LOCAL')),
            ('', 'no-minimal', None,
             _('do not try to minimize conflict regions')),
            ('h', 'help', None, _('display help and exit')),
            ('q', 'quiet', None, _('suppress output'))]

 usage = _('''simplemerge [OPTS] LOCAL BASE OTHER

 Simple three-way file merge utility with a minimal feature set.

 Apply to LOCAL the changes necessary to go from BASE to OTHER.

 By default, LOCAL is overwritten with the results of this operation.
 ''')

 class ParseError(Exception):
     """Exception raised on errors in parsing the command line."""

 def showhelp():
     sys.stdout.write(usage)
     sys.stdout.write('\noptions:\n')

     out_opts = []
     for shortopt, longopt, default, desc in options:
         out_opts.append(('%2s%s' % (shortopt and '-%s' % shortopt,
                                     longopt and ' --%s' % longopt),
                          '%s' % desc))
     opts_len = max([len(opt[0]) for opt in out_opts])
     for first, second in out_opts:
         sys.stdout.write(' %-*s %s\n' % (opts_len, first, second))

 try:
     for fp in (sys.stdin, sys.stdout, sys.stderr):
-        util.set_binary(fp)
+        util.setbinary(fp)

     opts = {}
     try:
         args = fancyopts.fancyopts(sys.argv[1:], options, opts)
     except fancyopts.getopt.GetoptError, e:
         raise ParseError(e)
     if opts['help']:
         showhelp()
         sys.exit(0)
     if len(args) != 3:
         raise ParseError(_('wrong number of arguments'))
     sys.exit(simplemerge.simplemerge(ui.ui(), *args, **opts))
 except ParseError, e:
     sys.stdout.write("%s: %s\n" % (sys.argv[0], e))
     showhelp()
     sys.exit(1)
 except util.Abort, e:
     sys.stderr.write("abort: %s\n" % e)
     sys.exit(255)
 except KeyboardInterrupt:
     sys.exit(255)
@@ -1,37 +1,37 @@
 #!/usr/bin/env python
 # Undump a dump from dumprevlog
 # $ hg init
 # $ undumprevlog < repo.dump

 import sys
 from mercurial import revlog, node, scmutil, util, transaction

 for fp in (sys.stdin, sys.stdout, sys.stderr):
-    util.set_binary(fp)
+    util.setbinary(fp)

 opener = scmutil.opener('.', False)
 tr = transaction.transaction(sys.stderr.write, opener, "undump.journal")
 while 1:
     l = sys.stdin.readline()
     if not l:
         break
     if l.startswith("file:"):
         f = l[6:-1]
         r = revlog.revlog(opener, f)
         print f
     elif l.startswith("node:"):
         n = node.bin(l[6:-1])
     elif l.startswith("linkrev:"):
         lr = int(l[9:-1])
     elif l.startswith("parents:"):
         p = l[9:-1].split()
         p1 = node.bin(p[0])
         p2 = node.bin(p[1])
     elif l.startswith("length:"):
         length = int(l[8:-1])
         sys.stdin.readline() # start marker
         d = sys.stdin.read(length)
         sys.stdin.readline() # end marker
         r.addrevision(d, tr, lr, p1, p2)

 tr.close()
@@ -1,38 +1,38 @@
 #!/usr/bin/env python
 #
 # mercurial - scalable distributed SCM
 #
 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.

 import os
 import sys

 libdir = '@LIBDIR@'

 if libdir != '@' 'LIBDIR' '@':
     if not os.path.isabs(libdir):
         libdir = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                               libdir)
         libdir = os.path.abspath(libdir)
     sys.path.insert(0, libdir)

 # enable importing on demand to reduce startup time
 try:
     from mercurial import demandimport; demandimport.enable()
 except ImportError:
     import sys
     sys.stderr.write("abort: couldn't find mercurial libraries in [%s]\n" %
                      ' '.join(sys.path))
     sys.stderr.write("(check your install and PYTHONPATH)\n")
     sys.exit(-1)

 import mercurial.util
 import mercurial.dispatch

 for fp in (sys.stdin, sys.stdout, sys.stderr):
-    mercurial.util.set_binary(fp)
+    mercurial.util.setbinary(fp)

 mercurial.dispatch.run()
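Editor's aside, not part of the diff: the libdir check above compares against three adjacent string literals, presumably so that the install-time substitution that rewrites the '@LIBDIR@' placeholder cannot also rewrite the value being compared against. Adjacent literals concatenate at compile time::

    libdir = '@LIBDIR@'              # the installer substitutes a real path here
    placeholder = '@' 'LIBDIR' '@'   # concatenates to '@LIBDIR@' without being a
                                     # single substitutable token in the source
    print libdir != placeholder      # False in the source tree, True once installed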
@@ -1,756 +1,756 @@
 # bugzilla.py - bugzilla integration for mercurial
 #
 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
 # Copyright 2011 Jim Hague <jim.hague@acm.org>
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.

 '''hooks for integrating with the Bugzilla bug tracker

 This hook extension adds comments on bugs in Bugzilla when changesets
 that refer to bugs by Bugzilla ID are seen. The comment is formatted using
 the Mercurial template mechanism.

 The hook does not change bug status.

 Three basic modes of access to Bugzilla are provided:

 1. Access via the Bugzilla XMLRPC interface. Requires Bugzilla 3.4 or later.

 2. Check data via the Bugzilla XMLRPC interface and submit bug change
    via email to Bugzilla email interface. Requires Bugzilla 3.4 or later.

 3. Writing directly to the Bugzilla database. Only Bugzilla installations
    using MySQL are supported. Requires Python MySQLdb.

 Writing directly to the database is susceptible to schema changes, and
 relies on a Bugzilla contrib script to send out bug change
 notification emails. This script runs as the user running Mercurial,
 must be run on the host with the Bugzilla install, and requires
 permission to read Bugzilla configuration details and the necessary
 MySQL user and password to have full access rights to the Bugzilla
 database. For these reasons this access mode is now considered
 deprecated, and will not be updated for new Bugzilla versions going
 forward.

 Access via XMLRPC needs a Bugzilla username and password to be specified
 in the configuration. Comments are added under that username. Since the
 configuration must be readable by all Mercurial users, it is recommended
 that the rights of that user are restricted in Bugzilla to the minimum
 necessary to add comments.

 Access via XMLRPC/email uses XMLRPC to query Bugzilla, but sends
 email to the Bugzilla email interface to submit comments to bugs.
 The From: address in the email is set to the email address of the Mercurial
 user, so the comment appears to come from the Mercurial user. In the event
 that the Mercurial user email is not recognised by Bugzilla as a Bugzilla
 user, the email associated with the Bugzilla username used to log into
 Bugzilla is used instead as the source of the comment.

 Configuration items common to all access modes:

 bugzilla.version
   This access type to use. Values recognised are:

   :``xmlrpc``: Bugzilla XMLRPC interface.
   :``xmlrpc+email``: Bugzilla XMLRPC and email interfaces.
   :``3.0``: MySQL access, Bugzilla 3.0 and later.
   :``2.18``: MySQL access, Bugzilla 2.18 and up to but not
     including 3.0.
   :``2.16``: MySQL access, Bugzilla 2.16 and up to but not
     including 2.18.

 bugzilla.regexp
   Regular expression to match bug IDs in changeset commit message.
   Must contain one "()" group. The default expression matches ``Bug
   1234``, ``Bug no. 1234``, ``Bug number 1234``, ``Bugs 1234,5678``,
   ``Bug 1234 and 5678`` and variations thereof. Matching is case
   insensitive.

 bugzilla.style
   The style file to use when formatting comments.

 bugzilla.template
   Template to use when formatting comments. Overrides style if
   specified. In addition to the usual Mercurial keywords, the
   extension specifies:

   :``{bug}``: The Bugzilla bug ID.
   :``{root}``: The full pathname of the Mercurial repository.
   :``{webroot}``: Stripped pathname of the Mercurial repository.
   :``{hgweb}``: Base URL for browsing Mercurial repositories.

   Default ``changeset {node|short} in repo {root} refers to bug
   {bug}.\\ndetails:\\n\\t{desc|tabindent}``

 bugzilla.strip
   The number of path separator characters to strip from the front of
   the Mercurial repository path (``{root}`` in templates) to produce
   ``{webroot}``. For example, a repository with ``{root}``
   ``/var/local/my-project`` with a strip of 2 gives a value for
   ``{webroot}`` of ``my-project``. Default 0.

 web.baseurl
   Base URL for browsing Mercurial repositories. Referenced from
   templates as ``{hgweb}``.

 Configuration items common to XMLRPC+email and MySQL access modes:

 bugzilla.usermap
   Path of file containing Mercurial committer email to Bugzilla user email
   mappings. If specified, the file should contain one mapping per
   line::

     committer = Bugzilla user

   See also the ``[usermap]`` section.

 The ``[usermap]`` section is used to specify mappings of Mercurial
 committer email to Bugzilla user email. See also ``bugzilla.usermap``.
 Contains entries of the form ``committer = Bugzilla user``.

 XMLRPC access mode configuration:

 bugzilla.bzurl
   The base URL for the Bugzilla installation.
   Default ``http://localhost/bugzilla``.

 bugzilla.user
   The username to use to log into Bugzilla via XMLRPC. Default
   ``bugs``.

 bugzilla.password
   The password for Bugzilla login.

 XMLRPC+email access mode uses the XMLRPC access mode configuration items,
 and also:

 bugzilla.bzemail
   The Bugzilla email address.

 In addition, the Mercurial email settings must be configured. See the
 documentation in hgrc(5), sections ``[email]`` and ``[smtp]``.

 MySQL access mode configuration:

 bugzilla.host
   Hostname of the MySQL server holding the Bugzilla database.
   Default ``localhost``.

 bugzilla.db
   Name of the Bugzilla database in MySQL. Default ``bugs``.

 bugzilla.user
   Username to use to access MySQL server. Default ``bugs``.

 bugzilla.password
   Password to use to access MySQL server.

 bugzilla.timeout
   Database connection timeout (seconds). Default 5.

 bugzilla.bzuser
   Fallback Bugzilla user name to record comments with, if changeset
   committer cannot be found as a Bugzilla user.

 bugzilla.bzdir
   Bugzilla install directory. Used by default notify. Default
   ``/var/www/html/bugzilla``.

 bugzilla.notify
   The command to run to get Bugzilla to send bug change notification
   emails. Substitutes from a map with 3 keys, ``bzdir``, ``id`` (bug
   id) and ``user`` (committer bugzilla email). Default depends on
   version; from 2.18 it is "cd %(bzdir)s && perl -T
   contrib/sendbugmail.pl %(id)s %(user)s".

 Activating the extension::

     [extensions]
     bugzilla =

     [hooks]
     # run bugzilla hook on every change pulled or pushed in here
     incoming.bugzilla = python:hgext.bugzilla.hook

 Example configurations:

 XMLRPC example configuration. This uses the Bugzilla at
 ``http://my-project.org/bugzilla``, logging in as user
 ``bugmail@my-project.org`` with password ``plugh``. It is used with a
 collection of Mercurial repositories in ``/var/local/hg/repos/``,
 with a web interface at ``http://my-project.org/hg``. ::

     [bugzilla]
     bzurl=http://my-project.org/bugzilla
     user=bugmail@my-project.org
     password=plugh
     version=xmlrpc
     template=Changeset {node|short} in {root|basename}.
              {hgweb}/{webroot}/rev/{node|short}\\n
              {desc}\\n
     strip=5

     [web]
     baseurl=http://my-project.org/hg

 XMLRPC+email example configuration. This uses the Bugzilla at
 ``http://my-project.org/bugzilla``, logging in as user
 ``bugmail@my-project.org`` with password ``plugh``. It is used with a
 collection of Mercurial repositories in ``/var/local/hg/repos/``,
 with a web interface at ``http://my-project.org/hg``. Bug comments
 are sent to the Bugzilla email address
 ``bugzilla@my-project.org``. ::

     [bugzilla]
     bzurl=http://my-project.org/bugzilla
     user=bugmail@my-project.org
     password=plugh
     version=xmlrpc
     bzemail=bugzilla@my-project.org
     template=Changeset {node|short} in {root|basename}.
              {hgweb}/{webroot}/rev/{node|short}\\n
              {desc}\\n
     strip=5

     [web]
     baseurl=http://my-project.org/hg

     [usermap]
     user@emaildomain.com=user.name@bugzilladomain.com

 MySQL example configuration. This has a local Bugzilla 3.2 installation
 in ``/opt/bugzilla-3.2``. The MySQL database is on ``localhost``,
 the Bugzilla database name is ``bugs`` and MySQL is
 accessed with MySQL username ``bugs`` password ``XYZZY``. It is used
 with a collection of Mercurial repositories in ``/var/local/hg/repos/``,
 with a web interface at ``http://my-project.org/hg``. ::

     [bugzilla]
     host=localhost
     password=XYZZY
     version=3.0
     bzuser=unknown@domain.com
     bzdir=/opt/bugzilla-3.2
     template=Changeset {node|short} in {root|basename}.
              {hgweb}/{webroot}/rev/{node|short}\\n
              {desc}\\n
     strip=5

     [web]
     baseurl=http://my-project.org/hg

     [usermap]
     user@emaildomain.com=user.name@bugzilladomain.com

 All the above add a comment to the Bugzilla bug record of the form::

     Changeset 3b16791d6642 in repository-name.
     http://my-project.org/hg/repository-name/rev/3b16791d6642

     Changeset commit comment. Bug 1234.
 '''

 from mercurial.i18n import _
 from mercurial.node import short
 from mercurial import cmdutil, mail, templater, util
 import re, time, xmlrpclib

 class bzaccess(object):
     '''Base class for access to Bugzilla.'''

     def __init__(self, ui):
         self.ui = ui
         usermap = self.ui.config('bugzilla', 'usermap')
         if usermap:
             self.ui.readconfig(usermap, sections=['usermap'])

     def map_committer(self, user):
         '''map name of committer to Bugzilla user name.'''
         for committer, bzuser in self.ui.configitems('usermap'):
             if committer.lower() == user.lower():
                 return bzuser
         return user

     # Methods to be implemented by access classes.
     def filter_real_bug_ids(self, ids):
         '''remove bug IDs that do not exist in Bugzilla from set.'''
         pass

     def filter_cset_known_bug_ids(self, node, ids):
         '''remove bug IDs where node occurs in comment text from set.'''
         pass

     def add_comment(self, bugid, text, committer):
         '''add comment to bug.

         If possible add the comment as being from the committer of
         the changeset. Otherwise use the default Bugzilla user.
         '''
         pass

     def notify(self, ids, committer):
         '''Force sending of Bugzilla notification emails.'''
         pass

 # Bugzilla via direct access to MySQL database.
 class bzmysql(bzaccess):
     '''Support for direct MySQL access to Bugzilla.

     The earliest Bugzilla version this is tested with is version 2.16.

     If your Bugzilla is version 3.2 or above, you are strongly
     recommended to use the XMLRPC access method instead.
     '''

     @staticmethod
     def sql_buglist(ids):
         '''return SQL-friendly list of bug ids'''
         return '(' + ','.join(map(str, ids)) + ')'

     _MySQLdb = None

     def __init__(self, ui):
         try:
             import MySQLdb as mysql
             bzmysql._MySQLdb = mysql
         except ImportError, err:
             raise util.Abort(_('python mysql support not available: %s') % err)

         bzaccess.__init__(self, ui)

         host = self.ui.config('bugzilla', 'host', 'localhost')
         user = self.ui.config('bugzilla', 'user', 'bugs')
         passwd = self.ui.config('bugzilla', 'password')
         db = self.ui.config('bugzilla', 'db', 'bugs')
         timeout = int(self.ui.config('bugzilla', 'timeout', 5))
         self.ui.note(_('connecting to %s:%s as %s, password %s\n') %
                      (host, db, user, '*' * len(passwd)))
         self.conn = bzmysql._MySQLdb.connect(host=host,
                                              user=user, passwd=passwd,
                                              db=db,
                                              connect_timeout=timeout)
         self.cursor = self.conn.cursor()
         self.longdesc_id = self.get_longdesc_id()
         self.user_ids = {}
         self.default_notify = "cd %(bzdir)s && ./processmail %(id)s %(user)s"

     def run(self, *args, **kwargs):
         '''run a query.'''
         self.ui.note(_('query: %s %s\n') % (args, kwargs))
         try:
             self.cursor.execute(*args, **kwargs)
         except bzmysql._MySQLdb.MySQLError:
             self.ui.note(_('failed query: %s %s\n') % (args, kwargs))
             raise

     def get_longdesc_id(self):
         '''get identity of longdesc field'''
         self.run('select fieldid from fielddefs where name = "longdesc"')
         ids = self.cursor.fetchall()
         if len(ids) != 1:
             raise util.Abort(_('unknown database schema'))
         return ids[0][0]

     def filter_real_bug_ids(self, ids):
         '''filter not-existing bug ids from set.'''
         self.run('select bug_id from bugs where bug_id in %s' %
                  bzmysql.sql_buglist(ids))
         return set([c[0] for c in self.cursor.fetchall()])

     def filter_cset_known_bug_ids(self, node, ids):
         '''filter bug ids that already refer to this changeset from set.'''

         self.run('''select bug_id from longdescs where
                     bug_id in %s and thetext like "%%%s%%"''' %
                  (bzmysql.sql_buglist(ids), short(node)))
         for (id,) in self.cursor.fetchall():
             self.ui.status(_('bug %d already knows about changeset %s\n') %
                            (id, short(node)))
             ids.discard(id)
         return ids

     def notify(self, ids, committer):
         '''tell bugzilla to send mail.'''

         self.ui.status(_('telling bugzilla to send mail:\n'))
         (user, userid) = self.get_bugzilla_user(committer)
         for id in ids:
             self.ui.status(_(' bug %s\n') % id)
             cmdfmt = self.ui.config('bugzilla', 'notify', self.default_notify)
             bzdir = self.ui.config('bugzilla', 'bzdir', '/var/www/html/bugzilla')
             try:
                 # Backwards-compatible with old notify string, which
                 # took one string. This will throw with a new format
                 # string.
                 cmd = cmdfmt % id
             except TypeError:
                 cmd = cmdfmt % {'bzdir': bzdir, 'id': id, 'user': user}
             self.ui.note(_('running notify command %s\n') % cmd)
             fp = util.popen('(%s) 2>&1' % cmd)
             out = fp.read()
             ret = fp.close()
             if ret:
                 self.ui.warn(out)
                 raise util.Abort(_('bugzilla notify command %s') %
-                                 util.explain_exit(ret)[0])
+                                 util.explainexit(ret)[0])
         self.ui.status(_('done\n'))

     def get_user_id(self, user):
         '''look up numeric bugzilla user id.'''
         try:
             return self.user_ids[user]
         except KeyError:
             try:
                 userid = int(user)
             except ValueError:
                 self.ui.note(_('looking up user %s\n') % user)
                 self.run('''select userid from profiles
                             where login_name like %s''', user)
                 all = self.cursor.fetchall()
                 if len(all) != 1:
                     raise KeyError(user)
                 userid = int(all[0][0])
             self.user_ids[user] = userid
             return userid

     def get_bugzilla_user(self, committer):
         '''See if committer is a registered bugzilla user. Return
         bugzilla username and userid if so. If not, return default
         bugzilla username and userid.'''
         user = self.map_committer(committer)
         try:
             userid = self.get_user_id(user)
         except KeyError:
             try:
                 defaultuser = self.ui.config('bugzilla', 'bzuser')
                 if not defaultuser:
                     raise util.Abort(_('cannot find bugzilla user id for %s') %
                                      user)
                 userid = self.get_user_id(defaultuser)
                 user = defaultuser
             except KeyError:
                 raise util.Abort(_('cannot find bugzilla user id for %s or %s') %
                                  (user, defaultuser))
         return (user, userid)

     def add_comment(self, bugid, text, committer):
         '''add comment to bug. try adding comment as committer of
         changeset, otherwise as default bugzilla user.'''
         (user, userid) = self.get_bugzilla_user(committer)
         now = time.strftime('%Y-%m-%d %H:%M:%S')
         self.run('''insert into longdescs
                     (bug_id, who, bug_when, thetext)
                     values (%s, %s, %s, %s)''',
                  (bugid, userid, now, text))
         self.run('''insert into bugs_activity (bug_id, who, bug_when, fieldid)
                     values (%s, %s, %s, %s)''',
                  (bugid, userid, now, self.longdesc_id))
         self.conn.commit()

 class bzmysql_2_18(bzmysql):
     '''support for bugzilla 2.18 series.'''

     def __init__(self, ui):
         bzmysql.__init__(self, ui)
         self.default_notify = \
             "cd %(bzdir)s && perl -T contrib/sendbugmail.pl %(id)s %(user)s"

 class bzmysql_3_0(bzmysql_2_18):
     '''support for bugzilla 3.0 series.'''

     def __init__(self, ui):
         bzmysql_2_18.__init__(self, ui)

     def get_longdesc_id(self):
         '''get identity of longdesc field'''
         self.run('select id from fielddefs where name = "longdesc"')
         ids = self.cursor.fetchall()
         if len(ids) != 1:
             raise util.Abort(_('unknown database schema'))
         return ids[0][0]

474 # Bugzilla via XMLRPC interface.
474 # Bugzilla via XMLRPC interface.
475
475
476 class CookieSafeTransport(xmlrpclib.SafeTransport):
476 class CookieSafeTransport(xmlrpclib.SafeTransport):
477 """A SafeTransport that retains cookies over its lifetime.
477 """A SafeTransport that retains cookies over its lifetime.
478
478
479 The regular xmlrpclib transports ignore cookies, which causes
479 The regular xmlrpclib transports ignore cookies, which causes
480 a bit of a problem when you need a cookie-based login, as with
480 a bit of a problem when you need a cookie-based login, as with
481 the Bugzilla XMLRPC interface.
481 the Bugzilla XMLRPC interface.
482
482
483 So this is a SafeTransport which looks for cookies being set
483 So this is a SafeTransport which looks for cookies being set
484 in responses and saves them to add to all future requests.
484 in responses and saves them to add to all future requests.
485 It appears a SafeTransport can do both HTTP and HTTPS sessions,
485 It appears a SafeTransport can do both HTTP and HTTPS sessions,
486 which saves us having to do a CookieTransport too.
486 which saves us having to do a CookieTransport too.
487 """
487 """
488
488
489 # Inspiration drawn from
489 # Inspiration drawn from
490 # http://blog.godson.in/2010/09/how-to-make-python-xmlrpclib-client.html
490 # http://blog.godson.in/2010/09/how-to-make-python-xmlrpclib-client.html
491 # http://www.itkovian.net/base/transport-class-for-pythons-xml-rpc-lib/
491 # http://www.itkovian.net/base/transport-class-for-pythons-xml-rpc-lib/
492
492
493 cookies = []
493 cookies = []
494 def send_cookies(self, connection):
494 def send_cookies(self, connection):
495 if self.cookies:
495 if self.cookies:
496 for cookie in self.cookies:
496 for cookie in self.cookies:
497 connection.putheader("Cookie", cookie)
497 connection.putheader("Cookie", cookie)
498
498
499 def request(self, host, handler, request_body, verbose=0):
499 def request(self, host, handler, request_body, verbose=0):
500 self.verbose = verbose
500 self.verbose = verbose
501
501
502 # issue XML-RPC request
502 # issue XML-RPC request
503 h = self.make_connection(host)
503 h = self.make_connection(host)
504 if verbose:
504 if verbose:
505 h.set_debuglevel(1)
505 h.set_debuglevel(1)
506
506
507 self.send_request(h, handler, request_body)
507 self.send_request(h, handler, request_body)
508 self.send_host(h, host)
508 self.send_host(h, host)
509 self.send_cookies(h)
509 self.send_cookies(h)
510 self.send_user_agent(h)
510 self.send_user_agent(h)
511 self.send_content(h, request_body)
511 self.send_content(h, request_body)
512
512
513 # Deal with differences between Python 2.4-2.6 and 2.7.
513 # Deal with differences between Python 2.4-2.6 and 2.7.
514 # In the former h is a HTTP(S). In the latter it's a
514 # In the former h is a HTTP(S). In the latter it's a
515 # HTTP(S)Connection. Luckily, the 2.4-2.6 implementation of
515 # HTTP(S)Connection. Luckily, the 2.4-2.6 implementation of
516 # HTTP(S) has an underlying HTTP(S)Connection, so extract
516 # HTTP(S) has an underlying HTTP(S)Connection, so extract
517 # that and use it.
517 # that and use it.
518 try:
518 try:
519 response = h.getresponse()
519 response = h.getresponse()
520 except AttributeError:
520 except AttributeError:
521 response = h._conn.getresponse()
521 response = h._conn.getresponse()
522
522
523 # Add any cookie definitions to our list.
523 # Add any cookie definitions to our list.
524 for header in response.msg.getallmatchingheaders("Set-Cookie"):
524 for header in response.msg.getallmatchingheaders("Set-Cookie"):
525 val = header.split(": ", 1)[1]
525 val = header.split(": ", 1)[1]
526 cookie = val.split(";", 1)[0]
526 cookie = val.split(";", 1)[0]
527 self.cookies.append(cookie)
527 self.cookies.append(cookie)
528
528
529 if response.status != 200:
529 if response.status != 200:
530 raise xmlrpclib.ProtocolError(host + handler, response.status,
530 raise xmlrpclib.ProtocolError(host + handler, response.status,
531 response.reason, response.msg.headers)
531 response.reason, response.msg.headers)
532
532
533 payload = response.read()
533 payload = response.read()
534 parser, unmarshaller = self.getparser()
534 parser, unmarshaller = self.getparser()
535 parser.feed(payload)
535 parser.feed(payload)
536 parser.close()
536 parser.close()
537
537
538 return unmarshaller.close()
538 return unmarshaller.close()
539
539
540 class bzxmlrpc(bzaccess):
540 class bzxmlrpc(bzaccess):
541 """Support for access to Bugzilla via the Bugzilla XMLRPC API.
541 """Support for access to Bugzilla via the Bugzilla XMLRPC API.
542
542
543 Requires Bugzilla version 3.4 or later.
543 Requires Bugzilla version 3.4 or later.
544 """
544 """
545
545
546 def __init__(self, ui):
546 def __init__(self, ui):
547 bzaccess.__init__(self, ui)
547 bzaccess.__init__(self, ui)
548
548
549 bzweb = self.ui.config('bugzilla', 'bzurl',
549 bzweb = self.ui.config('bugzilla', 'bzurl',
550 'http://localhost/bugzilla/')
550 'http://localhost/bugzilla/')
551 bzweb = bzweb.rstrip("/") + "/xmlrpc.cgi"
551 bzweb = bzweb.rstrip("/") + "/xmlrpc.cgi"
552
552
553 user = self.ui.config('bugzilla', 'user', 'bugs')
553 user = self.ui.config('bugzilla', 'user', 'bugs')
554 passwd = self.ui.config('bugzilla', 'password')
554 passwd = self.ui.config('bugzilla', 'password')
555
555
556 self.bzproxy = xmlrpclib.ServerProxy(bzweb, CookieSafeTransport())
556 self.bzproxy = xmlrpclib.ServerProxy(bzweb, CookieSafeTransport())
557 self.bzproxy.User.login(dict(login=user, password=passwd))
557 self.bzproxy.User.login(dict(login=user, password=passwd))
558
558
559 def get_bug_comments(self, id):
559 def get_bug_comments(self, id):
560 """Return a string with all comment text for a bug."""
560 """Return a string with all comment text for a bug."""
561 c = self.bzproxy.Bug.comments(dict(ids=[id]))
561 c = self.bzproxy.Bug.comments(dict(ids=[id]))
562 return ''.join([t['text'] for t in c['bugs'][str(id)]['comments']])
562 return ''.join([t['text'] for t in c['bugs'][str(id)]['comments']])
563
563
564 def filter_real_bug_ids(self, ids):
564 def filter_real_bug_ids(self, ids):
565 res = set()
565 res = set()
566 bugs = self.bzproxy.Bug.get(dict(ids=sorted(ids), permissive=True))
566 bugs = self.bzproxy.Bug.get(dict(ids=sorted(ids), permissive=True))
567 for bug in bugs['bugs']:
567 for bug in bugs['bugs']:
568 res.add(bug['id'])
568 res.add(bug['id'])
569 return res
569 return res
570
570
571 def filter_cset_known_bug_ids(self, node, ids):
571 def filter_cset_known_bug_ids(self, node, ids):
572 for id in sorted(ids):
572 for id in sorted(ids):
573 if self.get_bug_comments(id).find(short(node)) != -1:
573 if self.get_bug_comments(id).find(short(node)) != -1:
574 self.ui.status(_('bug %d already knows about changeset %s\n') %
574 self.ui.status(_('bug %d already knows about changeset %s\n') %
575 (id, short(node)))
575 (id, short(node)))
576 ids.discard(id)
576 ids.discard(id)
577 return ids
577 return ids
578
578
579 def add_comment(self, bugid, text, committer):
579 def add_comment(self, bugid, text, committer):
580 self.bzproxy.Bug.add_comment(dict(id=bugid, comment=text))
580 self.bzproxy.Bug.add_comment(dict(id=bugid, comment=text))
581
581
582 class bzxmlrpcemail(bzxmlrpc):
582 class bzxmlrpcemail(bzxmlrpc):
583 """Read data from Bugzilla via XMLRPC, send updates via email.
583 """Read data from Bugzilla via XMLRPC, send updates via email.
584
584
585 Advantages of sending updates via email:
585 Advantages of sending updates via email:
586 1. Comments can be added as any user, not just the logged-in user.
586 1. Comments can be added as any user, not just the logged-in user.
587 2. Bug statuses and other fields not accessible via XMLRPC can
587 2. Bug statuses and other fields not accessible via XMLRPC can
588 be updated. This is not currently used.
588 be updated. This is not currently used.
589 """
589 """
590
590
591 def __init__(self, ui):
591 def __init__(self, ui):
592 bzxmlrpc.__init__(self, ui)
592 bzxmlrpc.__init__(self, ui)
593
593
594 self.bzemail = self.ui.config('bugzilla', 'bzemail')
594 self.bzemail = self.ui.config('bugzilla', 'bzemail')
595 if not self.bzemail:
595 if not self.bzemail:
596 raise util.Abort(_("configuration 'bzemail' missing"))
596 raise util.Abort(_("configuration 'bzemail' missing"))
597 mail.validateconfig(self.ui)
597 mail.validateconfig(self.ui)
598
598
599 def send_bug_modify_email(self, bugid, commands, comment, committer):
599 def send_bug_modify_email(self, bugid, commands, comment, committer):
600 '''send modification message to Bugzilla bug via email.
600 '''send modification message to Bugzilla bug via email.
601
601
602 The message format is documented in the Bugzilla email_in.pl
602 The message format is documented in the Bugzilla email_in.pl
603 specification. commands is a list of command lines, comment is the
603 specification. commands is a list of command lines, comment is the
604 comment text.
604 comment text.
605
605
606 To stop users from crafting commit comments with
606 To stop users from crafting commit comments with
607 Bugzilla commands, specify the bug ID via the message body, rather
607 Bugzilla commands, specify the bug ID via the message body, rather
608 than the subject line, and leave a blank line after it.
608 than the subject line, and leave a blank line after it.
609 '''
609 '''
610 user = self.map_committer(committer)
610 user = self.map_committer(committer)
611 matches = self.bzproxy.User.get(dict(match=[user]))
611 matches = self.bzproxy.User.get(dict(match=[user]))
612 if not matches['users']:
612 if not matches['users']:
613 user = self.ui.config('bugzilla', 'user', 'bugs')
613 user = self.ui.config('bugzilla', 'user', 'bugs')
614 matches = self.bzproxy.User.get(dict(match=[user]))
614 matches = self.bzproxy.User.get(dict(match=[user]))
615 if not matches['users']:
615 if not matches['users']:
616 raise util.Abort(_("default bugzilla user %s email not found") %
616 raise util.Abort(_("default bugzilla user %s email not found") %
617 user)
617 user)
618 user = matches['users'][0]['email']
618 user = matches['users'][0]['email']
619
619
620 text = "\n".join(commands) + "\n@bug_id = %d\n\n" % bugid + comment
620 text = "\n".join(commands) + "\n@bug_id = %d\n\n" % bugid + comment
621
621
622 _charsets = mail._charsets(self.ui)
622 _charsets = mail._charsets(self.ui)
623 user = mail.addressencode(self.ui, user, _charsets)
623 user = mail.addressencode(self.ui, user, _charsets)
624 bzemail = mail.addressencode(self.ui, self.bzemail, _charsets)
624 bzemail = mail.addressencode(self.ui, self.bzemail, _charsets)
625 msg = mail.mimeencode(self.ui, text, _charsets)
625 msg = mail.mimeencode(self.ui, text, _charsets)
626 msg['From'] = user
626 msg['From'] = user
627 msg['To'] = bzemail
627 msg['To'] = bzemail
628 msg['Subject'] = mail.headencode(self.ui, "Bug modification", _charsets)
628 msg['Subject'] = mail.headencode(self.ui, "Bug modification", _charsets)
629 sendmail = mail.connect(self.ui)
629 sendmail = mail.connect(self.ui)
630 sendmail(user, bzemail, msg.as_string())
630 sendmail(user, bzemail, msg.as_string())
631
631
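For orientation, a sketch of the message body the method above assembles for Bugzilla's email_in.pl, assuming bug id 1234, an empty command list and an illustrative comment produced by the template in update():

    @bug_id = 1234

    changeset 0123456789ab in repo /srv/hg/myrepo refers to bug 1234.
    details:
        ...

Only the From/To addresses and the fixed "Bug modification" subject are added around this body before it is handed to mail.connect().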
632 def add_comment(self, bugid, text, committer):
632 def add_comment(self, bugid, text, committer):
633 self.send_bug_modify_email(bugid, [], text, committer)
633 self.send_bug_modify_email(bugid, [], text, committer)
634
634
635 class bugzilla(object):
635 class bugzilla(object):
636 # supported versions of bugzilla. different versions have
636 # supported versions of bugzilla. different versions have
637 # different schemas.
637 # different schemas.
638 _versions = {
638 _versions = {
639 '2.16': bzmysql,
639 '2.16': bzmysql,
640 '2.18': bzmysql_2_18,
640 '2.18': bzmysql_2_18,
641 '3.0': bzmysql_3_0,
641 '3.0': bzmysql_3_0,
642 'xmlrpc': bzxmlrpc,
642 'xmlrpc': bzxmlrpc,
643 'xmlrpc+email': bzxmlrpcemail
643 'xmlrpc+email': bzxmlrpcemail
644 }
644 }
645
645
646 _default_bug_re = (r'bugs?\s*,?\s*(?:#|nos?\.?|num(?:ber)?s?)?\s*'
646 _default_bug_re = (r'bugs?\s*,?\s*(?:#|nos?\.?|num(?:ber)?s?)?\s*'
647 r'((?:\d+\s*(?:,?\s*(?:and)?)?\s*)+)')
647 r'((?:\d+\s*(?:,?\s*(?:and)?)?\s*)+)')
648
648
649 _bz = None
649 _bz = None
650
650
651 def __init__(self, ui, repo):
651 def __init__(self, ui, repo):
652 self.ui = ui
652 self.ui = ui
653 self.repo = repo
653 self.repo = repo
654
654
655 def bz(self):
655 def bz(self):
656 '''return object that knows how to talk to bugzilla version in
656 '''return object that knows how to talk to bugzilla version in
657 use.'''
657 use.'''
658
658
659 if bugzilla._bz is None:
659 if bugzilla._bz is None:
660 bzversion = self.ui.config('bugzilla', 'version')
660 bzversion = self.ui.config('bugzilla', 'version')
661 try:
661 try:
662 bzclass = bugzilla._versions[bzversion]
662 bzclass = bugzilla._versions[bzversion]
663 except KeyError:
663 except KeyError:
664 raise util.Abort(_('bugzilla version %s not supported') %
664 raise util.Abort(_('bugzilla version %s not supported') %
665 bzversion)
665 bzversion)
666 bugzilla._bz = bzclass(self.ui)
666 bugzilla._bz = bzclass(self.ui)
667 return bugzilla._bz
667 return bugzilla._bz
668
668
669 def __getattr__(self, key):
669 def __getattr__(self, key):
670 return getattr(self.bz(), key)
670 return getattr(self.bz(), key)
671
671
672 _bug_re = None
672 _bug_re = None
673 _split_re = None
673 _split_re = None
674
674
675 def find_bug_ids(self, ctx):
675 def find_bug_ids(self, ctx):
676 '''return set of integer bug IDs from commit comment.
676 '''return set of integer bug IDs from commit comment.
677
677
678 Extract bug IDs from changeset comments. Filter out any that are
678 Extract bug IDs from changeset comments. Filter out any that are
679 not known to Bugzilla, and any that already have a reference to
679 not known to Bugzilla, and any that already have a reference to
680 the given changeset in their comments.
680 the given changeset in their comments.
681 '''
681 '''
682 if bugzilla._bug_re is None:
682 if bugzilla._bug_re is None:
683 bugzilla._bug_re = re.compile(
683 bugzilla._bug_re = re.compile(
684 self.ui.config('bugzilla', 'regexp', bugzilla._default_bug_re),
684 self.ui.config('bugzilla', 'regexp', bugzilla._default_bug_re),
685 re.IGNORECASE)
685 re.IGNORECASE)
686 bugzilla._split_re = re.compile(r'\D+')
686 bugzilla._split_re = re.compile(r'\D+')
687 start = 0
687 start = 0
688 ids = set()
688 ids = set()
689 while True:
689 while True:
690 m = bugzilla._bug_re.search(ctx.description(), start)
690 m = bugzilla._bug_re.search(ctx.description(), start)
691 if not m:
691 if not m:
692 break
692 break
693 start = m.end()
693 start = m.end()
694 for id in bugzilla._split_re.split(m.group(1)):
694 for id in bugzilla._split_re.split(m.group(1)):
695 if not id:
695 if not id:
696 continue
696 continue
697 ids.add(int(id))
697 ids.add(int(id))
698 if ids:
698 if ids:
699 ids = self.filter_real_bug_ids(ids)
699 ids = self.filter_real_bug_ids(ids)
700 if ids:
700 if ids:
701 ids = self.filter_cset_known_bug_ids(ctx.node(), ids)
701 ids = self.filter_cset_known_bug_ids(ctx.node(), ids)
702 return ids
702 return ids
703
703
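A minimal, runnable sketch of how the default pattern above picks ids out of a commit message (the message text is made up):

    import re
    bug_re = re.compile(r'bugs?\s*,?\s*(?:#|nos?\.?|num(?:ber)?s?)?\s*'
                        r'((?:\d+\s*(?:,?\s*(?:and)?)?\s*)+)', re.IGNORECASE)
    m = bug_re.search('Fix crash on startup (see bugs 1234 and 5678)')
    ids = [int(s) for s in re.split(r'\D+', m.group(1)) if s]
    # ids == [1234, 5678]; find_bug_ids() then filters these against Bugzilla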
704 def update(self, bugid, ctx):
704 def update(self, bugid, ctx):
705 '''update bugzilla bug with reference to changeset.'''
705 '''update bugzilla bug with reference to changeset.'''
706
706
707 def webroot(root):
707 def webroot(root):
708 '''strip leading prefix of repo root and turn into
708 '''strip leading prefix of repo root and turn into
709 url-safe path.'''
709 url-safe path.'''
710 count = int(self.ui.config('bugzilla', 'strip', 0))
710 count = int(self.ui.config('bugzilla', 'strip', 0))
711 root = util.pconvert(root)
711 root = util.pconvert(root)
712 while count > 0:
712 while count > 0:
713 c = root.find('/')
713 c = root.find('/')
714 if c == -1:
714 if c == -1:
715 break
715 break
716 root = root[c + 1:]
716 root = root[c + 1:]
717 count -= 1
717 count -= 1
718 return root
718 return root
719
719
720 mapfile = self.ui.config('bugzilla', 'style')
720 mapfile = self.ui.config('bugzilla', 'style')
721 tmpl = self.ui.config('bugzilla', 'template')
721 tmpl = self.ui.config('bugzilla', 'template')
722 t = cmdutil.changeset_templater(self.ui, self.repo,
722 t = cmdutil.changeset_templater(self.ui, self.repo,
723 False, None, mapfile, False)
723 False, None, mapfile, False)
724 if not mapfile and not tmpl:
724 if not mapfile and not tmpl:
725 tmpl = _('changeset {node|short} in repo {root} refers '
725 tmpl = _('changeset {node|short} in repo {root} refers '
726 'to bug {bug}.\ndetails:\n\t{desc|tabindent}')
726 'to bug {bug}.\ndetails:\n\t{desc|tabindent}')
727 if tmpl:
727 if tmpl:
728 tmpl = templater.parsestring(tmpl, quoted=False)
728 tmpl = templater.parsestring(tmpl, quoted=False)
729 t.use_template(tmpl)
729 t.use_template(tmpl)
730 self.ui.pushbuffer()
730 self.ui.pushbuffer()
731 t.show(ctx, changes=ctx.changeset(),
731 t.show(ctx, changes=ctx.changeset(),
732 bug=str(bugid),
732 bug=str(bugid),
733 hgweb=self.ui.config('web', 'baseurl'),
733 hgweb=self.ui.config('web', 'baseurl'),
734 root=self.repo.root,
734 root=self.repo.root,
735 webroot=webroot(self.repo.root))
735 webroot=webroot(self.repo.root))
736 data = self.ui.popbuffer()
736 data = self.ui.popbuffer()
737 self.add_comment(bugid, data, util.email(ctx.user()))
737 self.add_comment(bugid, data, util.email(ctx.user()))
738
738
739 def hook(ui, repo, hooktype, node=None, **kwargs):
739 def hook(ui, repo, hooktype, node=None, **kwargs):
740 '''add comment to bugzilla for each changeset that refers to a
740 '''add comment to bugzilla for each changeset that refers to a
741 bugzilla bug id. only add a comment once per bug, so same change
741 bugzilla bug id. only add a comment once per bug, so same change
742 seen multiple times does not fill bug with duplicate data.'''
742 seen multiple times does not fill bug with duplicate data.'''
743 if node is None:
743 if node is None:
744 raise util.Abort(_('hook type %s does not pass a changeset id') %
744 raise util.Abort(_('hook type %s does not pass a changeset id') %
745 hooktype)
745 hooktype)
746 try:
746 try:
747 bz = bugzilla(ui, repo)
747 bz = bugzilla(ui, repo)
748 ctx = repo[node]
748 ctx = repo[node]
749 ids = bz.find_bug_ids(ctx)
749 ids = bz.find_bug_ids(ctx)
750 if ids:
750 if ids:
751 for id in ids:
751 for id in ids:
752 bz.update(id, ctx)
752 bz.update(id, ctx)
753 bz.notify(ids, util.email(ctx.user()))
753 bz.notify(ids, util.email(ctx.user()))
754 except Exception, e:
754 except Exception, e:
755 raise util.Abort(_('Bugzilla error: %s') % e)
755 raise util.Abort(_('Bugzilla error: %s') % e)
756
756
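A hedged sketch of the hgrc wiring needed to run the hook defined above on incoming changesets; the URL, credentials and access method are placeholders, and the authoritative option list lives in the extension's own help text:

    [extensions]
    bugzilla =

    [hooks]
    incoming.bugzilla = python:hgext.bugzilla.hook

    [bugzilla]
    version = xmlrpc
    bzurl = http://bugzilla.example.org/bugzilla/
    user = bugs
    password = secret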
@@ -1,411 +1,411 b''
1 # common.py - common code for the convert extension
1 # common.py - common code for the convert extension
2 #
2 #
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 import base64, errno
8 import base64, errno
9 import os
9 import os
10 import cPickle as pickle
10 import cPickle as pickle
11 from mercurial import util
11 from mercurial import util
12 from mercurial.i18n import _
12 from mercurial.i18n import _
13
13
14 def encodeargs(args):
14 def encodeargs(args):
15 def encodearg(s):
15 def encodearg(s):
16 lines = base64.encodestring(s)
16 lines = base64.encodestring(s)
17 lines = [l.splitlines()[0] for l in lines]
17 lines = [l.splitlines()[0] for l in lines]
18 return ''.join(lines)
18 return ''.join(lines)
19
19
20 s = pickle.dumps(args)
20 s = pickle.dumps(args)
21 return encodearg(s)
21 return encodearg(s)
22
22
23 def decodeargs(s):
23 def decodeargs(s):
24 s = base64.decodestring(s)
24 s = base64.decodestring(s)
25 return pickle.loads(s)
25 return pickle.loads(s)
26
26
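A quick sketch of the round trip these two helpers provide (Python 2, matching the surrounding code; the argument list is made up):

    args = ['log', {'limit': 10}, None]
    s = encodeargs(args)       # one base64 line, embedded newlines stripped
    assert decodeargs(s) == args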
27 class MissingTool(Exception):
27 class MissingTool(Exception):
28 pass
28 pass
29
29
30 def checktool(exe, name=None, abort=True):
30 def checktool(exe, name=None, abort=True):
31 name = name or exe
31 name = name or exe
32 if not util.find_exe(exe):
32 if not util.find_exe(exe):
33 exc = abort and util.Abort or MissingTool
33 exc = abort and util.Abort or MissingTool
34 raise exc(_('cannot find required "%s" tool') % name)
34 raise exc(_('cannot find required "%s" tool') % name)
35
35
36 class NoRepo(Exception):
36 class NoRepo(Exception):
37 pass
37 pass
38
38
39 SKIPREV = 'SKIP'
39 SKIPREV = 'SKIP'
40
40
41 class commit(object):
41 class commit(object):
42 def __init__(self, author, date, desc, parents, branch=None, rev=None,
42 def __init__(self, author, date, desc, parents, branch=None, rev=None,
43 extra={}, sortkey=None):
43 extra={}, sortkey=None):
44 self.author = author or 'unknown'
44 self.author = author or 'unknown'
45 self.date = date or '0 0'
45 self.date = date or '0 0'
46 self.desc = desc
46 self.desc = desc
47 self.parents = parents
47 self.parents = parents
48 self.branch = branch
48 self.branch = branch
49 self.rev = rev
49 self.rev = rev
50 self.extra = extra
50 self.extra = extra
51 self.sortkey = sortkey
51 self.sortkey = sortkey
52
52
53 class converter_source(object):
53 class converter_source(object):
54 """Conversion source interface"""
54 """Conversion source interface"""
55
55
56 def __init__(self, ui, path=None, rev=None):
56 def __init__(self, ui, path=None, rev=None):
57 """Initialize conversion source (or raise NoRepo("message")
57 """Initialize conversion source (or raise NoRepo("message")
58 exception if path is not a valid repository)"""
58 exception if path is not a valid repository)"""
59 self.ui = ui
59 self.ui = ui
60 self.path = path
60 self.path = path
61 self.rev = rev
61 self.rev = rev
62
62
63 self.encoding = 'utf-8'
63 self.encoding = 'utf-8'
64
64
65 def before(self):
65 def before(self):
66 pass
66 pass
67
67
68 def after(self):
68 def after(self):
69 pass
69 pass
70
70
71 def setrevmap(self, revmap):
71 def setrevmap(self, revmap):
72 """set the map of already-converted revisions"""
72 """set the map of already-converted revisions"""
73 pass
73 pass
74
74
75 def getheads(self):
75 def getheads(self):
76 """Return a list of this repository's heads"""
76 """Return a list of this repository's heads"""
77 raise NotImplementedError()
77 raise NotImplementedError()
78
78
79 def getfile(self, name, rev):
79 def getfile(self, name, rev):
80 """Return a pair (data, mode) where data is the file content
80 """Return a pair (data, mode) where data is the file content
81 as a string and mode one of '', 'x' or 'l'. rev is the
81 as a string and mode one of '', 'x' or 'l'. rev is the
82 identifier returned by a previous call to getchanges(). Raise
82 identifier returned by a previous call to getchanges(). Raise
83 IOError to indicate that name was deleted in rev.
83 IOError to indicate that name was deleted in rev.
84 """
84 """
85 raise NotImplementedError()
85 raise NotImplementedError()
86
86
87 def getchanges(self, version):
87 def getchanges(self, version):
88 """Returns a tuple of (files, copies).
88 """Returns a tuple of (files, copies).
89
89
90 files is a sorted list of (filename, id) tuples for all files
90 files is a sorted list of (filename, id) tuples for all files
91 changed between version and its first parent returned by
91 changed between version and its first parent returned by
92 getcommit(). id is the source revision id of the file.
92 getcommit(). id is the source revision id of the file.
93
93
94 copies is a dictionary of dest: source
94 copies is a dictionary of dest: source
95 """
95 """
96 raise NotImplementedError()
96 raise NotImplementedError()
97
97
98 def getcommit(self, version):
98 def getcommit(self, version):
99 """Return the commit object for version"""
99 """Return the commit object for version"""
100 raise NotImplementedError()
100 raise NotImplementedError()
101
101
102 def gettags(self):
102 def gettags(self):
103 """Return the tags as a dictionary of name: revision
103 """Return the tags as a dictionary of name: revision
104
104
105 Tag names must be UTF-8 strings.
105 Tag names must be UTF-8 strings.
106 """
106 """
107 raise NotImplementedError()
107 raise NotImplementedError()
108
108
109 def recode(self, s, encoding=None):
109 def recode(self, s, encoding=None):
110 if not encoding:
110 if not encoding:
111 encoding = self.encoding or 'utf-8'
111 encoding = self.encoding or 'utf-8'
112
112
113 if isinstance(s, unicode):
113 if isinstance(s, unicode):
114 return s.encode("utf-8")
114 return s.encode("utf-8")
115 try:
115 try:
116 return s.decode(encoding).encode("utf-8")
116 return s.decode(encoding).encode("utf-8")
117 except:
117 except:
118 try:
118 try:
119 return s.decode("latin-1").encode("utf-8")
119 return s.decode("latin-1").encode("utf-8")
120 except:
120 except:
121 return s.decode(encoding, "replace").encode("utf-8")
121 return s.decode(encoding, "replace").encode("utf-8")
122
122
123 def getchangedfiles(self, rev, i):
123 def getchangedfiles(self, rev, i):
124 """Return the files changed by rev compared to parent[i].
124 """Return the files changed by rev compared to parent[i].
125
125
126 i is an index selecting one of the parents of rev. The return
126 i is an index selecting one of the parents of rev. The return
127 value should be the list of files that are different in rev and
127 value should be the list of files that are different in rev and
128 this parent.
128 this parent.
129
129
130 If rev has no parents, i is None.
130 If rev has no parents, i is None.
131
131
132 This function is only needed to support --filemap
132 This function is only needed to support --filemap
133 """
133 """
134 raise NotImplementedError()
134 raise NotImplementedError()
135
135
136 def converted(self, rev, sinkrev):
136 def converted(self, rev, sinkrev):
137 '''Notify the source that a revision has been converted.'''
137 '''Notify the source that a revision has been converted.'''
138 pass
138 pass
139
139
140 def hasnativeorder(self):
140 def hasnativeorder(self):
141 """Return true if this source has a meaningful, native revision
141 """Return true if this source has a meaningful, native revision
142 order. For instance, Mercurial revisions are stored sequentially
142 order. For instance, Mercurial revisions are stored sequentially
143 while there is no such global ordering with Darcs.
143 while there is no such global ordering with Darcs.
144 """
144 """
145 return False
145 return False
146
146
147 def lookuprev(self, rev):
147 def lookuprev(self, rev):
148 """If rev is a meaningful revision reference in source, return
148 """If rev is a meaningful revision reference in source, return
149 the referenced identifier in the same format used by getcommit().
149 the referenced identifier in the same format used by getcommit().
150 return None otherwise.
150 return None otherwise.
151 """
151 """
152 return None
152 return None
153
153
154 def getbookmarks(self):
154 def getbookmarks(self):
155 """Return the bookmarks as a dictionary of name: revision
155 """Return the bookmarks as a dictionary of name: revision
156
156
157 Bookmark names are to be UTF-8 strings.
157 Bookmark names are to be UTF-8 strings.
158 """
158 """
159 return {}
159 return {}
160
160
161 class converter_sink(object):
161 class converter_sink(object):
162 """Conversion sink (target) interface"""
162 """Conversion sink (target) interface"""
163
163
164 def __init__(self, ui, path):
164 def __init__(self, ui, path):
165 """Initialize conversion sink (or raise NoRepo("message")
165 """Initialize conversion sink (or raise NoRepo("message")
166 exception if path is not a valid repository)
166 exception if path is not a valid repository)
167
167
168 created is a list of paths to remove if a fatal error occurs
168 created is a list of paths to remove if a fatal error occurs
169 later"""
169 later"""
170 self.ui = ui
170 self.ui = ui
171 self.path = path
171 self.path = path
172 self.created = []
172 self.created = []
173
173
174 def getheads(self):
174 def getheads(self):
175 """Return a list of this repository's heads"""
175 """Return a list of this repository's heads"""
176 raise NotImplementedError()
176 raise NotImplementedError()
177
177
178 def revmapfile(self):
178 def revmapfile(self):
179 """Path to a file that will contain lines
179 """Path to a file that will contain lines
180 source_rev_id sink_rev_id
180 source_rev_id sink_rev_id
181 mapping equivalent revision identifiers for each system."""
181 mapping equivalent revision identifiers for each system."""
182 raise NotImplementedError()
182 raise NotImplementedError()
183
183
184 def authorfile(self):
184 def authorfile(self):
185 """Path to a file that will contain lines
185 """Path to a file that will contain lines
186 srcauthor=dstauthor
186 srcauthor=dstauthor
187 mapping equivalent author identifiers for each system."""
187 mapping equivalent author identifiers for each system."""
188 return None
188 return None
189
189
190 def putcommit(self, files, copies, parents, commit, source, revmap):
190 def putcommit(self, files, copies, parents, commit, source, revmap):
191 """Create a revision with all changed files listed in 'files'
191 """Create a revision with all changed files listed in 'files'
192 and having listed parents. 'commit' is a commit object
192 and having listed parents. 'commit' is a commit object
193 containing at a minimum the author, date, and message for this
193 containing at a minimum the author, date, and message for this
194 changeset. 'files' is a list of (path, version) tuples,
194 changeset. 'files' is a list of (path, version) tuples,
195 'copies' is a dictionary mapping destinations to sources,
195 'copies' is a dictionary mapping destinations to sources,
196 'source' is the source repository, and 'revmap' is a mapfile
196 'source' is the source repository, and 'revmap' is a mapfile
197 of source revisions to converted revisions. Only getfile() and
197 of source revisions to converted revisions. Only getfile() and
198 lookuprev() should be called on 'source'.
198 lookuprev() should be called on 'source'.
199
199
200 Note that the sink repository is not told to update itself to
200 Note that the sink repository is not told to update itself to
201 a particular revision (or even what that revision would be)
201 a particular revision (or even what that revision would be)
202 before it receives the file data.
202 before it receives the file data.
203 """
203 """
204 raise NotImplementedError()
204 raise NotImplementedError()
205
205
206 def puttags(self, tags):
206 def puttags(self, tags):
207 """Put tags into sink.
207 """Put tags into sink.
208
208
209 tags: {tagname: sink_rev_id, ...} where tagname is a UTF-8 string.
209 tags: {tagname: sink_rev_id, ...} where tagname is a UTF-8 string.
210 Return a pair (tag_revision, tag_parent_revision), or (None, None)
210 Return a pair (tag_revision, tag_parent_revision), or (None, None)
211 if nothing was changed.
211 if nothing was changed.
212 """
212 """
213 raise NotImplementedError()
213 raise NotImplementedError()
214
214
215 def setbranch(self, branch, pbranches):
215 def setbranch(self, branch, pbranches):
216 """Set the current branch name. Called before the first putcommit
216 """Set the current branch name. Called before the first putcommit
217 on the branch.
217 on the branch.
218 branch: branch name for subsequent commits
218 branch: branch name for subsequent commits
219 pbranches: (converted parent revision, parent branch) tuples"""
219 pbranches: (converted parent revision, parent branch) tuples"""
220 pass
220 pass
221
221
222 def setfilemapmode(self, active):
222 def setfilemapmode(self, active):
223 """Tell the destination that we're using a filemap
223 """Tell the destination that we're using a filemap
224
224
225 Some converter_sources (svn in particular) can claim that a file
225 Some converter_sources (svn in particular) can claim that a file
226 was changed in a revision, even if there was no change. This method
226 was changed in a revision, even if there was no change. This method
227 tells the destination that we're using a filemap and that it should
227 tells the destination that we're using a filemap and that it should
228 filter empty revisions.
228 filter empty revisions.
229 """
229 """
230 pass
230 pass
231
231
232 def before(self):
232 def before(self):
233 pass
233 pass
234
234
235 def after(self):
235 def after(self):
236 pass
236 pass
237
237
238 def putbookmarks(self, bookmarks):
238 def putbookmarks(self, bookmarks):
239 """Put bookmarks into sink.
239 """Put bookmarks into sink.
240
240
241 bookmarks: {bookmarkname: sink_rev_id, ...}
241 bookmarks: {bookmarkname: sink_rev_id, ...}
242 where bookmarkname is a UTF-8 string.
242 where bookmarkname is a UTF-8 string.
243 """
243 """
244 pass
244 pass
245
245
246 class commandline(object):
246 class commandline(object):
247 def __init__(self, ui, command):
247 def __init__(self, ui, command):
248 self.ui = ui
248 self.ui = ui
249 self.command = command
249 self.command = command
250
250
251 def prerun(self):
251 def prerun(self):
252 pass
252 pass
253
253
254 def postrun(self):
254 def postrun(self):
255 pass
255 pass
256
256
257 def _cmdline(self, cmd, closestdin, *args, **kwargs):
257 def _cmdline(self, cmd, closestdin, *args, **kwargs):
258 cmdline = [self.command, cmd] + list(args)
258 cmdline = [self.command, cmd] + list(args)
259 for k, v in kwargs.iteritems():
259 for k, v in kwargs.iteritems():
260 if len(k) == 1:
260 if len(k) == 1:
261 cmdline.append('-' + k)
261 cmdline.append('-' + k)
262 else:
262 else:
263 cmdline.append('--' + k.replace('_', '-'))
263 cmdline.append('--' + k.replace('_', '-'))
264 try:
264 try:
265 if len(k) == 1:
265 if len(k) == 1:
266 cmdline.append('' + v)
266 cmdline.append('' + v)
267 else:
267 else:
268 cmdline[-1] += '=' + v
268 cmdline[-1] += '=' + v
269 except TypeError:
269 except TypeError:
270 pass
270 pass
271 cmdline = [util.shellquote(arg) for arg in cmdline]
271 cmdline = [util.shellquote(arg) for arg in cmdline]
272 if not self.ui.debugflag:
272 if not self.ui.debugflag:
273 cmdline += ['2>', util.nulldev]
273 cmdline += ['2>', util.nulldev]
274 if closestdin:
274 if closestdin:
275 cmdline += ['<', util.nulldev]
275 cmdline += ['<', util.nulldev]
276 cmdline = ' '.join(cmdline)
276 cmdline = ' '.join(cmdline)
277 return cmdline
277 return cmdline
278
278
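To illustrate the keyword-to-flag mapping above (made-up values; actual quoting and the null-device redirections depend on util.shellquote and util.nulldev for the platform):

    # self._cmdline('log', True, 'trunk', v=True, username='joe')
    # builds roughly: <command> log trunk -v --username=joe 2> /dev/null < /dev/null
    # Single-letter keys become short options; a non-string value such as True
    # trips the TypeError branch and leaves the flag bare, while string values
    # are attached with '=' (long options) or as a separate argument (short ones).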
279 def _run(self, cmd, *args, **kwargs):
279 def _run(self, cmd, *args, **kwargs):
280 return self._dorun(util.popen, cmd, True, *args, **kwargs)
280 return self._dorun(util.popen, cmd, True, *args, **kwargs)
281
281
282 def _run2(self, cmd, *args, **kwargs):
282 def _run2(self, cmd, *args, **kwargs):
283 return self._dorun(util.popen2, cmd, False, *args, **kwargs)
283 return self._dorun(util.popen2, cmd, False, *args, **kwargs)
284
284
285 def _dorun(self, openfunc, cmd, closestdin, *args, **kwargs):
285 def _dorun(self, openfunc, cmd, closestdin, *args, **kwargs):
286 cmdline = self._cmdline(cmd, closestdin, *args, **kwargs)
286 cmdline = self._cmdline(cmd, closestdin, *args, **kwargs)
287 self.ui.debug('running: %s\n' % (cmdline,))
287 self.ui.debug('running: %s\n' % (cmdline,))
288 self.prerun()
288 self.prerun()
289 try:
289 try:
290 return openfunc(cmdline)
290 return openfunc(cmdline)
291 finally:
291 finally:
292 self.postrun()
292 self.postrun()
293
293
294 def run(self, cmd, *args, **kwargs):
294 def run(self, cmd, *args, **kwargs):
295 fp = self._run(cmd, *args, **kwargs)
295 fp = self._run(cmd, *args, **kwargs)
296 output = fp.read()
296 output = fp.read()
297 self.ui.debug(output)
297 self.ui.debug(output)
298 return output, fp.close()
298 return output, fp.close()
299
299
300 def runlines(self, cmd, *args, **kwargs):
300 def runlines(self, cmd, *args, **kwargs):
301 fp = self._run(cmd, *args, **kwargs)
301 fp = self._run(cmd, *args, **kwargs)
302 output = fp.readlines()
302 output = fp.readlines()
303 self.ui.debug(''.join(output))
303 self.ui.debug(''.join(output))
304 return output, fp.close()
304 return output, fp.close()
305
305
306 def checkexit(self, status, output=''):
306 def checkexit(self, status, output=''):
307 if status:
307 if status:
308 if output:
308 if output:
309 self.ui.warn(_('%s error:\n') % self.command)
309 self.ui.warn(_('%s error:\n') % self.command)
310 self.ui.warn(output)
310 self.ui.warn(output)
311 msg = util.explain_exit(status)[0]
311 msg = util.explainexit(status)[0]
312 raise util.Abort('%s %s' % (self.command, msg))
312 raise util.Abort('%s %s' % (self.command, msg))
313
313
314 def run0(self, cmd, *args, **kwargs):
314 def run0(self, cmd, *args, **kwargs):
315 output, status = self.run(cmd, *args, **kwargs)
315 output, status = self.run(cmd, *args, **kwargs)
316 self.checkexit(status, output)
316 self.checkexit(status, output)
317 return output
317 return output
318
318
319 def runlines0(self, cmd, *args, **kwargs):
319 def runlines0(self, cmd, *args, **kwargs):
320 output, status = self.runlines(cmd, *args, **kwargs)
320 output, status = self.runlines(cmd, *args, **kwargs)
321 self.checkexit(status, ''.join(output))
321 self.checkexit(status, ''.join(output))
322 return output
322 return output
323
323
324 def getargmax(self):
324 def getargmax(self):
325 if '_argmax' in self.__dict__:
325 if '_argmax' in self.__dict__:
326 return self._argmax
326 return self._argmax
327
327
328 # POSIX requires at least 4096 bytes for ARG_MAX
328 # POSIX requires at least 4096 bytes for ARG_MAX
329 self._argmax = 4096
329 self._argmax = 4096
330 try:
330 try:
331 self._argmax = os.sysconf("SC_ARG_MAX")
331 self._argmax = os.sysconf("SC_ARG_MAX")
332 except:
332 except:
333 pass
333 pass
334
334
335 # Windows shells impose their own limits on command line length,
335 # Windows shells impose their own limits on command line length,
336 # down to 2047 bytes for cmd.exe under Windows NT/2k and 2500 bytes
336 # down to 2047 bytes for cmd.exe under Windows NT/2k and 2500 bytes
337 # for older 4nt.exe. See http://support.microsoft.com/kb/830473 for
337 # for older 4nt.exe. See http://support.microsoft.com/kb/830473 for
338 # details about cmd.exe limitations.
338 # details about cmd.exe limitations.
339
339
340 # Since ARG_MAX is for command line _and_ environment, lower our limit
340 # Since ARG_MAX is for command line _and_ environment, lower our limit
341 # (and keep Windows shells happy while doing this).
341 # (and keep Windows shells happy while doing this).
342
342
343 self._argmax = self._argmax / 2 - 1
343 self._argmax = self._argmax / 2 - 1
344 return self._argmax
344 return self._argmax
345
345
346 def limit_arglist(self, arglist, cmd, closestdin, *args, **kwargs):
346 def limit_arglist(self, arglist, cmd, closestdin, *args, **kwargs):
347 cmdlen = len(self._cmdline(cmd, closestdin, *args, **kwargs))
347 cmdlen = len(self._cmdline(cmd, closestdin, *args, **kwargs))
348 limit = self.getargmax() - cmdlen
348 limit = self.getargmax() - cmdlen
349 bytes = 0
349 bytes = 0
350 fl = []
350 fl = []
351 for fn in arglist:
351 for fn in arglist:
352 b = len(fn) + 3
352 b = len(fn) + 3
353 if bytes + b < limit or len(fl) == 0:
353 if bytes + b < limit or len(fl) == 0:
354 fl.append(fn)
354 fl.append(fn)
355 bytes += b
355 bytes += b
356 else:
356 else:
357 yield fl
357 yield fl
358 fl = [fn]
358 fl = [fn]
359 bytes = b
359 bytes = b
360 if fl:
360 if fl:
361 yield fl
361 yield fl
362
362
363 def xargs(self, arglist, cmd, *args, **kwargs):
363 def xargs(self, arglist, cmd, *args, **kwargs):
364 for l in self.limit_arglist(arglist, cmd, True, *args, **kwargs):
364 for l in self.limit_arglist(arglist, cmd, True, *args, **kwargs):
365 self.run0(cmd, *(list(args) + l), **kwargs)
365 self.run0(cmd, *(list(args) + l), **kwargs)
366
366
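Usage sketch for the batching helpers above (the file list is hypothetical): a subclass with an arbitrarily long list of paths can simply call

    self.xargs(filenames, 'add')

and limit_arglist() splits filenames into chunks whose combined length stays below the getargmax() estimate, so every generated command line fits the platform limit.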
367 class mapfile(dict):
367 class mapfile(dict):
368 def __init__(self, ui, path):
368 def __init__(self, ui, path):
369 super(mapfile, self).__init__()
369 super(mapfile, self).__init__()
370 self.ui = ui
370 self.ui = ui
371 self.path = path
371 self.path = path
372 self.fp = None
372 self.fp = None
373 self.order = []
373 self.order = []
374 self._read()
374 self._read()
375
375
376 def _read(self):
376 def _read(self):
377 if not self.path:
377 if not self.path:
378 return
378 return
379 try:
379 try:
380 fp = open(self.path, 'r')
380 fp = open(self.path, 'r')
381 except IOError, err:
381 except IOError, err:
382 if err.errno != errno.ENOENT:
382 if err.errno != errno.ENOENT:
383 raise
383 raise
384 return
384 return
385 for i, line in enumerate(fp):
385 for i, line in enumerate(fp):
386 try:
386 try:
387 key, value = line.splitlines()[0].rsplit(' ', 1)
387 key, value = line.splitlines()[0].rsplit(' ', 1)
388 except ValueError:
388 except ValueError:
389 raise util.Abort(
389 raise util.Abort(
390 _('syntax error in %s(%d): key/value pair expected')
390 _('syntax error in %s(%d): key/value pair expected')
391 % (self.path, i + 1))
391 % (self.path, i + 1))
392 if key not in self:
392 if key not in self:
393 self.order.append(key)
393 self.order.append(key)
394 super(mapfile, self).__setitem__(key, value)
394 super(mapfile, self).__setitem__(key, value)
395 fp.close()
395 fp.close()
396
396
397 def __setitem__(self, key, value):
397 def __setitem__(self, key, value):
398 if self.fp is None:
398 if self.fp is None:
399 try:
399 try:
400 self.fp = open(self.path, 'a')
400 self.fp = open(self.path, 'a')
401 except IOError, err:
401 except IOError, err:
402 raise util.Abort(_('could not open map file %r: %s') %
402 raise util.Abort(_('could not open map file %r: %s') %
403 (self.path, err.strerror))
403 (self.path, err.strerror))
404 self.fp.write('%s %s\n' % (key, value))
404 self.fp.write('%s %s\n' % (key, value))
405 self.fp.flush()
405 self.fp.flush()
406 super(mapfile, self).__setitem__(key, value)
406 super(mapfile, self).__setitem__(key, value)
407
407
408 def close(self):
408 def close(self):
409 if self.fp:
409 if self.fp:
410 self.fp.close()
410 self.fp.close()
411 self.fp = None
411 self.fp = None
@@ -1,1175 +1,1175 b''
1 # Subversion 1.4/1.5 Python API backend
1 # Subversion 1.4/1.5 Python API backend
2 #
2 #
3 # Copyright(C) 2007 Daniel Holth et al
3 # Copyright(C) 2007 Daniel Holth et al
4
4
5 import os
5 import os
6 import re
6 import re
7 import sys
7 import sys
8 import cPickle as pickle
8 import cPickle as pickle
9 import tempfile
9 import tempfile
10 import urllib
10 import urllib
11 import urllib2
11 import urllib2
12
12
13 from mercurial import strutil, scmutil, util, encoding
13 from mercurial import strutil, scmutil, util, encoding
14 from mercurial.i18n import _
14 from mercurial.i18n import _
15
15
16 # Subversion stuff. Works best with very recent Python SVN bindings
16 # Subversion stuff. Works best with very recent Python SVN bindings
17 # e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing
17 # e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing
18 # these bindings.
18 # these bindings.
19
19
20 from cStringIO import StringIO
20 from cStringIO import StringIO
21
21
22 from common import NoRepo, MissingTool, commit, encodeargs, decodeargs
22 from common import NoRepo, MissingTool, commit, encodeargs, decodeargs
23 from common import commandline, converter_source, converter_sink, mapfile
23 from common import commandline, converter_source, converter_sink, mapfile
24
24
25 try:
25 try:
26 from svn.core import SubversionException, Pool
26 from svn.core import SubversionException, Pool
27 import svn
27 import svn
28 import svn.client
28 import svn.client
29 import svn.core
29 import svn.core
30 import svn.ra
30 import svn.ra
31 import svn.delta
31 import svn.delta
32 import transport
32 import transport
33 import warnings
33 import warnings
34 warnings.filterwarnings('ignore',
34 warnings.filterwarnings('ignore',
35 module='svn.core',
35 module='svn.core',
36 category=DeprecationWarning)
36 category=DeprecationWarning)
37
37
38 except ImportError:
38 except ImportError:
39 svn = None
39 svn = None
40
40
41 class SvnPathNotFound(Exception):
41 class SvnPathNotFound(Exception):
42 pass
42 pass
43
43
44 def revsplit(rev):
44 def revsplit(rev):
45 """Parse a revision string and return (uuid, path, revnum)."""
45 """Parse a revision string and return (uuid, path, revnum)."""
46 url, revnum = rev.rsplit('@', 1)
46 url, revnum = rev.rsplit('@', 1)
47 parts = url.split('/', 1)
47 parts = url.split('/', 1)
48 mod = ''
48 mod = ''
49 if len(parts) > 1:
49 if len(parts) > 1:
50 mod = '/' + parts[1]
50 mod = '/' + parts[1]
51 return parts[0][4:], mod, int(revnum)
51 return parts[0][4:], mod, int(revnum)
52
52
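For example (uuid made up), revsplit('svn:6276b2e3-4a9f-4db4-a8d3-13562ff547b2/trunk/src@42') returns ('6276b2e3-4a9f-4db4-a8d3-13562ff547b2', '/trunk/src', 42): the 'svn:' prefix is stripped from the uuid, the module path keeps its leading slash, and the revision number is converted to an int.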
53 def geturl(path):
53 def geturl(path):
54 try:
54 try:
55 return svn.client.url_from_path(svn.core.svn_path_canonicalize(path))
55 return svn.client.url_from_path(svn.core.svn_path_canonicalize(path))
56 except SubversionException:
56 except SubversionException:
57 pass
57 pass
58 if os.path.isdir(path):
58 if os.path.isdir(path):
59 path = os.path.normpath(os.path.abspath(path))
59 path = os.path.normpath(os.path.abspath(path))
60 if os.name == 'nt':
60 if os.name == 'nt':
61 path = '/' + util.normpath(path)
61 path = '/' + util.normpath(path)
62 # Module URL is later compared with the repository URL returned
62 # Module URL is later compared with the repository URL returned
63 # by svn API, which is UTF-8.
63 # by svn API, which is UTF-8.
64 path = encoding.tolocal(path)
64 path = encoding.tolocal(path)
65 return 'file://%s' % urllib.quote(path)
65 return 'file://%s' % urllib.quote(path)
66 return path
66 return path
67
67
68 def optrev(number):
68 def optrev(number):
69 optrev = svn.core.svn_opt_revision_t()
69 optrev = svn.core.svn_opt_revision_t()
70 optrev.kind = svn.core.svn_opt_revision_number
70 optrev.kind = svn.core.svn_opt_revision_number
71 optrev.value.number = number
71 optrev.value.number = number
72 return optrev
72 return optrev
73
73
74 class changedpath(object):
74 class changedpath(object):
75 def __init__(self, p):
75 def __init__(self, p):
76 self.copyfrom_path = p.copyfrom_path
76 self.copyfrom_path = p.copyfrom_path
77 self.copyfrom_rev = p.copyfrom_rev
77 self.copyfrom_rev = p.copyfrom_rev
78 self.action = p.action
78 self.action = p.action
79
79
80 def get_log_child(fp, url, paths, start, end, limit=0, discover_changed_paths=True,
80 def get_log_child(fp, url, paths, start, end, limit=0, discover_changed_paths=True,
81 strict_node_history=False):
81 strict_node_history=False):
82 protocol = -1
82 protocol = -1
83 def receiver(orig_paths, revnum, author, date, message, pool):
83 def receiver(orig_paths, revnum, author, date, message, pool):
84 if orig_paths is not None:
84 if orig_paths is not None:
85 for k, v in orig_paths.iteritems():
85 for k, v in orig_paths.iteritems():
86 orig_paths[k] = changedpath(v)
86 orig_paths[k] = changedpath(v)
87 pickle.dump((orig_paths, revnum, author, date, message),
87 pickle.dump((orig_paths, revnum, author, date, message),
88 fp, protocol)
88 fp, protocol)
89
89
90 try:
90 try:
91 # Use an ra of our own so that our parent can consume
91 # Use an ra of our own so that our parent can consume
92 # our results without confusing the server.
92 # our results without confusing the server.
93 t = transport.SvnRaTransport(url=url)
93 t = transport.SvnRaTransport(url=url)
94 svn.ra.get_log(t.ra, paths, start, end, limit,
94 svn.ra.get_log(t.ra, paths, start, end, limit,
95 discover_changed_paths,
95 discover_changed_paths,
96 strict_node_history,
96 strict_node_history,
97 receiver)
97 receiver)
98 except SubversionException, (inst, num):
98 except SubversionException, (inst, num):
99 pickle.dump(num, fp, protocol)
99 pickle.dump(num, fp, protocol)
100 except IOError:
100 except IOError:
101 # Caller may interrupt the iteration
101 # Caller may interrupt the iteration
102 pickle.dump(None, fp, protocol)
102 pickle.dump(None, fp, protocol)
103 else:
103 else:
104 pickle.dump(None, fp, protocol)
104 pickle.dump(None, fp, protocol)
105 fp.close()
105 fp.close()
106 # With large history, cleanup process goes crazy and suddenly
106 # With large history, cleanup process goes crazy and suddenly
107 # consumes a *huge* amount of memory. The output file being closed,
107 # consumes a *huge* amount of memory. The output file being closed,
108 # there is no need for clean termination.
108 # there is no need for clean termination.
109 os._exit(0)
109 os._exit(0)
110
110
111 def debugsvnlog(ui, **opts):
111 def debugsvnlog(ui, **opts):
112 """Fetch SVN log in a subprocess and channel them back to parent to
112 """Fetch SVN log in a subprocess and channel them back to parent to
113 avoid memory collection issues.
113 avoid memory collection issues.
114 """
114 """
115 util.set_binary(sys.stdin)
115 util.setbinary(sys.stdin)
116 util.set_binary(sys.stdout)
116 util.setbinary(sys.stdout)
117 args = decodeargs(sys.stdin.read())
117 args = decodeargs(sys.stdin.read())
118 get_log_child(sys.stdout, *args)
118 get_log_child(sys.stdout, *args)
119
119
120 class logstream(object):
120 class logstream(object):
121 """Interruptible revision log iterator."""
121 """Interruptible revision log iterator."""
122 def __init__(self, stdout):
122 def __init__(self, stdout):
123 self._stdout = stdout
123 self._stdout = stdout
124
124
125 def __iter__(self):
125 def __iter__(self):
126 while True:
126 while True:
127 try:
127 try:
128 entry = pickle.load(self._stdout)
128 entry = pickle.load(self._stdout)
129 except EOFError:
129 except EOFError:
130 raise util.Abort(_('Mercurial failed to run itself, check'
130 raise util.Abort(_('Mercurial failed to run itself, check'
131 ' hg executable is in PATH'))
131 ' hg executable is in PATH'))
132 try:
132 try:
133 orig_paths, revnum, author, date, message = entry
133 orig_paths, revnum, author, date, message = entry
134 except:
134 except:
135 if entry is None:
135 if entry is None:
136 break
136 break
137 raise SubversionException("child raised exception", entry)
137 raise SubversionException("child raised exception", entry)
138 yield entry
138 yield entry
139
139
140 def close(self):
140 def close(self):
141 if self._stdout:
141 if self._stdout:
142 self._stdout.close()
142 self._stdout.close()
143 self._stdout = None
143 self._stdout = None
144
144
145
145
146 # Check to see if the given path is a local Subversion repo. Verify this by
146 # Check to see if the given path is a local Subversion repo. Verify this by
147 # looking for several svn-specific files and directories in the given
147 # looking for several svn-specific files and directories in the given
148 # directory.
148 # directory.
149 def filecheck(ui, path, proto):
149 def filecheck(ui, path, proto):
150 for x in ('locks', 'hooks', 'format', 'db'):
150 for x in ('locks', 'hooks', 'format', 'db'):
151 if not os.path.exists(os.path.join(path, x)):
151 if not os.path.exists(os.path.join(path, x)):
152 return False
152 return False
153 return True
153 return True
154
154
155 # Check to see if a given path is the root of an svn repo over http. We verify
155 # Check to see if a given path is the root of an svn repo over http. We verify
156 # this by requesting a version-controlled URL we know can't exist and looking
156 # this by requesting a version-controlled URL we know can't exist and looking
157 # for the svn-specific "not found" XML.
157 # for the svn-specific "not found" XML.
158 def httpcheck(ui, path, proto):
158 def httpcheck(ui, path, proto):
159 try:
159 try:
160 opener = urllib2.build_opener()
160 opener = urllib2.build_opener()
161 rsp = opener.open('%s://%s/!svn/ver/0/.svn' % (proto, path))
161 rsp = opener.open('%s://%s/!svn/ver/0/.svn' % (proto, path))
162 data = rsp.read()
162 data = rsp.read()
163 except urllib2.HTTPError, inst:
163 except urllib2.HTTPError, inst:
164 if inst.code != 404:
164 if inst.code != 404:
165 # Except for 404 we cannot know for sure this is not an svn repo
165 # Except for 404 we cannot know for sure this is not an svn repo
166 ui.warn(_('svn: cannot probe remote repository, assume it could '
166 ui.warn(_('svn: cannot probe remote repository, assume it could '
167 'be a subversion repository. Use --source-type if you '
167 'be a subversion repository. Use --source-type if you '
168 'know better.\n'))
168 'know better.\n'))
169 return True
169 return True
170 data = inst.fp.read()
170 data = inst.fp.read()
171 except:
171 except:
172 # Could be urllib2.URLError if the URL is invalid or anything else.
172 # Could be urllib2.URLError if the URL is invalid or anything else.
173 return False
173 return False
174 return '<m:human-readable errcode="160013">' in data
174 return '<m:human-readable errcode="160013">' in data
175
175
176 protomap = {'http': httpcheck,
176 protomap = {'http': httpcheck,
177 'https': httpcheck,
177 'https': httpcheck,
178 'file': filecheck,
178 'file': filecheck,
179 }
179 }
180 def issvnurl(ui, url):
180 def issvnurl(ui, url):
181 try:
181 try:
182 proto, path = url.split('://', 1)
182 proto, path = url.split('://', 1)
183 if proto == 'file':
183 if proto == 'file':
184 path = urllib.url2pathname(path)
184 path = urllib.url2pathname(path)
185 except ValueError:
185 except ValueError:
186 proto = 'file'
186 proto = 'file'
187 path = os.path.abspath(url)
187 path = os.path.abspath(url)
188 if proto == 'file':
188 if proto == 'file':
189 path = path.replace(os.sep, '/')
189 path = path.replace(os.sep, '/')
190 check = protomap.get(proto, lambda *args: False)
190 check = protomap.get(proto, lambda *args: False)
191 while '/' in path:
191 while '/' in path:
192 if check(ui, path, proto):
192 if check(ui, path, proto):
193 return True
193 return True
194 path = path.rsplit('/', 1)[0]
194 path = path.rsplit('/', 1)[0]
195 return False
195 return False
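# --- Illustrative sketch (editorial addition, not part of the original
# module): issvnurl() above probes a URL by stripping one trailing path
# component at a time and handing each candidate to the protocol-specific
# checker from protomap. The helper and the sample path below are
# hypothetical.
def _candidate_roots(path):
    # e.g. 'host/svn/repo/trunk/src' -> 'host/svn/repo/trunk'
    #      -> 'host/svn/repo' -> 'host/svn'
    while '/' in path:
        yield path
        path = path.rsplit('/', 1)[0]
# list(_candidate_roots('host/svn/repo/trunk/src')) shows the probe order.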
196
196
197 # SVN conversion code stolen from bzr-svn and tailor
197 # SVN conversion code stolen from bzr-svn and tailor
198 #
198 #
199 # Subversion looks like a versioned filesystem; branch structures
199 # Subversion looks like a versioned filesystem; branch structures
200 # are defined by conventions and not enforced by the tool. First,
200 # are defined by conventions and not enforced by the tool. First,
201 # we define the potential branches (modules) as "trunk" and "branches"
201 # we define the potential branches (modules) as "trunk" and "branches"
202 # child directories. Revisions are then identified by their
202 # child directories. Revisions are then identified by their
203 # module and revision number (and a repository identifier).
203 # module and revision number (and a repository identifier).
204 #
204 #
205 # The revision graph is really a tree (or a forest). By default, a
205 # The revision graph is really a tree (or a forest). By default, a
206 # revision parent is the previous revision in the same module. If the
206 # revision parent is the previous revision in the same module. If the
207 # module directory is copied/moved from another module then the
207 # module directory is copied/moved from another module then the
208 # revision is the module root and its parent the source revision in
208 # revision is the module root and its parent the source revision in
209 # the parent module. A revision has at most one parent.
209 # the parent module. A revision has at most one parent.
210 #
210 #
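# --- Minimal sketch (editorial addition) of the revision identifier used
# throughout this source, built by revid() and parsed by revnum() further
# down: 'svn:<uuid><module>@<revnum>', where <module> is either empty or
# starts with a slash. The UUID, module and revision below are made up.
def _example_revid():
    uuid = 'example-uuid'                    # hypothetical repository UUID
    module, revnum = '/trunk', 42            # hypothetical module and revision
    rev = 'svn:%s%s@%s' % (uuid, module, revnum)
    assert int(rev.split('@')[-1]) == revnum  # same parsing as revnum()
    return rev                               # 'svn:example-uuid/trunk@42'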
211 class svn_source(converter_source):
211 class svn_source(converter_source):
212 def __init__(self, ui, url, rev=None):
212 def __init__(self, ui, url, rev=None):
213 super(svn_source, self).__init__(ui, url, rev=rev)
213 super(svn_source, self).__init__(ui, url, rev=rev)
214
214
215 if not (url.startswith('svn://') or url.startswith('svn+ssh://') or
215 if not (url.startswith('svn://') or url.startswith('svn+ssh://') or
216 (os.path.exists(url) and
216 (os.path.exists(url) and
217 os.path.exists(os.path.join(url, '.svn'))) or
217 os.path.exists(os.path.join(url, '.svn'))) or
218 issvnurl(ui, url)):
218 issvnurl(ui, url)):
219 raise NoRepo(_("%s does not look like a Subversion repository")
219 raise NoRepo(_("%s does not look like a Subversion repository")
220 % url)
220 % url)
221 if svn is None:
221 if svn is None:
222 raise MissingTool(_('Could not load Subversion python bindings'))
222 raise MissingTool(_('Could not load Subversion python bindings'))
223
223
224 try:
224 try:
225 version = svn.core.SVN_VER_MAJOR, svn.core.SVN_VER_MINOR
225 version = svn.core.SVN_VER_MAJOR, svn.core.SVN_VER_MINOR
226 if version < (1, 4):
226 if version < (1, 4):
227 raise MissingTool(_('Subversion python bindings %d.%d found, '
227 raise MissingTool(_('Subversion python bindings %d.%d found, '
228 '1.4 or later required') % version)
228 '1.4 or later required') % version)
229 except AttributeError:
229 except AttributeError:
230 raise MissingTool(_('Subversion python bindings are too old, 1.4 '
230 raise MissingTool(_('Subversion python bindings are too old, 1.4 '
231 'or later required'))
231 'or later required'))
232
232
233 self.lastrevs = {}
233 self.lastrevs = {}
234
234
235 latest = None
235 latest = None
236 try:
236 try:
237 # Support file://path@rev syntax. Useful e.g. to convert
237 # Support file://path@rev syntax. Useful e.g. to convert
238 # deleted branches.
238 # deleted branches.
239 at = url.rfind('@')
239 at = url.rfind('@')
240 if at >= 0:
240 if at >= 0:
241 latest = int(url[at + 1:])
241 latest = int(url[at + 1:])
242 url = url[:at]
242 url = url[:at]
243 except ValueError:
243 except ValueError:
244 pass
244 pass
245 self.url = geturl(url)
245 self.url = geturl(url)
246 self.encoding = 'UTF-8' # Subversion is always nominal UTF-8
246 self.encoding = 'UTF-8' # Subversion is always nominal UTF-8
247 try:
247 try:
248 self.transport = transport.SvnRaTransport(url=self.url)
248 self.transport = transport.SvnRaTransport(url=self.url)
249 self.ra = self.transport.ra
249 self.ra = self.transport.ra
250 self.ctx = self.transport.client
250 self.ctx = self.transport.client
251 self.baseurl = svn.ra.get_repos_root(self.ra)
251 self.baseurl = svn.ra.get_repos_root(self.ra)
252 # Module is either empty or a repository path starting with
252 # Module is either empty or a repository path starting with
253 # a slash and not ending with a slash.
253 # a slash and not ending with a slash.
254 self.module = urllib.unquote(self.url[len(self.baseurl):])
254 self.module = urllib.unquote(self.url[len(self.baseurl):])
255 self.prevmodule = None
255 self.prevmodule = None
256 self.rootmodule = self.module
256 self.rootmodule = self.module
257 self.commits = {}
257 self.commits = {}
258 self.paths = {}
258 self.paths = {}
259 self.uuid = svn.ra.get_uuid(self.ra)
259 self.uuid = svn.ra.get_uuid(self.ra)
260 except SubversionException:
260 except SubversionException:
261 ui.traceback()
261 ui.traceback()
262 raise NoRepo(_("%s does not look like a Subversion repository")
262 raise NoRepo(_("%s does not look like a Subversion repository")
263 % self.url)
263 % self.url)
264
264
265 if rev:
265 if rev:
266 try:
266 try:
267 latest = int(rev)
267 latest = int(rev)
268 except ValueError:
268 except ValueError:
269 raise util.Abort(_('svn: revision %s is not an integer') % rev)
269 raise util.Abort(_('svn: revision %s is not an integer') % rev)
270
270
271 self.trunkname = self.ui.config('convert', 'svn.trunk', 'trunk').strip('/')
271 self.trunkname = self.ui.config('convert', 'svn.trunk', 'trunk').strip('/')
272 self.startrev = self.ui.config('convert', 'svn.startrev', default=0)
272 self.startrev = self.ui.config('convert', 'svn.startrev', default=0)
273 try:
273 try:
274 self.startrev = int(self.startrev)
274 self.startrev = int(self.startrev)
275 if self.startrev < 0:
275 if self.startrev < 0:
276 self.startrev = 0
276 self.startrev = 0
277 except ValueError:
277 except ValueError:
278 raise util.Abort(_('svn: start revision %s is not an integer')
278 raise util.Abort(_('svn: start revision %s is not an integer')
279 % self.startrev)
279 % self.startrev)
280
280
281 try:
281 try:
282 self.head = self.latest(self.module, latest)
282 self.head = self.latest(self.module, latest)
283 except SvnPathNotFound:
283 except SvnPathNotFound:
284 self.head = None
284 self.head = None
285 if not self.head:
285 if not self.head:
286 raise util.Abort(_('no revision found in module %s')
286 raise util.Abort(_('no revision found in module %s')
287 % self.module)
287 % self.module)
288 self.last_changed = self.revnum(self.head)
288 self.last_changed = self.revnum(self.head)
289
289
290 self._changescache = None
290 self._changescache = None
291
291
292 if os.path.exists(os.path.join(url, '.svn/entries')):
292 if os.path.exists(os.path.join(url, '.svn/entries')):
293 self.wc = url
293 self.wc = url
294 else:
294 else:
295 self.wc = None
295 self.wc = None
296 self.convertfp = None
296 self.convertfp = None
297
297
298 def setrevmap(self, revmap):
298 def setrevmap(self, revmap):
299 lastrevs = {}
299 lastrevs = {}
300 for revid in revmap.iterkeys():
300 for revid in revmap.iterkeys():
301 uuid, module, revnum = revsplit(revid)
301 uuid, module, revnum = revsplit(revid)
302 lastrevnum = lastrevs.setdefault(module, revnum)
302 lastrevnum = lastrevs.setdefault(module, revnum)
303 if revnum > lastrevnum:
303 if revnum > lastrevnum:
304 lastrevs[module] = revnum
304 lastrevs[module] = revnum
305 self.lastrevs = lastrevs
305 self.lastrevs = lastrevs
306
306
307 def exists(self, path, optrev):
307 def exists(self, path, optrev):
308 try:
308 try:
309 svn.client.ls(self.url.rstrip('/') + '/' + urllib.quote(path),
309 svn.client.ls(self.url.rstrip('/') + '/' + urllib.quote(path),
310 optrev, False, self.ctx)
310 optrev, False, self.ctx)
311 return True
311 return True
312 except SubversionException:
312 except SubversionException:
313 return False
313 return False
314
314
315 def getheads(self):
315 def getheads(self):
316
316
317 def isdir(path, revnum):
317 def isdir(path, revnum):
318 kind = self._checkpath(path, revnum)
318 kind = self._checkpath(path, revnum)
319 return kind == svn.core.svn_node_dir
319 return kind == svn.core.svn_node_dir
320
320
321 def getcfgpath(name, rev):
321 def getcfgpath(name, rev):
322 cfgpath = self.ui.config('convert', 'svn.' + name)
322 cfgpath = self.ui.config('convert', 'svn.' + name)
323 if cfgpath is not None and cfgpath.strip() == '':
323 if cfgpath is not None and cfgpath.strip() == '':
324 return None
324 return None
325 path = (cfgpath or name).strip('/')
325 path = (cfgpath or name).strip('/')
326 if not self.exists(path, rev):
326 if not self.exists(path, rev):
327 if self.module.endswith(path) and name == 'trunk':
327 if self.module.endswith(path) and name == 'trunk':
328 # we are converting from inside this directory
328 # we are converting from inside this directory
329 return None
329 return None
330 if cfgpath:
330 if cfgpath:
331 raise util.Abort(_('expected %s to be at %r, but not found')
331 raise util.Abort(_('expected %s to be at %r, but not found')
332 % (name, path))
332 % (name, path))
333 return None
333 return None
334 self.ui.note(_('found %s at %r\n') % (name, path))
334 self.ui.note(_('found %s at %r\n') % (name, path))
335 return path
335 return path
336
336
337 rev = optrev(self.last_changed)
337 rev = optrev(self.last_changed)
338 oldmodule = ''
338 oldmodule = ''
339 trunk = getcfgpath('trunk', rev)
339 trunk = getcfgpath('trunk', rev)
340 self.tags = getcfgpath('tags', rev)
340 self.tags = getcfgpath('tags', rev)
341 branches = getcfgpath('branches', rev)
341 branches = getcfgpath('branches', rev)
342
342
343 # If the project has a trunk or branches, we will extract heads
343 # If the project has a trunk or branches, we will extract heads
344 # from them. We keep the project root otherwise.
344 # from them. We keep the project root otherwise.
345 if trunk:
345 if trunk:
346 oldmodule = self.module or ''
346 oldmodule = self.module or ''
347 self.module += '/' + trunk
347 self.module += '/' + trunk
348 self.head = self.latest(self.module, self.last_changed)
348 self.head = self.latest(self.module, self.last_changed)
349 if not self.head:
349 if not self.head:
350 raise util.Abort(_('no revision found in module %s')
350 raise util.Abort(_('no revision found in module %s')
351 % self.module)
351 % self.module)
352
352
353 # First head in the list is the module's head
353 # First head in the list is the module's head
354 self.heads = [self.head]
354 self.heads = [self.head]
355 if self.tags is not None:
355 if self.tags is not None:
356 self.tags = '%s/%s' % (oldmodule, (self.tags or 'tags'))
356 self.tags = '%s/%s' % (oldmodule, (self.tags or 'tags'))
357
357
358 # Check if branches bring a few more heads to the list
358 # Check if branches bring a few more heads to the list
359 if branches:
359 if branches:
360 rpath = self.url.strip('/')
360 rpath = self.url.strip('/')
361 branchnames = svn.client.ls(rpath + '/' + urllib.quote(branches),
361 branchnames = svn.client.ls(rpath + '/' + urllib.quote(branches),
362 rev, False, self.ctx)
362 rev, False, self.ctx)
363 for branch in branchnames.keys():
363 for branch in branchnames.keys():
364 module = '%s/%s/%s' % (oldmodule, branches, branch)
364 module = '%s/%s/%s' % (oldmodule, branches, branch)
365 if not isdir(module, self.last_changed):
365 if not isdir(module, self.last_changed):
366 continue
366 continue
367 brevid = self.latest(module, self.last_changed)
367 brevid = self.latest(module, self.last_changed)
368 if not brevid:
368 if not brevid:
369 self.ui.note(_('ignoring empty branch %s\n') % branch)
369 self.ui.note(_('ignoring empty branch %s\n') % branch)
370 continue
370 continue
371 self.ui.note(_('found branch %s at %d\n') %
371 self.ui.note(_('found branch %s at %d\n') %
372 (branch, self.revnum(brevid)))
372 (branch, self.revnum(brevid)))
373 self.heads.append(brevid)
373 self.heads.append(brevid)
374
374
375 if self.startrev and self.heads:
375 if self.startrev and self.heads:
376 if len(self.heads) > 1:
376 if len(self.heads) > 1:
377 raise util.Abort(_('svn: start revision is not supported '
377 raise util.Abort(_('svn: start revision is not supported '
378 'with more than one branch'))
378 'with more than one branch'))
379 revnum = self.revnum(self.heads[0])
379 revnum = self.revnum(self.heads[0])
380 if revnum < self.startrev:
380 if revnum < self.startrev:
381 raise util.Abort(
381 raise util.Abort(
382 _('svn: no revision found after start revision %d')
382 _('svn: no revision found after start revision %d')
383 % self.startrev)
383 % self.startrev)
384
384
385 return self.heads
385 return self.heads
386
386
387 def getchanges(self, rev):
387 def getchanges(self, rev):
388 if self._changescache and self._changescache[0] == rev:
388 if self._changescache and self._changescache[0] == rev:
389 return self._changescache[1]
389 return self._changescache[1]
390 self._changescache = None
390 self._changescache = None
391 (paths, parents) = self.paths[rev]
391 (paths, parents) = self.paths[rev]
392 if parents:
392 if parents:
393 files, self.removed, copies = self.expandpaths(rev, paths, parents)
393 files, self.removed, copies = self.expandpaths(rev, paths, parents)
394 else:
394 else:
395 # Perform a full checkout on roots
395 # Perform a full checkout on roots
396 uuid, module, revnum = revsplit(rev)
396 uuid, module, revnum = revsplit(rev)
397 entries = svn.client.ls(self.baseurl + urllib.quote(module),
397 entries = svn.client.ls(self.baseurl + urllib.quote(module),
398 optrev(revnum), True, self.ctx)
398 optrev(revnum), True, self.ctx)
399 files = [n for n, e in entries.iteritems()
399 files = [n for n, e in entries.iteritems()
400 if e.kind == svn.core.svn_node_file]
400 if e.kind == svn.core.svn_node_file]
401 copies = {}
401 copies = {}
402 self.removed = set()
402 self.removed = set()
403
403
404 files.sort()
404 files.sort()
405 files = zip(files, [rev] * len(files))
405 files = zip(files, [rev] * len(files))
406
406
407 # caller caches the result, so free it here to release memory
407 # caller caches the result, so free it here to release memory
408 del self.paths[rev]
408 del self.paths[rev]
409 return (files, copies)
409 return (files, copies)
410
410
411 def getchangedfiles(self, rev, i):
411 def getchangedfiles(self, rev, i):
412 changes = self.getchanges(rev)
412 changes = self.getchanges(rev)
413 self._changescache = (rev, changes)
413 self._changescache = (rev, changes)
414 return [f[0] for f in changes[0]]
414 return [f[0] for f in changes[0]]
415
415
416 def getcommit(self, rev):
416 def getcommit(self, rev):
417 if rev not in self.commits:
417 if rev not in self.commits:
418 uuid, module, revnum = revsplit(rev)
418 uuid, module, revnum = revsplit(rev)
419 self.module = module
419 self.module = module
420 self.reparent(module)
420 self.reparent(module)
421 # We assume that:
421 # We assume that:
422 # - requests for revisions after "stop" come from the
422 # - requests for revisions after "stop" come from the
423 # revision graph backward traversal. Cache all of them
423 # revision graph backward traversal. Cache all of them
424 # down to stop; they will be used eventually.
424 # down to stop; they will be used eventually.
425 # - requests for revisions before "stop" come to get
425 # - requests for revisions before "stop" come to get
426 # isolated branches' parents. Just fetch what is needed.
426 # isolated branches' parents. Just fetch what is needed.
427 stop = self.lastrevs.get(module, 0)
427 stop = self.lastrevs.get(module, 0)
428 if revnum < stop:
428 if revnum < stop:
429 stop = revnum + 1
429 stop = revnum + 1
430 self._fetch_revisions(revnum, stop)
430 self._fetch_revisions(revnum, stop)
431 commit = self.commits[rev]
431 commit = self.commits[rev]
432 # caller caches the result, so free it here to release memory
432 # caller caches the result, so free it here to release memory
433 del self.commits[rev]
433 del self.commits[rev]
434 return commit
434 return commit
435
435
436 def gettags(self):
436 def gettags(self):
437 tags = {}
437 tags = {}
438 if self.tags is None:
438 if self.tags is None:
439 return tags
439 return tags
440
440
441 # svn tags are just a convention: project branches left in a
441 # svn tags are just a convention: project branches left in a
442 # 'tags' directory. There is no other relationship than
442 # 'tags' directory. There is no other relationship than
443 # ancestry, which is expensive to discover and makes them hard
443 # ancestry, which is expensive to discover and makes them hard
444 # to update incrementally. Worse, past revisions may be
444 # to update incrementally. Worse, past revisions may be
445 # referenced by tags far away in the future, requiring a deep
445 # referenced by tags far away in the future, requiring a deep
446 # history traversal on every calculation. Current code
446 # history traversal on every calculation. Current code
447 # performs a single backward traversal, tracking moves within
447 # performs a single backward traversal, tracking moves within
448 # the tags directory (tag renaming) and recording a new tag
448 # the tags directory (tag renaming) and recording a new tag
449 # every time a project is copied from outside the tags
449 # every time a project is copied from outside the tags
450 # directory. It also lists deleted tags; this behaviour may
450 # directory. It also lists deleted tags; this behaviour may
451 # change in the future.
451 # change in the future.
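# Illustration (hypothetical paths, editorial addition): a log entry that
# copied /branches/1.0 into /tags/release-1.0 appears in the "copies" list
# built below as ('/branches/1.0', <copyfrom_rev>, '/tags/release-1.0'),
# i.e. (copyfrom_path, copyfrom_rev, destination path); it is then kept in
# "pendings" until it can be resolved to a concrete tag revision.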
452 pendings = []
452 pendings = []
453 tagspath = self.tags
453 tagspath = self.tags
454 start = svn.ra.get_latest_revnum(self.ra)
454 start = svn.ra.get_latest_revnum(self.ra)
455 stream = self._getlog([self.tags], start, self.startrev)
455 stream = self._getlog([self.tags], start, self.startrev)
456 try:
456 try:
457 for entry in stream:
457 for entry in stream:
458 origpaths, revnum, author, date, message = entry
458 origpaths, revnum, author, date, message = entry
459 copies = [(e.copyfrom_path, e.copyfrom_rev, p) for p, e
459 copies = [(e.copyfrom_path, e.copyfrom_rev, p) for p, e
460 in origpaths.iteritems() if e.copyfrom_path]
460 in origpaths.iteritems() if e.copyfrom_path]
461 # Apply moves/copies from more specific to general
461 # Apply moves/copies from more specific to general
462 copies.sort(reverse=True)
462 copies.sort(reverse=True)
463
463
464 srctagspath = tagspath
464 srctagspath = tagspath
465 if copies and copies[-1][2] == tagspath:
465 if copies and copies[-1][2] == tagspath:
466 # Track tags directory moves
466 # Track tags directory moves
467 srctagspath = copies.pop()[0]
467 srctagspath = copies.pop()[0]
468
468
469 for source, sourcerev, dest in copies:
469 for source, sourcerev, dest in copies:
470 if not dest.startswith(tagspath + '/'):
470 if not dest.startswith(tagspath + '/'):
471 continue
471 continue
472 for tag in pendings:
472 for tag in pendings:
473 if tag[0].startswith(dest):
473 if tag[0].startswith(dest):
474 tagpath = source + tag[0][len(dest):]
474 tagpath = source + tag[0][len(dest):]
475 tag[:2] = [tagpath, sourcerev]
475 tag[:2] = [tagpath, sourcerev]
476 break
476 break
477 else:
477 else:
478 pendings.append([source, sourcerev, dest])
478 pendings.append([source, sourcerev, dest])
479
479
480 # Filter out tags with children coming from different
480 # Filter out tags with children coming from different
481 # parts of the repository like:
481 # parts of the repository like:
482 # /tags/tag.1 (from /trunk:10)
482 # /tags/tag.1 (from /trunk:10)
483 # /tags/tag.1/foo (from /branches/foo:12)
483 # /tags/tag.1/foo (from /branches/foo:12)
484 # Here /tags/tag.1 is discarded, as well as its children.
484 # Here /tags/tag.1 is discarded, as well as its children.
485 # It happens with tools like cvs2svn. Such tags cannot
485 # It happens with tools like cvs2svn. Such tags cannot
486 # be represented in Mercurial.
486 # be represented in Mercurial.
487 addeds = dict((p, e.copyfrom_path) for p, e
487 addeds = dict((p, e.copyfrom_path) for p, e
488 in origpaths.iteritems()
488 in origpaths.iteritems()
489 if e.action == 'A' and e.copyfrom_path)
489 if e.action == 'A' and e.copyfrom_path)
490 badroots = set()
490 badroots = set()
491 for destroot in addeds:
491 for destroot in addeds:
492 for source, sourcerev, dest in pendings:
492 for source, sourcerev, dest in pendings:
493 if (not dest.startswith(destroot + '/')
493 if (not dest.startswith(destroot + '/')
494 or source.startswith(addeds[destroot] + '/')):
494 or source.startswith(addeds[destroot] + '/')):
495 continue
495 continue
496 badroots.add(destroot)
496 badroots.add(destroot)
497 break
497 break
498
498
499 for badroot in badroots:
499 for badroot in badroots:
500 pendings = [p for p in pendings if p[2] != badroot
500 pendings = [p for p in pendings if p[2] != badroot
501 and not p[2].startswith(badroot + '/')]
501 and not p[2].startswith(badroot + '/')]
502
502
503 # Tell tag renamings from tag creations
503 # Tell tag renamings from tag creations
504 remainings = []
504 remainings = []
505 for source, sourcerev, dest in pendings:
505 for source, sourcerev, dest in pendings:
506 tagname = dest.split('/')[-1]
506 tagname = dest.split('/')[-1]
507 if source.startswith(srctagspath):
507 if source.startswith(srctagspath):
508 remainings.append([source, sourcerev, tagname])
508 remainings.append([source, sourcerev, tagname])
509 continue
509 continue
510 if tagname in tags:
510 if tagname in tags:
511 # Keep the latest tag value
511 # Keep the latest tag value
512 continue
512 continue
513 # The source revision may be fake; get one with changes
513 # The source revision may be fake; get one with changes
514 try:
514 try:
515 tagid = self.latest(source, sourcerev)
515 tagid = self.latest(source, sourcerev)
516 if tagid and tagname not in tags:
516 if tagid and tagname not in tags:
517 tags[tagname] = tagid
517 tags[tagname] = tagid
518 except SvnPathNotFound:
518 except SvnPathNotFound:
519 # It happens when we are following directories
519 # It happens when we are following directories
520 # we assumed were copied with their parents
520 # we assumed were copied with their parents
521 # but were really created in the tag
521 # but were really created in the tag
522 # directory.
522 # directory.
523 pass
523 pass
524 pendings = remainings
524 pendings = remainings
525 tagspath = srctagspath
525 tagspath = srctagspath
526 finally:
526 finally:
527 stream.close()
527 stream.close()
528 return tags
528 return tags
529
529
530 def converted(self, rev, destrev):
530 def converted(self, rev, destrev):
531 if not self.wc:
531 if not self.wc:
532 return
532 return
533 if self.convertfp is None:
533 if self.convertfp is None:
534 self.convertfp = open(os.path.join(self.wc, '.svn', 'hg-shamap'),
534 self.convertfp = open(os.path.join(self.wc, '.svn', 'hg-shamap'),
535 'a')
535 'a')
536 self.convertfp.write('%s %d\n' % (destrev, self.revnum(rev)))
536 self.convertfp.write('%s %d\n' % (destrev, self.revnum(rev)))
537 self.convertfp.flush()
537 self.convertfp.flush()
538
538
539 def revid(self, revnum, module=None):
539 def revid(self, revnum, module=None):
540 return 'svn:%s%s@%s' % (self.uuid, module or self.module, revnum)
540 return 'svn:%s%s@%s' % (self.uuid, module or self.module, revnum)
541
541
542 def revnum(self, rev):
542 def revnum(self, rev):
543 return int(rev.split('@')[-1])
543 return int(rev.split('@')[-1])
544
544
545 def latest(self, path, stop=0):
545 def latest(self, path, stop=0):
546 """Find the latest revid affecting path, up to stop. It may return
546 """Find the latest revid affecting path, up to stop. It may return
547 a revision in a different module, since a branch may be moved without
547 a revision in a different module, since a branch may be moved without
548 a change being reported. Return None if the computed module does not
548 a change being reported. Return None if the computed module does not
549 belong to the rootmodule subtree.
549 belong to the rootmodule subtree.
550 """
550 """
551 if not path.startswith(self.rootmodule):
551 if not path.startswith(self.rootmodule):
552 # Requests on foreign branches may be forbidden at server level
552 # Requests on foreign branches may be forbidden at server level
553 self.ui.debug('ignoring foreign branch %r\n' % path)
553 self.ui.debug('ignoring foreign branch %r\n' % path)
554 return None
554 return None
555
555
556 if not stop:
556 if not stop:
557 stop = svn.ra.get_latest_revnum(self.ra)
557 stop = svn.ra.get_latest_revnum(self.ra)
558 try:
558 try:
559 prevmodule = self.reparent('')
559 prevmodule = self.reparent('')
560 dirent = svn.ra.stat(self.ra, path.strip('/'), stop)
560 dirent = svn.ra.stat(self.ra, path.strip('/'), stop)
561 self.reparent(prevmodule)
561 self.reparent(prevmodule)
562 except SubversionException:
562 except SubversionException:
563 dirent = None
563 dirent = None
564 if not dirent:
564 if not dirent:
565 raise SvnPathNotFound(_('%s not found up to revision %d')
565 raise SvnPathNotFound(_('%s not found up to revision %d')
566 % (path, stop))
566 % (path, stop))
567
567
568 # stat() gives us the previous revision on this line of
568 # stat() gives us the previous revision on this line of
569 # development, but it might be in *another module*. Fetch the
569 # development, but it might be in *another module*. Fetch the
570 # log and detect renames down to the latest revision.
570 # log and detect renames down to the latest revision.
571 stream = self._getlog([path], stop, dirent.created_rev)
571 stream = self._getlog([path], stop, dirent.created_rev)
572 try:
572 try:
573 for entry in stream:
573 for entry in stream:
574 paths, revnum, author, date, message = entry
574 paths, revnum, author, date, message = entry
575 if revnum <= dirent.created_rev:
575 if revnum <= dirent.created_rev:
576 break
576 break
577
577
578 for p in paths:
578 for p in paths:
579 if not path.startswith(p) or not paths[p].copyfrom_path:
579 if not path.startswith(p) or not paths[p].copyfrom_path:
580 continue
580 continue
581 newpath = paths[p].copyfrom_path + path[len(p):]
581 newpath = paths[p].copyfrom_path + path[len(p):]
582 self.ui.debug("branch renamed from %s to %s at %d\n" %
582 self.ui.debug("branch renamed from %s to %s at %d\n" %
583 (path, newpath, revnum))
583 (path, newpath, revnum))
584 path = newpath
584 path = newpath
585 break
585 break
586 finally:
586 finally:
587 stream.close()
587 stream.close()
588
588
589 if not path.startswith(self.rootmodule):
589 if not path.startswith(self.rootmodule):
590 self.ui.debug('ignoring foreign branch %r\n' % path)
590 self.ui.debug('ignoring foreign branch %r\n' % path)
591 return None
591 return None
592 return self.revid(dirent.created_rev, path)
592 return self.revid(dirent.created_rev, path)
593
593
594 def reparent(self, module):
594 def reparent(self, module):
595 """Reparent the svn transport and return the previous parent."""
595 """Reparent the svn transport and return the previous parent."""
596 if self.prevmodule == module:
596 if self.prevmodule == module:
597 return module
597 return module
598 svnurl = self.baseurl + urllib.quote(module)
598 svnurl = self.baseurl + urllib.quote(module)
599 prevmodule = self.prevmodule
599 prevmodule = self.prevmodule
600 if prevmodule is None:
600 if prevmodule is None:
601 prevmodule = ''
601 prevmodule = ''
602 self.ui.debug("reparent to %s\n" % svnurl)
602 self.ui.debug("reparent to %s\n" % svnurl)
603 svn.ra.reparent(self.ra, svnurl)
603 svn.ra.reparent(self.ra, svnurl)
604 self.prevmodule = module
604 self.prevmodule = module
605 return prevmodule
605 return prevmodule
606
606
607 def expandpaths(self, rev, paths, parents):
607 def expandpaths(self, rev, paths, parents):
608 changed, removed = set(), set()
608 changed, removed = set(), set()
609 copies = {}
609 copies = {}
610
610
611 new_module, revnum = revsplit(rev)[1:]
611 new_module, revnum = revsplit(rev)[1:]
612 if new_module != self.module:
612 if new_module != self.module:
613 self.module = new_module
613 self.module = new_module
614 self.reparent(self.module)
614 self.reparent(self.module)
615
615
616 for i, (path, ent) in enumerate(paths):
616 for i, (path, ent) in enumerate(paths):
617 self.ui.progress(_('scanning paths'), i, item=path,
617 self.ui.progress(_('scanning paths'), i, item=path,
618 total=len(paths))
618 total=len(paths))
619 entrypath = self.getrelpath(path)
619 entrypath = self.getrelpath(path)
620
620
621 kind = self._checkpath(entrypath, revnum)
621 kind = self._checkpath(entrypath, revnum)
622 if kind == svn.core.svn_node_file:
622 if kind == svn.core.svn_node_file:
623 changed.add(self.recode(entrypath))
623 changed.add(self.recode(entrypath))
624 if not ent.copyfrom_path or not parents:
624 if not ent.copyfrom_path or not parents:
625 continue
625 continue
626 # Copy sources not in parent revisions cannot be
626 # Copy sources not in parent revisions cannot be
627 # represented; ignore their origin for now
627 # represented; ignore their origin for now
628 pmodule, prevnum = revsplit(parents[0])[1:]
628 pmodule, prevnum = revsplit(parents[0])[1:]
629 if ent.copyfrom_rev < prevnum:
629 if ent.copyfrom_rev < prevnum:
630 continue
630 continue
631 copyfrom_path = self.getrelpath(ent.copyfrom_path, pmodule)
631 copyfrom_path = self.getrelpath(ent.copyfrom_path, pmodule)
632 if not copyfrom_path:
632 if not copyfrom_path:
633 continue
633 continue
634 self.ui.debug("copied to %s from %s@%s\n" %
634 self.ui.debug("copied to %s from %s@%s\n" %
635 (entrypath, copyfrom_path, ent.copyfrom_rev))
635 (entrypath, copyfrom_path, ent.copyfrom_rev))
636 copies[self.recode(entrypath)] = self.recode(copyfrom_path)
636 copies[self.recode(entrypath)] = self.recode(copyfrom_path)
637 elif kind == 0: # gone, but had better be a deleted *file*
637 elif kind == 0: # gone, but had better be a deleted *file*
638 self.ui.debug("gone from %s\n" % ent.copyfrom_rev)
638 self.ui.debug("gone from %s\n" % ent.copyfrom_rev)
639 pmodule, prevnum = revsplit(parents[0])[1:]
639 pmodule, prevnum = revsplit(parents[0])[1:]
640 parentpath = pmodule + "/" + entrypath
640 parentpath = pmodule + "/" + entrypath
641 fromkind = self._checkpath(entrypath, prevnum, pmodule)
641 fromkind = self._checkpath(entrypath, prevnum, pmodule)
642
642
643 if fromkind == svn.core.svn_node_file:
643 if fromkind == svn.core.svn_node_file:
644 removed.add(self.recode(entrypath))
644 removed.add(self.recode(entrypath))
645 elif fromkind == svn.core.svn_node_dir:
645 elif fromkind == svn.core.svn_node_dir:
646 oroot = parentpath.strip('/')
646 oroot = parentpath.strip('/')
647 nroot = path.strip('/')
647 nroot = path.strip('/')
648 children = self._iterfiles(oroot, prevnum)
648 children = self._iterfiles(oroot, prevnum)
649 for childpath in children:
649 for childpath in children:
650 childpath = childpath.replace(oroot, nroot)
650 childpath = childpath.replace(oroot, nroot)
651 childpath = self.getrelpath("/" + childpath, pmodule)
651 childpath = self.getrelpath("/" + childpath, pmodule)
652 if childpath:
652 if childpath:
653 removed.add(self.recode(childpath))
653 removed.add(self.recode(childpath))
654 else:
654 else:
655 self.ui.debug('unknown path in revision %d: %s\n' % \
655 self.ui.debug('unknown path in revision %d: %s\n' % \
656 (revnum, path))
656 (revnum, path))
657 elif kind == svn.core.svn_node_dir:
657 elif kind == svn.core.svn_node_dir:
658 if ent.action == 'M':
658 if ent.action == 'M':
659 # If the directory just had a prop change,
659 # If the directory just had a prop change,
660 # then we shouldn't need to look for its children.
660 # then we shouldn't need to look for its children.
661 continue
661 continue
662 if ent.action == 'R' and parents:
662 if ent.action == 'R' and parents:
663 # If a directory is replacing a file, mark the previous
663 # If a directory is replacing a file, mark the previous
664 # file as deleted
664 # file as deleted
665 pmodule, prevnum = revsplit(parents[0])[1:]
665 pmodule, prevnum = revsplit(parents[0])[1:]
666 pkind = self._checkpath(entrypath, prevnum, pmodule)
666 pkind = self._checkpath(entrypath, prevnum, pmodule)
667 if pkind == svn.core.svn_node_file:
667 if pkind == svn.core.svn_node_file:
668 removed.add(self.recode(entrypath))
668 removed.add(self.recode(entrypath))
669 elif pkind == svn.core.svn_node_dir:
669 elif pkind == svn.core.svn_node_dir:
670 # We do not know what files were kept or removed,
670 # We do not know what files were kept or removed,
671 # mark them all as changed.
671 # mark them all as changed.
672 for childpath in self._iterfiles(pmodule, prevnum):
672 for childpath in self._iterfiles(pmodule, prevnum):
673 childpath = self.getrelpath("/" + childpath)
673 childpath = self.getrelpath("/" + childpath)
674 if childpath:
674 if childpath:
675 changed.add(self.recode(childpath))
675 changed.add(self.recode(childpath))
676
676
677 for childpath in self._iterfiles(path, revnum):
677 for childpath in self._iterfiles(path, revnum):
678 childpath = self.getrelpath("/" + childpath)
678 childpath = self.getrelpath("/" + childpath)
679 if childpath:
679 if childpath:
680 changed.add(self.recode(childpath))
680 changed.add(self.recode(childpath))
681
681
682 # Handle directory copies
682 # Handle directory copies
683 if not ent.copyfrom_path or not parents:
683 if not ent.copyfrom_path or not parents:
684 continue
684 continue
685 # Copy sources not in parent revisions cannot be
685 # Copy sources not in parent revisions cannot be
686 # represented; ignore their origin for now
686 # represented; ignore their origin for now
687 pmodule, prevnum = revsplit(parents[0])[1:]
687 pmodule, prevnum = revsplit(parents[0])[1:]
688 if ent.copyfrom_rev < prevnum:
688 if ent.copyfrom_rev < prevnum:
689 continue
689 continue
690 copyfrompath = self.getrelpath(ent.copyfrom_path, pmodule)
690 copyfrompath = self.getrelpath(ent.copyfrom_path, pmodule)
691 if not copyfrompath:
691 if not copyfrompath:
692 continue
692 continue
693 self.ui.debug("mark %s came from %s:%d\n"
693 self.ui.debug("mark %s came from %s:%d\n"
694 % (path, copyfrompath, ent.copyfrom_rev))
694 % (path, copyfrompath, ent.copyfrom_rev))
695 children = self._iterfiles(ent.copyfrom_path, ent.copyfrom_rev)
695 children = self._iterfiles(ent.copyfrom_path, ent.copyfrom_rev)
696 for childpath in children:
696 for childpath in children:
697 childpath = self.getrelpath("/" + childpath, pmodule)
697 childpath = self.getrelpath("/" + childpath, pmodule)
698 if not childpath:
698 if not childpath:
699 continue
699 continue
700 copytopath = path + childpath[len(copyfrompath):]
700 copytopath = path + childpath[len(copyfrompath):]
701 copytopath = self.getrelpath(copytopath)
701 copytopath = self.getrelpath(copytopath)
702 copies[self.recode(copytopath)] = self.recode(childpath)
702 copies[self.recode(copytopath)] = self.recode(childpath)
703
703
704 self.ui.progress(_('scanning paths'), None)
704 self.ui.progress(_('scanning paths'), None)
705 changed.update(removed)
705 changed.update(removed)
706 return (list(changed), removed, copies)
706 return (list(changed), removed, copies)
707
707
708 def _fetch_revisions(self, from_revnum, to_revnum):
708 def _fetch_revisions(self, from_revnum, to_revnum):
709 if from_revnum < to_revnum:
709 if from_revnum < to_revnum:
710 from_revnum, to_revnum = to_revnum, from_revnum
710 from_revnum, to_revnum = to_revnum, from_revnum
711
711
712 self.child_cset = None
712 self.child_cset = None
713
713
714 def parselogentry(orig_paths, revnum, author, date, message):
714 def parselogentry(orig_paths, revnum, author, date, message):
715 """Return the parsed commit object or None, and True if
715 """Return the parsed commit object or None, and True if
716 the revision is a branch root.
716 the revision is a branch root.
717 """
717 """
718 self.ui.debug("parsing revision %d (%d changes)\n" %
718 self.ui.debug("parsing revision %d (%d changes)\n" %
719 (revnum, len(orig_paths)))
719 (revnum, len(orig_paths)))
720
720
721 branched = False
721 branched = False
722 rev = self.revid(revnum)
722 rev = self.revid(revnum)
723 # branch log might return entries for a parent we already have
723 # branch log might return entries for a parent we already have
724
724
725 if rev in self.commits or revnum < to_revnum:
725 if rev in self.commits or revnum < to_revnum:
726 return None, branched
726 return None, branched
727
727
728 parents = []
728 parents = []
729 # check whether this revision is the start of a branch or part
729 # check whether this revision is the start of a branch or part
730 # of a branch renaming
730 # of a branch renaming
731 orig_paths = sorted(orig_paths.iteritems())
731 orig_paths = sorted(orig_paths.iteritems())
732 root_paths = [(p, e) for p, e in orig_paths
732 root_paths = [(p, e) for p, e in orig_paths
733 if self.module.startswith(p)]
733 if self.module.startswith(p)]
734 if root_paths:
734 if root_paths:
735 path, ent = root_paths[-1]
735 path, ent = root_paths[-1]
736 if ent.copyfrom_path:
736 if ent.copyfrom_path:
737 branched = True
737 branched = True
738 newpath = ent.copyfrom_path + self.module[len(path):]
738 newpath = ent.copyfrom_path + self.module[len(path):]
739 # ent.copyfrom_rev may not be the actual last revision
739 # ent.copyfrom_rev may not be the actual last revision
740 previd = self.latest(newpath, ent.copyfrom_rev)
740 previd = self.latest(newpath, ent.copyfrom_rev)
741 if previd is not None:
741 if previd is not None:
742 prevmodule, prevnum = revsplit(previd)[1:]
742 prevmodule, prevnum = revsplit(previd)[1:]
743 if prevnum >= self.startrev:
743 if prevnum >= self.startrev:
744 parents = [previd]
744 parents = [previd]
745 self.ui.note(
745 self.ui.note(
746 _('found parent of branch %s at %d: %s\n') %
746 _('found parent of branch %s at %d: %s\n') %
747 (self.module, prevnum, prevmodule))
747 (self.module, prevnum, prevmodule))
748 else:
748 else:
749 self.ui.debug("no copyfrom path, don't know what to do.\n")
749 self.ui.debug("no copyfrom path, don't know what to do.\n")
750
750
751 paths = []
751 paths = []
752 # filter out unrelated paths
752 # filter out unrelated paths
753 for path, ent in orig_paths:
753 for path, ent in orig_paths:
754 if self.getrelpath(path) is None:
754 if self.getrelpath(path) is None:
755 continue
755 continue
756 paths.append((path, ent))
756 paths.append((path, ent))
757
757
758 # Example SVN datetime. Includes microseconds.
758 # Example SVN datetime. Includes microseconds.
759 # ISO-8601 conformant
759 # ISO-8601 conformant
760 # '2007-01-04T17:35:00.902377Z'
760 # '2007-01-04T17:35:00.902377Z'
761 date = util.parsedate(date[:19] + " UTC", ["%Y-%m-%dT%H:%M:%S"])
761 date = util.parsedate(date[:19] + " UTC", ["%Y-%m-%dT%H:%M:%S"])
762
762
763 log = message and self.recode(message) or ''
763 log = message and self.recode(message) or ''
764 author = author and self.recode(author) or ''
764 author = author and self.recode(author) or ''
765 try:
765 try:
766 branch = self.module.split("/")[-1]
766 branch = self.module.split("/")[-1]
767 if branch == self.trunkname:
767 if branch == self.trunkname:
768 branch = None
768 branch = None
769 except IndexError:
769 except IndexError:
770 branch = None
770 branch = None
771
771
772 cset = commit(author=author,
772 cset = commit(author=author,
773 date=util.datestr(date),
773 date=util.datestr(date),
774 desc=log,
774 desc=log,
775 parents=parents,
775 parents=parents,
776 branch=branch,
776 branch=branch,
777 rev=rev)
777 rev=rev)
778
778
779 self.commits[rev] = cset
779 self.commits[rev] = cset
780 # The parents list is *shared* among self.paths and the
780 # The parents list is *shared* among self.paths and the
781 # commit object. Both will be updated below.
781 # commit object. Both will be updated below.
782 self.paths[rev] = (paths, cset.parents)
782 self.paths[rev] = (paths, cset.parents)
783 if self.child_cset and not self.child_cset.parents:
783 if self.child_cset and not self.child_cset.parents:
784 self.child_cset.parents[:] = [rev]
784 self.child_cset.parents[:] = [rev]
785 self.child_cset = cset
785 self.child_cset = cset
786 return cset, branched
786 return cset, branched
787
787
788 self.ui.note(_('fetching revision log for "%s" from %d to %d\n') %
788 self.ui.note(_('fetching revision log for "%s" from %d to %d\n') %
789 (self.module, from_revnum, to_revnum))
789 (self.module, from_revnum, to_revnum))
790
790
791 try:
791 try:
792 firstcset = None
792 firstcset = None
793 lastonbranch = False
793 lastonbranch = False
794 stream = self._getlog([self.module], from_revnum, to_revnum)
794 stream = self._getlog([self.module], from_revnum, to_revnum)
795 try:
795 try:
796 for entry in stream:
796 for entry in stream:
797 paths, revnum, author, date, message = entry
797 paths, revnum, author, date, message = entry
798 if revnum < self.startrev:
798 if revnum < self.startrev:
799 lastonbranch = True
799 lastonbranch = True
800 break
800 break
801 if not paths:
801 if not paths:
802 self.ui.debug('revision %d has no entries\n' % revnum)
802 self.ui.debug('revision %d has no entries\n' % revnum)
803 # If we ever leave the loop on an empty
803 # If we ever leave the loop on an empty
804 # revision, do not try to get a parent branch
804 # revision, do not try to get a parent branch
805 lastonbranch = lastonbranch or revnum == 0
805 lastonbranch = lastonbranch or revnum == 0
806 continue
806 continue
807 cset, lastonbranch = parselogentry(paths, revnum, author,
807 cset, lastonbranch = parselogentry(paths, revnum, author,
808 date, message)
808 date, message)
809 if cset:
809 if cset:
810 firstcset = cset
810 firstcset = cset
811 if lastonbranch:
811 if lastonbranch:
812 break
812 break
813 finally:
813 finally:
814 stream.close()
814 stream.close()
815
815
816 if not lastonbranch and firstcset and not firstcset.parents:
816 if not lastonbranch and firstcset and not firstcset.parents:
817 # The first revision of the sequence (the last fetched one)
817 # The first revision of the sequence (the last fetched one)
818 # has invalid parents if not a branch root. Find the parent
818 # has invalid parents if not a branch root. Find the parent
819 # revision now, if any.
819 # revision now, if any.
820 try:
820 try:
821 firstrevnum = self.revnum(firstcset.rev)
821 firstrevnum = self.revnum(firstcset.rev)
822 if firstrevnum > 1:
822 if firstrevnum > 1:
823 latest = self.latest(self.module, firstrevnum - 1)
823 latest = self.latest(self.module, firstrevnum - 1)
824 if latest:
824 if latest:
825 firstcset.parents.append(latest)
825 firstcset.parents.append(latest)
826 except SvnPathNotFound:
826 except SvnPathNotFound:
827 pass
827 pass
828 except SubversionException, (inst, num):
828 except SubversionException, (inst, num):
829 if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
829 if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
830 raise util.Abort(_('svn: branch has no revision %s') % to_revnum)
830 raise util.Abort(_('svn: branch has no revision %s') % to_revnum)
831 raise
831 raise
832
832
833 def getfile(self, file, rev):
833 def getfile(self, file, rev):
834 # TODO: ra.get_file transmits the whole file instead of diffs.
834 # TODO: ra.get_file transmits the whole file instead of diffs.
835 if file in self.removed:
835 if file in self.removed:
836 raise IOError()
836 raise IOError()
837 mode = ''
837 mode = ''
838 try:
838 try:
839 new_module, revnum = revsplit(rev)[1:]
839 new_module, revnum = revsplit(rev)[1:]
840 if self.module != new_module:
840 if self.module != new_module:
841 self.module = new_module
841 self.module = new_module
842 self.reparent(self.module)
842 self.reparent(self.module)
843 io = StringIO()
843 io = StringIO()
844 info = svn.ra.get_file(self.ra, file, revnum, io)
844 info = svn.ra.get_file(self.ra, file, revnum, io)
845 data = io.getvalue()
845 data = io.getvalue()
846 # ra.get_files() seems to keep a reference on the input buffer
846 # ra.get_files() seems to keep a reference on the input buffer
847 # preventing collection. Release it explicitly.
847 # preventing collection. Release it explicitly.
848 io.close()
848 io.close()
849 if isinstance(info, list):
849 if isinstance(info, list):
850 info = info[-1]
850 info = info[-1]
851 mode = ("svn:executable" in info) and 'x' or ''
851 mode = ("svn:executable" in info) and 'x' or ''
852 mode = ("svn:special" in info) and 'l' or mode
852 mode = ("svn:special" in info) and 'l' or mode
853 except SubversionException, e:
853 except SubversionException, e:
854 notfound = (svn.core.SVN_ERR_FS_NOT_FOUND,
854 notfound = (svn.core.SVN_ERR_FS_NOT_FOUND,
855 svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND)
855 svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND)
856 if e.apr_err in notfound: # File not found
856 if e.apr_err in notfound: # File not found
857 raise IOError()
857 raise IOError()
858 raise
858 raise
859 if mode == 'l':
859 if mode == 'l':
860 link_prefix = "link "
860 link_prefix = "link "
861 if data.startswith(link_prefix):
861 if data.startswith(link_prefix):
862 data = data[len(link_prefix):]
862 data = data[len(link_prefix):]
863 return data, mode
863 return data, mode
864
864
865 def _iterfiles(self, path, revnum):
865 def _iterfiles(self, path, revnum):
866 """Enumerate all files in path at revnum, recursively."""
866 """Enumerate all files in path at revnum, recursively."""
867 path = path.strip('/')
867 path = path.strip('/')
868 pool = Pool()
868 pool = Pool()
869 rpath = '/'.join([self.baseurl, urllib.quote(path)]).strip('/')
869 rpath = '/'.join([self.baseurl, urllib.quote(path)]).strip('/')
870 entries = svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool)
870 entries = svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool)
871 if path:
871 if path:
872 path += '/'
872 path += '/'
873 return ((path + p) for p, e in entries.iteritems()
873 return ((path + p) for p, e in entries.iteritems()
874 if e.kind == svn.core.svn_node_file)
874 if e.kind == svn.core.svn_node_file)
875
875
876 def getrelpath(self, path, module=None):
876 def getrelpath(self, path, module=None):
877 if module is None:
877 if module is None:
878 module = self.module
878 module = self.module
879 # Given the repository url of this wc, say
879 # Given the repository url of this wc, say
880 # "http://server/plone/CMFPlone/branches/Plone-2_0-branch"
880 # "http://server/plone/CMFPlone/branches/Plone-2_0-branch"
881 # extract the "entry" portion (a relative path) from what
881 # extract the "entry" portion (a relative path) from what
882 # svn log --xml says, i.e.
882 # svn log --xml says, i.e.
883 # "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
883 # "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
884 # that is to say "tests/PloneTestCase.py"
884 # that is to say "tests/PloneTestCase.py"
885 if path.startswith(module):
885 if path.startswith(module):
886 relative = path.rstrip('/')[len(module):]
886 relative = path.rstrip('/')[len(module):]
887 if relative.startswith('/'):
887 if relative.startswith('/'):
888 return relative[1:]
888 return relative[1:]
889 elif relative == '':
889 elif relative == '':
890 return relative
890 return relative
891
891
892 # The path is outside our tracked tree...
892 # The path is outside our tracked tree...
893 self.ui.debug('%r is not under %r, ignoring\n' % (path, module))
893 self.ui.debug('%r is not under %r, ignoring\n' % (path, module))
894 return None
894 return None
895
895
896 def _checkpath(self, path, revnum, module=None):
896 def _checkpath(self, path, revnum, module=None):
897 if module is not None:
897 if module is not None:
898 prevmodule = self.reparent('')
898 prevmodule = self.reparent('')
899 path = module + '/' + path
899 path = module + '/' + path
900 try:
900 try:
901 # ra.check_path does not like leading slashes very much, it leads
901 # ra.check_path does not like leading slashes very much, it leads
902 # to PROPFIND subversion errors
902 # to PROPFIND subversion errors
903 return svn.ra.check_path(self.ra, path.strip('/'), revnum)
903 return svn.ra.check_path(self.ra, path.strip('/'), revnum)
904 finally:
904 finally:
905 if module is not None:
905 if module is not None:
906 self.reparent(prevmodule)
906 self.reparent(prevmodule)
907
907
908 def _getlog(self, paths, start, end, limit=0, discover_changed_paths=True,
908 def _getlog(self, paths, start, end, limit=0, discover_changed_paths=True,
909 strict_node_history=False):
909 strict_node_history=False):
910 # Normalize path names: svn >= 1.5 only wants paths relative to
910 # Normalize path names: svn >= 1.5 only wants paths relative to
911 # the supplied URL
911 # the supplied URL
912 relpaths = []
912 relpaths = []
913 for p in paths:
913 for p in paths:
914 if not p.startswith('/'):
914 if not p.startswith('/'):
915 p = self.module + '/' + p
915 p = self.module + '/' + p
916 relpaths.append(p.strip('/'))
916 relpaths.append(p.strip('/'))
917 args = [self.baseurl, relpaths, start, end, limit, discover_changed_paths,
917 args = [self.baseurl, relpaths, start, end, limit, discover_changed_paths,
918 strict_node_history]
918 strict_node_history]
919 arg = encodeargs(args)
919 arg = encodeargs(args)
920 hgexe = util.hgexecutable()
920 hgexe = util.hgexecutable()
921 cmd = '%s debugsvnlog' % util.shellquote(hgexe)
921 cmd = '%s debugsvnlog' % util.shellquote(hgexe)
922 stdin, stdout = util.popen2(util.quotecommand(cmd))
922 stdin, stdout = util.popen2(util.quotecommand(cmd))
923 stdin.write(arg)
923 stdin.write(arg)
924 try:
924 try:
925 stdin.close()
925 stdin.close()
926 except IOError:
926 except IOError:
927 raise util.Abort(_('Mercurial failed to run itself, check'
927 raise util.Abort(_('Mercurial failed to run itself, check'
928 ' hg executable is in PATH'))
928 ' hg executable is in PATH'))
929 return logstream(stdout)
929 return logstream(stdout)
930
930
931 pre_revprop_change = '''#!/bin/sh
931 pre_revprop_change = '''#!/bin/sh
932
932
933 REPOS="$1"
933 REPOS="$1"
934 REV="$2"
934 REV="$2"
935 USER="$3"
935 USER="$3"
936 PROPNAME="$4"
936 PROPNAME="$4"
937 ACTION="$5"
937 ACTION="$5"
938
938
939 if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi
939 if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi
940 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-branch" ]; then exit 0; fi
940 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-branch" ]; then exit 0; fi
941 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-rev" ]; then exit 0; fi
941 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-rev" ]; then exit 0; fi
942
942
943 echo "Changing prohibited revision property" >&2
943 echo "Changing prohibited revision property" >&2
944 exit 1
944 exit 1
945 '''
945 '''
946
946
947 class svn_sink(converter_sink, commandline):
947 class svn_sink(converter_sink, commandline):
948 commit_re = re.compile(r'Committed revision (\d+).', re.M)
948 commit_re = re.compile(r'Committed revision (\d+).', re.M)
949 uuid_re = re.compile(r'Repository UUID:\s*(\S+)', re.M)
949 uuid_re = re.compile(r'Repository UUID:\s*(\S+)', re.M)
950
950
951 def prerun(self):
951 def prerun(self):
952 if self.wc:
952 if self.wc:
953 os.chdir(self.wc)
953 os.chdir(self.wc)
954
954
955 def postrun(self):
955 def postrun(self):
956 if self.wc:
956 if self.wc:
957 os.chdir(self.cwd)
957 os.chdir(self.cwd)
958
958
959 def join(self, name):
959 def join(self, name):
960 return os.path.join(self.wc, '.svn', name)
960 return os.path.join(self.wc, '.svn', name)
961
961
962 def revmapfile(self):
962 def revmapfile(self):
963 return self.join('hg-shamap')
963 return self.join('hg-shamap')
964
964
965 def authorfile(self):
965 def authorfile(self):
966 return self.join('hg-authormap')
966 return self.join('hg-authormap')
967
967
968 def __init__(self, ui, path):
968 def __init__(self, ui, path):
969
969
970 converter_sink.__init__(self, ui, path)
970 converter_sink.__init__(self, ui, path)
971 commandline.__init__(self, ui, 'svn')
971 commandline.__init__(self, ui, 'svn')
972 self.delete = []
972 self.delete = []
973 self.setexec = []
973 self.setexec = []
974 self.delexec = []
974 self.delexec = []
975 self.copies = []
975 self.copies = []
976 self.wc = None
976 self.wc = None
977 self.cwd = os.getcwd()
977 self.cwd = os.getcwd()
978
978
979 path = os.path.realpath(path)
979 path = os.path.realpath(path)
980
980
981 created = False
981 created = False
982 if os.path.isfile(os.path.join(path, '.svn', 'entries')):
982 if os.path.isfile(os.path.join(path, '.svn', 'entries')):
983 self.wc = path
983 self.wc = path
984 self.run0('update')
984 self.run0('update')
985 else:
985 else:
986 wcpath = os.path.join(os.getcwd(), os.path.basename(path) + '-wc')
986 wcpath = os.path.join(os.getcwd(), os.path.basename(path) + '-wc')
987
987
988 if os.path.isdir(os.path.dirname(path)):
988 if os.path.isdir(os.path.dirname(path)):
989 if not os.path.exists(os.path.join(path, 'db', 'fs-type')):
989 if not os.path.exists(os.path.join(path, 'db', 'fs-type')):
990 ui.status(_('initializing svn repository %r\n') %
990 ui.status(_('initializing svn repository %r\n') %
991 os.path.basename(path))
991 os.path.basename(path))
992 commandline(ui, 'svnadmin').run0('create', path)
992 commandline(ui, 'svnadmin').run0('create', path)
993 created = path
993 created = path
994 path = util.normpath(path)
994 path = util.normpath(path)
995 if not path.startswith('/'):
995 if not path.startswith('/'):
996 path = '/' + path
996 path = '/' + path
997 path = 'file://' + path
997 path = 'file://' + path
998
998
999 ui.status(_('initializing svn working copy %r\n')
999 ui.status(_('initializing svn working copy %r\n')
1000 % os.path.basename(wcpath))
1000 % os.path.basename(wcpath))
1001 self.run0('checkout', path, wcpath)
1001 self.run0('checkout', path, wcpath)
1002
1002
1003 self.wc = wcpath
1003 self.wc = wcpath
1004 self.opener = scmutil.opener(self.wc)
1004 self.opener = scmutil.opener(self.wc)
1005 self.wopener = scmutil.opener(self.wc)
1005 self.wopener = scmutil.opener(self.wc)
1006 self.childmap = mapfile(ui, self.join('hg-childmap'))
1006 self.childmap = mapfile(ui, self.join('hg-childmap'))
1007 self.is_exec = util.checkexec(self.wc) and util.is_exec or None
1007 self.is_exec = util.checkexec(self.wc) and util.is_exec or None
1008
1008
1009 if created:
1009 if created:
1010 hook = os.path.join(created, 'hooks', 'pre-revprop-change')
1010 hook = os.path.join(created, 'hooks', 'pre-revprop-change')
1011 fp = open(hook, 'w')
1011 fp = open(hook, 'w')
1012 fp.write(pre_revprop_change)
1012 fp.write(pre_revprop_change)
1013 fp.close()
1013 fp.close()
1014 util.set_flags(hook, False, True)
1014 util.setflags(hook, False, True)
1015
1015
1016 output = self.run0('info')
1016 output = self.run0('info')
1017 self.uuid = self.uuid_re.search(output).group(1).strip()
1017 self.uuid = self.uuid_re.search(output).group(1).strip()
1018
1018
1019 def wjoin(self, *names):
1019 def wjoin(self, *names):
1020 return os.path.join(self.wc, *names)
1020 return os.path.join(self.wc, *names)
1021
1021
1022 def putfile(self, filename, flags, data):
1022 def putfile(self, filename, flags, data):
1023 if 'l' in flags:
1023 if 'l' in flags:
1024 self.wopener.symlink(data, filename)
1024 self.wopener.symlink(data, filename)
1025 else:
1025 else:
1026 try:
1026 try:
1027 if os.path.islink(self.wjoin(filename)):
1027 if os.path.islink(self.wjoin(filename)):
1028 os.unlink(filename)
1028 os.unlink(filename)
1029 except OSError:
1029 except OSError:
1030 pass
1030 pass
1031 self.wopener.write(filename, data)
1031 self.wopener.write(filename, data)
1032
1032
1033 if self.is_exec:
1033 if self.is_exec:
1034 was_exec = self.is_exec(self.wjoin(filename))
1034 was_exec = self.is_exec(self.wjoin(filename))
1035 else:
1035 else:
1036 # On filesystems not supporting execute-bit, there is no way
1036 # On filesystems not supporting execute-bit, there is no way
1037 # to know if it is set without asking subversion. Setting it
1037 # to know if it is set without asking subversion. Setting it
1038 # systematically is just as expensive and much simpler.
1038 # systematically is just as expensive and much simpler.
1039 was_exec = 'x' not in flags
1039 was_exec = 'x' not in flags
1040
1040
1041 util.set_flags(self.wjoin(filename), False, 'x' in flags)
1041 util.setflags(self.wjoin(filename), False, 'x' in flags)
1042 if was_exec:
1042 if was_exec:
1043 if 'x' not in flags:
1043 if 'x' not in flags:
1044 self.delexec.append(filename)
1044 self.delexec.append(filename)
1045 else:
1045 else:
1046 if 'x' in flags:
1046 if 'x' in flags:
1047 self.setexec.append(filename)
1047 self.setexec.append(filename)
1048
1048
1049 def _copyfile(self, source, dest):
1049 def _copyfile(self, source, dest):
1050 # SVN's copy command pukes if the destination file exists, but
1050 # SVN's copy command pukes if the destination file exists, but
1051 # our copyfile method expects to record a copy that has
1051 # our copyfile method expects to record a copy that has
1052 # already occurred. Cross the semantic gap.
1052 # already occurred. Cross the semantic gap.
1053 wdest = self.wjoin(dest)
1053 wdest = self.wjoin(dest)
1054 exists = os.path.lexists(wdest)
1054 exists = os.path.lexists(wdest)
1055 if exists:
1055 if exists:
1056 fd, tempname = tempfile.mkstemp(
1056 fd, tempname = tempfile.mkstemp(
1057 prefix='hg-copy-', dir=os.path.dirname(wdest))
1057 prefix='hg-copy-', dir=os.path.dirname(wdest))
1058 os.close(fd)
1058 os.close(fd)
1059 os.unlink(tempname)
1059 os.unlink(tempname)
1060 os.rename(wdest, tempname)
1060 os.rename(wdest, tempname)
1061 try:
1061 try:
1062 self.run0('copy', source, dest)
1062 self.run0('copy', source, dest)
1063 finally:
1063 finally:
1064 if exists:
1064 if exists:
1065 try:
1065 try:
1066 os.unlink(wdest)
1066 os.unlink(wdest)
1067 except OSError:
1067 except OSError:
1068 pass
1068 pass
1069 os.rename(tempname, wdest)
1069 os.rename(tempname, wdest)
1070
1070
1071 def dirs_of(self, files):
1071 def dirs_of(self, files):
1072 dirs = set()
1072 dirs = set()
1073 for f in files:
1073 for f in files:
1074 if os.path.isdir(self.wjoin(f)):
1074 if os.path.isdir(self.wjoin(f)):
1075 dirs.add(f)
1075 dirs.add(f)
1076 for i in strutil.rfindall(f, '/'):
1076 for i in strutil.rfindall(f, '/'):
1077 dirs.add(f[:i])
1077 dirs.add(f[:i])
1078 return dirs
1078 return dirs
1079
1079
1080 def add_dirs(self, files):
1080 def add_dirs(self, files):
1081 add_dirs = [d for d in sorted(self.dirs_of(files))
1081 add_dirs = [d for d in sorted(self.dirs_of(files))
1082 if not os.path.exists(self.wjoin(d, '.svn', 'entries'))]
1082 if not os.path.exists(self.wjoin(d, '.svn', 'entries'))]
1083 if add_dirs:
1083 if add_dirs:
1084 self.xargs(add_dirs, 'add', non_recursive=True, quiet=True)
1084 self.xargs(add_dirs, 'add', non_recursive=True, quiet=True)
1085 return add_dirs
1085 return add_dirs
1086
1086
1087 def add_files(self, files):
1087 def add_files(self, files):
1088 if files:
1088 if files:
1089 self.xargs(files, 'add', quiet=True)
1089 self.xargs(files, 'add', quiet=True)
1090 return files
1090 return files
1091
1091
1092 def tidy_dirs(self, names):
1092 def tidy_dirs(self, names):
1093 deleted = []
1093 deleted = []
1094 for d in sorted(self.dirs_of(names), reverse=True):
1094 for d in sorted(self.dirs_of(names), reverse=True):
1095 wd = self.wjoin(d)
1095 wd = self.wjoin(d)
1096             if os.listdir(wd) == ['.svn']:
1096             if os.listdir(wd) == ['.svn']:
1097 self.run0('delete', d)
1097 self.run0('delete', d)
1098 deleted.append(d)
1098 deleted.append(d)
1099 return deleted
1099 return deleted
1100
1100
1101 def addchild(self, parent, child):
1101 def addchild(self, parent, child):
1102 self.childmap[parent] = child
1102 self.childmap[parent] = child
1103
1103
1104 def revid(self, rev):
1104 def revid(self, rev):
1105 return u"svn:%s@%s" % (self.uuid, rev)
1105 return u"svn:%s@%s" % (self.uuid, rev)
1106
1106
1107 def putcommit(self, files, copies, parents, commit, source, revmap):
1107 def putcommit(self, files, copies, parents, commit, source, revmap):
1108 # Apply changes to working copy
1108 # Apply changes to working copy
1109 for f, v in files:
1109 for f, v in files:
1110 try:
1110 try:
1111 data, mode = source.getfile(f, v)
1111 data, mode = source.getfile(f, v)
1112 except IOError:
1112 except IOError:
1113 self.delete.append(f)
1113 self.delete.append(f)
1114 else:
1114 else:
1115 self.putfile(f, mode, data)
1115 self.putfile(f, mode, data)
1116 if f in copies:
1116 if f in copies:
1117 self.copies.append([copies[f], f])
1117 self.copies.append([copies[f], f])
1118 files = [f[0] for f in files]
1118 files = [f[0] for f in files]
1119
1119
1120 for parent in parents:
1120 for parent in parents:
1121 try:
1121 try:
1122 return self.revid(self.childmap[parent])
1122 return self.revid(self.childmap[parent])
1123 except KeyError:
1123 except KeyError:
1124 pass
1124 pass
1125 entries = set(self.delete)
1125 entries = set(self.delete)
1126 files = frozenset(files)
1126 files = frozenset(files)
1127 entries.update(self.add_dirs(files.difference(entries)))
1127 entries.update(self.add_dirs(files.difference(entries)))
1128 if self.copies:
1128 if self.copies:
1129 for s, d in self.copies:
1129 for s, d in self.copies:
1130 self._copyfile(s, d)
1130 self._copyfile(s, d)
1131 self.copies = []
1131 self.copies = []
1132 if self.delete:
1132 if self.delete:
1133 self.xargs(self.delete, 'delete')
1133 self.xargs(self.delete, 'delete')
1134 self.delete = []
1134 self.delete = []
1135 entries.update(self.add_files(files.difference(entries)))
1135 entries.update(self.add_files(files.difference(entries)))
1136 entries.update(self.tidy_dirs(entries))
1136 entries.update(self.tidy_dirs(entries))
1137 if self.delexec:
1137 if self.delexec:
1138 self.xargs(self.delexec, 'propdel', 'svn:executable')
1138 self.xargs(self.delexec, 'propdel', 'svn:executable')
1139 self.delexec = []
1139 self.delexec = []
1140 if self.setexec:
1140 if self.setexec:
1141 self.xargs(self.setexec, 'propset', 'svn:executable', '*')
1141 self.xargs(self.setexec, 'propset', 'svn:executable', '*')
1142 self.setexec = []
1142 self.setexec = []
1143
1143
1144 fd, messagefile = tempfile.mkstemp(prefix='hg-convert-')
1144 fd, messagefile = tempfile.mkstemp(prefix='hg-convert-')
1145 fp = os.fdopen(fd, 'w')
1145 fp = os.fdopen(fd, 'w')
1146 fp.write(commit.desc)
1146 fp.write(commit.desc)
1147 fp.close()
1147 fp.close()
1148 try:
1148 try:
1149 output = self.run0('commit',
1149 output = self.run0('commit',
1150 username=util.shortuser(commit.author),
1150 username=util.shortuser(commit.author),
1151 file=messagefile,
1151 file=messagefile,
1152 encoding='utf-8')
1152 encoding='utf-8')
1153 try:
1153 try:
1154 rev = self.commit_re.search(output).group(1)
1154 rev = self.commit_re.search(output).group(1)
1155 except AttributeError:
1155 except AttributeError:
1156 if not files:
1156 if not files:
1157 return parents[0]
1157 return parents[0]
1158 self.ui.warn(_('unexpected svn output:\n'))
1158 self.ui.warn(_('unexpected svn output:\n'))
1159 self.ui.warn(output)
1159 self.ui.warn(output)
1160 raise util.Abort(_('unable to cope with svn output'))
1160 raise util.Abort(_('unable to cope with svn output'))
1161 if commit.rev:
1161 if commit.rev:
1162 self.run('propset', 'hg:convert-rev', commit.rev,
1162 self.run('propset', 'hg:convert-rev', commit.rev,
1163 revprop=True, revision=rev)
1163 revprop=True, revision=rev)
1164 if commit.branch and commit.branch != 'default':
1164 if commit.branch and commit.branch != 'default':
1165 self.run('propset', 'hg:convert-branch', commit.branch,
1165 self.run('propset', 'hg:convert-branch', commit.branch,
1166 revprop=True, revision=rev)
1166 revprop=True, revision=rev)
1167 for parent in parents:
1167 for parent in parents:
1168 self.addchild(parent, rev)
1168 self.addchild(parent, rev)
1169 return self.revid(rev)
1169 return self.revid(rev)
1170 finally:
1170 finally:
1171 os.unlink(messagefile)
1171 os.unlink(messagefile)
1172
1172
1173 def puttags(self, tags):
1173 def puttags(self, tags):
1174 self.ui.warn(_('writing Subversion tags is not yet implemented\n'))
1174 self.ui.warn(_('writing Subversion tags is not yet implemented\n'))
1175 return None, None
1175 return None, None
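
# A hedged, standalone sketch of the commit step performed by putcommit()
# above: the log message is handed to Subversion through a temporary file
# and the new revision number is scraped from the command output. The
# converter's real commit_re is defined elsewhere in this module; the
# pattern below only assumes svn's usual "Committed revision N." line, and
# subprocess stands in for the run0() wrapper.
import os
import re
import subprocess
import tempfile

def svn_commit(wc, message, username):
    fd, msgfile = tempfile.mkstemp(prefix='hg-convert-')
    os.write(fd, message)
    os.close(fd)
    try:
        out = subprocess.check_output(
            ['svn', 'commit', '--username', username,
             '--file', msgfile, '--encoding', 'utf-8'], cwd=wc)
        m = re.search(r'Committed revision (\d+)', out)
        return m and m.group(1)  # None when there was nothing to commit
    finally:
        os.unlink(msgfile)
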
@@ -1,328 +1,328 b''
1 # extdiff.py - external diff program support for mercurial
1 # extdiff.py - external diff program support for mercurial
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''command to allow external programs to compare revisions
8 '''command to allow external programs to compare revisions
9
9
10 The extdiff Mercurial extension allows you to use external programs
10 The extdiff Mercurial extension allows you to use external programs
11 to compare revisions, or a revision with the working directory. The external
11 to compare revisions, or a revision with the working directory. The external
12 diff programs are called with a configurable set of options and two
12 diff programs are called with a configurable set of options and two
13 non-option arguments: paths to directories containing snapshots of
13 non-option arguments: paths to directories containing snapshots of
14 files to compare.
14 files to compare.
15
15
16 The extdiff extension also allows you to configure new diff commands, so
16 The extdiff extension also allows you to configure new diff commands, so
17 you do not need to type :hg:`extdiff -p kdiff3` every time. ::
17 you do not need to type :hg:`extdiff -p kdiff3` every time. ::
18
18
19 [extdiff]
19 [extdiff]
20 # add new command that runs GNU diff(1) in 'context diff' mode
20 # add new command that runs GNU diff(1) in 'context diff' mode
21 cdiff = gdiff -Nprc5
21 cdiff = gdiff -Nprc5
22 ## or the old way:
22 ## or the old way:
23 #cmd.cdiff = gdiff
23 #cmd.cdiff = gdiff
24 #opts.cdiff = -Nprc5
24 #opts.cdiff = -Nprc5
25
25
26 # add new command called vdiff, runs kdiff3
26 # add new command called vdiff, runs kdiff3
27 vdiff = kdiff3
27 vdiff = kdiff3
28
28
29 # add new command called meld, runs meld (no need to name twice)
29 # add new command called meld, runs meld (no need to name twice)
30 meld =
30 meld =
31
31
32 # add new command called vimdiff, runs gvimdiff with DirDiff plugin
32 # add new command called vimdiff, runs gvimdiff with DirDiff plugin
33   # (see http://www.vim.org/scripts/script.php?script_id=102). Non-English
33   # (see http://www.vim.org/scripts/script.php?script_id=102). Non-English
34   # users should put "let g:DirDiffDynamicDiffText = 1" in
34   # users should put "let g:DirDiffDynamicDiffText = 1" in
35   # their .vimrc
35   # their .vimrc
36 vimdiff = gvim -f '+next' '+execute "DirDiff" argv(0) argv(1)'
36 vimdiff = gvim -f '+next' '+execute "DirDiff" argv(0) argv(1)'
37
37
38 Tool arguments can include variables that are expanded at runtime::
38 Tool arguments can include variables that are expanded at runtime::
39
39
40 $parent1, $plabel1 - filename, descriptive label of first parent
40 $parent1, $plabel1 - filename, descriptive label of first parent
41 $child, $clabel - filename, descriptive label of child revision
41 $child, $clabel - filename, descriptive label of child revision
42 $parent2, $plabel2 - filename, descriptive label of second parent
42 $parent2, $plabel2 - filename, descriptive label of second parent
43 $root - repository root
43 $root - repository root
44 $parent is an alias for $parent1.
44 $parent is an alias for $parent1.
45
45
46 The extdiff extension will look in your [diff-tools] and [merge-tools]
46 The extdiff extension will look in your [diff-tools] and [merge-tools]
47 sections for diff tool arguments when none are specified in [extdiff].
47 sections for diff tool arguments when none are specified in [extdiff].
48
48
49 ::
49 ::
50
50
51 [extdiff]
51 [extdiff]
52 kdiff3 =
52 kdiff3 =
53
53
54 [diff-tools]
54 [diff-tools]
55 kdiff3.diffargs=--L1 '$plabel1' --L2 '$clabel' $parent $child
55 kdiff3.diffargs=--L1 '$plabel1' --L2 '$clabel' $parent $child
56
56
57 You can use -I/-X and a list of file or directory names as with the
57 You can use -I/-X and a list of file or directory names as with the
58 normal :hg:`diff` command. The extdiff extension makes snapshots of only
58 normal :hg:`diff` command. The extdiff extension makes snapshots of only
59 the needed files, so running the external diff program will actually be
59 the needed files, so running the external diff program will actually be
60 pretty fast (at least faster than having to compare the entire tree).
60 pretty fast (at least faster than having to compare the entire tree).
61 '''
61 '''
62
62
63 from mercurial.i18n import _
63 from mercurial.i18n import _
64 from mercurial.node import short, nullid
64 from mercurial.node import short, nullid
65 from mercurial import cmdutil, scmutil, util, commands, encoding
65 from mercurial import cmdutil, scmutil, util, commands, encoding
66 import os, shlex, shutil, tempfile, re
66 import os, shlex, shutil, tempfile, re
67
67
68 def snapshot(ui, repo, files, node, tmproot):
68 def snapshot(ui, repo, files, node, tmproot):
69 '''snapshot files as of some revision
69 '''snapshot files as of some revision
70     if not using a snapshot, -I/-X does not work and a recursive diff
70     if not using a snapshot, -I/-X does not work and a recursive diff
71 in tools like kdiff3 and meld displays too many files.'''
71 in tools like kdiff3 and meld displays too many files.'''
72 dirname = os.path.basename(repo.root)
72 dirname = os.path.basename(repo.root)
73 if dirname == "":
73 if dirname == "":
74 dirname = "root"
74 dirname = "root"
75 if node is not None:
75 if node is not None:
76 dirname = '%s.%s' % (dirname, short(node))
76 dirname = '%s.%s' % (dirname, short(node))
77 base = os.path.join(tmproot, dirname)
77 base = os.path.join(tmproot, dirname)
78 os.mkdir(base)
78 os.mkdir(base)
79 if node is not None:
79 if node is not None:
80 ui.note(_('making snapshot of %d files from rev %s\n') %
80 ui.note(_('making snapshot of %d files from rev %s\n') %
81 (len(files), short(node)))
81 (len(files), short(node)))
82 else:
82 else:
83 ui.note(_('making snapshot of %d files from working directory\n') %
83 ui.note(_('making snapshot of %d files from working directory\n') %
84 (len(files)))
84 (len(files)))
85 wopener = scmutil.opener(base)
85 wopener = scmutil.opener(base)
86 fns_and_mtime = []
86 fns_and_mtime = []
87 ctx = repo[node]
87 ctx = repo[node]
88 for fn in files:
88 for fn in files:
89 wfn = util.pconvert(fn)
89 wfn = util.pconvert(fn)
90 if not wfn in ctx:
90 if not wfn in ctx:
91 # File doesn't exist; could be a bogus modify
91 # File doesn't exist; could be a bogus modify
92 continue
92 continue
93 ui.note(' %s\n' % wfn)
93 ui.note(' %s\n' % wfn)
94 dest = os.path.join(base, wfn)
94 dest = os.path.join(base, wfn)
95 fctx = ctx[wfn]
95 fctx = ctx[wfn]
96 data = repo.wwritedata(wfn, fctx.data())
96 data = repo.wwritedata(wfn, fctx.data())
97 if 'l' in fctx.flags():
97 if 'l' in fctx.flags():
98 wopener.symlink(data, wfn)
98 wopener.symlink(data, wfn)
99 else:
99 else:
100 wopener.write(wfn, data)
100 wopener.write(wfn, data)
101 if 'x' in fctx.flags():
101 if 'x' in fctx.flags():
102 util.set_flags(dest, False, True)
102 util.setflags(dest, False, True)
103 if node is None:
103 if node is None:
104 fns_and_mtime.append((dest, repo.wjoin(fn),
104 fns_and_mtime.append((dest, repo.wjoin(fn),
105 os.lstat(dest).st_mtime))
105 os.lstat(dest).st_mtime))
106 return dirname, fns_and_mtime
106 return dirname, fns_and_mtime
107
107
108 def dodiff(ui, repo, diffcmd, diffopts, pats, opts):
108 def dodiff(ui, repo, diffcmd, diffopts, pats, opts):
109     '''Do the actual diff:
109     '''Do the actual diff:
110
110
111 - copy to a temp structure if diffing 2 internal revisions
111 - copy to a temp structure if diffing 2 internal revisions
112 - copy to a temp structure if diffing working revision with
112 - copy to a temp structure if diffing working revision with
113 another one and more than 1 file is changed
113 another one and more than 1 file is changed
114 - just invoke the diff for a single file in the working dir
114 - just invoke the diff for a single file in the working dir
115 '''
115 '''
116
116
117 revs = opts.get('rev')
117 revs = opts.get('rev')
118 change = opts.get('change')
118 change = opts.get('change')
119 args = ' '.join(diffopts)
119 args = ' '.join(diffopts)
120 do3way = '$parent2' in args
120 do3way = '$parent2' in args
121
121
122 if revs and change:
122 if revs and change:
123 msg = _('cannot specify --rev and --change at the same time')
123 msg = _('cannot specify --rev and --change at the same time')
124 raise util.Abort(msg)
124 raise util.Abort(msg)
125 elif change:
125 elif change:
126 node2 = cmdutil.revsingle(repo, change, None).node()
126 node2 = cmdutil.revsingle(repo, change, None).node()
127 node1a, node1b = repo.changelog.parents(node2)
127 node1a, node1b = repo.changelog.parents(node2)
128 else:
128 else:
129 node1a, node2 = cmdutil.revpair(repo, revs)
129 node1a, node2 = cmdutil.revpair(repo, revs)
130 if not revs:
130 if not revs:
131 node1b = repo.dirstate.p2()
131 node1b = repo.dirstate.p2()
132 else:
132 else:
133 node1b = nullid
133 node1b = nullid
134
134
135 # Disable 3-way merge if there is only one parent
135 # Disable 3-way merge if there is only one parent
136 if do3way:
136 if do3way:
137 if node1b == nullid:
137 if node1b == nullid:
138 do3way = False
138 do3way = False
139
139
140 matcher = cmdutil.match(repo, pats, opts)
140 matcher = cmdutil.match(repo, pats, opts)
141 mod_a, add_a, rem_a = map(set, repo.status(node1a, node2, matcher)[:3])
141 mod_a, add_a, rem_a = map(set, repo.status(node1a, node2, matcher)[:3])
142 if do3way:
142 if do3way:
143 mod_b, add_b, rem_b = map(set, repo.status(node1b, node2, matcher)[:3])
143 mod_b, add_b, rem_b = map(set, repo.status(node1b, node2, matcher)[:3])
144 else:
144 else:
145 mod_b, add_b, rem_b = set(), set(), set()
145 mod_b, add_b, rem_b = set(), set(), set()
146 modadd = mod_a | add_a | mod_b | add_b
146 modadd = mod_a | add_a | mod_b | add_b
147 common = modadd | rem_a | rem_b
147 common = modadd | rem_a | rem_b
148 if not common:
148 if not common:
149 return 0
149 return 0
150
150
151 tmproot = tempfile.mkdtemp(prefix='extdiff.')
151 tmproot = tempfile.mkdtemp(prefix='extdiff.')
152 try:
152 try:
153 # Always make a copy of node1a (and node1b, if applicable)
153 # Always make a copy of node1a (and node1b, if applicable)
154 dir1a_files = mod_a | rem_a | ((mod_b | add_b) - add_a)
154 dir1a_files = mod_a | rem_a | ((mod_b | add_b) - add_a)
155 dir1a = snapshot(ui, repo, dir1a_files, node1a, tmproot)[0]
155 dir1a = snapshot(ui, repo, dir1a_files, node1a, tmproot)[0]
156 rev1a = '@%d' % repo[node1a].rev()
156 rev1a = '@%d' % repo[node1a].rev()
157 if do3way:
157 if do3way:
158 dir1b_files = mod_b | rem_b | ((mod_a | add_a) - add_b)
158 dir1b_files = mod_b | rem_b | ((mod_a | add_a) - add_b)
159 dir1b = snapshot(ui, repo, dir1b_files, node1b, tmproot)[0]
159 dir1b = snapshot(ui, repo, dir1b_files, node1b, tmproot)[0]
160 rev1b = '@%d' % repo[node1b].rev()
160 rev1b = '@%d' % repo[node1b].rev()
161 else:
161 else:
162 dir1b = None
162 dir1b = None
163 rev1b = ''
163 rev1b = ''
164
164
165 fns_and_mtime = []
165 fns_and_mtime = []
166
166
167         # If node2 is not the wc or there is >1 change, copy it
167         # If node2 is not the wc or there is >1 change, copy it
168 dir2root = ''
168 dir2root = ''
169 rev2 = ''
169 rev2 = ''
170 if node2:
170 if node2:
171 dir2 = snapshot(ui, repo, modadd, node2, tmproot)[0]
171 dir2 = snapshot(ui, repo, modadd, node2, tmproot)[0]
172 rev2 = '@%d' % repo[node2].rev()
172 rev2 = '@%d' % repo[node2].rev()
173 elif len(common) > 1:
173 elif len(common) > 1:
174             # we only actually need to get the files to copy back to
174             # we only actually need to get the files to copy back to
175             # the working dir in this case (because the other cases
175             # the working dir in this case (because the other cases
176             # are: diffing 2 revisions or single file -- in which case
176             # are: diffing 2 revisions or single file -- in which case
177             # the file is already directly passed to the diff tool).
177             # the file is already directly passed to the diff tool).
178 dir2, fns_and_mtime = snapshot(ui, repo, modadd, None, tmproot)
178 dir2, fns_and_mtime = snapshot(ui, repo, modadd, None, tmproot)
179 else:
179 else:
180 # This lets the diff tool open the changed file directly
180 # This lets the diff tool open the changed file directly
181 dir2 = ''
181 dir2 = ''
182 dir2root = repo.root
182 dir2root = repo.root
183
183
184 label1a = rev1a
184 label1a = rev1a
185 label1b = rev1b
185 label1b = rev1b
186 label2 = rev2
186 label2 = rev2
187
187
188 # If only one change, diff the files instead of the directories
188 # If only one change, diff the files instead of the directories
189 # Handle bogus modifies correctly by checking if the files exist
189 # Handle bogus modifies correctly by checking if the files exist
190 if len(common) == 1:
190 if len(common) == 1:
191 common_file = util.localpath(common.pop())
191 common_file = util.localpath(common.pop())
192 dir1a = os.path.join(tmproot, dir1a, common_file)
192 dir1a = os.path.join(tmproot, dir1a, common_file)
193 label1a = common_file + rev1a
193 label1a = common_file + rev1a
194 if not os.path.isfile(dir1a):
194 if not os.path.isfile(dir1a):
195 dir1a = os.devnull
195 dir1a = os.devnull
196 if do3way:
196 if do3way:
197 dir1b = os.path.join(tmproot, dir1b, common_file)
197 dir1b = os.path.join(tmproot, dir1b, common_file)
198 label1b = common_file + rev1b
198 label1b = common_file + rev1b
199 if not os.path.isfile(dir1b):
199 if not os.path.isfile(dir1b):
200 dir1b = os.devnull
200 dir1b = os.devnull
201 dir2 = os.path.join(dir2root, dir2, common_file)
201 dir2 = os.path.join(dir2root, dir2, common_file)
202 label2 = common_file + rev2
202 label2 = common_file + rev2
203
203
204 # Function to quote file/dir names in the argument string.
204 # Function to quote file/dir names in the argument string.
205 # When not operating in 3-way mode, an empty string is
205 # When not operating in 3-way mode, an empty string is
206 # returned for parent2
206 # returned for parent2
207 replace = dict(parent=dir1a, parent1=dir1a, parent2=dir1b,
207 replace = dict(parent=dir1a, parent1=dir1a, parent2=dir1b,
208 plabel1=label1a, plabel2=label1b,
208 plabel1=label1a, plabel2=label1b,
209 clabel=label2, child=dir2,
209 clabel=label2, child=dir2,
210 root=repo.root)
210 root=repo.root)
211 def quote(match):
211 def quote(match):
212 key = match.group()[1:]
212 key = match.group()[1:]
213 if not do3way and key == 'parent2':
213 if not do3way and key == 'parent2':
214 return ''
214 return ''
215 return util.shellquote(replace[key])
215 return util.shellquote(replace[key])
216
216
217 # Match parent2 first, so 'parent1?' will match both parent1 and parent
217 # Match parent2 first, so 'parent1?' will match both parent1 and parent
218 regex = '\$(parent2|parent1?|child|plabel1|plabel2|clabel|root)'
218 regex = '\$(parent2|parent1?|child|plabel1|plabel2|clabel|root)'
219 if not do3way and not re.search(regex, args):
219 if not do3way and not re.search(regex, args):
220 args += ' $parent1 $child'
220 args += ' $parent1 $child'
221 args = re.sub(regex, quote, args)
221 args = re.sub(regex, quote, args)
222 cmdline = util.shellquote(diffcmd) + ' ' + args
222 cmdline = util.shellquote(diffcmd) + ' ' + args
223
223
224 ui.debug('running %r in %s\n' % (cmdline, tmproot))
224 ui.debug('running %r in %s\n' % (cmdline, tmproot))
225 util.system(cmdline, cwd=tmproot)
225 util.system(cmdline, cwd=tmproot)
226
226
227 for copy_fn, working_fn, mtime in fns_and_mtime:
227 for copy_fn, working_fn, mtime in fns_and_mtime:
228 if os.lstat(copy_fn).st_mtime != mtime:
228 if os.lstat(copy_fn).st_mtime != mtime:
229 ui.debug('file changed while diffing. '
229 ui.debug('file changed while diffing. '
230 'Overwriting: %s (src: %s)\n' % (working_fn, copy_fn))
230 'Overwriting: %s (src: %s)\n' % (working_fn, copy_fn))
231 util.copyfile(copy_fn, working_fn)
231 util.copyfile(copy_fn, working_fn)
232
232
233 return 1
233 return 1
234 finally:
234 finally:
235 ui.note(_('cleaning up temp directory\n'))
235 ui.note(_('cleaning up temp directory\n'))
236 shutil.rmtree(tmproot)
236 shutil.rmtree(tmproot)
237
237
238 def extdiff(ui, repo, *pats, **opts):
238 def extdiff(ui, repo, *pats, **opts):
239 '''use external program to diff repository (or selected files)
239 '''use external program to diff repository (or selected files)
240
240
241 Show differences between revisions for the specified files, using
241 Show differences between revisions for the specified files, using
242 an external program. The default program used is diff, with
242 an external program. The default program used is diff, with
243 default options "-Npru".
243 default options "-Npru".
244
244
245 To select a different program, use the -p/--program option. The
245 To select a different program, use the -p/--program option. The
246 program will be passed the names of two directories to compare. To
246 program will be passed the names of two directories to compare. To
247 pass additional options to the program, use -o/--option. These
247 pass additional options to the program, use -o/--option. These
248 will be passed before the names of the directories to compare.
248 will be passed before the names of the directories to compare.
249
249
250     When two revision arguments are given, changes are shown
250     When two revision arguments are given, changes are shown
251 between those revisions. If only one revision is specified then
251 between those revisions. If only one revision is specified then
252 that revision is compared to the working directory, and, when no
252 that revision is compared to the working directory, and, when no
253 revisions are specified, the working directory files are compared
253 revisions are specified, the working directory files are compared
254 to its parent.'''
254 to its parent.'''
255 program = opts.get('program')
255 program = opts.get('program')
256 option = opts.get('option')
256 option = opts.get('option')
257 if not program:
257 if not program:
258 program = 'diff'
258 program = 'diff'
259 option = option or ['-Npru']
259 option = option or ['-Npru']
260 return dodiff(ui, repo, program, option, pats, opts)
260 return dodiff(ui, repo, program, option, pats, opts)
261
261
262 cmdtable = {
262 cmdtable = {
263 "extdiff":
263 "extdiff":
264 (extdiff,
264 (extdiff,
265 [('p', 'program', '',
265 [('p', 'program', '',
266 _('comparison program to run'), _('CMD')),
266 _('comparison program to run'), _('CMD')),
267 ('o', 'option', [],
267 ('o', 'option', [],
268 _('pass option to comparison program'), _('OPT')),
268 _('pass option to comparison program'), _('OPT')),
269 ('r', 'rev', [],
269 ('r', 'rev', [],
270 _('revision'), _('REV')),
270 _('revision'), _('REV')),
271 ('c', 'change', '',
271 ('c', 'change', '',
272 _('change made by revision'), _('REV')),
272 _('change made by revision'), _('REV')),
273 ] + commands.walkopts,
273 ] + commands.walkopts,
274 _('hg extdiff [OPT]... [FILE]...')),
274 _('hg extdiff [OPT]... [FILE]...')),
275 }
275 }
276
276
277 def uisetup(ui):
277 def uisetup(ui):
278 for cmd, path in ui.configitems('extdiff'):
278 for cmd, path in ui.configitems('extdiff'):
279 if cmd.startswith('cmd.'):
279 if cmd.startswith('cmd.'):
280 cmd = cmd[4:]
280 cmd = cmd[4:]
281 if not path:
281 if not path:
282 path = cmd
282 path = cmd
283 diffopts = ui.config('extdiff', 'opts.' + cmd, '')
283 diffopts = ui.config('extdiff', 'opts.' + cmd, '')
284 diffopts = diffopts and [diffopts] or []
284 diffopts = diffopts and [diffopts] or []
285 elif cmd.startswith('opts.'):
285 elif cmd.startswith('opts.'):
286 continue
286 continue
287 else:
287 else:
288 # command = path opts
288 # command = path opts
289 if path:
289 if path:
290 diffopts = shlex.split(path)
290 diffopts = shlex.split(path)
291 path = diffopts.pop(0)
291 path = diffopts.pop(0)
292 else:
292 else:
293 path, diffopts = cmd, []
293 path, diffopts = cmd, []
294 # look for diff arguments in [diff-tools] then [merge-tools]
294 # look for diff arguments in [diff-tools] then [merge-tools]
295 if diffopts == []:
295 if diffopts == []:
296 args = ui.config('diff-tools', cmd+'.diffargs') or \
296 args = ui.config('diff-tools', cmd+'.diffargs') or \
297 ui.config('merge-tools', cmd+'.diffargs')
297 ui.config('merge-tools', cmd+'.diffargs')
298 if args:
298 if args:
299 diffopts = shlex.split(args)
299 diffopts = shlex.split(args)
300 def save(cmd, path, diffopts):
300 def save(cmd, path, diffopts):
301 '''use closure to save diff command to use'''
301 '''use closure to save diff command to use'''
302 def mydiff(ui, repo, *pats, **opts):
302 def mydiff(ui, repo, *pats, **opts):
303 return dodiff(ui, repo, path, diffopts + opts['option'],
303 return dodiff(ui, repo, path, diffopts + opts['option'],
304 pats, opts)
304 pats, opts)
305 doc = _('''\
305 doc = _('''\
306 use %(path)s to diff repository (or selected files)
306 use %(path)s to diff repository (or selected files)
307
307
308 Show differences between revisions for the specified files, using
308 Show differences between revisions for the specified files, using
309 the %(path)s program.
309 the %(path)s program.
310
310
311 When two revision arguments are given, changes are shown
311 When two revision arguments are given, changes are shown
312 between those revisions. If only one revision is specified then
312 between those revisions. If only one revision is specified then
313 that revision is compared to the working directory, and, when no
313 that revision is compared to the working directory, and, when no
314 revisions are specified, the working directory files are compared
314 revisions are specified, the working directory files are compared
315 to its parent.\
315 to its parent.\
316 ''') % dict(path=util.uirepr(path))
316 ''') % dict(path=util.uirepr(path))
317
317
318 # We must translate the docstring right away since it is
318 # We must translate the docstring right away since it is
319 # used as a format string. The string will unfortunately
319 # used as a format string. The string will unfortunately
320 # be translated again in commands.helpcmd and this will
320 # be translated again in commands.helpcmd and this will
321 # fail when the docstring contains non-ASCII characters.
321 # fail when the docstring contains non-ASCII characters.
322 # Decoding the string to a Unicode string here (using the
322 # Decoding the string to a Unicode string here (using the
323 # right encoding) prevents that.
323 # right encoding) prevents that.
324 mydiff.__doc__ = doc.decode(encoding.encoding)
324 mydiff.__doc__ = doc.decode(encoding.encoding)
325 return mydiff
325 return mydiff
326 cmdtable[cmd] = (save(cmd, path, diffopts),
326 cmdtable[cmd] = (save(cmd, path, diffopts),
327 cmdtable['extdiff'][1][1:],
327 cmdtable['extdiff'][1][1:],
328 _('hg %s [OPTION]... [FILE]...') % cmd)
328 _('hg %s [OPTION]... [FILE]...') % cmd)
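
# A minimal, hedged sketch of the $variable expansion that dodiff() above
# performs before invoking the external tool. This is not the extension's
# code path: pipes.quote stands in for util.shellquote, and the paths and
# labels are invented.
import re
import pipes

do3way = False
replace = dict(parent='/tmp/snap.1a', parent1='/tmp/snap.1a', parent2=None,
               plabel1='@1', plabel2='', clabel='@2', child='/tmp/snap.2',
               root='/repo')

def quote(match):
    key = match.group()[1:]
    if not do3way and key == 'parent2':
        return ''
    return pipes.quote(replace[key])

regex = r'\$(parent2|parent1?|child|plabel1|plabel2|clabel|root)'
args = "--L1 '$plabel1' --L2 '$clabel' $parent $child"
print re.sub(regex, quote, args)
# --L1 '@1' --L2 '@2' /tmp/snap.1a /tmp/snap.2
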
@@ -1,184 +1,184 b''
1 # Mercurial extension to provide 'hg relink' command
1 # Mercurial extension to provide 'hg relink' command
2 #
2 #
3 # Copyright (C) 2007 Brendan Cully <brendan@kublai.com>
3 # Copyright (C) 2007 Brendan Cully <brendan@kublai.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 """recreates hardlinks between repository clones"""
8 """recreates hardlinks between repository clones"""
9
9
10 from mercurial import hg, util
10 from mercurial import hg, util
11 from mercurial.i18n import _
11 from mercurial.i18n import _
12 import os, stat
12 import os, stat
13
13
14 def relink(ui, repo, origin=None, **opts):
14 def relink(ui, repo, origin=None, **opts):
15 """recreate hardlinks between two repositories
15 """recreate hardlinks between two repositories
16
16
17 When repositories are cloned locally, their data files will be
17 When repositories are cloned locally, their data files will be
18 hardlinked so that they only use the space of a single repository.
18 hardlinked so that they only use the space of a single repository.
19
19
20 Unfortunately, subsequent pulls into either repository will break
20 Unfortunately, subsequent pulls into either repository will break
21 hardlinks for any files touched by the new changesets, even if
21 hardlinks for any files touched by the new changesets, even if
22 both repositories end up pulling the same changes.
22 both repositories end up pulling the same changes.
23
23
24 Similarly, passing --rev to "hg clone" will fail to use any
24 Similarly, passing --rev to "hg clone" will fail to use any
25 hardlinks, falling back to a complete copy of the source
25 hardlinks, falling back to a complete copy of the source
26 repository.
26 repository.
27
27
28 This command lets you recreate those hardlinks and reclaim that
28 This command lets you recreate those hardlinks and reclaim that
29 wasted space.
29 wasted space.
30
30
31 This repository will be relinked to share space with ORIGIN, which
31 This repository will be relinked to share space with ORIGIN, which
32     must be on the same local disk. If ORIGIN is omitted, Mercurial looks
32     must be on the same local disk. If ORIGIN is omitted, Mercurial looks
33     for "default-relink", then "default", in [paths].
33     for "default-relink", then "default", in [paths].
34
34
35 Do not attempt any read operations on this repository while the
35 Do not attempt any read operations on this repository while the
36 command is running. (Both repositories will be locked against
36 command is running. (Both repositories will be locked against
37 writes.)
37 writes.)
38 """
38 """
39 if not hasattr(util, 'samefile') or not hasattr(util, 'samedevice'):
39 if not hasattr(util, 'samefile') or not hasattr(util, 'samedevice'):
40 raise util.Abort(_('hardlinks are not supported on this system'))
40 raise util.Abort(_('hardlinks are not supported on this system'))
41 src = hg.repository(hg.remoteui(repo, opts),
41 src = hg.repository(hg.remoteui(repo, opts),
42 ui.expandpath(origin or 'default-relink',
42 ui.expandpath(origin or 'default-relink',
43 origin or 'default'))
43 origin or 'default'))
44 if not src.local():
44 if not src.local():
45 raise util.Abort(_('must specify local origin repository'))
45 raise util.Abort(_('must specify local origin repository'))
46 ui.status(_('relinking %s to %s\n') % (src.store.path, repo.store.path))
46 ui.status(_('relinking %s to %s\n') % (src.store.path, repo.store.path))
47 if repo.root == src.root:
47 if repo.root == src.root:
48 ui.status(_('there is nothing to relink\n'))
48 ui.status(_('there is nothing to relink\n'))
49 return
49 return
50
50
51 locallock = repo.lock()
51 locallock = repo.lock()
52 try:
52 try:
53 remotelock = src.lock()
53 remotelock = src.lock()
54 try:
54 try:
55 candidates = sorted(collect(src, ui))
55 candidates = sorted(collect(src, ui))
56 targets = prune(candidates, src.store.path, repo.store.path, ui)
56 targets = prune(candidates, src.store.path, repo.store.path, ui)
57 do_relink(src.store.path, repo.store.path, targets, ui)
57 do_relink(src.store.path, repo.store.path, targets, ui)
58 finally:
58 finally:
59 remotelock.release()
59 remotelock.release()
60 finally:
60 finally:
61 locallock.release()
61 locallock.release()
62
62
63 def collect(src, ui):
63 def collect(src, ui):
64 seplen = len(os.path.sep)
64 seplen = len(os.path.sep)
65 candidates = []
65 candidates = []
66 live = len(src['tip'].manifest())
66 live = len(src['tip'].manifest())
67 # Your average repository has some files which were deleted before
67 # Your average repository has some files which were deleted before
68 # the tip revision. We account for that by assuming that there are
68 # the tip revision. We account for that by assuming that there are
69 # 3 tracked files for every 2 live files as of the tip version of
69 # 3 tracked files for every 2 live files as of the tip version of
70 # the repository.
70 # the repository.
71 #
71 #
72 # mozilla-central as of 2010-06-10 had a ratio of just over 7:5.
72 # mozilla-central as of 2010-06-10 had a ratio of just over 7:5.
73 total = live * 3 // 2
73 total = live * 3 // 2
74 src = src.store.path
74 src = src.store.path
75 pos = 0
75 pos = 0
76 ui.status(_("tip has %d files, estimated total number of files: %s\n")
76 ui.status(_("tip has %d files, estimated total number of files: %s\n")
77 % (live, total))
77 % (live, total))
78 for dirpath, dirnames, filenames in os.walk(src):
78 for dirpath, dirnames, filenames in os.walk(src):
79 dirnames.sort()
79 dirnames.sort()
80 relpath = dirpath[len(src) + seplen:]
80 relpath = dirpath[len(src) + seplen:]
81 for filename in sorted(filenames):
81 for filename in sorted(filenames):
82 if not filename[-2:] in ('.d', '.i'):
82 if not filename[-2:] in ('.d', '.i'):
83 continue
83 continue
84 st = os.stat(os.path.join(dirpath, filename))
84 st = os.stat(os.path.join(dirpath, filename))
85 if not stat.S_ISREG(st.st_mode):
85 if not stat.S_ISREG(st.st_mode):
86 continue
86 continue
87 pos += 1
87 pos += 1
88 candidates.append((os.path.join(relpath, filename), st))
88 candidates.append((os.path.join(relpath, filename), st))
89 ui.progress(_('collecting'), pos, filename, _('files'), total)
89 ui.progress(_('collecting'), pos, filename, _('files'), total)
90
90
91 ui.progress(_('collecting'), None)
91 ui.progress(_('collecting'), None)
92 ui.status(_('collected %d candidate storage files\n') % len(candidates))
92 ui.status(_('collected %d candidate storage files\n') % len(candidates))
93 return candidates
93 return candidates
94
94
95 def prune(candidates, src, dst, ui):
95 def prune(candidates, src, dst, ui):
96 def linkfilter(src, dst, st):
96 def linkfilter(src, dst, st):
97 try:
97 try:
98 ts = os.stat(dst)
98 ts = os.stat(dst)
99 except OSError:
99 except OSError:
100 # Destination doesn't have this file?
100 # Destination doesn't have this file?
101 return False
101 return False
102 if util.samefile(src, dst):
102 if util.samefile(src, dst):
103 return False
103 return False
104 if not util.samedevice(src, dst):
104 if not util.samedevice(src, dst):
105 # No point in continuing
105 # No point in continuing
106 raise util.Abort(
106 raise util.Abort(
107 _('source and destination are on different devices'))
107 _('source and destination are on different devices'))
108 if st.st_size != ts.st_size:
108 if st.st_size != ts.st_size:
109 return False
109 return False
110 return st
110 return st
111
111
112 targets = []
112 targets = []
113 total = len(candidates)
113 total = len(candidates)
114 pos = 0
114 pos = 0
115 for fn, st in candidates:
115 for fn, st in candidates:
116 pos += 1
116 pos += 1
117 srcpath = os.path.join(src, fn)
117 srcpath = os.path.join(src, fn)
118 tgt = os.path.join(dst, fn)
118 tgt = os.path.join(dst, fn)
119 ts = linkfilter(srcpath, tgt, st)
119 ts = linkfilter(srcpath, tgt, st)
120 if not ts:
120 if not ts:
121 ui.debug(_('not linkable: %s\n') % fn)
121 ui.debug(_('not linkable: %s\n') % fn)
122 continue
122 continue
123 targets.append((fn, ts.st_size))
123 targets.append((fn, ts.st_size))
124 ui.progress(_('pruning'), pos, fn, _('files'), total)
124 ui.progress(_('pruning'), pos, fn, _('files'), total)
125
125
126 ui.progress(_('pruning'), None)
126 ui.progress(_('pruning'), None)
127 ui.status(_('pruned down to %d probably relinkable files\n') % len(targets))
127 ui.status(_('pruned down to %d probably relinkable files\n') % len(targets))
128 return targets
128 return targets
129
129
130 def do_relink(src, dst, files, ui):
130 def do_relink(src, dst, files, ui):
131 def relinkfile(src, dst):
131 def relinkfile(src, dst):
132 bak = dst + '.bak'
132 bak = dst + '.bak'
133 os.rename(dst, bak)
133 os.rename(dst, bak)
134 try:
134 try:
135 util.os_link(src, dst)
135 util.oslink(src, dst)
136 except OSError:
136 except OSError:
137 os.rename(bak, dst)
137 os.rename(bak, dst)
138 raise
138 raise
139 os.remove(bak)
139 os.remove(bak)
140
140
141 CHUNKLEN = 65536
141 CHUNKLEN = 65536
142 relinked = 0
142 relinked = 0
143 savedbytes = 0
143 savedbytes = 0
144
144
145 pos = 0
145 pos = 0
146 total = len(files)
146 total = len(files)
147 for f, sz in files:
147 for f, sz in files:
148 pos += 1
148 pos += 1
149 source = os.path.join(src, f)
149 source = os.path.join(src, f)
150 tgt = os.path.join(dst, f)
150 tgt = os.path.join(dst, f)
151 # Binary mode, so that read() works correctly, especially on Windows
151 # Binary mode, so that read() works correctly, especially on Windows
152 sfp = file(source, 'rb')
152 sfp = file(source, 'rb')
153 dfp = file(tgt, 'rb')
153 dfp = file(tgt, 'rb')
154 sin = sfp.read(CHUNKLEN)
154 sin = sfp.read(CHUNKLEN)
155 while sin:
155 while sin:
156 din = dfp.read(CHUNKLEN)
156 din = dfp.read(CHUNKLEN)
157 if sin != din:
157 if sin != din:
158 break
158 break
159 sin = sfp.read(CHUNKLEN)
159 sin = sfp.read(CHUNKLEN)
160 sfp.close()
160 sfp.close()
161 dfp.close()
161 dfp.close()
162 if sin:
162 if sin:
163 ui.debug(_('not linkable: %s\n') % f)
163 ui.debug(_('not linkable: %s\n') % f)
164 continue
164 continue
165 try:
165 try:
166 relinkfile(source, tgt)
166 relinkfile(source, tgt)
167 ui.progress(_('relinking'), pos, f, _('files'), total)
167 ui.progress(_('relinking'), pos, f, _('files'), total)
168 relinked += 1
168 relinked += 1
169 savedbytes += sz
169 savedbytes += sz
170 except OSError, inst:
170 except OSError, inst:
171 ui.warn('%s: %s\n' % (tgt, str(inst)))
171 ui.warn('%s: %s\n' % (tgt, str(inst)))
172
172
173 ui.progress(_('relinking'), None)
173 ui.progress(_('relinking'), None)
174
174
175 ui.status(_('relinked %d files (%s reclaimed)\n') %
175 ui.status(_('relinked %d files (%s reclaimed)\n') %
176 (relinked, util.bytecount(savedbytes)))
176 (relinked, util.bytecount(savedbytes)))
177
177
178 cmdtable = {
178 cmdtable = {
179 'relink': (
179 'relink': (
180 relink,
180 relink,
181 [],
181 [],
182 _('[ORIGIN]')
182 _('[ORIGIN]')
183 )
183 )
184 }
184 }
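
# A hedged, standalone sketch of the relink core above: two store files are
# hardlinked only when their contents are byte-identical, and the target is
# kept as a .bak until the link succeeds. os.link stands in for util.oslink;
# the paths in the commented usage lines are invented.
import os

CHUNKLEN = 65536

def samecontent(a, b):
    fa, fb = open(a, 'rb'), open(b, 'rb')
    try:
        while True:
            chunk = fa.read(CHUNKLEN)
            if chunk != fb.read(CHUNKLEN):
                return False
            if not chunk:
                return True
    finally:
        fa.close()
        fb.close()

def relinkfile(src, dst):
    bak = dst + '.bak'
    os.rename(dst, bak)
    try:
        os.link(src, dst)
    except OSError:
        os.rename(bak, dst)  # restore the original on failure
        raise
    os.remove(bak)

# if samecontent('a/.hg/store/00manifest.d', 'b/.hg/store/00manifest.d'):
#     relinkfile('a/.hg/store/00manifest.d', 'b/.hg/store/00manifest.d')
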
@@ -1,1397 +1,1397 b''
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import hex, nullid, nullrev, short
8 from node import hex, nullid, nullrev, short
9 from i18n import _
9 from i18n import _
10 import os, sys, errno, re, glob, tempfile
10 import os, sys, errno, re, glob, tempfile
11 import util, scmutil, templater, patch, error, templatekw
11 import util, scmutil, templater, patch, error, templatekw
12 import match as matchmod
12 import match as matchmod
13 import similar, revset, subrepo
13 import similar, revset, subrepo
14
14
15 revrangesep = ':'
15 revrangesep = ':'
16
16
17 def parsealiases(cmd):
17 def parsealiases(cmd):
18 return cmd.lstrip("^").split("|")
18 return cmd.lstrip("^").split("|")
19
19
20 def findpossible(cmd, table, strict=False):
20 def findpossible(cmd, table, strict=False):
21 """
21 """
22 Return cmd -> (aliases, command table entry)
22 Return cmd -> (aliases, command table entry)
23 for each matching command.
23 for each matching command.
24 Return debug commands (or their aliases) only if no normal command matches.
24 Return debug commands (or their aliases) only if no normal command matches.
25 """
25 """
26 choice = {}
26 choice = {}
27 debugchoice = {}
27 debugchoice = {}
28 for e in table.keys():
28 for e in table.keys():
29 aliases = parsealiases(e)
29 aliases = parsealiases(e)
30 found = None
30 found = None
31 if cmd in aliases:
31 if cmd in aliases:
32 found = cmd
32 found = cmd
33 elif not strict:
33 elif not strict:
34 for a in aliases:
34 for a in aliases:
35 if a.startswith(cmd):
35 if a.startswith(cmd):
36 found = a
36 found = a
37 break
37 break
38 if found is not None:
38 if found is not None:
39 if aliases[0].startswith("debug") or found.startswith("debug"):
39 if aliases[0].startswith("debug") or found.startswith("debug"):
40 debugchoice[found] = (aliases, table[e])
40 debugchoice[found] = (aliases, table[e])
41 else:
41 else:
42 choice[found] = (aliases, table[e])
42 choice[found] = (aliases, table[e])
43
43
44 if not choice and debugchoice:
44 if not choice and debugchoice:
45 choice = debugchoice
45 choice = debugchoice
46
46
47 return choice
47 return choice
48
48
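
# A toy, hedged illustration of the matching rules documented by
# findpossible() above. The table values are placeholders (real entries are
# (function, options, synopsis) tuples), and this assumes a Python 2
# environment where this version of Mercurial is importable.
from mercurial import cmdutil

table = {'^status|st': 1, 'summary|sum': 2, 'debugstate': 3}
print sorted(cmdutil.findpossible('s', table))        # ['status', 'summary'] (ambiguous)
print sorted(cmdutil.findpossible('st', table))       # ['st'] (exact alias match)
print sorted(cmdutil.findpossible('debugst', table))  # ['debugstate']
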
49 def findcmd(cmd, table, strict=True):
49 def findcmd(cmd, table, strict=True):
50 """Return (aliases, command table entry) for command string."""
50 """Return (aliases, command table entry) for command string."""
51 choice = findpossible(cmd, table, strict)
51 choice = findpossible(cmd, table, strict)
52
52
53 if cmd in choice:
53 if cmd in choice:
54 return choice[cmd]
54 return choice[cmd]
55
55
56 if len(choice) > 1:
56 if len(choice) > 1:
57 clist = choice.keys()
57 clist = choice.keys()
58 clist.sort()
58 clist.sort()
59 raise error.AmbiguousCommand(cmd, clist)
59 raise error.AmbiguousCommand(cmd, clist)
60
60
61 if choice:
61 if choice:
62 return choice.values()[0]
62 return choice.values()[0]
63
63
64 raise error.UnknownCommand(cmd)
64 raise error.UnknownCommand(cmd)
65
65
66 def findrepo(p):
66 def findrepo(p):
67 while not os.path.isdir(os.path.join(p, ".hg")):
67 while not os.path.isdir(os.path.join(p, ".hg")):
68 oldp, p = p, os.path.dirname(p)
68 oldp, p = p, os.path.dirname(p)
69 if p == oldp:
69 if p == oldp:
70 return None
70 return None
71
71
72 return p
72 return p
73
73
74 def bail_if_changed(repo):
74 def bail_if_changed(repo):
75 if repo.dirstate.p2() != nullid:
75 if repo.dirstate.p2() != nullid:
76 raise util.Abort(_('outstanding uncommitted merge'))
76 raise util.Abort(_('outstanding uncommitted merge'))
77 modified, added, removed, deleted = repo.status()[:4]
77 modified, added, removed, deleted = repo.status()[:4]
78 if modified or added or removed or deleted:
78 if modified or added or removed or deleted:
79 raise util.Abort(_("outstanding uncommitted changes"))
79 raise util.Abort(_("outstanding uncommitted changes"))
80
80
81 def logmessage(opts):
81 def logmessage(opts):
82     """get the log message according to the -m and -l options"""
82     """get the log message according to the -m and -l options"""
83 message = opts.get('message')
83 message = opts.get('message')
84 logfile = opts.get('logfile')
84 logfile = opts.get('logfile')
85
85
86 if message and logfile:
86 if message and logfile:
87 raise util.Abort(_('options --message and --logfile are mutually '
87 raise util.Abort(_('options --message and --logfile are mutually '
88 'exclusive'))
88 'exclusive'))
89 if not message and logfile:
89 if not message and logfile:
90 try:
90 try:
91 if logfile == '-':
91 if logfile == '-':
92 message = sys.stdin.read()
92 message = sys.stdin.read()
93 else:
93 else:
94 message = util.readfile(logfile)
94 message = util.readfile(logfile)
95 except IOError, inst:
95 except IOError, inst:
96 raise util.Abort(_("can't read commit message '%s': %s") %
96 raise util.Abort(_("can't read commit message '%s': %s") %
97 (logfile, inst.strerror))
97 (logfile, inst.strerror))
98 return message
98 return message
99
99
100 def loglimit(opts):
100 def loglimit(opts):
101 """get the log limit according to option -l/--limit"""
101 """get the log limit according to option -l/--limit"""
102 limit = opts.get('limit')
102 limit = opts.get('limit')
103 if limit:
103 if limit:
104 try:
104 try:
105 limit = int(limit)
105 limit = int(limit)
106 except ValueError:
106 except ValueError:
107 raise util.Abort(_('limit must be a positive integer'))
107 raise util.Abort(_('limit must be a positive integer'))
108 if limit <= 0:
108 if limit <= 0:
109 raise util.Abort(_('limit must be positive'))
109 raise util.Abort(_('limit must be positive'))
110 else:
110 else:
111 limit = None
111 limit = None
112 return limit
112 return limit
113
113
114 def revsingle(repo, revspec, default='.'):
114 def revsingle(repo, revspec, default='.'):
115 if not revspec:
115 if not revspec:
116 return repo[default]
116 return repo[default]
117
117
118 l = revrange(repo, [revspec])
118 l = revrange(repo, [revspec])
119 if len(l) < 1:
119 if len(l) < 1:
120 raise util.Abort(_('empty revision set'))
120 raise util.Abort(_('empty revision set'))
121 return repo[l[-1]]
121 return repo[l[-1]]
122
122
123 def revpair(repo, revs):
123 def revpair(repo, revs):
124 if not revs:
124 if not revs:
125 return repo.dirstate.p1(), None
125 return repo.dirstate.p1(), None
126
126
127 l = revrange(repo, revs)
127 l = revrange(repo, revs)
128
128
129 if len(l) == 0:
129 if len(l) == 0:
130 return repo.dirstate.p1(), None
130 return repo.dirstate.p1(), None
131
131
132 if len(l) == 1:
132 if len(l) == 1:
133 return repo.lookup(l[0]), None
133 return repo.lookup(l[0]), None
134
134
135 return repo.lookup(l[0]), repo.lookup(l[-1])
135 return repo.lookup(l[0]), repo.lookup(l[-1])
136
136
137 def revrange(repo, revs):
137 def revrange(repo, revs):
138     """Return a list of revisions from a list of revision specifications."""
138     """Return a list of revisions from a list of revision specifications."""
139
139
140 def revfix(repo, val, defval):
140 def revfix(repo, val, defval):
141 if not val and val != 0 and defval is not None:
141 if not val and val != 0 and defval is not None:
142 return defval
142 return defval
143 return repo.changelog.rev(repo.lookup(val))
143 return repo.changelog.rev(repo.lookup(val))
144
144
145 seen, l = set(), []
145 seen, l = set(), []
146 for spec in revs:
146 for spec in revs:
147 # attempt to parse old-style ranges first to deal with
147 # attempt to parse old-style ranges first to deal with
148 # things like old-tag which contain query metacharacters
148 # things like old-tag which contain query metacharacters
149 try:
149 try:
150 if isinstance(spec, int):
150 if isinstance(spec, int):
151 seen.add(spec)
151 seen.add(spec)
152 l.append(spec)
152 l.append(spec)
153 continue
153 continue
154
154
155 if revrangesep in spec:
155 if revrangesep in spec:
156 start, end = spec.split(revrangesep, 1)
156 start, end = spec.split(revrangesep, 1)
157 start = revfix(repo, start, 0)
157 start = revfix(repo, start, 0)
158 end = revfix(repo, end, len(repo) - 1)
158 end = revfix(repo, end, len(repo) - 1)
159 step = start > end and -1 or 1
159 step = start > end and -1 or 1
160 for rev in xrange(start, end + step, step):
160 for rev in xrange(start, end + step, step):
161 if rev in seen:
161 if rev in seen:
162 continue
162 continue
163 seen.add(rev)
163 seen.add(rev)
164 l.append(rev)
164 l.append(rev)
165 continue
165 continue
166 elif spec and spec in repo: # single unquoted rev
166 elif spec and spec in repo: # single unquoted rev
167 rev = revfix(repo, spec, None)
167 rev = revfix(repo, spec, None)
168 if rev in seen:
168 if rev in seen:
169 continue
169 continue
170 seen.add(rev)
170 seen.add(rev)
171 l.append(rev)
171 l.append(rev)
172 continue
172 continue
173 except error.RepoLookupError:
173 except error.RepoLookupError:
174 pass
174 pass
175
175
176 # fall through to new-style queries if old-style fails
176 # fall through to new-style queries if old-style fails
177 m = revset.match(repo.ui, spec)
177 m = revset.match(repo.ui, spec)
178 for r in m(repo, range(len(repo))):
178 for r in m(repo, range(len(repo))):
179 if r not in seen:
179 if r not in seen:
180 l.append(r)
180 l.append(r)
181 seen.update(l)
181 seen.update(l)
182
182
183 return l
183 return l
184
184
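
# A hedged usage sketch of revsingle/revpair/revrange above. It assumes it
# is run inside an existing Mercurial checkout with at least three
# revisions; the output shown in the comments is only an example.
from mercurial import ui as uimod, hg, cmdutil

repo = hg.repository(uimod.ui(), '.')
print cmdutil.revrange(repo, ['0:2'])      # e.g. [0, 1, 2]
print cmdutil.revsingle(repo, '.').rev()   # revision number of the working parent
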
185 def make_filename(repo, pat, node,
185 def make_filename(repo, pat, node,
186 total=None, seqno=None, revwidth=None, pathname=None):
186 total=None, seqno=None, revwidth=None, pathname=None):
187 node_expander = {
187 node_expander = {
188 'H': lambda: hex(node),
188 'H': lambda: hex(node),
189 'R': lambda: str(repo.changelog.rev(node)),
189 'R': lambda: str(repo.changelog.rev(node)),
190 'h': lambda: short(node),
190 'h': lambda: short(node),
191 }
191 }
192 expander = {
192 expander = {
193 '%': lambda: '%',
193 '%': lambda: '%',
194 'b': lambda: os.path.basename(repo.root),
194 'b': lambda: os.path.basename(repo.root),
195 }
195 }
196
196
197 try:
197 try:
198 if node:
198 if node:
199 expander.update(node_expander)
199 expander.update(node_expander)
200 if node:
200 if node:
201 expander['r'] = (lambda:
201 expander['r'] = (lambda:
202 str(repo.changelog.rev(node)).zfill(revwidth or 0))
202 str(repo.changelog.rev(node)).zfill(revwidth or 0))
203 if total is not None:
203 if total is not None:
204 expander['N'] = lambda: str(total)
204 expander['N'] = lambda: str(total)
205 if seqno is not None:
205 if seqno is not None:
206 expander['n'] = lambda: str(seqno)
206 expander['n'] = lambda: str(seqno)
207 if total is not None and seqno is not None:
207 if total is not None and seqno is not None:
208 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
208 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
209 if pathname is not None:
209 if pathname is not None:
210 expander['s'] = lambda: os.path.basename(pathname)
210 expander['s'] = lambda: os.path.basename(pathname)
211 expander['d'] = lambda: os.path.dirname(pathname) or '.'
211 expander['d'] = lambda: os.path.dirname(pathname) or '.'
212 expander['p'] = lambda: pathname
212 expander['p'] = lambda: pathname
213
213
214 newname = []
214 newname = []
215 patlen = len(pat)
215 patlen = len(pat)
216 i = 0
216 i = 0
217 while i < patlen:
217 while i < patlen:
218 c = pat[i]
218 c = pat[i]
219 if c == '%':
219 if c == '%':
220 i += 1
220 i += 1
221 c = pat[i]
221 c = pat[i]
222 c = expander[c]()
222 c = expander[c]()
223 newname.append(c)
223 newname.append(c)
224 i += 1
224 i += 1
225 return ''.join(newname)
225 return ''.join(newname)
226 except KeyError, inst:
226 except KeyError, inst:
227 raise util.Abort(_("invalid format spec '%%%s' in output filename") %
227 raise util.Abort(_("invalid format spec '%%%s' in output filename") %
228 inst.args[0])
228 inst.args[0])
229
229
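
# A minimal, standalone sketch of the % expansion loop in make_filename()
# above. The expander entries here are invented stand-ins for the
# repo-derived ones (%H, %R, %h, %b, %r, %N, %n, %s, %d, %p).
def expand(pat, expander):
    newname = []
    i = 0
    while i < len(pat):
        c = pat[i]
        if c == '%':
            i += 1
            c = expander[pat[i]]()
        newname.append(c)
        i += 1
    return ''.join(newname)

expander = {'%': lambda: '%', 'b': lambda: 'myrepo', 'n': lambda: '01'}
print expand('%b-patch-%n.diff', expander)   # myrepo-patch-01.diff
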
230 def make_file(repo, pat, node=None,
230 def make_file(repo, pat, node=None,
231 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
231 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
232
232
233 writable = mode not in ('r', 'rb')
233 writable = mode not in ('r', 'rb')
234
234
235 if not pat or pat == '-':
235 if not pat or pat == '-':
236 fp = writable and sys.stdout or sys.stdin
236 fp = writable and sys.stdout or sys.stdin
237 return os.fdopen(os.dup(fp.fileno()), mode)
237 return os.fdopen(os.dup(fp.fileno()), mode)
238 if hasattr(pat, 'write') and writable:
238 if hasattr(pat, 'write') and writable:
239 return pat
239 return pat
240 if hasattr(pat, 'read') and 'r' in mode:
240 if hasattr(pat, 'read') and 'r' in mode:
241 return pat
241 return pat
242 return open(make_filename(repo, pat, node, total, seqno, revwidth,
242 return open(make_filename(repo, pat, node, total, seqno, revwidth,
243 pathname),
243 pathname),
244 mode)
244 mode)
245
245
246 def expandpats(pats):
246 def expandpats(pats):
247 if not util.expandglobs:
247 if not util.expandglobs:
248 return list(pats)
248 return list(pats)
249 ret = []
249 ret = []
250 for p in pats:
250 for p in pats:
251 kind, name = matchmod._patsplit(p, None)
251 kind, name = matchmod._patsplit(p, None)
252 if kind is None:
252 if kind is None:
253 try:
253 try:
254 globbed = glob.glob(name)
254 globbed = glob.glob(name)
255 except re.error:
255 except re.error:
256 globbed = [name]
256 globbed = [name]
257 if globbed:
257 if globbed:
258 ret.extend(globbed)
258 ret.extend(globbed)
259 continue
259 continue
260 ret.append(p)
260 ret.append(p)
261 return ret
261 return ret
262
262
263 def match(repo, pats=[], opts={}, globbed=False, default='relpath'):
263 def match(repo, pats=[], opts={}, globbed=False, default='relpath'):
264 if pats == ("",):
264 if pats == ("",):
265 pats = []
265 pats = []
266 if not globbed and default == 'relpath':
266 if not globbed and default == 'relpath':
267 pats = expandpats(pats or [])
267 pats = expandpats(pats or [])
268 m = matchmod.match(repo.root, repo.getcwd(), pats,
268 m = matchmod.match(repo.root, repo.getcwd(), pats,
269 opts.get('include'), opts.get('exclude'), default,
269 opts.get('include'), opts.get('exclude'), default,
270 auditor=repo.auditor)
270 auditor=repo.auditor)
271 def badfn(f, msg):
271 def badfn(f, msg):
272 repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
272 repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
273 m.bad = badfn
273 m.bad = badfn
274 return m
274 return m
275
275
276 def matchall(repo):
276 def matchall(repo):
277 return matchmod.always(repo.root, repo.getcwd())
277 return matchmod.always(repo.root, repo.getcwd())
278
278
279 def matchfiles(repo, files):
279 def matchfiles(repo, files):
280 return matchmod.exact(repo.root, repo.getcwd(), files)
280 return matchmod.exact(repo.root, repo.getcwd(), files)
281
281
282 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
282 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
283 if dry_run is None:
283 if dry_run is None:
284 dry_run = opts.get('dry_run')
284 dry_run = opts.get('dry_run')
285 if similarity is None:
285 if similarity is None:
286 similarity = float(opts.get('similarity') or 0)
286 similarity = float(opts.get('similarity') or 0)
287 # we'd use status here, except handling of symlinks and ignore is tricky
287 # we'd use status here, except handling of symlinks and ignore is tricky
288 added, unknown, deleted, removed = [], [], [], []
288 added, unknown, deleted, removed = [], [], [], []
289 audit_path = scmutil.pathauditor(repo.root)
289 audit_path = scmutil.pathauditor(repo.root)
290 m = match(repo, pats, opts)
290 m = match(repo, pats, opts)
291 for abs in repo.walk(m):
291 for abs in repo.walk(m):
292 target = repo.wjoin(abs)
292 target = repo.wjoin(abs)
293 good = True
293 good = True
294 try:
294 try:
295 audit_path(abs)
295 audit_path(abs)
296 except (OSError, util.Abort):
296 except (OSError, util.Abort):
297 good = False
297 good = False
298 rel = m.rel(abs)
298 rel = m.rel(abs)
299 exact = m.exact(abs)
299 exact = m.exact(abs)
300 if good and abs not in repo.dirstate:
300 if good and abs not in repo.dirstate:
301 unknown.append(abs)
301 unknown.append(abs)
302 if repo.ui.verbose or not exact:
302 if repo.ui.verbose or not exact:
303 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
303 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
304 elif repo.dirstate[abs] != 'r' and (not good or not os.path.lexists(target)
304 elif repo.dirstate[abs] != 'r' and (not good or not os.path.lexists(target)
305 or (os.path.isdir(target) and not os.path.islink(target))):
305 or (os.path.isdir(target) and not os.path.islink(target))):
306 deleted.append(abs)
306 deleted.append(abs)
307 if repo.ui.verbose or not exact:
307 if repo.ui.verbose or not exact:
308 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
308 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
309 # for finding renames
309 # for finding renames
310 elif repo.dirstate[abs] == 'r':
310 elif repo.dirstate[abs] == 'r':
311 removed.append(abs)
311 removed.append(abs)
312 elif repo.dirstate[abs] == 'a':
312 elif repo.dirstate[abs] == 'a':
313 added.append(abs)
313 added.append(abs)
314 copies = {}
314 copies = {}
315 if similarity > 0:
315 if similarity > 0:
316 for old, new, score in similar.findrenames(repo,
316 for old, new, score in similar.findrenames(repo,
317 added + unknown, removed + deleted, similarity):
317 added + unknown, removed + deleted, similarity):
318 if repo.ui.verbose or not m.exact(old) or not m.exact(new):
318 if repo.ui.verbose or not m.exact(old) or not m.exact(new):
319 repo.ui.status(_('recording removal of %s as rename to %s '
319 repo.ui.status(_('recording removal of %s as rename to %s '
320 '(%d%% similar)\n') %
320 '(%d%% similar)\n') %
321 (m.rel(old), m.rel(new), score * 100))
321 (m.rel(old), m.rel(new), score * 100))
322 copies[new] = old
322 copies[new] = old
323
323
324 if not dry_run:
324 if not dry_run:
325 wctx = repo[None]
325 wctx = repo[None]
326 wlock = repo.wlock()
326 wlock = repo.wlock()
327 try:
327 try:
328 wctx.remove(deleted)
328 wctx.remove(deleted)
329 wctx.add(unknown)
329 wctx.add(unknown)
330 for new, old in copies.iteritems():
330 for new, old in copies.iteritems():
331 wctx.copy(old, new)
331 wctx.copy(old, new)
332 finally:
332 finally:
333 wlock.release()
333 wlock.release()
334
334
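For orientation, the similarity value at this layer is a ratio between 0 and 1 (the score printed above is multiplied by 100 for display), while the command line takes a percentage. A hedged usage sketch, with the repository and paths assumed:

    # report what would be added/removed, treating >=90%-similar files as renames
    addremove(repo, pats=['.'], opts={'dry_run': True}, similarity=0.90)

    # command-line form (percentage):
    #   hg addremove --similarity 90 --dry-run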
335 def updatedir(ui, repo, patches, similarity=0):
335 def updatedir(ui, repo, patches, similarity=0):
336 '''Update dirstate after patch application according to metadata'''
336 '''Update dirstate after patch application according to metadata'''
337 if not patches:
337 if not patches:
338 return
338 return
339 copies = []
339 copies = []
340 removes = set()
340 removes = set()
341 cfiles = patches.keys()
341 cfiles = patches.keys()
342 cwd = repo.getcwd()
342 cwd = repo.getcwd()
343 if cwd:
343 if cwd:
344 cfiles = [util.pathto(repo.root, cwd, f) for f in patches.keys()]
344 cfiles = [util.pathto(repo.root, cwd, f) for f in patches.keys()]
345 for f in patches:
345 for f in patches:
346 gp = patches[f]
346 gp = patches[f]
347 if not gp:
347 if not gp:
348 continue
348 continue
349 if gp.op == 'RENAME':
349 if gp.op == 'RENAME':
350 copies.append((gp.oldpath, gp.path))
350 copies.append((gp.oldpath, gp.path))
351 removes.add(gp.oldpath)
351 removes.add(gp.oldpath)
352 elif gp.op == 'COPY':
352 elif gp.op == 'COPY':
353 copies.append((gp.oldpath, gp.path))
353 copies.append((gp.oldpath, gp.path))
354 elif gp.op == 'DELETE':
354 elif gp.op == 'DELETE':
355 removes.add(gp.path)
355 removes.add(gp.path)
356
356
357 wctx = repo[None]
357 wctx = repo[None]
358 for src, dst in copies:
358 for src, dst in copies:
359 dirstatecopy(ui, repo, wctx, src, dst, cwd=cwd)
359 dirstatecopy(ui, repo, wctx, src, dst, cwd=cwd)
360 if (not similarity) and removes:
360 if (not similarity) and removes:
361 wctx.remove(sorted(removes), True)
361 wctx.remove(sorted(removes), True)
362
362
363 for f in patches:
363 for f in patches:
364 gp = patches[f]
364 gp = patches[f]
365 if gp and gp.mode:
365 if gp and gp.mode:
366 islink, isexec = gp.mode
366 islink, isexec = gp.mode
367 dst = repo.wjoin(gp.path)
367 dst = repo.wjoin(gp.path)
368 # patch won't create empty files
368 # patch won't create empty files
369 if gp.op == 'ADD' and not os.path.lexists(dst):
369 if gp.op == 'ADD' and not os.path.lexists(dst):
370 flags = (isexec and 'x' or '') + (islink and 'l' or '')
370 flags = (isexec and 'x' or '') + (islink and 'l' or '')
371 repo.wwrite(gp.path, '', flags)
371 repo.wwrite(gp.path, '', flags)
372 util.set_flags(dst, islink, isexec)
372 util.setflags(dst, islink, isexec)
373 addremove(repo, cfiles, similarity=similarity)
373 addremove(repo, cfiles, similarity=similarity)
374 files = patches.keys()
374 files = patches.keys()
375 files.extend([r for r in removes if r not in files])
375 files.extend([r for r in removes if r not in files])
376 return sorted(files)
376 return sorted(files)
377
377
378 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
378 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
379 """Update the dirstate to reflect the intent of copying src to dst. For
379 """Update the dirstate to reflect the intent of copying src to dst. For
380 different reasons it might not end with dst being marked as copied from src.
380 different reasons it might not end with dst being marked as copied from src.
381 """
381 """
382 origsrc = repo.dirstate.copied(src) or src
382 origsrc = repo.dirstate.copied(src) or src
383 if dst == origsrc: # copying back a copy?
383 if dst == origsrc: # copying back a copy?
384 if repo.dirstate[dst] not in 'mn' and not dryrun:
384 if repo.dirstate[dst] not in 'mn' and not dryrun:
385 repo.dirstate.normallookup(dst)
385 repo.dirstate.normallookup(dst)
386 else:
386 else:
387 if repo.dirstate[origsrc] == 'a' and origsrc == src:
387 if repo.dirstate[origsrc] == 'a' and origsrc == src:
388 if not ui.quiet:
388 if not ui.quiet:
389 ui.warn(_("%s has not been committed yet, so no copy "
389 ui.warn(_("%s has not been committed yet, so no copy "
390 "data will be stored for %s.\n")
390 "data will be stored for %s.\n")
391 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
391 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
392 if repo.dirstate[dst] in '?r' and not dryrun:
392 if repo.dirstate[dst] in '?r' and not dryrun:
393 wctx.add([dst])
393 wctx.add([dst])
394 elif not dryrun:
394 elif not dryrun:
395 wctx.copy(origsrc, dst)
395 wctx.copy(origsrc, dst)
396
396
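Spelling out the branches above: dst ends up without copy metadata when it is the original source itself (it is at most dropped back to normallookup), or when the source was added but never committed (a warning is issued and dst is plainly add()ed); only in the remaining case is wctx.copy() recorded. A hedged call with made-up paths:

    wctx = repo[None]
    dirstatecopy(ui, repo, wctx, 'docs/old.txt', 'docs/new.txt', dryrun=False)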
397 def copy(ui, repo, pats, opts, rename=False):
397 def copy(ui, repo, pats, opts, rename=False):
398 # called with the repo lock held
398 # called with the repo lock held
399 #
399 #
400 # hgsep => pathname that uses "/" to separate directories
400 # hgsep => pathname that uses "/" to separate directories
401 # ossep => pathname that uses os.sep to separate directories
401 # ossep => pathname that uses os.sep to separate directories
402 cwd = repo.getcwd()
402 cwd = repo.getcwd()
403 targets = {}
403 targets = {}
404 after = opts.get("after")
404 after = opts.get("after")
405 dryrun = opts.get("dry_run")
405 dryrun = opts.get("dry_run")
406 wctx = repo[None]
406 wctx = repo[None]
407
407
408 def walkpat(pat):
408 def walkpat(pat):
409 srcs = []
409 srcs = []
410 badstates = after and '?' or '?r'
410 badstates = after and '?' or '?r'
411 m = match(repo, [pat], opts, globbed=True)
411 m = match(repo, [pat], opts, globbed=True)
412 for abs in repo.walk(m):
412 for abs in repo.walk(m):
413 state = repo.dirstate[abs]
413 state = repo.dirstate[abs]
414 rel = m.rel(abs)
414 rel = m.rel(abs)
415 exact = m.exact(abs)
415 exact = m.exact(abs)
416 if state in badstates:
416 if state in badstates:
417 if exact and state == '?':
417 if exact and state == '?':
418 ui.warn(_('%s: not copying - file is not managed\n') % rel)
418 ui.warn(_('%s: not copying - file is not managed\n') % rel)
419 if exact and state == 'r':
419 if exact and state == 'r':
420 ui.warn(_('%s: not copying - file has been marked for'
420 ui.warn(_('%s: not copying - file has been marked for'
421 ' remove\n') % rel)
421 ' remove\n') % rel)
422 continue
422 continue
423 # abs: hgsep
423 # abs: hgsep
424 # rel: ossep
424 # rel: ossep
425 srcs.append((abs, rel, exact))
425 srcs.append((abs, rel, exact))
426 return srcs
426 return srcs
427
427
428 # abssrc: hgsep
428 # abssrc: hgsep
429 # relsrc: ossep
429 # relsrc: ossep
430 # otarget: ossep
430 # otarget: ossep
431 def copyfile(abssrc, relsrc, otarget, exact):
431 def copyfile(abssrc, relsrc, otarget, exact):
432 abstarget = scmutil.canonpath(repo.root, cwd, otarget)
432 abstarget = scmutil.canonpath(repo.root, cwd, otarget)
433 reltarget = repo.pathto(abstarget, cwd)
433 reltarget = repo.pathto(abstarget, cwd)
434 target = repo.wjoin(abstarget)
434 target = repo.wjoin(abstarget)
435 src = repo.wjoin(abssrc)
435 src = repo.wjoin(abssrc)
436 state = repo.dirstate[abstarget]
436 state = repo.dirstate[abstarget]
437
437
438 scmutil.checkportable(ui, abstarget)
438 scmutil.checkportable(ui, abstarget)
439
439
440 # check for collisions
440 # check for collisions
441 prevsrc = targets.get(abstarget)
441 prevsrc = targets.get(abstarget)
442 if prevsrc is not None:
442 if prevsrc is not None:
443 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
443 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
444 (reltarget, repo.pathto(abssrc, cwd),
444 (reltarget, repo.pathto(abssrc, cwd),
445 repo.pathto(prevsrc, cwd)))
445 repo.pathto(prevsrc, cwd)))
446 return
446 return
447
447
448 # check for overwrites
448 # check for overwrites
449 exists = os.path.lexists(target)
449 exists = os.path.lexists(target)
450 if not after and exists or after and state in 'mn':
450 if not after and exists or after and state in 'mn':
451 if not opts['force']:
451 if not opts['force']:
452 ui.warn(_('%s: not overwriting - file exists\n') %
452 ui.warn(_('%s: not overwriting - file exists\n') %
453 reltarget)
453 reltarget)
454 return
454 return
455
455
456 if after:
456 if after:
457 if not exists:
457 if not exists:
458 if rename:
458 if rename:
459 ui.warn(_('%s: not recording move - %s does not exist\n') %
459 ui.warn(_('%s: not recording move - %s does not exist\n') %
460 (relsrc, reltarget))
460 (relsrc, reltarget))
461 else:
461 else:
462 ui.warn(_('%s: not recording copy - %s does not exist\n') %
462 ui.warn(_('%s: not recording copy - %s does not exist\n') %
463 (relsrc, reltarget))
463 (relsrc, reltarget))
464 return
464 return
465 elif not dryrun:
465 elif not dryrun:
466 try:
466 try:
467 if exists:
467 if exists:
468 os.unlink(target)
468 os.unlink(target)
469 targetdir = os.path.dirname(target) or '.'
469 targetdir = os.path.dirname(target) or '.'
470 if not os.path.isdir(targetdir):
470 if not os.path.isdir(targetdir):
471 os.makedirs(targetdir)
471 os.makedirs(targetdir)
472 util.copyfile(src, target)
472 util.copyfile(src, target)
473 except IOError, inst:
473 except IOError, inst:
474 if inst.errno == errno.ENOENT:
474 if inst.errno == errno.ENOENT:
475 ui.warn(_('%s: deleted in working copy\n') % relsrc)
475 ui.warn(_('%s: deleted in working copy\n') % relsrc)
476 else:
476 else:
477 ui.warn(_('%s: cannot copy - %s\n') %
477 ui.warn(_('%s: cannot copy - %s\n') %
478 (relsrc, inst.strerror))
478 (relsrc, inst.strerror))
479 return True # report a failure
479 return True # report a failure
480
480
481 if ui.verbose or not exact:
481 if ui.verbose or not exact:
482 if rename:
482 if rename:
483 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
483 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
484 else:
484 else:
485 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
485 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
486
486
487 targets[abstarget] = abssrc
487 targets[abstarget] = abssrc
488
488
489 # fix up dirstate
489 # fix up dirstate
490 dirstatecopy(ui, repo, wctx, abssrc, abstarget, dryrun=dryrun, cwd=cwd)
490 dirstatecopy(ui, repo, wctx, abssrc, abstarget, dryrun=dryrun, cwd=cwd)
491 if rename and not dryrun:
491 if rename and not dryrun:
492 wctx.remove([abssrc], not after)
492 wctx.remove([abssrc], not after)
493
493
494 # pat: ossep
494 # pat: ossep
495 # dest ossep
495 # dest ossep
496 # srcs: list of (hgsep, hgsep, ossep, bool)
496 # srcs: list of (hgsep, hgsep, ossep, bool)
497 # return: function that takes hgsep and returns ossep
497 # return: function that takes hgsep and returns ossep
498 def targetpathfn(pat, dest, srcs):
498 def targetpathfn(pat, dest, srcs):
499 if os.path.isdir(pat):
499 if os.path.isdir(pat):
500 abspfx = scmutil.canonpath(repo.root, cwd, pat)
500 abspfx = scmutil.canonpath(repo.root, cwd, pat)
501 abspfx = util.localpath(abspfx)
501 abspfx = util.localpath(abspfx)
502 if destdirexists:
502 if destdirexists:
503 striplen = len(os.path.split(abspfx)[0])
503 striplen = len(os.path.split(abspfx)[0])
504 else:
504 else:
505 striplen = len(abspfx)
505 striplen = len(abspfx)
506 if striplen:
506 if striplen:
507 striplen += len(os.sep)
507 striplen += len(os.sep)
508 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
508 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
509 elif destdirexists:
509 elif destdirexists:
510 res = lambda p: os.path.join(dest,
510 res = lambda p: os.path.join(dest,
511 os.path.basename(util.localpath(p)))
511 os.path.basename(util.localpath(p)))
512 else:
512 else:
513 res = lambda p: dest
513 res = lambda p: dest
514 return res
514 return res
515
515
516 # pat: ossep
516 # pat: ossep
517 # dest ossep
517 # dest ossep
518 # srcs: list of (hgsep, hgsep, ossep, bool)
518 # srcs: list of (hgsep, hgsep, ossep, bool)
519 # return: function that takes hgsep and returns ossep
519 # return: function that takes hgsep and returns ossep
520 def targetpathafterfn(pat, dest, srcs):
520 def targetpathafterfn(pat, dest, srcs):
521 if matchmod.patkind(pat):
521 if matchmod.patkind(pat):
522 # a mercurial pattern
522 # a mercurial pattern
523 res = lambda p: os.path.join(dest,
523 res = lambda p: os.path.join(dest,
524 os.path.basename(util.localpath(p)))
524 os.path.basename(util.localpath(p)))
525 else:
525 else:
526 abspfx = scmutil.canonpath(repo.root, cwd, pat)
526 abspfx = scmutil.canonpath(repo.root, cwd, pat)
527 if len(abspfx) < len(srcs[0][0]):
527 if len(abspfx) < len(srcs[0][0]):
528 # A directory. Either the target path contains the last
528 # A directory. Either the target path contains the last
529 # component of the source path or it does not.
529 # component of the source path or it does not.
530 def evalpath(striplen):
530 def evalpath(striplen):
531 score = 0
531 score = 0
532 for s in srcs:
532 for s in srcs:
533 t = os.path.join(dest, util.localpath(s[0])[striplen:])
533 t = os.path.join(dest, util.localpath(s[0])[striplen:])
534 if os.path.lexists(t):
534 if os.path.lexists(t):
535 score += 1
535 score += 1
536 return score
536 return score
537
537
538 abspfx = util.localpath(abspfx)
538 abspfx = util.localpath(abspfx)
539 striplen = len(abspfx)
539 striplen = len(abspfx)
540 if striplen:
540 if striplen:
541 striplen += len(os.sep)
541 striplen += len(os.sep)
542 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
542 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
543 score = evalpath(striplen)
543 score = evalpath(striplen)
544 striplen1 = len(os.path.split(abspfx)[0])
544 striplen1 = len(os.path.split(abspfx)[0])
545 if striplen1:
545 if striplen1:
546 striplen1 += len(os.sep)
546 striplen1 += len(os.sep)
547 if evalpath(striplen1) > score:
547 if evalpath(striplen1) > score:
548 striplen = striplen1
548 striplen = striplen1
549 res = lambda p: os.path.join(dest,
549 res = lambda p: os.path.join(dest,
550 util.localpath(p)[striplen:])
550 util.localpath(p)[striplen:])
551 else:
551 else:
552 # a file
552 # a file
553 if destdirexists:
553 if destdirexists:
554 res = lambda p: os.path.join(dest,
554 res = lambda p: os.path.join(dest,
555 os.path.basename(util.localpath(p)))
555 os.path.basename(util.localpath(p)))
556 else:
556 else:
557 res = lambda p: dest
557 res = lambda p: dest
558 return res
558 return res
559
559
560
560
561 pats = expandpats(pats)
561 pats = expandpats(pats)
562 if not pats:
562 if not pats:
563 raise util.Abort(_('no source or destination specified'))
563 raise util.Abort(_('no source or destination specified'))
564 if len(pats) == 1:
564 if len(pats) == 1:
565 raise util.Abort(_('no destination specified'))
565 raise util.Abort(_('no destination specified'))
566 dest = pats.pop()
566 dest = pats.pop()
567 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
567 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
568 if not destdirexists:
568 if not destdirexists:
569 if len(pats) > 1 or matchmod.patkind(pats[0]):
569 if len(pats) > 1 or matchmod.patkind(pats[0]):
570 raise util.Abort(_('with multiple sources, destination must be an '
570 raise util.Abort(_('with multiple sources, destination must be an '
571 'existing directory'))
571 'existing directory'))
572 if util.endswithsep(dest):
572 if util.endswithsep(dest):
573 raise util.Abort(_('destination %s is not a directory') % dest)
573 raise util.Abort(_('destination %s is not a directory') % dest)
574
574
575 tfn = targetpathfn
575 tfn = targetpathfn
576 if after:
576 if after:
577 tfn = targetpathafterfn
577 tfn = targetpathafterfn
578 copylist = []
578 copylist = []
579 for pat in pats:
579 for pat in pats:
580 srcs = walkpat(pat)
580 srcs = walkpat(pat)
581 if not srcs:
581 if not srcs:
582 continue
582 continue
583 copylist.append((tfn(pat, dest, srcs), srcs))
583 copylist.append((tfn(pat, dest, srcs), srcs))
584 if not copylist:
584 if not copylist:
585 raise util.Abort(_('no files to copy'))
585 raise util.Abort(_('no files to copy'))
586
586
587 errors = 0
587 errors = 0
588 for targetpath, srcs in copylist:
588 for targetpath, srcs in copylist:
589 for abssrc, relsrc, exact in srcs:
589 for abssrc, relsrc, exact in srcs:
590 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
590 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
591 errors += 1
591 errors += 1
592
592
593 if errors:
593 if errors:
594 ui.warn(_('(consider using --after)\n'))
594 ui.warn(_('(consider using --after)\n'))
595
595
596 return errors != 0
596 return errors != 0
597
597
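A hedged sketch of calling copy() directly — this is the helper behind `hg copy` and `hg rename`, with pats holding the sources followed by the destination. The option keys mirror the ones read above; paths and values are invented:

    opts = {'after': False, 'dry_run': False, 'force': False,
            'include': [], 'exclude': []}
    # copy two files into an existing directory; a truthy return means failures
    failed = copy(ui, repo, ['a.txt', 'b.txt', 'destdir'], opts, rename=False)

    # shell equivalents:
    #   hg copy a.txt b.txt destdir
    #   hg rename --after old.txt new.txt   # record a move already done on disk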
598 def service(opts, parentfn=None, initfn=None, runfn=None, logfile=None,
598 def service(opts, parentfn=None, initfn=None, runfn=None, logfile=None,
599 runargs=None, appendpid=False):
599 runargs=None, appendpid=False):
600 '''Run a command as a service.'''
600 '''Run a command as a service.'''
601
601
602 if opts['daemon'] and not opts['daemon_pipefds']:
602 if opts['daemon'] and not opts['daemon_pipefds']:
603 # Signal child process startup with file removal
603 # Signal child process startup with file removal
604 lockfd, lockpath = tempfile.mkstemp(prefix='hg-service-')
604 lockfd, lockpath = tempfile.mkstemp(prefix='hg-service-')
605 os.close(lockfd)
605 os.close(lockfd)
606 try:
606 try:
607 if not runargs:
607 if not runargs:
608 runargs = util.hgcmd() + sys.argv[1:]
608 runargs = util.hgcmd() + sys.argv[1:]
609 runargs.append('--daemon-pipefds=%s' % lockpath)
609 runargs.append('--daemon-pipefds=%s' % lockpath)
610 # Don't pass --cwd to the child process, because we've already
610 # Don't pass --cwd to the child process, because we've already
611 # changed directory.
611 # changed directory.
612 for i in xrange(1, len(runargs)):
612 for i in xrange(1, len(runargs)):
613 if runargs[i].startswith('--cwd='):
613 if runargs[i].startswith('--cwd='):
614 del runargs[i]
614 del runargs[i]
615 break
615 break
616 elif runargs[i].startswith('--cwd'):
616 elif runargs[i].startswith('--cwd'):
617 del runargs[i:i + 2]
617 del runargs[i:i + 2]
618 break
618 break
619 def condfn():
619 def condfn():
620 return not os.path.exists(lockpath)
620 return not os.path.exists(lockpath)
621 pid = util.rundetached(runargs, condfn)
621 pid = util.rundetached(runargs, condfn)
622 if pid < 0:
622 if pid < 0:
623 raise util.Abort(_('child process failed to start'))
623 raise util.Abort(_('child process failed to start'))
624 finally:
624 finally:
625 try:
625 try:
626 os.unlink(lockpath)
626 os.unlink(lockpath)
627 except OSError, e:
627 except OSError, e:
628 if e.errno != errno.ENOENT:
628 if e.errno != errno.ENOENT:
629 raise
629 raise
630 if parentfn:
630 if parentfn:
631 return parentfn(pid)
631 return parentfn(pid)
632 else:
632 else:
633 return
633 return
634
634
635 if initfn:
635 if initfn:
636 initfn()
636 initfn()
637
637
638 if opts['pid_file']:
638 if opts['pid_file']:
639 mode = appendpid and 'a' or 'w'
639 mode = appendpid and 'a' or 'w'
640 fp = open(opts['pid_file'], mode)
640 fp = open(opts['pid_file'], mode)
641 fp.write(str(os.getpid()) + '\n')
641 fp.write(str(os.getpid()) + '\n')
642 fp.close()
642 fp.close()
643
643
644 if opts['daemon_pipefds']:
644 if opts['daemon_pipefds']:
645 lockpath = opts['daemon_pipefds']
645 lockpath = opts['daemon_pipefds']
646 try:
646 try:
647 os.setsid()
647 os.setsid()
648 except AttributeError:
648 except AttributeError:
649 pass
649 pass
650 os.unlink(lockpath)
650 os.unlink(lockpath)
651 util.hidewindow()
651 util.hidewindow()
652 sys.stdout.flush()
652 sys.stdout.flush()
653 sys.stderr.flush()
653 sys.stderr.flush()
654
654
655 nullfd = os.open(util.nulldev, os.O_RDWR)
655 nullfd = os.open(util.nulldev, os.O_RDWR)
656 logfilefd = nullfd
656 logfilefd = nullfd
657 if logfile:
657 if logfile:
658 logfilefd = os.open(logfile, os.O_RDWR | os.O_CREAT | os.O_APPEND)
658 logfilefd = os.open(logfile, os.O_RDWR | os.O_CREAT | os.O_APPEND)
659 os.dup2(nullfd, 0)
659 os.dup2(nullfd, 0)
660 os.dup2(logfilefd, 1)
660 os.dup2(logfilefd, 1)
661 os.dup2(logfilefd, 2)
661 os.dup2(logfilefd, 2)
662 if nullfd not in (0, 1, 2):
662 if nullfd not in (0, 1, 2):
663 os.close(nullfd)
663 os.close(nullfd)
664 if logfile and logfilefd not in (0, 1, 2):
664 if logfile and logfilefd not in (0, 1, 2):
665 os.close(logfilefd)
665 os.close(logfilefd)
666
666
667 if runfn:
667 if runfn:
668 return runfn()
668 return runfn()
669
669
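service() is the daemonization helper used by long-running commands such as `hg serve -d`. A minimal sketch of the foreground path, using only the opts keys read above; with 'daemon' set, the parent branch instead re-executes hg with --daemon-pipefds and waits for the lock file to disappear, and it is that child path which redirects stdout/stderr to logfile:

    def runfn():
        # the actual server loop would live here
        return 0

    # foreground run: just write the pid file and call runfn()
    opts = {'daemon': False, 'daemon_pipefds': '', 'pid_file': '/tmp/hg-demo.pid'}
    service(opts, initfn=None, runfn=runfn)

    # command-line analogue of the forking path:
    #   hg serve --daemon --pid-file /tmp/hg.pid -E error.log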
670 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
670 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
671 opts=None):
671 opts=None):
672 '''export changesets as hg patches.'''
672 '''export changesets as hg patches.'''
673
673
674 total = len(revs)
674 total = len(revs)
675 revwidth = max([len(str(rev)) for rev in revs])
675 revwidth = max([len(str(rev)) for rev in revs])
676
676
677 def single(rev, seqno, fp):
677 def single(rev, seqno, fp):
678 ctx = repo[rev]
678 ctx = repo[rev]
679 node = ctx.node()
679 node = ctx.node()
680 parents = [p.node() for p in ctx.parents() if p]
680 parents = [p.node() for p in ctx.parents() if p]
681 branch = ctx.branch()
681 branch = ctx.branch()
682 if switch_parent:
682 if switch_parent:
683 parents.reverse()
683 parents.reverse()
684 prev = (parents and parents[0]) or nullid
684 prev = (parents and parents[0]) or nullid
685
685
686 shouldclose = False
686 shouldclose = False
687 if not fp:
687 if not fp:
688 fp = make_file(repo, template, node, total=total, seqno=seqno,
688 fp = make_file(repo, template, node, total=total, seqno=seqno,
689 revwidth=revwidth, mode='ab')
689 revwidth=revwidth, mode='ab')
690 if fp != template:
690 if fp != template:
691 shouldclose = True
691 shouldclose = True
692 if fp != sys.stdout and hasattr(fp, 'name'):
692 if fp != sys.stdout and hasattr(fp, 'name'):
693 repo.ui.note("%s\n" % fp.name)
693 repo.ui.note("%s\n" % fp.name)
694
694
695 fp.write("# HG changeset patch\n")
695 fp.write("# HG changeset patch\n")
696 fp.write("# User %s\n" % ctx.user())
696 fp.write("# User %s\n" % ctx.user())
697 fp.write("# Date %d %d\n" % ctx.date())
697 fp.write("# Date %d %d\n" % ctx.date())
698 if branch and branch != 'default':
698 if branch and branch != 'default':
699 fp.write("# Branch %s\n" % branch)
699 fp.write("# Branch %s\n" % branch)
700 fp.write("# Node ID %s\n" % hex(node))
700 fp.write("# Node ID %s\n" % hex(node))
701 fp.write("# Parent %s\n" % hex(prev))
701 fp.write("# Parent %s\n" % hex(prev))
702 if len(parents) > 1:
702 if len(parents) > 1:
703 fp.write("# Parent %s\n" % hex(parents[1]))
703 fp.write("# Parent %s\n" % hex(parents[1]))
704 fp.write(ctx.description().rstrip())
704 fp.write(ctx.description().rstrip())
705 fp.write("\n\n")
705 fp.write("\n\n")
706
706
707 for chunk in patch.diff(repo, prev, node, opts=opts):
707 for chunk in patch.diff(repo, prev, node, opts=opts):
708 fp.write(chunk)
708 fp.write(chunk)
709
709
710 if shouldclose:
710 if shouldclose:
711 fp.close()
711 fp.close()
712
712
713 for seqno, rev in enumerate(revs):
713 for seqno, rev in enumerate(revs):
714 single(rev, seqno + 1, fp)
714 single(rev, seqno + 1, fp)
715
715
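A hedged sketch of calling export() for a small revision range; repo is assumed, patch.diffopts() builds the diff options the same way showpatch() does further down, and the template reuses the % specifiers expanded by make_file():

    from mercurial import patch

    diffopts = patch.diffopts(repo.ui, {'git': True})
    export(repo, [10, 11], template='patches/%n-%h.patch',
           switch_parent=False, opts=diffopts)

    # shell equivalent:
    #   hg export --git -r 10:11 -o 'patches/%n-%h.patch'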
716 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
716 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
717 changes=None, stat=False, fp=None, prefix='',
717 changes=None, stat=False, fp=None, prefix='',
718 listsubrepos=False):
718 listsubrepos=False):
719 '''show diff or diffstat.'''
719 '''show diff or diffstat.'''
720 if fp is None:
720 if fp is None:
721 write = ui.write
721 write = ui.write
722 else:
722 else:
723 def write(s, **kw):
723 def write(s, **kw):
724 fp.write(s)
724 fp.write(s)
725
725
726 if stat:
726 if stat:
727 diffopts = diffopts.copy(context=0)
727 diffopts = diffopts.copy(context=0)
728 width = 80
728 width = 80
729 if not ui.plain():
729 if not ui.plain():
730 width = ui.termwidth()
730 width = ui.termwidth()
731 chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
731 chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
732 prefix=prefix)
732 prefix=prefix)
733 for chunk, label in patch.diffstatui(util.iterlines(chunks),
733 for chunk, label in patch.diffstatui(util.iterlines(chunks),
734 width=width,
734 width=width,
735 git=diffopts.git):
735 git=diffopts.git):
736 write(chunk, label=label)
736 write(chunk, label=label)
737 else:
737 else:
738 for chunk, label in patch.diffui(repo, node1, node2, match,
738 for chunk, label in patch.diffui(repo, node1, node2, match,
739 changes, diffopts, prefix=prefix):
739 changes, diffopts, prefix=prefix):
740 write(chunk, label=label)
740 write(chunk, label=label)
741
741
742 if listsubrepos:
742 if listsubrepos:
743 ctx1 = repo[node1]
743 ctx1 = repo[node1]
744 ctx2 = repo[node2]
744 ctx2 = repo[node2]
745 for subpath, sub in subrepo.itersubrepos(ctx1, ctx2):
745 for subpath, sub in subrepo.itersubrepos(ctx1, ctx2):
746 if node2 is not None:
746 if node2 is not None:
747 node2 = ctx2.substate[subpath][1]
747 node2 = ctx2.substate[subpath][1]
748 submatch = matchmod.narrowmatcher(subpath, match)
748 submatch = matchmod.narrowmatcher(subpath, match)
749 sub.diff(diffopts, node2, submatch, changes=changes,
749 sub.diff(diffopts, node2, submatch, changes=changes,
750 stat=stat, fp=fp, prefix=prefix)
750 stat=stat, fp=fp, prefix=prefix)
751
751
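A minimal sketch of both modes, assuming ui and repo objects; passing None for both nodes should give the usual working-directory-vs-parent diff, as with plain `hg diff`:

    m = matchall(repo)
    diffopts = patch.diffopts(ui)

    # diffstat summary written to the ui
    diffordiffstat(ui, repo, diffopts, None, None, m, stat=True)

    # full patch text written to a file object instead
    fp = open('working.diff', 'wb')
    diffordiffstat(ui, repo, diffopts, None, None, m, stat=False, fp=fp)
    fp.close()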
752 class changeset_printer(object):
752 class changeset_printer(object):
753 '''show changeset information when templating not requested.'''
753 '''show changeset information when templating not requested.'''
754
754
755 def __init__(self, ui, repo, patch, diffopts, buffered):
755 def __init__(self, ui, repo, patch, diffopts, buffered):
756 self.ui = ui
756 self.ui = ui
757 self.repo = repo
757 self.repo = repo
758 self.buffered = buffered
758 self.buffered = buffered
759 self.patch = patch
759 self.patch = patch
760 self.diffopts = diffopts
760 self.diffopts = diffopts
761 self.header = {}
761 self.header = {}
762 self.hunk = {}
762 self.hunk = {}
763 self.lastheader = None
763 self.lastheader = None
764 self.footer = None
764 self.footer = None
765
765
766 def flush(self, rev):
766 def flush(self, rev):
767 if rev in self.header:
767 if rev in self.header:
768 h = self.header[rev]
768 h = self.header[rev]
769 if h != self.lastheader:
769 if h != self.lastheader:
770 self.lastheader = h
770 self.lastheader = h
771 self.ui.write(h)
771 self.ui.write(h)
772 del self.header[rev]
772 del self.header[rev]
773 if rev in self.hunk:
773 if rev in self.hunk:
774 self.ui.write(self.hunk[rev])
774 self.ui.write(self.hunk[rev])
775 del self.hunk[rev]
775 del self.hunk[rev]
776 return 1
776 return 1
777 return 0
777 return 0
778
778
779 def close(self):
779 def close(self):
780 if self.footer:
780 if self.footer:
781 self.ui.write(self.footer)
781 self.ui.write(self.footer)
782
782
783 def show(self, ctx, copies=None, matchfn=None, **props):
783 def show(self, ctx, copies=None, matchfn=None, **props):
784 if self.buffered:
784 if self.buffered:
785 self.ui.pushbuffer()
785 self.ui.pushbuffer()
786 self._show(ctx, copies, matchfn, props)
786 self._show(ctx, copies, matchfn, props)
787 self.hunk[ctx.rev()] = self.ui.popbuffer(labeled=True)
787 self.hunk[ctx.rev()] = self.ui.popbuffer(labeled=True)
788 else:
788 else:
789 self._show(ctx, copies, matchfn, props)
789 self._show(ctx, copies, matchfn, props)
790
790
791 def _show(self, ctx, copies, matchfn, props):
791 def _show(self, ctx, copies, matchfn, props):
792 '''show a single changeset or file revision'''
792 '''show a single changeset or file revision'''
793 changenode = ctx.node()
793 changenode = ctx.node()
794 rev = ctx.rev()
794 rev = ctx.rev()
795
795
796 if self.ui.quiet:
796 if self.ui.quiet:
797 self.ui.write("%d:%s\n" % (rev, short(changenode)),
797 self.ui.write("%d:%s\n" % (rev, short(changenode)),
798 label='log.node')
798 label='log.node')
799 return
799 return
800
800
801 log = self.repo.changelog
801 log = self.repo.changelog
802 date = util.datestr(ctx.date())
802 date = util.datestr(ctx.date())
803
803
804 hexfunc = self.ui.debugflag and hex or short
804 hexfunc = self.ui.debugflag and hex or short
805
805
806 parents = [(p, hexfunc(log.node(p)))
806 parents = [(p, hexfunc(log.node(p)))
807 for p in self._meaningful_parentrevs(log, rev)]
807 for p in self._meaningful_parentrevs(log, rev)]
808
808
809 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)),
809 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)),
810 label='log.changeset')
810 label='log.changeset')
811
811
812 branch = ctx.branch()
812 branch = ctx.branch()
813 # don't show the default branch name
813 # don't show the default branch name
814 if branch != 'default':
814 if branch != 'default':
815 self.ui.write(_("branch: %s\n") % branch,
815 self.ui.write(_("branch: %s\n") % branch,
816 label='log.branch')
816 label='log.branch')
817 for bookmark in self.repo.nodebookmarks(changenode):
817 for bookmark in self.repo.nodebookmarks(changenode):
818 self.ui.write(_("bookmark: %s\n") % bookmark,
818 self.ui.write(_("bookmark: %s\n") % bookmark,
819 label='log.bookmark')
819 label='log.bookmark')
820 for tag in self.repo.nodetags(changenode):
820 for tag in self.repo.nodetags(changenode):
821 self.ui.write(_("tag: %s\n") % tag,
821 self.ui.write(_("tag: %s\n") % tag,
822 label='log.tag')
822 label='log.tag')
823 for parent in parents:
823 for parent in parents:
824 self.ui.write(_("parent: %d:%s\n") % parent,
824 self.ui.write(_("parent: %d:%s\n") % parent,
825 label='log.parent')
825 label='log.parent')
826
826
827 if self.ui.debugflag:
827 if self.ui.debugflag:
828 mnode = ctx.manifestnode()
828 mnode = ctx.manifestnode()
829 self.ui.write(_("manifest: %d:%s\n") %
829 self.ui.write(_("manifest: %d:%s\n") %
830 (self.repo.manifest.rev(mnode), hex(mnode)),
830 (self.repo.manifest.rev(mnode), hex(mnode)),
831 label='ui.debug log.manifest')
831 label='ui.debug log.manifest')
832 self.ui.write(_("user: %s\n") % ctx.user(),
832 self.ui.write(_("user: %s\n") % ctx.user(),
833 label='log.user')
833 label='log.user')
834 self.ui.write(_("date: %s\n") % date,
834 self.ui.write(_("date: %s\n") % date,
835 label='log.date')
835 label='log.date')
836
836
837 if self.ui.debugflag:
837 if self.ui.debugflag:
838 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
838 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
839 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
839 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
840 files):
840 files):
841 if value:
841 if value:
842 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
842 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
843 label='ui.debug log.files')
843 label='ui.debug log.files')
844 elif ctx.files() and self.ui.verbose:
844 elif ctx.files() and self.ui.verbose:
845 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
845 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
846 label='ui.note log.files')
846 label='ui.note log.files')
847 if copies and self.ui.verbose:
847 if copies and self.ui.verbose:
848 copies = ['%s (%s)' % c for c in copies]
848 copies = ['%s (%s)' % c for c in copies]
849 self.ui.write(_("copies: %s\n") % ' '.join(copies),
849 self.ui.write(_("copies: %s\n") % ' '.join(copies),
850 label='ui.note log.copies')
850 label='ui.note log.copies')
851
851
852 extra = ctx.extra()
852 extra = ctx.extra()
853 if extra and self.ui.debugflag:
853 if extra and self.ui.debugflag:
854 for key, value in sorted(extra.items()):
854 for key, value in sorted(extra.items()):
855 self.ui.write(_("extra: %s=%s\n")
855 self.ui.write(_("extra: %s=%s\n")
856 % (key, value.encode('string_escape')),
856 % (key, value.encode('string_escape')),
857 label='ui.debug log.extra')
857 label='ui.debug log.extra')
858
858
859 description = ctx.description().strip()
859 description = ctx.description().strip()
860 if description:
860 if description:
861 if self.ui.verbose:
861 if self.ui.verbose:
862 self.ui.write(_("description:\n"),
862 self.ui.write(_("description:\n"),
863 label='ui.note log.description')
863 label='ui.note log.description')
864 self.ui.write(description,
864 self.ui.write(description,
865 label='ui.note log.description')
865 label='ui.note log.description')
866 self.ui.write("\n\n")
866 self.ui.write("\n\n")
867 else:
867 else:
868 self.ui.write(_("summary: %s\n") %
868 self.ui.write(_("summary: %s\n") %
869 description.splitlines()[0],
869 description.splitlines()[0],
870 label='log.summary')
870 label='log.summary')
871 self.ui.write("\n")
871 self.ui.write("\n")
872
872
873 self.showpatch(changenode, matchfn)
873 self.showpatch(changenode, matchfn)
874
874
875 def showpatch(self, node, matchfn):
875 def showpatch(self, node, matchfn):
876 if not matchfn:
876 if not matchfn:
877 matchfn = self.patch
877 matchfn = self.patch
878 if matchfn:
878 if matchfn:
879 stat = self.diffopts.get('stat')
879 stat = self.diffopts.get('stat')
880 diff = self.diffopts.get('patch')
880 diff = self.diffopts.get('patch')
881 diffopts = patch.diffopts(self.ui, self.diffopts)
881 diffopts = patch.diffopts(self.ui, self.diffopts)
882 prev = self.repo.changelog.parents(node)[0]
882 prev = self.repo.changelog.parents(node)[0]
883 if stat:
883 if stat:
884 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
884 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
885 match=matchfn, stat=True)
885 match=matchfn, stat=True)
886 if diff:
886 if diff:
887 if stat:
887 if stat:
888 self.ui.write("\n")
888 self.ui.write("\n")
889 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
889 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
890 match=matchfn, stat=False)
890 match=matchfn, stat=False)
891 self.ui.write("\n")
891 self.ui.write("\n")
892
892
893 def _meaningful_parentrevs(self, log, rev):
893 def _meaningful_parentrevs(self, log, rev):
894 """Return list of meaningful (or all if debug) parentrevs for rev.
894 """Return list of meaningful (or all if debug) parentrevs for rev.
895
895
896 For merges (two non-nullrev revisions) both parents are meaningful.
896 For merges (two non-nullrev revisions) both parents are meaningful.
897 Otherwise the first parent revision is considered meaningful if it
897 Otherwise the first parent revision is considered meaningful if it
898 is not the preceding revision.
898 is not the preceding revision.
899 """
899 """
900 parents = log.parentrevs(rev)
900 parents = log.parentrevs(rev)
901 if not self.ui.debugflag and parents[1] == nullrev:
901 if not self.ui.debugflag and parents[1] == nullrev:
902 if parents[0] >= rev - 1:
902 if parents[0] >= rev - 1:
903 parents = []
903 parents = []
904 else:
904 else:
905 parents = [parents[0]]
905 parents = [parents[0]]
906 return parents
906 return parents
907
907
908
908
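To make the parent-filtering rule above concrete: with normal (non-debug) output, a changeset whose first parent is simply the previous revision gets no parent: line, a changeset that jumps back further shows its single parent, and a merge always shows both. Sketched hg log output, with invented hashes and approximate spacing:

    changeset:   12:3f6a8f0d61ab          (parent is rev 11 - no parent line)
    changeset:   13:9c42d1e7b05f
    parent:      10:77bfc2b09f1a          (first parent is not rev 12)
    changeset:   14:ab12cd34ef56          (merge)
    parent:      13:9c42d1e7b05f
    parent:      12:3f6a8f0d61ab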
909 class changeset_templater(changeset_printer):
909 class changeset_templater(changeset_printer):
910 '''format changeset information.'''
910 '''format changeset information.'''
911
911
912 def __init__(self, ui, repo, patch, diffopts, mapfile, buffered):
912 def __init__(self, ui, repo, patch, diffopts, mapfile, buffered):
913 changeset_printer.__init__(self, ui, repo, patch, diffopts, buffered)
913 changeset_printer.__init__(self, ui, repo, patch, diffopts, buffered)
914 formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
914 formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
915 defaulttempl = {
915 defaulttempl = {
916 'parent': '{rev}:{node|formatnode} ',
916 'parent': '{rev}:{node|formatnode} ',
917 'manifest': '{rev}:{node|formatnode}',
917 'manifest': '{rev}:{node|formatnode}',
918 'file_copy': '{name} ({source})',
918 'file_copy': '{name} ({source})',
919 'extra': '{key}={value|stringescape}'
919 'extra': '{key}={value|stringescape}'
920 }
920 }
921 # filecopy is preserved for compatibility reasons
921 # filecopy is preserved for compatibility reasons
922 defaulttempl['filecopy'] = defaulttempl['file_copy']
922 defaulttempl['filecopy'] = defaulttempl['file_copy']
923 self.t = templater.templater(mapfile, {'formatnode': formatnode},
923 self.t = templater.templater(mapfile, {'formatnode': formatnode},
924 cache=defaulttempl)
924 cache=defaulttempl)
925 self.cache = {}
925 self.cache = {}
926
926
927 def use_template(self, t):
927 def use_template(self, t):
928 '''set template string to use'''
928 '''set template string to use'''
929 self.t.cache['changeset'] = t
929 self.t.cache['changeset'] = t
930
930
931 def _meaningful_parentrevs(self, ctx):
931 def _meaningful_parentrevs(self, ctx):
932 """Return list of meaningful (or all if debug) parentrevs for rev.
932 """Return list of meaningful (or all if debug) parentrevs for rev.
933 """
933 """
934 parents = ctx.parents()
934 parents = ctx.parents()
935 if len(parents) > 1:
935 if len(parents) > 1:
936 return parents
936 return parents
937 if self.ui.debugflag:
937 if self.ui.debugflag:
938 return [parents[0], self.repo['null']]
938 return [parents[0], self.repo['null']]
939 if parents[0].rev() >= ctx.rev() - 1:
939 if parents[0].rev() >= ctx.rev() - 1:
940 return []
940 return []
941 return parents
941 return parents
942
942
943 def _show(self, ctx, copies, matchfn, props):
943 def _show(self, ctx, copies, matchfn, props):
944 '''show a single changeset or file revision'''
944 '''show a single changeset or file revision'''
945
945
946 showlist = templatekw.showlist
946 showlist = templatekw.showlist
947
947
948 # showparents() behaviour depends on the ui trace level, which
948 # showparents() behaviour depends on the ui trace level, which
949 # causes unexpected behaviour at the templating level and makes
949 # causes unexpected behaviour at the templating level and makes
950 # it hard to extract into a standalone function. Its behaviour
950 # it hard to extract into a standalone function. Its behaviour
951 # cannot be changed, so leave it here for now.
951 # cannot be changed, so leave it here for now.
952 def showparents(**args):
952 def showparents(**args):
953 ctx = args['ctx']
953 ctx = args['ctx']
954 parents = [[('rev', p.rev()), ('node', p.hex())]
954 parents = [[('rev', p.rev()), ('node', p.hex())]
955 for p in self._meaningful_parentrevs(ctx)]
955 for p in self._meaningful_parentrevs(ctx)]
956 return showlist('parent', parents, **args)
956 return showlist('parent', parents, **args)
957
957
958 props = props.copy()
958 props = props.copy()
959 props.update(templatekw.keywords)
959 props.update(templatekw.keywords)
960 props['parents'] = showparents
960 props['parents'] = showparents
961 props['templ'] = self.t
961 props['templ'] = self.t
962 props['ctx'] = ctx
962 props['ctx'] = ctx
963 props['repo'] = self.repo
963 props['repo'] = self.repo
964 props['revcache'] = {'copies': copies}
964 props['revcache'] = {'copies': copies}
965 props['cache'] = self.cache
965 props['cache'] = self.cache
966
966
967 # find correct templates for current mode
967 # find correct templates for current mode
968
968
969 tmplmodes = [
969 tmplmodes = [
970 (True, None),
970 (True, None),
971 (self.ui.verbose, 'verbose'),
971 (self.ui.verbose, 'verbose'),
972 (self.ui.quiet, 'quiet'),
972 (self.ui.quiet, 'quiet'),
973 (self.ui.debugflag, 'debug'),
973 (self.ui.debugflag, 'debug'),
974 ]
974 ]
975
975
976 types = {'header': '', 'footer':'', 'changeset': 'changeset'}
976 types = {'header': '', 'footer':'', 'changeset': 'changeset'}
977 for mode, postfix in tmplmodes:
977 for mode, postfix in tmplmodes:
978 for type in types:
978 for type in types:
979 cur = postfix and ('%s_%s' % (type, postfix)) or type
979 cur = postfix and ('%s_%s' % (type, postfix)) or type
980 if mode and cur in self.t:
980 if mode and cur in self.t:
981 types[type] = cur
981 types[type] = cur
982
982
983 try:
983 try:
984
984
985 # write header
985 # write header
986 if types['header']:
986 if types['header']:
987 h = templater.stringify(self.t(types['header'], **props))
987 h = templater.stringify(self.t(types['header'], **props))
988 if self.buffered:
988 if self.buffered:
989 self.header[ctx.rev()] = h
989 self.header[ctx.rev()] = h
990 else:
990 else:
991 if self.lastheader != h:
991 if self.lastheader != h:
992 self.lastheader = h
992 self.lastheader = h
993 self.ui.write(h)
993 self.ui.write(h)
994
994
995 # write changeset metadata, then patch if requested
995 # write changeset metadata, then patch if requested
996 key = types['changeset']
996 key = types['changeset']
997 self.ui.write(templater.stringify(self.t(key, **props)))
997 self.ui.write(templater.stringify(self.t(key, **props)))
998 self.showpatch(ctx.node(), matchfn)
998 self.showpatch(ctx.node(), matchfn)
999
999
1000 if types['footer']:
1000 if types['footer']:
1001 if not self.footer:
1001 if not self.footer:
1002 self.footer = templater.stringify(self.t(types['footer'],
1002 self.footer = templater.stringify(self.t(types['footer'],
1003 **props))
1003 **props))
1004
1004
1005 except KeyError, inst:
1005 except KeyError, inst:
1006 msg = _("%s: no key named '%s'")
1006 msg = _("%s: no key named '%s'")
1007 raise util.Abort(msg % (self.t.mapfile, inst.args[0]))
1007 raise util.Abort(msg % (self.t.mapfile, inst.args[0]))
1008 except SyntaxError, inst:
1008 except SyntaxError, inst:
1009 raise util.Abort('%s: %s' % (self.t.mapfile, inst.args[0]))
1009 raise util.Abort('%s: %s' % (self.t.mapfile, inst.args[0]))
1010
1010
1011 def show_changeset(ui, repo, opts, buffered=False):
1011 def show_changeset(ui, repo, opts, buffered=False):
1012 """show one changeset using template or regular display.
1012 """show one changeset using template or regular display.
1013
1013
1014 Display format will be the first non-empty hit of:
1014 Display format will be the first non-empty hit of:
1015 1. option 'template'
1015 1. option 'template'
1016 2. option 'style'
1016 2. option 'style'
1017 3. [ui] setting 'logtemplate'
1017 3. [ui] setting 'logtemplate'
1018 4. [ui] setting 'style'
1018 4. [ui] setting 'style'
1019 If all of these values are either unset or the empty string,
1019 If all of these values are either unset or the empty string,
1020 regular display via changeset_printer() is done.
1020 regular display via changeset_printer() is done.
1021 """
1021 """
1022 # options
1022 # options
1023 patch = False
1023 patch = False
1024 if opts.get('patch') or opts.get('stat'):
1024 if opts.get('patch') or opts.get('stat'):
1025 patch = matchall(repo)
1025 patch = matchall(repo)
1026
1026
1027 tmpl = opts.get('template')
1027 tmpl = opts.get('template')
1028 style = None
1028 style = None
1029 if tmpl:
1029 if tmpl:
1030 tmpl = templater.parsestring(tmpl, quoted=False)
1030 tmpl = templater.parsestring(tmpl, quoted=False)
1031 else:
1031 else:
1032 style = opts.get('style')
1032 style = opts.get('style')
1033
1033
1034 # ui settings
1034 # ui settings
1035 if not (tmpl or style):
1035 if not (tmpl or style):
1036 tmpl = ui.config('ui', 'logtemplate')
1036 tmpl = ui.config('ui', 'logtemplate')
1037 if tmpl:
1037 if tmpl:
1038 tmpl = templater.parsestring(tmpl)
1038 tmpl = templater.parsestring(tmpl)
1039 else:
1039 else:
1040 style = util.expandpath(ui.config('ui', 'style', ''))
1040 style = util.expandpath(ui.config('ui', 'style', ''))
1041
1041
1042 if not (tmpl or style):
1042 if not (tmpl or style):
1043 return changeset_printer(ui, repo, patch, opts, buffered)
1043 return changeset_printer(ui, repo, patch, opts, buffered)
1044
1044
1045 mapfile = None
1045 mapfile = None
1046 if style and not tmpl:
1046 if style and not tmpl:
1047 mapfile = style
1047 mapfile = style
1048 if not os.path.split(mapfile)[0]:
1048 if not os.path.split(mapfile)[0]:
1049 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1049 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1050 or templater.templatepath(mapfile))
1050 or templater.templatepath(mapfile))
1051 if mapname:
1051 if mapname:
1052 mapfile = mapname
1052 mapfile = mapname
1053
1053
1054 try:
1054 try:
1055 t = changeset_templater(ui, repo, patch, opts, mapfile, buffered)
1055 t = changeset_templater(ui, repo, patch, opts, mapfile, buffered)
1056 except SyntaxError, inst:
1056 except SyntaxError, inst:
1057 raise util.Abort(inst.args[0])
1057 raise util.Abort(inst.args[0])
1058 if tmpl:
1058 if tmpl:
1059 t.use_template(tmpl)
1059 t.use_template(tmpl)
1060 return t
1060 return t
1061
1061
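The four-step lookup documented above corresponds to these user-facing settings; a hedged hgrc example for steps 3 and 4, either of which `hg log --template ...` or `--style ...` overrides from the command line:

    [ui]
    # step 3: an inline log template wins over a style file
    logtemplate = {rev}:{node|short} {desc|firstline}\n
    # step 4: only consulted when logtemplate is empty
    style = compact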
1062 def finddate(ui, repo, date):
1062 def finddate(ui, repo, date):
1063 """Find the tipmost changeset that matches the given date spec"""
1063 """Find the tipmost changeset that matches the given date spec"""
1064
1064
1065 df = util.matchdate(date)
1065 df = util.matchdate(date)
1066 m = matchall(repo)
1066 m = matchall(repo)
1067 results = {}
1067 results = {}
1068
1068
1069 def prep(ctx, fns):
1069 def prep(ctx, fns):
1070 d = ctx.date()
1070 d = ctx.date()
1071 if df(d[0]):
1071 if df(d[0]):
1072 results[ctx.rev()] = d
1072 results[ctx.rev()] = d
1073
1073
1074 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1074 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1075 rev = ctx.rev()
1075 rev = ctx.rev()
1076 if rev in results:
1076 if rev in results:
1077 ui.status(_("Found revision %s from %s\n") %
1077 ui.status(_("Found revision %s from %s\n") %
1078 (rev, util.datestr(results[rev])))
1078 (rev, util.datestr(results[rev])))
1079 return str(rev)
1079 return str(rev)
1080
1080
1081 raise util.Abort(_("revision matching date not found"))
1081 raise util.Abort(_("revision matching date not found"))
1082
1082
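util.matchdate() understands the usual Mercurial date specs (see 'hg help dates'), so finddate() can answer queries like the following sketch — ui and repo are assumed, and the return value is the matching revision number as a string, per the code above:

    rev = finddate(ui, repo, '2011-05-01')    # tipmost changeset on that day
    rev = finddate(ui, repo, '<2011-05-01')   # tipmost changeset before it
    # this is what powers e.g.:  hg update --date '2011-05-01'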
1083 def walkchangerevs(repo, match, opts, prepare):
1083 def walkchangerevs(repo, match, opts, prepare):
1084 '''Iterate over files and the revs in which they changed.
1084 '''Iterate over files and the revs in which they changed.
1085
1085
1086 Callers most commonly need to iterate backwards over the history
1086 Callers most commonly need to iterate backwards over the history
1087 in which they are interested. Doing so has awful (quadratic-looking)
1087 in which they are interested. Doing so has awful (quadratic-looking)
1088 performance, so we use iterators in a "windowed" way.
1088 performance, so we use iterators in a "windowed" way.
1089
1089
1090 We walk a window of revisions in the desired order. Within the
1090 We walk a window of revisions in the desired order. Within the
1091 window, we first walk forwards to gather data, then in the desired
1091 window, we first walk forwards to gather data, then in the desired
1092 order (usually backwards) to display it.
1092 order (usually backwards) to display it.
1093
1093
1094 This function returns an iterator yielding contexts. Before
1094 This function returns an iterator yielding contexts. Before
1095 yielding each context, the iterator will first call the prepare
1095 yielding each context, the iterator will first call the prepare
1096 function on each context in the window in forward order.'''
1096 function on each context in the window in forward order.'''
1097
1097
1098 def increasing_windows(start, end, windowsize=8, sizelimit=512):
1098 def increasing_windows(start, end, windowsize=8, sizelimit=512):
1099 if start < end:
1099 if start < end:
1100 while start < end:
1100 while start < end:
1101 yield start, min(windowsize, end - start)
1101 yield start, min(windowsize, end - start)
1102 start += windowsize
1102 start += windowsize
1103 if windowsize < sizelimit:
1103 if windowsize < sizelimit:
1104 windowsize *= 2
1104 windowsize *= 2
1105 else:
1105 else:
1106 while start > end:
1106 while start > end:
1107 yield start, min(windowsize, start - end - 1)
1107 yield start, min(windowsize, start - end - 1)
1108 start -= windowsize
1108 start -= windowsize
1109 if windowsize < sizelimit:
1109 if windowsize < sizelimit:
1110 windowsize *= 2
1110 windowsize *= 2
1111
1111
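    # Illustrative trace of the generator above (values worked out by hand):
    #   list(increasing_windows(0, 40))  -> [(0, 8), (8, 16), (24, 16)]
    #   list(increasing_windows(25, -1)) -> [(25, 8), (17, 16), (1, 1)]
    # i.e. windows start at 8 revisions and double until they hit sizelimit.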
1112 follow = opts.get('follow') or opts.get('follow_first')
1112 follow = opts.get('follow') or opts.get('follow_first')
1113
1113
1114 if not len(repo):
1114 if not len(repo):
1115 return []
1115 return []
1116
1116
1117 if follow:
1117 if follow:
1118 defrange = '%s:0' % repo['.'].rev()
1118 defrange = '%s:0' % repo['.'].rev()
1119 else:
1119 else:
1120 defrange = '-1:0'
1120 defrange = '-1:0'
1121 revs = revrange(repo, opts['rev'] or [defrange])
1121 revs = revrange(repo, opts['rev'] or [defrange])
1122 if not revs:
1122 if not revs:
1123 return []
1123 return []
1124 wanted = set()
1124 wanted = set()
1125 slowpath = match.anypats() or (match.files() and opts.get('removed'))
1125 slowpath = match.anypats() or (match.files() and opts.get('removed'))
1126 fncache = {}
1126 fncache = {}
1127 change = util.cachefunc(repo.changectx)
1127 change = util.cachefunc(repo.changectx)
1128
1128
1129 # First step is to fill wanted, the set of revisions that we want to yield.
1129 # First step is to fill wanted, the set of revisions that we want to yield.
1130 # When it does not induce extra cost, we also fill fncache for revisions in
1130 # When it does not induce extra cost, we also fill fncache for revisions in
1131 # wanted: a cache of filenames that were changed (ctx.files()) and that
1131 # wanted: a cache of filenames that were changed (ctx.files()) and that
1132 # match the file filtering conditions.
1132 # match the file filtering conditions.
1133
1133
1134 if not slowpath and not match.files():
1134 if not slowpath and not match.files():
1135 # No files, no patterns. Display all revs.
1135 # No files, no patterns. Display all revs.
1136 wanted = set(revs)
1136 wanted = set(revs)
1137 copies = []
1137 copies = []
1138
1138
1139 if not slowpath:
1139 if not slowpath:
1140 # We only have to read through the filelog to find wanted revisions
1140 # We only have to read through the filelog to find wanted revisions
1141
1141
1142 minrev, maxrev = min(revs), max(revs)
1142 minrev, maxrev = min(revs), max(revs)
1143 def filerevgen(filelog, last):
1143 def filerevgen(filelog, last):
1144 """
1144 """
1145 Only files, no patterns. Check the history of each file.
1145 Only files, no patterns. Check the history of each file.
1146
1146
1147 Examines filelog entries within minrev, maxrev linkrev range
1147 Examines filelog entries within minrev, maxrev linkrev range
1148 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1148 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1149 tuples in backwards order
1149 tuples in backwards order
1150 """
1150 """
1151 cl_count = len(repo)
1151 cl_count = len(repo)
1152 revs = []
1152 revs = []
1153 for j in xrange(0, last + 1):
1153 for j in xrange(0, last + 1):
1154 linkrev = filelog.linkrev(j)
1154 linkrev = filelog.linkrev(j)
1155 if linkrev < minrev:
1155 if linkrev < minrev:
1156 continue
1156 continue
1157 # only yield revs for which we have the changelog; others can
1157 # only yield revs for which we have the changelog; others can
1158 # appear while doing "hg log" during a pull or commit
1158 # appear while doing "hg log" during a pull or commit
1159 if linkrev >= cl_count:
1159 if linkrev >= cl_count:
1160 break
1160 break
1161
1161
1162 parentlinkrevs = []
1162 parentlinkrevs = []
1163 for p in filelog.parentrevs(j):
1163 for p in filelog.parentrevs(j):
1164 if p != nullrev:
1164 if p != nullrev:
1165 parentlinkrevs.append(filelog.linkrev(p))
1165 parentlinkrevs.append(filelog.linkrev(p))
1166 n = filelog.node(j)
1166 n = filelog.node(j)
1167 revs.append((linkrev, parentlinkrevs,
1167 revs.append((linkrev, parentlinkrevs,
1168 follow and filelog.renamed(n)))
1168 follow and filelog.renamed(n)))
1169
1169
1170 return reversed(revs)
1170 return reversed(revs)
1171 def iterfiles():
1171 def iterfiles():
1172 for filename in match.files():
1172 for filename in match.files():
1173 yield filename, None
1173 yield filename, None
1174 for filename_node in copies:
1174 for filename_node in copies:
1175 yield filename_node
1175 yield filename_node
1176 for file_, node in iterfiles():
1176 for file_, node in iterfiles():
1177 filelog = repo.file(file_)
1177 filelog = repo.file(file_)
1178 if not len(filelog):
1178 if not len(filelog):
1179 if node is None:
1179 if node is None:
1180 # A zero count may be a directory or deleted file, so
1180 # A zero count may be a directory or deleted file, so
1181 # try to find matching entries on the slow path.
1181 # try to find matching entries on the slow path.
1182 if follow:
1182 if follow:
1183 raise util.Abort(
1183 raise util.Abort(
1184 _('cannot follow nonexistent file: "%s"') % file_)
1184 _('cannot follow nonexistent file: "%s"') % file_)
1185 slowpath = True
1185 slowpath = True
1186 break
1186 break
1187 else:
1187 else:
1188 continue
1188 continue
1189
1189
1190 if node is None:
1190 if node is None:
1191 last = len(filelog) - 1
1191 last = len(filelog) - 1
1192 else:
1192 else:
1193 last = filelog.rev(node)
1193 last = filelog.rev(node)
1194
1194
1195
1195
1196 # keep track of all ancestors of the file
1196 # keep track of all ancestors of the file
1197 ancestors = set([filelog.linkrev(last)])
1197 ancestors = set([filelog.linkrev(last)])
1198
1198
1199 # iterate from latest to oldest revision
1199 # iterate from latest to oldest revision
1200 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1200 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1201 if not follow:
1201 if not follow:
1202 if rev > maxrev:
1202 if rev > maxrev:
1203 continue
1203 continue
1204 else:
1204 else:
1205 # Note that last might not be the first interesting
1205 # Note that last might not be the first interesting
1206 # rev to us:
1206 # rev to us:
1207 # if the file has been changed after maxrev, we'll
1207 # if the file has been changed after maxrev, we'll
1208 # have linkrev(last) > maxrev, and we still need
1208 # have linkrev(last) > maxrev, and we still need
1209 # to explore the file graph
1209 # to explore the file graph
1210 if rev not in ancestors:
1210 if rev not in ancestors:
1211 continue
1211 continue
1212 # XXX insert 1327 fix here
1212 # XXX insert 1327 fix here
1213 if flparentlinkrevs:
1213 if flparentlinkrevs:
1214 ancestors.update(flparentlinkrevs)
1214 ancestors.update(flparentlinkrevs)
1215
1215
1216 fncache.setdefault(rev, []).append(file_)
1216 fncache.setdefault(rev, []).append(file_)
1217 wanted.add(rev)
1217 wanted.add(rev)
1218 if copied:
1218 if copied:
1219 copies.append(copied)
1219 copies.append(copied)
1220 if slowpath:
1220 if slowpath:
1221 # We have to read the changelog to match filenames against
1221 # We have to read the changelog to match filenames against
1222 # changed files
1222 # changed files
1223
1223
1224 if follow:
1224 if follow:
1225 raise util.Abort(_('can only follow copies/renames for explicit '
1225 raise util.Abort(_('can only follow copies/renames for explicit '
1226 'filenames'))
1226 'filenames'))
1227
1227
1228 # The slow path checks files modified in every changeset.
1228 # The slow path checks files modified in every changeset.
1229 for i in sorted(revs):
1229 for i in sorted(revs):
1230 ctx = change(i)
1230 ctx = change(i)
1231 matches = filter(match, ctx.files())
1231 matches = filter(match, ctx.files())
1232 if matches:
1232 if matches:
1233 fncache[i] = matches
1233 fncache[i] = matches
1234 wanted.add(i)
1234 wanted.add(i)
1235
1235
1236 class followfilter(object):
1236 class followfilter(object):
1237 def __init__(self, onlyfirst=False):
1237 def __init__(self, onlyfirst=False):
1238 self.startrev = nullrev
1238 self.startrev = nullrev
1239 self.roots = set()
1239 self.roots = set()
1240 self.onlyfirst = onlyfirst
1240 self.onlyfirst = onlyfirst
1241
1241
1242 def match(self, rev):
1242 def match(self, rev):
1243 def realparents(rev):
1243 def realparents(rev):
1244 if self.onlyfirst:
1244 if self.onlyfirst:
1245 return repo.changelog.parentrevs(rev)[0:1]
1245 return repo.changelog.parentrevs(rev)[0:1]
1246 else:
1246 else:
1247 return filter(lambda x: x != nullrev,
1247 return filter(lambda x: x != nullrev,
1248 repo.changelog.parentrevs(rev))
1248 repo.changelog.parentrevs(rev))
1249
1249
1250 if self.startrev == nullrev:
1250 if self.startrev == nullrev:
1251 self.startrev = rev
1251 self.startrev = rev
1252 return True
1252 return True
1253
1253
1254 if rev > self.startrev:
1254 if rev > self.startrev:
1255 # forward: all descendants
1255 # forward: all descendants
1256 if not self.roots:
1256 if not self.roots:
1257 self.roots.add(self.startrev)
1257 self.roots.add(self.startrev)
1258 for parent in realparents(rev):
1258 for parent in realparents(rev):
1259 if parent in self.roots:
1259 if parent in self.roots:
1260 self.roots.add(rev)
1260 self.roots.add(rev)
1261 return True
1261 return True
1262 else:
1262 else:
1263 # backwards: all parents
1263 # backwards: all parents
1264 if not self.roots:
1264 if not self.roots:
1265 self.roots.update(realparents(self.startrev))
1265 self.roots.update(realparents(self.startrev))
1266 if rev in self.roots:
1266 if rev in self.roots:
1267 self.roots.remove(rev)
1267 self.roots.remove(rev)
1268 self.roots.update(realparents(rev))
1268 self.roots.update(realparents(rev))
1269 return True
1269 return True
1270
1270
1271 return False
1271 return False
1272
1272
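The followfilter class just above is a small incremental graph walk: the first revision passed to match() becomes startrev, and the roots set then grows forward (descendants, when rev > startrev) or backward (ancestors) as further revisions are tested. A minimal sketch of the forward case, usable only inside this function's scope and assuming a linear history 5 -> 6 -> 7:

ff = followfilter()
ff.match(5)   # True: the first call only records startrev = 5
ff.match(6)   # True: 5 is seeded into roots, 6's parent 5 is a root, so 6 joins roots
ff.match(7)   # True via parent 6; revisions with no parent in roots return False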
1273 # it might be worthwhile to do this in the iterator if the rev range
1273 # it might be worthwhile to do this in the iterator if the rev range
1274 # is descending and the prune args are all within that range
1274 # is descending and the prune args are all within that range
1275 for rev in opts.get('prune', ()):
1275 for rev in opts.get('prune', ()):
1276 rev = repo.changelog.rev(repo.lookup(rev))
1276 rev = repo.changelog.rev(repo.lookup(rev))
1277 ff = followfilter()
1277 ff = followfilter()
1278 stop = min(revs[0], revs[-1])
1278 stop = min(revs[0], revs[-1])
1279 for x in xrange(rev, stop - 1, -1):
1279 for x in xrange(rev, stop - 1, -1):
1280 if ff.match(x):
1280 if ff.match(x):
1281 wanted.discard(x)
1281 wanted.discard(x)
1282
1282
1283 # Now that wanted is correctly initialized, we can iterate over the
1283 # Now that wanted is correctly initialized, we can iterate over the
1284 # revision range, yielding only revisions in wanted.
1284 # revision range, yielding only revisions in wanted.
1285 def iterate():
1285 def iterate():
1286 if follow and not match.files():
1286 if follow and not match.files():
1287 ff = followfilter(onlyfirst=opts.get('follow_first'))
1287 ff = followfilter(onlyfirst=opts.get('follow_first'))
1288 def want(rev):
1288 def want(rev):
1289 return ff.match(rev) and rev in wanted
1289 return ff.match(rev) and rev in wanted
1290 else:
1290 else:
1291 def want(rev):
1291 def want(rev):
1292 return rev in wanted
1292 return rev in wanted
1293
1293
1294 for i, window in increasing_windows(0, len(revs)):
1294 for i, window in increasing_windows(0, len(revs)):
1295 nrevs = [rev for rev in revs[i:i + window] if want(rev)]
1295 nrevs = [rev for rev in revs[i:i + window] if want(rev)]
1296 for rev in sorted(nrevs):
1296 for rev in sorted(nrevs):
1297 fns = fncache.get(rev)
1297 fns = fncache.get(rev)
1298 ctx = change(rev)
1298 ctx = change(rev)
1299 if not fns:
1299 if not fns:
1300 def fns_generator():
1300 def fns_generator():
1301 for f in ctx.files():
1301 for f in ctx.files():
1302 if match(f):
1302 if match(f):
1303 yield f
1303 yield f
1304 fns = fns_generator()
1304 fns = fns_generator()
1305 prepare(ctx, fns)
1305 prepare(ctx, fns)
1306 for rev in nrevs:
1306 for rev in nrevs:
1307 yield change(rev)
1307 yield change(rev)
1308 return iterate()
1308 return iterate()
1309
1309
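The generator returned by iterate() yields changectx objects window by window, calling prepare(ctx, fns) for every wanted revision before re-yielding the window. A rough usage sketch, assuming (as in this version of the file) the enclosing walker is walkchangerevs(repo, match, opts, prepare) and that repo, ui and a matcher built with cmdutil.match() are already in scope:

def prepare(ctx, fns):
    pass  # e.g. prime a changeset displayer with the matched filenames

for ctx in walkchangerevs(repo, matchfn, {'rev': ['tip:0']}, prepare):
    ui.write("%d:%s\n" % (ctx.rev(), ctx))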
1310 def add(ui, repo, match, dryrun, listsubrepos, prefix):
1310 def add(ui, repo, match, dryrun, listsubrepos, prefix):
1311 join = lambda f: os.path.join(prefix, f)
1311 join = lambda f: os.path.join(prefix, f)
1312 bad = []
1312 bad = []
1313 oldbad = match.bad
1313 oldbad = match.bad
1314 match.bad = lambda x, y: bad.append(x) or oldbad(x, y)
1314 match.bad = lambda x, y: bad.append(x) or oldbad(x, y)
1315 names = []
1315 names = []
1316 wctx = repo[None]
1316 wctx = repo[None]
1317 cca = None
1317 cca = None
1318 abort, warn = scmutil.checkportabilityalert(ui)
1318 abort, warn = scmutil.checkportabilityalert(ui)
1319 if abort or warn:
1319 if abort or warn:
1320 cca = scmutil.casecollisionauditor(ui, abort, wctx)
1320 cca = scmutil.casecollisionauditor(ui, abort, wctx)
1321 for f in repo.walk(match):
1321 for f in repo.walk(match):
1322 exact = match.exact(f)
1322 exact = match.exact(f)
1323 if exact or f not in repo.dirstate:
1323 if exact or f not in repo.dirstate:
1324 if cca:
1324 if cca:
1325 cca(f)
1325 cca(f)
1326 names.append(f)
1326 names.append(f)
1327 if ui.verbose or not exact:
1327 if ui.verbose or not exact:
1328 ui.status(_('adding %s\n') % match.rel(join(f)))
1328 ui.status(_('adding %s\n') % match.rel(join(f)))
1329
1329
1330 if listsubrepos:
1330 if listsubrepos:
1331 for subpath in wctx.substate:
1331 for subpath in wctx.substate:
1332 sub = wctx.sub(subpath)
1332 sub = wctx.sub(subpath)
1333 try:
1333 try:
1334 submatch = matchmod.narrowmatcher(subpath, match)
1334 submatch = matchmod.narrowmatcher(subpath, match)
1335 bad.extend(sub.add(ui, submatch, dryrun, prefix))
1335 bad.extend(sub.add(ui, submatch, dryrun, prefix))
1336 except error.LookupError:
1336 except error.LookupError:
1337 ui.status(_("skipping missing subrepository: %s\n")
1337 ui.status(_("skipping missing subrepository: %s\n")
1338 % join(subpath))
1338 % join(subpath))
1339
1339
1340 if not dryrun:
1340 if not dryrun:
1341 rejected = wctx.add(names, prefix)
1341 rejected = wctx.add(names, prefix)
1342 bad.extend(f for f in rejected if f in match.files())
1342 bad.extend(f for f in rejected if f in match.files())
1343 return bad
1343 return bad
1344
1344
1345 def commit(ui, repo, commitfunc, pats, opts):
1345 def commit(ui, repo, commitfunc, pats, opts):
1346 '''commit the specified files or all outstanding changes'''
1346 '''commit the specified files or all outstanding changes'''
1347 date = opts.get('date')
1347 date = opts.get('date')
1348 if date:
1348 if date:
1349 opts['date'] = util.parsedate(date)
1349 opts['date'] = util.parsedate(date)
1350 message = logmessage(opts)
1350 message = logmessage(opts)
1351
1351
1352 # extract addremove carefully -- this function can be called from a command
1352 # extract addremove carefully -- this function can be called from a command
1353 # that doesn't support addremove
1353 # that doesn't support addremove
1354 if opts.get('addremove'):
1354 if opts.get('addremove'):
1355 addremove(repo, pats, opts)
1355 addremove(repo, pats, opts)
1356
1356
1357 return commitfunc(ui, repo, message, match(repo, pats, opts), opts)
1357 return commitfunc(ui, repo, message, match(repo, pats, opts), opts)
1358
1358
1359 def commiteditor(repo, ctx, subs):
1359 def commiteditor(repo, ctx, subs):
1360 if ctx.description():
1360 if ctx.description():
1361 return ctx.description()
1361 return ctx.description()
1362 return commitforceeditor(repo, ctx, subs)
1362 return commitforceeditor(repo, ctx, subs)
1363
1363
1364 def commitforceeditor(repo, ctx, subs):
1364 def commitforceeditor(repo, ctx, subs):
1365 edittext = []
1365 edittext = []
1366 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
1366 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
1367 if ctx.description():
1367 if ctx.description():
1368 edittext.append(ctx.description())
1368 edittext.append(ctx.description())
1369 edittext.append("")
1369 edittext.append("")
1370 edittext.append("") # Empty line between message and comments.
1370 edittext.append("") # Empty line between message and comments.
1371 edittext.append(_("HG: Enter commit message."
1371 edittext.append(_("HG: Enter commit message."
1372 " Lines beginning with 'HG:' are removed."))
1372 " Lines beginning with 'HG:' are removed."))
1373 edittext.append(_("HG: Leave message empty to abort commit."))
1373 edittext.append(_("HG: Leave message empty to abort commit."))
1374 edittext.append("HG: --")
1374 edittext.append("HG: --")
1375 edittext.append(_("HG: user: %s") % ctx.user())
1375 edittext.append(_("HG: user: %s") % ctx.user())
1376 if ctx.p2():
1376 if ctx.p2():
1377 edittext.append(_("HG: branch merge"))
1377 edittext.append(_("HG: branch merge"))
1378 if ctx.branch():
1378 if ctx.branch():
1379 edittext.append(_("HG: branch '%s'") % ctx.branch())
1379 edittext.append(_("HG: branch '%s'") % ctx.branch())
1380 edittext.extend([_("HG: subrepo %s") % s for s in subs])
1380 edittext.extend([_("HG: subrepo %s") % s for s in subs])
1381 edittext.extend([_("HG: added %s") % f for f in added])
1381 edittext.extend([_("HG: added %s") % f for f in added])
1382 edittext.extend([_("HG: changed %s") % f for f in modified])
1382 edittext.extend([_("HG: changed %s") % f for f in modified])
1383 edittext.extend([_("HG: removed %s") % f for f in removed])
1383 edittext.extend([_("HG: removed %s") % f for f in removed])
1384 if not added and not modified and not removed:
1384 if not added and not modified and not removed:
1385 edittext.append(_("HG: no files changed"))
1385 edittext.append(_("HG: no files changed"))
1386 edittext.append("")
1386 edittext.append("")
1387 # run editor in the repository root
1387 # run editor in the repository root
1388 olddir = os.getcwd()
1388 olddir = os.getcwd()
1389 os.chdir(repo.root)
1389 os.chdir(repo.root)
1390 text = repo.ui.edit("\n".join(edittext), ctx.user())
1390 text = repo.ui.edit("\n".join(edittext), ctx.user())
1391 text = re.sub("(?m)^HG:.*(\n|$)", "", text)
1391 text = re.sub("(?m)^HG:.*(\n|$)", "", text)
1392 os.chdir(olddir)
1392 os.chdir(olddir)
1393
1393
1394 if not text.strip():
1394 if not text.strip():
1395 raise util.Abort(_("empty commit message"))
1395 raise util.Abort(_("empty commit message"))
1396
1396
1397 return text
1397 return text
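For reference, the buffer handed to the user's editor by commitforceeditor is simply the joined edittext list: two blank lines reserved for the message, then the HG: comment block. For a commit touching one modified file on the default branch it would look roughly like this (user and filename are illustrative):

HG: Enter commit message. Lines beginning with 'HG:' are removed.
HG: Leave message empty to abort commit.
HG: --
HG: user: Alice <alice@example.com>
HG: branch 'default'
HG: changed foo.c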
@@ -1,4979 +1,4979 b''
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import hex, bin, nullid, nullrev, short
8 from node import hex, bin, nullid, nullrev, short
9 from lock import release
9 from lock import release
10 from i18n import _, gettext
10 from i18n import _, gettext
11 import os, re, sys, difflib, time, tempfile
11 import os, re, sys, difflib, time, tempfile
12 import hg, scmutil, util, revlog, extensions, copies, error, bookmarks
12 import hg, scmutil, util, revlog, extensions, copies, error, bookmarks
13 import patch, help, url, encoding, templatekw, discovery
13 import patch, help, url, encoding, templatekw, discovery
14 import archival, changegroup, cmdutil, sshserver, hbisect, hgweb, hgweb.server
14 import archival, changegroup, cmdutil, sshserver, hbisect, hgweb, hgweb.server
15 import merge as mergemod
15 import merge as mergemod
16 import minirst, revset, templatefilters
16 import minirst, revset, templatefilters
17 import dagparser, context, simplemerge
17 import dagparser, context, simplemerge
18 import random, setdiscovery, treediscovery, dagutil
18 import random, setdiscovery, treediscovery, dagutil
19
19
20 # Commands start here, listed alphabetically
20 # Commands start here, listed alphabetically
21
21
22 def add(ui, repo, *pats, **opts):
22 def add(ui, repo, *pats, **opts):
23 """add the specified files on the next commit
23 """add the specified files on the next commit
24
24
25 Schedule files to be version controlled and added to the
25 Schedule files to be version controlled and added to the
26 repository.
26 repository.
27
27
28 The files will be added to the repository at the next commit. To
28 The files will be added to the repository at the next commit. To
29 undo an add before that, see :hg:`forget`.
29 undo an add before that, see :hg:`forget`.
30
30
31 If no names are given, add all files to the repository.
31 If no names are given, add all files to the repository.
32
32
33 .. container:: verbose
33 .. container:: verbose
34
34
35 An example showing how new (unknown) files are added
35 An example showing how new (unknown) files are added
36 automatically by :hg:`add`::
36 automatically by :hg:`add`::
37
37
38 $ ls
38 $ ls
39 foo.c
39 foo.c
40 $ hg status
40 $ hg status
41 ? foo.c
41 ? foo.c
42 $ hg add
42 $ hg add
43 adding foo.c
43 adding foo.c
44 $ hg status
44 $ hg status
45 A foo.c
45 A foo.c
46
46
47 Returns 0 if all files are successfully added.
47 Returns 0 if all files are successfully added.
48 """
48 """
49
49
50 m = cmdutil.match(repo, pats, opts)
50 m = cmdutil.match(repo, pats, opts)
51 rejected = cmdutil.add(ui, repo, m, opts.get('dry_run'),
51 rejected = cmdutil.add(ui, repo, m, opts.get('dry_run'),
52 opts.get('subrepos'), prefix="")
52 opts.get('subrepos'), prefix="")
53 return rejected and 1 or 0
53 return rejected and 1 or 0
54
54
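Beyond the docstring example above, add accepts file patterns and the usual walk options; two hedged command-line sketches (paths and the pattern are made up):

$ hg add -n 'glob:src/**.py'   # dry run: only report what would be added
$ hg add src/                  # schedule every unknown file under src/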
55 def addremove(ui, repo, *pats, **opts):
55 def addremove(ui, repo, *pats, **opts):
56 """add all new files, delete all missing files
56 """add all new files, delete all missing files
57
57
58 Add all new files and remove all missing files from the
58 Add all new files and remove all missing files from the
59 repository.
59 repository.
60
60
61 New files are ignored if they match any of the patterns in
61 New files are ignored if they match any of the patterns in
62 ``.hgignore``. As with add, these changes take effect at the next
62 ``.hgignore``. As with add, these changes take effect at the next
63 commit.
63 commit.
64
64
65 Use the -s/--similarity option to detect renamed files. With a
65 Use the -s/--similarity option to detect renamed files. With a
66 parameter greater than 0, this compares every removed file with
66 parameter greater than 0, this compares every removed file with
67 every added file and records those similar enough as renames. This
67 every added file and records those similar enough as renames. This
68 option takes a percentage between 0 (disabled) and 100 (files must
68 option takes a percentage between 0 (disabled) and 100 (files must
69 be identical) as its parameter. Detecting renamed files this way
69 be identical) as its parameter. Detecting renamed files this way
70 can be expensive. After using this option, :hg:`status -C` can be
70 can be expensive. After using this option, :hg:`status -C` can be
71 used to check which files were identified as moved or renamed.
71 used to check which files were identified as moved or renamed.
72
72
73 Returns 0 if all files are successfully added.
73 Returns 0 if all files are successfully added.
74 """
74 """
75 try:
75 try:
76 sim = float(opts.get('similarity') or 100)
76 sim = float(opts.get('similarity') or 100)
77 except ValueError:
77 except ValueError:
78 raise util.Abort(_('similarity must be a number'))
78 raise util.Abort(_('similarity must be a number'))
79 if sim < 0 or sim > 100:
79 if sim < 0 or sim > 100:
80 raise util.Abort(_('similarity must be between 0 and 100'))
80 raise util.Abort(_('similarity must be between 0 and 100'))
81 return cmdutil.addremove(repo, pats, opts, similarity=sim / 100.0)
81 return cmdutil.addremove(repo, pats, opts, similarity=sim / 100.0)
82
82
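A hedged illustration of the similarity threshold the docstring describes, where 90 means a removed and an added file must be at least 90% alike to be recorded as a rename (filenames depend on the repository):

$ hg addremove --similarity 90
$ hg status -C                 # lists each rename with the source it was detected from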
83 def annotate(ui, repo, *pats, **opts):
83 def annotate(ui, repo, *pats, **opts):
84 """show changeset information by line for each file
84 """show changeset information by line for each file
85
85
86 List changes in files, showing the revision id responsible for
86 List changes in files, showing the revision id responsible for
87 each line.
87 each line.
88
88
89 This command is useful for discovering when a change was made and
89 This command is useful for discovering when a change was made and
90 by whom.
90 by whom.
91
91
92 Without the -a/--text option, annotate will avoid processing files
92 Without the -a/--text option, annotate will avoid processing files
93 it detects as binary. With -a, annotate will annotate the file
93 it detects as binary. With -a, annotate will annotate the file
94 anyway, although the results will probably be neither useful
94 anyway, although the results will probably be neither useful
95 nor desirable.
95 nor desirable.
96
96
97 Returns 0 on success.
97 Returns 0 on success.
98 """
98 """
99 if opts.get('follow'):
99 if opts.get('follow'):
100 # --follow is deprecated and now just an alias for -f/--file
100 # --follow is deprecated and now just an alias for -f/--file
101 # to mimic the behavior of Mercurial before version 1.5
101 # to mimic the behavior of Mercurial before version 1.5
102 opts['file'] = True
102 opts['file'] = True
103
103
104 datefunc = ui.quiet and util.shortdate or util.datestr
104 datefunc = ui.quiet and util.shortdate or util.datestr
105 getdate = util.cachefunc(lambda x: datefunc(x[0].date()))
105 getdate = util.cachefunc(lambda x: datefunc(x[0].date()))
106
106
107 if not pats:
107 if not pats:
108 raise util.Abort(_('at least one filename or pattern is required'))
108 raise util.Abort(_('at least one filename or pattern is required'))
109
109
110 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
110 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
111 ('number', lambda x: str(x[0].rev())),
111 ('number', lambda x: str(x[0].rev())),
112 ('changeset', lambda x: short(x[0].node())),
112 ('changeset', lambda x: short(x[0].node())),
113 ('date', getdate),
113 ('date', getdate),
114 ('file', lambda x: x[0].path()),
114 ('file', lambda x: x[0].path()),
115 ]
115 ]
116
116
117 if (not opts.get('user') and not opts.get('changeset')
117 if (not opts.get('user') and not opts.get('changeset')
118 and not opts.get('date') and not opts.get('file')):
118 and not opts.get('date') and not opts.get('file')):
119 opts['number'] = True
119 opts['number'] = True
120
120
121 linenumber = opts.get('line_number') is not None
121 linenumber = opts.get('line_number') is not None
122 if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
122 if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
123 raise util.Abort(_('at least one of -n/-c is required for -l'))
123 raise util.Abort(_('at least one of -n/-c is required for -l'))
124
124
125 funcmap = [func for op, func in opmap if opts.get(op)]
125 funcmap = [func for op, func in opmap if opts.get(op)]
126 if linenumber:
126 if linenumber:
127 lastfunc = funcmap[-1]
127 lastfunc = funcmap[-1]
128 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
128 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
129
129
130 def bad(x, y):
130 def bad(x, y):
131 raise util.Abort("%s: %s" % (x, y))
131 raise util.Abort("%s: %s" % (x, y))
132
132
133 ctx = cmdutil.revsingle(repo, opts.get('rev'))
133 ctx = cmdutil.revsingle(repo, opts.get('rev'))
134 m = cmdutil.match(repo, pats, opts)
134 m = cmdutil.match(repo, pats, opts)
135 m.bad = bad
135 m.bad = bad
136 follow = not opts.get('no_follow')
136 follow = not opts.get('no_follow')
137 for abs in ctx.walk(m):
137 for abs in ctx.walk(m):
138 fctx = ctx[abs]
138 fctx = ctx[abs]
139 if not opts.get('text') and util.binary(fctx.data()):
139 if not opts.get('text') and util.binary(fctx.data()):
140 ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
140 ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
141 continue
141 continue
142
142
143 lines = fctx.annotate(follow=follow, linenumber=linenumber)
143 lines = fctx.annotate(follow=follow, linenumber=linenumber)
144 pieces = []
144 pieces = []
145
145
146 for f in funcmap:
146 for f in funcmap:
147 l = [f(n) for n, dummy in lines]
147 l = [f(n) for n, dummy in lines]
148 if l:
148 if l:
149 sized = [(x, encoding.colwidth(x)) for x in l]
149 sized = [(x, encoding.colwidth(x)) for x in l]
150 ml = max([w for x, w in sized])
150 ml = max([w for x, w in sized])
151 pieces.append(["%s%s" % (' ' * (ml - w), x) for x, w in sized])
151 pieces.append(["%s%s" % (' ' * (ml - w), x) for x, w in sized])
152
152
153 if pieces:
153 if pieces:
154 for p, l in zip(zip(*pieces), lines):
154 for p, l in zip(zip(*pieces), lines):
155 ui.write("%s: %s" % (" ".join(p), l[1]))
155 ui.write("%s: %s" % (" ".join(p), l[1]))
156
156
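Quick, hedged invocations of the options wired into opmap above (the filename is illustrative):

$ hg annotate -un foo.c   # prefix each line with user and revision number
$ hg annotate -nl foo.c   # revision number plus line number; -l requires -n or -c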
157 def archive(ui, repo, dest, **opts):
157 def archive(ui, repo, dest, **opts):
158 '''create an unversioned archive of a repository revision
158 '''create an unversioned archive of a repository revision
159
159
160 By default, the revision used is the parent of the working
160 By default, the revision used is the parent of the working
161 directory; use -r/--rev to specify a different revision.
161 directory; use -r/--rev to specify a different revision.
162
162
163 The archive type is automatically detected based on file
163 The archive type is automatically detected based on file
164 extension (or override using -t/--type).
164 extension (or override using -t/--type).
165
165
166 Valid types are:
166 Valid types are:
167
167
168 :``files``: a directory full of files (default)
168 :``files``: a directory full of files (default)
169 :``tar``: tar archive, uncompressed
169 :``tar``: tar archive, uncompressed
170 :``tbz2``: tar archive, compressed using bzip2
170 :``tbz2``: tar archive, compressed using bzip2
171 :``tgz``: tar archive, compressed using gzip
171 :``tgz``: tar archive, compressed using gzip
172 :``uzip``: zip archive, uncompressed
172 :``uzip``: zip archive, uncompressed
173 :``zip``: zip archive, compressed using deflate
173 :``zip``: zip archive, compressed using deflate
174
174
175 The exact name of the destination archive or directory is given
175 The exact name of the destination archive or directory is given
176 using a format string; see :hg:`help export` for details.
176 using a format string; see :hg:`help export` for details.
177
177
178 Each member added to an archive file has a directory prefix
178 Each member added to an archive file has a directory prefix
179 prepended. Use -p/--prefix to specify a format string for the
179 prepended. Use -p/--prefix to specify a format string for the
180 prefix. The default is the basename of the archive, with suffixes
180 prefix. The default is the basename of the archive, with suffixes
181 removed.
181 removed.
182
182
183 Returns 0 on success.
183 Returns 0 on success.
184 '''
184 '''
185
185
186 ctx = cmdutil.revsingle(repo, opts.get('rev'))
186 ctx = cmdutil.revsingle(repo, opts.get('rev'))
187 if not ctx:
187 if not ctx:
188 raise util.Abort(_('no working directory: please specify a revision'))
188 raise util.Abort(_('no working directory: please specify a revision'))
189 node = ctx.node()
189 node = ctx.node()
190 dest = cmdutil.make_filename(repo, dest, node)
190 dest = cmdutil.make_filename(repo, dest, node)
191 if os.path.realpath(dest) == repo.root:
191 if os.path.realpath(dest) == repo.root:
192 raise util.Abort(_('repository root cannot be destination'))
192 raise util.Abort(_('repository root cannot be destination'))
193
193
194 kind = opts.get('type') or archival.guesskind(dest) or 'files'
194 kind = opts.get('type') or archival.guesskind(dest) or 'files'
195 prefix = opts.get('prefix')
195 prefix = opts.get('prefix')
196
196
197 if dest == '-':
197 if dest == '-':
198 if kind == 'files':
198 if kind == 'files':
199 raise util.Abort(_('cannot archive plain files to stdout'))
199 raise util.Abort(_('cannot archive plain files to stdout'))
200 dest = sys.stdout
200 dest = sys.stdout
201 if not prefix:
201 if not prefix:
202 prefix = os.path.basename(repo.root) + '-%h'
202 prefix = os.path.basename(repo.root) + '-%h'
203
203
204 prefix = cmdutil.make_filename(repo, prefix, node)
204 prefix = cmdutil.make_filename(repo, prefix, node)
205 matchfn = cmdutil.match(repo, [], opts)
205 matchfn = cmdutil.match(repo, [], opts)
206 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
206 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
207 matchfn, prefix, subrepos=opts.get('subrepos'))
207 matchfn, prefix, subrepos=opts.get('subrepos'))
208
208
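Two hedged examples of the type guessing and prefix handling described above (archive names are illustrative):

$ hg archive ../project-1.0.tar.gz                # type tgz inferred from the extension
$ hg archive -r 1.0 -t zip -p project-1.0 project-1.0.zip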
209 def backout(ui, repo, node=None, rev=None, **opts):
209 def backout(ui, repo, node=None, rev=None, **opts):
210 '''reverse effect of earlier changeset
210 '''reverse effect of earlier changeset
211
211
212 Prepare a new changeset with the effect of REV undone in the
212 Prepare a new changeset with the effect of REV undone in the
213 current working directory.
213 current working directory.
214
214
215 If REV is the parent of the working directory, then this new changeset
215 If REV is the parent of the working directory, then this new changeset
216 is committed automatically. Otherwise, hg needs to merge the
216 is committed automatically. Otherwise, hg needs to merge the
217 changes and the merged result is left uncommitted.
217 changes and the merged result is left uncommitted.
218
218
219 By default, the pending changeset will have one parent,
219 By default, the pending changeset will have one parent,
220 maintaining a linear history. With --merge, the pending changeset
220 maintaining a linear history. With --merge, the pending changeset
221 will instead have two parents: the old parent of the working
221 will instead have two parents: the old parent of the working
222 directory and a new child of REV that simply undoes REV.
222 directory and a new child of REV that simply undoes REV.
223
223
224 Before version 1.7, the behavior without --merge was equivalent to
224 Before version 1.7, the behavior without --merge was equivalent to
225 specifying --merge followed by :hg:`update --clean .` to cancel
225 specifying --merge followed by :hg:`update --clean .` to cancel
226 the merge and leave the child of REV as a head to be merged
226 the merge and leave the child of REV as a head to be merged
227 separately.
227 separately.
228
228
229 See :hg:`help dates` for a list of formats valid for -d/--date.
229 See :hg:`help dates` for a list of formats valid for -d/--date.
230
230
231 Returns 0 on success.
231 Returns 0 on success.
232 '''
232 '''
233 if rev and node:
233 if rev and node:
234 raise util.Abort(_("please specify just one revision"))
234 raise util.Abort(_("please specify just one revision"))
235
235
236 if not rev:
236 if not rev:
237 rev = node
237 rev = node
238
238
239 if not rev:
239 if not rev:
240 raise util.Abort(_("please specify a revision to backout"))
240 raise util.Abort(_("please specify a revision to backout"))
241
241
242 date = opts.get('date')
242 date = opts.get('date')
243 if date:
243 if date:
244 opts['date'] = util.parsedate(date)
244 opts['date'] = util.parsedate(date)
245
245
246 cmdutil.bail_if_changed(repo)
246 cmdutil.bail_if_changed(repo)
247 node = cmdutil.revsingle(repo, rev).node()
247 node = cmdutil.revsingle(repo, rev).node()
248
248
249 op1, op2 = repo.dirstate.parents()
249 op1, op2 = repo.dirstate.parents()
250 a = repo.changelog.ancestor(op1, node)
250 a = repo.changelog.ancestor(op1, node)
251 if a != node:
251 if a != node:
252 raise util.Abort(_('cannot backout change on a different branch'))
252 raise util.Abort(_('cannot backout change on a different branch'))
253
253
254 p1, p2 = repo.changelog.parents(node)
254 p1, p2 = repo.changelog.parents(node)
255 if p1 == nullid:
255 if p1 == nullid:
256 raise util.Abort(_('cannot backout a change with no parents'))
256 raise util.Abort(_('cannot backout a change with no parents'))
257 if p2 != nullid:
257 if p2 != nullid:
258 if not opts.get('parent'):
258 if not opts.get('parent'):
259 raise util.Abort(_('cannot backout a merge changeset without '
259 raise util.Abort(_('cannot backout a merge changeset without '
260 '--parent'))
260 '--parent'))
261 p = repo.lookup(opts['parent'])
261 p = repo.lookup(opts['parent'])
262 if p not in (p1, p2):
262 if p not in (p1, p2):
263 raise util.Abort(_('%s is not a parent of %s') %
263 raise util.Abort(_('%s is not a parent of %s') %
264 (short(p), short(node)))
264 (short(p), short(node)))
265 parent = p
265 parent = p
266 else:
266 else:
267 if opts.get('parent'):
267 if opts.get('parent'):
268 raise util.Abort(_('cannot use --parent on non-merge changeset'))
268 raise util.Abort(_('cannot use --parent on non-merge changeset'))
269 parent = p1
269 parent = p1
270
270
271 # the backout should appear on the same branch
271 # the backout should appear on the same branch
272 branch = repo.dirstate.branch()
272 branch = repo.dirstate.branch()
273 hg.clean(repo, node, show_stats=False)
273 hg.clean(repo, node, show_stats=False)
274 repo.dirstate.setbranch(branch)
274 repo.dirstate.setbranch(branch)
275 revert_opts = opts.copy()
275 revert_opts = opts.copy()
276 revert_opts['date'] = None
276 revert_opts['date'] = None
277 revert_opts['all'] = True
277 revert_opts['all'] = True
278 revert_opts['rev'] = hex(parent)
278 revert_opts['rev'] = hex(parent)
279 revert_opts['no_backup'] = None
279 revert_opts['no_backup'] = None
280 revert(ui, repo, **revert_opts)
280 revert(ui, repo, **revert_opts)
281 if not opts.get('merge') and op1 != node:
281 if not opts.get('merge') and op1 != node:
282 try:
282 try:
283 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
283 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
284 return hg.update(repo, op1)
284 return hg.update(repo, op1)
285 finally:
285 finally:
286 ui.setconfig('ui', 'forcemerge', '')
286 ui.setconfig('ui', 'forcemerge', '')
287
287
288 commit_opts = opts.copy()
288 commit_opts = opts.copy()
289 commit_opts['addremove'] = False
289 commit_opts['addremove'] = False
290 if not commit_opts['message'] and not commit_opts['logfile']:
290 if not commit_opts['message'] and not commit_opts['logfile']:
291 # we don't translate commit messages
291 # we don't translate commit messages
292 commit_opts['message'] = "Backed out changeset %s" % short(node)
292 commit_opts['message'] = "Backed out changeset %s" % short(node)
293 commit_opts['force_editor'] = True
293 commit_opts['force_editor'] = True
294 commit(ui, repo, **commit_opts)
294 commit(ui, repo, **commit_opts)
295 def nice(node):
295 def nice(node):
296 return '%d:%s' % (repo.changelog.rev(node), short(node))
296 return '%d:%s' % (repo.changelog.rev(node), short(node))
297 ui.status(_('changeset %s backs out changeset %s\n') %
297 ui.status(_('changeset %s backs out changeset %s\n') %
298 (nice(repo.changelog.tip()), nice(node)))
298 (nice(repo.changelog.tip()), nice(node)))
299 if opts.get('merge') and op1 != node:
299 if opts.get('merge') and op1 != node:
300 hg.clean(repo, op1, show_stats=False)
300 hg.clean(repo, op1, show_stats=False)
301 ui.status(_('merging with changeset %s\n')
301 ui.status(_('merging with changeset %s\n')
302 % nice(repo.changelog.tip()))
302 % nice(repo.changelog.tip()))
303 try:
303 try:
304 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
304 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
305 return hg.merge(repo, hex(repo.changelog.tip()))
305 return hg.merge(repo, hex(repo.changelog.tip()))
306 finally:
306 finally:
307 ui.setconfig('ui', 'forcemerge', '')
307 ui.setconfig('ui', 'forcemerge', '')
308 return 0
308 return 0
309
309
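Hedged usage sketches for the two modes the docstring distinguishes (revision numbers are illustrative):

$ hg backout -r 42            # linear mode: one pending changeset that undoes 42
$ hg backout --merge 42       # also merge the backout into the working directory
$ hg backout --parent 41 42   # backing out a merge requires picking a parent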
310 def bisect(ui, repo, rev=None, extra=None, command=None,
310 def bisect(ui, repo, rev=None, extra=None, command=None,
311 reset=None, good=None, bad=None, skip=None, extend=None,
311 reset=None, good=None, bad=None, skip=None, extend=None,
312 noupdate=None):
312 noupdate=None):
313 """subdivision search of changesets
313 """subdivision search of changesets
314
314
315 This command helps to find changesets which introduce problems. To
315 This command helps to find changesets which introduce problems. To
316 use, mark the earliest changeset you know exhibits the problem as
316 use, mark the earliest changeset you know exhibits the problem as
317 bad, then mark the latest changeset which is free from the problem
317 bad, then mark the latest changeset which is free from the problem
318 as good. Bisect will update your working directory to a revision
318 as good. Bisect will update your working directory to a revision
319 for testing (unless the -U/--noupdate option is specified). Once
319 for testing (unless the -U/--noupdate option is specified). Once
320 you have performed tests, mark the working directory as good or
320 you have performed tests, mark the working directory as good or
321 bad, and bisect will either update to another candidate changeset
321 bad, and bisect will either update to another candidate changeset
322 or announce that it has found the bad revision.
322 or announce that it has found the bad revision.
323
323
324 As a shortcut, you can also use the revision argument to mark a
324 As a shortcut, you can also use the revision argument to mark a
325 revision as good or bad without checking it out first.
325 revision as good or bad without checking it out first.
326
326
327 If you supply a command, it will be used for automatic bisection.
327 If you supply a command, it will be used for automatic bisection.
328 Its exit status will be used to mark revisions as good or bad:
328 Its exit status will be used to mark revisions as good or bad:
329 status 0 means good, 125 means to skip the revision, 127
329 status 0 means good, 125 means to skip the revision, 127
330 (command not found) will abort the bisection, and any other
330 (command not found) will abort the bisection, and any other
331 non-zero exit status means the revision is bad.
331 non-zero exit status means the revision is bad.
332
332
333 Returns 0 on success.
333 Returns 0 on success.
334 """
334 """
335 def extendbisectrange(nodes, good):
335 def extendbisectrange(nodes, good):
336 # bisect is incomplete when it ends on a merge node and
336 # bisect is incomplete when it ends on a merge node and
337 # one of the parents was not checked.
337 # one of the parents was not checked.
338 parents = repo[nodes[0]].parents()
338 parents = repo[nodes[0]].parents()
339 if len(parents) > 1:
339 if len(parents) > 1:
340 side = good and state['bad'] or state['good']
340 side = good and state['bad'] or state['good']
341 num = len(set(i.node() for i in parents) & set(side))
341 num = len(set(i.node() for i in parents) & set(side))
342 if num == 1:
342 if num == 1:
343 return parents[0].ancestor(parents[1])
343 return parents[0].ancestor(parents[1])
344 return None
344 return None
345
345
346 def print_result(nodes, good):
346 def print_result(nodes, good):
347 displayer = cmdutil.show_changeset(ui, repo, {})
347 displayer = cmdutil.show_changeset(ui, repo, {})
348 if len(nodes) == 1:
348 if len(nodes) == 1:
349 # narrowed it down to a single revision
349 # narrowed it down to a single revision
350 if good:
350 if good:
351 ui.write(_("The first good revision is:\n"))
351 ui.write(_("The first good revision is:\n"))
352 else:
352 else:
353 ui.write(_("The first bad revision is:\n"))
353 ui.write(_("The first bad revision is:\n"))
354 displayer.show(repo[nodes[0]])
354 displayer.show(repo[nodes[0]])
355 extendnode = extendbisectrange(nodes, good)
355 extendnode = extendbisectrange(nodes, good)
356 if extendnode is not None:
356 if extendnode is not None:
357 ui.write(_('Not all ancestors of this changeset have been'
357 ui.write(_('Not all ancestors of this changeset have been'
358 ' checked.\nUse bisect --extend to continue the '
358 ' checked.\nUse bisect --extend to continue the '
359 'bisection from\nthe common ancestor, %s.\n')
359 'bisection from\nthe common ancestor, %s.\n')
360 % extendnode)
360 % extendnode)
361 else:
361 else:
362 # multiple possible revisions
362 # multiple possible revisions
363 if good:
363 if good:
364 ui.write(_("Due to skipped revisions, the first "
364 ui.write(_("Due to skipped revisions, the first "
365 "good revision could be any of:\n"))
365 "good revision could be any of:\n"))
366 else:
366 else:
367 ui.write(_("Due to skipped revisions, the first "
367 ui.write(_("Due to skipped revisions, the first "
368 "bad revision could be any of:\n"))
368 "bad revision could be any of:\n"))
369 for n in nodes:
369 for n in nodes:
370 displayer.show(repo[n])
370 displayer.show(repo[n])
371 displayer.close()
371 displayer.close()
372
372
373 def check_state(state, interactive=True):
373 def check_state(state, interactive=True):
374 if not state['good'] or not state['bad']:
374 if not state['good'] or not state['bad']:
375 if (good or bad or skip or reset) and interactive:
375 if (good or bad or skip or reset) and interactive:
376 return
376 return
377 if not state['good']:
377 if not state['good']:
378 raise util.Abort(_('cannot bisect (no known good revisions)'))
378 raise util.Abort(_('cannot bisect (no known good revisions)'))
379 else:
379 else:
380 raise util.Abort(_('cannot bisect (no known bad revisions)'))
380 raise util.Abort(_('cannot bisect (no known bad revisions)'))
381 return True
381 return True
382
382
383 # backward compatibility
383 # backward compatibility
384 if rev in "good bad reset init".split():
384 if rev in "good bad reset init".split():
385 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
385 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
386 cmd, rev, extra = rev, extra, None
386 cmd, rev, extra = rev, extra, None
387 if cmd == "good":
387 if cmd == "good":
388 good = True
388 good = True
389 elif cmd == "bad":
389 elif cmd == "bad":
390 bad = True
390 bad = True
391 else:
391 else:
392 reset = True
392 reset = True
393 elif extra or good + bad + skip + reset + extend + bool(command) > 1:
393 elif extra or good + bad + skip + reset + extend + bool(command) > 1:
394 raise util.Abort(_('incompatible arguments'))
394 raise util.Abort(_('incompatible arguments'))
395
395
396 if reset:
396 if reset:
397 p = repo.join("bisect.state")
397 p = repo.join("bisect.state")
398 if os.path.exists(p):
398 if os.path.exists(p):
399 os.unlink(p)
399 os.unlink(p)
400 return
400 return
401
401
402 state = hbisect.load_state(repo)
402 state = hbisect.load_state(repo)
403
403
404 if command:
404 if command:
405 changesets = 1
405 changesets = 1
406 try:
406 try:
407 while changesets:
407 while changesets:
408 # update state
408 # update state
409 status = util.system(command)
409 status = util.system(command)
410 if status == 125:
410 if status == 125:
411 transition = "skip"
411 transition = "skip"
412 elif status == 0:
412 elif status == 0:
413 transition = "good"
413 transition = "good"
414 # status < 0 means process was killed
414 # status < 0 means process was killed
415 elif status == 127:
415 elif status == 127:
416 raise util.Abort(_("failed to execute %s") % command)
416 raise util.Abort(_("failed to execute %s") % command)
417 elif status < 0:
417 elif status < 0:
418 raise util.Abort(_("%s killed") % command)
418 raise util.Abort(_("%s killed") % command)
419 else:
419 else:
420 transition = "bad"
420 transition = "bad"
421 ctx = cmdutil.revsingle(repo, rev)
421 ctx = cmdutil.revsingle(repo, rev)
422 rev = None # clear for future iterations
422 rev = None # clear for future iterations
423 state[transition].append(ctx.node())
423 state[transition].append(ctx.node())
424 ui.status(_('Changeset %d:%s: %s\n') % (ctx, ctx, transition))
424 ui.status(_('Changeset %d:%s: %s\n') % (ctx, ctx, transition))
425 check_state(state, interactive=False)
425 check_state(state, interactive=False)
426 # bisect
426 # bisect
427 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
427 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
428 # update to next check
428 # update to next check
429 cmdutil.bail_if_changed(repo)
429 cmdutil.bail_if_changed(repo)
430 hg.clean(repo, nodes[0], show_stats=False)
430 hg.clean(repo, nodes[0], show_stats=False)
431 finally:
431 finally:
432 hbisect.save_state(repo, state)
432 hbisect.save_state(repo, state)
433 print_result(nodes, good)
433 print_result(nodes, good)
434 return
434 return
435
435
436 # update state
436 # update state
437
437
438 if rev:
438 if rev:
439 nodes = [repo.lookup(i) for i in cmdutil.revrange(repo, [rev])]
439 nodes = [repo.lookup(i) for i in cmdutil.revrange(repo, [rev])]
440 else:
440 else:
441 nodes = [repo.lookup('.')]
441 nodes = [repo.lookup('.')]
442
442
443 if good or bad or skip:
443 if good or bad or skip:
444 if good:
444 if good:
445 state['good'] += nodes
445 state['good'] += nodes
446 elif bad:
446 elif bad:
447 state['bad'] += nodes
447 state['bad'] += nodes
448 elif skip:
448 elif skip:
449 state['skip'] += nodes
449 state['skip'] += nodes
450 hbisect.save_state(repo, state)
450 hbisect.save_state(repo, state)
451
451
452 if not check_state(state):
452 if not check_state(state):
453 return
453 return
454
454
455 # actually bisect
455 # actually bisect
456 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
456 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
457 if extend:
457 if extend:
458 if not changesets:
458 if not changesets:
459 extendnode = extendbisectrange(nodes, good)
459 extendnode = extendbisectrange(nodes, good)
460 if extendnode is not None:
460 if extendnode is not None:
461 ui.write(_("Extending search to changeset %d:%s\n")
461 ui.write(_("Extending search to changeset %d:%s\n")
462 % (extendnode.rev(), extendnode))
462 % (extendnode.rev(), extendnode))
463 if noupdate:
463 if noupdate:
464 return
464 return
465 cmdutil.bail_if_changed(repo)
465 cmdutil.bail_if_changed(repo)
466 return hg.clean(repo, extendnode.node())
466 return hg.clean(repo, extendnode.node())
467 raise util.Abort(_("nothing to extend"))
467 raise util.Abort(_("nothing to extend"))
468
468
469 if changesets == 0:
469 if changesets == 0:
470 print_result(nodes, good)
470 print_result(nodes, good)
471 else:
471 else:
472 assert len(nodes) == 1 # only a single node can be tested next
472 assert len(nodes) == 1 # only a single node can be tested next
473 node = nodes[0]
473 node = nodes[0]
474 # compute the approximate number of remaining tests
474 # compute the approximate number of remaining tests
475 tests, size = 0, 2
475 tests, size = 0, 2
476 while size <= changesets:
476 while size <= changesets:
477 tests, size = tests + 1, size * 2
477 tests, size = tests + 1, size * 2
478 rev = repo.changelog.rev(node)
478 rev = repo.changelog.rev(node)
479 ui.write(_("Testing changeset %d:%s "
479 ui.write(_("Testing changeset %d:%s "
480 "(%d changesets remaining, ~%d tests)\n")
480 "(%d changesets remaining, ~%d tests)\n")
481 % (rev, short(node), changesets, tests))
481 % (rev, short(node), changesets, tests))
482 if not noupdate:
482 if not noupdate:
483 cmdutil.bail_if_changed(repo)
483 cmdutil.bail_if_changed(repo)
484 return hg.clean(repo, node)
484 return hg.clean(repo, node)
485
485
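A hedged walk-through of the manual and automatic workflows the docstring describes (revisions and the test command are illustrative):

$ hg bisect --reset
$ hg bisect --bad 1500
$ hg bisect --good 1000            # hg now updates to a candidate revision
$ hg bisect --good                 # mark the tested working directory; repeat
$ hg bisect --command 'make test'  # or let the exit status (0/125/other) decide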
486 def bookmark(ui, repo, mark=None, rev=None, force=False, delete=False,
486 def bookmark(ui, repo, mark=None, rev=None, force=False, delete=False,
487 rename=None, inactive=False):
487 rename=None, inactive=False):
488 '''track a line of development with movable markers
488 '''track a line of development with movable markers
489
489
490 Bookmarks are pointers to certain commits that move when
490 Bookmarks are pointers to certain commits that move when
491 committing. Bookmarks are local. They can be renamed, copied and
491 committing. Bookmarks are local. They can be renamed, copied and
492 deleted. It is possible to use bookmark names in :hg:`merge` and
492 deleted. It is possible to use bookmark names in :hg:`merge` and
493 :hg:`update` to merge and update respectively to a given bookmark.
493 :hg:`update` to merge and update respectively to a given bookmark.
494
494
495 You can use :hg:`bookmark NAME` to set a bookmark on the working
495 You can use :hg:`bookmark NAME` to set a bookmark on the working
496 directory's parent revision with the given name. If you specify
496 directory's parent revision with the given name. If you specify
497 a revision using -r REV (where REV may be an existing bookmark),
497 a revision using -r REV (where REV may be an existing bookmark),
498 the bookmark is assigned to that revision.
498 the bookmark is assigned to that revision.
499
499
500 Bookmarks can be pushed and pulled between repositories (see :hg:`help
500 Bookmarks can be pushed and pulled between repositories (see :hg:`help
501 push` and :hg:`help pull`). This requires both the local and remote
501 push` and :hg:`help pull`). This requires both the local and remote
502 repositories to support bookmarks. For versions prior to 1.8, this means
502 repositories to support bookmarks. For versions prior to 1.8, this means
503 the bookmarks extension must be enabled.
503 the bookmarks extension must be enabled.
504 '''
504 '''
505 hexfn = ui.debugflag and hex or short
505 hexfn = ui.debugflag and hex or short
506 marks = repo._bookmarks
506 marks = repo._bookmarks
507 cur = repo.changectx('.').node()
507 cur = repo.changectx('.').node()
508
508
509 if rename:
509 if rename:
510 if rename not in marks:
510 if rename not in marks:
511 raise util.Abort(_("bookmark '%s' does not exist") % rename)
511 raise util.Abort(_("bookmark '%s' does not exist") % rename)
512 if mark in marks and not force:
512 if mark in marks and not force:
513 raise util.Abort(_("bookmark '%s' already exists "
513 raise util.Abort(_("bookmark '%s' already exists "
514 "(use -f to force)") % mark)
514 "(use -f to force)") % mark)
515 if mark is None:
515 if mark is None:
516 raise util.Abort(_("new bookmark name required"))
516 raise util.Abort(_("new bookmark name required"))
517 marks[mark] = marks[rename]
517 marks[mark] = marks[rename]
518 if repo._bookmarkcurrent == rename and not inactive:
518 if repo._bookmarkcurrent == rename and not inactive:
519 bookmarks.setcurrent(repo, mark)
519 bookmarks.setcurrent(repo, mark)
520 del marks[rename]
520 del marks[rename]
521 bookmarks.write(repo)
521 bookmarks.write(repo)
522 return
522 return
523
523
524 if delete:
524 if delete:
525 if mark is None:
525 if mark is None:
526 raise util.Abort(_("bookmark name required"))
526 raise util.Abort(_("bookmark name required"))
527 if mark not in marks:
527 if mark not in marks:
528 raise util.Abort(_("bookmark '%s' does not exist") % mark)
528 raise util.Abort(_("bookmark '%s' does not exist") % mark)
529 if mark == repo._bookmarkcurrent:
529 if mark == repo._bookmarkcurrent:
530 bookmarks.setcurrent(repo, None)
530 bookmarks.setcurrent(repo, None)
531 del marks[mark]
531 del marks[mark]
532 bookmarks.write(repo)
532 bookmarks.write(repo)
533 return
533 return
534
534
535 if mark is not None:
535 if mark is not None:
536 if "\n" in mark:
536 if "\n" in mark:
537 raise util.Abort(_("bookmark name cannot contain newlines"))
537 raise util.Abort(_("bookmark name cannot contain newlines"))
538 mark = mark.strip()
538 mark = mark.strip()
539 if not mark:
539 if not mark:
540 raise util.Abort(_("bookmark names cannot consist entirely of "
540 raise util.Abort(_("bookmark names cannot consist entirely of "
541 "whitespace"))
541 "whitespace"))
542 if inactive and mark == repo._bookmarkcurrent:
542 if inactive and mark == repo._bookmarkcurrent:
543 bookmarks.setcurrent(repo, None)
543 bookmarks.setcurrent(repo, None)
544 return
544 return
545 if mark in marks and not force:
545 if mark in marks and not force:
546 raise util.Abort(_("bookmark '%s' already exists "
546 raise util.Abort(_("bookmark '%s' already exists "
547 "(use -f to force)") % mark)
547 "(use -f to force)") % mark)
548 if ((mark in repo.branchtags() or mark == repo.dirstate.branch())
548 if ((mark in repo.branchtags() or mark == repo.dirstate.branch())
549 and not force):
549 and not force):
550 raise util.Abort(
550 raise util.Abort(
551 _("a bookmark cannot have the name of an existing branch"))
551 _("a bookmark cannot have the name of an existing branch"))
552 if rev:
552 if rev:
553 marks[mark] = repo.lookup(rev)
553 marks[mark] = repo.lookup(rev)
554 else:
554 else:
555 marks[mark] = repo.changectx('.').node()
555 marks[mark] = repo.changectx('.').node()
556 if not inactive and repo.changectx('.').node() == marks[mark]:
556 if not inactive and repo.changectx('.').node() == marks[mark]:
557 bookmarks.setcurrent(repo, mark)
557 bookmarks.setcurrent(repo, mark)
558 bookmarks.write(repo)
558 bookmarks.write(repo)
559 return
559 return
560
560
561 if mark is None:
561 if mark is None:
562 if rev:
562 if rev:
563 raise util.Abort(_("bookmark name required"))
563 raise util.Abort(_("bookmark name required"))
564 if len(marks) == 0:
564 if len(marks) == 0:
565 ui.status(_("no bookmarks set\n"))
565 ui.status(_("no bookmarks set\n"))
566 else:
566 else:
567 for bmark, n in sorted(marks.iteritems()):
567 for bmark, n in sorted(marks.iteritems()):
568 current = repo._bookmarkcurrent
568 current = repo._bookmarkcurrent
569 if bmark == current and n == cur:
569 if bmark == current and n == cur:
570 prefix, label = '*', 'bookmarks.current'
570 prefix, label = '*', 'bookmarks.current'
571 else:
571 else:
572 prefix, label = ' ', ''
572 prefix, label = ' ', ''
573
573
574 if ui.quiet:
574 if ui.quiet:
575 ui.write("%s\n" % bmark, label=label)
575 ui.write("%s\n" % bmark, label=label)
576 else:
576 else:
577 ui.write(" %s %-25s %d:%s\n" % (
577 ui.write(" %s %-25s %d:%s\n" % (
578 prefix, bmark, repo.changelog.rev(n), hexfn(n)),
578 prefix, bmark, repo.changelog.rev(n), hexfn(n)),
579 label=label)
579 label=label)
580 return
580 return
581
581
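Hedged examples of the code paths handled above (bookmark names and the revision are illustrative):

$ hg bookmark feature-x             # bookmark the working directory parent
$ hg bookmark -r 42 qa-approved     # attach a bookmark to a specific revision
$ hg bookmarks                      # list them; '*' marks the current bookmark
$ hg bookmark -m feature-x feature  # rename; -d deletes, -f forces reuse of a name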
582 def branch(ui, repo, label=None, **opts):
582 def branch(ui, repo, label=None, **opts):
583 """set or show the current branch name
583 """set or show the current branch name
584
584
585 With no argument, show the current branch name. With one argument,
585 With no argument, show the current branch name. With one argument,
586 set the working directory branch name (the branch will not exist
586 set the working directory branch name (the branch will not exist
587 in the repository until the next commit). Standard practice
587 in the repository until the next commit). Standard practice
588 recommends that primary development take place on the 'default'
588 recommends that primary development take place on the 'default'
589 branch.
589 branch.
590
590
591 Unless -f/--force is specified, branch will not let you set a
591 Unless -f/--force is specified, branch will not let you set a
592 branch name that already exists, even if it's inactive.
592 branch name that already exists, even if it's inactive.
593
593
594 Use -C/--clean to reset the working directory branch to that of
594 Use -C/--clean to reset the working directory branch to that of
595 the parent of the working directory, negating a previous branch
595 the parent of the working directory, negating a previous branch
596 change.
596 change.
597
597
598 Use the command :hg:`update` to switch to an existing branch. Use
598 Use the command :hg:`update` to switch to an existing branch. Use
599 :hg:`commit --close-branch` to mark this branch as closed.
599 :hg:`commit --close-branch` to mark this branch as closed.
600
600
601 Returns 0 on success.
601 Returns 0 on success.
602 """
602 """
603
603
604 if opts.get('clean'):
604 if opts.get('clean'):
605 label = repo[None].p1().branch()
605 label = repo[None].p1().branch()
606 repo.dirstate.setbranch(label)
606 repo.dirstate.setbranch(label)
607 ui.status(_('reset working directory to branch %s\n') % label)
607 ui.status(_('reset working directory to branch %s\n') % label)
608 elif label:
608 elif label:
609 if not opts.get('force') and label in repo.branchtags():
609 if not opts.get('force') and label in repo.branchtags():
610 if label not in [p.branch() for p in repo.parents()]:
610 if label not in [p.branch() for p in repo.parents()]:
611 raise util.Abort(_('a branch of the same name already exists'),
611 raise util.Abort(_('a branch of the same name already exists'),
612 hint=_("use 'hg update' to switch to it"))
612 hint=_("use 'hg update' to switch to it"))
613 repo.dirstate.setbranch(label)
613 repo.dirstate.setbranch(label)
614 ui.status(_('marked working directory as branch %s\n') % label)
614 ui.status(_('marked working directory as branch %s\n') % label)
615 else:
615 else:
616 ui.write("%s\n" % repo.dirstate.branch())
616 ui.write("%s\n" % repo.dirstate.branch())
617
617
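Hedged examples matching the three branches of the code above (the branch name is illustrative):

$ hg branch                         # show the current branch name
$ hg branch stable                  # mark the working directory as branch 'stable'
$ hg branch -C                      # reset to the parent's branch, undoing the above
$ hg commit --close-branch -m done  # later, mark the branch as closed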
618 def branches(ui, repo, active=False, closed=False):
618 def branches(ui, repo, active=False, closed=False):
619 """list repository named branches
619 """list repository named branches
620
620
621 List the repository's named branches, indicating which ones are
621 List the repository's named branches, indicating which ones are
622 inactive. If -c/--closed is specified, also list branches which have
622 inactive. If -c/--closed is specified, also list branches which have
623 been marked closed (see :hg:`commit --close-branch`).
623 been marked closed (see :hg:`commit --close-branch`).
624
624
625 If -a/--active is specified, only show active branches. A branch
625 If -a/--active is specified, only show active branches. A branch
626 is considered active if it contains repository heads.
626 is considered active if it contains repository heads.
627
627
628 Use the command :hg:`update` to switch to an existing branch.
628 Use the command :hg:`update` to switch to an existing branch.
629
629
630 Returns 0.
630 Returns 0.
631 """
631 """
632
632
633 hexfunc = ui.debugflag and hex or short
633 hexfunc = ui.debugflag and hex or short
634 activebranches = [repo[n].branch() for n in repo.heads()]
634 activebranches = [repo[n].branch() for n in repo.heads()]
635 def testactive(tag, node):
635 def testactive(tag, node):
636 realhead = tag in activebranches
636 realhead = tag in activebranches
637 open = node in repo.branchheads(tag, closed=False)
637 open = node in repo.branchheads(tag, closed=False)
638 return realhead and open
638 return realhead and open
639 branches = sorted([(testactive(tag, node), repo.changelog.rev(node), tag)
639 branches = sorted([(testactive(tag, node), repo.changelog.rev(node), tag)
640 for tag, node in repo.branchtags().items()],
640 for tag, node in repo.branchtags().items()],
641 reverse=True)
641 reverse=True)
642
642
643 for isactive, node, tag in branches:
643 for isactive, node, tag in branches:
644 if (not active) or isactive:
644 if (not active) or isactive:
645 if ui.quiet:
645 if ui.quiet:
646 ui.write("%s\n" % tag)
646 ui.write("%s\n" % tag)
647 else:
647 else:
648 hn = repo.lookup(node)
648 hn = repo.lookup(node)
649 if isactive:
649 if isactive:
650 label = 'branches.active'
650 label = 'branches.active'
651 notice = ''
651 notice = ''
652 elif hn not in repo.branchheads(tag, closed=False):
652 elif hn not in repo.branchheads(tag, closed=False):
653 if not closed:
653 if not closed:
654 continue
654 continue
655 label = 'branches.closed'
655 label = 'branches.closed'
656 notice = _(' (closed)')
656 notice = _(' (closed)')
657 else:
657 else:
658 label = 'branches.inactive'
658 label = 'branches.inactive'
659 notice = _(' (inactive)')
659 notice = _(' (inactive)')
660 if tag == repo.dirstate.branch():
660 if tag == repo.dirstate.branch():
661 label = 'branches.current'
661 label = 'branches.current'
662 rev = str(node).rjust(31 - encoding.colwidth(tag))
662 rev = str(node).rjust(31 - encoding.colwidth(tag))
663 rev = ui.label('%s:%s' % (rev, hexfunc(hn)), 'log.changeset')
663 rev = ui.label('%s:%s' % (rev, hexfunc(hn)), 'log.changeset')
664 tag = ui.label(tag, label)
664 tag = ui.label(tag, label)
665 ui.write("%s %s%s\n" % (tag, rev, notice))
665 ui.write("%s %s%s\n" % (tag, rev, notice))
666
666
667 def bundle(ui, repo, fname, dest=None, **opts):
667 def bundle(ui, repo, fname, dest=None, **opts):
668 """create a changegroup file
668 """create a changegroup file
669
669
670 Generate a compressed changegroup file collecting changesets not
670 Generate a compressed changegroup file collecting changesets not
671 known to be in another repository.
671 known to be in another repository.
672
672
673 If you omit the destination repository, then hg assumes the
673 If you omit the destination repository, then hg assumes the
674 destination will have all the nodes you specify with --base
674 destination will have all the nodes you specify with --base
675 parameters. To create a bundle containing all changesets, use
675 parameters. To create a bundle containing all changesets, use
676 -a/--all (or --base null).
676 -a/--all (or --base null).
677
677
678 You can change compression method with the -t/--type option.
678 You can change compression method with the -t/--type option.
679 The available compression methods are: none, bzip2, and
679 The available compression methods are: none, bzip2, and
680 gzip (by default, bundles are compressed using bzip2).
680 gzip (by default, bundles are compressed using bzip2).
681
681
682 The bundle file can then be transferred using conventional means
682 The bundle file can then be transferred using conventional means
683 and applied to another repository with the unbundle or pull
683 and applied to another repository with the unbundle or pull
684 command. This is useful when direct push and pull are not
684 command. This is useful when direct push and pull are not
685 available or when exporting an entire repository is undesirable.
685 available or when exporting an entire repository is undesirable.
686
686
687 Applying bundles preserves all changeset contents including
687 Applying bundles preserves all changeset contents including
688 permissions, copy/rename information, and revision history.
688 permissions, copy/rename information, and revision history.
689
689
690 Returns 0 on success, 1 if no changes found.
690 Returns 0 on success, 1 if no changes found.
691 """
691 """
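# Example (hypothetical revisions and paths): bundle everything that is not
# already reachable from a known base revision, then apply it elsewhere with
# unbundle; --type selects an entry from the btypes table further down:
#
#   $ hg bundle --base 1234 ../changes.hg
#   $ hg bundle --all --type gzip ../everything.hg
#   $ hg -R ../other unbundle ../changes.hg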
692 revs = None
692 revs = None
693 if 'rev' in opts:
693 if 'rev' in opts:
694 revs = cmdutil.revrange(repo, opts['rev'])
694 revs = cmdutil.revrange(repo, opts['rev'])
695
695
696 if opts.get('all'):
696 if opts.get('all'):
697 base = ['null']
697 base = ['null']
698 else:
698 else:
699 base = cmdutil.revrange(repo, opts.get('base'))
699 base = cmdutil.revrange(repo, opts.get('base'))
700 if base:
700 if base:
701 if dest:
701 if dest:
702 raise util.Abort(_("--base is incompatible with specifying "
702 raise util.Abort(_("--base is incompatible with specifying "
703 "a destination"))
703 "a destination"))
704 common = [repo.lookup(rev) for rev in base]
704 common = [repo.lookup(rev) for rev in base]
705 heads = revs and map(repo.lookup, revs) or revs
705 heads = revs and map(repo.lookup, revs) or revs
706 else:
706 else:
707 dest = ui.expandpath(dest or 'default-push', dest or 'default')
707 dest = ui.expandpath(dest or 'default-push', dest or 'default')
708 dest, branches = hg.parseurl(dest, opts.get('branch'))
708 dest, branches = hg.parseurl(dest, opts.get('branch'))
709 other = hg.repository(hg.remoteui(repo, opts), dest)
709 other = hg.repository(hg.remoteui(repo, opts), dest)
710 revs, checkout = hg.addbranchrevs(repo, other, branches, revs)
710 revs, checkout = hg.addbranchrevs(repo, other, branches, revs)
711 heads = revs and map(repo.lookup, revs) or revs
711 heads = revs and map(repo.lookup, revs) or revs
712 common, outheads = discovery.findcommonoutgoing(repo, other,
712 common, outheads = discovery.findcommonoutgoing(repo, other,
713 onlyheads=heads,
713 onlyheads=heads,
714 force=opts.get('force'))
714 force=opts.get('force'))
715
715
716 cg = repo.getbundle('bundle', common=common, heads=heads)
716 cg = repo.getbundle('bundle', common=common, heads=heads)
717 if not cg:
717 if not cg:
718 ui.status(_("no changes found\n"))
718 ui.status(_("no changes found\n"))
719 return 1
719 return 1
720
720
721 bundletype = opts.get('type', 'bzip2').lower()
721 bundletype = opts.get('type', 'bzip2').lower()
722 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
722 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
723 bundletype = btypes.get(bundletype)
723 bundletype = btypes.get(bundletype)
724 if bundletype not in changegroup.bundletypes:
724 if bundletype not in changegroup.bundletypes:
725 raise util.Abort(_('unknown bundle type specified with --type'))
725 raise util.Abort(_('unknown bundle type specified with --type'))
726
726
727 changegroup.writebundle(cg, fname, bundletype)
727 changegroup.writebundle(cg, fname, bundletype)
728
728
729 def cat(ui, repo, file1, *pats, **opts):
729 def cat(ui, repo, file1, *pats, **opts):
730 """output the current or given revision of files
730 """output the current or given revision of files
731
731
732 Print the specified files as they were at the given revision. If
732 Print the specified files as they were at the given revision. If
733 no revision is given, the parent of the working directory is used,
733 no revision is given, the parent of the working directory is used,
734 or tip if no revision is checked out.
734 or tip if no revision is checked out.
735
735
736 Output may be to a file, in which case the name of the file is
736 Output may be to a file, in which case the name of the file is
737 given using a format string. The formatting rules are the same as
737 given using a format string. The formatting rules are the same as
738 for the export command, with the following additions:
738 for the export command, with the following additions:
739
739
740 :``%s``: basename of file being printed
740 :``%s``: basename of file being printed
741 :``%d``: dirname of file being printed, or '.' if in repository root
741 :``%d``: dirname of file being printed, or '.' if in repository root
742 :``%p``: root-relative path name of file being printed
742 :``%p``: root-relative path name of file being printed
743
743
744 Returns 0 on success.
744 Returns 0 on success.
745 """
745 """
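# Example (hypothetical file and tag): write each requested file next to its
# original, using the %d (dirname) and %s (basename) format keys described
# above:
#
#   $ hg cat -r 1.0 -o '%d/%s.v1.0' src/parser.c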
746 ctx = cmdutil.revsingle(repo, opts.get('rev'))
746 ctx = cmdutil.revsingle(repo, opts.get('rev'))
747 err = 1
747 err = 1
748 m = cmdutil.match(repo, (file1,) + pats, opts)
748 m = cmdutil.match(repo, (file1,) + pats, opts)
749 for abs in ctx.walk(m):
749 for abs in ctx.walk(m):
750 fp = cmdutil.make_file(repo, opts.get('output'), ctx.node(), pathname=abs)
750 fp = cmdutil.make_file(repo, opts.get('output'), ctx.node(), pathname=abs)
751 data = ctx[abs].data()
751 data = ctx[abs].data()
752 if opts.get('decode'):
752 if opts.get('decode'):
753 data = repo.wwritedata(abs, data)
753 data = repo.wwritedata(abs, data)
754 fp.write(data)
754 fp.write(data)
755 fp.close()
755 fp.close()
756 err = 0
756 err = 0
757 return err
757 return err
758
758
759 def clone(ui, source, dest=None, **opts):
759 def clone(ui, source, dest=None, **opts):
760 """make a copy of an existing repository
760 """make a copy of an existing repository
761
761
762 Create a copy of an existing repository in a new directory.
762 Create a copy of an existing repository in a new directory.
763
763
764 If no destination directory name is specified, it defaults to the
764 If no destination directory name is specified, it defaults to the
765 basename of the source.
765 basename of the source.
766
766
767 The location of the source is added to the new repository's
767 The location of the source is added to the new repository's
768 ``.hg/hgrc`` file, as the default to be used for future pulls.
768 ``.hg/hgrc`` file, as the default to be used for future pulls.
769
769
770 See :hg:`help urls` for valid source format details.
770 See :hg:`help urls` for valid source format details.
771
771
772 It is possible to specify an ``ssh://`` URL as the destination, but no
772 It is possible to specify an ``ssh://`` URL as the destination, but no
773 ``.hg/hgrc`` and working directory will be created on the remote side.
773 ``.hg/hgrc`` and working directory will be created on the remote side.
774 Please see :hg:`help urls` for important details about ``ssh://`` URLs.
774 Please see :hg:`help urls` for important details about ``ssh://`` URLs.
775
775
776 A set of changesets (tags or branch names) to pull may be specified
776 A set of changesets (tags or branch names) to pull may be specified
777 by listing each changeset (tag or branch name) with -r/--rev.
777 by listing each changeset (tag or branch name) with -r/--rev.
778 If -r/--rev is used, the cloned repository will contain only a subset
778 If -r/--rev is used, the cloned repository will contain only a subset
779 of the changesets of the source repository. Only the set of changesets
779 of the changesets of the source repository. Only the set of changesets
780 defined by all -r/--rev options (including all their ancestors)
780 defined by all -r/--rev options (including all their ancestors)
781 will be pulled into the destination repository.
781 will be pulled into the destination repository.
782 No subsequent changesets (including subsequent tags) will be present
782 No subsequent changesets (including subsequent tags) will be present
783 in the destination.
783 in the destination.
784
784
785 Using -r/--rev (or 'clone src#rev dest') implies --pull, even for
785 Using -r/--rev (or 'clone src#rev dest') implies --pull, even for
786 local source repositories.
786 local source repositories.
787
787
788 For efficiency, hardlinks are used for cloning whenever the source
788 For efficiency, hardlinks are used for cloning whenever the source
789 and destination are on the same filesystem (note this applies only
789 and destination are on the same filesystem (note this applies only
790 to the repository data, not to the working directory). Some
790 to the repository data, not to the working directory). Some
791 filesystems, such as AFS, implement hardlinking incorrectly, but
791 filesystems, such as AFS, implement hardlinking incorrectly, but
792 do not report errors. In these cases, use the --pull option to
792 do not report errors. In these cases, use the --pull option to
793 avoid hardlinking.
793 avoid hardlinking.
794
794
795 In some cases, you can clone repositories and the working directory
795 In some cases, you can clone repositories and the working directory
796 using full hardlinks with ::
796 using full hardlinks with ::
797
797
798 $ cp -al REPO REPOCLONE
798 $ cp -al REPO REPOCLONE
799
799
800 This is the fastest way to clone, but it is not always safe. The
800 This is the fastest way to clone, but it is not always safe. The
801 operation is not atomic (making sure REPO is not modified during
801 operation is not atomic (making sure REPO is not modified during
802 the operation is up to you) and you have to make sure your editor
802 the operation is up to you) and you have to make sure your editor
803 breaks hardlinks (Emacs and most Linux kernel tools do so). Also,
803 breaks hardlinks (Emacs and most Linux kernel tools do so). Also,
804 this is not compatible with certain extensions that place their
804 this is not compatible with certain extensions that place their
805 metadata under the .hg directory, such as mq.
805 metadata under the .hg directory, such as mq.
806
806
807 Mercurial will update the working directory to the first applicable
807 Mercurial will update the working directory to the first applicable
808 revision from this list:
808 revision from this list:
809
809
810 a) null if -U or the source repository has no changesets
810 a) null if -U or the source repository has no changesets
811 b) if -u . and the source repository is local, the first parent of
811 b) if -u . and the source repository is local, the first parent of
812 the source repository's working directory
812 the source repository's working directory
813 c) the changeset specified with -u (if a branch name, this means the
813 c) the changeset specified with -u (if a branch name, this means the
814 latest head of that branch)
814 latest head of that branch)
815 d) the changeset specified with -r
815 d) the changeset specified with -r
816 e) the tipmost head specified with -b
816 e) the tipmost head specified with -b
817 f) the tipmost head specified with the url#branch source syntax
817 f) the tipmost head specified with the url#branch source syntax
818 g) the tipmost head of the default branch
818 g) the tipmost head of the default branch
819 h) tip
819 h) tip
820
820
821 Returns 0 on success.
821 Returns 0 on success.
822 """
822 """
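# Example (hypothetical URLs and names): clone only the history leading up to
# a tag, or clone everything and check out the latest head of a named branch
# (rules c and d from the list above):
#
#   $ hg clone -r v1.0 http://example.com/proj proj-v1.0
#   $ hg clone -u stable http://example.com/proj proj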
823 if opts.get('noupdate') and opts.get('updaterev'):
823 if opts.get('noupdate') and opts.get('updaterev'):
824 raise util.Abort(_("cannot specify both --noupdate and --updaterev"))
824 raise util.Abort(_("cannot specify both --noupdate and --updaterev"))
825
825
826 r = hg.clone(hg.remoteui(ui, opts), source, dest,
826 r = hg.clone(hg.remoteui(ui, opts), source, dest,
827 pull=opts.get('pull'),
827 pull=opts.get('pull'),
828 stream=opts.get('uncompressed'),
828 stream=opts.get('uncompressed'),
829 rev=opts.get('rev'),
829 rev=opts.get('rev'),
830 update=opts.get('updaterev') or not opts.get('noupdate'),
830 update=opts.get('updaterev') or not opts.get('noupdate'),
831 branch=opts.get('branch'))
831 branch=opts.get('branch'))
832
832
833 return r is None
833 return r is None
834
834
835 def commit(ui, repo, *pats, **opts):
835 def commit(ui, repo, *pats, **opts):
836 """commit the specified files or all outstanding changes
836 """commit the specified files or all outstanding changes
837
837
838 Commit changes to the given files into the repository. Unlike a
838 Commit changes to the given files into the repository. Unlike a
839 centralized SCM, this operation is a local operation. See
839 centralized SCM, this operation is a local operation. See
840 :hg:`push` for a way to actively distribute your changes.
840 :hg:`push` for a way to actively distribute your changes.
841
841
842 If a list of files is omitted, all changes reported by :hg:`status`
842 If a list of files is omitted, all changes reported by :hg:`status`
843 will be committed.
843 will be committed.
844
844
845 If you are committing the result of a merge, do not provide any
845 If you are committing the result of a merge, do not provide any
846 filenames or -I/-X filters.
846 filenames or -I/-X filters.
847
847
848 If no commit message is specified, Mercurial starts your
848 If no commit message is specified, Mercurial starts your
849 configured editor where you can enter a message. In case your
849 configured editor where you can enter a message. In case your
850 commit fails, you will find a backup of your message in
850 commit fails, you will find a backup of your message in
851 ``.hg/last-message.txt``.
851 ``.hg/last-message.txt``.
852
852
853 See :hg:`help dates` for a list of formats valid for -d/--date.
853 See :hg:`help dates` for a list of formats valid for -d/--date.
854
854
855 Returns 0 on success, 1 if nothing changed.
855 Returns 0 on success, 1 if nothing changed.
856 """
856 """
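# Example (hypothetical messages): a plain commit, and a commit that marks the
# current branch head as closed so it disappears from "hg branches":
#
#   $ hg commit -m 'fix overflow in the parser'
#   $ hg commit --close-branch -m 'this line of work is done'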
857 extra = {}
857 extra = {}
858 if opts.get('close_branch'):
858 if opts.get('close_branch'):
859 if repo['.'].node() not in repo.branchheads():
859 if repo['.'].node() not in repo.branchheads():
860 # The topo heads set is included in the branch heads set of the
860 # The topo heads set is included in the branch heads set of the
861 # current branch, so it's sufficient to test branchheads
861 # current branch, so it's sufficient to test branchheads
862 raise util.Abort(_('can only close branch heads'))
862 raise util.Abort(_('can only close branch heads'))
863 extra['close'] = 1
863 extra['close'] = 1
864 e = cmdutil.commiteditor
864 e = cmdutil.commiteditor
865 if opts.get('force_editor'):
865 if opts.get('force_editor'):
866 e = cmdutil.commitforceeditor
866 e = cmdutil.commitforceeditor
867
867
868 def commitfunc(ui, repo, message, match, opts):
868 def commitfunc(ui, repo, message, match, opts):
869 return repo.commit(message, opts.get('user'), opts.get('date'), match,
869 return repo.commit(message, opts.get('user'), opts.get('date'), match,
870 editor=e, extra=extra)
870 editor=e, extra=extra)
871
871
872 branch = repo[None].branch()
872 branch = repo[None].branch()
873 bheads = repo.branchheads(branch)
873 bheads = repo.branchheads(branch)
874
874
875 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
875 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
876 if not node:
876 if not node:
877 stat = repo.status(match=cmdutil.match(repo, pats, opts))
877 stat = repo.status(match=cmdutil.match(repo, pats, opts))
878 if stat[3]:
878 if stat[3]:
879 ui.status(_("nothing changed (%d missing files, see 'hg status')\n")
879 ui.status(_("nothing changed (%d missing files, see 'hg status')\n")
880 % len(stat[3]))
880 % len(stat[3]))
881 else:
881 else:
882 ui.status(_("nothing changed\n"))
882 ui.status(_("nothing changed\n"))
883 return 1
883 return 1
884
884
885 ctx = repo[node]
885 ctx = repo[node]
886 parents = ctx.parents()
886 parents = ctx.parents()
887
887
888 if bheads and not [x for x in parents
888 if bheads and not [x for x in parents
889 if x.node() in bheads and x.branch() == branch]:
889 if x.node() in bheads and x.branch() == branch]:
890 ui.status(_('created new head\n'))
890 ui.status(_('created new head\n'))
891 # The message is not printed for initial roots. For the other
891 # The message is not printed for initial roots. For the other
892 # changesets, it is printed in the following situations:
892 # changesets, it is printed in the following situations:
893 #
893 #
894 # Par column: for the 2 parents with ...
894 # Par column: for the 2 parents with ...
895 # N: null or no parent
895 # N: null or no parent
896 # B: parent is on another named branch
896 # B: parent is on another named branch
897 # C: parent is a regular non-head changeset
897 # C: parent is a regular non-head changeset
898 # H: parent was a branch head of the current branch
898 # H: parent was a branch head of the current branch
899 # Msg column: whether we print "created new head" message
899 # Msg column: whether we print "created new head" message
900 # In the following, it is assumed that there already exists some
900 # In the following, it is assumed that there already exists some
901 # initial branch heads of the current branch, otherwise nothing is
901 # initial branch heads of the current branch, otherwise nothing is
902 # printed anyway.
902 # printed anyway.
903 #
903 #
904 # Par Msg Comment
904 # Par Msg Comment
905 # NN y additional topo root
905 # NN y additional topo root
906 #
906 #
907 # BN y additional branch root
907 # BN y additional branch root
908 # CN y additional topo head
908 # CN y additional topo head
909 # HN n usual case
909 # HN n usual case
910 #
910 #
911 # BB y weird additional branch root
911 # BB y weird additional branch root
912 # CB y branch merge
912 # CB y branch merge
913 # HB n merge with named branch
913 # HB n merge with named branch
914 #
914 #
915 # CC y additional head from merge
915 # CC y additional head from merge
916 # CH n merge with a head
916 # CH n merge with a head
917 #
917 #
918 # HH n head merge: head count decreases
918 # HH n head merge: head count decreases
919
919
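# Worked example of the table above (hypothetical history): committing on top
# of a changeset that already has a descendant (row "CN") adds a topological
# head, so "created new head" is printed; committing on top of the current
# branch head (row "HN") merely advances that head and prints nothing.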
920 if not opts.get('close_branch'):
920 if not opts.get('close_branch'):
921 for r in parents:
921 for r in parents:
922 if r.extra().get('close') and r.branch() == branch:
922 if r.extra().get('close') and r.branch() == branch:
923 ui.status(_('reopening closed branch head %d\n') % r)
923 ui.status(_('reopening closed branch head %d\n') % r)
924
924
925 if ui.debugflag:
925 if ui.debugflag:
926 ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
926 ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
927 elif ui.verbose:
927 elif ui.verbose:
928 ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
928 ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
929
929
930 def copy(ui, repo, *pats, **opts):
930 def copy(ui, repo, *pats, **opts):
931 """mark files as copied for the next commit
931 """mark files as copied for the next commit
932
932
933 Mark dest as having copies of source files. If dest is a
933 Mark dest as having copies of source files. If dest is a
934 directory, copies are put in that directory. If dest is a file,
934 directory, copies are put in that directory. If dest is a file,
935 the source must be a single file.
935 the source must be a single file.
936
936
937 By default, this command copies the contents of files as they
937 By default, this command copies the contents of files as they
938 exist in the working directory. If invoked with -A/--after, the
938 exist in the working directory. If invoked with -A/--after, the
939 operation is recorded, but no copying is performed.
939 operation is recorded, but no copying is performed.
940
940
941 This command takes effect with the next commit. To undo a copy
941 This command takes effect with the next commit. To undo a copy
942 before that, see :hg:`revert`.
942 before that, see :hg:`revert`.
943
943
944 Returns 0 on success, 1 if errors are encountered.
944 Returns 0 on success, 1 if errors are encountered.
945 """
945 """
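# Example (hypothetical files): record that new.c is a copy of old.c; with
# --after, only the copy record is written because new.c already exists in
# the working directory:
#
#   $ hg copy old.c new.c
#   $ hg copy --after old.c new.c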
946 wlock = repo.wlock(False)
946 wlock = repo.wlock(False)
947 try:
947 try:
948 return cmdutil.copy(ui, repo, pats, opts)
948 return cmdutil.copy(ui, repo, pats, opts)
949 finally:
949 finally:
950 wlock.release()
950 wlock.release()
951
951
952 def debugancestor(ui, repo, *args):
952 def debugancestor(ui, repo, *args):
953 """find the ancestor revision of two revisions in a given index"""
953 """find the ancestor revision of two revisions in a given index"""
954 if len(args) == 3:
954 if len(args) == 3:
955 index, rev1, rev2 = args
955 index, rev1, rev2 = args
956 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), index)
956 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), index)
957 lookup = r.lookup
957 lookup = r.lookup
958 elif len(args) == 2:
958 elif len(args) == 2:
959 if not repo:
959 if not repo:
960 raise util.Abort(_("there is no Mercurial repository here "
960 raise util.Abort(_("there is no Mercurial repository here "
961 "(.hg not found)"))
961 "(.hg not found)"))
962 rev1, rev2 = args
962 rev1, rev2 = args
963 r = repo.changelog
963 r = repo.changelog
964 lookup = repo.lookup
964 lookup = repo.lookup
965 else:
965 else:
966 raise util.Abort(_('either two or three arguments required'))
966 raise util.Abort(_('either two or three arguments required'))
967 a = r.ancestor(lookup(rev1), lookup(rev2))
967 a = r.ancestor(lookup(rev1), lookup(rev2))
968 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
968 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
969
969
970 def debugbuilddag(ui, repo, text,
970 def debugbuilddag(ui, repo, text,
971 mergeable_file=False,
971 mergeable_file=False,
972 overwritten_file=False,
972 overwritten_file=False,
973 new_file=False):
973 new_file=False):
974 """builds a repo with a given dag from scratch in the current empty repo
974 """builds a repo with a given dag from scratch in the current empty repo
975
975
976 Elements:
976 Elements:
977
977
978 - "+n" is a linear run of n nodes based on the current default parent
978 - "+n" is a linear run of n nodes based on the current default parent
979 - "." is a single node based on the current default parent
979 - "." is a single node based on the current default parent
980 - "$" resets the default parent to null (implied at the start);
980 - "$" resets the default parent to null (implied at the start);
981 otherwise the default parent is always the last node created
981 otherwise the default parent is always the last node created
982 - "<p" sets the default parent to the backref p
982 - "<p" sets the default parent to the backref p
983 - "*p" is a fork at parent p, which is a backref
983 - "*p" is a fork at parent p, which is a backref
984 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
984 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
985 - "/p2" is a merge of the preceding node and p2
985 - "/p2" is a merge of the preceding node and p2
986 - ":tag" defines a local tag for the preceding node
986 - ":tag" defines a local tag for the preceding node
987 - "@branch" sets the named branch for subsequent nodes
987 - "@branch" sets the named branch for subsequent nodes
988 - "#...\\n" is a comment up to the end of the line
988 - "#...\\n" is a comment up to the end of the line
989
989
990 Whitespace between the above elements is ignored.
990 Whitespace between the above elements is ignored.
991
991
992 A backref is either
992 A backref is either
993
993
994 - a number n, which references the node curr-n, where curr is the current
994 - a number n, which references the node curr-n, where curr is the current
995 node, or
995 node, or
996 - the name of a local tag you placed earlier using ":tag", or
996 - the name of a local tag you placed earlier using ":tag", or
997 - empty to denote the default parent.
997 - empty to denote the default parent.
998
998
999 All string-valued elements must be either strictly alphanumeric or
999 All string-valued elements must be either strictly alphanumeric or
1000 enclosed in double quotes ("..."), with "\\" as the escape character.
1000 enclosed in double quotes ("..."), with "\\" as the escape character.
1001 """
1001 """
1002
1002
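# Example (a sketch built only from the grammar documented above; names are
# hypothetical): three linear nodes tagged "mainline", a second root started
# after "$" resets the default parent, and a merge of the two lines:
#
#   $ hg init dagrepo && cd dagrepo
#   $ hg debugbuilddag '+3 :mainline $ +2 /mainline'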
1003 cl = repo.changelog
1003 cl = repo.changelog
1004 if len(cl) > 0:
1004 if len(cl) > 0:
1005 raise util.Abort(_('repository is not empty'))
1005 raise util.Abort(_('repository is not empty'))
1006
1006
1007 if mergeable_file:
1007 if mergeable_file:
1008 linesperrev = 2
1008 linesperrev = 2
1009 # determine number of revs in DAG
1009 # determine number of revs in DAG
1010 n = 0
1010 n = 0
1011 for type, data in dagparser.parsedag(text):
1011 for type, data in dagparser.parsedag(text):
1012 if type == 'n':
1012 if type == 'n':
1013 n += 1
1013 n += 1
1014 # make a file with k lines per rev
1014 # make a file with k lines per rev
1015 initialmergedlines = [str(i) for i in xrange(0, n * linesperrev)]
1015 initialmergedlines = [str(i) for i in xrange(0, n * linesperrev)]
1016 initialmergedlines.append("")
1016 initialmergedlines.append("")
1017
1017
1018 tags = []
1018 tags = []
1019
1019
1020 tr = repo.transaction("builddag")
1020 tr = repo.transaction("builddag")
1021 try:
1021 try:
1022
1022
1023 at = -1
1023 at = -1
1024 atbranch = 'default'
1024 atbranch = 'default'
1025 nodeids = []
1025 nodeids = []
1026 for type, data in dagparser.parsedag(text):
1026 for type, data in dagparser.parsedag(text):
1027 if type == 'n':
1027 if type == 'n':
1028 ui.note('node %s\n' % str(data))
1028 ui.note('node %s\n' % str(data))
1029 id, ps = data
1029 id, ps = data
1030
1030
1031 files = []
1031 files = []
1032 fctxs = {}
1032 fctxs = {}
1033
1033
1034 p2 = None
1034 p2 = None
1035 if mergeable_file:
1035 if mergeable_file:
1036 fn = "mf"
1036 fn = "mf"
1037 p1 = repo[ps[0]]
1037 p1 = repo[ps[0]]
1038 if len(ps) > 1:
1038 if len(ps) > 1:
1039 p2 = repo[ps[1]]
1039 p2 = repo[ps[1]]
1040 pa = p1.ancestor(p2)
1040 pa = p1.ancestor(p2)
1041 base, local, other = [x[fn].data() for x in pa, p1, p2]
1041 base, local, other = [x[fn].data() for x in pa, p1, p2]
1042 m3 = simplemerge.Merge3Text(base, local, other)
1042 m3 = simplemerge.Merge3Text(base, local, other)
1043 ml = [l.strip() for l in m3.merge_lines()]
1043 ml = [l.strip() for l in m3.merge_lines()]
1044 ml.append("")
1044 ml.append("")
1045 elif at > 0:
1045 elif at > 0:
1046 ml = p1[fn].data().split("\n")
1046 ml = p1[fn].data().split("\n")
1047 else:
1047 else:
1048 ml = initialmergedlines
1048 ml = initialmergedlines
1049 ml[id * linesperrev] += " r%i" % id
1049 ml[id * linesperrev] += " r%i" % id
1050 mergedtext = "\n".join(ml)
1050 mergedtext = "\n".join(ml)
1051 files.append(fn)
1051 files.append(fn)
1052 fctxs[fn] = context.memfilectx(fn, mergedtext)
1052 fctxs[fn] = context.memfilectx(fn, mergedtext)
1053
1053
1054 if overwritten_file:
1054 if overwritten_file:
1055 fn = "of"
1055 fn = "of"
1056 files.append(fn)
1056 files.append(fn)
1057 fctxs[fn] = context.memfilectx(fn, "r%i\n" % id)
1057 fctxs[fn] = context.memfilectx(fn, "r%i\n" % id)
1058
1058
1059 if new_file:
1059 if new_file:
1060 fn = "nf%i" % id
1060 fn = "nf%i" % id
1061 files.append(fn)
1061 files.append(fn)
1062 fctxs[fn] = context.memfilectx(fn, "r%i\n" % id)
1062 fctxs[fn] = context.memfilectx(fn, "r%i\n" % id)
1063 if len(ps) > 1:
1063 if len(ps) > 1:
1064 if not p2:
1064 if not p2:
1065 p2 = repo[ps[1]]
1065 p2 = repo[ps[1]]
1066 for fn in p2:
1066 for fn in p2:
1067 if fn.startswith("nf"):
1067 if fn.startswith("nf"):
1068 files.append(fn)
1068 files.append(fn)
1069 fctxs[fn] = p2[fn]
1069 fctxs[fn] = p2[fn]
1070
1070
1071 def fctxfn(repo, cx, path):
1071 def fctxfn(repo, cx, path):
1072 return fctxs.get(path)
1072 return fctxs.get(path)
1073
1073
1074 if len(ps) == 0 or ps[0] < 0:
1074 if len(ps) == 0 or ps[0] < 0:
1075 pars = [None, None]
1075 pars = [None, None]
1076 elif len(ps) == 1:
1076 elif len(ps) == 1:
1077 pars = [nodeids[ps[0]], None]
1077 pars = [nodeids[ps[0]], None]
1078 else:
1078 else:
1079 pars = [nodeids[p] for p in ps]
1079 pars = [nodeids[p] for p in ps]
1080 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
1080 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
1081 date=(id, 0),
1081 date=(id, 0),
1082 user="debugbuilddag",
1082 user="debugbuilddag",
1083 extra={'branch': atbranch})
1083 extra={'branch': atbranch})
1084 nodeid = repo.commitctx(cx)
1084 nodeid = repo.commitctx(cx)
1085 nodeids.append(nodeid)
1085 nodeids.append(nodeid)
1086 at = id
1086 at = id
1087 elif type == 'l':
1087 elif type == 'l':
1088 id, name = data
1088 id, name = data
1089 ui.note('tag %s\n' % name)
1089 ui.note('tag %s\n' % name)
1090 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
1090 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
1091 elif type == 'a':
1091 elif type == 'a':
1092 ui.note('branch %s\n' % data)
1092 ui.note('branch %s\n' % data)
1093 atbranch = data
1093 atbranch = data
1094 tr.close()
1094 tr.close()
1095 finally:
1095 finally:
1096 tr.release()
1096 tr.release()
1097
1097
1098 if tags:
1098 if tags:
1099 tagsf = repo.opener("localtags", "w")
1099 tagsf = repo.opener("localtags", "w")
1100 try:
1100 try:
1101 tagsf.write("".join(tags))
1101 tagsf.write("".join(tags))
1102 finally:
1102 finally:
1103 tagsf.close()
1103 tagsf.close()
1104
1104
1105 def debugcommands(ui, cmd='', *args):
1105 def debugcommands(ui, cmd='', *args):
1106 """list all available commands and options"""
1106 """list all available commands and options"""
1107 for cmd, vals in sorted(table.iteritems()):
1107 for cmd, vals in sorted(table.iteritems()):
1108 cmd = cmd.split('|')[0].strip('^')
1108 cmd = cmd.split('|')[0].strip('^')
1109 opts = ', '.join([i[1] for i in vals[1]])
1109 opts = ', '.join([i[1] for i in vals[1]])
1110 ui.write('%s: %s\n' % (cmd, opts))
1110 ui.write('%s: %s\n' % (cmd, opts))
1111
1111
1112 def debugcomplete(ui, cmd='', **opts):
1112 def debugcomplete(ui, cmd='', **opts):
1113 """returns the completion list associated with the given command"""
1113 """returns the completion list associated with the given command"""
1114
1114
1115 if opts.get('options'):
1115 if opts.get('options'):
1116 options = []
1116 options = []
1117 otables = [globalopts]
1117 otables = [globalopts]
1118 if cmd:
1118 if cmd:
1119 aliases, entry = cmdutil.findcmd(cmd, table, False)
1119 aliases, entry = cmdutil.findcmd(cmd, table, False)
1120 otables.append(entry[1])
1120 otables.append(entry[1])
1121 for t in otables:
1121 for t in otables:
1122 for o in t:
1122 for o in t:
1123 if "(DEPRECATED)" in o[3]:
1123 if "(DEPRECATED)" in o[3]:
1124 continue
1124 continue
1125 if o[0]:
1125 if o[0]:
1126 options.append('-%s' % o[0])
1126 options.append('-%s' % o[0])
1127 options.append('--%s' % o[1])
1127 options.append('--%s' % o[1])
1128 ui.write("%s\n" % "\n".join(options))
1128 ui.write("%s\n" % "\n".join(options))
1129 return
1129 return
1130
1130
1131 cmdlist = cmdutil.findpossible(cmd, table)
1131 cmdlist = cmdutil.findpossible(cmd, table)
1132 if ui.verbose:
1132 if ui.verbose:
1133 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
1133 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
1134 ui.write("%s\n" % "\n".join(sorted(cmdlist)))
1134 ui.write("%s\n" % "\n".join(sorted(cmdlist)))
1135
1135
1136 def debugfsinfo(ui, path = "."):
1136 def debugfsinfo(ui, path = "."):
1137 """show information detected about current filesystem"""
1137 """show information detected about current filesystem"""
1138 util.writefile('.debugfsinfo', '')
1138 util.writefile('.debugfsinfo', '')
1139 ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
1139 ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
1140 ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
1140 ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
1141 ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
1141 ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
1142 and 'yes' or 'no'))
1142 and 'yes' or 'no'))
1143 os.unlink('.debugfsinfo')
1143 os.unlink('.debugfsinfo')
1144
1144
1145 def debugrebuildstate(ui, repo, rev="tip"):
1145 def debugrebuildstate(ui, repo, rev="tip"):
1146 """rebuild the dirstate as it would look like for the given revision"""
1146 """rebuild the dirstate as it would look like for the given revision"""
1147 ctx = cmdutil.revsingle(repo, rev)
1147 ctx = cmdutil.revsingle(repo, rev)
1148 wlock = repo.wlock()
1148 wlock = repo.wlock()
1149 try:
1149 try:
1150 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
1150 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
1151 finally:
1151 finally:
1152 wlock.release()
1152 wlock.release()
1153
1153
1154 def debugcheckstate(ui, repo):
1154 def debugcheckstate(ui, repo):
1155 """validate the correctness of the current dirstate"""
1155 """validate the correctness of the current dirstate"""
1156 parent1, parent2 = repo.dirstate.parents()
1156 parent1, parent2 = repo.dirstate.parents()
1157 m1 = repo[parent1].manifest()
1157 m1 = repo[parent1].manifest()
1158 m2 = repo[parent2].manifest()
1158 m2 = repo[parent2].manifest()
1159 errors = 0
1159 errors = 0
1160 for f in repo.dirstate:
1160 for f in repo.dirstate:
1161 state = repo.dirstate[f]
1161 state = repo.dirstate[f]
1162 if state in "nr" and f not in m1:
1162 if state in "nr" and f not in m1:
1163 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1163 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1164 errors += 1
1164 errors += 1
1165 if state in "a" and f in m1:
1165 if state in "a" and f in m1:
1166 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1166 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1167 errors += 1
1167 errors += 1
1168 if state in "m" and f not in m1 and f not in m2:
1168 if state in "m" and f not in m1 and f not in m2:
1169 ui.warn(_("%s in state %s, but not in either manifest\n") %
1169 ui.warn(_("%s in state %s, but not in either manifest\n") %
1170 (f, state))
1170 (f, state))
1171 errors += 1
1171 errors += 1
1172 for f in m1:
1172 for f in m1:
1173 state = repo.dirstate[f]
1173 state = repo.dirstate[f]
1174 if state not in "nrm":
1174 if state not in "nrm":
1175 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
1175 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
1176 errors += 1
1176 errors += 1
1177 if errors:
1177 if errors:
1178 error = _(".hg/dirstate inconsistent with current parent's manifest")
1178 error = _(".hg/dirstate inconsistent with current parent's manifest")
1179 raise util.Abort(error)
1179 raise util.Abort(error)
1180
1180
1181 def showconfig(ui, repo, *values, **opts):
1181 def showconfig(ui, repo, *values, **opts):
1182 """show combined config settings from all hgrc files
1182 """show combined config settings from all hgrc files
1183
1183
1184 With no arguments, print names and values of all config items.
1184 With no arguments, print names and values of all config items.
1185
1185
1186 With one argument of the form section.name, print just the value
1186 With one argument of the form section.name, print just the value
1187 of that config item.
1187 of that config item.
1188
1188
1189 With multiple arguments, print names and values of all config
1189 With multiple arguments, print names and values of all config
1190 items with matching section names.
1190 items with matching section names.
1191
1191
1192 With --debug, the source (filename and line number) is printed
1192 With --debug, the source (filename and line number) is printed
1193 for each config item.
1193 for each config item.
1194
1194
1195 Returns 0 on success.
1195 Returns 0 on success.
1196 """
1196 """
1197
1197
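# Example (hypothetical values): a single section.name argument prints only
# the value, while a bare section name prints name=value pairs, matching the
# two ui.write() branches below:
#
#   $ hg showconfig ui.username
#   Jane Doe <jane@example.com>
#   $ hg showconfig paths
#   paths.default=http://example.com/proj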
1198 for f in scmutil.rcpath():
1198 for f in scmutil.rcpath():
1199 ui.debug(_('read config from: %s\n') % f)
1199 ui.debug(_('read config from: %s\n') % f)
1200 untrusted = bool(opts.get('untrusted'))
1200 untrusted = bool(opts.get('untrusted'))
1201 if values:
1201 if values:
1202 sections = [v for v in values if '.' not in v]
1202 sections = [v for v in values if '.' not in v]
1203 items = [v for v in values if '.' in v]
1203 items = [v for v in values if '.' in v]
1204 if len(items) > 1 or items and sections:
1204 if len(items) > 1 or items and sections:
1205 raise util.Abort(_('only one config item permitted'))
1205 raise util.Abort(_('only one config item permitted'))
1206 for section, name, value in ui.walkconfig(untrusted=untrusted):
1206 for section, name, value in ui.walkconfig(untrusted=untrusted):
1207 value = str(value).replace('\n', '\\n')
1207 value = str(value).replace('\n', '\\n')
1208 sectname = section + '.' + name
1208 sectname = section + '.' + name
1209 if values:
1209 if values:
1210 for v in values:
1210 for v in values:
1211 if v == section:
1211 if v == section:
1212 ui.debug('%s: ' %
1212 ui.debug('%s: ' %
1213 ui.configsource(section, name, untrusted))
1213 ui.configsource(section, name, untrusted))
1214 ui.write('%s=%s\n' % (sectname, value))
1214 ui.write('%s=%s\n' % (sectname, value))
1215 elif v == sectname:
1215 elif v == sectname:
1216 ui.debug('%s: ' %
1216 ui.debug('%s: ' %
1217 ui.configsource(section, name, untrusted))
1217 ui.configsource(section, name, untrusted))
1218 ui.write(value, '\n')
1218 ui.write(value, '\n')
1219 else:
1219 else:
1220 ui.debug('%s: ' %
1220 ui.debug('%s: ' %
1221 ui.configsource(section, name, untrusted))
1221 ui.configsource(section, name, untrusted))
1222 ui.write('%s=%s\n' % (sectname, value))
1222 ui.write('%s=%s\n' % (sectname, value))
1223
1223
1224 def debugknown(ui, repopath, *ids, **opts):
1224 def debugknown(ui, repopath, *ids, **opts):
1225 """test whether node ids are known to a repo
1225 """test whether node ids are known to a repo
1226
1226
1227 Every ID must be a full-length hex node id string. Prints a string of 0s
1227 Every ID must be a full-length hex node id string. Prints a string of 0s
1228 and 1s, one digit per id, where 1 means the node is known to the repository.
1228 and 1s, one digit per id, where 1 means the node is known to the repository.
1229 """
1229 """
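# Example (hypothetical peer; ID1 and ID2 stand for full 40-digit hex node
# ids): querying for two nodes where only the first exists prints "10":
#
#   $ hg debugknown http://example.com/proj ID1 ID2
#   10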
1230 repo = hg.repository(ui, repopath)
1230 repo = hg.repository(ui, repopath)
1231 if not repo.capable('known'):
1231 if not repo.capable('known'):
1232 raise util.Abort("known() not supported by target repository")
1232 raise util.Abort("known() not supported by target repository")
1233 flags = repo.known([bin(s) for s in ids])
1233 flags = repo.known([bin(s) for s in ids])
1234 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1234 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1235
1235
1236 def debugbundle(ui, bundlepath, all=None, **opts):
1236 def debugbundle(ui, bundlepath, all=None, **opts):
1237 """lists the contents of a bundle"""
1237 """lists the contents of a bundle"""
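# Example (hypothetical bundle file): by default only the changelog node ids
# in the bundle are listed, one per line; --all additionally dumps every
# delta chunk in the "id, p1, p2, cset, delta base, len(delta)" format used
# below:
#
#   $ hg debugbundle ../changes.hg
#   $ hg debugbundle --all ../changes.hg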
1238 f = url.open(ui, bundlepath)
1238 f = url.open(ui, bundlepath)
1239 try:
1239 try:
1240 gen = changegroup.readbundle(f, bundlepath)
1240 gen = changegroup.readbundle(f, bundlepath)
1241 if all:
1241 if all:
1242 ui.write("format: id, p1, p2, cset, delta base, len(delta)\n")
1242 ui.write("format: id, p1, p2, cset, delta base, len(delta)\n")
1243
1243
1244 def showchunks(named):
1244 def showchunks(named):
1245 ui.write("\n%s\n" % named)
1245 ui.write("\n%s\n" % named)
1246 chain = None
1246 chain = None
1247 while 1:
1247 while 1:
1248 chunkdata = gen.deltachunk(chain)
1248 chunkdata = gen.deltachunk(chain)
1249 if not chunkdata:
1249 if not chunkdata:
1250 break
1250 break
1251 node = chunkdata['node']
1251 node = chunkdata['node']
1252 p1 = chunkdata['p1']
1252 p1 = chunkdata['p1']
1253 p2 = chunkdata['p2']
1253 p2 = chunkdata['p2']
1254 cs = chunkdata['cs']
1254 cs = chunkdata['cs']
1255 deltabase = chunkdata['deltabase']
1255 deltabase = chunkdata['deltabase']
1256 delta = chunkdata['delta']
1256 delta = chunkdata['delta']
1257 ui.write("%s %s %s %s %s %s\n" %
1257 ui.write("%s %s %s %s %s %s\n" %
1258 (hex(node), hex(p1), hex(p2),
1258 (hex(node), hex(p1), hex(p2),
1259 hex(cs), hex(deltabase), len(delta)))
1259 hex(cs), hex(deltabase), len(delta)))
1260 chain = node
1260 chain = node
1261
1261
1262 chunkdata = gen.changelogheader()
1262 chunkdata = gen.changelogheader()
1263 showchunks("changelog")
1263 showchunks("changelog")
1264 chunkdata = gen.manifestheader()
1264 chunkdata = gen.manifestheader()
1265 showchunks("manifest")
1265 showchunks("manifest")
1266 while 1:
1266 while 1:
1267 chunkdata = gen.filelogheader()
1267 chunkdata = gen.filelogheader()
1268 if not chunkdata:
1268 if not chunkdata:
1269 break
1269 break
1270 fname = chunkdata['filename']
1270 fname = chunkdata['filename']
1271 showchunks(fname)
1271 showchunks(fname)
1272 else:
1272 else:
1273 chunkdata = gen.changelogheader()
1273 chunkdata = gen.changelogheader()
1274 chain = None
1274 chain = None
1275 while 1:
1275 while 1:
1276 chunkdata = gen.deltachunk(chain)
1276 chunkdata = gen.deltachunk(chain)
1277 if not chunkdata:
1277 if not chunkdata:
1278 break
1278 break
1279 node = chunkdata['node']
1279 node = chunkdata['node']
1280 ui.write("%s\n" % hex(node))
1280 ui.write("%s\n" % hex(node))
1281 chain = node
1281 chain = node
1282 finally:
1282 finally:
1283 f.close()
1283 f.close()
1284
1284
1285 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1285 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1286 """retrieves a bundle from a repo
1286 """retrieves a bundle from a repo
1287
1287
1288 Every ID must be a full-length hex node id string. Saves the bundle to the
1288 Every ID must be a full-length hex node id string. Saves the bundle to the
1289 given file.
1289 given file.
1290 """
1290 """
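# Example (hypothetical peer and ids; the --common and --type option names are
# assumed from the function signature and the btypes table below): fetch
# everything the peer has that is not an ancestor of a known common node and
# save it as a gzip bundle:
#
#   $ hg debuggetbundle http://example.com/proj out.hg --common COMMONID --type gzip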
1291 repo = hg.repository(ui, repopath)
1291 repo = hg.repository(ui, repopath)
1292 if not repo.capable('getbundle'):
1292 if not repo.capable('getbundle'):
1293 raise util.Abort("getbundle() not supported by target repository")
1293 raise util.Abort("getbundle() not supported by target repository")
1294 args = {}
1294 args = {}
1295 if common:
1295 if common:
1296 args['common'] = [bin(s) for s in common]
1296 args['common'] = [bin(s) for s in common]
1297 if head:
1297 if head:
1298 args['heads'] = [bin(s) for s in head]
1298 args['heads'] = [bin(s) for s in head]
1299 bundle = repo.getbundle('debug', **args)
1299 bundle = repo.getbundle('debug', **args)
1300
1300
1301 bundletype = opts.get('type', 'bzip2').lower()
1301 bundletype = opts.get('type', 'bzip2').lower()
1302 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
1302 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
1303 bundletype = btypes.get(bundletype)
1303 bundletype = btypes.get(bundletype)
1304 if bundletype not in changegroup.bundletypes:
1304 if bundletype not in changegroup.bundletypes:
1305 raise util.Abort(_('unknown bundle type specified with --type'))
1305 raise util.Abort(_('unknown bundle type specified with --type'))
1306 changegroup.writebundle(bundle, bundlepath, bundletype)
1306 changegroup.writebundle(bundle, bundlepath, bundletype)
1307
1307
1308 def debugpushkey(ui, repopath, namespace, *keyinfo):
1308 def debugpushkey(ui, repopath, namespace, *keyinfo):
1309 '''access the pushkey key/value protocol
1309 '''access the pushkey key/value protocol
1310
1310
1311 With two args, list the keys in the given namespace.
1311 With two args, list the keys in the given namespace.
1312
1312
1313 With five args, set a key to new if it currently is set to old.
1313 With five args, set a key to new if it currently is set to old.
1314 Reports success or failure.
1314 Reports success or failure.
1315 '''
1315 '''
1316
1316
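# Example (hypothetical peer; assumes a "bookmarks" pushkey namespace is
# available): list the keys in a namespace, then update one key only if it
# still holds the expected old value:
#
#   $ hg debugpushkey ../peer bookmarks
#   $ hg debugpushkey ../peer bookmarks feature OLDNODE NEWNODE
#   True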
1317 target = hg.repository(ui, repopath)
1317 target = hg.repository(ui, repopath)
1318 if keyinfo:
1318 if keyinfo:
1319 key, old, new = keyinfo
1319 key, old, new = keyinfo
1320 r = target.pushkey(namespace, key, old, new)
1320 r = target.pushkey(namespace, key, old, new)
1321 ui.status(str(r) + '\n')
1321 ui.status(str(r) + '\n')
1322 return not r
1322 return not r
1323 else:
1323 else:
1324 for k, v in target.listkeys(namespace).iteritems():
1324 for k, v in target.listkeys(namespace).iteritems():
1325 ui.write("%s\t%s\n" % (k.encode('string-escape'),
1325 ui.write("%s\t%s\n" % (k.encode('string-escape'),
1326 v.encode('string-escape')))
1326 v.encode('string-escape')))
1327
1327
1328 def debugrevspec(ui, repo, expr):
1328 def debugrevspec(ui, repo, expr):
1329 '''parse and apply a revision specification'''
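# Example (hypothetical revset): resolve a revision expression and print the
# matching revision numbers, one per line; with --verbose the parsed tree is
# shown first:
#
#   $ hg debugrevspec 'heads(default)'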
1329 '''parse and apply a revision specification'''
1330 if ui.verbose:
1330 if ui.verbose:
1331 tree = revset.parse(expr)[0]
1331 tree = revset.parse(expr)[0]
1332 ui.note(tree, "\n")
1332 ui.note(tree, "\n")
1333 newtree = revset.findaliases(ui, tree)
1333 newtree = revset.findaliases(ui, tree)
1334 if newtree != tree:
1334 if newtree != tree:
1335 ui.note(newtree, "\n")
1335 ui.note(newtree, "\n")
1336 func = revset.match(ui, expr)
1336 func = revset.match(ui, expr)
1337 for c in func(repo, range(len(repo))):
1337 for c in func(repo, range(len(repo))):
1338 ui.write("%s\n" % c)
1338 ui.write("%s\n" % c)
1339
1339
1340 def debugsetparents(ui, repo, rev1, rev2=None):
1340 def debugsetparents(ui, repo, rev1, rev2=None):
1341 """manually set the parents of the current working directory
1341 """manually set the parents of the current working directory
1342
1342
1343 This is useful for writing repository conversion tools, but should
1343 This is useful for writing repository conversion tools, but should
1344 be used with care.
1344 be used with care.
1345
1345
1346 Returns 0 on success.
1346 Returns 0 on success.
1347 """
1347 """
1348
1348
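# Example (hypothetical revisions, e.g. from a conversion script): make the
# working directory claim two existing revisions as its parents without
# touching any files, then commit the stitched result:
#
#   $ hg debugsetparents 12 27
#   $ hg commit -m 'stitch histories together'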
1349 r1 = cmdutil.revsingle(repo, rev1).node()
1349 r1 = cmdutil.revsingle(repo, rev1).node()
1350 r2 = cmdutil.revsingle(repo, rev2, 'null').node()
1350 r2 = cmdutil.revsingle(repo, rev2, 'null').node()
1351
1351
1352 wlock = repo.wlock()
1352 wlock = repo.wlock()
1353 try:
1353 try:
1354 repo.dirstate.setparents(r1, r2)
1354 repo.dirstate.setparents(r1, r2)
1355 finally:
1355 finally:
1356 wlock.release()
1356 wlock.release()
1357
1357
1358 def debugstate(ui, repo, nodates=None, datesort=None):
1358 def debugstate(ui, repo, nodates=None, datesort=None):
1359 """show the contents of the current dirstate"""
1359 """show the contents of the current dirstate"""
1360 timestr = ""
1360 timestr = ""
1361 showdate = not nodates
1361 showdate = not nodates
1362 if datesort:
1362 if datesort:
1363 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
1363 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
1364 else:
1364 else:
1365 keyfunc = None # sort by filename
1365 keyfunc = None # sort by filename
1366 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
1366 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
1367 if showdate:
1367 if showdate:
1368 if ent[3] == -1:
1368 if ent[3] == -1:
1369 # Pad or slice to locale representation
1369 # Pad or slice to locale representation
1370 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ",
1370 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ",
1371 time.localtime(0)))
1371 time.localtime(0)))
1372 timestr = 'unset'
1372 timestr = 'unset'
1373 timestr = (timestr[:locale_len] +
1373 timestr = (timestr[:locale_len] +
1374 ' ' * (locale_len - len(timestr)))
1374 ' ' * (locale_len - len(timestr)))
1375 else:
1375 else:
1376 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
1376 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
1377 time.localtime(ent[3]))
1377 time.localtime(ent[3]))
1378 if ent[1] & 020000:
1378 if ent[1] & 020000:
1379 mode = 'lnk'
1379 mode = 'lnk'
1380 else:
1380 else:
1381 mode = '%3o' % (ent[1] & 0777)
1381 mode = '%3o' % (ent[1] & 0777)
1382 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
1382 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
1383 for f in repo.dirstate.copies():
1383 for f in repo.dirstate.copies():
1384 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1384 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1385
1385
1386 def debugsub(ui, repo, rev=None):
1386 def debugsub(ui, repo, rev=None):
1387 ctx = cmdutil.revsingle(repo, rev, None)
1387 ctx = cmdutil.revsingle(repo, rev, None)
1388 for k, v in sorted(ctx.substate.items()):
1388 for k, v in sorted(ctx.substate.items()):
1389 ui.write('path %s\n' % k)
1389 ui.write('path %s\n' % k)
1390 ui.write(' source %s\n' % v[0])
1390 ui.write(' source %s\n' % v[0])
1391 ui.write(' revision %s\n' % v[1])
1391 ui.write(' revision %s\n' % v[1])
1392
1392
1393 def debugdag(ui, repo, file_=None, *revs, **opts):
1393 def debugdag(ui, repo, file_=None, *revs, **opts):
1394 """format the changelog or an index DAG as a concise textual description
1394 """format the changelog or an index DAG as a concise textual description
1395
1395
1396 If you pass a revlog index, the revlog's DAG is emitted. If you list
1396 If you pass a revlog index, the revlog's DAG is emitted. If you list
1397 revision numbers, they get labelled in the output as rN.
1397 revision numbers, they get labelled in the output as rN.
1398
1398
1399 Otherwise, the changelog DAG of the current repo is emitted.
1399 Otherwise, the changelog DAG of the current repo is emitted.
1400 """
1400 """
1401 spaces = opts.get('spaces')
1401 spaces = opts.get('spaces')
1402 dots = opts.get('dots')
1402 dots = opts.get('dots')
1403 if file_:
1403 if file_:
1404 rlog = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_)
1404 rlog = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_)
1405 revs = set((int(r) for r in revs))
1405 revs = set((int(r) for r in revs))
1406 def events():
1406 def events():
1407 for r in rlog:
1407 for r in rlog:
1408 yield 'n', (r, list(set(p for p in rlog.parentrevs(r) if p != -1)))
1408 yield 'n', (r, list(set(p for p in rlog.parentrevs(r) if p != -1)))
1409 if r in revs:
1409 if r in revs:
1410 yield 'l', (r, "r%i" % r)
1410 yield 'l', (r, "r%i" % r)
1411 elif repo:
1411 elif repo:
1412 cl = repo.changelog
1412 cl = repo.changelog
1413 tags = opts.get('tags')
1413 tags = opts.get('tags')
1414 branches = opts.get('branches')
1414 branches = opts.get('branches')
1415 if tags:
1415 if tags:
1416 labels = {}
1416 labels = {}
1417 for l, n in repo.tags().items():
1417 for l, n in repo.tags().items():
1418 labels.setdefault(cl.rev(n), []).append(l)
1418 labels.setdefault(cl.rev(n), []).append(l)
1419 def events():
1419 def events():
1420 b = "default"
1420 b = "default"
1421 for r in cl:
1421 for r in cl:
1422 if branches:
1422 if branches:
1423 newb = cl.read(cl.node(r))[5]['branch']
1423 newb = cl.read(cl.node(r))[5]['branch']
1424 if newb != b:
1424 if newb != b:
1425 yield 'a', newb
1425 yield 'a', newb
1426 b = newb
1426 b = newb
1427 yield 'n', (r, list(set(p for p in cl.parentrevs(r) if p != -1)))
1427 yield 'n', (r, list(set(p for p in cl.parentrevs(r) if p != -1)))
1428 if tags:
1428 if tags:
1429 ls = labels.get(r)
1429 ls = labels.get(r)
1430 if ls:
1430 if ls:
1431 for l in ls:
1431 for l in ls:
1432 yield 'l', (r, l)
1432 yield 'l', (r, l)
1433 else:
1433 else:
1434 raise util.Abort(_('need repo for changelog dag'))
1434 raise util.Abort(_('need repo for changelog dag'))
1435
1435
1436 for line in dagparser.dagtextlines(events(),
1436 for line in dagparser.dagtextlines(events(),
1437 addspaces=spaces,
1437 addspaces=spaces,
1438 wraplabels=True,
1438 wraplabels=True,
1439 wrapannotations=True,
1439 wrapannotations=True,
1440 wrapnonlinear=dots,
1440 wrapnonlinear=dots,
1441 usedots=dots,
1441 usedots=dots,
1442 maxlinewidth=70):
1442 maxlinewidth=70):
1443 ui.write(line)
1443 ui.write(line)
1444 ui.write("\n")
1444 ui.write("\n")
1445
1445
1446 def debugdata(ui, repo, file_, rev):
1446 def debugdata(ui, repo, file_, rev):
1447 """dump the contents of a data file revision"""
1447 """dump the contents of a data file revision"""
1448 r = None
1448 r = None
1449 if repo:
1449 if repo:
1450 filelog = repo.file(file_)
1450 filelog = repo.file(file_)
1451 if len(filelog):
1451 if len(filelog):
1452 r = filelog
1452 r = filelog
1453 if not r:
1453 if not r:
1454 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False),
1454 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False),
1455 file_[:-2] + ".i")
1455 file_[:-2] + ".i")
1456 try:
1456 try:
1457 ui.write(r.revision(r.lookup(rev)))
1457 ui.write(r.revision(r.lookup(rev)))
1458 except KeyError:
1458 except KeyError:
1459 raise util.Abort(_('invalid revision identifier %s') % rev)
1459 raise util.Abort(_('invalid revision identifier %s') % rev)
1460
1460
1461 def debugdate(ui, date, range=None, **opts):
1461 def debugdate(ui, date, range=None, **opts):
1462 """parse and display a date"""
1462 """parse and display a date"""
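# Example (hypothetical date; exact numbers depend on the input, and the
# --range option name is assumed from the function signature): the internal
# form is a (unixtime, offset) pair, followed by the standard rendering from
# util.datestr():
#
#   $ hg debugdate '2011-05-01 12:00 +0200'
#   internal: <unixtime> <offset>
#   standard: Sun May 01 12:00:00 2011 +0200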
1463 if opts["extended"]:
1463 if opts["extended"]:
1464 d = util.parsedate(date, util.extendeddateformats)
1464 d = util.parsedate(date, util.extendeddateformats)
1465 else:
1465 else:
1466 d = util.parsedate(date)
1466 d = util.parsedate(date)
1467 ui.write("internal: %s %s\n" % d)
1467 ui.write("internal: %s %s\n" % d)
1468 ui.write("standard: %s\n" % util.datestr(d))
1468 ui.write("standard: %s\n" % util.datestr(d))
1469 if range:
1469 if range:
1470 m = util.matchdate(range)
1470 m = util.matchdate(range)
1471 ui.write("match: %s\n" % m(d[0]))
1471 ui.write("match: %s\n" % m(d[0]))
1472
1472
1473 def debugignore(ui, repo, *values, **opts):
1473 def debugignore(ui, repo, *values, **opts):
1474 """display the combined ignore pattern"""
1474 """display the combined ignore pattern"""
1475 ignore = repo.dirstate._ignore
1475 ignore = repo.dirstate._ignore
1476 if hasattr(ignore, 'includepat'):
1476 if hasattr(ignore, 'includepat'):
1477 ui.write("%s\n" % ignore.includepat)
1477 ui.write("%s\n" % ignore.includepat)
1478 else:
1478 else:
1479 raise util.Abort(_("no ignore patterns found"))
1479 raise util.Abort(_("no ignore patterns found"))
1480
1480
1481 def debugdiscovery(ui, repo, remoteurl="default", **opts):
1481 def debugdiscovery(ui, repo, remoteurl="default", **opts):
1482 """runs the changeset discovery protocol in isolation"""
1482 """runs the changeset discovery protocol in isolation"""
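# Example (hypothetical peer and hash): compare the local repository with a
# remote and report the heads both sides already share; --old forces the
# older tree-walking discovery handled in doit() below:
#
#   $ hg debugdiscovery http://example.com/proj
#   comparing with http://example.com/proj
#   common heads: 66f7d451a68b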
1483 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl), opts.get('branch'))
1483 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl), opts.get('branch'))
1484 remote = hg.repository(hg.remoteui(repo, opts), remoteurl)
1484 remote = hg.repository(hg.remoteui(repo, opts), remoteurl)
1485 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
1485 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
1486
1486
1487 # make sure tests are repeatable
1487 # make sure tests are repeatable
1488 random.seed(12323)
1488 random.seed(12323)
1489
1489
1490 def doit(localheads, remoteheads):
1490 def doit(localheads, remoteheads):
1491 if opts.get('old'):
1491 if opts.get('old'):
1492 if localheads:
1492 if localheads:
1493 raise util.Abort('cannot use localheads with old style discovery')
1493 raise util.Abort('cannot use localheads with old style discovery')
1494 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
1494 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
1495 force=True)
1495 force=True)
1496 common = set(common)
1496 common = set(common)
1497 if not opts.get('nonheads'):
1497 if not opts.get('nonheads'):
1498 ui.write("unpruned common: %s\n" % " ".join([short(n)
1498 ui.write("unpruned common: %s\n" % " ".join([short(n)
1499 for n in common]))
1499 for n in common]))
1500 dag = dagutil.revlogdag(repo.changelog)
1500 dag = dagutil.revlogdag(repo.changelog)
1501 all = dag.ancestorset(dag.internalizeall(common))
1501 all = dag.ancestorset(dag.internalizeall(common))
1502 common = dag.externalizeall(dag.headsetofconnecteds(all))
1502 common = dag.externalizeall(dag.headsetofconnecteds(all))
1503 else:
1503 else:
1504 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
1504 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
1505 common = set(common)
1505 common = set(common)
1506 rheads = set(hds)
1506 rheads = set(hds)
1507 lheads = set(repo.heads())
1507 lheads = set(repo.heads())
1508 ui.write("common heads: %s\n" % " ".join([short(n) for n in common]))
1508 ui.write("common heads: %s\n" % " ".join([short(n) for n in common]))
1509 if lheads <= common:
1509 if lheads <= common:
1510 ui.write("local is subset\n")
1510 ui.write("local is subset\n")
1511 elif rheads <= common:
1511 elif rheads <= common:
1512 ui.write("remote is subset\n")
1512 ui.write("remote is subset\n")
1513
1513
1514 serverlogs = opts.get('serverlog')
1514 serverlogs = opts.get('serverlog')
1515 if serverlogs:
1515 if serverlogs:
1516 for filename in serverlogs:
1516 for filename in serverlogs:
1517 logfile = open(filename, 'r')
1517 logfile = open(filename, 'r')
1518 try:
1518 try:
1519 line = logfile.readline()
1519 line = logfile.readline()
1520 while line:
1520 while line:
1521 parts = line.strip().split(';')
1521 parts = line.strip().split(';')
1522 op = parts[1]
1522 op = parts[1]
1523 if op == 'cg':
1523 if op == 'cg':
1524 pass
1524 pass
1525 elif op == 'cgss':
1525 elif op == 'cgss':
1526 doit(parts[2].split(' '), parts[3].split(' '))
1526 doit(parts[2].split(' '), parts[3].split(' '))
1527 elif op == 'unb':
1527 elif op == 'unb':
1528 doit(parts[3].split(' '), parts[2].split(' '))
1528 doit(parts[3].split(' '), parts[2].split(' '))
1529 line = logfile.readline()
1529 line = logfile.readline()
1530 finally:
1530 finally:
1531 logfile.close()
1531 logfile.close()
1532
1532
1533 else:
1533 else:
1534 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
1534 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
1535 opts.get('remote_head'))
1535 opts.get('remote_head'))
1536 localrevs = opts.get('local_head')
1536 localrevs = opts.get('local_head')
1537 doit(localrevs, remoterevs)
1537 doit(localrevs, remoterevs)
1538
1538
1539
1539
1540 def debugindex(ui, repo, file_, **opts):
1540 def debugindex(ui, repo, file_, **opts):
1541 """dump the contents of an index file"""
1541 """dump the contents of an index file"""
1542 r = None
1542 r = None
1543 if repo:
1543 if repo:
1544 filelog = repo.file(file_)
1544 filelog = repo.file(file_)
1545 if len(filelog):
1545 if len(filelog):
1546 r = filelog
1546 r = filelog
1547
1547
1548 format = opts.get('format', 0)
1548 format = opts.get('format', 0)
1549 if format not in (0, 1):
1549 if format not in (0, 1):
1550 raise util.Abort(_("unknown format %d") % format)
1550 raise util.Abort(_("unknown format %d") % format)
1551
1551
1552 if not r:
1552 if not r:
1553 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_)
1553 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_)
1554
1554
1555 if format == 0:
1555 if format == 0:
1556 ui.write(" rev offset length base linkrev"
1556 ui.write(" rev offset length base linkrev"
1557 " nodeid p1 p2\n")
1557 " nodeid p1 p2\n")
1558 elif format == 1:
1558 elif format == 1:
1559 ui.write(" rev flag offset length"
1559 ui.write(" rev flag offset length"
1560 " size base link p1 p2 nodeid\n")
1560 " size base link p1 p2 nodeid\n")
1561
1561
1562 for i in r:
1562 for i in r:
1563 node = r.node(i)
1563 node = r.node(i)
1564 if format == 0:
1564 if format == 0:
1565 try:
1565 try:
1566 pp = r.parents(node)
1566 pp = r.parents(node)
1567 except Exception: # fall back to null parents if the lookup fails
1567 except Exception: # fall back to null parents if the lookup fails
1568 pp = [nullid, nullid]
1568 pp = [nullid, nullid]
1569 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1569 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1570 i, r.start(i), r.length(i), r.base(i), r.linkrev(i),
1570 i, r.start(i), r.length(i), r.base(i), r.linkrev(i),
1571 short(node), short(pp[0]), short(pp[1])))
1571 short(node), short(pp[0]), short(pp[1])))
1572 elif format == 1:
1572 elif format == 1:
1573 pr = r.parentrevs(i)
1573 pr = r.parentrevs(i)
1574 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
1574 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
1575 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
1575 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
1576 r.base(i), r.linkrev(i), pr[0], pr[1], short(node)))
1576 r.base(i), r.linkrev(i), pr[0], pr[1], short(node)))
1577
1577
1578 def debugindexdot(ui, repo, file_):
1578 def debugindexdot(ui, repo, file_):
1579 """dump an index DAG as a graphviz dot file"""
1579 """dump an index DAG as a graphviz dot file"""
1580 r = None
1580 r = None
1581 if repo:
1581 if repo:
1582 filelog = repo.file(file_)
1582 filelog = repo.file(file_)
1583 if len(filelog):
1583 if len(filelog):
1584 r = filelog
1584 r = filelog
1585 if not r:
1585 if not r:
1586 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_)
1586 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_)
1587 ui.write("digraph G {\n")
1587 ui.write("digraph G {\n")
1588 for i in r:
1588 for i in r:
1589 node = r.node(i)
1589 node = r.node(i)
1590 pp = r.parents(node)
1590 pp = r.parents(node)
1591 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1591 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1592 if pp[1] != nullid:
1592 if pp[1] != nullid:
1593 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1593 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1594 ui.write("}\n")
1594 ui.write("}\n")
1595
1595
1596 def debuginstall(ui):
1596 def debuginstall(ui):
1597 '''test Mercurial installation
1597 '''test Mercurial installation
1598
1598
1599 Returns 0 on success.
1599 Returns 0 on success.
1600 '''
1600 '''
1601
1601
1602 def writetemp(contents):
1602 def writetemp(contents):
1603 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
1603 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
1604 f = os.fdopen(fd, "wb")
1604 f = os.fdopen(fd, "wb")
1605 f.write(contents)
1605 f.write(contents)
1606 f.close()
1606 f.close()
1607 return name
1607 return name
1608
1608
1609 problems = 0
1609 problems = 0
1610
1610
1611 # encoding
1611 # encoding
1612 ui.status(_("Checking encoding (%s)...\n") % encoding.encoding)
1612 ui.status(_("Checking encoding (%s)...\n") % encoding.encoding)
1613 try:
1613 try:
1614 encoding.fromlocal("test")
1614 encoding.fromlocal("test")
1615 except util.Abort, inst:
1615 except util.Abort, inst:
1616 ui.write(" %s\n" % inst)
1616 ui.write(" %s\n" % inst)
1617 ui.write(_(" (check that your locale is properly set)\n"))
1617 ui.write(_(" (check that your locale is properly set)\n"))
1618 problems += 1
1618 problems += 1
1619
1619
1620 # compiled modules
1620 # compiled modules
1621 ui.status(_("Checking installed modules (%s)...\n")
1621 ui.status(_("Checking installed modules (%s)...\n")
1622 % os.path.dirname(__file__))
1622 % os.path.dirname(__file__))
1623 try:
1623 try:
1624 import bdiff, mpatch, base85, osutil
1624 import bdiff, mpatch, base85, osutil
1625 except Exception, inst:
1625 except Exception, inst:
1626 ui.write(" %s\n" % inst)
1626 ui.write(" %s\n" % inst)
1627 ui.write(_(" One or more extensions could not be found"))
1627 ui.write(_(" One or more extensions could not be found"))
1628 ui.write(_(" (check that you compiled the extensions)\n"))
1628 ui.write(_(" (check that you compiled the extensions)\n"))
1629 problems += 1
1629 problems += 1
1630
1630
1631 # templates
1631 # templates
1632 ui.status(_("Checking templates...\n"))
1632 ui.status(_("Checking templates...\n"))
1633 try:
1633 try:
1634 import templater
1634 import templater
1635 templater.templater(templater.templatepath("map-cmdline.default"))
1635 templater.templater(templater.templatepath("map-cmdline.default"))
1636 except Exception, inst:
1636 except Exception, inst:
1637 ui.write(" %s\n" % inst)
1637 ui.write(" %s\n" % inst)
1638 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
1638 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
1639 problems += 1
1639 problems += 1
1640
1640
1641 # editor
1641 # editor
1642 ui.status(_("Checking commit editor...\n"))
1642 ui.status(_("Checking commit editor...\n"))
1643 editor = ui.geteditor()
1643 editor = ui.geteditor()
1644 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
1644 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
1645 if not cmdpath:
1645 if not cmdpath:
1646 if editor == 'vi':
1646 if editor == 'vi':
1647 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
1647 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
1648 ui.write(_(" (specify a commit editor in your configuration"
1648 ui.write(_(" (specify a commit editor in your configuration"
1649 " file)\n"))
1649 " file)\n"))
1650 else:
1650 else:
1651 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
1651 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
1652 ui.write(_(" (specify a commit editor in your configuration"
1652 ui.write(_(" (specify a commit editor in your configuration"
1653 " file)\n"))
1653 " file)\n"))
1654 problems += 1
1654 problems += 1
1655
1655
1656 # check username
1656 # check username
1657 ui.status(_("Checking username...\n"))
1657 ui.status(_("Checking username...\n"))
1658 try:
1658 try:
1659 ui.username()
1659 ui.username()
1660 except util.Abort, e:
1660 except util.Abort, e:
1661 ui.write(" %s\n" % e)
1661 ui.write(" %s\n" % e)
1662 ui.write(_(" (specify a username in your configuration file)\n"))
1662 ui.write(_(" (specify a username in your configuration file)\n"))
1663 problems += 1
1663 problems += 1
1664
1664
1665 if not problems:
1665 if not problems:
1666 ui.status(_("No problems detected\n"))
1666 ui.status(_("No problems detected\n"))
1667 else:
1667 else:
1668 ui.write(_("%s problems detected,"
1668 ui.write(_("%s problems detected,"
1669 " please check your install!\n") % problems)
1669 " please check your install!\n") % problems)
1670
1670
1671 return problems
1671 return problems
1672
1672
1673 def debugrename(ui, repo, file1, *pats, **opts):
1673 def debugrename(ui, repo, file1, *pats, **opts):
1674 """dump rename information"""
1674 """dump rename information"""
1675
1675
1676 ctx = cmdutil.revsingle(repo, opts.get('rev'))
1676 ctx = cmdutil.revsingle(repo, opts.get('rev'))
1677 m = cmdutil.match(repo, (file1,) + pats, opts)
1677 m = cmdutil.match(repo, (file1,) + pats, opts)
1678 for abs in ctx.walk(m):
1678 for abs in ctx.walk(m):
1679 fctx = ctx[abs]
1679 fctx = ctx[abs]
1680 o = fctx.filelog().renamed(fctx.filenode())
1680 o = fctx.filelog().renamed(fctx.filenode())
1681 rel = m.rel(abs)
1681 rel = m.rel(abs)
1682 if o:
1682 if o:
1683 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1683 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1684 else:
1684 else:
1685 ui.write(_("%s not renamed\n") % rel)
1685 ui.write(_("%s not renamed\n") % rel)
1686
1686
1687 def debugwalk(ui, repo, *pats, **opts):
1687 def debugwalk(ui, repo, *pats, **opts):
1688 """show how files match on given patterns"""
1688 """show how files match on given patterns"""
1689 m = cmdutil.match(repo, pats, opts)
1689 m = cmdutil.match(repo, pats, opts)
1690 items = list(repo.walk(m))
1690 items = list(repo.walk(m))
1691 if not items:
1691 if not items:
1692 return
1692 return
1693 fmt = 'f %%-%ds %%-%ds %%s' % (
1693 fmt = 'f %%-%ds %%-%ds %%s' % (
1694 max([len(abs) for abs in items]),
1694 max([len(abs) for abs in items]),
1695 max([len(m.rel(abs)) for abs in items]))
1695 max([len(m.rel(abs)) for abs in items]))
1696 for abs in items:
1696 for abs in items:
1697 line = fmt % (abs, m.rel(abs), m.exact(abs) and 'exact' or '')
1697 line = fmt % (abs, m.rel(abs), m.exact(abs) and 'exact' or '')
1698 ui.write("%s\n" % line.rstrip())
1698 ui.write("%s\n" % line.rstrip())
1699
1699
1700 def debugwireargs(ui, repopath, *vals, **opts):
1700 def debugwireargs(ui, repopath, *vals, **opts):
1701 repo = hg.repository(hg.remoteui(ui, opts), repopath)
1701 repo = hg.repository(hg.remoteui(ui, opts), repopath)
1702 for opt in remoteopts:
1702 for opt in remoteopts:
1703 del opts[opt[1]]
1703 del opts[opt[1]]
1704 args = {}
1704 args = {}
1705 for k, v in opts.iteritems():
1705 for k, v in opts.iteritems():
1706 if v:
1706 if v:
1707 args[k] = v
1707 args[k] = v
1708 # run twice to check that we don't mess up the stream for the next command
1708 # run twice to check that we don't mess up the stream for the next command
1709 res1 = repo.debugwireargs(*vals, **args)
1709 res1 = repo.debugwireargs(*vals, **args)
1710 res2 = repo.debugwireargs(*vals, **args)
1710 res2 = repo.debugwireargs(*vals, **args)
1711 ui.write("%s\n" % res1)
1711 ui.write("%s\n" % res1)
1712 if res1 != res2:
1712 if res1 != res2:
1713 ui.warn("%s\n" % res2)
1713 ui.warn("%s\n" % res2)
1714
1714
1715 def diff(ui, repo, *pats, **opts):
1715 def diff(ui, repo, *pats, **opts):
1716 """diff repository (or selected files)
1716 """diff repository (or selected files)
1717
1717
1718 Show differences between revisions for the specified files.
1718 Show differences between revisions for the specified files.
1719
1719
1720 Differences between files are shown using the unified diff format.
1720 Differences between files are shown using the unified diff format.
1721
1721
1722 .. note::
1722 .. note::
1723 diff may generate unexpected results for merges, as it will
1723 diff may generate unexpected results for merges, as it will
1724 default to comparing against the working directory's first
1724 default to comparing against the working directory's first
1725 parent changeset if no revisions are specified.
1725 parent changeset if no revisions are specified.
1726
1726
1727 When two revision arguments are given, then changes are shown
1727 When two revision arguments are given, then changes are shown
1728 between those revisions. If only one revision is specified then
1728 between those revisions. If only one revision is specified then
1729 that revision is compared to the working directory, and, when no
1729 that revision is compared to the working directory, and, when no
1730 revisions are specified, the working directory files are compared
1730 revisions are specified, the working directory files are compared
1731 to its parent.
1731 to its parent.
1732
1732
1733 Alternatively you can specify -c/--change with a revision to see
1733 Alternatively you can specify -c/--change with a revision to see
1734 the changes in that changeset relative to its first parent.
1734 the changes in that changeset relative to its first parent.
1735
1735
1736 Without the -a/--text option, diff will avoid generating diffs of
1736 Without the -a/--text option, diff will avoid generating diffs of
1737 files it detects as binary. With -a, diff will generate a diff
1737 files it detects as binary. With -a, diff will generate a diff
1738 anyway, probably with undesirable results.
1738 anyway, probably with undesirable results.
1739
1739
1740 Use the -g/--git option to generate diffs in the git extended diff
1740 Use the -g/--git option to generate diffs in the git extended diff
1741 format. For more information, read :hg:`help diffs`.
1741 format. For more information, read :hg:`help diffs`.
1742
1742
1743 Returns 0 on success.
1743 Returns 0 on success.
1744 """
1744 """
1745
1745
1746 revs = opts.get('rev')
1746 revs = opts.get('rev')
1747 change = opts.get('change')
1747 change = opts.get('change')
1748 stat = opts.get('stat')
1748 stat = opts.get('stat')
1749 reverse = opts.get('reverse')
1749 reverse = opts.get('reverse')
1750
1750
1751 if revs and change:
1751 if revs and change:
1752 msg = _('cannot specify --rev and --change at the same time')
1752 msg = _('cannot specify --rev and --change at the same time')
1753 raise util.Abort(msg)
1753 raise util.Abort(msg)
1754 elif change:
1754 elif change:
1755 node2 = cmdutil.revsingle(repo, change, None).node()
1755 node2 = cmdutil.revsingle(repo, change, None).node()
1756 node1 = repo[node2].p1().node()
1756 node1 = repo[node2].p1().node()
1757 else:
1757 else:
1758 node1, node2 = cmdutil.revpair(repo, revs)
1758 node1, node2 = cmdutil.revpair(repo, revs)
1759
1759
1760 if reverse:
1760 if reverse:
1761 node1, node2 = node2, node1
1761 node1, node2 = node2, node1
1762
1762
1763 diffopts = patch.diffopts(ui, opts)
1763 diffopts = patch.diffopts(ui, opts)
1764 m = cmdutil.match(repo, pats, opts)
1764 m = cmdutil.match(repo, pats, opts)
1765 cmdutil.diffordiffstat(ui, repo, diffopts, node1, node2, m, stat=stat,
1765 cmdutil.diffordiffstat(ui, repo, diffopts, node1, node2, m, stat=stat,
1766 listsubrepos=opts.get('subrepos'))
1766 listsubrepos=opts.get('subrepos'))
1767
1767
1768 def export(ui, repo, *changesets, **opts):
1768 def export(ui, repo, *changesets, **opts):
1769 """dump the header and diffs for one or more changesets
1769 """dump the header and diffs for one or more changesets
1770
1770
1771 Print the changeset header and diffs for one or more revisions.
1771 Print the changeset header and diffs for one or more revisions.
1772
1772
1773 The information shown in the changeset header is: author, date,
1773 The information shown in the changeset header is: author, date,
1774 branch name (if non-default), changeset hash, parent(s) and commit
1774 branch name (if non-default), changeset hash, parent(s) and commit
1775 comment.
1775 comment.
1776
1776
1777 .. note::
1777 .. note::
1778 export may generate unexpected diff output for merge
1778 export may generate unexpected diff output for merge
1779 changesets, as it will compare the merge changeset against its
1779 changesets, as it will compare the merge changeset against its
1780 first parent only.
1780 first parent only.
1781
1781
1782 Output may be to a file, in which case the name of the file is
1782 Output may be to a file, in which case the name of the file is
1783 given using a format string. The formatting rules are as follows:
1783 given using a format string. The formatting rules are as follows:
1784
1784
1785 :``%%``: literal "%" character
1785 :``%%``: literal "%" character
1786 :``%H``: changeset hash (40 hexadecimal digits)
1786 :``%H``: changeset hash (40 hexadecimal digits)
1787 :``%N``: number of patches being generated
1787 :``%N``: number of patches being generated
1788 :``%R``: changeset revision number
1788 :``%R``: changeset revision number
1789 :``%b``: basename of the exporting repository
1789 :``%b``: basename of the exporting repository
1790 :``%h``: short-form changeset hash (12 hexadecimal digits)
1790 :``%h``: short-form changeset hash (12 hexadecimal digits)
1791 :``%n``: zero-padded sequence number, starting at 1
1791 :``%n``: zero-padded sequence number, starting at 1
1792 :``%r``: zero-padded changeset revision number
1792 :``%r``: zero-padded changeset revision number
1793
1793
1794 Without the -a/--text option, export will avoid generating diffs
1794 Without the -a/--text option, export will avoid generating diffs
1795 of files it detects as binary. With -a, export will generate a
1795 of files it detects as binary. With -a, export will generate a
1796 diff anyway, probably with undesirable results.
1796 diff anyway, probably with undesirable results.
1797
1797
1798 Use the -g/--git option to generate diffs in the git extended diff
1798 Use the -g/--git option to generate diffs in the git extended diff
1799 format. See :hg:`help diffs` for more information.
1799 format. See :hg:`help diffs` for more information.
1800
1800
1801 With the --switch-parent option, the diff will be against the
1801 With the --switch-parent option, the diff will be against the
1802 second parent. It can be useful to review a merge.
1802 second parent. It can be useful to review a merge.
1803
1803
1804 Returns 0 on success.
1804 Returns 0 on success.
1805 """
1805 """
1806 changesets += tuple(opts.get('rev', []))
1806 changesets += tuple(opts.get('rev', []))
1807 if not changesets:
1807 if not changesets:
1808 raise util.Abort(_("export requires at least one changeset"))
1808 raise util.Abort(_("export requires at least one changeset"))
1809 revs = cmdutil.revrange(repo, changesets)
1809 revs = cmdutil.revrange(repo, changesets)
1810 if len(revs) > 1:
1810 if len(revs) > 1:
1811 ui.note(_('exporting patches:\n'))
1811 ui.note(_('exporting patches:\n'))
1812 else:
1812 else:
1813 ui.note(_('exporting patch:\n'))
1813 ui.note(_('exporting patch:\n'))
1814 cmdutil.export(repo, revs, template=opts.get('output'),
1814 cmdutil.export(repo, revs, template=opts.get('output'),
1815 switch_parent=opts.get('switch_parent'),
1815 switch_parent=opts.get('switch_parent'),
1816 opts=patch.diffopts(ui, opts))
1816 opts=patch.diffopts(ui, opts))
1817
1817
1818 def forget(ui, repo, *pats, **opts):
1818 def forget(ui, repo, *pats, **opts):
1819 """forget the specified files on the next commit
1819 """forget the specified files on the next commit
1820
1820
1821 Mark the specified files so they will no longer be tracked
1821 Mark the specified files so they will no longer be tracked
1822 after the next commit.
1822 after the next commit.
1823
1823
1824 This only removes files from the current branch, not from the
1824 This only removes files from the current branch, not from the
1825 entire project history, and it does not delete them from the
1825 entire project history, and it does not delete them from the
1826 working directory.
1826 working directory.
1827
1827
1828 To undo a forget before the next commit, see :hg:`add`.
1828 To undo a forget before the next commit, see :hg:`add`.
1829
1829
1830 Returns 0 on success.
1830 Returns 0 on success.
1831 """
1831 """
1832
1832
1833 if not pats:
1833 if not pats:
1834 raise util.Abort(_('no files specified'))
1834 raise util.Abort(_('no files specified'))
1835
1835
1836 m = cmdutil.match(repo, pats, opts)
1836 m = cmdutil.match(repo, pats, opts)
1837 s = repo.status(match=m, clean=True)
1837 s = repo.status(match=m, clean=True)
1838 forget = sorted(s[0] + s[1] + s[3] + s[6])
1838 forget = sorted(s[0] + s[1] + s[3] + s[6])
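# (editorial note) repo.status() returns the tuple (modified, added, removed,
# deleted, unknown, ignored, clean); s[0] + s[1] + s[3] + s[6] therefore
# collects the matched files that are still tracked and can be forgotten.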
1839 errs = 0
1839 errs = 0
1840
1840
1841 for f in m.files():
1841 for f in m.files():
1842 if f not in repo.dirstate and not os.path.isdir(m.rel(f)):
1842 if f not in repo.dirstate and not os.path.isdir(m.rel(f)):
1843 ui.warn(_('not removing %s: file is already untracked\n')
1843 ui.warn(_('not removing %s: file is already untracked\n')
1844 % m.rel(f))
1844 % m.rel(f))
1845 errs = 1
1845 errs = 1
1846
1846
1847 for f in forget:
1847 for f in forget:
1848 if ui.verbose or not m.exact(f):
1848 if ui.verbose or not m.exact(f):
1849 ui.status(_('removing %s\n') % m.rel(f))
1849 ui.status(_('removing %s\n') % m.rel(f))
1850
1850
1851 repo[None].remove(forget, unlink=False)
1851 repo[None].remove(forget, unlink=False)
1852 return errs
1852 return errs
1853
1853
1854 def grep(ui, repo, pattern, *pats, **opts):
1854 def grep(ui, repo, pattern, *pats, **opts):
1855 """search for a pattern in specified files and revisions
1855 """search for a pattern in specified files and revisions
1856
1856
1857 Search revisions of files for a regular expression.
1857 Search revisions of files for a regular expression.
1858
1858
1859 This command behaves differently than Unix grep. It only accepts
1859 This command behaves differently than Unix grep. It only accepts
1860 Python/Perl regexps. It searches repository history, not the
1860 Python/Perl regexps. It searches repository history, not the
1861 working directory. It always prints the revision number in which a
1861 working directory. It always prints the revision number in which a
1862 match appears.
1862 match appears.
1863
1863
1864 By default, grep only prints output for the first revision of a
1864 By default, grep only prints output for the first revision of a
1865 file in which it finds a match. To get it to print every revision
1865 file in which it finds a match. To get it to print every revision
1866 that contains a change in match status ("-" for a match that
1866 that contains a change in match status ("-" for a match that
1867 becomes a non-match, or "+" for a non-match that becomes a match),
1867 becomes a non-match, or "+" for a non-match that becomes a match),
1868 use the --all flag.
1868 use the --all flag.
1869
1869
1870 Returns 0 if a match is found, 1 otherwise.
1870 Returns 0 if a match is found, 1 otherwise.
1871 """
1871 """
1872 reflags = 0
1872 reflags = 0
1873 if opts.get('ignore_case'):
1873 if opts.get('ignore_case'):
1874 reflags |= re.I
1874 reflags |= re.I
1875 try:
1875 try:
1876 regexp = re.compile(pattern, reflags)
1876 regexp = re.compile(pattern, reflags)
1877 except re.error, inst:
1877 except re.error, inst:
1878 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
1878 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
1879 return 1
1879 return 1
1880 sep, eol = ':', '\n'
1880 sep, eol = ':', '\n'
1881 if opts.get('print0'):
1881 if opts.get('print0'):
1882 sep = eol = '\0'
1882 sep = eol = '\0'
1883
1883
1884 getfile = util.lrucachefunc(repo.file)
1884 getfile = util.lrucachefunc(repo.file)
1885
1885
1886 def matchlines(body):
1886 def matchlines(body):
1887 begin = 0
1887 begin = 0
1888 linenum = 0
1888 linenum = 0
1889 while True:
1889 while True:
1890 match = regexp.search(body, begin)
1890 match = regexp.search(body, begin)
1891 if not match:
1891 if not match:
1892 break
1892 break
1893 mstart, mend = match.span()
1893 mstart, mend = match.span()
1894 linenum += body.count('\n', begin, mstart) + 1
1894 linenum += body.count('\n', begin, mstart) + 1
1895 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1895 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1896 begin = body.find('\n', mend) + 1 or len(body)
1896 begin = body.find('\n', mend) + 1 or len(body)
1897 lend = begin - 1
1897 lend = begin - 1
1898 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1898 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1899
1899
1900 class linestate(object):
1900 class linestate(object):
1901 def __init__(self, line, linenum, colstart, colend):
1901 def __init__(self, line, linenum, colstart, colend):
1902 self.line = line
1902 self.line = line
1903 self.linenum = linenum
1903 self.linenum = linenum
1904 self.colstart = colstart
1904 self.colstart = colstart
1905 self.colend = colend
1905 self.colend = colend
1906
1906
1907 def __hash__(self):
1907 def __hash__(self):
1908 return hash((self.linenum, self.line))
1908 return hash((self.linenum, self.line))
1909
1909
1910 def __eq__(self, other):
1910 def __eq__(self, other):
1911 return self.line == other.line
1911 return self.line == other.line
1912
1912
1913 matches = {}
1913 matches = {}
1914 copies = {}
1914 copies = {}
1915 def grepbody(fn, rev, body):
1915 def grepbody(fn, rev, body):
1916 matches[rev].setdefault(fn, [])
1916 matches[rev].setdefault(fn, [])
1917 m = matches[rev][fn]
1917 m = matches[rev][fn]
1918 for lnum, cstart, cend, line in matchlines(body):
1918 for lnum, cstart, cend, line in matchlines(body):
1919 s = linestate(line, lnum, cstart, cend)
1919 s = linestate(line, lnum, cstart, cend)
1920 m.append(s)
1920 m.append(s)
1921
1921
1922 def difflinestates(a, b):
1922 def difflinestates(a, b):
1923 sm = difflib.SequenceMatcher(None, a, b)
1923 sm = difflib.SequenceMatcher(None, a, b)
1924 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1924 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1925 if tag == 'insert':
1925 if tag == 'insert':
1926 for i in xrange(blo, bhi):
1926 for i in xrange(blo, bhi):
1927 yield ('+', b[i])
1927 yield ('+', b[i])
1928 elif tag == 'delete':
1928 elif tag == 'delete':
1929 for i in xrange(alo, ahi):
1929 for i in xrange(alo, ahi):
1930 yield ('-', a[i])
1930 yield ('-', a[i])
1931 elif tag == 'replace':
1931 elif tag == 'replace':
1932 for i in xrange(alo, ahi):
1932 for i in xrange(alo, ahi):
1933 yield ('-', a[i])
1933 yield ('-', a[i])
1934 for i in xrange(blo, bhi):
1934 for i in xrange(blo, bhi):
1935 yield ('+', b[i])
1935 yield ('+', b[i])
1936
1936
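# (editorial illustration, not part of the original source) difflinestates
# compares the parent's and the child's lists of matches with SequenceMatcher;
# e.g. for a == [] and b == [<match>], the only opcode is 'insert', so it
# yields ('+', <match>), which display() renders as a "+" change marker when
# --all is in effect.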
1937 def display(fn, ctx, pstates, states):
1937 def display(fn, ctx, pstates, states):
1938 rev = ctx.rev()
1938 rev = ctx.rev()
1939 datefunc = ui.quiet and util.shortdate or util.datestr
1939 datefunc = ui.quiet and util.shortdate or util.datestr
1940 found = False
1940 found = False
1941 filerevmatches = {}
1941 filerevmatches = {}
1942 def binary():
1942 def binary():
1943 flog = getfile(fn)
1943 flog = getfile(fn)
1944 return util.binary(flog.read(ctx.filenode(fn)))
1944 return util.binary(flog.read(ctx.filenode(fn)))
1945
1945
1946 if opts.get('all'):
1946 if opts.get('all'):
1947 iter = difflinestates(pstates, states)
1947 iter = difflinestates(pstates, states)
1948 else:
1948 else:
1949 iter = [('', l) for l in states]
1949 iter = [('', l) for l in states]
1950 for change, l in iter:
1950 for change, l in iter:
1951 cols = [fn, str(rev)]
1951 cols = [fn, str(rev)]
1952 before, match, after = None, None, None
1952 before, match, after = None, None, None
1953 if opts.get('line_number'):
1953 if opts.get('line_number'):
1954 cols.append(str(l.linenum))
1954 cols.append(str(l.linenum))
1955 if opts.get('all'):
1955 if opts.get('all'):
1956 cols.append(change)
1956 cols.append(change)
1957 if opts.get('user'):
1957 if opts.get('user'):
1958 cols.append(ui.shortuser(ctx.user()))
1958 cols.append(ui.shortuser(ctx.user()))
1959 if opts.get('date'):
1959 if opts.get('date'):
1960 cols.append(datefunc(ctx.date()))
1960 cols.append(datefunc(ctx.date()))
1961 if opts.get('files_with_matches'):
1961 if opts.get('files_with_matches'):
1962 c = (fn, rev)
1962 c = (fn, rev)
1963 if c in filerevmatches:
1963 if c in filerevmatches:
1964 continue
1964 continue
1965 filerevmatches[c] = 1
1965 filerevmatches[c] = 1
1966 else:
1966 else:
1967 before = l.line[:l.colstart]
1967 before = l.line[:l.colstart]
1968 match = l.line[l.colstart:l.colend]
1968 match = l.line[l.colstart:l.colend]
1969 after = l.line[l.colend:]
1969 after = l.line[l.colend:]
1970 ui.write(sep.join(cols))
1970 ui.write(sep.join(cols))
1971 if before is not None:
1971 if before is not None:
1972 if not opts.get('text') and binary():
1972 if not opts.get('text') and binary():
1973 ui.write(sep + " Binary file matches")
1973 ui.write(sep + " Binary file matches")
1974 else:
1974 else:
1975 ui.write(sep + before)
1975 ui.write(sep + before)
1976 ui.write(match, label='grep.match')
1976 ui.write(match, label='grep.match')
1977 ui.write(after)
1977 ui.write(after)
1978 ui.write(eol)
1978 ui.write(eol)
1979 found = True
1979 found = True
1980 return found
1980 return found
1981
1981
1982 skip = {}
1982 skip = {}
1983 revfiles = {}
1983 revfiles = {}
1984 matchfn = cmdutil.match(repo, pats, opts)
1984 matchfn = cmdutil.match(repo, pats, opts)
1985 found = False
1985 found = False
1986 follow = opts.get('follow')
1986 follow = opts.get('follow')
1987
1987
1988 def prep(ctx, fns):
1988 def prep(ctx, fns):
1989 rev = ctx.rev()
1989 rev = ctx.rev()
1990 pctx = ctx.p1()
1990 pctx = ctx.p1()
1991 parent = pctx.rev()
1991 parent = pctx.rev()
1992 matches.setdefault(rev, {})
1992 matches.setdefault(rev, {})
1993 matches.setdefault(parent, {})
1993 matches.setdefault(parent, {})
1994 files = revfiles.setdefault(rev, [])
1994 files = revfiles.setdefault(rev, [])
1995 for fn in fns:
1995 for fn in fns:
1996 flog = getfile(fn)
1996 flog = getfile(fn)
1997 try:
1997 try:
1998 fnode = ctx.filenode(fn)
1998 fnode = ctx.filenode(fn)
1999 except error.LookupError:
1999 except error.LookupError:
2000 continue
2000 continue
2001
2001
2002 copied = flog.renamed(fnode)
2002 copied = flog.renamed(fnode)
2003 copy = follow and copied and copied[0]
2003 copy = follow and copied and copied[0]
2004 if copy:
2004 if copy:
2005 copies.setdefault(rev, {})[fn] = copy
2005 copies.setdefault(rev, {})[fn] = copy
2006 if fn in skip:
2006 if fn in skip:
2007 if copy:
2007 if copy:
2008 skip[copy] = True
2008 skip[copy] = True
2009 continue
2009 continue
2010 files.append(fn)
2010 files.append(fn)
2011
2011
2012 if fn not in matches[rev]:
2012 if fn not in matches[rev]:
2013 grepbody(fn, rev, flog.read(fnode))
2013 grepbody(fn, rev, flog.read(fnode))
2014
2014
2015 pfn = copy or fn
2015 pfn = copy or fn
2016 if pfn not in matches[parent]:
2016 if pfn not in matches[parent]:
2017 try:
2017 try:
2018 fnode = pctx.filenode(pfn)
2018 fnode = pctx.filenode(pfn)
2019 grepbody(pfn, parent, flog.read(fnode))
2019 grepbody(pfn, parent, flog.read(fnode))
2020 except error.LookupError:
2020 except error.LookupError:
2021 pass
2021 pass
2022
2022
2023 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
2023 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
2024 rev = ctx.rev()
2024 rev = ctx.rev()
2025 parent = ctx.p1().rev()
2025 parent = ctx.p1().rev()
2026 for fn in sorted(revfiles.get(rev, [])):
2026 for fn in sorted(revfiles.get(rev, [])):
2027 states = matches[rev][fn]
2027 states = matches[rev][fn]
2028 copy = copies.get(rev, {}).get(fn)
2028 copy = copies.get(rev, {}).get(fn)
2029 if fn in skip:
2029 if fn in skip:
2030 if copy:
2030 if copy:
2031 skip[copy] = True
2031 skip[copy] = True
2032 continue
2032 continue
2033 pstates = matches.get(parent, {}).get(copy or fn, [])
2033 pstates = matches.get(parent, {}).get(copy or fn, [])
2034 if pstates or states:
2034 if pstates or states:
2035 r = display(fn, ctx, pstates, states)
2035 r = display(fn, ctx, pstates, states)
2036 found = found or r
2036 found = found or r
2037 if r and not opts.get('all'):
2037 if r and not opts.get('all'):
2038 skip[fn] = True
2038 skip[fn] = True
2039 if copy:
2039 if copy:
2040 skip[copy] = True
2040 skip[copy] = True
2041 del matches[rev]
2041 del matches[rev]
2042 del revfiles[rev]
2042 del revfiles[rev]
2043
2043
2044 return not found
2044 return not found
2045
2045
2046 def heads(ui, repo, *branchrevs, **opts):
2046 def heads(ui, repo, *branchrevs, **opts):
2047 """show current repository heads or show branch heads
2047 """show current repository heads or show branch heads
2048
2048
2049 With no arguments, show all repository branch heads.
2049 With no arguments, show all repository branch heads.
2050
2050
2051 Repository "heads" are changesets with no child changesets. They are
2051 Repository "heads" are changesets with no child changesets. They are
2052 where development generally takes place and are the usual targets
2052 where development generally takes place and are the usual targets
2053 for update and merge operations. Branch heads are changesets that have
2053 for update and merge operations. Branch heads are changesets that have
2054 no child changeset on the same branch.
2054 no child changeset on the same branch.
2055
2055
2056 If one or more REVs are given, only branch heads on the branches
2056 If one or more REVs are given, only branch heads on the branches
2057 associated with the specified changesets are shown.
2057 associated with the specified changesets are shown.
2058
2058
2059 If -c/--closed is specified, also show branch heads marked closed
2059 If -c/--closed is specified, also show branch heads marked closed
2060 (see :hg:`commit --close-branch`).
2060 (see :hg:`commit --close-branch`).
2061
2061
2062 If STARTREV is specified, only those heads that are descendants of
2062 If STARTREV is specified, only those heads that are descendants of
2063 STARTREV will be displayed.
2063 STARTREV will be displayed.
2064
2064
2065 If -t/--topo is specified, named branch mechanics will be ignored and only
2065 If -t/--topo is specified, named branch mechanics will be ignored and only
2066 changesets without children will be shown.
2066 changesets without children will be shown.
2067
2067
2068 Returns 0 if matching heads are found, 1 if not.
2068 Returns 0 if matching heads are found, 1 if not.
2069 """
2069 """
2070
2070
2071 start = None
2071 start = None
2072 if 'rev' in opts:
2072 if 'rev' in opts:
2073 start = cmdutil.revsingle(repo, opts['rev'], None).node()
2073 start = cmdutil.revsingle(repo, opts['rev'], None).node()
2074
2074
2075 if opts.get('topo'):
2075 if opts.get('topo'):
2076 heads = [repo[h] for h in repo.heads(start)]
2076 heads = [repo[h] for h in repo.heads(start)]
2077 else:
2077 else:
2078 heads = []
2078 heads = []
2079 for b, ls in repo.branchmap().iteritems():
2079 for b, ls in repo.branchmap().iteritems():
2080 if start is None:
2080 if start is None:
2081 heads += [repo[h] for h in ls]
2081 heads += [repo[h] for h in ls]
2082 continue
2082 continue
2083 startrev = repo.changelog.rev(start)
2083 startrev = repo.changelog.rev(start)
2084 descendants = set(repo.changelog.descendants(startrev))
2084 descendants = set(repo.changelog.descendants(startrev))
2085 descendants.add(startrev)
2085 descendants.add(startrev)
2086 rev = repo.changelog.rev
2086 rev = repo.changelog.rev
2087 heads += [repo[h] for h in ls if rev(h) in descendants]
2087 heads += [repo[h] for h in ls if rev(h) in descendants]
2088
2088
2089 if branchrevs:
2089 if branchrevs:
2090 branches = set(repo[br].branch() for br in branchrevs)
2090 branches = set(repo[br].branch() for br in branchrevs)
2091 heads = [h for h in heads if h.branch() in branches]
2091 heads = [h for h in heads if h.branch() in branches]
2092
2092
2093 if not opts.get('closed'):
2093 if not opts.get('closed'):
2094 heads = [h for h in heads if not h.extra().get('close')]
2094 heads = [h for h in heads if not h.extra().get('close')]
2095
2095
2096 if opts.get('active') and branchrevs:
2096 if opts.get('active') and branchrevs:
2097 dagheads = repo.heads(start)
2097 dagheads = repo.heads(start)
2098 heads = [h for h in heads if h.node() in dagheads]
2098 heads = [h for h in heads if h.node() in dagheads]
2099
2099
2100 if branchrevs:
2100 if branchrevs:
2101 haveheads = set(h.branch() for h in heads)
2101 haveheads = set(h.branch() for h in heads)
2102 if branches - haveheads:
2102 if branches - haveheads:
2103 headless = ', '.join(b for b in branches - haveheads)
2103 headless = ', '.join(b for b in branches - haveheads)
2104 msg = _('no open branch heads found on branches %s')
2104 msg = _('no open branch heads found on branches %s')
2105 if opts.get('rev'):
2105 if opts.get('rev'):
2106 msg += _(' (started at %s)') % opts['rev']
2106 msg += _(' (started at %s)') % opts['rev']
2107 ui.warn((msg + '\n') % headless)
2107 ui.warn((msg + '\n') % headless)
2108
2108
2109 if not heads:
2109 if not heads:
2110 return 1
2110 return 1
2111
2111
2112 heads = sorted(heads, key=lambda x: -x.rev())
2112 heads = sorted(heads, key=lambda x: -x.rev())
2113 displayer = cmdutil.show_changeset(ui, repo, opts)
2113 displayer = cmdutil.show_changeset(ui, repo, opts)
2114 for ctx in heads:
2114 for ctx in heads:
2115 displayer.show(ctx)
2115 displayer.show(ctx)
2116 displayer.close()
2116 displayer.close()
2117
2117
2118 def help_(ui, name=None, with_version=False, unknowncmd=False, full=True):
2118 def help_(ui, name=None, with_version=False, unknowncmd=False, full=True):
2119 """show help for a given topic or a help overview
2119 """show help for a given topic or a help overview
2120
2120
2121 With no arguments, print a list of commands with short help messages.
2121 With no arguments, print a list of commands with short help messages.
2122
2122
2123 Given a topic, extension, or command name, print help for that
2123 Given a topic, extension, or command name, print help for that
2124 topic.
2124 topic.
2125
2125
2126 Returns 0 if successful.
2126 Returns 0 if successful.
2127 """
2127 """
2128 option_lists = []
2128 option_lists = []
2129 textwidth = min(ui.termwidth(), 80) - 2
2129 textwidth = min(ui.termwidth(), 80) - 2
2130
2130
2131 def addglobalopts(aliases):
2131 def addglobalopts(aliases):
2132 if ui.verbose:
2132 if ui.verbose:
2133 option_lists.append((_("global options:"), globalopts))
2133 option_lists.append((_("global options:"), globalopts))
2134 if name == 'shortlist':
2134 if name == 'shortlist':
2135 option_lists.append((_('use "hg help" for the full list '
2135 option_lists.append((_('use "hg help" for the full list '
2136 'of commands'), ()))
2136 'of commands'), ()))
2137 else:
2137 else:
2138 if name == 'shortlist':
2138 if name == 'shortlist':
2139 msg = _('use "hg help" for the full list of commands '
2139 msg = _('use "hg help" for the full list of commands '
2140 'or "hg -v" for details')
2140 'or "hg -v" for details')
2141 elif name and not full:
2141 elif name and not full:
2142 msg = _('use "hg help %s" to show the full help text' % name)
2142 msg = _('use "hg help %s" to show the full help text' % name)
2143 elif aliases:
2143 elif aliases:
2144 msg = _('use "hg -v help%s" to show builtin aliases and '
2144 msg = _('use "hg -v help%s" to show builtin aliases and '
2145 'global options') % (name and " " + name or "")
2145 'global options') % (name and " " + name or "")
2146 else:
2146 else:
2147 msg = _('use "hg -v help %s" to show global options') % name
2147 msg = _('use "hg -v help %s" to show global options') % name
2148 option_lists.append((msg, ()))
2148 option_lists.append((msg, ()))
2149
2149
2150 def helpcmd(name):
2150 def helpcmd(name):
2151 if with_version:
2151 if with_version:
2152 version_(ui)
2152 version_(ui)
2153 ui.write('\n')
2153 ui.write('\n')
2154
2154
2155 try:
2155 try:
2156 aliases, entry = cmdutil.findcmd(name, table, strict=unknowncmd)
2156 aliases, entry = cmdutil.findcmd(name, table, strict=unknowncmd)
2157 except error.AmbiguousCommand, inst:
2157 except error.AmbiguousCommand, inst:
2158 # py3k fix: except vars can't be used outside the scope of the
2158 # py3k fix: except vars can't be used outside the scope of the
2159 # except block, nor can be used inside a lambda. python issue4617
2159 # except block, nor can be used inside a lambda. python issue4617
2160 prefix = inst.args[0]
2160 prefix = inst.args[0]
2161 select = lambda c: c.lstrip('^').startswith(prefix)
2161 select = lambda c: c.lstrip('^').startswith(prefix)
2162 helplist(_('list of commands:\n\n'), select)
2162 helplist(_('list of commands:\n\n'), select)
2163 return
2163 return
2164
2164
2165 # check if it's an invalid alias and display its error if it is
2165 # check if it's an invalid alias and display its error if it is
2166 if getattr(entry[0], 'badalias', False):
2166 if getattr(entry[0], 'badalias', False):
2167 if not unknowncmd:
2167 if not unknowncmd:
2168 entry[0](ui)
2168 entry[0](ui)
2169 return
2169 return
2170
2170
2171 # synopsis
2171 # synopsis
2172 if len(entry) > 2:
2172 if len(entry) > 2:
2173 if entry[2].startswith('hg'):
2173 if entry[2].startswith('hg'):
2174 ui.write("%s\n" % entry[2])
2174 ui.write("%s\n" % entry[2])
2175 else:
2175 else:
2176 ui.write('hg %s %s\n' % (aliases[0], entry[2]))
2176 ui.write('hg %s %s\n' % (aliases[0], entry[2]))
2177 else:
2177 else:
2178 ui.write('hg %s\n' % aliases[0])
2178 ui.write('hg %s\n' % aliases[0])
2179
2179
2180 # aliases
2180 # aliases
2181 if full and not ui.quiet and len(aliases) > 1:
2181 if full and not ui.quiet and len(aliases) > 1:
2182 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
2182 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
2183
2183
2184 # description
2184 # description
2185 doc = gettext(entry[0].__doc__)
2185 doc = gettext(entry[0].__doc__)
2186 if not doc:
2186 if not doc:
2187 doc = _("(no help text available)")
2187 doc = _("(no help text available)")
2188 if hasattr(entry[0], 'definition'): # aliased command
2188 if hasattr(entry[0], 'definition'): # aliased command
2189 if entry[0].definition.startswith('!'): # shell alias
2189 if entry[0].definition.startswith('!'): # shell alias
2190 doc = _('shell alias for::\n\n %s') % entry[0].definition[1:]
2190 doc = _('shell alias for::\n\n %s') % entry[0].definition[1:]
2191 else:
2191 else:
2192 doc = _('alias for: hg %s\n\n%s') % (entry[0].definition, doc)
2192 doc = _('alias for: hg %s\n\n%s') % (entry[0].definition, doc)
2193 if ui.quiet or not full:
2193 if ui.quiet or not full:
2194 doc = doc.splitlines()[0]
2194 doc = doc.splitlines()[0]
2195 keep = ui.verbose and ['verbose'] or []
2195 keep = ui.verbose and ['verbose'] or []
2196 formatted, pruned = minirst.format(doc, textwidth, keep=keep)
2196 formatted, pruned = minirst.format(doc, textwidth, keep=keep)
2197 ui.write("\n%s\n" % formatted)
2197 ui.write("\n%s\n" % formatted)
2198 if pruned:
2198 if pruned:
2199 ui.write(_('\nuse "hg -v help %s" to show verbose help\n') % name)
2199 ui.write(_('\nuse "hg -v help %s" to show verbose help\n') % name)
2200
2200
2201 if not ui.quiet:
2201 if not ui.quiet:
2202 # options
2202 # options
2203 if entry[1]:
2203 if entry[1]:
2204 option_lists.append((_("options:\n"), entry[1]))
2204 option_lists.append((_("options:\n"), entry[1]))
2205
2205
2206 addglobalopts(False)
2206 addglobalopts(False)
2207
2207
2208 def helplist(header, select=None):
2208 def helplist(header, select=None):
2209 h = {}
2209 h = {}
2210 cmds = {}
2210 cmds = {}
2211 for c, e in table.iteritems():
2211 for c, e in table.iteritems():
2212 f = c.split("|", 1)[0]
2212 f = c.split("|", 1)[0]
2213 if select and not select(f):
2213 if select and not select(f):
2214 continue
2214 continue
2215 if (not select and name != 'shortlist' and
2215 if (not select and name != 'shortlist' and
2216 e[0].__module__ != __name__):
2216 e[0].__module__ != __name__):
2217 continue
2217 continue
2218 if name == "shortlist" and not f.startswith("^"):
2218 if name == "shortlist" and not f.startswith("^"):
2219 continue
2219 continue
2220 f = f.lstrip("^")
2220 f = f.lstrip("^")
2221 if not ui.debugflag and f.startswith("debug"):
2221 if not ui.debugflag and f.startswith("debug"):
2222 continue
2222 continue
2223 doc = e[0].__doc__
2223 doc = e[0].__doc__
2224 if doc and 'DEPRECATED' in doc and not ui.verbose:
2224 if doc and 'DEPRECATED' in doc and not ui.verbose:
2225 continue
2225 continue
2226 doc = gettext(doc)
2226 doc = gettext(doc)
2227 if not doc:
2227 if not doc:
2228 doc = _("(no help text available)")
2228 doc = _("(no help text available)")
2229 h[f] = doc.splitlines()[0].rstrip()
2229 h[f] = doc.splitlines()[0].rstrip()
2230 cmds[f] = c.lstrip("^")
2230 cmds[f] = c.lstrip("^")
2231
2231
2232 if not h:
2232 if not h:
2233 ui.status(_('no commands defined\n'))
2233 ui.status(_('no commands defined\n'))
2234 return
2234 return
2235
2235
2236 ui.status(header)
2236 ui.status(header)
2237 fns = sorted(h)
2237 fns = sorted(h)
2238 m = max(map(len, fns))
2238 m = max(map(len, fns))
2239 for f in fns:
2239 for f in fns:
2240 if ui.verbose:
2240 if ui.verbose:
2241 commands = cmds[f].replace("|",", ")
2241 commands = cmds[f].replace("|",", ")
2242 ui.write(" %s:\n %s\n"%(commands, h[f]))
2242 ui.write(" %s:\n %s\n"%(commands, h[f]))
2243 else:
2243 else:
2244 ui.write('%s\n' % (util.wrap(h[f], textwidth,
2244 ui.write('%s\n' % (util.wrap(h[f], textwidth,
2245 initindent=' %-*s ' % (m, f),
2245 initindent=' %-*s ' % (m, f),
2246 hangindent=' ' * (m + 4))))
2246 hangindent=' ' * (m + 4))))
2247
2247
2248 if not ui.quiet:
2248 if not ui.quiet:
2249 addglobalopts(True)
2249 addglobalopts(True)
2250
2250
2251 def helptopic(name):
2251 def helptopic(name):
2252 for names, header, doc in help.helptable:
2252 for names, header, doc in help.helptable:
2253 if name in names:
2253 if name in names:
2254 break
2254 break
2255 else:
2255 else:
2256 raise error.UnknownCommand(name)
2256 raise error.UnknownCommand(name)
2257
2257
2258 # description
2258 # description
2259 if not doc:
2259 if not doc:
2260 doc = _("(no help text available)")
2260 doc = _("(no help text available)")
2261 if hasattr(doc, '__call__'):
2261 if hasattr(doc, '__call__'):
2262 doc = doc()
2262 doc = doc()
2263
2263
2264 ui.write("%s\n\n" % header)
2264 ui.write("%s\n\n" % header)
2265 ui.write("%s\n" % minirst.format(doc, textwidth, indent=4))
2265 ui.write("%s\n" % minirst.format(doc, textwidth, indent=4))
2266
2266
2267 def helpext(name):
2267 def helpext(name):
2268 try:
2268 try:
2269 mod = extensions.find(name)
2269 mod = extensions.find(name)
2270 doc = gettext(mod.__doc__) or _('no help text available')
2270 doc = gettext(mod.__doc__) or _('no help text available')
2271 except KeyError:
2271 except KeyError:
2272 mod = None
2272 mod = None
2273 doc = extensions.disabledext(name)
2273 doc = extensions.disabledext(name)
2274 if not doc:
2274 if not doc:
2275 raise error.UnknownCommand(name)
2275 raise error.UnknownCommand(name)
2276
2276
2277 if '\n' not in doc:
2277 if '\n' not in doc:
2278 head, tail = doc, ""
2278 head, tail = doc, ""
2279 else:
2279 else:
2280 head, tail = doc.split('\n', 1)
2280 head, tail = doc.split('\n', 1)
2281 ui.write(_('%s extension - %s\n\n') % (name.split('.')[-1], head))
2281 ui.write(_('%s extension - %s\n\n') % (name.split('.')[-1], head))
2282 if tail:
2282 if tail:
2283 ui.write(minirst.format(tail, textwidth))
2283 ui.write(minirst.format(tail, textwidth))
2284 ui.status('\n\n')
2284 ui.status('\n\n')
2285
2285
2286 if mod:
2286 if mod:
2287 try:
2287 try:
2288 ct = mod.cmdtable
2288 ct = mod.cmdtable
2289 except AttributeError:
2289 except AttributeError:
2290 ct = {}
2290 ct = {}
2291 modcmds = set([c.split('|', 1)[0] for c in ct])
2291 modcmds = set([c.split('|', 1)[0] for c in ct])
2292 helplist(_('list of commands:\n\n'), modcmds.__contains__)
2292 helplist(_('list of commands:\n\n'), modcmds.__contains__)
2293 else:
2293 else:
2294 ui.write(_('use "hg help extensions" for information on enabling '
2294 ui.write(_('use "hg help extensions" for information on enabling '
2295 'extensions\n'))
2295 'extensions\n'))
2296
2296
2297 def helpextcmd(name):
2297 def helpextcmd(name):
2298 cmd, ext, mod = extensions.disabledcmd(ui, name, ui.config('ui', 'strict'))
2298 cmd, ext, mod = extensions.disabledcmd(ui, name, ui.config('ui', 'strict'))
2299 doc = gettext(mod.__doc__).splitlines()[0]
2299 doc = gettext(mod.__doc__).splitlines()[0]
2300
2300
2301 msg = help.listexts(_("'%s' is provided by the following "
2301 msg = help.listexts(_("'%s' is provided by the following "
2302 "extension:") % cmd, {ext: doc}, len(ext),
2302 "extension:") % cmd, {ext: doc}, len(ext),
2303 indent=4)
2303 indent=4)
2304 ui.write(minirst.format(msg, textwidth))
2304 ui.write(minirst.format(msg, textwidth))
2305 ui.write('\n\n')
2305 ui.write('\n\n')
2306 ui.write(_('use "hg help extensions" for information on enabling '
2306 ui.write(_('use "hg help extensions" for information on enabling '
2307 'extensions\n'))
2307 'extensions\n'))
2308
2308
2309 help.addtopichook('revsets', revset.makedoc)
2309 help.addtopichook('revsets', revset.makedoc)
2310 help.addtopichook('templates', templatekw.makedoc)
2310 help.addtopichook('templates', templatekw.makedoc)
2311 help.addtopichook('templates', templatefilters.makedoc)
2311 help.addtopichook('templates', templatefilters.makedoc)
2312
2312
2313 if name and name != 'shortlist':
2313 if name and name != 'shortlist':
2314 i = None
2314 i = None
2315 if unknowncmd:
2315 if unknowncmd:
2316 queries = (helpextcmd,)
2316 queries = (helpextcmd,)
2317 else:
2317 else:
2318 queries = (helptopic, helpcmd, helpext, helpextcmd)
2318 queries = (helptopic, helpcmd, helpext, helpextcmd)
2319 for f in queries:
2319 for f in queries:
2320 try:
2320 try:
2321 f(name)
2321 f(name)
2322 i = None
2322 i = None
2323 break
2323 break
2324 except error.UnknownCommand, inst:
2324 except error.UnknownCommand, inst:
2325 i = inst
2325 i = inst
2326 if i:
2326 if i:
2327 raise i
2327 raise i
2328
2328
2329 else:
2329 else:
2330 # program name
2330 # program name
2331 if ui.verbose or with_version:
2331 if ui.verbose or with_version:
2332 version_(ui)
2332 version_(ui)
2333 else:
2333 else:
2334 ui.status(_("Mercurial Distributed SCM\n"))
2334 ui.status(_("Mercurial Distributed SCM\n"))
2335 ui.status('\n')
2335 ui.status('\n')
2336
2336
2337 # list of commands
2337 # list of commands
2338 if name == "shortlist":
2338 if name == "shortlist":
2339 header = _('basic commands:\n\n')
2339 header = _('basic commands:\n\n')
2340 else:
2340 else:
2341 header = _('list of commands:\n\n')
2341 header = _('list of commands:\n\n')
2342
2342
2343 helplist(header)
2343 helplist(header)
2344 if name != 'shortlist':
2344 if name != 'shortlist':
2345 exts, maxlength = extensions.enabled()
2345 exts, maxlength = extensions.enabled()
2346 text = help.listexts(_('enabled extensions:'), exts, maxlength)
2346 text = help.listexts(_('enabled extensions:'), exts, maxlength)
2347 if text:
2347 if text:
2348 ui.write("\n%s\n" % minirst.format(text, textwidth))
2348 ui.write("\n%s\n" % minirst.format(text, textwidth))
2349
2349
2350 # list all option lists
2350 # list all option lists
2351 opt_output = []
2351 opt_output = []
2352 multioccur = False
2352 multioccur = False
2353 for title, options in option_lists:
2353 for title, options in option_lists:
2354 opt_output.append(("\n%s" % title, None))
2354 opt_output.append(("\n%s" % title, None))
2355 for option in options:
2355 for option in options:
2356 if len(option) == 5:
2356 if len(option) == 5:
2357 shortopt, longopt, default, desc, optlabel = option
2357 shortopt, longopt, default, desc, optlabel = option
2358 else:
2358 else:
2359 shortopt, longopt, default, desc = option
2359 shortopt, longopt, default, desc = option
2360 optlabel = _("VALUE") # default label
2360 optlabel = _("VALUE") # default label
2361
2361
2362 if _("DEPRECATED") in desc and not ui.verbose:
2362 if _("DEPRECATED") in desc and not ui.verbose:
2363 continue
2363 continue
2364 if isinstance(default, list):
2364 if isinstance(default, list):
2365 numqualifier = " %s [+]" % optlabel
2365 numqualifier = " %s [+]" % optlabel
2366 multioccur = True
2366 multioccur = True
2367 elif (default is not None) and not isinstance(default, bool):
2367 elif (default is not None) and not isinstance(default, bool):
2368 numqualifier = " %s" % optlabel
2368 numqualifier = " %s" % optlabel
2369 else:
2369 else:
2370 numqualifier = ""
2370 numqualifier = ""
2371 opt_output.append(("%2s%s" %
2371 opt_output.append(("%2s%s" %
2372 (shortopt and "-%s" % shortopt,
2372 (shortopt and "-%s" % shortopt,
2373 longopt and " --%s%s" %
2373 longopt and " --%s%s" %
2374 (longopt, numqualifier)),
2374 (longopt, numqualifier)),
2375 "%s%s" % (desc,
2375 "%s%s" % (desc,
2376 default
2376 default
2377 and _(" (default: %s)") % default
2377 and _(" (default: %s)") % default
2378 or "")))
2378 or "")))
2379 if multioccur:
2379 if multioccur:
2380 msg = _("\n[+] marked option can be specified multiple times")
2380 msg = _("\n[+] marked option can be specified multiple times")
2381 if ui.verbose and name != 'shortlist':
2381 if ui.verbose and name != 'shortlist':
2382 opt_output.append((msg, None))
2382 opt_output.append((msg, None))
2383 else:
2383 else:
2384 opt_output.insert(-1, (msg, None))
2384 opt_output.insert(-1, (msg, None))
2385
2385
2386 if not name:
2386 if not name:
2387 ui.write(_("\nadditional help topics:\n\n"))
2387 ui.write(_("\nadditional help topics:\n\n"))
2388 topics = []
2388 topics = []
2389 for names, header, doc in help.helptable:
2389 for names, header, doc in help.helptable:
2390 topics.append((sorted(names, key=len, reverse=True)[0], header))
2390 topics.append((sorted(names, key=len, reverse=True)[0], header))
2391 topics_len = max([len(s[0]) for s in topics])
2391 topics_len = max([len(s[0]) for s in topics])
2392 for t, desc in topics:
2392 for t, desc in topics:
2393 ui.write(" %-*s %s\n" % (topics_len, t, desc))
2393 ui.write(" %-*s %s\n" % (topics_len, t, desc))
2394
2394
2395 if opt_output:
2395 if opt_output:
2396 colwidth = encoding.colwidth
2396 colwidth = encoding.colwidth
2397 # normalize: (opt or message, desc or None, width of opt)
2397 # normalize: (opt or message, desc or None, width of opt)
2398 entries = [desc and (opt, desc, colwidth(opt)) or (opt, None, 0)
2398 entries = [desc and (opt, desc, colwidth(opt)) or (opt, None, 0)
2399 for opt, desc in opt_output]
2399 for opt, desc in opt_output]
2400 hanging = max([e[2] for e in entries])
2400 hanging = max([e[2] for e in entries])
2401 for opt, desc, width in entries:
2401 for opt, desc, width in entries:
2402 if desc:
2402 if desc:
2403 initindent = ' %s%s ' % (opt, ' ' * (hanging - width))
2403 initindent = ' %s%s ' % (opt, ' ' * (hanging - width))
2404 hangindent = ' ' * (hanging + 3)
2404 hangindent = ' ' * (hanging + 3)
2405 ui.write('%s\n' % (util.wrap(desc, textwidth,
2405 ui.write('%s\n' % (util.wrap(desc, textwidth,
2406 initindent=initindent,
2406 initindent=initindent,
2407 hangindent=hangindent)))
2407 hangindent=hangindent)))
2408 else:
2408 else:
2409 ui.write("%s\n" % opt)
2409 ui.write("%s\n" % opt)
2410
2410
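# Illustrative sketch (editor's example, not part of the diff): the
# numqualifier logic above, pulled out as a standalone helper. The name
# "qualifier" and the sample defaults are assumptions for illustration.
def qualifier(default, optlabel="VALUE"):
    if isinstance(default, list):
        # list defaults may be given multiple times, hence the "[+]" marker
        return " %s [+]" % optlabel
    elif (default is not None) and not isinstance(default, bool):
        # non-boolean, non-None defaults take a value argument
        return " %s" % optlabel
    return ""

assert qualifier([]) == " VALUE [+]"
assert qualifier("glob") == " VALUE"
assert qualifier(False) == ""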
2411 def identify(ui, repo, source=None, rev=None,
2411 def identify(ui, repo, source=None, rev=None,
2412 num=None, id=None, branch=None, tags=None, bookmarks=None):
2412 num=None, id=None, branch=None, tags=None, bookmarks=None):
2413 """identify the working copy or specified revision
2413 """identify the working copy or specified revision
2414
2414
2415 Print a summary identifying the repository state at REV using one or
2415 Print a summary identifying the repository state at REV using one or
2416 two parent hash identifiers, followed by a "+" if the working
2416 two parent hash identifiers, followed by a "+" if the working
2417 directory has uncommitted changes, the branch name (if not default),
2417 directory has uncommitted changes, the branch name (if not default),
2418 a list of tags, and a list of bookmarks.
2418 a list of tags, and a list of bookmarks.
2419
2419
2420 When REV is not given, print a summary of the current state of the
2420 When REV is not given, print a summary of the current state of the
2421 repository.
2421 repository.
2422
2422
2423 Specifying a path to a repository root or Mercurial bundle will
2423 Specifying a path to a repository root or Mercurial bundle will
2424 cause lookup to operate on that repository/bundle.
2424 cause lookup to operate on that repository/bundle.
2425
2425
2426 Returns 0 if successful.
2426 Returns 0 if successful.
2427 """
2427 """
2428
2428
2429 if not repo and not source:
2429 if not repo and not source:
2430 raise util.Abort(_("there is no Mercurial repository here "
2430 raise util.Abort(_("there is no Mercurial repository here "
2431 "(.hg not found)"))
2431 "(.hg not found)"))
2432
2432
2433 hexfunc = ui.debugflag and hex or short
2433 hexfunc = ui.debugflag and hex or short
2434 default = not (num or id or branch or tags or bookmarks)
2434 default = not (num or id or branch or tags or bookmarks)
2435 output = []
2435 output = []
2436 revs = []
2436 revs = []
2437
2437
2438 if source:
2438 if source:
2439 source, branches = hg.parseurl(ui.expandpath(source))
2439 source, branches = hg.parseurl(ui.expandpath(source))
2440 repo = hg.repository(ui, source)
2440 repo = hg.repository(ui, source)
2441 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
2441 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
2442
2442
2443 if not repo.local():
2443 if not repo.local():
2444 if num or branch or tags:
2444 if num or branch or tags:
2445 raise util.Abort(
2445 raise util.Abort(
2446 _("can't query remote revision number, branch, or tags"))
2446 _("can't query remote revision number, branch, or tags"))
2447 if not rev and revs:
2447 if not rev and revs:
2448 rev = revs[0]
2448 rev = revs[0]
2449 if not rev:
2449 if not rev:
2450 rev = "tip"
2450 rev = "tip"
2451
2451
2452 remoterev = repo.lookup(rev)
2452 remoterev = repo.lookup(rev)
2453 if default or id:
2453 if default or id:
2454 output = [hexfunc(remoterev)]
2454 output = [hexfunc(remoterev)]
2455
2455
2456 def getbms():
2456 def getbms():
2457 bms = []
2457 bms = []
2458
2458
2459 if 'bookmarks' in repo.listkeys('namespaces'):
2459 if 'bookmarks' in repo.listkeys('namespaces'):
2460 hexremoterev = hex(remoterev)
2460 hexremoterev = hex(remoterev)
2461 bms = [bm for bm, bmr in repo.listkeys('bookmarks').iteritems()
2461 bms = [bm for bm, bmr in repo.listkeys('bookmarks').iteritems()
2462 if bmr == hexremoterev]
2462 if bmr == hexremoterev]
2463
2463
2464 return bms
2464 return bms
2465
2465
2466 if bookmarks:
2466 if bookmarks:
2467 output.extend(getbms())
2467 output.extend(getbms())
2468 elif default and not ui.quiet:
2468 elif default and not ui.quiet:
2469 # multiple bookmarks for a single parent separated by '/'
2469 # multiple bookmarks for a single parent separated by '/'
2470 bm = '/'.join(getbms())
2470 bm = '/'.join(getbms())
2471 if bm:
2471 if bm:
2472 output.append(bm)
2472 output.append(bm)
2473 else:
2473 else:
2474 if not rev:
2474 if not rev:
2475 ctx = repo[None]
2475 ctx = repo[None]
2476 parents = ctx.parents()
2476 parents = ctx.parents()
2477 changed = ""
2477 changed = ""
2478 if default or id or num:
2478 if default or id or num:
2479 changed = util.any(repo.status()) and "+" or ""
2479 changed = util.any(repo.status()) and "+" or ""
2480 if default or id:
2480 if default or id:
2481 output = ["%s%s" %
2481 output = ["%s%s" %
2482 ('+'.join([hexfunc(p.node()) for p in parents]), changed)]
2482 ('+'.join([hexfunc(p.node()) for p in parents]), changed)]
2483 if num:
2483 if num:
2484 output.append("%s%s" %
2484 output.append("%s%s" %
2485 ('+'.join([str(p.rev()) for p in parents]), changed))
2485 ('+'.join([str(p.rev()) for p in parents]), changed))
2486 else:
2486 else:
2487 ctx = cmdutil.revsingle(repo, rev)
2487 ctx = cmdutil.revsingle(repo, rev)
2488 if default or id:
2488 if default or id:
2489 output = [hexfunc(ctx.node())]
2489 output = [hexfunc(ctx.node())]
2490 if num:
2490 if num:
2491 output.append(str(ctx.rev()))
2491 output.append(str(ctx.rev()))
2492
2492
2493 if default and not ui.quiet:
2493 if default and not ui.quiet:
2494 b = ctx.branch()
2494 b = ctx.branch()
2495 if b != 'default':
2495 if b != 'default':
2496 output.append("(%s)" % b)
2496 output.append("(%s)" % b)
2497
2497
2498 # multiple tags for a single parent separated by '/'
2498 # multiple tags for a single parent separated by '/'
2499 t = '/'.join(ctx.tags())
2499 t = '/'.join(ctx.tags())
2500 if t:
2500 if t:
2501 output.append(t)
2501 output.append(t)
2502
2502
2503 # multiple bookmarks for a single parent separated by '/'
2503 # multiple bookmarks for a single parent separated by '/'
2504 bm = '/'.join(ctx.bookmarks())
2504 bm = '/'.join(ctx.bookmarks())
2505 if bm:
2505 if bm:
2506 output.append(bm)
2506 output.append(bm)
2507 else:
2507 else:
2508 if branch:
2508 if branch:
2509 output.append(ctx.branch())
2509 output.append(ctx.branch())
2510
2510
2511 if tags:
2511 if tags:
2512 output.extend(ctx.tags())
2512 output.extend(ctx.tags())
2513
2513
2514 if bookmarks:
2514 if bookmarks:
2515 output.extend(ctx.bookmarks())
2515 output.extend(ctx.bookmarks())
2516
2516
2517 ui.write("%s\n" % ' '.join(output))
2517 ui.write("%s\n" % ' '.join(output))
2518
2518
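# Illustrative sketch (editor's example, not part of the diff): how identify
# assembles its default output line -- parent hashes joined by '+', a trailing
# '+' for a dirty working directory, the branch in parentheses when it is not
# 'default', then tags and bookmarks each joined by '/'. All values are made up.
parents = ["d6e1b3c2a4f5", "9a8b7c6d5e4f"]
dirty = True
output = ["%s%s" % ("+".join(parents), dirty and "+" or "")]
branch, tags, bookmarks = "stable", ["tip", "v1.8"], ["work"]
if branch != 'default':
    output.append("(%s)" % branch)
if tags:
    output.append('/'.join(tags))
if bookmarks:
    output.append('/'.join(bookmarks))
print ' '.join(output)  # d6e1b3c2a4f5+9a8b7c6d5e4f+ (stable) tip/v1.8 work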
2519 def import_(ui, repo, patch1, *patches, **opts):
2519 def import_(ui, repo, patch1, *patches, **opts):
2520 """import an ordered set of patches
2520 """import an ordered set of patches
2521
2521
2522 Import a list of patches and commit them individually (unless
2522 Import a list of patches and commit them individually (unless
2523 --no-commit is specified).
2523 --no-commit is specified).
2524
2524
2525 If there are outstanding changes in the working directory, import
2525 If there are outstanding changes in the working directory, import
2526 will abort unless given the -f/--force flag.
2526 will abort unless given the -f/--force flag.
2527
2527
2528 You can import a patch straight from a mail message. Even patches
2528 You can import a patch straight from a mail message. Even patches
2529 as attachments work (to use the body part, it must have type
2529 as attachments work (to use the body part, it must have type
2530 text/plain or text/x-patch). The From and Subject headers of the email
2530 text/plain or text/x-patch). The From and Subject headers of the email
2531 message are used as the default committer and commit message. All
2531 message are used as the default committer and commit message. All
2532 text/plain body parts before the first diff are added to the commit
2532 text/plain body parts before the first diff are added to the commit
2533 message.
2533 message.
2534
2534
2535 If the imported patch was generated by :hg:`export`, user and
2535 If the imported patch was generated by :hg:`export`, user and
2536 description from patch override values from message headers and
2536 description from patch override values from message headers and
2537 body. Values given on command line with -m/--message and -u/--user
2537 body. Values given on command line with -m/--message and -u/--user
2538 override these.
2538 override these.
2539
2539
2540 If --exact is specified, import will set the working directory to
2540 If --exact is specified, import will set the working directory to
2541 the parent of each patch before applying it, and will abort if the
2541 the parent of each patch before applying it, and will abort if the
2542 resulting changeset has a different ID than the one recorded in
2542 resulting changeset has a different ID than the one recorded in
2543 the patch. This may happen due to character set problems or other
2543 the patch. This may happen due to character set problems or other
2544 deficiencies in the text patch format.
2544 deficiencies in the text patch format.
2545
2545
2546 With -s/--similarity, hg will attempt to discover renames and
2546 With -s/--similarity, hg will attempt to discover renames and
2547 copies in the patch in the same way as 'addremove'.
2547 copies in the patch in the same way as 'addremove'.
2548
2548
2549 To read a patch from standard input, use "-" as the patch name. If
2549 To read a patch from standard input, use "-" as the patch name. If
2550 a URL is specified, the patch will be downloaded from it.
2550 a URL is specified, the patch will be downloaded from it.
2551 See :hg:`help dates` for a list of formats valid for -d/--date.
2551 See :hg:`help dates` for a list of formats valid for -d/--date.
2552
2552
2553 Returns 0 on success.
2553 Returns 0 on success.
2554 """
2554 """
2555 patches = (patch1,) + patches
2555 patches = (patch1,) + patches
2556
2556
2557 date = opts.get('date')
2557 date = opts.get('date')
2558 if date:
2558 if date:
2559 opts['date'] = util.parsedate(date)
2559 opts['date'] = util.parsedate(date)
2560
2560
2561 try:
2561 try:
2562 sim = float(opts.get('similarity') or 0)
2562 sim = float(opts.get('similarity') or 0)
2563 except ValueError:
2563 except ValueError:
2564 raise util.Abort(_('similarity must be a number'))
2564 raise util.Abort(_('similarity must be a number'))
2565 if sim < 0 or sim > 100:
2565 if sim < 0 or sim > 100:
2566 raise util.Abort(_('similarity must be between 0 and 100'))
2566 raise util.Abort(_('similarity must be between 0 and 100'))
2567
2567
2568 if opts.get('exact') or not opts.get('force'):
2568 if opts.get('exact') or not opts.get('force'):
2569 cmdutil.bail_if_changed(repo)
2569 cmdutil.bail_if_changed(repo)
2570
2570
2571 d = opts["base"]
2571 d = opts["base"]
2572 strip = opts["strip"]
2572 strip = opts["strip"]
2573 wlock = lock = None
2573 wlock = lock = None
2574 msgs = []
2574 msgs = []
2575
2575
2576 def tryone(ui, hunk):
2576 def tryone(ui, hunk):
2577 tmpname, message, user, date, branch, nodeid, p1, p2 = \
2577 tmpname, message, user, date, branch, nodeid, p1, p2 = \
2578 patch.extract(ui, hunk)
2578 patch.extract(ui, hunk)
2579
2579
2580 if not tmpname:
2580 if not tmpname:
2581 return None
2581 return None
2582 commitid = _('to working directory')
2582 commitid = _('to working directory')
2583
2583
2584 try:
2584 try:
2585 cmdline_message = cmdutil.logmessage(opts)
2585 cmdline_message = cmdutil.logmessage(opts)
2586 if cmdline_message:
2586 if cmdline_message:
2587 # pick up the cmdline msg
2587 # pick up the cmdline msg
2588 message = cmdline_message
2588 message = cmdline_message
2589 elif message:
2589 elif message:
2590 # pick up the patch msg
2590 # pick up the patch msg
2591 message = message.strip()
2591 message = message.strip()
2592 else:
2592 else:
2593 # launch the editor
2593 # launch the editor
2594 message = None
2594 message = None
2595 ui.debug('message:\n%s\n' % message)
2595 ui.debug('message:\n%s\n' % message)
2596
2596
2597 wp = repo.parents()
2597 wp = repo.parents()
2598 if opts.get('exact'):
2598 if opts.get('exact'):
2599 if not nodeid or not p1:
2599 if not nodeid or not p1:
2600 raise util.Abort(_('not a Mercurial patch'))
2600 raise util.Abort(_('not a Mercurial patch'))
2601 p1 = repo.lookup(p1)
2601 p1 = repo.lookup(p1)
2602 p2 = repo.lookup(p2 or hex(nullid))
2602 p2 = repo.lookup(p2 or hex(nullid))
2603
2603
2604 if p1 != wp[0].node():
2604 if p1 != wp[0].node():
2605 hg.clean(repo, p1)
2605 hg.clean(repo, p1)
2606 repo.dirstate.setparents(p1, p2)
2606 repo.dirstate.setparents(p1, p2)
2607 elif p2:
2607 elif p2:
2608 try:
2608 try:
2609 p1 = repo.lookup(p1)
2609 p1 = repo.lookup(p1)
2610 p2 = repo.lookup(p2)
2610 p2 = repo.lookup(p2)
2611 if p1 == wp[0].node():
2611 if p1 == wp[0].node():
2612 repo.dirstate.setparents(p1, p2)
2612 repo.dirstate.setparents(p1, p2)
2613 except error.RepoError:
2613 except error.RepoError:
2614 pass
2614 pass
2615 if opts.get('exact') or opts.get('import_branch'):
2615 if opts.get('exact') or opts.get('import_branch'):
2616 repo.dirstate.setbranch(branch or 'default')
2616 repo.dirstate.setbranch(branch or 'default')
2617
2617
2618 files = {}
2618 files = {}
2619 try:
2619 try:
2620 patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
2620 patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
2621 files=files, eolmode=None)
2621 files=files, eolmode=None)
2622 finally:
2622 finally:
2623 files = cmdutil.updatedir(ui, repo, files,
2623 files = cmdutil.updatedir(ui, repo, files,
2624 similarity=sim / 100.0)
2624 similarity=sim / 100.0)
2625 if opts.get('no_commit'):
2625 if opts.get('no_commit'):
2626 if message:
2626 if message:
2627 msgs.append(message)
2627 msgs.append(message)
2628 else:
2628 else:
2629 if opts.get('exact'):
2629 if opts.get('exact'):
2630 m = None
2630 m = None
2631 else:
2631 else:
2632 m = cmdutil.matchfiles(repo, files or [])
2632 m = cmdutil.matchfiles(repo, files or [])
2633 n = repo.commit(message, opts.get('user') or user,
2633 n = repo.commit(message, opts.get('user') or user,
2634 opts.get('date') or date, match=m,
2634 opts.get('date') or date, match=m,
2635 editor=cmdutil.commiteditor)
2635 editor=cmdutil.commiteditor)
2636 if opts.get('exact'):
2636 if opts.get('exact'):
2637 if hex(n) != nodeid:
2637 if hex(n) != nodeid:
2638 repo.rollback()
2638 repo.rollback()
2639 raise util.Abort(_('patch is damaged'
2639 raise util.Abort(_('patch is damaged'
2640 ' or loses information'))
2640 ' or loses information'))
2641 # Force a dirstate write so that the next transaction
2641 # Force a dirstate write so that the next transaction
2642 # backs up an up-to-date file.
2642 # backs up an up-to-date file.
2643 repo.dirstate.write()
2643 repo.dirstate.write()
2644 if n:
2644 if n:
2645 commitid = short(n)
2645 commitid = short(n)
2646
2646
2647 return commitid
2647 return commitid
2648 finally:
2648 finally:
2649 os.unlink(tmpname)
2649 os.unlink(tmpname)
2650
2650
2651 try:
2651 try:
2652 wlock = repo.wlock()
2652 wlock = repo.wlock()
2653 lock = repo.lock()
2653 lock = repo.lock()
2654 lastcommit = None
2654 lastcommit = None
2655 for p in patches:
2655 for p in patches:
2656 pf = os.path.join(d, p)
2656 pf = os.path.join(d, p)
2657
2657
2658 if pf == '-':
2658 if pf == '-':
2659 ui.status(_("applying patch from stdin\n"))
2659 ui.status(_("applying patch from stdin\n"))
2660 pf = sys.stdin
2660 pf = sys.stdin
2661 else:
2661 else:
2662 ui.status(_("applying %s\n") % p)
2662 ui.status(_("applying %s\n") % p)
2663 pf = url.open(ui, pf)
2663 pf = url.open(ui, pf)
2664
2664
2665 haspatch = False
2665 haspatch = False
2666 for hunk in patch.split(pf):
2666 for hunk in patch.split(pf):
2667 commitid = tryone(ui, hunk)
2667 commitid = tryone(ui, hunk)
2668 if commitid:
2668 if commitid:
2669 haspatch = True
2669 haspatch = True
2670 if lastcommit:
2670 if lastcommit:
2671 ui.status(_('applied %s\n') % lastcommit)
2671 ui.status(_('applied %s\n') % lastcommit)
2672 lastcommit = commitid
2672 lastcommit = commitid
2673
2673
2674 if not haspatch:
2674 if not haspatch:
2675 raise util.Abort(_('no diffs found'))
2675 raise util.Abort(_('no diffs found'))
2676
2676
2677 if msgs:
2677 if msgs:
2678 repo.opener.write('last-message.txt', '\n* * *\n'.join(msgs))
2678 repo.opener.write('last-message.txt', '\n* * *\n'.join(msgs))
2679 finally:
2679 finally:
2680 release(lock, wlock)
2680 release(lock, wlock)
2681
2681
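# Illustrative sketch (editor's example, not part of the diff): the
# -s/--similarity validation used above, as a small helper; the function name
# is invented. The accepted value is a percentage, handed on as a fraction.
def parsesimilarity(value):
    try:
        sim = float(value or 0)
    except ValueError:
        raise ValueError('similarity must be a number')
    if sim < 0 or sim > 100:
        raise ValueError('similarity must be between 0 and 100')
    return sim / 100.0  # updatedir() receives the fraction, e.g. 0.9

assert parsesimilarity(None) == 0.0
assert parsesimilarity("90") == 0.9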
2682 def incoming(ui, repo, source="default", **opts):
2682 def incoming(ui, repo, source="default", **opts):
2683 """show new changesets found in source
2683 """show new changesets found in source
2684
2684
2685 Show new changesets found in the specified path/URL or the default
2685 Show new changesets found in the specified path/URL or the default
2686 pull location. These are the changesets that would have been pulled
2686 pull location. These are the changesets that would have been pulled
2687 if a pull had been requested at the time you issued this command.
2687 if a pull had been requested at the time you issued this command.
2688
2688
2689 For a remote repository, using --bundle avoids downloading the
2689 For a remote repository, using --bundle avoids downloading the
2690 changesets twice if the incoming is followed by a pull.
2690 changesets twice if the incoming is followed by a pull.
2691
2691
2692 See pull for valid source format details.
2692 See pull for valid source format details.
2693
2693
2694 Returns 0 if there are incoming changes, 1 otherwise.
2694 Returns 0 if there are incoming changes, 1 otherwise.
2695 """
2695 """
2696 if opts.get('bundle') and opts.get('subrepos'):
2696 if opts.get('bundle') and opts.get('subrepos'):
2697 raise util.Abort(_('cannot combine --bundle and --subrepos'))
2697 raise util.Abort(_('cannot combine --bundle and --subrepos'))
2698
2698
2699 if opts.get('bookmarks'):
2699 if opts.get('bookmarks'):
2700 source, branches = hg.parseurl(ui.expandpath(source),
2700 source, branches = hg.parseurl(ui.expandpath(source),
2701 opts.get('branch'))
2701 opts.get('branch'))
2702 other = hg.repository(hg.remoteui(repo, opts), source)
2702 other = hg.repository(hg.remoteui(repo, opts), source)
2703 if 'bookmarks' not in other.listkeys('namespaces'):
2703 if 'bookmarks' not in other.listkeys('namespaces'):
2704 ui.warn(_("remote doesn't support bookmarks\n"))
2704 ui.warn(_("remote doesn't support bookmarks\n"))
2705 return 0
2705 return 0
2706 ui.status(_('comparing with %s\n') % util.hidepassword(source))
2706 ui.status(_('comparing with %s\n') % util.hidepassword(source))
2707 return bookmarks.diff(ui, repo, other)
2707 return bookmarks.diff(ui, repo, other)
2708
2708
2709 ret = hg.incoming(ui, repo, source, opts)
2709 ret = hg.incoming(ui, repo, source, opts)
2710 return ret
2710 return ret
2711
2711
2712 def init(ui, dest=".", **opts):
2712 def init(ui, dest=".", **opts):
2713 """create a new repository in the given directory
2713 """create a new repository in the given directory
2714
2714
2715 Initialize a new repository in the given directory. If the given
2715 Initialize a new repository in the given directory. If the given
2716 directory does not exist, it will be created.
2716 directory does not exist, it will be created.
2717
2717
2718 If no directory is given, the current directory is used.
2718 If no directory is given, the current directory is used.
2719
2719
2720 It is possible to specify an ``ssh://`` URL as the destination.
2720 It is possible to specify an ``ssh://`` URL as the destination.
2721 See :hg:`help urls` for more information.
2721 See :hg:`help urls` for more information.
2722
2722
2723 Returns 0 on success.
2723 Returns 0 on success.
2724 """
2724 """
2725 hg.repository(hg.remoteui(ui, opts), ui.expandpath(dest), create=1)
2725 hg.repository(hg.remoteui(ui, opts), ui.expandpath(dest), create=1)
2726
2726
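# Illustrative sketch (editor's example, not part of the diff): creating a
# repository through the same call init makes, assuming a Mercurial of this
# vintage is importable and '/tmp/demo-repo' is not already a repository.
from mercurial import ui, hg

u = ui.ui()
hg.repository(u, '/tmp/demo-repo', create=1)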
2727 def locate(ui, repo, *pats, **opts):
2727 def locate(ui, repo, *pats, **opts):
2728 """locate files matching specific patterns
2728 """locate files matching specific patterns
2729
2729
2730 Print files under Mercurial control in the working directory whose
2730 Print files under Mercurial control in the working directory whose
2731 names match the given patterns.
2731 names match the given patterns.
2732
2732
2733 By default, this command searches all directories in the working
2733 By default, this command searches all directories in the working
2734 directory. To search just the current directory and its
2734 directory. To search just the current directory and its
2735 subdirectories, use "--include .".
2735 subdirectories, use "--include .".
2736
2736
2737 If no patterns are given to match, this command prints the names
2737 If no patterns are given to match, this command prints the names
2738 of all files under Mercurial control in the working directory.
2738 of all files under Mercurial control in the working directory.
2739
2739
2740 If you want to feed the output of this command into the "xargs"
2740 If you want to feed the output of this command into the "xargs"
2741 command, use the -0 option to both this command and "xargs". This
2741 command, use the -0 option to both this command and "xargs". This
2742 will avoid the problem of "xargs" treating single filenames that
2742 will avoid the problem of "xargs" treating single filenames that
2743 contain whitespace as multiple filenames.
2743 contain whitespace as multiple filenames.
2744
2744
2745 Returns 0 if a match is found, 1 otherwise.
2745 Returns 0 if a match is found, 1 otherwise.
2746 """
2746 """
2747 end = opts.get('print0') and '\0' or '\n'
2747 end = opts.get('print0') and '\0' or '\n'
2748 rev = cmdutil.revsingle(repo, opts.get('rev'), None).node()
2748 rev = cmdutil.revsingle(repo, opts.get('rev'), None).node()
2749
2749
2750 ret = 1
2750 ret = 1
2751 m = cmdutil.match(repo, pats, opts, default='relglob')
2751 m = cmdutil.match(repo, pats, opts, default='relglob')
2752 m.bad = lambda x, y: False
2752 m.bad = lambda x, y: False
2753 for abs in repo[rev].walk(m):
2753 for abs in repo[rev].walk(m):
2754 if not rev and abs not in repo.dirstate:
2754 if not rev and abs not in repo.dirstate:
2755 continue
2755 continue
2756 if opts.get('fullpath'):
2756 if opts.get('fullpath'):
2757 ui.write(repo.wjoin(abs), end)
2757 ui.write(repo.wjoin(abs), end)
2758 else:
2758 else:
2759 ui.write(((pats and m.rel(abs)) or abs), end)
2759 ui.write(((pats and m.rel(abs)) or abs), end)
2760 ret = 0
2760 ret = 0
2761
2761
2762 return ret
2762 return ret
2763
2763
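# Illustrative sketch (editor's example, not part of the diff): the --print0
# convention used by locate -- NUL-terminate names so "xargs -0" copes with
# whitespace in filenames. The sample names are invented.
import sys

print0 = True
end = print0 and '\0' or '\n'
for name in ['plain.txt', 'name with spaces.txt']:
    sys.stdout.write(name + end)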
2764 def log(ui, repo, *pats, **opts):
2764 def log(ui, repo, *pats, **opts):
2765 """show revision history of entire repository or files
2765 """show revision history of entire repository or files
2766
2766
2767 Print the revision history of the specified files or the entire
2767 Print the revision history of the specified files or the entire
2768 project.
2768 project.
2769
2769
2770 File history is shown without following rename or copy history of
2770 File history is shown without following rename or copy history of
2771 files. Use -f/--follow with a filename to follow history across
2771 files. Use -f/--follow with a filename to follow history across
2772 renames and copies. --follow without a filename will only show
2772 renames and copies. --follow without a filename will only show
2773 ancestors or descendants of the starting revision. --follow-first
2773 ancestors or descendants of the starting revision. --follow-first
2774 only follows the first parent of merge revisions.
2774 only follows the first parent of merge revisions.
2775
2775
2776 If no revision range is specified, the default is ``tip:0`` unless
2776 If no revision range is specified, the default is ``tip:0`` unless
2777 --follow is set, in which case the working directory parent is
2777 --follow is set, in which case the working directory parent is
2778 used as the starting revision. You can specify a revision set for
2778 used as the starting revision. You can specify a revision set for
2779 log, see :hg:`help revsets` for more information.
2779 log, see :hg:`help revsets` for more information.
2780
2780
2781 See :hg:`help dates` for a list of formats valid for -d/--date.
2781 See :hg:`help dates` for a list of formats valid for -d/--date.
2782
2782
2783 By default this command prints revision number and changeset id,
2783 By default this command prints revision number and changeset id,
2784 tags, non-trivial parents, user, date and time, and a summary for
2784 tags, non-trivial parents, user, date and time, and a summary for
2785 each commit. When the -v/--verbose switch is used, the list of
2785 each commit. When the -v/--verbose switch is used, the list of
2786 changed files and full commit message are shown.
2786 changed files and full commit message are shown.
2787
2787
2788 .. note::
2788 .. note::
2789 log -p/--patch may generate unexpected diff output for merge
2789 log -p/--patch may generate unexpected diff output for merge
2790 changesets, as it will only compare the merge changeset against
2790 changesets, as it will only compare the merge changeset against
2791 its first parent. Also, only files different from BOTH parents
2791 its first parent. Also, only files different from BOTH parents
2792 will appear in files:.
2792 will appear in files:.
2793
2793
2794 Returns 0 on success.
2794 Returns 0 on success.
2795 """
2795 """
2796
2796
2797 matchfn = cmdutil.match(repo, pats, opts)
2797 matchfn = cmdutil.match(repo, pats, opts)
2798 limit = cmdutil.loglimit(opts)
2798 limit = cmdutil.loglimit(opts)
2799 count = 0
2799 count = 0
2800
2800
2801 endrev = None
2801 endrev = None
2802 if opts.get('copies') and opts.get('rev'):
2802 if opts.get('copies') and opts.get('rev'):
2803 endrev = max(cmdutil.revrange(repo, opts.get('rev'))) + 1
2803 endrev = max(cmdutil.revrange(repo, opts.get('rev'))) + 1
2804
2804
2805 df = False
2805 df = False
2806 if opts["date"]:
2806 if opts["date"]:
2807 df = util.matchdate(opts["date"])
2807 df = util.matchdate(opts["date"])
2808
2808
2809 branches = opts.get('branch', []) + opts.get('only_branch', [])
2809 branches = opts.get('branch', []) + opts.get('only_branch', [])
2810 opts['branch'] = [repo.lookupbranch(b) for b in branches]
2810 opts['branch'] = [repo.lookupbranch(b) for b in branches]
2811
2811
2812 displayer = cmdutil.show_changeset(ui, repo, opts, True)
2812 displayer = cmdutil.show_changeset(ui, repo, opts, True)
2813 def prep(ctx, fns):
2813 def prep(ctx, fns):
2814 rev = ctx.rev()
2814 rev = ctx.rev()
2815 parents = [p for p in repo.changelog.parentrevs(rev)
2815 parents = [p for p in repo.changelog.parentrevs(rev)
2816 if p != nullrev]
2816 if p != nullrev]
2817 if opts.get('no_merges') and len(parents) == 2:
2817 if opts.get('no_merges') and len(parents) == 2:
2818 return
2818 return
2819 if opts.get('only_merges') and len(parents) != 2:
2819 if opts.get('only_merges') and len(parents) != 2:
2820 return
2820 return
2821 if opts.get('branch') and ctx.branch() not in opts['branch']:
2821 if opts.get('branch') and ctx.branch() not in opts['branch']:
2822 return
2822 return
2823 if df and not df(ctx.date()[0]):
2823 if df and not df(ctx.date()[0]):
2824 return
2824 return
2825 if opts['user'] and not [k for k in opts['user']
2825 if opts['user'] and not [k for k in opts['user']
2826 if k.lower() in ctx.user().lower()]:
2826 if k.lower() in ctx.user().lower()]:
2827 return
2827 return
2828 if opts.get('keyword'):
2828 if opts.get('keyword'):
2829 for k in [kw.lower() for kw in opts['keyword']]:
2829 for k in [kw.lower() for kw in opts['keyword']]:
2830 if (k in ctx.user().lower() or
2830 if (k in ctx.user().lower() or
2831 k in ctx.description().lower() or
2831 k in ctx.description().lower() or
2832 k in " ".join(ctx.files()).lower()):
2832 k in " ".join(ctx.files()).lower()):
2833 break
2833 break
2834 else:
2834 else:
2835 return
2835 return
2836
2836
2837 copies = None
2837 copies = None
2838 if opts.get('copies') and rev:
2838 if opts.get('copies') and rev:
2839 copies = []
2839 copies = []
2840 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2840 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2841 for fn in ctx.files():
2841 for fn in ctx.files():
2842 rename = getrenamed(fn, rev)
2842 rename = getrenamed(fn, rev)
2843 if rename:
2843 if rename:
2844 copies.append((fn, rename[0]))
2844 copies.append((fn, rename[0]))
2845
2845
2846 revmatchfn = None
2846 revmatchfn = None
2847 if opts.get('patch') or opts.get('stat'):
2847 if opts.get('patch') or opts.get('stat'):
2848 if opts.get('follow') or opts.get('follow_first'):
2848 if opts.get('follow') or opts.get('follow_first'):
2849 # note: this might be wrong when following through merges
2849 # note: this might be wrong when following through merges
2850 revmatchfn = cmdutil.match(repo, fns, default='path')
2850 revmatchfn = cmdutil.match(repo, fns, default='path')
2851 else:
2851 else:
2852 revmatchfn = matchfn
2852 revmatchfn = matchfn
2853
2853
2854 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
2854 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
2855
2855
2856 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
2856 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
2857 if count == limit:
2857 if count == limit:
2858 break
2858 break
2859 if displayer.flush(ctx.rev()):
2859 if displayer.flush(ctx.rev()):
2860 count += 1
2860 count += 1
2861 displayer.close()
2861 displayer.close()
2862
2862
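# Illustrative sketch (editor's example, not part of the diff): the for/else
# keyword filter used in prep() above -- a revision is kept if any keyword
# appears in its user, description, or file list. Data below is invented.
def matchkeywords(keywords, user, description, files):
    for k in [kw.lower() for kw in keywords]:
        if (k in user.lower() or
            k in description.lower() or
            k in " ".join(files).lower()):
            break
    else:
        # no keyword matched anything, so the revision is filtered out
        return False
    return True

assert matchkeywords(["bug"], "alice", "fix bug in parser", ["parser.py"])
assert not matchkeywords(["doc"], "alice", "fix bug in parser", ["parser.py"])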
2863 def manifest(ui, repo, node=None, rev=None):
2863 def manifest(ui, repo, node=None, rev=None):
2864 """output the current or given revision of the project manifest
2864 """output the current or given revision of the project manifest
2865
2865
2866 Print a list of version controlled files for the given revision.
2866 Print a list of version controlled files for the given revision.
2867 If no revision is given, the first parent of the working directory
2867 If no revision is given, the first parent of the working directory
2868 is used, or the null revision if no revision is checked out.
2868 is used, or the null revision if no revision is checked out.
2869
2869
2870 With -v, print file permissions, symlink and executable bits.
2870 With -v, print file permissions, symlink and executable bits.
2871 With --debug, print file revision hashes.
2871 With --debug, print file revision hashes.
2872
2872
2873 Returns 0 on success.
2873 Returns 0 on success.
2874 """
2874 """
2875
2875
2876 if rev and node:
2876 if rev and node:
2877 raise util.Abort(_("please specify just one revision"))
2877 raise util.Abort(_("please specify just one revision"))
2878
2878
2879 if not node:
2879 if not node:
2880 node = rev
2880 node = rev
2881
2881
2882 decor = {'l':'644 @ ', 'x':'755 * ', '':'644 '}
2882 decor = {'l':'644 @ ', 'x':'755 * ', '':'644 '}
2883 ctx = cmdutil.revsingle(repo, node)
2883 ctx = cmdutil.revsingle(repo, node)
2884 for f in ctx:
2884 for f in ctx:
2885 if ui.debugflag:
2885 if ui.debugflag:
2886 ui.write("%40s " % hex(ctx.manifest()[f]))
2886 ui.write("%40s " % hex(ctx.manifest()[f]))
2887 if ui.verbose:
2887 if ui.verbose:
2888 ui.write(decor[ctx.flags(f)])
2888 ui.write(decor[ctx.flags(f)])
2889 ui.write("%s\n" % f)
2889 ui.write("%s\n" % f)
2890
2890
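# Illustrative sketch (editor's example, not part of the diff): the flag
# decoration printed by manifest -v -- symlinks show as "644 @", executables
# as "755 *", plain files as "644". File names are invented.
decor = {'l': '644 @ ', 'x': '755 * ', '': '644   '}
for name, flag in [('README', ''), ('setup.sh', 'x'), ('current', 'l')]:
    print "%s%s" % (decor[flag], name)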
2891 def merge(ui, repo, node=None, **opts):
2891 def merge(ui, repo, node=None, **opts):
2892 """merge working directory with another revision
2892 """merge working directory with another revision
2893
2893
2894 The current working directory is updated with all changes made in
2894 The current working directory is updated with all changes made in
2895 the requested revision since the last common predecessor revision.
2895 the requested revision since the last common predecessor revision.
2896
2896
2897 Files that changed relative to either parent are marked as changed for
2897 Files that changed relative to either parent are marked as changed for
2898 the next commit and a commit must be performed before any further
2898 the next commit and a commit must be performed before any further
2899 updates to the repository are allowed. The next commit will have
2899 updates to the repository are allowed. The next commit will have
2900 two parents.
2900 two parents.
2901
2901
2902 ``--tool`` can be used to specify the merge tool used for file
2902 ``--tool`` can be used to specify the merge tool used for file
2903 merges. It overrides the HGMERGE environment variable and your
2903 merges. It overrides the HGMERGE environment variable and your
2904 configuration files. See :hg:`help merge-tools` for options.
2904 configuration files. See :hg:`help merge-tools` for options.
2905
2905
2906 If no revision is specified, the working directory's parent is a
2906 If no revision is specified, the working directory's parent is a
2907 head revision, and the current branch contains exactly one other
2907 head revision, and the current branch contains exactly one other
2908 head, the other head is merged by default. Otherwise, an
2908 head, the other head is merged by default. Otherwise, an
2909 explicit revision with which to merge must be provided.
2909 explicit revision with which to merge must be provided.
2910
2910
2911 :hg:`resolve` must be used to resolve unresolved files.
2911 :hg:`resolve` must be used to resolve unresolved files.
2912
2912
2913 To undo an uncommitted merge, use :hg:`update --clean .` which
2913 To undo an uncommitted merge, use :hg:`update --clean .` which
2914 will check out a clean copy of the original merge parent, losing
2914 will check out a clean copy of the original merge parent, losing
2915 all changes.
2915 all changes.
2916
2916
2917 Returns 0 on success, 1 if there are unresolved files.
2917 Returns 0 on success, 1 if there are unresolved files.
2918 """
2918 """
2919
2919
2920 if opts.get('rev') and node:
2920 if opts.get('rev') and node:
2921 raise util.Abort(_("please specify just one revision"))
2921 raise util.Abort(_("please specify just one revision"))
2922 if not node:
2922 if not node:
2923 node = opts.get('rev')
2923 node = opts.get('rev')
2924
2924
2925 if not node:
2925 if not node:
2926 branch = repo[None].branch()
2926 branch = repo[None].branch()
2927 bheads = repo.branchheads(branch)
2927 bheads = repo.branchheads(branch)
2928 if len(bheads) > 2:
2928 if len(bheads) > 2:
2929 raise util.Abort(_("branch '%s' has %d heads - "
2929 raise util.Abort(_("branch '%s' has %d heads - "
2930 "please merge with an explicit rev")
2930 "please merge with an explicit rev")
2931 % (branch, len(bheads)),
2931 % (branch, len(bheads)),
2932 hint=_("run 'hg heads .' to see heads"))
2932 hint=_("run 'hg heads .' to see heads"))
2933
2933
2934 parent = repo.dirstate.p1()
2934 parent = repo.dirstate.p1()
2935 if len(bheads) == 1:
2935 if len(bheads) == 1:
2936 if len(repo.heads()) > 1:
2936 if len(repo.heads()) > 1:
2937 raise util.Abort(_("branch '%s' has one head - "
2937 raise util.Abort(_("branch '%s' has one head - "
2938 "please merge with an explicit rev")
2938 "please merge with an explicit rev")
2939 % branch,
2939 % branch,
2940 hint=_("run 'hg heads' to see all heads"))
2940 hint=_("run 'hg heads' to see all heads"))
2941 msg = _('there is nothing to merge')
2941 msg = _('there is nothing to merge')
2942 if parent != repo.lookup(repo[None].branch()):
2942 if parent != repo.lookup(repo[None].branch()):
2943 msg = _('%s - use "hg update" instead') % msg
2943 msg = _('%s - use "hg update" instead') % msg
2944 raise util.Abort(msg)
2944 raise util.Abort(msg)
2945
2945
2946 if parent not in bheads:
2946 if parent not in bheads:
2947 raise util.Abort(_('working directory not at a head revision'),
2947 raise util.Abort(_('working directory not at a head revision'),
2948 hint=_("use 'hg update' or merge with an "
2948 hint=_("use 'hg update' or merge with an "
2949 "explicit revision"))
2949 "explicit revision"))
2950 node = parent == bheads[0] and bheads[-1] or bheads[0]
2950 node = parent == bheads[0] and bheads[-1] or bheads[0]
2951 else:
2951 else:
2952 node = cmdutil.revsingle(repo, node).node()
2952 node = cmdutil.revsingle(repo, node).node()
2953
2953
2954 if opts.get('preview'):
2954 if opts.get('preview'):
2955 # find nodes that are ancestors of p2 but not of p1
2955 # find nodes that are ancestors of p2 but not of p1
2956 p1 = repo.lookup('.')
2956 p1 = repo.lookup('.')
2957 p2 = repo.lookup(node)
2957 p2 = repo.lookup(node)
2958 nodes = repo.changelog.findmissing(common=[p1], heads=[p2])
2958 nodes = repo.changelog.findmissing(common=[p1], heads=[p2])
2959
2959
2960 displayer = cmdutil.show_changeset(ui, repo, opts)
2960 displayer = cmdutil.show_changeset(ui, repo, opts)
2961 for node in nodes:
2961 for node in nodes:
2962 displayer.show(repo[node])
2962 displayer.show(repo[node])
2963 displayer.close()
2963 displayer.close()
2964 return 0
2964 return 0
2965
2965
2966 try:
2966 try:
2967 # ui.forcemerge is an internal variable, do not document
2967 # ui.forcemerge is an internal variable, do not document
2968 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
2968 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
2969 return hg.merge(repo, node, force=opts.get('force'))
2969 return hg.merge(repo, node, force=opts.get('force'))
2970 finally:
2970 finally:
2971 ui.setconfig('ui', 'forcemerge', '')
2971 ui.setconfig('ui', 'forcemerge', '')
2972
2972
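# Illustrative sketch (editor's example, not part of the diff): the implicit
# head selection above -- with exactly two branch heads, merge picks whichever
# one is not the working directory parent. Head names are invented.
bheads = ["head-a", "head-b"]
parent = "head-a"
node = parent == bheads[0] and bheads[-1] or bheads[0]
assert node == "head-b"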
2973 def outgoing(ui, repo, dest=None, **opts):
2973 def outgoing(ui, repo, dest=None, **opts):
2974 """show changesets not found in the destination
2974 """show changesets not found in the destination
2975
2975
2976 Show changesets not found in the specified destination repository
2976 Show changesets not found in the specified destination repository
2977 or the default push location. These are the changesets that would
2977 or the default push location. These are the changesets that would
2978 be pushed if a push was requested.
2978 be pushed if a push was requested.
2979
2979
2980 See pull for details of valid destination formats.
2980 See pull for details of valid destination formats.
2981
2981
2982 Returns 0 if there are outgoing changes, 1 otherwise.
2982 Returns 0 if there are outgoing changes, 1 otherwise.
2983 """
2983 """
2984
2984
2985 if opts.get('bookmarks'):
2985 if opts.get('bookmarks'):
2986 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2986 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2987 dest, branches = hg.parseurl(dest, opts.get('branch'))
2987 dest, branches = hg.parseurl(dest, opts.get('branch'))
2988 other = hg.repository(hg.remoteui(repo, opts), dest)
2988 other = hg.repository(hg.remoteui(repo, opts), dest)
2989 if 'bookmarks' not in other.listkeys('namespaces'):
2989 if 'bookmarks' not in other.listkeys('namespaces'):
2990 ui.warn(_("remote doesn't support bookmarks\n"))
2990 ui.warn(_("remote doesn't support bookmarks\n"))
2991 return 0
2991 return 0
2992 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
2992 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
2993 return bookmarks.diff(ui, other, repo)
2993 return bookmarks.diff(ui, other, repo)
2994
2994
2995 ret = hg.outgoing(ui, repo, dest, opts)
2995 ret = hg.outgoing(ui, repo, dest, opts)
2996 return ret
2996 return ret
2997
2997
2998 def parents(ui, repo, file_=None, **opts):
2998 def parents(ui, repo, file_=None, **opts):
2999 """show the parents of the working directory or revision
2999 """show the parents of the working directory or revision
3000
3000
3001 Print the working directory's parent revisions. If a revision is
3001 Print the working directory's parent revisions. If a revision is
3002 given via -r/--rev, the parent of that revision will be printed.
3002 given via -r/--rev, the parent of that revision will be printed.
3003 If a file argument is given, the revision in which the file was
3003 If a file argument is given, the revision in which the file was
3004 last changed (before the working directory revision or the
3004 last changed (before the working directory revision or the
3005 argument to --rev if given) is printed.
3005 argument to --rev if given) is printed.
3006
3006
3007 Returns 0 on success.
3007 Returns 0 on success.
3008 """
3008 """
3009
3009
3010 ctx = cmdutil.revsingle(repo, opts.get('rev'), None)
3010 ctx = cmdutil.revsingle(repo, opts.get('rev'), None)
3011
3011
3012 if file_:
3012 if file_:
3013 m = cmdutil.match(repo, (file_,), opts)
3013 m = cmdutil.match(repo, (file_,), opts)
3014 if m.anypats() or len(m.files()) != 1:
3014 if m.anypats() or len(m.files()) != 1:
3015 raise util.Abort(_('can only specify an explicit filename'))
3015 raise util.Abort(_('can only specify an explicit filename'))
3016 file_ = m.files()[0]
3016 file_ = m.files()[0]
3017 filenodes = []
3017 filenodes = []
3018 for cp in ctx.parents():
3018 for cp in ctx.parents():
3019 if not cp:
3019 if not cp:
3020 continue
3020 continue
3021 try:
3021 try:
3022 filenodes.append(cp.filenode(file_))
3022 filenodes.append(cp.filenode(file_))
3023 except error.LookupError:
3023 except error.LookupError:
3024 pass
3024 pass
3025 if not filenodes:
3025 if not filenodes:
3026 raise util.Abort(_("'%s' not found in manifest!") % file_)
3026 raise util.Abort(_("'%s' not found in manifest!") % file_)
3027 fl = repo.file(file_)
3027 fl = repo.file(file_)
3028 p = [repo.lookup(fl.linkrev(fl.rev(fn))) for fn in filenodes]
3028 p = [repo.lookup(fl.linkrev(fl.rev(fn))) for fn in filenodes]
3029 else:
3029 else:
3030 p = [cp.node() for cp in ctx.parents()]
3030 p = [cp.node() for cp in ctx.parents()]
3031
3031
3032 displayer = cmdutil.show_changeset(ui, repo, opts)
3032 displayer = cmdutil.show_changeset(ui, repo, opts)
3033 for n in p:
3033 for n in p:
3034 if n != nullid:
3034 if n != nullid:
3035 displayer.show(repo[n])
3035 displayer.show(repo[n])
3036 displayer.close()
3036 displayer.close()
3037
3037
3038 def paths(ui, repo, search=None):
3038 def paths(ui, repo, search=None):
3039 """show aliases for remote repositories
3039 """show aliases for remote repositories
3040
3040
3041 Show definition of symbolic path name NAME. If no name is given,
3041 Show definition of symbolic path name NAME. If no name is given,
3042 show definition of all available names.
3042 show definition of all available names.
3043
3043
3044 Path names are defined in the [paths] section of your
3044 Path names are defined in the [paths] section of your
3045 configuration file and in ``/etc/mercurial/hgrc``. If run inside a
3045 configuration file and in ``/etc/mercurial/hgrc``. If run inside a
3046 repository, ``.hg/hgrc`` is used, too.
3046 repository, ``.hg/hgrc`` is used, too.
3047
3047
3048 The path names ``default`` and ``default-push`` have a special
3048 The path names ``default`` and ``default-push`` have a special
3049 meaning. When performing a push or pull operation, they are used
3049 meaning. When performing a push or pull operation, they are used
3050 as fallbacks if no location is specified on the command-line.
3050 as fallbacks if no location is specified on the command-line.
3051 When ``default-push`` is set, it will be used for push and
3051 When ``default-push`` is set, it will be used for push and
3052 ``default`` will be used for pull; otherwise ``default`` is used
3052 ``default`` will be used for pull; otherwise ``default`` is used
3053 as the fallback for both. When cloning a repository, the clone
3053 as the fallback for both. When cloning a repository, the clone
3054 source is written as ``default`` in ``.hg/hgrc``. Note that
3054 source is written as ``default`` in ``.hg/hgrc``. Note that
3055 ``default`` and ``default-push`` apply to all inbound (e.g.
3055 ``default`` and ``default-push`` apply to all inbound (e.g.
3056 :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email` and
3056 :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email` and
3057 :hg:`bundle`) operations.
3057 :hg:`bundle`) operations.
3058
3058
3059 See :hg:`help urls` for more information.
3059 See :hg:`help urls` for more information.
3060
3060
3061 Returns 0 on success.
3061 Returns 0 on success.
3062 """
3062 """
3063 if search:
3063 if search:
3064 for name, path in ui.configitems("paths"):
3064 for name, path in ui.configitems("paths"):
3065 if name == search:
3065 if name == search:
3066 ui.write("%s\n" % util.hidepassword(path))
3066 ui.write("%s\n" % util.hidepassword(path))
3067 return
3067 return
3068 ui.warn(_("not found!\n"))
3068 ui.warn(_("not found!\n"))
3069 return 1
3069 return 1
3070 else:
3070 else:
3071 for name, path in ui.configitems("paths"):
3071 for name, path in ui.configitems("paths"):
3072 ui.write("%s = %s\n" % (name, util.hidepassword(path)))
3072 ui.write("%s = %s\n" % (name, util.hidepassword(path)))
3073
3073
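# Illustrative sketch (editor's example, not part of the diff): the lookup
# paths performs over the [paths] configuration section -- print the matching
# alias, or every alias when no name is given. The helper name and sample
# configuration are invented.
def showpaths(items, search=None):
    if search:
        for name, path in items:
            if name == search:
                print path
                return 0
        print "not found!"
        return 1
    for name, path in items:
        print "%s = %s" % (name, path)
    return 0

showpaths([('default', 'https://example.com/repo')], search='default')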
3074 def postincoming(ui, repo, modheads, optupdate, checkout):
3074 def postincoming(ui, repo, modheads, optupdate, checkout):
3075 if modheads == 0:
3075 if modheads == 0:
3076 return
3076 return
3077 if optupdate:
3077 if optupdate:
3078 if (modheads <= 1 or len(repo.branchheads()) == 1) or checkout:
3078 if (modheads <= 1 or len(repo.branchheads()) == 1) or checkout:
3079 return hg.update(repo, checkout)
3079 return hg.update(repo, checkout)
3080 else:
3080 else:
3081 ui.status(_("not updating, since new heads added\n"))
3081 ui.status(_("not updating, since new heads added\n"))
3082 if modheads > 1:
3082 if modheads > 1:
3083 currentbranchheads = len(repo.branchheads())
3083 currentbranchheads = len(repo.branchheads())
3084 if currentbranchheads == modheads:
3084 if currentbranchheads == modheads:
3085 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
3085 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
3086 elif currentbranchheads > 1:
3086 elif currentbranchheads > 1:
3087 ui.status(_("(run 'hg heads .' to see heads, 'hg merge' to merge)\n"))
3087 ui.status(_("(run 'hg heads .' to see heads, 'hg merge' to merge)\n"))
3088 else:
3088 else:
3089 ui.status(_("(run 'hg heads' to see heads)\n"))
3089 ui.status(_("(run 'hg heads' to see heads)\n"))
3090 else:
3090 else:
3091 ui.status(_("(run 'hg update' to get a working copy)\n"))
3091 ui.status(_("(run 'hg update' to get a working copy)\n"))
3092
3092
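# Illustrative sketch (editor's example, not part of the diff): the decision
# postincoming makes above -- update the working copy automatically only when
# the pull added no new head, the current branch still has a single head, or
# an explicit checkout was requested. The helper name is invented.
def shouldupdate(modheads, branchheads, checkout=None):
    if modheads == 0:
        return False
    return (modheads <= 1 or branchheads == 1) or bool(checkout)

assert shouldupdate(1, 1)
assert not shouldupdate(2, 2)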
3093 def pull(ui, repo, source="default", **opts):
3093 def pull(ui, repo, source="default", **opts):
3094 """pull changes from the specified source
3094 """pull changes from the specified source
3095
3095
3096 Pull changes from a remote repository to a local one.
3096 Pull changes from a remote repository to a local one.
3097
3097
3098 This finds all changes from the repository at the specified path
3098 This finds all changes from the repository at the specified path
3099 or URL and adds them to a local repository (the current one unless
3099 or URL and adds them to a local repository (the current one unless
3100 -R is specified). By default, this does not update the copy of the
3100 -R is specified). By default, this does not update the copy of the
3101 project in the working directory.
3101 project in the working directory.
3102
3102
3103 Use :hg:`incoming` if you want to see what would have been added
3103 Use :hg:`incoming` if you want to see what would have been added
3104 by a pull at the time you issued this command. If you then decide
3104 by a pull at the time you issued this command. If you then decide
3105 to add those changes to the repository, you should use :hg:`pull
3105 to add those changes to the repository, you should use :hg:`pull
3106 -r X` where ``X`` is the last changeset listed by :hg:`incoming`.
3106 -r X` where ``X`` is the last changeset listed by :hg:`incoming`.
3107
3107
3108 If SOURCE is omitted, the 'default' path will be used.
3108 If SOURCE is omitted, the 'default' path will be used.
3109 See :hg:`help urls` for more information.
3109 See :hg:`help urls` for more information.
3110
3110
3111 Returns 0 on success, 1 if an update had unresolved files.
3111 Returns 0 on success, 1 if an update had unresolved files.
3112 """
3112 """
3113 source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
3113 source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
3114 other = hg.repository(hg.remoteui(repo, opts), source)
3114 other = hg.repository(hg.remoteui(repo, opts), source)
3115 ui.status(_('pulling from %s\n') % util.hidepassword(source))
3115 ui.status(_('pulling from %s\n') % util.hidepassword(source))
3116 revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))
3116 revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))
3117
3117
3118 if opts.get('bookmark'):
3118 if opts.get('bookmark'):
3119 if not revs:
3119 if not revs:
3120 revs = []
3120 revs = []
3121 rb = other.listkeys('bookmarks')
3121 rb = other.listkeys('bookmarks')
3122 for b in opts['bookmark']:
3122 for b in opts['bookmark']:
3123 if b not in rb:
3123 if b not in rb:
3124 raise util.Abort(_('remote bookmark %s not found!') % b)
3124 raise util.Abort(_('remote bookmark %s not found!') % b)
3125 revs.append(rb[b])
3125 revs.append(rb[b])
3126
3126
3127 if revs:
3127 if revs:
3128 try:
3128 try:
3129 revs = [other.lookup(rev) for rev in revs]
3129 revs = [other.lookup(rev) for rev in revs]
3130 except error.CapabilityError:
3130 except error.CapabilityError:
3131 err = _("other repository doesn't support revision lookup, "
3131 err = _("other repository doesn't support revision lookup, "
3132 "so a rev cannot be specified.")
3132 "so a rev cannot be specified.")
3133 raise util.Abort(err)
3133 raise util.Abort(err)
3134
3134
3135 modheads = repo.pull(other, heads=revs, force=opts.get('force'))
3135 modheads = repo.pull(other, heads=revs, force=opts.get('force'))
3136 bookmarks.updatefromremote(ui, repo, other)
3136 bookmarks.updatefromremote(ui, repo, other)
3137 if checkout:
3137 if checkout:
3138 checkout = str(repo.changelog.rev(other.lookup(checkout)))
3138 checkout = str(repo.changelog.rev(other.lookup(checkout)))
3139 repo._subtoppath = source
3139 repo._subtoppath = source
3140 try:
3140 try:
3141 ret = postincoming(ui, repo, modheads, opts.get('update'), checkout)
3141 ret = postincoming(ui, repo, modheads, opts.get('update'), checkout)
3142
3142
3143 finally:
3143 finally:
3144 del repo._subtoppath
3144 del repo._subtoppath
3145
3145
3146 # update specified bookmarks
3146 # update specified bookmarks
3147 if opts.get('bookmark'):
3147 if opts.get('bookmark'):
3148 for b in opts['bookmark']:
3148 for b in opts['bookmark']:
3149 # explicit pull overrides local bookmark if any
3149 # explicit pull overrides local bookmark if any
3150 ui.status(_("importing bookmark %s\n") % b)
3150 ui.status(_("importing bookmark %s\n") % b)
3151 repo._bookmarks[b] = repo[rb[b]].node()
3151 repo._bookmarks[b] = repo[rb[b]].node()
3152 bookmarks.write(repo)
3152 bookmarks.write(repo)
3153
3153
3154 return ret
3154 return ret
3155
3155
3156 def push(ui, repo, dest=None, **opts):
3156 def push(ui, repo, dest=None, **opts):
3157 """push changes to the specified destination
3157 """push changes to the specified destination
3158
3158
3159 Push changesets from the local repository to the specified
3159 Push changesets from the local repository to the specified
3160 destination.
3160 destination.
3161
3161
3162 This operation is symmetrical to pull: it is identical to a pull
3162 This operation is symmetrical to pull: it is identical to a pull
3163 in the destination repository from the current one.
3163 in the destination repository from the current one.
3164
3164
3165 By default, push will not allow creation of new heads at the
3165 By default, push will not allow creation of new heads at the
3166 destination, since multiple heads would make it unclear which head
3166 destination, since multiple heads would make it unclear which head
3167 to use. In this situation, it is recommended to pull and merge
3167 to use. In this situation, it is recommended to pull and merge
3168 before pushing.
3168 before pushing.
3169
3169
3170 Use --new-branch if you want to allow push to create a new named
3170 Use --new-branch if you want to allow push to create a new named
3171 branch that is not present at the destination. This allows you to
3171 branch that is not present at the destination. This allows you to
3172 only create a new branch without forcing other changes.
3172 only create a new branch without forcing other changes.
3173
3173
3174 Use -f/--force to override the default behavior and push all
3174 Use -f/--force to override the default behavior and push all
3175 changesets on all branches.
3175 changesets on all branches.
3176
3176
3177 If -r/--rev is used, the specified revision and all its ancestors
3177 If -r/--rev is used, the specified revision and all its ancestors
3178 will be pushed to the remote repository.
3178 will be pushed to the remote repository.
3179
3179
3180 Please see :hg:`help urls` for important details about ``ssh://``
3180 Please see :hg:`help urls` for important details about ``ssh://``
3181 URLs. If DESTINATION is omitted, a default path will be used.
3181 URLs. If DESTINATION is omitted, a default path will be used.
3182
3182
3183 Returns 0 if push was successful, 1 if nothing to push.
3183 Returns 0 if push was successful, 1 if nothing to push.
3184 """
3184 """
3185
3185
3186 if opts.get('bookmark'):
3186 if opts.get('bookmark'):
3187 for b in opts['bookmark']:
3187 for b in opts['bookmark']:
3188 # translate -B options to -r so changesets get pushed
3188 # translate -B options to -r so changesets get pushed
3189 if b in repo._bookmarks:
3189 if b in repo._bookmarks:
3190 opts.setdefault('rev', []).append(b)
3190 opts.setdefault('rev', []).append(b)
3191 else:
3191 else:
3192 # if we try to push a deleted bookmark, translate it to null
3192 # if we try to push a deleted bookmark, translate it to null
3193 # this lets simultaneous -r, -b options continue working
3193 # this lets simultaneous -r, -b options continue working
3194 opts.setdefault('rev', []).append("null")
3194 opts.setdefault('rev', []).append("null")
3195
3195
3196 dest = ui.expandpath(dest or 'default-push', dest or 'default')
3196 dest = ui.expandpath(dest or 'default-push', dest or 'default')
3197 dest, branches = hg.parseurl(dest, opts.get('branch'))
3197 dest, branches = hg.parseurl(dest, opts.get('branch'))
3198 ui.status(_('pushing to %s\n') % util.hidepassword(dest))
3198 ui.status(_('pushing to %s\n') % util.hidepassword(dest))
3199 revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
3199 revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
3200 other = hg.repository(hg.remoteui(repo, opts), dest)
3200 other = hg.repository(hg.remoteui(repo, opts), dest)
3201 if revs:
3201 if revs:
3202 revs = [repo.lookup(rev) for rev in revs]
3202 revs = [repo.lookup(rev) for rev in revs]
3203
3203
3204 repo._subtoppath = dest
3204 repo._subtoppath = dest
3205 try:
3205 try:
3206 # push subrepos depth-first for coherent ordering
3206 # push subrepos depth-first for coherent ordering
3207 c = repo['']
3207 c = repo['']
3208 subs = c.substate # only repos that are committed
3208 subs = c.substate # only repos that are committed
3209 for s in sorted(subs):
3209 for s in sorted(subs):
3210 if not c.sub(s).push(opts.get('force')):
3210 if not c.sub(s).push(opts.get('force')):
3211 return False
3211 return False
3212 finally:
3212 finally:
3213 del repo._subtoppath
3213 del repo._subtoppath
3214 result = repo.push(other, opts.get('force'), revs=revs,
3214 result = repo.push(other, opts.get('force'), revs=revs,
3215 newbranch=opts.get('new_branch'))
3215 newbranch=opts.get('new_branch'))
3216
3216
3217 result = (result == 0)
3217 result = (result == 0)
3218
3218
3219 if opts.get('bookmark'):
3219 if opts.get('bookmark'):
3220 rb = other.listkeys('bookmarks')
3220 rb = other.listkeys('bookmarks')
3221 for b in opts['bookmark']:
3221 for b in opts['bookmark']:
3222 # explicit push overrides remote bookmark if any
3222 # explicit push overrides remote bookmark if any
3223 if b in repo._bookmarks:
3223 if b in repo._bookmarks:
3224 ui.status(_("exporting bookmark %s\n") % b)
3224 ui.status(_("exporting bookmark %s\n") % b)
3225 new = repo[b].hex()
3225 new = repo[b].hex()
3226 elif b in rb:
3226 elif b in rb:
3227 ui.status(_("deleting remote bookmark %s\n") % b)
3227 ui.status(_("deleting remote bookmark %s\n") % b)
3228 new = '' # delete
3228 new = '' # delete
3229 else:
3229 else:
3230 ui.warn(_('bookmark %s does not exist on the local '
3230 ui.warn(_('bookmark %s does not exist on the local '
3231 'or remote repository!\n') % b)
3231 'or remote repository!\n') % b)
3232 return 2
3232 return 2
3233 old = rb.get(b, '')
3233 old = rb.get(b, '')
3234 r = other.pushkey('bookmarks', b, old, new)
3234 r = other.pushkey('bookmarks', b, old, new)
3235 if not r:
3235 if not r:
3236 ui.warn(_('updating bookmark %s failed!\n') % b)
3236 ui.warn(_('updating bookmark %s failed!\n') % b)
3237 if not result:
3237 if not result:
3238 result = 2
3238 result = 2
3239
3239
3240 return result
3240 return result
3241
3241
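The bookmark-export loop above is, in effect, a compare-and-swap: ``other.pushkey('bookmarks', b, old, new)`` is expected to succeed only if the remote's current value for the bookmark still matches ``old``, and an empty ``new`` requests deletion. A minimal sketch of that rule against a hypothetical in-memory store (illustration only; ``BookmarkStore`` is not part of Mercurial):

# Illustration only: hypothetical stand-in for a remote's 'bookmarks'
# pushkey namespace, showing the compare-and-swap rule relied on above.
class BookmarkStore(object):
    def __init__(self):
        self.marks = {}                      # bookmark name -> hex node

    def pushkey(self, key, old, new):
        if self.marks.get(key, '') != old:   # stale 'old' value: refuse
            return False
        if new == '':
            self.marks.pop(key, None)        # empty new value deletes
        else:
            self.marks[key] = new
        return True

store = BookmarkStore()
assert store.pushkey('feature', '', 'abc123')        # create
assert not store.pushkey('feature', 'wrong', 'def')  # refused, old mismatch
assert store.pushkey('feature', 'abc123', '')        # delete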
3242 def recover(ui, repo):
3242 def recover(ui, repo):
3243 """roll back an interrupted transaction
3243 """roll back an interrupted transaction
3244
3244
3245 Recover from an interrupted commit or pull.
3245 Recover from an interrupted commit or pull.
3246
3246
3247 This command tries to fix the repository status after an
3247 This command tries to fix the repository status after an
3248 interrupted operation. It should only be necessary when Mercurial
3248 interrupted operation. It should only be necessary when Mercurial
3249 suggests it.
3249 suggests it.
3250
3250
3251 Returns 0 if successful, 1 if nothing to recover or verify fails.
3251 Returns 0 if successful, 1 if nothing to recover or verify fails.
3252 """
3252 """
3253 if repo.recover():
3253 if repo.recover():
3254 return hg.verify(repo)
3254 return hg.verify(repo)
3255 return 1
3255 return 1
3256
3256
3257 def remove(ui, repo, *pats, **opts):
3257 def remove(ui, repo, *pats, **opts):
3258 """remove the specified files on the next commit
3258 """remove the specified files on the next commit
3259
3259
3260 Schedule the indicated files for removal from the repository.
3260 Schedule the indicated files for removal from the repository.
3261
3261
3262 This only removes files from the current branch, not from the
3262 This only removes files from the current branch, not from the
3263 entire project history. -A/--after can be used to remove only
3263 entire project history. -A/--after can be used to remove only
3264 files that have already been deleted, -f/--force can be used to
3264 files that have already been deleted, -f/--force can be used to
3265 force deletion, and -Af can be used to remove files from the next
3265 force deletion, and -Af can be used to remove files from the next
3266 revision without deleting them from the working directory.
3266 revision without deleting them from the working directory.
3267
3267
3268 The following table details the behavior of remove for different
3268 The following table details the behavior of remove for different
3269 file states (columns) and option combinations (rows). The file
3269 file states (columns) and option combinations (rows). The file
3270 states are Added [A], Clean [C], Modified [M] and Missing [!] (as
3270 states are Added [A], Clean [C], Modified [M] and Missing [!] (as
3271 reported by :hg:`status`). The actions are Warn, Remove (from
3271 reported by :hg:`status`). The actions are Warn, Remove (from
3272 branch) and Delete (from disk)::
3272 branch) and Delete (from disk)::
3273
3273
3274 A C M !
3274 A C M !
3275 none W RD W R
3275 none W RD W R
3276 -f R RD RD R
3276 -f R RD RD R
3277 -A W W W R
3277 -A W W W R
3278 -Af R R R R
3278 -Af R R R R
3279
3279
3280 This command schedules the files to be removed at the next commit.
3280 This command schedules the files to be removed at the next commit.
3281 To undo a remove before that, see :hg:`revert`.
3281 To undo a remove before that, see :hg:`revert`.
3282
3282
3283 Returns 0 on success, 1 if any warnings encountered.
3283 Returns 0 on success, 1 if any warnings encountered.
3284 """
3284 """
3285
3285
3286 ret = 0
3286 ret = 0
3287 after, force = opts.get('after'), opts.get('force')
3287 after, force = opts.get('after'), opts.get('force')
3288 if not pats and not after:
3288 if not pats and not after:
3289 raise util.Abort(_('no files specified'))
3289 raise util.Abort(_('no files specified'))
3290
3290
3291 m = cmdutil.match(repo, pats, opts)
3291 m = cmdutil.match(repo, pats, opts)
3292 s = repo.status(match=m, clean=True)
3292 s = repo.status(match=m, clean=True)
3293 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
3293 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
3294
3294
3295 for f in m.files():
3295 for f in m.files():
3296 if f not in repo.dirstate and not os.path.isdir(m.rel(f)):
3296 if f not in repo.dirstate and not os.path.isdir(m.rel(f)):
3297 ui.warn(_('not removing %s: file is untracked\n') % m.rel(f))
3297 ui.warn(_('not removing %s: file is untracked\n') % m.rel(f))
3298 ret = 1
3298 ret = 1
3299
3299
3300 if force:
3300 if force:
3301 remove, forget = modified + deleted + clean, added
3301 remove, forget = modified + deleted + clean, added
3302 elif after:
3302 elif after:
3303 remove, forget = deleted, []
3303 remove, forget = deleted, []
3304 for f in modified + added + clean:
3304 for f in modified + added + clean:
3305 ui.warn(_('not removing %s: file still exists (use -f'
3305 ui.warn(_('not removing %s: file still exists (use -f'
3306 ' to force removal)\n') % m.rel(f))
3306 ' to force removal)\n') % m.rel(f))
3307 ret = 1
3307 ret = 1
3308 else:
3308 else:
3309 remove, forget = deleted + clean, []
3309 remove, forget = deleted + clean, []
3310 for f in modified:
3310 for f in modified:
3311 ui.warn(_('not removing %s: file is modified (use -f'
3311 ui.warn(_('not removing %s: file is modified (use -f'
3312 ' to force removal)\n') % m.rel(f))
3312 ' to force removal)\n') % m.rel(f))
3313 ret = 1
3313 ret = 1
3314 for f in added:
3314 for f in added:
3315 ui.warn(_('not removing %s: file has been marked for add (use -f'
3315 ui.warn(_('not removing %s: file has been marked for add (use -f'
3316 ' to force removal)\n') % m.rel(f))
3316 ' to force removal)\n') % m.rel(f))
3317 ret = 1
3317 ret = 1
3318
3318
3319 for f in sorted(remove + forget):
3319 for f in sorted(remove + forget):
3320 if ui.verbose or not m.exact(f):
3320 if ui.verbose or not m.exact(f):
3321 ui.status(_('removing %s\n') % m.rel(f))
3321 ui.status(_('removing %s\n') % m.rel(f))
3322
3322
3323 repo[None].forget(forget)
3323 repo[None].forget(forget)
3324 repo[None].remove(remove, unlink=not after)
3324 repo[None].remove(remove, unlink=not after)
3325 return ret
3325 return ret
3326
3326
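The option/state table in the remove docstring above maps directly onto the three branches of the function body. Read as a plain lookup it looks like the sketch below (illustration only; ``REMOVE_TABLE`` and ``remove_action`` are hypothetical names, with W = warn, R = remove from branch, D = delete from disk):

# Illustration only: the behaviour table from the docstring, keyed by
# option combination and file state. W=warn, R=remove, D=delete.
REMOVE_TABLE = {
    'none': {'A': 'W', 'C': 'RD', 'M': 'W',  '!': 'R'},
    '-f':   {'A': 'R', 'C': 'RD', 'M': 'RD', '!': 'R'},
    '-A':   {'A': 'W', 'C': 'W',  'M': 'W',  '!': 'R'},
    '-Af':  {'A': 'R', 'C': 'R',  'M': 'R',  '!': 'R'},
}

def remove_action(opts, state):
    return REMOVE_TABLE[opts][state]

assert remove_action('-Af', 'C') == 'R'    # untrack, keep file on disk
assert remove_action('none', 'M') == 'W'   # modified: warn, do nothing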
3327 def rename(ui, repo, *pats, **opts):
3327 def rename(ui, repo, *pats, **opts):
3328 """rename files; equivalent of copy + remove
3328 """rename files; equivalent of copy + remove
3329
3329
3330 Mark dest as copies of sources; mark sources for deletion. If dest
3330 Mark dest as copies of sources; mark sources for deletion. If dest
3331 is a directory, copies are put in that directory. If dest is a
3331 is a directory, copies are put in that directory. If dest is a
3332 file, there can only be one source.
3332 file, there can only be one source.
3333
3333
3334 By default, this command copies the contents of files as they
3334 By default, this command copies the contents of files as they
3335 exist in the working directory. If invoked with -A/--after, the
3335 exist in the working directory. If invoked with -A/--after, the
3336 operation is recorded, but no copying is performed.
3336 operation is recorded, but no copying is performed.
3337
3337
3338 This command takes effect at the next commit. To undo a rename
3338 This command takes effect at the next commit. To undo a rename
3339 before that, see :hg:`revert`.
3339 before that, see :hg:`revert`.
3340
3340
3341 Returns 0 on success, 1 if errors are encountered.
3341 Returns 0 on success, 1 if errors are encountered.
3342 """
3342 """
3343 wlock = repo.wlock(False)
3343 wlock = repo.wlock(False)
3344 try:
3344 try:
3345 return cmdutil.copy(ui, repo, pats, opts, rename=True)
3345 return cmdutil.copy(ui, repo, pats, opts, rename=True)
3346 finally:
3346 finally:
3347 wlock.release()
3347 wlock.release()
3348
3348
3349 def resolve(ui, repo, *pats, **opts):
3349 def resolve(ui, repo, *pats, **opts):
3350 """redo merges or set/view the merge status of files
3350 """redo merges or set/view the merge status of files
3351
3351
3352 Merges with unresolved conflicts are often the result of
3352 Merges with unresolved conflicts are often the result of
3353 non-interactive merging using the ``internal:merge`` configuration
3353 non-interactive merging using the ``internal:merge`` configuration
3354 setting, or a command-line merge tool like ``diff3``. The resolve
3354 setting, or a command-line merge tool like ``diff3``. The resolve
3355 command is used to manage the files involved in a merge, after
3355 command is used to manage the files involved in a merge, after
3356 :hg:`merge` has been run, and before :hg:`commit` is run (i.e. the
3356 :hg:`merge` has been run, and before :hg:`commit` is run (i.e. the
3357 working directory must have two parents).
3357 working directory must have two parents).
3358
3358
3359 The resolve command can be used in the following ways:
3359 The resolve command can be used in the following ways:
3360
3360
3361 - :hg:`resolve [--tool TOOL] FILE...`: attempt to re-merge the specified
3361 - :hg:`resolve [--tool TOOL] FILE...`: attempt to re-merge the specified
3362 files, discarding any previous merge attempts. Re-merging is not
3362 files, discarding any previous merge attempts. Re-merging is not
3363 performed for files already marked as resolved. Use ``--all/-a``
3363 performed for files already marked as resolved. Use ``--all/-a``
3364 to selects all unresolved files. ``--tool`` can be used to specify
3364 to selects all unresolved files. ``--tool`` can be used to specify
3365 the merge tool used for the given files. It overrides the HGMERGE
3365 the merge tool used for the given files. It overrides the HGMERGE
3366 environment variable and your configuration files.
3366 environment variable and your configuration files.
3367
3367
3368 - :hg:`resolve -m [FILE]`: mark a file as having been resolved
3368 - :hg:`resolve -m [FILE]`: mark a file as having been resolved
3369 (e.g. after having manually fixed up the files). The default is
3369 (e.g. after having manually fixed up the files). The default is
3370 to mark all unresolved files.
3370 to mark all unresolved files.
3371
3371
3372 - :hg:`resolve -u [FILE]...`: mark a file as unresolved. The
3372 - :hg:`resolve -u [FILE]...`: mark a file as unresolved. The
3373 default is to mark all resolved files.
3373 default is to mark all resolved files.
3374
3374
3375 - :hg:`resolve -l`: list files which had or still have conflicts.
3375 - :hg:`resolve -l`: list files which had or still have conflicts.
3376 In the printed list, ``U`` = unresolved and ``R`` = resolved.
3376 In the printed list, ``U`` = unresolved and ``R`` = resolved.
3377
3377
3378 Note that Mercurial will not let you commit files with unresolved
3378 Note that Mercurial will not let you commit files with unresolved
3379 merge conflicts. You must use :hg:`resolve -m ...` before you can
3379 merge conflicts. You must use :hg:`resolve -m ...` before you can
3380 commit after a conflicting merge.
3380 commit after a conflicting merge.
3381
3381
3382 Returns 0 on success, 1 if any files fail a resolve attempt.
3382 Returns 0 on success, 1 if any files fail a resolve attempt.
3383 """
3383 """
3384
3384
3385 all, mark, unmark, show, nostatus = \
3385 all, mark, unmark, show, nostatus = \
3386 [opts.get(o) for o in 'all mark unmark list no_status'.split()]
3386 [opts.get(o) for o in 'all mark unmark list no_status'.split()]
3387
3387
3388 if (show and (mark or unmark)) or (mark and unmark):
3388 if (show and (mark or unmark)) or (mark and unmark):
3389 raise util.Abort(_("too many options specified"))
3389 raise util.Abort(_("too many options specified"))
3390 if pats and all:
3390 if pats and all:
3391 raise util.Abort(_("can't specify --all and patterns"))
3391 raise util.Abort(_("can't specify --all and patterns"))
3392 if not (all or pats or show or mark or unmark):
3392 if not (all or pats or show or mark or unmark):
3393 raise util.Abort(_('no files or directories specified; '
3393 raise util.Abort(_('no files or directories specified; '
3394 'use --all to remerge all files'))
3394 'use --all to remerge all files'))
3395
3395
3396 ms = mergemod.mergestate(repo)
3396 ms = mergemod.mergestate(repo)
3397 m = cmdutil.match(repo, pats, opts)
3397 m = cmdutil.match(repo, pats, opts)
3398 ret = 0
3398 ret = 0
3399
3399
3400 for f in ms:
3400 for f in ms:
3401 if m(f):
3401 if m(f):
3402 if show:
3402 if show:
3403 if nostatus:
3403 if nostatus:
3404 ui.write("%s\n" % f)
3404 ui.write("%s\n" % f)
3405 else:
3405 else:
3406 ui.write("%s %s\n" % (ms[f].upper(), f),
3406 ui.write("%s %s\n" % (ms[f].upper(), f),
3407 label='resolve.' +
3407 label='resolve.' +
3408 {'u': 'unresolved', 'r': 'resolved'}[ms[f]])
3408 {'u': 'unresolved', 'r': 'resolved'}[ms[f]])
3409 elif mark:
3409 elif mark:
3410 ms.mark(f, "r")
3410 ms.mark(f, "r")
3411 elif unmark:
3411 elif unmark:
3412 ms.mark(f, "u")
3412 ms.mark(f, "u")
3413 else:
3413 else:
3414 wctx = repo[None]
3414 wctx = repo[None]
3415 mctx = wctx.parents()[-1]
3415 mctx = wctx.parents()[-1]
3416
3416
3417 # backup pre-resolve (merge uses .orig for its own purposes)
3417 # backup pre-resolve (merge uses .orig for its own purposes)
3418 a = repo.wjoin(f)
3418 a = repo.wjoin(f)
3419 util.copyfile(a, a + ".resolve")
3419 util.copyfile(a, a + ".resolve")
3420
3420
3421 try:
3421 try:
3422 # resolve file
3422 # resolve file
3423 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
3423 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
3424 if ms.resolve(f, wctx, mctx):
3424 if ms.resolve(f, wctx, mctx):
3425 ret = 1
3425 ret = 1
3426 finally:
3426 finally:
3427 ui.setconfig('ui', 'forcemerge', '')
3427 ui.setconfig('ui', 'forcemerge', '')
3428
3428
3429 # replace filemerge's .orig file with our resolve file
3429 # replace filemerge's .orig file with our resolve file
3430 util.rename(a + ".resolve", a + ".orig")
3430 util.rename(a + ".resolve", a + ".orig")
3431
3431
3432 ms.commit()
3432 ms.commit()
3433 return ret
3433 return ret
3434
3434
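The re-merge branch of resolve above protects the user's pre-resolve file: the working copy is snapshotted as ``FILE.resolve`` before the merge tool runs (the tool writes its own ``FILE.orig``), and the snapshot is then renamed over ``FILE.orig`` so the backup reflects the state before resolve was invoked. A standalone sketch of that pattern with plain file operations (illustration only; ``remerge_with_backup`` and ``merge_tool`` are hypothetical, and Mercurial's ``util`` helpers are not used):

# Illustration only: keep a pre-merge snapshot across a tool run that
# may rewrite both the file and its .orig backup.
import os, shutil

def remerge_with_backup(path, merge_tool):
    backup = path + '.resolve'
    shutil.copyfile(path, backup)          # snapshot before re-merging
    failed = merge_tool(path)              # may rewrite path and path + '.orig'
    os.rename(backup, path + '.orig')      # snapshot becomes the .orig backup
    return failed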
3435 def revert(ui, repo, *pats, **opts):
3435 def revert(ui, repo, *pats, **opts):
3436 """restore individual files or directories to an earlier state
3436 """restore individual files or directories to an earlier state
3437
3437
3438 .. note::
3438 .. note::
3439 This command is most likely not what you are looking for.
3439 This command is most likely not what you are looking for.
3440 Revert will partially overwrite content in the working
3440 Revert will partially overwrite content in the working
3441 directory without changing the working directory parents. Use
3441 directory without changing the working directory parents. Use
3442 :hg:`update -r rev` to check out earlier revisions, or
3442 :hg:`update -r rev` to check out earlier revisions, or
3443 :hg:`update --clean .` to undo a merge which has added another
3443 :hg:`update --clean .` to undo a merge which has added another
3444 parent.
3444 parent.
3445
3445
3446 With no revision specified, revert the named files or directories
3446 With no revision specified, revert the named files or directories
3447 to the contents they had in the parent of the working directory.
3447 to the contents they had in the parent of the working directory.
3448 This restores the contents of the affected files to an unmodified
3448 This restores the contents of the affected files to an unmodified
3449 state and unschedules adds, removes, copies, and renames. If the
3449 state and unschedules adds, removes, copies, and renames. If the
3450 working directory has two parents, you must explicitly specify a
3450 working directory has two parents, you must explicitly specify a
3451 revision.
3451 revision.
3452
3452
3453 Using the -r/--rev option, revert the given files or directories
3453 Using the -r/--rev option, revert the given files or directories
3454 to their contents as of a specific revision. This can be helpful
3454 to their contents as of a specific revision. This can be helpful
3455 to "roll back" some or all of an earlier change. See :hg:`help
3455 to "roll back" some or all of an earlier change. See :hg:`help
3456 dates` for a list of formats valid for -d/--date.
3456 dates` for a list of formats valid for -d/--date.
3457
3457
3458 Revert modifies the working directory. It does not commit any
3458 Revert modifies the working directory. It does not commit any
3459 changes, or change the parent of the working directory. If you
3459 changes, or change the parent of the working directory. If you
3460 revert to a revision other than the parent of the working
3460 revert to a revision other than the parent of the working
3461 directory, the reverted files will thus appear modified
3461 directory, the reverted files will thus appear modified
3462 afterwards.
3462 afterwards.
3463
3463
3464 If a file has been deleted, it is restored. Files scheduled for
3464 If a file has been deleted, it is restored. Files scheduled for
3465 addition are just unscheduled and left as they are. If the
3465 addition are just unscheduled and left as they are. If the
3466 executable mode of a file was changed, it is reset.
3466 executable mode of a file was changed, it is reset.
3467
3467
3468 If names are given, all files matching the names are reverted.
3468 If names are given, all files matching the names are reverted.
3469 If no arguments are given, no files are reverted.
3469 If no arguments are given, no files are reverted.
3470
3470
3471 Modified files are saved with a .orig suffix before reverting.
3471 Modified files are saved with a .orig suffix before reverting.
3472 To disable these backups, use --no-backup.
3472 To disable these backups, use --no-backup.
3473
3473
3474 Returns 0 on success.
3474 Returns 0 on success.
3475 """
3475 """
3476
3476
3477 if opts.get("date"):
3477 if opts.get("date"):
3478 if opts.get("rev"):
3478 if opts.get("rev"):
3479 raise util.Abort(_("you can't specify a revision and a date"))
3479 raise util.Abort(_("you can't specify a revision and a date"))
3480 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
3480 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
3481
3481
3482 parent, p2 = repo.dirstate.parents()
3482 parent, p2 = repo.dirstate.parents()
3483 if not opts.get('rev') and p2 != nullid:
3483 if not opts.get('rev') and p2 != nullid:
3484 raise util.Abort(_('uncommitted merge - '
3484 raise util.Abort(_('uncommitted merge - '
3485 'use "hg update", see "hg help revert"'))
3485 'use "hg update", see "hg help revert"'))
3486
3486
3487 if not pats and not opts.get('all'):
3487 if not pats and not opts.get('all'):
3488 raise util.Abort(_('no files or directories specified; '
3488 raise util.Abort(_('no files or directories specified; '
3489 'use --all to revert the whole repo'))
3489 'use --all to revert the whole repo'))
3490
3490
3491 ctx = cmdutil.revsingle(repo, opts.get('rev'))
3491 ctx = cmdutil.revsingle(repo, opts.get('rev'))
3492 node = ctx.node()
3492 node = ctx.node()
3493 mf = ctx.manifest()
3493 mf = ctx.manifest()
3494 if node == parent:
3494 if node == parent:
3495 pmf = mf
3495 pmf = mf
3496 else:
3496 else:
3497 pmf = None
3497 pmf = None
3498
3498
3499 # need all matching names in dirstate and manifest of target rev,
3499 # need all matching names in dirstate and manifest of target rev,
3500 # so have to walk both. do not print errors if files exist in one
3500 # so have to walk both. do not print errors if files exist in one
3501 # but not the other.
3501 # but not the other.
3502
3502
3503 names = {}
3503 names = {}
3504
3504
3505 wlock = repo.wlock()
3505 wlock = repo.wlock()
3506 try:
3506 try:
3507 # walk dirstate.
3507 # walk dirstate.
3508
3508
3509 m = cmdutil.match(repo, pats, opts)
3509 m = cmdutil.match(repo, pats, opts)
3510 m.bad = lambda x, y: False
3510 m.bad = lambda x, y: False
3511 for abs in repo.walk(m):
3511 for abs in repo.walk(m):
3512 names[abs] = m.rel(abs), m.exact(abs)
3512 names[abs] = m.rel(abs), m.exact(abs)
3513
3513
3514 # walk target manifest.
3514 # walk target manifest.
3515
3515
3516 def badfn(path, msg):
3516 def badfn(path, msg):
3517 if path in names:
3517 if path in names:
3518 return
3518 return
3519 path_ = path + '/'
3519 path_ = path + '/'
3520 for f in names:
3520 for f in names:
3521 if f.startswith(path_):
3521 if f.startswith(path_):
3522 return
3522 return
3523 ui.warn("%s: %s\n" % (m.rel(path), msg))
3523 ui.warn("%s: %s\n" % (m.rel(path), msg))
3524
3524
3525 m = cmdutil.match(repo, pats, opts)
3525 m = cmdutil.match(repo, pats, opts)
3526 m.bad = badfn
3526 m.bad = badfn
3527 for abs in repo[node].walk(m):
3527 for abs in repo[node].walk(m):
3528 if abs not in names:
3528 if abs not in names:
3529 names[abs] = m.rel(abs), m.exact(abs)
3529 names[abs] = m.rel(abs), m.exact(abs)
3530
3530
3531 m = cmdutil.matchfiles(repo, names)
3531 m = cmdutil.matchfiles(repo, names)
3532 changes = repo.status(match=m)[:4]
3532 changes = repo.status(match=m)[:4]
3533 modified, added, removed, deleted = map(set, changes)
3533 modified, added, removed, deleted = map(set, changes)
3534
3534
3535 # if f is a rename, also revert the source
3535 # if f is a rename, also revert the source
3536 cwd = repo.getcwd()
3536 cwd = repo.getcwd()
3537 for f in added:
3537 for f in added:
3538 src = repo.dirstate.copied(f)
3538 src = repo.dirstate.copied(f)
3539 if src and src not in names and repo.dirstate[src] == 'r':
3539 if src and src not in names and repo.dirstate[src] == 'r':
3540 removed.add(src)
3540 removed.add(src)
3541 names[src] = (repo.pathto(src, cwd), True)
3541 names[src] = (repo.pathto(src, cwd), True)
3542
3542
3543 def removeforget(abs):
3543 def removeforget(abs):
3544 if repo.dirstate[abs] == 'a':
3544 if repo.dirstate[abs] == 'a':
3545 return _('forgetting %s\n')
3545 return _('forgetting %s\n')
3546 return _('removing %s\n')
3546 return _('removing %s\n')
3547
3547
3548 revert = ([], _('reverting %s\n'))
3548 revert = ([], _('reverting %s\n'))
3549 add = ([], _('adding %s\n'))
3549 add = ([], _('adding %s\n'))
3550 remove = ([], removeforget)
3550 remove = ([], removeforget)
3551 undelete = ([], _('undeleting %s\n'))
3551 undelete = ([], _('undeleting %s\n'))
3552
3552
3553 disptable = (
3553 disptable = (
3554 # dispatch table:
3554 # dispatch table:
3555 # file state
3555 # file state
3556 # action if in target manifest
3556 # action if in target manifest
3557 # action if not in target manifest
3557 # action if not in target manifest
3558 # make backup if in target manifest
3558 # make backup if in target manifest
3559 # make backup if not in target manifest
3559 # make backup if not in target manifest
3560 (modified, revert, remove, True, True),
3560 (modified, revert, remove, True, True),
3561 (added, revert, remove, True, False),
3561 (added, revert, remove, True, False),
3562 (removed, undelete, None, False, False),
3562 (removed, undelete, None, False, False),
3563 (deleted, revert, remove, False, False),
3563 (deleted, revert, remove, False, False),
3564 )
3564 )
3565
3565
3566 for abs, (rel, exact) in sorted(names.items()):
3566 for abs, (rel, exact) in sorted(names.items()):
3567 mfentry = mf.get(abs)
3567 mfentry = mf.get(abs)
3568 target = repo.wjoin(abs)
3568 target = repo.wjoin(abs)
3569 def handle(xlist, dobackup):
3569 def handle(xlist, dobackup):
3570 xlist[0].append(abs)
3570 xlist[0].append(abs)
3571 if (dobackup and not opts.get('no_backup') and
3571 if (dobackup and not opts.get('no_backup') and
3572 os.path.lexists(target)):
3572 os.path.lexists(target)):
3573 bakname = "%s.orig" % rel
3573 bakname = "%s.orig" % rel
3574 ui.note(_('saving current version of %s as %s\n') %
3574 ui.note(_('saving current version of %s as %s\n') %
3575 (rel, bakname))
3575 (rel, bakname))
3576 if not opts.get('dry_run'):
3576 if not opts.get('dry_run'):
3577 util.rename(target, bakname)
3577 util.rename(target, bakname)
3578 if ui.verbose or not exact:
3578 if ui.verbose or not exact:
3579 msg = xlist[1]
3579 msg = xlist[1]
3580 if not isinstance(msg, basestring):
3580 if not isinstance(msg, basestring):
3581 msg = msg(abs)
3581 msg = msg(abs)
3582 ui.status(msg % rel)
3582 ui.status(msg % rel)
3583 for table, hitlist, misslist, backuphit, backupmiss in disptable:
3583 for table, hitlist, misslist, backuphit, backupmiss in disptable:
3584 if abs not in table:
3584 if abs not in table:
3585 continue
3585 continue
3586 # file has changed in dirstate
3586 # file has changed in dirstate
3587 if mfentry:
3587 if mfentry:
3588 handle(hitlist, backuphit)
3588 handle(hitlist, backuphit)
3589 elif misslist is not None:
3589 elif misslist is not None:
3590 handle(misslist, backupmiss)
3590 handle(misslist, backupmiss)
3591 break
3591 break
3592 else:
3592 else:
3593 if abs not in repo.dirstate:
3593 if abs not in repo.dirstate:
3594 if mfentry:
3594 if mfentry:
3595 handle(add, True)
3595 handle(add, True)
3596 elif exact:
3596 elif exact:
3597 ui.warn(_('file not managed: %s\n') % rel)
3597 ui.warn(_('file not managed: %s\n') % rel)
3598 continue
3598 continue
3599 # file has not changed in dirstate
3599 # file has not changed in dirstate
3600 if node == parent:
3600 if node == parent:
3601 if exact:
3601 if exact:
3602 ui.warn(_('no changes needed to %s\n') % rel)
3602 ui.warn(_('no changes needed to %s\n') % rel)
3603 continue
3603 continue
3604 if pmf is None:
3604 if pmf is None:
3605 # only need parent manifest in this unlikely case,
3605 # only need parent manifest in this unlikely case,
3606 # so do not read by default
3606 # so do not read by default
3607 pmf = repo[parent].manifest()
3607 pmf = repo[parent].manifest()
3608 if abs in pmf:
3608 if abs in pmf:
3609 if mfentry:
3609 if mfentry:
3610 # if version of file is same in parent and target
3610 # if version of file is same in parent and target
3611 # manifests, do nothing
3611 # manifests, do nothing
3612 if (pmf[abs] != mfentry or
3612 if (pmf[abs] != mfentry or
3613 pmf.flags(abs) != mf.flags(abs)):
3613 pmf.flags(abs) != mf.flags(abs)):
3614 handle(revert, False)
3614 handle(revert, False)
3615 else:
3615 else:
3616 handle(remove, False)
3616 handle(remove, False)
3617
3617
3618 if not opts.get('dry_run'):
3618 if not opts.get('dry_run'):
3619 def checkout(f):
3619 def checkout(f):
3620 fc = ctx[f]
3620 fc = ctx[f]
3621 repo.wwrite(f, fc.data(), fc.flags())
3621 repo.wwrite(f, fc.data(), fc.flags())
3622
3622
3623 audit_path = scmutil.pathauditor(repo.root)
3623 audit_path = scmutil.pathauditor(repo.root)
3624 for f in remove[0]:
3624 for f in remove[0]:
3625 if repo.dirstate[f] == 'a':
3625 if repo.dirstate[f] == 'a':
3626 repo.dirstate.forget(f)
3626 repo.dirstate.forget(f)
3627 continue
3627 continue
3628 audit_path(f)
3628 audit_path(f)
3629 try:
3629 try:
3630 util.unlinkpath(repo.wjoin(f))
3630 util.unlinkpath(repo.wjoin(f))
3631 except OSError:
3631 except OSError:
3632 pass
3632 pass
3633 repo.dirstate.remove(f)
3633 repo.dirstate.remove(f)
3634
3634
3635 normal = None
3635 normal = None
3636 if node == parent:
3636 if node == parent:
3637 # We're reverting to our parent. If possible, we'd like status
3637 # We're reverting to our parent. If possible, we'd like status
3638 # to report the file as clean. We have to use normallookup for
3638 # to report the file as clean. We have to use normallookup for
3639 # merges to avoid losing information about merged/dirty files.
3639 # merges to avoid losing information about merged/dirty files.
3640 if p2 != nullid:
3640 if p2 != nullid:
3641 normal = repo.dirstate.normallookup
3641 normal = repo.dirstate.normallookup
3642 else:
3642 else:
3643 normal = repo.dirstate.normal
3643 normal = repo.dirstate.normal
3644 for f in revert[0]:
3644 for f in revert[0]:
3645 checkout(f)
3645 checkout(f)
3646 if normal:
3646 if normal:
3647 normal(f)
3647 normal(f)
3648
3648
3649 for f in add[0]:
3649 for f in add[0]:
3650 checkout(f)
3650 checkout(f)
3651 repo.dirstate.add(f)
3651 repo.dirstate.add(f)
3652
3652
3653 normal = repo.dirstate.normallookup
3653 normal = repo.dirstate.normallookup
3654 if node == parent and p2 == nullid:
3654 if node == parent and p2 == nullid:
3655 normal = repo.dirstate.normal
3655 normal = repo.dirstate.normal
3656 for f in undelete[0]:
3656 for f in undelete[0]:
3657 checkout(f)
3657 checkout(f)
3658 normal(f)
3658 normal(f)
3659
3659
3660 finally:
3660 finally:
3661 wlock.release()
3661 wlock.release()
3662
3662
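The ``disptable`` above is the heart of revert: for each working-directory status bucket there is one action (and backup flag) when the file exists in the target manifest and another when it does not. A compact sketch of the same lookup (illustration only; ``DISPTABLE`` and ``revert_action`` are hypothetical names):

# Illustration only: revert's dispatch table as
# (action if in target, action if not, backup if in, backup if not).
DISPTABLE = {
    'modified': ('revert',   'remove', True,  True),
    'added':    ('revert',   'remove', True,  False),
    'removed':  ('undelete', None,     False, False),
    'deleted':  ('revert',   'remove', False, False),
}

def revert_action(status, in_target_manifest):
    hit, miss, backuphit, backupmiss = DISPTABLE[status]
    if in_target_manifest:
        return hit, backuphit
    return miss, backupmiss

assert revert_action('modified', True) == ('revert', True)
assert revert_action('added', False) == ('remove', False)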
3663 def rollback(ui, repo, **opts):
3663 def rollback(ui, repo, **opts):
3664 """roll back the last transaction (dangerous)
3664 """roll back the last transaction (dangerous)
3665
3665
3666 This command should be used with care. There is only one level of
3666 This command should be used with care. There is only one level of
3667 rollback, and there is no way to undo a rollback. It will also
3667 rollback, and there is no way to undo a rollback. It will also
3668 restore the dirstate at the time of the last transaction, losing
3668 restore the dirstate at the time of the last transaction, losing
3669 any dirstate changes since that time. This command does not alter
3669 any dirstate changes since that time. This command does not alter
3670 the working directory.
3670 the working directory.
3671
3671
3672 Transactions are used to encapsulate the effects of all commands
3672 Transactions are used to encapsulate the effects of all commands
3673 that create new changesets or propagate existing changesets into a
3673 that create new changesets or propagate existing changesets into a
3674 repository. For example, the following commands are transactional,
3674 repository. For example, the following commands are transactional,
3675 and their effects can be rolled back:
3675 and their effects can be rolled back:
3676
3676
3677 - commit
3677 - commit
3678 - import
3678 - import
3679 - pull
3679 - pull
3680 - push (with this repository as the destination)
3680 - push (with this repository as the destination)
3681 - unbundle
3681 - unbundle
3682
3682
3683 This command is not intended for use on public repositories. Once
3683 This command is not intended for use on public repositories. Once
3684 changes are visible for pull by other users, rolling a transaction
3684 changes are visible for pull by other users, rolling a transaction
3685 back locally is ineffective (someone else may already have pulled
3685 back locally is ineffective (someone else may already have pulled
3686 the changes). Furthermore, a race is possible with readers of the
3686 the changes). Furthermore, a race is possible with readers of the
3687 repository; for example an in-progress pull from the repository
3687 repository; for example an in-progress pull from the repository
3688 may fail if a rollback is performed.
3688 may fail if a rollback is performed.
3689
3689
3690 Returns 0 on success, 1 if no rollback data is available.
3690 Returns 0 on success, 1 if no rollback data is available.
3691 """
3691 """
3692 return repo.rollback(opts.get('dry_run'))
3692 return repo.rollback(opts.get('dry_run'))
3693
3693
3694 def root(ui, repo):
3694 def root(ui, repo):
3695 """print the root (top) of the current working directory
3695 """print the root (top) of the current working directory
3696
3696
3697 Print the root directory of the current repository.
3697 Print the root directory of the current repository.
3698
3698
3699 Returns 0 on success.
3699 Returns 0 on success.
3700 """
3700 """
3701 ui.write(repo.root + "\n")
3701 ui.write(repo.root + "\n")
3702
3702
3703 def serve(ui, repo, **opts):
3703 def serve(ui, repo, **opts):
3704 """start stand-alone webserver
3704 """start stand-alone webserver
3705
3705
3706 Start a local HTTP repository browser and pull server. You can use
3706 Start a local HTTP repository browser and pull server. You can use
3707 this for ad-hoc sharing and browsing of repositories. It is
3707 this for ad-hoc sharing and browsing of repositories. It is
3708 recommended to use a real web server to serve a repository for
3708 recommended to use a real web server to serve a repository for
3709 longer periods of time.
3709 longer periods of time.
3710
3710
3711 Please note that the server does not implement access control.
3711 Please note that the server does not implement access control.
3712 This means that, by default, anybody can read from the server and
3712 This means that, by default, anybody can read from the server and
3713 nobody can write to it. Set the ``web.allow_push``
3713 nobody can write to it. Set the ``web.allow_push``
3714 option to ``*`` to allow everybody to push to the server. You
3714 option to ``*`` to allow everybody to push to the server. You
3715 should use a real web server if you need to authenticate users.
3715 should use a real web server if you need to authenticate users.
3716
3716
3717 By default, the server logs accesses to stdout and errors to
3717 By default, the server logs accesses to stdout and errors to
3718 stderr. Use the -A/--accesslog and -E/--errorlog options to log to
3718 stderr. Use the -A/--accesslog and -E/--errorlog options to log to
3719 files.
3719 files.
3720
3720
3721 To have the server choose a free port number to listen on, specify
3721 To have the server choose a free port number to listen on, specify
3722 a port number of 0; in this case, the server will print the port
3722 a port number of 0; in this case, the server will print the port
3723 number it uses.
3723 number it uses.
3724
3724
3725 Returns 0 on success.
3725 Returns 0 on success.
3726 """
3726 """
3727
3727
3728 if opts["stdio"]:
3728 if opts["stdio"]:
3729 if repo is None:
3729 if repo is None:
3730 raise error.RepoError(_("There is no Mercurial repository here"
3730 raise error.RepoError(_("There is no Mercurial repository here"
3731 " (.hg not found)"))
3731 " (.hg not found)"))
3732 s = sshserver.sshserver(ui, repo)
3732 s = sshserver.sshserver(ui, repo)
3733 s.serve_forever()
3733 s.serve_forever()
3734
3734
3735 # this way we can check if something was given in the command-line
3735 # this way we can check if something was given in the command-line
3736 if opts.get('port'):
3736 if opts.get('port'):
3737 opts['port'] = util.getport(opts.get('port'))
3737 opts['port'] = util.getport(opts.get('port'))
3738
3738
3739 baseui = repo and repo.baseui or ui
3739 baseui = repo and repo.baseui or ui
3740 optlist = ("name templates style address port prefix ipv6"
3740 optlist = ("name templates style address port prefix ipv6"
3741 " accesslog errorlog certificate encoding")
3741 " accesslog errorlog certificate encoding")
3742 for o in optlist.split():
3742 for o in optlist.split():
3743 val = opts.get(o, '')
3743 val = opts.get(o, '')
3744 if val in (None, ''): # should check against default options instead
3744 if val in (None, ''): # should check against default options instead
3745 continue
3745 continue
3746 baseui.setconfig("web", o, val)
3746 baseui.setconfig("web", o, val)
3747 if repo and repo.ui != baseui:
3747 if repo and repo.ui != baseui:
3748 repo.ui.setconfig("web", o, val)
3748 repo.ui.setconfig("web", o, val)
3749
3749
3750 o = opts.get('web_conf') or opts.get('webdir_conf')
3750 o = opts.get('web_conf') or opts.get('webdir_conf')
3751 if not o:
3751 if not o:
3752 if not repo:
3752 if not repo:
3753 raise error.RepoError(_("There is no Mercurial repository"
3753 raise error.RepoError(_("There is no Mercurial repository"
3754 " here (.hg not found)"))
3754 " here (.hg not found)"))
3755 o = repo.root
3755 o = repo.root
3756
3756
3757 app = hgweb.hgweb(o, baseui=ui)
3757 app = hgweb.hgweb(o, baseui=ui)
3758
3758
3759 class service(object):
3759 class service(object):
3760 def init(self):
3760 def init(self):
3761 util.set_signal_handler()
3761 util.setsignalhandler()
3762 self.httpd = hgweb.server.create_server(ui, app)
3762 self.httpd = hgweb.server.create_server(ui, app)
3763
3763
3764 if opts['port'] and not ui.verbose:
3764 if opts['port'] and not ui.verbose:
3765 return
3765 return
3766
3766
3767 if self.httpd.prefix:
3767 if self.httpd.prefix:
3768 prefix = self.httpd.prefix.strip('/') + '/'
3768 prefix = self.httpd.prefix.strip('/') + '/'
3769 else:
3769 else:
3770 prefix = ''
3770 prefix = ''
3771
3771
3772 port = ':%d' % self.httpd.port
3772 port = ':%d' % self.httpd.port
3773 if port == ':80':
3773 if port == ':80':
3774 port = ''
3774 port = ''
3775
3775
3776 bindaddr = self.httpd.addr
3776 bindaddr = self.httpd.addr
3777 if bindaddr == '0.0.0.0':
3777 if bindaddr == '0.0.0.0':
3778 bindaddr = '*'
3778 bindaddr = '*'
3779 elif ':' in bindaddr: # IPv6
3779 elif ':' in bindaddr: # IPv6
3780 bindaddr = '[%s]' % bindaddr
3780 bindaddr = '[%s]' % bindaddr
3781
3781
3782 fqaddr = self.httpd.fqaddr
3782 fqaddr = self.httpd.fqaddr
3783 if ':' in fqaddr:
3783 if ':' in fqaddr:
3784 fqaddr = '[%s]' % fqaddr
3784 fqaddr = '[%s]' % fqaddr
3785 if opts['port']:
3785 if opts['port']:
3786 write = ui.status
3786 write = ui.status
3787 else:
3787 else:
3788 write = ui.write
3788 write = ui.write
3789 write(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
3789 write(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
3790 (fqaddr, port, prefix, bindaddr, self.httpd.port))
3790 (fqaddr, port, prefix, bindaddr, self.httpd.port))
3791
3791
3792 def run(self):
3792 def run(self):
3793 self.httpd.serve_forever()
3793 self.httpd.serve_forever()
3794
3794
3795 service = service()
3795 service = service()
3796
3796
3797 cmdutil.service(opts, initfn=service.init, runfn=service.run)
3797 cmdutil.service(opts, initfn=service.init, runfn=service.run)
3798
3798
3799 def status(ui, repo, *pats, **opts):
3799 def status(ui, repo, *pats, **opts):
3800 """show changed files in the working directory
3800 """show changed files in the working directory
3801
3801
3802 Show status of files in the repository. If names are given, only
3802 Show status of files in the repository. If names are given, only
3803 files that match are shown. Files that are clean or ignored or
3803 files that match are shown. Files that are clean or ignored or
3804 the source of a copy/move operation are not listed unless
3804 the source of a copy/move operation are not listed unless
3805 -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
3805 -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
3806 Unless options described with "show only ..." are given, the
3806 Unless options described with "show only ..." are given, the
3807 options -mardu are used.
3807 options -mardu are used.
3808
3808
3809 Option -q/--quiet hides untracked (unknown and ignored) files
3809 Option -q/--quiet hides untracked (unknown and ignored) files
3810 unless explicitly requested with -u/--unknown or -i/--ignored.
3810 unless explicitly requested with -u/--unknown or -i/--ignored.
3811
3811
3812 .. note::
3812 .. note::
3813 status may appear to disagree with diff if permissions have
3813 status may appear to disagree with diff if permissions have
3814 changed or a merge has occurred. The standard diff format does
3814 changed or a merge has occurred. The standard diff format does
3815 not report permission changes and diff only reports changes
3815 not report permission changes and diff only reports changes
3816 relative to one merge parent.
3816 relative to one merge parent.
3817
3817
3818 If one revision is given, it is used as the base revision.
3818 If one revision is given, it is used as the base revision.
3819 If two revisions are given, the differences between them are
3819 If two revisions are given, the differences between them are
3820 shown. The --change option can also be used as a shortcut to list
3820 shown. The --change option can also be used as a shortcut to list
3821 the changed files of a revision from its first parent.
3821 the changed files of a revision from its first parent.
3822
3822
3823 The codes used to show the status of files are::
3823 The codes used to show the status of files are::
3824
3824
3825 M = modified
3825 M = modified
3826 A = added
3826 A = added
3827 R = removed
3827 R = removed
3828 C = clean
3828 C = clean
3829 ! = missing (deleted by non-hg command, but still tracked)
3829 ! = missing (deleted by non-hg command, but still tracked)
3830 ? = not tracked
3830 ? = not tracked
3831 I = ignored
3831 I = ignored
3832 = origin of the previous file listed as A (added)
3832 = origin of the previous file listed as A (added)
3833
3833
3834 Returns 0 on success.
3834 Returns 0 on success.
3835 """
3835 """
3836
3836
3837 revs = opts.get('rev')
3837 revs = opts.get('rev')
3838 change = opts.get('change')
3838 change = opts.get('change')
3839
3839
3840 if revs and change:
3840 if revs and change:
3841 msg = _('cannot specify --rev and --change at the same time')
3841 msg = _('cannot specify --rev and --change at the same time')
3842 raise util.Abort(msg)
3842 raise util.Abort(msg)
3843 elif change:
3843 elif change:
3844 node2 = repo.lookup(change)
3844 node2 = repo.lookup(change)
3845 node1 = repo[node2].p1().node()
3845 node1 = repo[node2].p1().node()
3846 else:
3846 else:
3847 node1, node2 = cmdutil.revpair(repo, revs)
3847 node1, node2 = cmdutil.revpair(repo, revs)
3848
3848
3849 cwd = (pats and repo.getcwd()) or ''
3849 cwd = (pats and repo.getcwd()) or ''
3850 end = opts.get('print0') and '\0' or '\n'
3850 end = opts.get('print0') and '\0' or '\n'
3851 copy = {}
3851 copy = {}
3852 states = 'modified added removed deleted unknown ignored clean'.split()
3852 states = 'modified added removed deleted unknown ignored clean'.split()
3853 show = [k for k in states if opts.get(k)]
3853 show = [k for k in states if opts.get(k)]
3854 if opts.get('all'):
3854 if opts.get('all'):
3855 show += ui.quiet and (states[:4] + ['clean']) or states
3855 show += ui.quiet and (states[:4] + ['clean']) or states
3856 if not show:
3856 if not show:
3857 show = ui.quiet and states[:4] or states[:5]
3857 show = ui.quiet and states[:4] or states[:5]
3858
3858
3859 stat = repo.status(node1, node2, cmdutil.match(repo, pats, opts),
3859 stat = repo.status(node1, node2, cmdutil.match(repo, pats, opts),
3860 'ignored' in show, 'clean' in show, 'unknown' in show,
3860 'ignored' in show, 'clean' in show, 'unknown' in show,
3861 opts.get('subrepos'))
3861 opts.get('subrepos'))
3862 changestates = zip(states, 'MAR!?IC', stat)
3862 changestates = zip(states, 'MAR!?IC', stat)
3863
3863
3864 if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
3864 if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
3865 ctxn = repo[nullid]
3865 ctxn = repo[nullid]
3866 ctx1 = repo[node1]
3866 ctx1 = repo[node1]
3867 ctx2 = repo[node2]
3867 ctx2 = repo[node2]
3868 added = stat[1]
3868 added = stat[1]
3869 if node2 is None:
3869 if node2 is None:
3870 added = stat[0] + stat[1] # merged?
3870 added = stat[0] + stat[1] # merged?
3871
3871
3872 for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].iteritems():
3872 for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].iteritems():
3873 if k in added:
3873 if k in added:
3874 copy[k] = v
3874 copy[k] = v
3875 elif v in added:
3875 elif v in added:
3876 copy[v] = k
3876 copy[v] = k
3877
3877
3878 for state, char, files in changestates:
3878 for state, char, files in changestates:
3879 if state in show:
3879 if state in show:
3880 format = "%s %%s%s" % (char, end)
3880 format = "%s %%s%s" % (char, end)
3881 if opts.get('no_status'):
3881 if opts.get('no_status'):
3882 format = "%%s%s" % end
3882 format = "%%s%s" % end
3883
3883
3884 for f in files:
3884 for f in files:
3885 ui.write(format % repo.pathto(f, cwd),
3885 ui.write(format % repo.pathto(f, cwd),
3886 label='status.' + state)
3886 label='status.' + state)
3887 if f in copy:
3887 if f in copy:
3888 ui.write(' %s%s' % (repo.pathto(copy[f], cwd), end),
3888 ui.write(' %s%s' % (repo.pathto(copy[f], cwd), end),
3889 label='status.copied')
3889 label='status.copied')
3890
3890
3891 def summary(ui, repo, **opts):
3891 def summary(ui, repo, **opts):
3892 """summarize working directory state
3892 """summarize working directory state
3893
3893
3894 This generates a brief summary of the working directory state,
3894 This generates a brief summary of the working directory state,
3895 including parents, branch, commit status, and available updates.
3895 including parents, branch, commit status, and available updates.
3896
3896
3897 With the --remote option, this will check the default paths for
3897 With the --remote option, this will check the default paths for
3898 incoming and outgoing changes. This can be time-consuming.
3898 incoming and outgoing changes. This can be time-consuming.
3899
3899
3900 Returns 0 on success.
3900 Returns 0 on success.
3901 """
3901 """
3902
3902
3903 ctx = repo[None]
3903 ctx = repo[None]
3904 parents = ctx.parents()
3904 parents = ctx.parents()
3905 pnode = parents[0].node()
3905 pnode = parents[0].node()
3906
3906
3907 for p in parents:
3907 for p in parents:
3908 # label with log.changeset (instead of log.parent) since this
3908 # label with log.changeset (instead of log.parent) since this
3909 # shows a working directory parent *changeset*:
3909 # shows a working directory parent *changeset*:
3910 ui.write(_('parent: %d:%s ') % (p.rev(), str(p)),
3910 ui.write(_('parent: %d:%s ') % (p.rev(), str(p)),
3911 label='log.changeset')
3911 label='log.changeset')
3912 ui.write(' '.join(p.tags()), label='log.tag')
3912 ui.write(' '.join(p.tags()), label='log.tag')
3913 if p.bookmarks():
3913 if p.bookmarks():
3914 ui.write(' ' + ' '.join(p.bookmarks()), label='log.bookmark')
3914 ui.write(' ' + ' '.join(p.bookmarks()), label='log.bookmark')
3915 if p.rev() == -1:
3915 if p.rev() == -1:
3916 if not len(repo):
3916 if not len(repo):
3917 ui.write(_(' (empty repository)'))
3917 ui.write(_(' (empty repository)'))
3918 else:
3918 else:
3919 ui.write(_(' (no revision checked out)'))
3919 ui.write(_(' (no revision checked out)'))
3920 ui.write('\n')
3920 ui.write('\n')
3921 if p.description():
3921 if p.description():
3922 ui.status(' ' + p.description().splitlines()[0].strip() + '\n',
3922 ui.status(' ' + p.description().splitlines()[0].strip() + '\n',
3923 label='log.summary')
3923 label='log.summary')
3924
3924
3925 branch = ctx.branch()
3925 branch = ctx.branch()
3926 bheads = repo.branchheads(branch)
3926 bheads = repo.branchheads(branch)
3927 m = _('branch: %s\n') % branch
3927 m = _('branch: %s\n') % branch
3928 if branch != 'default':
3928 if branch != 'default':
3929 ui.write(m, label='log.branch')
3929 ui.write(m, label='log.branch')
3930 else:
3930 else:
3931 ui.status(m, label='log.branch')
3931 ui.status(m, label='log.branch')
3932
3932
3933 st = list(repo.status(unknown=True))[:6]
3933 st = list(repo.status(unknown=True))[:6]
3934
3934
3935 c = repo.dirstate.copies()
3935 c = repo.dirstate.copies()
3936 copied, renamed = [], []
3936 copied, renamed = [], []
3937 for d, s in c.iteritems():
3937 for d, s in c.iteritems():
3938 if s in st[2]:
3938 if s in st[2]:
3939 st[2].remove(s)
3939 st[2].remove(s)
3940 renamed.append(d)
3940 renamed.append(d)
3941 else:
3941 else:
3942 copied.append(d)
3942 copied.append(d)
3943 if d in st[1]:
3943 if d in st[1]:
3944 st[1].remove(d)
3944 st[1].remove(d)
3945 st.insert(3, renamed)
3945 st.insert(3, renamed)
3946 st.insert(4, copied)
3946 st.insert(4, copied)
3947
3947
3948 ms = mergemod.mergestate(repo)
3948 ms = mergemod.mergestate(repo)
3949 st.append([f for f in ms if ms[f] == 'u'])
3949 st.append([f for f in ms if ms[f] == 'u'])
3950
3950
3951 subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
3951 subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
3952 st.append(subs)
3952 st.append(subs)
3953
3953
3954 labels = [ui.label(_('%d modified'), 'status.modified'),
3954 labels = [ui.label(_('%d modified'), 'status.modified'),
3955 ui.label(_('%d added'), 'status.added'),
3955 ui.label(_('%d added'), 'status.added'),
3956 ui.label(_('%d removed'), 'status.removed'),
3956 ui.label(_('%d removed'), 'status.removed'),
3957 ui.label(_('%d renamed'), 'status.copied'),
3957 ui.label(_('%d renamed'), 'status.copied'),
3958 ui.label(_('%d copied'), 'status.copied'),
3958 ui.label(_('%d copied'), 'status.copied'),
3959 ui.label(_('%d deleted'), 'status.deleted'),
3959 ui.label(_('%d deleted'), 'status.deleted'),
3960 ui.label(_('%d unknown'), 'status.unknown'),
3960 ui.label(_('%d unknown'), 'status.unknown'),
3961 ui.label(_('%d ignored'), 'status.ignored'),
3961 ui.label(_('%d ignored'), 'status.ignored'),
3962 ui.label(_('%d unresolved'), 'resolve.unresolved'),
3962 ui.label(_('%d unresolved'), 'resolve.unresolved'),
3963 ui.label(_('%d subrepos'), 'status.modified')]
3963 ui.label(_('%d subrepos'), 'status.modified')]
3964 t = []
3964 t = []
3965 for s, l in zip(st, labels):
3965 for s, l in zip(st, labels):
3966 if s:
3966 if s:
3967 t.append(l % len(s))
3967 t.append(l % len(s))
3968
3968
3969 t = ', '.join(t)
3969 t = ', '.join(t)
3970 cleanworkdir = False
3970 cleanworkdir = False
3971
3971
3972 if len(parents) > 1:
3972 if len(parents) > 1:
3973 t += _(' (merge)')
3973 t += _(' (merge)')
3974 elif branch != parents[0].branch():
3974 elif branch != parents[0].branch():
3975 t += _(' (new branch)')
3975 t += _(' (new branch)')
3976 elif (parents[0].extra().get('close') and
3976 elif (parents[0].extra().get('close') and
3977 pnode in repo.branchheads(branch, closed=True)):
3977 pnode in repo.branchheads(branch, closed=True)):
3978 t += _(' (head closed)')
3978 t += _(' (head closed)')
3979 elif not (st[0] or st[1] or st[2] or st[3] or st[4] or st[9]):
3979 elif not (st[0] or st[1] or st[2] or st[3] or st[4] or st[9]):
3980 t += _(' (clean)')
3980 t += _(' (clean)')
3981 cleanworkdir = True
3981 cleanworkdir = True
3982 elif pnode not in bheads:
3982 elif pnode not in bheads:
3983 t += _(' (new branch head)')
3983 t += _(' (new branch head)')
3984
3984
3985 if cleanworkdir:
3985 if cleanworkdir:
3986 ui.status(_('commit: %s\n') % t.strip())
3986 ui.status(_('commit: %s\n') % t.strip())
3987 else:
3987 else:
3988 ui.write(_('commit: %s\n') % t.strip())
3988 ui.write(_('commit: %s\n') % t.strip())
3989
3989
3990 # all ancestors of branch heads - all ancestors of parent = new csets
3990 # all ancestors of branch heads - all ancestors of parent = new csets
3991 new = [0] * len(repo)
3991 new = [0] * len(repo)
3992 cl = repo.changelog
3992 cl = repo.changelog
3993 for a in [cl.rev(n) for n in bheads]:
3993 for a in [cl.rev(n) for n in bheads]:
3994 new[a] = 1
3994 new[a] = 1
3995 for a in cl.ancestors(*[cl.rev(n) for n in bheads]):
3995 for a in cl.ancestors(*[cl.rev(n) for n in bheads]):
3996 new[a] = 1
3996 new[a] = 1
3997 for a in [p.rev() for p in parents]:
3997 for a in [p.rev() for p in parents]:
3998 if a >= 0:
3998 if a >= 0:
3999 new[a] = 0
3999 new[a] = 0
4000 for a in cl.ancestors(*[p.rev() for p in parents]):
4000 for a in cl.ancestors(*[p.rev() for p in parents]):
4001 new[a] = 0
4001 new[a] = 0
4002 new = sum(new)
4002 new = sum(new)
4003
4003
4004 if new == 0:
4004 if new == 0:
4005 ui.status(_('update: (current)\n'))
4005 ui.status(_('update: (current)\n'))
4006 elif pnode not in bheads:
4006 elif pnode not in bheads:
4007 ui.write(_('update: %d new changesets (update)\n') % new)
4007 ui.write(_('update: %d new changesets (update)\n') % new)
4008 else:
4008 else:
4009 ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
4009 ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
4010 (new, len(bheads)))
4010 (new, len(bheads)))
4011
4011
4012 if opts.get('remote'):
4012 if opts.get('remote'):
4013 t = []
4013 t = []
4014 source, branches = hg.parseurl(ui.expandpath('default'))
4014 source, branches = hg.parseurl(ui.expandpath('default'))
4015 other = hg.repository(hg.remoteui(repo, {}), source)
4015 other = hg.repository(hg.remoteui(repo, {}), source)
4016 revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))
4016 revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))
4017 ui.debug('comparing with %s\n' % util.hidepassword(source))
4017 ui.debug('comparing with %s\n' % util.hidepassword(source))
4018 repo.ui.pushbuffer()
4018 repo.ui.pushbuffer()
4019 commoninc = discovery.findcommonincoming(repo, other)
4019 commoninc = discovery.findcommonincoming(repo, other)
4020 _common, incoming, _rheads = commoninc
4020 _common, incoming, _rheads = commoninc
4021 repo.ui.popbuffer()
4021 repo.ui.popbuffer()
4022 if incoming:
4022 if incoming:
4023 t.append(_('1 or more incoming'))
4023 t.append(_('1 or more incoming'))
4024
4024
4025 dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
4025 dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
4026 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
4026 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
4027 if source != dest:
4027 if source != dest:
4028 other = hg.repository(hg.remoteui(repo, {}), dest)
4028 other = hg.repository(hg.remoteui(repo, {}), dest)
4029 commoninc = None
4029 commoninc = None
4030 ui.debug('comparing with %s\n' % util.hidepassword(dest))
4030 ui.debug('comparing with %s\n' % util.hidepassword(dest))
4031 repo.ui.pushbuffer()
4031 repo.ui.pushbuffer()
4032 common, outheads = discovery.findcommonoutgoing(repo, other,
4032 common, outheads = discovery.findcommonoutgoing(repo, other,
4033 commoninc=commoninc)
4033 commoninc=commoninc)
4034 repo.ui.popbuffer()
4034 repo.ui.popbuffer()
4035 o = repo.changelog.findmissing(common=common, heads=outheads)
4035 o = repo.changelog.findmissing(common=common, heads=outheads)
4036 if o:
4036 if o:
4037 t.append(_('%d outgoing') % len(o))
4037 t.append(_('%d outgoing') % len(o))
4038 if 'bookmarks' in other.listkeys('namespaces'):
4038 if 'bookmarks' in other.listkeys('namespaces'):
4039 lmarks = repo.listkeys('bookmarks')
4039 lmarks = repo.listkeys('bookmarks')
4040 rmarks = other.listkeys('bookmarks')
4040 rmarks = other.listkeys('bookmarks')
4041 diff = set(rmarks) - set(lmarks)
4041 diff = set(rmarks) - set(lmarks)
4042 if len(diff) > 0:
4042 if len(diff) > 0:
4043 t.append(_('%d incoming bookmarks') % len(diff))
4043 t.append(_('%d incoming bookmarks') % len(diff))
4044 diff = set(lmarks) - set(rmarks)
4044 diff = set(lmarks) - set(rmarks)
4045 if len(diff) > 0:
4045 if len(diff) > 0:
4046 t.append(_('%d outgoing bookmarks') % len(diff))
4046 t.append(_('%d outgoing bookmarks') % len(diff))
4047
4047
4048 if t:
4048 if t:
4049 ui.write(_('remote: %s\n') % (', '.join(t)))
4049 ui.write(_('remote: %s\n') % (', '.join(t)))
4050 else:
4050 else:
4051 ui.status(_('remote: (synced)\n'))
4051 ui.status(_('remote: (synced)\n'))
4052
4052
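The update count in summary above is a set difference over revision numbers: everything reachable from the branch heads minus everything reachable from the working-directory parents. A sketch of the same idea with plain sets (illustration only; ``new_changesets``, ``ancestors_of`` and the toy DAG are hypothetical, with ``ancestors_of(rev)`` returning the revision together with all of its ancestors):

# Illustration only: "new changesets" as ancestors(heads) - ancestors(parents).
def new_changesets(branch_heads, parents, ancestors_of):
    from_heads = set()
    for head in branch_heads:
        from_heads |= ancestors_of(head)
    from_parents = set()
    for p in parents:
        from_parents |= ancestors_of(p)
    return len(from_heads - from_parents)

# toy DAG: revision -> list of parent revisions
parents_of = {0: [], 1: [0], 2: [1], 3: [1]}

def ancestors_of(rev):
    seen, stack = set(), [rev]
    while stack:
        r = stack.pop()
        if r not in seen:
            seen.add(r)
            stack.extend(parents_of[r])
    return seen

# working directory at rev 2; branch heads are revs 2 and 3
assert new_changesets([2, 3], [2], ancestors_of) == 1   # only rev 3 is new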
4053 def tag(ui, repo, name1, *names, **opts):
4053 def tag(ui, repo, name1, *names, **opts):
4054 """add one or more tags for the current or given revision
4054 """add one or more tags for the current or given revision
4055
4055
4056 Name a particular revision using <name>.
4056 Name a particular revision using <name>.
4057
4057
4058 Tags are used to name particular revisions of the repository and are
4058 Tags are used to name particular revisions of the repository and are
4059 very useful to compare different revisions, to go back to significant
4059 very useful to compare different revisions, to go back to significant
4060 earlier versions or to mark branch points as releases, etc. Changing
4060 earlier versions or to mark branch points as releases, etc. Changing
4061 an existing tag is normally disallowed; use -f/--force to override.
4061 an existing tag is normally disallowed; use -f/--force to override.
4062
4062
4063 If no revision is given, the parent of the working directory is
4063 If no revision is given, the parent of the working directory is
4064 used, or tip if no revision is checked out.
4064 used, or tip if no revision is checked out.
4065
4065
4066 To facilitate version control, distribution, and merging of tags,
4066 To facilitate version control, distribution, and merging of tags,
4067 they are stored as a file named ".hgtags" which is managed similarly
4067 they are stored as a file named ".hgtags" which is managed similarly
4068 to other project files and can be hand-edited if necessary. This
4068 to other project files and can be hand-edited if necessary. This
4069 also means that tagging creates a new commit. The file
4069 also means that tagging creates a new commit. The file
4070 ".hg/localtags" is used for local tags (not shared among
4070 ".hg/localtags" is used for local tags (not shared among
4071 repositories).
4071 repositories).
4072
4072
4073 Tag commits are usually made at the head of a branch. If the parent
4073 Tag commits are usually made at the head of a branch. If the parent
4074 of the working directory is not a branch head, :hg:`tag` aborts; use
4074 of the working directory is not a branch head, :hg:`tag` aborts; use
4075 -f/--force to force the tag commit to be based on a non-head
4075 -f/--force to force the tag commit to be based on a non-head
4076 changeset.
4076 changeset.
4077
4077
4078 See :hg:`help dates` for a list of formats valid for -d/--date.
4078 See :hg:`help dates` for a list of formats valid for -d/--date.
4079
4079
4080 Since tag names have priority over branch names during revision
4080 Since tag names have priority over branch names during revision
4081 lookup, using an existing branch name as a tag name is discouraged.
4081 lookup, using an existing branch name as a tag name is discouraged.
4082
4082
4083 Returns 0 on success.
4083 Returns 0 on success.
4084 """
4084 """
4085
4085
4086 rev_ = "."
4086 rev_ = "."
4087 names = [t.strip() for t in (name1,) + names]
4087 names = [t.strip() for t in (name1,) + names]
4088 if len(names) != len(set(names)):
4088 if len(names) != len(set(names)):
4089 raise util.Abort(_('tag names must be unique'))
4089 raise util.Abort(_('tag names must be unique'))
4090 for n in names:
4090 for n in names:
4091 if n in ['tip', '.', 'null']:
4091 if n in ['tip', '.', 'null']:
4092 raise util.Abort(_("the name '%s' is reserved") % n)
4092 raise util.Abort(_("the name '%s' is reserved") % n)
4093 if not n:
4093 if not n:
4094 raise util.Abort(_('tag names cannot consist entirely of whitespace'))
4094 raise util.Abort(_('tag names cannot consist entirely of whitespace'))
4095 if opts.get('rev') and opts.get('remove'):
4095 if opts.get('rev') and opts.get('remove'):
4096 raise util.Abort(_("--rev and --remove are incompatible"))
4096 raise util.Abort(_("--rev and --remove are incompatible"))
4097 if opts.get('rev'):
4097 if opts.get('rev'):
4098 rev_ = opts['rev']
4098 rev_ = opts['rev']
4099 message = opts.get('message')
4099 message = opts.get('message')
4100 if opts.get('remove'):
4100 if opts.get('remove'):
4101 expectedtype = opts.get('local') and 'local' or 'global'
4101 expectedtype = opts.get('local') and 'local' or 'global'
4102 for n in names:
4102 for n in names:
4103 if not repo.tagtype(n):
4103 if not repo.tagtype(n):
4104 raise util.Abort(_("tag '%s' does not exist") % n)
4104 raise util.Abort(_("tag '%s' does not exist") % n)
4105 if repo.tagtype(n) != expectedtype:
4105 if repo.tagtype(n) != expectedtype:
4106 if expectedtype == 'global':
4106 if expectedtype == 'global':
4107 raise util.Abort(_("tag '%s' is not a global tag") % n)
4107 raise util.Abort(_("tag '%s' is not a global tag") % n)
4108 else:
4108 else:
4109 raise util.Abort(_("tag '%s' is not a local tag") % n)
4109 raise util.Abort(_("tag '%s' is not a local tag") % n)
4110 rev_ = nullid
4110 rev_ = nullid
4111 if not message:
4111 if not message:
4112 # we don't translate commit messages
4112 # we don't translate commit messages
4113 message = 'Removed tag %s' % ', '.join(names)
4113 message = 'Removed tag %s' % ', '.join(names)
4114 elif not opts.get('force'):
4114 elif not opts.get('force'):
4115 for n in names:
4115 for n in names:
4116 if n in repo.tags():
4116 if n in repo.tags():
4117 raise util.Abort(_("tag '%s' already exists "
4117 raise util.Abort(_("tag '%s' already exists "
4118 "(use -f to force)") % n)
4118 "(use -f to force)") % n)
4119 if not opts.get('local'):
4119 if not opts.get('local'):
4120 p1, p2 = repo.dirstate.parents()
4120 p1, p2 = repo.dirstate.parents()
4121 if p2 != nullid:
4121 if p2 != nullid:
4122 raise util.Abort(_('uncommitted merge'))
4122 raise util.Abort(_('uncommitted merge'))
4123 bheads = repo.branchheads()
4123 bheads = repo.branchheads()
4124 if not opts.get('force') and bheads and p1 not in bheads:
4124 if not opts.get('force') and bheads and p1 not in bheads:
4125 raise util.Abort(_('not at a branch head (use -f to force)'))
4125 raise util.Abort(_('not at a branch head (use -f to force)'))
4126 r = cmdutil.revsingle(repo, rev_).node()
4126 r = cmdutil.revsingle(repo, rev_).node()
4127
4127
4128 if not message:
4128 if not message:
4129 # we don't translate commit messages
4129 # we don't translate commit messages
4130 message = ('Added tag %s for changeset %s' %
4130 message = ('Added tag %s for changeset %s' %
4131 (', '.join(names), short(r)))
4131 (', '.join(names), short(r)))
4132
4132
4133 date = opts.get('date')
4133 date = opts.get('date')
4134 if date:
4134 if date:
4135 date = util.parsedate(date)
4135 date = util.parsedate(date)
4136
4136
4137 if opts.get('edit'):
4137 if opts.get('edit'):
4138 message = ui.edit(message, ui.username())
4138 message = ui.edit(message, ui.username())
4139
4139
4140 repo.tag(names, r, message, opts.get('local'), opts.get('user'), date)
4140 repo.tag(names, r, message, opts.get('local'), opts.get('user'), date)
4141
4141
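# Editor's note (illustrative only, not upstream code): typical invocations
# of the tag command defined above; revision ids are placeholders.
#   hg tag v1.0                   # tag the working directory's parent
#   hg tag -r 1a2b3c4d v1.0       # tag a specific changeset
#   hg tag --local wip            # local tag, stored in .hg/localtags
#   hg tag --remove v1.0          # delete a global tag (makes a new commit)
#   hg tag -f v1.0                # force: move an existing tag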
4142 def tags(ui, repo):
4142 def tags(ui, repo):
4143 """list repository tags
4143 """list repository tags
4144
4144
4145 This lists both regular and local tags. When the -v/--verbose
4145 This lists both regular and local tags. When the -v/--verbose
4146 switch is used, a third column "local" is printed for local tags.
4146 switch is used, a third column "local" is printed for local tags.
4147
4147
4148 Returns 0 on success.
4148 Returns 0 on success.
4149 """
4149 """
4150
4150
4151 hexfunc = ui.debugflag and hex or short
4151 hexfunc = ui.debugflag and hex or short
4152 tagtype = ""
4152 tagtype = ""
4153
4153
4154 for t, n in reversed(repo.tagslist()):
4154 for t, n in reversed(repo.tagslist()):
4155 if ui.quiet:
4155 if ui.quiet:
4156 ui.write("%s\n" % t)
4156 ui.write("%s\n" % t)
4157 continue
4157 continue
4158
4158
4159 hn = hexfunc(n)
4159 hn = hexfunc(n)
4160 r = "%5d:%s" % (repo.changelog.rev(n), hn)
4160 r = "%5d:%s" % (repo.changelog.rev(n), hn)
4161 spaces = " " * (30 - encoding.colwidth(t))
4161 spaces = " " * (30 - encoding.colwidth(t))
4162
4162
4163 if ui.verbose:
4163 if ui.verbose:
4164 if repo.tagtype(t) == 'local':
4164 if repo.tagtype(t) == 'local':
4165 tagtype = " local"
4165 tagtype = " local"
4166 else:
4166 else:
4167 tagtype = ""
4167 tagtype = ""
4168 ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))
4168 ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))
4169
4169
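# Editor's sketch (not upstream code): the column layout produced by tags()
# above -- the name is padded to 30 columns and the revision is rendered as
# "%5d:%s". Names and revisions here are made up; the helper is never called.
def _tagslayout_example():
    lines = []
    for name, rev, shorthash in [('tip', 42, 'a1b2c3d4e5f6'),
                                 ('v1.0', 7, '0123456789ab')]:
        spaces = ' ' * (30 - len(name))    # upstream uses encoding.colwidth()
        lines.append('%s%s %5d:%s' % (name, spaces, rev, shorthash))
    return lines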
4170 def tip(ui, repo, **opts):
4170 def tip(ui, repo, **opts):
4171 """show the tip revision
4171 """show the tip revision
4172
4172
4173 The tip revision (usually just called the tip) is the changeset
4173 The tip revision (usually just called the tip) is the changeset
4174 most recently added to the repository (and therefore the most
4174 most recently added to the repository (and therefore the most
4175 recently changed head).
4175 recently changed head).
4176
4176
4177 If you have just made a commit, that commit will be the tip. If
4177 If you have just made a commit, that commit will be the tip. If
4178 you have just pulled changes from another repository, the tip of
4178 you have just pulled changes from another repository, the tip of
4179 that repository becomes the current tip. The "tip" tag is special
4179 that repository becomes the current tip. The "tip" tag is special
4180 and cannot be renamed or assigned to a different changeset.
4180 and cannot be renamed or assigned to a different changeset.
4181
4181
4182 Returns 0 on success.
4182 Returns 0 on success.
4183 """
4183 """
4184 displayer = cmdutil.show_changeset(ui, repo, opts)
4184 displayer = cmdutil.show_changeset(ui, repo, opts)
4185 displayer.show(repo[len(repo) - 1])
4185 displayer.show(repo[len(repo) - 1])
4186 displayer.close()
4186 displayer.close()
4187
4187
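# Editor's note: in tip() above, repo[len(repo) - 1] addresses the changeset
# with the highest local revision number, which is exactly the changeset the
# "tip" tag points to.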
4188 def unbundle(ui, repo, fname1, *fnames, **opts):
4188 def unbundle(ui, repo, fname1, *fnames, **opts):
4189 """apply one or more changegroup files
4189 """apply one or more changegroup files
4190
4190
4191 Apply one or more compressed changegroup files generated by the
4191 Apply one or more compressed changegroup files generated by the
4192 bundle command.
4192 bundle command.
4193
4193
4194 Returns 0 on success, 1 if an update has unresolved files.
4194 Returns 0 on success, 1 if an update has unresolved files.
4195 """
4195 """
4196 fnames = (fname1,) + fnames
4196 fnames = (fname1,) + fnames
4197
4197
4198 lock = repo.lock()
4198 lock = repo.lock()
4199 wc = repo['.']
4199 wc = repo['.']
4200 try:
4200 try:
4201 for fname in fnames:
4201 for fname in fnames:
4202 f = url.open(ui, fname)
4202 f = url.open(ui, fname)
4203 gen = changegroup.readbundle(f, fname)
4203 gen = changegroup.readbundle(f, fname)
4204 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname,
4204 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname,
4205 lock=lock)
4205 lock=lock)
4206 bookmarks.updatecurrentbookmark(repo, wc.node(), wc.branch())
4206 bookmarks.updatecurrentbookmark(repo, wc.node(), wc.branch())
4207 finally:
4207 finally:
4208 lock.release()
4208 lock.release()
4209 return postincoming(ui, repo, modheads, opts.get('update'), None)
4209 return postincoming(ui, repo, modheads, opts.get('update'), None)
4210
4210
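# Editor's note (illustrative only, not upstream code): unbundle is the
# consuming side of "hg bundle"; a typical round trip, with made-up file
# names, looks like
#   hg bundle --base 1a2b3c4d changes.hg    # on the producing repository
#   hg unbundle changes.hg                  # apply it here; the repo stays
#                                           # locked while bundles are added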
4211 def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False):
4211 def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False):
4212 """update working directory (or switch revisions)
4212 """update working directory (or switch revisions)
4213
4213
4214 Update the repository's working directory to the specified
4214 Update the repository's working directory to the specified
4215 changeset. If no changeset is specified, update to the tip of the
4215 changeset. If no changeset is specified, update to the tip of the
4216 current named branch.
4216 current named branch.
4217
4217
4218 If the changeset is not a descendant of the working directory's
4218 If the changeset is not a descendant of the working directory's
4219 parent, the update is aborted. With the -c/--check option, the
4219 parent, the update is aborted. With the -c/--check option, the
4220 working directory is checked for uncommitted changes; if none are
4220 working directory is checked for uncommitted changes; if none are
4221 found, the working directory is updated to the specified
4221 found, the working directory is updated to the specified
4222 changeset.
4222 changeset.
4223
4223
4224 The following rules apply when the working directory contains
4224 The following rules apply when the working directory contains
4225 uncommitted changes:
4225 uncommitted changes:
4226
4226
4227 1. If neither -c/--check nor -C/--clean is specified, and if
4227 1. If neither -c/--check nor -C/--clean is specified, and if
4228 the requested changeset is an ancestor or descendant of
4228 the requested changeset is an ancestor or descendant of
4229 the working directory's parent, the uncommitted changes
4229 the working directory's parent, the uncommitted changes
4230 are merged into the requested changeset and the merged
4230 are merged into the requested changeset and the merged
4231 result is left uncommitted. If the requested changeset is
4231 result is left uncommitted. If the requested changeset is
4232 not an ancestor or descendant (that is, it is on another
4232 not an ancestor or descendant (that is, it is on another
4233 branch), the update is aborted and the uncommitted changes
4233 branch), the update is aborted and the uncommitted changes
4234 are preserved.
4234 are preserved.
4235
4235
4236 2. With the -c/--check option, the update is aborted and the
4236 2. With the -c/--check option, the update is aborted and the
4237 uncommitted changes are preserved.
4237 uncommitted changes are preserved.
4238
4238
4239 3. With the -C/--clean option, uncommitted changes are discarded and
4239 3. With the -C/--clean option, uncommitted changes are discarded and
4240 the working directory is updated to the requested changeset.
4240 the working directory is updated to the requested changeset.
4241
4241
4242 Use null as the changeset to remove the working directory (like
4242 Use null as the changeset to remove the working directory (like
4243 :hg:`clone -U`).
4243 :hg:`clone -U`).
4244
4244
4245 If you want to update just one file to an older changeset, use
4245 If you want to update just one file to an older changeset, use
4246 :hg:`revert`.
4246 :hg:`revert`.
4247
4247
4248 See :hg:`help dates` for a list of formats valid for -d/--date.
4248 See :hg:`help dates` for a list of formats valid for -d/--date.
4249
4249
4250 Returns 0 on success, 1 if there are unresolved files.
4250 Returns 0 on success, 1 if there are unresolved files.
4251 """
4251 """
4252 if rev and node:
4252 if rev and node:
4253 raise util.Abort(_("please specify just one revision"))
4253 raise util.Abort(_("please specify just one revision"))
4254
4254
4255 if rev is None or rev == '':
4255 if rev is None or rev == '':
4256 rev = node
4256 rev = node
4257
4257
4258 # if a bookmark was passed as the revision, remember its original name
4258 # if a bookmark was passed as the revision, remember its original name
4259 brev = rev
4259 brev = rev
4260 rev = cmdutil.revsingle(repo, rev, rev).rev()
4260 rev = cmdutil.revsingle(repo, rev, rev).rev()
4261
4261
4262 if check and clean:
4262 if check and clean:
4263 raise util.Abort(_("cannot specify both -c/--check and -C/--clean"))
4263 raise util.Abort(_("cannot specify both -c/--check and -C/--clean"))
4264
4264
4265 if check:
4265 if check:
4266 # we could use dirty() but we can ignore merge and branch trivia
4266 # we could use dirty() but we can ignore merge and branch trivia
4267 c = repo[None]
4267 c = repo[None]
4268 if c.modified() or c.added() or c.removed():
4268 if c.modified() or c.added() or c.removed():
4269 raise util.Abort(_("uncommitted local changes"))
4269 raise util.Abort(_("uncommitted local changes"))
4270
4270
4271 if date:
4271 if date:
4272 if rev is not None:
4272 if rev is not None:
4273 raise util.Abort(_("you can't specify a revision and a date"))
4273 raise util.Abort(_("you can't specify a revision and a date"))
4274 rev = cmdutil.finddate(ui, repo, date)
4274 rev = cmdutil.finddate(ui, repo, date)
4275
4275
4276 if clean or check:
4276 if clean or check:
4277 ret = hg.clean(repo, rev)
4277 ret = hg.clean(repo, rev)
4278 else:
4278 else:
4279 ret = hg.update(repo, rev)
4279 ret = hg.update(repo, rev)
4280
4280
4281 if brev in repo._bookmarks:
4281 if brev in repo._bookmarks:
4282 bookmarks.setcurrent(repo, brev)
4282 bookmarks.setcurrent(repo, brev)
4283
4283
4284 return ret
4284 return ret
4285
4285
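# Editor's note (illustrative only, not upstream code): the docstring rules
# above in practice, with placeholder revisions.
#   hg update                  # to the tip of the current named branch
#   hg update 1a2b3c4d         # carry uncommitted changes along if the
#                              # target is linearly related, else abort
#   hg update -c 1a2b3c4d      # abort whenever there are local changes
#   hg update -C 1a2b3c4d      # discard local changes first
#   hg update null             # empty the working directory (like clone -U)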
4286 def verify(ui, repo):
4286 def verify(ui, repo):
4287 """verify the integrity of the repository
4287 """verify the integrity of the repository
4288
4288
4289 Verify the integrity of the current repository.
4289 Verify the integrity of the current repository.
4290
4290
4291 This will perform an extensive check of the repository's
4291 This will perform an extensive check of the repository's
4292 integrity, validating the hashes and checksums of each entry in
4292 integrity, validating the hashes and checksums of each entry in
4293 the changelog, manifest, and tracked files, as well as the
4293 the changelog, manifest, and tracked files, as well as the
4294 integrity of their crosslinks and indices.
4294 integrity of their crosslinks and indices.
4295
4295
4296 Returns 0 on success, 1 if errors are encountered.
4296 Returns 0 on success, 1 if errors are encountered.
4297 """
4297 """
4298 return hg.verify(repo)
4298 return hg.verify(repo)
4299
4299
4300 def version_(ui):
4300 def version_(ui):
4301 """output version and copyright information"""
4301 """output version and copyright information"""
4302 ui.write(_("Mercurial Distributed SCM (version %s)\n")
4302 ui.write(_("Mercurial Distributed SCM (version %s)\n")
4303 % util.version())
4303 % util.version())
4304 ui.status(_(
4304 ui.status(_(
4305 "(see http://mercurial.selenic.com for more information)\n"
4305 "(see http://mercurial.selenic.com for more information)\n"
4306 "\nCopyright (C) 2005-2011 Matt Mackall and others\n"
4306 "\nCopyright (C) 2005-2011 Matt Mackall and others\n"
4307 "This is free software; see the source for copying conditions. "
4307 "This is free software; see the source for copying conditions. "
4308 "There is NO\nwarranty; "
4308 "There is NO\nwarranty; "
4309 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
4309 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
4310 ))
4310 ))
4311
4311
4312 # Command options and aliases are listed here, alphabetically
4312 # Command options and aliases are listed here, alphabetically
4313
4313
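# Editor's note: each entry in the option lists below is a tuple of
# (short flag, long flag, default value, help text[, value placeholder]);
# an empty short flag means the option has no one-letter form.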
4314 globalopts = [
4314 globalopts = [
4315 ('R', 'repository', '',
4315 ('R', 'repository', '',
4316 _('repository root directory or name of overlay bundle file'),
4316 _('repository root directory or name of overlay bundle file'),
4317 _('REPO')),
4317 _('REPO')),
4318 ('', 'cwd', '',
4318 ('', 'cwd', '',
4319 _('change working directory'), _('DIR')),
4319 _('change working directory'), _('DIR')),
4320 ('y', 'noninteractive', None,
4320 ('y', 'noninteractive', None,
4321 _('do not prompt, assume \'yes\' for any required answers')),
4321 _('do not prompt, assume \'yes\' for any required answers')),
4322 ('q', 'quiet', None, _('suppress output')),
4322 ('q', 'quiet', None, _('suppress output')),
4323 ('v', 'verbose', None, _('enable additional output')),
4323 ('v', 'verbose', None, _('enable additional output')),
4324 ('', 'config', [],
4324 ('', 'config', [],
4325 _('set/override config option (use \'section.name=value\')'),
4325 _('set/override config option (use \'section.name=value\')'),
4326 _('CONFIG')),
4326 _('CONFIG')),
4327 ('', 'debug', None, _('enable debugging output')),
4327 ('', 'debug', None, _('enable debugging output')),
4328 ('', 'debugger', None, _('start debugger')),
4328 ('', 'debugger', None, _('start debugger')),
4329 ('', 'encoding', encoding.encoding, _('set the charset encoding'),
4329 ('', 'encoding', encoding.encoding, _('set the charset encoding'),
4330 _('ENCODE')),
4330 _('ENCODE')),
4331 ('', 'encodingmode', encoding.encodingmode,
4331 ('', 'encodingmode', encoding.encodingmode,
4332 _('set the charset encoding mode'), _('MODE')),
4332 _('set the charset encoding mode'), _('MODE')),
4333 ('', 'traceback', None, _('always print a traceback on exception')),
4333 ('', 'traceback', None, _('always print a traceback on exception')),
4334 ('', 'time', None, _('time how long the command takes')),
4334 ('', 'time', None, _('time how long the command takes')),
4335 ('', 'profile', None, _('print command execution profile')),
4335 ('', 'profile', None, _('print command execution profile')),
4336 ('', 'version', None, _('output version information and exit')),
4336 ('', 'version', None, _('output version information and exit')),
4337 ('h', 'help', None, _('display help and exit')),
4337 ('h', 'help', None, _('display help and exit')),
4338 ]
4338 ]
4339
4339
4340 dryrunopts = [('n', 'dry-run', None,
4340 dryrunopts = [('n', 'dry-run', None,
4341 _('do not perform actions, just print output'))]
4341 _('do not perform actions, just print output'))]
4342
4342
4343 remoteopts = [
4343 remoteopts = [
4344 ('e', 'ssh', '',
4344 ('e', 'ssh', '',
4345 _('specify ssh command to use'), _('CMD')),
4345 _('specify ssh command to use'), _('CMD')),
4346 ('', 'remotecmd', '',
4346 ('', 'remotecmd', '',
4347 _('specify hg command to run on the remote side'), _('CMD')),
4347 _('specify hg command to run on the remote side'), _('CMD')),
4348 ('', 'insecure', None,
4348 ('', 'insecure', None,
4349 _('do not verify server certificate (ignoring web.cacerts config)')),
4349 _('do not verify server certificate (ignoring web.cacerts config)')),
4350 ]
4350 ]
4351
4351
4352 walkopts = [
4352 walkopts = [
4353 ('I', 'include', [],
4353 ('I', 'include', [],
4354 _('include names matching the given patterns'), _('PATTERN')),
4354 _('include names matching the given patterns'), _('PATTERN')),
4355 ('X', 'exclude', [],
4355 ('X', 'exclude', [],
4356 _('exclude names matching the given patterns'), _('PATTERN')),
4356 _('exclude names matching the given patterns'), _('PATTERN')),
4357 ]
4357 ]
4358
4358
4359 commitopts = [
4359 commitopts = [
4360 ('m', 'message', '',
4360 ('m', 'message', '',
4361 _('use text as commit message'), _('TEXT')),
4361 _('use text as commit message'), _('TEXT')),
4362 ('l', 'logfile', '',
4362 ('l', 'logfile', '',
4363 _('read commit message from file'), _('FILE')),
4363 _('read commit message from file'), _('FILE')),
4364 ]
4364 ]
4365
4365
4366 commitopts2 = [
4366 commitopts2 = [
4367 ('d', 'date', '',
4367 ('d', 'date', '',
4368 _('record the specified date as commit date'), _('DATE')),
4368 _('record the specified date as commit date'), _('DATE')),
4369 ('u', 'user', '',
4369 ('u', 'user', '',
4370 _('record the specified user as committer'), _('USER')),
4370 _('record the specified user as committer'), _('USER')),
4371 ]
4371 ]
4372
4372
4373 templateopts = [
4373 templateopts = [
4374 ('', 'style', '',
4374 ('', 'style', '',
4375 _('display using template map file'), _('STYLE')),
4375 _('display using template map file'), _('STYLE')),
4376 ('', 'template', '',
4376 ('', 'template', '',
4377 _('display with template'), _('TEMPLATE')),
4377 _('display with template'), _('TEMPLATE')),
4378 ]
4378 ]
4379
4379
4380 logopts = [
4380 logopts = [
4381 ('p', 'patch', None, _('show patch')),
4381 ('p', 'patch', None, _('show patch')),
4382 ('g', 'git', None, _('use git extended diff format')),
4382 ('g', 'git', None, _('use git extended diff format')),
4383 ('l', 'limit', '',
4383 ('l', 'limit', '',
4384 _('limit number of changes displayed'), _('NUM')),
4384 _('limit number of changes displayed'), _('NUM')),
4385 ('M', 'no-merges', None, _('do not show merges')),
4385 ('M', 'no-merges', None, _('do not show merges')),
4386 ('', 'stat', None, _('output diffstat-style summary of changes')),
4386 ('', 'stat', None, _('output diffstat-style summary of changes')),
4387 ] + templateopts
4387 ] + templateopts
4388
4388
4389 diffopts = [
4389 diffopts = [
4390 ('a', 'text', None, _('treat all files as text')),
4390 ('a', 'text', None, _('treat all files as text')),
4391 ('g', 'git', None, _('use git extended diff format')),
4391 ('g', 'git', None, _('use git extended diff format')),
4392 ('', 'nodates', None, _('omit dates from diff headers'))
4392 ('', 'nodates', None, _('omit dates from diff headers'))
4393 ]
4393 ]
4394
4394
4395 diffopts2 = [
4395 diffopts2 = [
4396 ('p', 'show-function', None, _('show which function each change is in')),
4396 ('p', 'show-function', None, _('show which function each change is in')),
4397 ('', 'reverse', None, _('produce a diff that undoes the changes')),
4397 ('', 'reverse', None, _('produce a diff that undoes the changes')),
4398 ('w', 'ignore-all-space', None,
4398 ('w', 'ignore-all-space', None,
4399 _('ignore white space when comparing lines')),
4399 _('ignore white space when comparing lines')),
4400 ('b', 'ignore-space-change', None,
4400 ('b', 'ignore-space-change', None,
4401 _('ignore changes in the amount of white space')),
4401 _('ignore changes in the amount of white space')),
4402 ('B', 'ignore-blank-lines', None,
4402 ('B', 'ignore-blank-lines', None,
4403 _('ignore changes whose lines are all blank')),
4403 _('ignore changes whose lines are all blank')),
4404 ('U', 'unified', '',
4404 ('U', 'unified', '',
4405 _('number of lines of context to show'), _('NUM')),
4405 _('number of lines of context to show'), _('NUM')),
4406 ('', 'stat', None, _('output diffstat-style summary of changes')),
4406 ('', 'stat', None, _('output diffstat-style summary of changes')),
4407 ]
4407 ]
4408
4408
4409 similarityopts = [
4409 similarityopts = [
4410 ('s', 'similarity', '',
4410 ('s', 'similarity', '',
4411 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
4411 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
4412 ]
4412 ]
4413
4413
4414 subrepoopts = [
4414 subrepoopts = [
4415 ('S', 'subrepos', None,
4415 ('S', 'subrepos', None,
4416 _('recurse into subrepositories'))
4416 _('recurse into subrepositories'))
4417 ]
4417 ]
4418
4418
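# Editor's note: in the command table below, each key is the command name,
# optionally prefixed with "^" (listed in the short help) and with aliases
# separated by "|"; each value is (function, option list, usage synopsis),
# with the synopsis omitted for a few commands such as "recover" and "root".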
4419 table = {
4419 table = {
4420 "^add": (add, walkopts + subrepoopts + dryrunopts,
4420 "^add": (add, walkopts + subrepoopts + dryrunopts,
4421 _('[OPTION]... [FILE]...')),
4421 _('[OPTION]... [FILE]...')),
4422 "addremove":
4422 "addremove":
4423 (addremove, similarityopts + walkopts + dryrunopts,
4423 (addremove, similarityopts + walkopts + dryrunopts,
4424 _('[OPTION]... [FILE]...')),
4424 _('[OPTION]... [FILE]...')),
4425 "^annotate|blame":
4425 "^annotate|blame":
4426 (annotate,
4426 (annotate,
4427 [('r', 'rev', '',
4427 [('r', 'rev', '',
4428 _('annotate the specified revision'), _('REV')),
4428 _('annotate the specified revision'), _('REV')),
4429 ('', 'follow', None,
4429 ('', 'follow', None,
4430 _('follow copies/renames and list the filename (DEPRECATED)')),
4430 _('follow copies/renames and list the filename (DEPRECATED)')),
4431 ('', 'no-follow', None, _("don't follow copies and renames")),
4431 ('', 'no-follow', None, _("don't follow copies and renames")),
4432 ('a', 'text', None, _('treat all files as text')),
4432 ('a', 'text', None, _('treat all files as text')),
4433 ('u', 'user', None, _('list the author (long with -v)')),
4433 ('u', 'user', None, _('list the author (long with -v)')),
4434 ('f', 'file', None, _('list the filename')),
4434 ('f', 'file', None, _('list the filename')),
4435 ('d', 'date', None, _('list the date (short with -q)')),
4435 ('d', 'date', None, _('list the date (short with -q)')),
4436 ('n', 'number', None, _('list the revision number (default)')),
4436 ('n', 'number', None, _('list the revision number (default)')),
4437 ('c', 'changeset', None, _('list the changeset')),
4437 ('c', 'changeset', None, _('list the changeset')),
4438 ('l', 'line-number', None,
4438 ('l', 'line-number', None,
4439 _('show line number at the first appearance'))
4439 _('show line number at the first appearance'))
4440 ] + walkopts,
4440 ] + walkopts,
4441 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
4441 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
4442 "archive":
4442 "archive":
4443 (archive,
4443 (archive,
4444 [('', 'no-decode', None, _('do not pass files through decoders')),
4444 [('', 'no-decode', None, _('do not pass files through decoders')),
4445 ('p', 'prefix', '',
4445 ('p', 'prefix', '',
4446 _('directory prefix for files in archive'), _('PREFIX')),
4446 _('directory prefix for files in archive'), _('PREFIX')),
4447 ('r', 'rev', '',
4447 ('r', 'rev', '',
4448 _('revision to distribute'), _('REV')),
4448 _('revision to distribute'), _('REV')),
4449 ('t', 'type', '',
4449 ('t', 'type', '',
4450 _('type of distribution to create'), _('TYPE')),
4450 _('type of distribution to create'), _('TYPE')),
4451 ] + subrepoopts + walkopts,
4451 ] + subrepoopts + walkopts,
4452 _('[OPTION]... DEST')),
4452 _('[OPTION]... DEST')),
4453 "backout":
4453 "backout":
4454 (backout,
4454 (backout,
4455 [('', 'merge', None,
4455 [('', 'merge', None,
4456 _('merge with old dirstate parent after backout')),
4456 _('merge with old dirstate parent after backout')),
4457 ('', 'parent', '',
4457 ('', 'parent', '',
4458 _('parent to choose when backing out merge'), _('REV')),
4458 _('parent to choose when backing out merge'), _('REV')),
4459 ('t', 'tool', '',
4459 ('t', 'tool', '',
4460 _('specify merge tool')),
4460 _('specify merge tool')),
4461 ('r', 'rev', '',
4461 ('r', 'rev', '',
4462 _('revision to backout'), _('REV')),
4462 _('revision to backout'), _('REV')),
4463 ] + walkopts + commitopts + commitopts2,
4463 ] + walkopts + commitopts + commitopts2,
4464 _('[OPTION]... [-r] REV')),
4464 _('[OPTION]... [-r] REV')),
4465 "bisect":
4465 "bisect":
4466 (bisect,
4466 (bisect,
4467 [('r', 'reset', False, _('reset bisect state')),
4467 [('r', 'reset', False, _('reset bisect state')),
4468 ('g', 'good', False, _('mark changeset good')),
4468 ('g', 'good', False, _('mark changeset good')),
4469 ('b', 'bad', False, _('mark changeset bad')),
4469 ('b', 'bad', False, _('mark changeset bad')),
4470 ('s', 'skip', False, _('skip testing changeset')),
4470 ('s', 'skip', False, _('skip testing changeset')),
4471 ('e', 'extend', False, _('extend the bisect range')),
4471 ('e', 'extend', False, _('extend the bisect range')),
4472 ('c', 'command', '',
4472 ('c', 'command', '',
4473 _('use command to check changeset state'), _('CMD')),
4473 _('use command to check changeset state'), _('CMD')),
4474 ('U', 'noupdate', False, _('do not update to target'))],
4474 ('U', 'noupdate', False, _('do not update to target'))],
4475 _("[-gbsr] [-U] [-c CMD] [REV]")),
4475 _("[-gbsr] [-U] [-c CMD] [REV]")),
4476 "bookmarks":
4476 "bookmarks":
4477 (bookmark,
4477 (bookmark,
4478 [('f', 'force', False, _('force')),
4478 [('f', 'force', False, _('force')),
4479 ('r', 'rev', '', _('revision'), _('REV')),
4479 ('r', 'rev', '', _('revision'), _('REV')),
4480 ('d', 'delete', False, _('delete a given bookmark')),
4480 ('d', 'delete', False, _('delete a given bookmark')),
4481 ('m', 'rename', '', _('rename a given bookmark'), _('NAME')),
4481 ('m', 'rename', '', _('rename a given bookmark'), _('NAME')),
4482 ('i', 'inactive', False, _('do not mark a new bookmark active'))],
4482 ('i', 'inactive', False, _('do not mark a new bookmark active'))],
4483 _('hg bookmarks [-f] [-d] [-i] [-m NAME] [-r REV] [NAME]')),
4483 _('hg bookmarks [-f] [-d] [-i] [-m NAME] [-r REV] [NAME]')),
4484 "branch":
4484 "branch":
4485 (branch,
4485 (branch,
4486 [('f', 'force', None,
4486 [('f', 'force', None,
4487 _('set branch name even if it shadows an existing branch')),
4487 _('set branch name even if it shadows an existing branch')),
4488 ('C', 'clean', None, _('reset branch name to parent branch name'))],
4488 ('C', 'clean', None, _('reset branch name to parent branch name'))],
4489 _('[-fC] [NAME]')),
4489 _('[-fC] [NAME]')),
4490 "branches":
4490 "branches":
4491 (branches,
4491 (branches,
4492 [('a', 'active', False,
4492 [('a', 'active', False,
4493 _('show only branches that have unmerged heads')),
4493 _('show only branches that have unmerged heads')),
4494 ('c', 'closed', False,
4494 ('c', 'closed', False,
4495 _('show normal and closed branches'))],
4495 _('show normal and closed branches'))],
4496 _('[-ac]')),
4496 _('[-ac]')),
4497 "bundle":
4497 "bundle":
4498 (bundle,
4498 (bundle,
4499 [('f', 'force', None,
4499 [('f', 'force', None,
4500 _('run even when the destination is unrelated')),
4500 _('run even when the destination is unrelated')),
4501 ('r', 'rev', [],
4501 ('r', 'rev', [],
4502 _('a changeset intended to be added to the destination'),
4502 _('a changeset intended to be added to the destination'),
4503 _('REV')),
4503 _('REV')),
4504 ('b', 'branch', [],
4504 ('b', 'branch', [],
4505 _('a specific branch you would like to bundle'),
4505 _('a specific branch you would like to bundle'),
4506 _('BRANCH')),
4506 _('BRANCH')),
4507 ('', 'base', [],
4507 ('', 'base', [],
4508 _('a base changeset assumed to be available at the destination'),
4508 _('a base changeset assumed to be available at the destination'),
4509 _('REV')),
4509 _('REV')),
4510 ('a', 'all', None, _('bundle all changesets in the repository')),
4510 ('a', 'all', None, _('bundle all changesets in the repository')),
4511 ('t', 'type', 'bzip2',
4511 ('t', 'type', 'bzip2',
4512 _('bundle compression type to use'), _('TYPE')),
4512 _('bundle compression type to use'), _('TYPE')),
4513 ] + remoteopts,
4513 ] + remoteopts,
4514 _('[-f] [-t TYPE] [-a] [-r REV]... [--base REV]... FILE [DEST]')),
4514 _('[-f] [-t TYPE] [-a] [-r REV]... [--base REV]... FILE [DEST]')),
4515 "cat":
4515 "cat":
4516 (cat,
4516 (cat,
4517 [('o', 'output', '',
4517 [('o', 'output', '',
4518 _('print output to file with formatted name'), _('FORMAT')),
4518 _('print output to file with formatted name'), _('FORMAT')),
4519 ('r', 'rev', '',
4519 ('r', 'rev', '',
4520 _('print the given revision'), _('REV')),
4520 _('print the given revision'), _('REV')),
4521 ('', 'decode', None, _('apply any matching decode filter')),
4521 ('', 'decode', None, _('apply any matching decode filter')),
4522 ] + walkopts,
4522 ] + walkopts,
4523 _('[OPTION]... FILE...')),
4523 _('[OPTION]... FILE...')),
4524 "^clone":
4524 "^clone":
4525 (clone,
4525 (clone,
4526 [('U', 'noupdate', None,
4526 [('U', 'noupdate', None,
4527 _('the clone will include an empty working copy (only a repository)')),
4527 _('the clone will include an empty working copy (only a repository)')),
4528 ('u', 'updaterev', '',
4528 ('u', 'updaterev', '',
4529 _('revision, tag or branch to check out'), _('REV')),
4529 _('revision, tag or branch to check out'), _('REV')),
4530 ('r', 'rev', [],
4530 ('r', 'rev', [],
4531 _('include the specified changeset'), _('REV')),
4531 _('include the specified changeset'), _('REV')),
4532 ('b', 'branch', [],
4532 ('b', 'branch', [],
4533 _('clone only the specified branch'), _('BRANCH')),
4533 _('clone only the specified branch'), _('BRANCH')),
4534 ('', 'pull', None, _('use pull protocol to copy metadata')),
4534 ('', 'pull', None, _('use pull protocol to copy metadata')),
4535 ('', 'uncompressed', None,
4535 ('', 'uncompressed', None,
4536 _('use uncompressed transfer (fast over LAN)')),
4536 _('use uncompressed transfer (fast over LAN)')),
4537 ] + remoteopts,
4537 ] + remoteopts,
4538 _('[OPTION]... SOURCE [DEST]')),
4538 _('[OPTION]... SOURCE [DEST]')),
4539 "^commit|ci":
4539 "^commit|ci":
4540 (commit,
4540 (commit,
4541 [('A', 'addremove', None,
4541 [('A', 'addremove', None,
4542 _('mark new/missing files as added/removed before committing')),
4542 _('mark new/missing files as added/removed before committing')),
4543 ('', 'close-branch', None,
4543 ('', 'close-branch', None,
4544 _('mark a branch as closed, hiding it from the branch list')),
4544 _('mark a branch as closed, hiding it from the branch list')),
4545 ] + walkopts + commitopts + commitopts2,
4545 ] + walkopts + commitopts + commitopts2,
4546 _('[OPTION]... [FILE]...')),
4546 _('[OPTION]... [FILE]...')),
4547 "copy|cp":
4547 "copy|cp":
4548 (copy,
4548 (copy,
4549 [('A', 'after', None, _('record a copy that has already occurred')),
4549 [('A', 'after', None, _('record a copy that has already occurred')),
4550 ('f', 'force', None,
4550 ('f', 'force', None,
4551 _('forcibly copy over an existing managed file')),
4551 _('forcibly copy over an existing managed file')),
4552 ] + walkopts + dryrunopts,
4552 ] + walkopts + dryrunopts,
4553 _('[OPTION]... [SOURCE]... DEST')),
4553 _('[OPTION]... [SOURCE]... DEST')),
4554 "debugancestor": (debugancestor, [], _('[INDEX] REV1 REV2')),
4554 "debugancestor": (debugancestor, [], _('[INDEX] REV1 REV2')),
4555 "debugbuilddag":
4555 "debugbuilddag":
4556 (debugbuilddag,
4556 (debugbuilddag,
4557 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
4557 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
4558 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
4558 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
4559 ('n', 'new-file', None, _('add new file at each rev')),
4559 ('n', 'new-file', None, _('add new file at each rev')),
4560 ],
4560 ],
4561 _('[OPTION]... TEXT')),
4561 _('[OPTION]... TEXT')),
4562 "debugbundle":
4562 "debugbundle":
4563 (debugbundle,
4563 (debugbundle,
4564 [('a', 'all', None, _('show all details')),
4564 [('a', 'all', None, _('show all details')),
4565 ],
4565 ],
4566 _('FILE')),
4566 _('FILE')),
4567 "debugcheckstate": (debugcheckstate, [], ''),
4567 "debugcheckstate": (debugcheckstate, [], ''),
4568 "debugcommands": (debugcommands, [], _('[COMMAND]')),
4568 "debugcommands": (debugcommands, [], _('[COMMAND]')),
4569 "debugcomplete":
4569 "debugcomplete":
4570 (debugcomplete,
4570 (debugcomplete,
4571 [('o', 'options', None, _('show the command options'))],
4571 [('o', 'options', None, _('show the command options'))],
4572 _('[-o] CMD')),
4572 _('[-o] CMD')),
4573 "debugdag":
4573 "debugdag":
4574 (debugdag,
4574 (debugdag,
4575 [('t', 'tags', None, _('use tags as labels')),
4575 [('t', 'tags', None, _('use tags as labels')),
4576 ('b', 'branches', None, _('annotate with branch names')),
4576 ('b', 'branches', None, _('annotate with branch names')),
4577 ('', 'dots', None, _('use dots for runs')),
4577 ('', 'dots', None, _('use dots for runs')),
4578 ('s', 'spaces', None, _('separate elements by spaces')),
4578 ('s', 'spaces', None, _('separate elements by spaces')),
4579 ],
4579 ],
4580 _('[OPTION]... [FILE [REV]...]')),
4580 _('[OPTION]... [FILE [REV]...]')),
4581 "debugdate":
4581 "debugdate":
4582 (debugdate,
4582 (debugdate,
4583 [('e', 'extended', None, _('try extended date formats'))],
4583 [('e', 'extended', None, _('try extended date formats'))],
4584 _('[-e] DATE [RANGE]')),
4584 _('[-e] DATE [RANGE]')),
4585 "debugdata": (debugdata, [], _('FILE REV')),
4585 "debugdata": (debugdata, [], _('FILE REV')),
4586 "debugdiscovery": (debugdiscovery,
4586 "debugdiscovery": (debugdiscovery,
4587 [('', 'old', None,
4587 [('', 'old', None,
4588 _('use old-style discovery')),
4588 _('use old-style discovery')),
4589 ('', 'nonheads', None,
4589 ('', 'nonheads', None,
4590 _('use old-style discovery with non-heads included')),
4590 _('use old-style discovery with non-heads included')),
4591 ] + remoteopts,
4591 ] + remoteopts,
4592 _('[-l REV] [-r REV] [-b BRANCH]...'
4592 _('[-l REV] [-r REV] [-b BRANCH]...'
4593 ' [OTHER]')),
4593 ' [OTHER]')),
4594 "debugfsinfo": (debugfsinfo, [], _('[PATH]')),
4594 "debugfsinfo": (debugfsinfo, [], _('[PATH]')),
4595 "debuggetbundle":
4595 "debuggetbundle":
4596 (debuggetbundle,
4596 (debuggetbundle,
4597 [('H', 'head', [], _('id of head node'), _('ID')),
4597 [('H', 'head', [], _('id of head node'), _('ID')),
4598 ('C', 'common', [], _('id of common node'), _('ID')),
4598 ('C', 'common', [], _('id of common node'), _('ID')),
4599 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE')),
4599 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE')),
4600 ],
4600 ],
4601 _('REPO FILE [-H|-C ID]...')),
4601 _('REPO FILE [-H|-C ID]...')),
4602 "debugignore": (debugignore, [], ''),
4602 "debugignore": (debugignore, [], ''),
4603 "debugindex": (debugindex,
4603 "debugindex": (debugindex,
4604 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
4604 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
4605 _('FILE')),
4605 _('FILE')),
4606 "debugindexdot": (debugindexdot, [], _('FILE')),
4606 "debugindexdot": (debugindexdot, [], _('FILE')),
4607 "debuginstall": (debuginstall, [], ''),
4607 "debuginstall": (debuginstall, [], ''),
4608 "debugknown": (debugknown, [], _('REPO ID...')),
4608 "debugknown": (debugknown, [], _('REPO ID...')),
4609 "debugpushkey": (debugpushkey, [], _('REPO NAMESPACE [KEY OLD NEW]')),
4609 "debugpushkey": (debugpushkey, [], _('REPO NAMESPACE [KEY OLD NEW]')),
4610 "debugrebuildstate":
4610 "debugrebuildstate":
4611 (debugrebuildstate,
4611 (debugrebuildstate,
4612 [('r', 'rev', '',
4612 [('r', 'rev', '',
4613 _('revision to rebuild to'), _('REV'))],
4613 _('revision to rebuild to'), _('REV'))],
4614 _('[-r REV] [REV]')),
4614 _('[-r REV] [REV]')),
4615 "debugrename":
4615 "debugrename":
4616 (debugrename,
4616 (debugrename,
4617 [('r', 'rev', '',
4617 [('r', 'rev', '',
4618 _('revision to debug'), _('REV'))],
4618 _('revision to debug'), _('REV'))],
4619 _('[-r REV] FILE')),
4619 _('[-r REV] FILE')),
4620 "debugrevspec":
4620 "debugrevspec":
4621 (debugrevspec, [], _('REVSPEC')),
4621 (debugrevspec, [], _('REVSPEC')),
4622 "debugsetparents":
4622 "debugsetparents":
4623 (debugsetparents, [], _('REV1 [REV2]')),
4623 (debugsetparents, [], _('REV1 [REV2]')),
4624 "debugstate":
4624 "debugstate":
4625 (debugstate,
4625 (debugstate,
4626 [('', 'nodates', None, _('do not display the saved mtime')),
4626 [('', 'nodates', None, _('do not display the saved mtime')),
4627 ('', 'datesort', None, _('sort by saved mtime'))],
4627 ('', 'datesort', None, _('sort by saved mtime'))],
4628 _('[OPTION]...')),
4628 _('[OPTION]...')),
4629 "debugsub":
4629 "debugsub":
4630 (debugsub,
4630 (debugsub,
4631 [('r', 'rev', '',
4631 [('r', 'rev', '',
4632 _('revision to check'), _('REV'))],
4632 _('revision to check'), _('REV'))],
4633 _('[-r REV] [REV]')),
4633 _('[-r REV] [REV]')),
4634 "debugwalk": (debugwalk, walkopts, _('[OPTION]... [FILE]...')),
4634 "debugwalk": (debugwalk, walkopts, _('[OPTION]... [FILE]...')),
4635 "debugwireargs":
4635 "debugwireargs":
4636 (debugwireargs,
4636 (debugwireargs,
4637 [('', 'three', '', 'three'),
4637 [('', 'three', '', 'three'),
4638 ('', 'four', '', 'four'),
4638 ('', 'four', '', 'four'),
4639 ('', 'five', '', 'five'),
4639 ('', 'five', '', 'five'),
4640 ] + remoteopts,
4640 ] + remoteopts,
4641 _('REPO [OPTIONS]... [ONE [TWO]]')),
4641 _('REPO [OPTIONS]... [ONE [TWO]]')),
4642 "^diff":
4642 "^diff":
4643 (diff,
4643 (diff,
4644 [('r', 'rev', [],
4644 [('r', 'rev', [],
4645 _('revision'), _('REV')),
4645 _('revision'), _('REV')),
4646 ('c', 'change', '',
4646 ('c', 'change', '',
4647 _('change made by revision'), _('REV'))
4647 _('change made by revision'), _('REV'))
4648 ] + diffopts + diffopts2 + walkopts + subrepoopts,
4648 ] + diffopts + diffopts2 + walkopts + subrepoopts,
4649 _('[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...')),
4649 _('[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...')),
4650 "^export":
4650 "^export":
4651 (export,
4651 (export,
4652 [('o', 'output', '',
4652 [('o', 'output', '',
4653 _('print output to file with formatted name'), _('FORMAT')),
4653 _('print output to file with formatted name'), _('FORMAT')),
4654 ('', 'switch-parent', None, _('diff against the second parent')),
4654 ('', 'switch-parent', None, _('diff against the second parent')),
4655 ('r', 'rev', [],
4655 ('r', 'rev', [],
4656 _('revisions to export'), _('REV')),
4656 _('revisions to export'), _('REV')),
4657 ] + diffopts,
4657 ] + diffopts,
4658 _('[OPTION]... [-o OUTFILESPEC] REV...')),
4658 _('[OPTION]... [-o OUTFILESPEC] REV...')),
4659 "^forget":
4659 "^forget":
4660 (forget,
4660 (forget,
4661 [] + walkopts,
4661 [] + walkopts,
4662 _('[OPTION]... FILE...')),
4662 _('[OPTION]... FILE...')),
4663 "grep":
4663 "grep":
4664 (grep,
4664 (grep,
4665 [('0', 'print0', None, _('end fields with NUL')),
4665 [('0', 'print0', None, _('end fields with NUL')),
4666 ('', 'all', None, _('print all revisions that match')),
4666 ('', 'all', None, _('print all revisions that match')),
4667 ('a', 'text', None, _('treat all files as text')),
4667 ('a', 'text', None, _('treat all files as text')),
4668 ('f', 'follow', None,
4668 ('f', 'follow', None,
4669 _('follow changeset history,'
4669 _('follow changeset history,'
4670 ' or file history across copies and renames')),
4670 ' or file history across copies and renames')),
4671 ('i', 'ignore-case', None, _('ignore case when matching')),
4671 ('i', 'ignore-case', None, _('ignore case when matching')),
4672 ('l', 'files-with-matches', None,
4672 ('l', 'files-with-matches', None,
4673 _('print only filenames and revisions that match')),
4673 _('print only filenames and revisions that match')),
4674 ('n', 'line-number', None, _('print matching line numbers')),
4674 ('n', 'line-number', None, _('print matching line numbers')),
4675 ('r', 'rev', [],
4675 ('r', 'rev', [],
4676 _('only search files changed within revision range'), _('REV')),
4676 _('only search files changed within revision range'), _('REV')),
4677 ('u', 'user', None, _('list the author (long with -v)')),
4677 ('u', 'user', None, _('list the author (long with -v)')),
4678 ('d', 'date', None, _('list the date (short with -q)')),
4678 ('d', 'date', None, _('list the date (short with -q)')),
4679 ] + walkopts,
4679 ] + walkopts,
4680 _('[OPTION]... PATTERN [FILE]...')),
4680 _('[OPTION]... PATTERN [FILE]...')),
4681 "heads":
4681 "heads":
4682 (heads,
4682 (heads,
4683 [('r', 'rev', '',
4683 [('r', 'rev', '',
4684 _('show only heads which are descendants of STARTREV'),
4684 _('show only heads which are descendants of STARTREV'),
4685 _('STARTREV')),
4685 _('STARTREV')),
4686 ('t', 'topo', False, _('show topological heads only')),
4686 ('t', 'topo', False, _('show topological heads only')),
4687 ('a', 'active', False,
4687 ('a', 'active', False,
4688 _('show active branchheads only (DEPRECATED)')),
4688 _('show active branchheads only (DEPRECATED)')),
4689 ('c', 'closed', False,
4689 ('c', 'closed', False,
4690 _('show normal and closed branch heads')),
4690 _('show normal and closed branch heads')),
4691 ] + templateopts,
4691 ] + templateopts,
4692 _('[-ac] [-r STARTREV] [REV]...')),
4692 _('[-ac] [-r STARTREV] [REV]...')),
4693 "help": (help_, [], _('[TOPIC]')),
4693 "help": (help_, [], _('[TOPIC]')),
4694 "identify|id":
4694 "identify|id":
4695 (identify,
4695 (identify,
4696 [('r', 'rev', '',
4696 [('r', 'rev', '',
4697 _('identify the specified revision'), _('REV')),
4697 _('identify the specified revision'), _('REV')),
4698 ('n', 'num', None, _('show local revision number')),
4698 ('n', 'num', None, _('show local revision number')),
4699 ('i', 'id', None, _('show global revision id')),
4699 ('i', 'id', None, _('show global revision id')),
4700 ('b', 'branch', None, _('show branch')),
4700 ('b', 'branch', None, _('show branch')),
4701 ('t', 'tags', None, _('show tags')),
4701 ('t', 'tags', None, _('show tags')),
4702 ('B', 'bookmarks', None, _('show bookmarks'))],
4702 ('B', 'bookmarks', None, _('show bookmarks'))],
4703 _('[-nibtB] [-r REV] [SOURCE]')),
4703 _('[-nibtB] [-r REV] [SOURCE]')),
4704 "import|patch":
4704 "import|patch":
4705 (import_,
4705 (import_,
4706 [('p', 'strip', 1,
4706 [('p', 'strip', 1,
4707 _('directory strip option for patch. This has the same '
4707 _('directory strip option for patch. This has the same '
4708 'meaning as the corresponding patch option'),
4708 'meaning as the corresponding patch option'),
4709 _('NUM')),
4709 _('NUM')),
4710 ('b', 'base', '',
4710 ('b', 'base', '',
4711 _('base path'), _('PATH')),
4711 _('base path'), _('PATH')),
4712 ('f', 'force', None,
4712 ('f', 'force', None,
4713 _('skip check for outstanding uncommitted changes')),
4713 _('skip check for outstanding uncommitted changes')),
4714 ('', 'no-commit', None,
4714 ('', 'no-commit', None,
4715 _("don't commit, just update the working directory")),
4715 _("don't commit, just update the working directory")),
4716 ('', 'exact', None,
4716 ('', 'exact', None,
4717 _('apply patch to the nodes from which it was generated')),
4717 _('apply patch to the nodes from which it was generated')),
4718 ('', 'import-branch', None,
4718 ('', 'import-branch', None,
4719 _('use any branch information in patch (implied by --exact)'))] +
4719 _('use any branch information in patch (implied by --exact)'))] +
4720 commitopts + commitopts2 + similarityopts,
4720 commitopts + commitopts2 + similarityopts,
4721 _('[OPTION]... PATCH...')),
4721 _('[OPTION]... PATCH...')),
4722 "incoming|in":
4722 "incoming|in":
4723 (incoming,
4723 (incoming,
4724 [('f', 'force', None,
4724 [('f', 'force', None,
4725 _('run even if remote repository is unrelated')),
4725 _('run even if remote repository is unrelated')),
4726 ('n', 'newest-first', None, _('show newest record first')),
4726 ('n', 'newest-first', None, _('show newest record first')),
4727 ('', 'bundle', '',
4727 ('', 'bundle', '',
4728 _('file to store the bundles into'), _('FILE')),
4728 _('file to store the bundles into'), _('FILE')),
4729 ('r', 'rev', [],
4729 ('r', 'rev', [],
4730 _('a remote changeset intended to be added'), _('REV')),
4730 _('a remote changeset intended to be added'), _('REV')),
4731 ('B', 'bookmarks', False, _("compare bookmarks")),
4731 ('B', 'bookmarks', False, _("compare bookmarks")),
4732 ('b', 'branch', [],
4732 ('b', 'branch', [],
4733 _('a specific branch you would like to pull'), _('BRANCH')),
4733 _('a specific branch you would like to pull'), _('BRANCH')),
4734 ] + logopts + remoteopts + subrepoopts,
4734 ] + logopts + remoteopts + subrepoopts,
4735 _('[-p] [-n] [-M] [-f] [-r REV]...'
4735 _('[-p] [-n] [-M] [-f] [-r REV]...'
4736 ' [--bundle FILENAME] [SOURCE]')),
4736 ' [--bundle FILENAME] [SOURCE]')),
4737 "^init":
4737 "^init":
4738 (init,
4738 (init,
4739 remoteopts,
4739 remoteopts,
4740 _('[-e CMD] [--remotecmd CMD] [DEST]')),
4740 _('[-e CMD] [--remotecmd CMD] [DEST]')),
4741 "locate":
4741 "locate":
4742 (locate,
4742 (locate,
4743 [('r', 'rev', '',
4743 [('r', 'rev', '',
4744 _('search the repository as it is in REV'), _('REV')),
4744 _('search the repository as it is in REV'), _('REV')),
4745 ('0', 'print0', None,
4745 ('0', 'print0', None,
4746 _('end filenames with NUL, for use with xargs')),
4746 _('end filenames with NUL, for use with xargs')),
4747 ('f', 'fullpath', None,
4747 ('f', 'fullpath', None,
4748 _('print complete paths from the filesystem root')),
4748 _('print complete paths from the filesystem root')),
4749 ] + walkopts,
4749 ] + walkopts,
4750 _('[OPTION]... [PATTERN]...')),
4750 _('[OPTION]... [PATTERN]...')),
4751 "^log|history":
4751 "^log|history":
4752 (log,
4752 (log,
4753 [('f', 'follow', None,
4753 [('f', 'follow', None,
4754 _('follow changeset history,'
4754 _('follow changeset history,'
4755 ' or file history across copies and renames')),
4755 ' or file history across copies and renames')),
4756 ('', 'follow-first', None,
4756 ('', 'follow-first', None,
4757 _('only follow the first parent of merge changesets')),
4757 _('only follow the first parent of merge changesets')),
4758 ('d', 'date', '',
4758 ('d', 'date', '',
4759 _('show revisions matching date spec'), _('DATE')),
4759 _('show revisions matching date spec'), _('DATE')),
4760 ('C', 'copies', None, _('show copied files')),
4760 ('C', 'copies', None, _('show copied files')),
4761 ('k', 'keyword', [],
4761 ('k', 'keyword', [],
4762 _('do case-insensitive search for a given text'), _('TEXT')),
4762 _('do case-insensitive search for a given text'), _('TEXT')),
4763 ('r', 'rev', [],
4763 ('r', 'rev', [],
4764 _('show the specified revision or range'), _('REV')),
4764 _('show the specified revision or range'), _('REV')),
4765 ('', 'removed', None, _('include revisions where files were removed')),
4765 ('', 'removed', None, _('include revisions where files were removed')),
4766 ('m', 'only-merges', None, _('show only merges')),
4766 ('m', 'only-merges', None, _('show only merges')),
4767 ('u', 'user', [],
4767 ('u', 'user', [],
4768 _('revisions committed by user'), _('USER')),
4768 _('revisions committed by user'), _('USER')),
4769 ('', 'only-branch', [],
4769 ('', 'only-branch', [],
4770 _('show only changesets within the given named branch (DEPRECATED)'),
4770 _('show only changesets within the given named branch (DEPRECATED)'),
4771 _('BRANCH')),
4771 _('BRANCH')),
4772 ('b', 'branch', [],
4772 ('b', 'branch', [],
4773 _('show changesets within the given named branch'), _('BRANCH')),
4773 _('show changesets within the given named branch'), _('BRANCH')),
4774 ('P', 'prune', [],
4774 ('P', 'prune', [],
4775 _('do not display revision or any of its ancestors'), _('REV')),
4775 _('do not display revision or any of its ancestors'), _('REV')),
4776 ] + logopts + walkopts,
4776 ] + logopts + walkopts,
4777 _('[OPTION]... [FILE]')),
4777 _('[OPTION]... [FILE]')),
4778 "manifest":
4778 "manifest":
4779 (manifest,
4779 (manifest,
4780 [('r', 'rev', '',
4780 [('r', 'rev', '',
4781 _('revision to display'), _('REV'))],
4781 _('revision to display'), _('REV'))],
4782 _('[-r REV]')),
4782 _('[-r REV]')),
4783 "^merge":
4783 "^merge":
4784 (merge,
4784 (merge,
4785 [('f', 'force', None, _('force a merge with outstanding changes')),
4785 [('f', 'force', None, _('force a merge with outstanding changes')),
4786 ('t', 'tool', '', _('specify merge tool')),
4786 ('t', 'tool', '', _('specify merge tool')),
4787 ('r', 'rev', '',
4787 ('r', 'rev', '',
4788 _('revision to merge'), _('REV')),
4788 _('revision to merge'), _('REV')),
4789 ('P', 'preview', None,
4789 ('P', 'preview', None,
4790 _('review revisions to merge (no merge is performed)'))],
4790 _('review revisions to merge (no merge is performed)'))],
4791 _('[-P] [-f] [[-r] REV]')),
4791 _('[-P] [-f] [[-r] REV]')),
4792 "outgoing|out":
4792 "outgoing|out":
4793 (outgoing,
4793 (outgoing,
4794 [('f', 'force', None,
4794 [('f', 'force', None,
4795 _('run even when the destination is unrelated')),
4795 _('run even when the destination is unrelated')),
4796 ('r', 'rev', [],
4796 ('r', 'rev', [],
4797 _('a changeset intended to be included in the destination'),
4797 _('a changeset intended to be included in the destination'),
4798 _('REV')),
4798 _('REV')),
4799 ('n', 'newest-first', None, _('show newest record first')),
4799 ('n', 'newest-first', None, _('show newest record first')),
4800 ('B', 'bookmarks', False, _("compare bookmarks")),
4800 ('B', 'bookmarks', False, _("compare bookmarks")),
4801 ('b', 'branch', [],
4801 ('b', 'branch', [],
4802 _('a specific branch you would like to push'), _('BRANCH')),
4802 _('a specific branch you would like to push'), _('BRANCH')),
4803 ] + logopts + remoteopts + subrepoopts,
4803 ] + logopts + remoteopts + subrepoopts,
4804 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
4804 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
4805 "parents":
4805 "parents":
4806 (parents,
4806 (parents,
4807 [('r', 'rev', '',
4807 [('r', 'rev', '',
4808 _('show parents of the specified revision'), _('REV')),
4808 _('show parents of the specified revision'), _('REV')),
4809 ] + templateopts,
4809 ] + templateopts,
4810 _('[-r REV] [FILE]')),
4810 _('[-r REV] [FILE]')),
4811 "paths": (paths, [], _('[NAME]')),
4811 "paths": (paths, [], _('[NAME]')),
4812 "^pull":
4812 "^pull":
4813 (pull,
4813 (pull,
4814 [('u', 'update', None,
4814 [('u', 'update', None,
4815 _('update to new branch head if changesets were pulled')),
4815 _('update to new branch head if changesets were pulled')),
4816 ('f', 'force', None,
4816 ('f', 'force', None,
4817 _('run even when remote repository is unrelated')),
4817 _('run even when remote repository is unrelated')),
4818 ('r', 'rev', [],
4818 ('r', 'rev', [],
4819 _('a remote changeset intended to be added'), _('REV')),
4819 _('a remote changeset intended to be added'), _('REV')),
4820 ('B', 'bookmark', [], _("bookmark to pull"), _('BOOKMARK')),
4820 ('B', 'bookmark', [], _("bookmark to pull"), _('BOOKMARK')),
4821 ('b', 'branch', [],
4821 ('b', 'branch', [],
4822 _('a specific branch you would like to pull'), _('BRANCH')),
4822 _('a specific branch you would like to pull'), _('BRANCH')),
4823 ] + remoteopts,
4823 ] + remoteopts,
4824 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
4824 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
4825 "^push":
4825 "^push":
4826 (push,
4826 (push,
4827 [('f', 'force', None, _('force push')),
4827 [('f', 'force', None, _('force push')),
4828 ('r', 'rev', [],
4828 ('r', 'rev', [],
4829 _('a changeset intended to be included in the destination'),
4829 _('a changeset intended to be included in the destination'),
4830 _('REV')),
4830 _('REV')),
4831 ('B', 'bookmark', [], _("bookmark to push"), _('BOOKMARK')),
4831 ('B', 'bookmark', [], _("bookmark to push"), _('BOOKMARK')),
4832 ('b', 'branch', [],
4832 ('b', 'branch', [],
4833 _('a specific branch you would like to push'), _('BRANCH')),
4833 _('a specific branch you would like to push'), _('BRANCH')),
4834 ('', 'new-branch', False, _('allow pushing a new branch')),
4834 ('', 'new-branch', False, _('allow pushing a new branch')),
4835 ] + remoteopts,
4835 ] + remoteopts,
4836 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
4836 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
4837 "recover": (recover, []),
4837 "recover": (recover, []),
4838 "^remove|rm":
4838 "^remove|rm":
4839 (remove,
4839 (remove,
4840 [('A', 'after', None, _('record delete for missing files')),
4840 [('A', 'after', None, _('record delete for missing files')),
4841 ('f', 'force', None,
4841 ('f', 'force', None,
4842 _('remove (and delete) file even if added or modified')),
4842 _('remove (and delete) file even if added or modified')),
4843 ] + walkopts,
4843 ] + walkopts,
4844 _('[OPTION]... FILE...')),
4844 _('[OPTION]... FILE...')),
4845 "rename|move|mv":
4845 "rename|move|mv":
4846 (rename,
4846 (rename,
4847 [('A', 'after', None, _('record a rename that has already occurred')),
4847 [('A', 'after', None, _('record a rename that has already occurred')),
4848 ('f', 'force', None,
4848 ('f', 'force', None,
4849 _('forcibly copy over an existing managed file')),
4849 _('forcibly copy over an existing managed file')),
4850 ] + walkopts + dryrunopts,
4850 ] + walkopts + dryrunopts,
4851 _('[OPTION]... SOURCE... DEST')),
4851 _('[OPTION]... SOURCE... DEST')),
4852 "resolve":
4852 "resolve":
4853 (resolve,
4853 (resolve,
4854 [('a', 'all', None, _('select all unresolved files')),
4854 [('a', 'all', None, _('select all unresolved files')),
4855 ('l', 'list', None, _('list state of files needing merge')),
4855 ('l', 'list', None, _('list state of files needing merge')),
4856 ('m', 'mark', None, _('mark files as resolved')),
4856 ('m', 'mark', None, _('mark files as resolved')),
4857 ('u', 'unmark', None, _('mark files as unresolved')),
4857 ('u', 'unmark', None, _('mark files as unresolved')),
4858 ('t', 'tool', '', _('specify merge tool')),
4858 ('t', 'tool', '', _('specify merge tool')),
4859 ('n', 'no-status', None, _('hide status prefix'))]
4859 ('n', 'no-status', None, _('hide status prefix'))]
4860 + walkopts,
4860 + walkopts,
4861 _('[OPTION]... [FILE]...')),
4861 _('[OPTION]... [FILE]...')),
4862 "revert":
4862 "revert":
4863 (revert,
4863 (revert,
4864 [('a', 'all', None, _('revert all changes when no arguments given')),
4864 [('a', 'all', None, _('revert all changes when no arguments given')),
4865 ('d', 'date', '',
4865 ('d', 'date', '',
4866 _('tipmost revision matching date'), _('DATE')),
4866 _('tipmost revision matching date'), _('DATE')),
4867 ('r', 'rev', '',
4867 ('r', 'rev', '',
4868 _('revert to the specified revision'), _('REV')),
4868 _('revert to the specified revision'), _('REV')),
4869 ('', 'no-backup', None, _('do not save backup copies of files')),
4869 ('', 'no-backup', None, _('do not save backup copies of files')),
4870 ] + walkopts + dryrunopts,
4870 ] + walkopts + dryrunopts,
4871 _('[OPTION]... [-r REV] [NAME]...')),
4871 _('[OPTION]... [-r REV] [NAME]...')),
4872 "rollback": (rollback, dryrunopts),
4872 "rollback": (rollback, dryrunopts),
4873 "root": (root, []),
4873 "root": (root, []),
4874 "^serve":
4874 "^serve":
4875 (serve,
4875 (serve,
4876 [('A', 'accesslog', '',
4876 [('A', 'accesslog', '',
4877 _('name of access log file to write to'), _('FILE')),
4877 _('name of access log file to write to'), _('FILE')),
4878 ('d', 'daemon', None, _('run server in background')),
4878 ('d', 'daemon', None, _('run server in background')),
4879 ('', 'daemon-pipefds', '',
4879 ('', 'daemon-pipefds', '',
4880 _('used internally by daemon mode'), _('NUM')),
4880 _('used internally by daemon mode'), _('NUM')),
4881 ('E', 'errorlog', '',
4881 ('E', 'errorlog', '',
4882 _('name of error log file to write to'), _('FILE')),
4882 _('name of error log file to write to'), _('FILE')),
4883 # use string type, then we can check if something was passed
4883 # use string type, then we can check if something was passed
4884 ('p', 'port', '',
4884 ('p', 'port', '',
4885 _('port to listen on (default: 8000)'), _('PORT')),
4885 _('port to listen on (default: 8000)'), _('PORT')),
4886 ('a', 'address', '',
4886 ('a', 'address', '',
4887 _('address to listen on (default: all interfaces)'), _('ADDR')),
4887 _('address to listen on (default: all interfaces)'), _('ADDR')),
4888 ('', 'prefix', '',
4888 ('', 'prefix', '',
4889 _('prefix path to serve from (default: server root)'), _('PREFIX')),
4889 _('prefix path to serve from (default: server root)'), _('PREFIX')),
4890 ('n', 'name', '',
4890 ('n', 'name', '',
4891 _('name to show in web pages (default: working directory)'),
4891 _('name to show in web pages (default: working directory)'),
4892 _('NAME')),
4892 _('NAME')),
4893 ('', 'web-conf', '',
4893 ('', 'web-conf', '',
4894 _('name of the hgweb config file (see "hg help hgweb")'),
4894 _('name of the hgweb config file (see "hg help hgweb")'),
4895 _('FILE')),
4895 _('FILE')),
4896 ('', 'webdir-conf', '',
4896 ('', 'webdir-conf', '',
4897 _('name of the hgweb config file (DEPRECATED)'), _('FILE')),
4897 _('name of the hgweb config file (DEPRECATED)'), _('FILE')),
4898 ('', 'pid-file', '',
4898 ('', 'pid-file', '',
4899 _('name of file to write process ID to'), _('FILE')),
4899 _('name of file to write process ID to'), _('FILE')),
4900 ('', 'stdio', None, _('for remote clients')),
4900 ('', 'stdio', None, _('for remote clients')),
4901 ('t', 'templates', '',
4901 ('t', 'templates', '',
4902 _('web templates to use'), _('TEMPLATE')),
4902 _('web templates to use'), _('TEMPLATE')),
4903 ('', 'style', '',
4903 ('', 'style', '',
4904 _('template style to use'), _('STYLE')),
4904 _('template style to use'), _('STYLE')),
4905 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
4905 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
4906 ('', 'certificate', '',
4906 ('', 'certificate', '',
4907 _('SSL certificate file'), _('FILE'))],
4907 _('SSL certificate file'), _('FILE'))],
4908 _('[OPTION]...')),
4908 _('[OPTION]...')),
4909 "showconfig|debugconfig":
4909 "showconfig|debugconfig":
4910 (showconfig,
4910 (showconfig,
4911 [('u', 'untrusted', None, _('show untrusted configuration options'))],
4911 [('u', 'untrusted', None, _('show untrusted configuration options'))],
4912 _('[-u] [NAME]...')),
4912 _('[-u] [NAME]...')),
4913 "^summary|sum":
4913 "^summary|sum":
4914 (summary,
4914 (summary,
4915 [('', 'remote', None, _('check for push and pull'))], '[--remote]'),
4915 [('', 'remote', None, _('check for push and pull'))], '[--remote]'),
4916 "^status|st":
4916 "^status|st":
4917 (status,
4917 (status,
4918 [('A', 'all', None, _('show status of all files')),
4918 [('A', 'all', None, _('show status of all files')),
4919 ('m', 'modified', None, _('show only modified files')),
4919 ('m', 'modified', None, _('show only modified files')),
4920 ('a', 'added', None, _('show only added files')),
4920 ('a', 'added', None, _('show only added files')),
4921 ('r', 'removed', None, _('show only removed files')),
4921 ('r', 'removed', None, _('show only removed files')),
4922 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
4922 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
4923 ('c', 'clean', None, _('show only files without changes')),
4923 ('c', 'clean', None, _('show only files without changes')),
4924 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
4924 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
4925 ('i', 'ignored', None, _('show only ignored files')),
4925 ('i', 'ignored', None, _('show only ignored files')),
4926 ('n', 'no-status', None, _('hide status prefix')),
4926 ('n', 'no-status', None, _('hide status prefix')),
4927 ('C', 'copies', None, _('show source of copied files')),
4927 ('C', 'copies', None, _('show source of copied files')),
4928 ('0', 'print0', None,
4928 ('0', 'print0', None,
4929 _('end filenames with NUL, for use with xargs')),
4929 _('end filenames with NUL, for use with xargs')),
4930 ('', 'rev', [],
4930 ('', 'rev', [],
4931 _('show difference from revision'), _('REV')),
4931 _('show difference from revision'), _('REV')),
4932 ('', 'change', '',
4932 ('', 'change', '',
4933 _('list the changed files of a revision'), _('REV')),
4933 _('list the changed files of a revision'), _('REV')),
4934 ] + walkopts + subrepoopts,
4934 ] + walkopts + subrepoopts,
4935 _('[OPTION]... [FILE]...')),
4935 _('[OPTION]... [FILE]...')),
4936 "tag":
4936 "tag":
4937 (tag,
4937 (tag,
4938 [('f', 'force', None, _('force tag')),
4938 [('f', 'force', None, _('force tag')),
4939 ('l', 'local', None, _('make the tag local')),
4939 ('l', 'local', None, _('make the tag local')),
4940 ('r', 'rev', '',
4940 ('r', 'rev', '',
4941 _('revision to tag'), _('REV')),
4941 _('revision to tag'), _('REV')),
4942 ('', 'remove', None, _('remove a tag')),
4942 ('', 'remove', None, _('remove a tag')),
4943 # -l/--local is already there, commitopts cannot be used
4943 # -l/--local is already there, commitopts cannot be used
4944 ('e', 'edit', None, _('edit commit message')),
4944 ('e', 'edit', None, _('edit commit message')),
4945 ('m', 'message', '',
4945 ('m', 'message', '',
4946 _('use <text> as commit message'), _('TEXT')),
4946 _('use <text> as commit message'), _('TEXT')),
4947 ] + commitopts2,
4947 ] + commitopts2,
4948 _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...')),
4948 _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...')),
4949 "tags": (tags, [], ''),
4949 "tags": (tags, [], ''),
4950 "tip":
4950 "tip":
4951 (tip,
4951 (tip,
4952 [('p', 'patch', None, _('show patch')),
4952 [('p', 'patch', None, _('show patch')),
4953 ('g', 'git', None, _('use git extended diff format')),
4953 ('g', 'git', None, _('use git extended diff format')),
4954 ] + templateopts,
4954 ] + templateopts,
4955 _('[-p] [-g]')),
4955 _('[-p] [-g]')),
4956 "unbundle":
4956 "unbundle":
4957 (unbundle,
4957 (unbundle,
4958 [('u', 'update', None,
4958 [('u', 'update', None,
4959 _('update to new branch head if changesets were unbundled'))],
4959 _('update to new branch head if changesets were unbundled'))],
4960 _('[-u] FILE...')),
4960 _('[-u] FILE...')),
4961 "^update|up|checkout|co":
4961 "^update|up|checkout|co":
4962 (update,
4962 (update,
4963 [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
4963 [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
4964 ('c', 'check', None,
4964 ('c', 'check', None,
4965 _('update across branches if no uncommitted changes')),
4965 _('update across branches if no uncommitted changes')),
4966 ('d', 'date', '',
4966 ('d', 'date', '',
4967 _('tipmost revision matching date'), _('DATE')),
4967 _('tipmost revision matching date'), _('DATE')),
4968 ('r', 'rev', '',
4968 ('r', 'rev', '',
4969 _('revision'), _('REV'))],
4969 _('revision'), _('REV'))],
4970 _('[-c] [-C] [-d DATE] [[-r] REV]')),
4970 _('[-c] [-C] [-d DATE] [[-r] REV]')),
4971 "verify": (verify, []),
4971 "verify": (verify, []),
4972 "version": (version_, []),
4972 "version": (version_, []),
4973 }
4973 }
4974
4974
4975 norepo = ("clone init version help debugcommands debugcomplete"
4975 norepo = ("clone init version help debugcommands debugcomplete"
4976 " debugdate debuginstall debugfsinfo debugpushkey debugwireargs"
4976 " debugdate debuginstall debugfsinfo debugpushkey debugwireargs"
4977 " debugknown debuggetbundle debugbundle")
4977 " debugknown debuggetbundle debugbundle")
4978 optionalrepo = ("identify paths serve showconfig debugancestor debugdag"
4978 optionalrepo = ("identify paths serve showconfig debugancestor debugdag"
4979 " debugdata debugindex debugindexdot")
4979 " debugdata debugindex debugindexdot")
@@ -1,269 +1,269 @@
1 # filemerge.py - file-level merge handling for Mercurial
1 # filemerge.py - file-level merge handling for Mercurial
2 #
2 #
3 # Copyright 2006, 2007, 2008 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006, 2007, 2008 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import short
8 from node import short
9 from i18n import _
9 from i18n import _
10 import util, simplemerge, match, error
10 import util, simplemerge, match, error
11 import os, tempfile, re, filecmp
11 import os, tempfile, re, filecmp
12
12
13 def _toolstr(ui, tool, part, default=""):
13 def _toolstr(ui, tool, part, default=""):
14 return ui.config("merge-tools", tool + "." + part, default)
14 return ui.config("merge-tools", tool + "." + part, default)
15
15
16 def _toolbool(ui, tool, part, default=False):
16 def _toolbool(ui, tool, part, default=False):
17 return ui.configbool("merge-tools", tool + "." + part, default)
17 return ui.configbool("merge-tools", tool + "." + part, default)
18
18
19 def _toollist(ui, tool, part, default=[]):
19 def _toollist(ui, tool, part, default=[]):
20 return ui.configlist("merge-tools", tool + "." + part, default)
20 return ui.configlist("merge-tools", tool + "." + part, default)
21
21
22 _internal = ['internal:' + s
22 _internal = ['internal:' + s
23 for s in 'fail local other merge prompt dump'.split()]
23 for s in 'fail local other merge prompt dump'.split()]
24
24
25 def _findtool(ui, tool):
25 def _findtool(ui, tool):
26 if tool in _internal:
26 if tool in _internal:
27 return tool
27 return tool
28 for kn in ("regkey", "regkeyalt"):
28 for kn in ("regkey", "regkeyalt"):
29 k = _toolstr(ui, tool, kn)
29 k = _toolstr(ui, tool, kn)
30 if not k:
30 if not k:
31 continue
31 continue
32 p = util.lookup_reg(k, _toolstr(ui, tool, "regname"))
32 p = util.lookupreg(k, _toolstr(ui, tool, "regname"))
33 if p:
33 if p:
34 p = util.find_exe(p + _toolstr(ui, tool, "regappend"))
34 p = util.find_exe(p + _toolstr(ui, tool, "regappend"))
35 if p:
35 if p:
36 return p
36 return p
37 return util.find_exe(_toolstr(ui, tool, "executable", tool))
37 return util.find_exe(_toolstr(ui, tool, "executable", tool))
38
38
39 def _picktool(repo, ui, path, binary, symlink):
39 def _picktool(repo, ui, path, binary, symlink):
40 def check(tool, pat, symlink, binary):
40 def check(tool, pat, symlink, binary):
41 tmsg = tool
41 tmsg = tool
42 if pat:
42 if pat:
43 tmsg += " specified for " + pat
43 tmsg += " specified for " + pat
44 if not _findtool(ui, tool):
44 if not _findtool(ui, tool):
45 if pat: # explicitly requested tool deserves a warning
45 if pat: # explicitly requested tool deserves a warning
46 ui.warn(_("couldn't find merge tool %s\n") % tmsg)
46 ui.warn(_("couldn't find merge tool %s\n") % tmsg)
47 else: # configured but non-existing tools are more silent
47 else: # configured but non-existing tools are more silent
48 ui.note(_("couldn't find merge tool %s\n") % tmsg)
48 ui.note(_("couldn't find merge tool %s\n") % tmsg)
49 elif symlink and not _toolbool(ui, tool, "symlink"):
49 elif symlink and not _toolbool(ui, tool, "symlink"):
50 ui.warn(_("tool %s can't handle symlinks\n") % tmsg)
50 ui.warn(_("tool %s can't handle symlinks\n") % tmsg)
51 elif binary and not _toolbool(ui, tool, "binary"):
51 elif binary and not _toolbool(ui, tool, "binary"):
52 ui.warn(_("tool %s can't handle binary\n") % tmsg)
52 ui.warn(_("tool %s can't handle binary\n") % tmsg)
53 elif not util.gui() and _toolbool(ui, tool, "gui"):
53 elif not util.gui() and _toolbool(ui, tool, "gui"):
54 ui.warn(_("tool %s requires a GUI\n") % tmsg)
54 ui.warn(_("tool %s requires a GUI\n") % tmsg)
55 else:
55 else:
56 return True
56 return True
57 return False
57 return False
58
58
59 # forcemerge comes from command line arguments, highest priority
59 # forcemerge comes from command line arguments, highest priority
60 force = ui.config('ui', 'forcemerge')
60 force = ui.config('ui', 'forcemerge')
61 if force:
61 if force:
62 toolpath = _findtool(ui, force)
62 toolpath = _findtool(ui, force)
63 if toolpath:
63 if toolpath:
64 return (force, '"' + toolpath + '"')
64 return (force, '"' + toolpath + '"')
65 else:
65 else:
66 # mimic HGMERGE if given tool not found
66 # mimic HGMERGE if given tool not found
67 return (force, force)
67 return (force, force)
68
68
69 # HGMERGE takes next precedence
69 # HGMERGE takes next precedence
70 hgmerge = os.environ.get("HGMERGE")
70 hgmerge = os.environ.get("HGMERGE")
71 if hgmerge:
71 if hgmerge:
72 return (hgmerge, hgmerge)
72 return (hgmerge, hgmerge)
73
73
74 # then patterns
74 # then patterns
75 for pat, tool in ui.configitems("merge-patterns"):
75 for pat, tool in ui.configitems("merge-patterns"):
76 mf = match.match(repo.root, '', [pat])
76 mf = match.match(repo.root, '', [pat])
77 if mf(path) and check(tool, pat, symlink, False):
77 if mf(path) and check(tool, pat, symlink, False):
78 toolpath = _findtool(ui, tool)
78 toolpath = _findtool(ui, tool)
79 return (tool, '"' + toolpath + '"')
79 return (tool, '"' + toolpath + '"')
80
80
81 # then merge tools
81 # then merge tools
82 tools = {}
82 tools = {}
83 for k, v in ui.configitems("merge-tools"):
83 for k, v in ui.configitems("merge-tools"):
84 t = k.split('.')[0]
84 t = k.split('.')[0]
85 if t not in tools:
85 if t not in tools:
86 tools[t] = int(_toolstr(ui, t, "priority", "0"))
86 tools[t] = int(_toolstr(ui, t, "priority", "0"))
87 names = tools.keys()
87 names = tools.keys()
88 tools = sorted([(-p, t) for t, p in tools.items()])
88 tools = sorted([(-p, t) for t, p in tools.items()])
89 uimerge = ui.config("ui", "merge")
89 uimerge = ui.config("ui", "merge")
90 if uimerge:
90 if uimerge:
91 if uimerge not in names:
91 if uimerge not in names:
92 return (uimerge, uimerge)
92 return (uimerge, uimerge)
93 tools.insert(0, (None, uimerge)) # highest priority
93 tools.insert(0, (None, uimerge)) # highest priority
94 tools.append((None, "hgmerge")) # the old default, if found
94 tools.append((None, "hgmerge")) # the old default, if found
95 for p, t in tools:
95 for p, t in tools:
96 if check(t, None, symlink, binary):
96 if check(t, None, symlink, binary):
97 toolpath = _findtool(ui, t)
97 toolpath = _findtool(ui, t)
98 return (t, '"' + toolpath + '"')
98 return (t, '"' + toolpath + '"')
99 # internal merge as last resort
99 # internal merge as last resort
100 return (not (symlink or binary) and "internal:merge" or None, None)
100 return (not (symlink or binary) and "internal:merge" or None, None)
101
101
102 def _eoltype(data):
102 def _eoltype(data):
103 "Guess the EOL type of a file"
103 "Guess the EOL type of a file"
104 if '\0' in data: # binary
104 if '\0' in data: # binary
105 return None
105 return None
106 if '\r\n' in data: # Windows
106 if '\r\n' in data: # Windows
107 return '\r\n'
107 return '\r\n'
108 if '\r' in data: # Old Mac
108 if '\r' in data: # Old Mac
109 return '\r'
109 return '\r'
110 if '\n' in data: # UNIX
110 if '\n' in data: # UNIX
111 return '\n'
111 return '\n'
112 return None # unknown
112 return None # unknown
113
113
114 def _matcheol(file, origfile):
114 def _matcheol(file, origfile):
115 "Convert EOL markers in a file to match origfile"
115 "Convert EOL markers in a file to match origfile"
116 tostyle = _eoltype(util.readfile(origfile))
116 tostyle = _eoltype(util.readfile(origfile))
117 if tostyle:
117 if tostyle:
118 data = util.readfile(file)
118 data = util.readfile(file)
119 style = _eoltype(data)
119 style = _eoltype(data)
120 if style:
120 if style:
121 newdata = data.replace(style, tostyle)
121 newdata = data.replace(style, tostyle)
122 if newdata != data:
122 if newdata != data:
123 util.writefile(file, newdata)
123 util.writefile(file, newdata)
124
124
125 def filemerge(repo, mynode, orig, fcd, fco, fca):
125 def filemerge(repo, mynode, orig, fcd, fco, fca):
126 """perform a 3-way merge in the working directory
126 """perform a 3-way merge in the working directory
127
127
128 mynode = parent node before merge
128 mynode = parent node before merge
129 orig = original local filename before merge
129 orig = original local filename before merge
130 fco = other file context
130 fco = other file context
131 fca = ancestor file context
131 fca = ancestor file context
132 fcd = local file context for current/destination file
132 fcd = local file context for current/destination file
133 """
133 """
134
134
135 def temp(prefix, ctx):
135 def temp(prefix, ctx):
136 pre = "%s~%s." % (os.path.basename(ctx.path()), prefix)
136 pre = "%s~%s." % (os.path.basename(ctx.path()), prefix)
137 (fd, name) = tempfile.mkstemp(prefix=pre)
137 (fd, name) = tempfile.mkstemp(prefix=pre)
138 data = repo.wwritedata(ctx.path(), ctx.data())
138 data = repo.wwritedata(ctx.path(), ctx.data())
139 f = os.fdopen(fd, "wb")
139 f = os.fdopen(fd, "wb")
140 f.write(data)
140 f.write(data)
141 f.close()
141 f.close()
142 return name
142 return name
143
143
144 def isbin(ctx):
144 def isbin(ctx):
145 try:
145 try:
146 return util.binary(ctx.data())
146 return util.binary(ctx.data())
147 except IOError:
147 except IOError:
148 return False
148 return False
149
149
150 if not fco.cmp(fcd): # files identical?
150 if not fco.cmp(fcd): # files identical?
151 return None
151 return None
152
152
153 ui = repo.ui
153 ui = repo.ui
154 fd = fcd.path()
154 fd = fcd.path()
155 binary = isbin(fcd) or isbin(fco) or isbin(fca)
155 binary = isbin(fcd) or isbin(fco) or isbin(fca)
156 symlink = 'l' in fcd.flags() + fco.flags()
156 symlink = 'l' in fcd.flags() + fco.flags()
157 tool, toolpath = _picktool(repo, ui, fd, binary, symlink)
157 tool, toolpath = _picktool(repo, ui, fd, binary, symlink)
158 ui.debug("picked tool '%s' for %s (binary %s symlink %s)\n" %
158 ui.debug("picked tool '%s' for %s (binary %s symlink %s)\n" %
159 (tool, fd, binary, symlink))
159 (tool, fd, binary, symlink))
160
160
161 if not tool or tool == 'internal:prompt':
161 if not tool or tool == 'internal:prompt':
162 tool = "internal:local"
162 tool = "internal:local"
163 if ui.promptchoice(_(" no tool found to merge %s\n"
163 if ui.promptchoice(_(" no tool found to merge %s\n"
164 "keep (l)ocal or take (o)ther?") % fd,
164 "keep (l)ocal or take (o)ther?") % fd,
165 (_("&Local"), _("&Other")), 0):
165 (_("&Local"), _("&Other")), 0):
166 tool = "internal:other"
166 tool = "internal:other"
167 if tool == "internal:local":
167 if tool == "internal:local":
168 return 0
168 return 0
169 if tool == "internal:other":
169 if tool == "internal:other":
170 repo.wwrite(fd, fco.data(), fco.flags())
170 repo.wwrite(fd, fco.data(), fco.flags())
171 return 0
171 return 0
172 if tool == "internal:fail":
172 if tool == "internal:fail":
173 return 1
173 return 1
174
174
175 # do the actual merge
175 # do the actual merge
176 a = repo.wjoin(fd)
176 a = repo.wjoin(fd)
177 b = temp("base", fca)
177 b = temp("base", fca)
178 c = temp("other", fco)
178 c = temp("other", fco)
179 out = ""
179 out = ""
180 back = a + ".orig"
180 back = a + ".orig"
181 util.copyfile(a, back)
181 util.copyfile(a, back)
182
182
183 if orig != fco.path():
183 if orig != fco.path():
184 ui.status(_("merging %s and %s to %s\n") % (orig, fco.path(), fd))
184 ui.status(_("merging %s and %s to %s\n") % (orig, fco.path(), fd))
185 else:
185 else:
186 ui.status(_("merging %s\n") % fd)
186 ui.status(_("merging %s\n") % fd)
187
187
188 ui.debug("my %s other %s ancestor %s\n" % (fcd, fco, fca))
188 ui.debug("my %s other %s ancestor %s\n" % (fcd, fco, fca))
189
189
190 # do we attempt to simplemerge first?
190 # do we attempt to simplemerge first?
191 try:
191 try:
192 premerge = _toolbool(ui, tool, "premerge", not (binary or symlink))
192 premerge = _toolbool(ui, tool, "premerge", not (binary or symlink))
193 except error.ConfigError:
193 except error.ConfigError:
194 premerge = _toolstr(ui, tool, "premerge").lower()
194 premerge = _toolstr(ui, tool, "premerge").lower()
195 valid = 'keep'.split()
195 valid = 'keep'.split()
196 if premerge not in valid:
196 if premerge not in valid:
197 _valid = ', '.join(["'" + v + "'" for v in valid])
197 _valid = ', '.join(["'" + v + "'" for v in valid])
198 raise error.ConfigError(_("%s.premerge not valid "
198 raise error.ConfigError(_("%s.premerge not valid "
199 "('%s' is neither boolean nor %s)") %
199 "('%s' is neither boolean nor %s)") %
200 (tool, premerge, _valid))
200 (tool, premerge, _valid))
201
201
202 if premerge:
202 if premerge:
203 r = simplemerge.simplemerge(ui, a, b, c, quiet=True)
203 r = simplemerge.simplemerge(ui, a, b, c, quiet=True)
204 if not r:
204 if not r:
205 ui.debug(" premerge successful\n")
205 ui.debug(" premerge successful\n")
206 os.unlink(back)
206 os.unlink(back)
207 os.unlink(b)
207 os.unlink(b)
208 os.unlink(c)
208 os.unlink(c)
209 return 0
209 return 0
210 if premerge != 'keep':
210 if premerge != 'keep':
211 util.copyfile(back, a) # restore from backup and try again
211 util.copyfile(back, a) # restore from backup and try again
212
212
213 env = dict(HG_FILE=fd,
213 env = dict(HG_FILE=fd,
214 HG_MY_NODE=short(mynode),
214 HG_MY_NODE=short(mynode),
215 HG_OTHER_NODE=str(fco.changectx()),
215 HG_OTHER_NODE=str(fco.changectx()),
216 HG_BASE_NODE=str(fca.changectx()),
216 HG_BASE_NODE=str(fca.changectx()),
217 HG_MY_ISLINK='l' in fcd.flags(),
217 HG_MY_ISLINK='l' in fcd.flags(),
218 HG_OTHER_ISLINK='l' in fco.flags(),
218 HG_OTHER_ISLINK='l' in fco.flags(),
219 HG_BASE_ISLINK='l' in fca.flags())
219 HG_BASE_ISLINK='l' in fca.flags())
220
220
221 if tool == "internal:merge":
221 if tool == "internal:merge":
222 r = simplemerge.simplemerge(ui, a, b, c, label=['local', 'other'])
222 r = simplemerge.simplemerge(ui, a, b, c, label=['local', 'other'])
223 elif tool == 'internal:dump':
223 elif tool == 'internal:dump':
224 a = repo.wjoin(fd)
224 a = repo.wjoin(fd)
225 util.copyfile(a, a + ".local")
225 util.copyfile(a, a + ".local")
226 repo.wwrite(fd + ".other", fco.data(), fco.flags())
226 repo.wwrite(fd + ".other", fco.data(), fco.flags())
227 repo.wwrite(fd + ".base", fca.data(), fca.flags())
227 repo.wwrite(fd + ".base", fca.data(), fca.flags())
228 return 1 # unresolved
228 return 1 # unresolved
229 else:
229 else:
230 args = _toolstr(ui, tool, "args", '$local $base $other')
230 args = _toolstr(ui, tool, "args", '$local $base $other')
231 if "$output" in args:
231 if "$output" in args:
232 out, a = a, back # read input from backup, write to original
232 out, a = a, back # read input from backup, write to original
233 replace = dict(local=a, base=b, other=c, output=out)
233 replace = dict(local=a, base=b, other=c, output=out)
234 args = util.interpolate(r'\$', replace, args,
234 args = util.interpolate(r'\$', replace, args,
235 lambda s: '"%s"' % util.localpath(s))
235 lambda s: '"%s"' % util.localpath(s))
236 r = util.system(toolpath + ' ' + args, cwd=repo.root, environ=env)
236 r = util.system(toolpath + ' ' + args, cwd=repo.root, environ=env)
237
237
238 if not r and (_toolbool(ui, tool, "checkconflicts") or
238 if not r and (_toolbool(ui, tool, "checkconflicts") or
239 'conflicts' in _toollist(ui, tool, "check")):
239 'conflicts' in _toollist(ui, tool, "check")):
240 if re.search("^(<<<<<<< .*|=======|>>>>>>> .*)$", fcd.data(),
240 if re.search("^(<<<<<<< .*|=======|>>>>>>> .*)$", fcd.data(),
241 re.MULTILINE):
241 re.MULTILINE):
242 r = 1
242 r = 1
243
243
244 checked = False
244 checked = False
245 if 'prompt' in _toollist(ui, tool, "check"):
245 if 'prompt' in _toollist(ui, tool, "check"):
246 checked = True
246 checked = True
247 if ui.promptchoice(_("was merge of '%s' successful (yn)?") % fd,
247 if ui.promptchoice(_("was merge of '%s' successful (yn)?") % fd,
248 (_("&Yes"), _("&No")), 1):
248 (_("&Yes"), _("&No")), 1):
249 r = 1
249 r = 1
250
250
251 if not r and not checked and (_toolbool(ui, tool, "checkchanged") or
251 if not r and not checked and (_toolbool(ui, tool, "checkchanged") or
252 'changed' in _toollist(ui, tool, "check")):
252 'changed' in _toollist(ui, tool, "check")):
253 if filecmp.cmp(repo.wjoin(fd), back):
253 if filecmp.cmp(repo.wjoin(fd), back):
254 if ui.promptchoice(_(" output file %s appears unchanged\n"
254 if ui.promptchoice(_(" output file %s appears unchanged\n"
255 "was merge successful (yn)?") % fd,
255 "was merge successful (yn)?") % fd,
256 (_("&Yes"), _("&No")), 1):
256 (_("&Yes"), _("&No")), 1):
257 r = 1
257 r = 1
258
258
259 if _toolbool(ui, tool, "fixeol"):
259 if _toolbool(ui, tool, "fixeol"):
260 _matcheol(repo.wjoin(fd), back)
260 _matcheol(repo.wjoin(fd), back)
261
261
262 if r:
262 if r:
263 ui.warn(_("merging %s failed!\n") % fd)
263 ui.warn(_("merging %s failed!\n") % fd)
264 else:
264 else:
265 os.unlink(back)
265 os.unlink(back)
266
266
267 os.unlink(b)
267 os.unlink(b)
268 os.unlink(c)
268 os.unlink(c)
269 return r
269 return r
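
For context, _picktool above resolves the merge tool in a fixed order: ui.forcemerge first, then the HGMERGE environment variable, then [merge-patterns] entries matching the file, then [merge-tools] entries sorted by descending priority, and finally internal:merge unless the file is binary or a symlink. Below is a simplified standalone sketch of that precedence, using plain data structures, a crude pattern match, and no capability checks, so it is an approximation rather than the real selection logic:

    def pick_tool(forcemerge, hgmerge_env, pattern_tools, configured_tools,
                  path, binary=False, symlink=False):
        # 1. --tool / ui.forcemerge wins outright
        if forcemerge:
            return forcemerge
        # 2. the HGMERGE environment variable comes next
        if hgmerge_env:
            return hgmerge_env
        # 3. then [merge-patterns] entries matching the file path
        for pattern, tool in pattern_tools:
            if path.endswith(pattern.lstrip('*')):   # stand-in for real glob matching
                return tool
        # 4. then [merge-tools] entries, highest priority first
        for tool, priority in sorted(configured_tools.items(), key=lambda kv: -kv[1]):
            return tool
        # 5. fall back to the internal merge unless binary or symlink
        return None if (binary or symlink) else "internal:merge"

    print(pick_tool(None, None, [("**.doc", "wordmerge")], {}, "report.doc"))  # wordmerge
    print(pick_tool(None, None, [], {"kdiff3": 7, "meld": 2}, "a.c"))          # kdiff3
    print(pick_tool(None, None, [], {}, "a.c"))                                # internal:merge
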
@@ -1,82 +1,82 @@
1 # hgweb/wsgicgi.py - CGI->WSGI translator
1 # hgweb/wsgicgi.py - CGI->WSGI translator
2 #
2 #
3 # Copyright 2006 Eric Hopper <hopper@omnifarious.org>
3 # Copyright 2006 Eric Hopper <hopper@omnifarious.org>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 #
7 #
8 # This was originally copied from the public domain code at
8 # This was originally copied from the public domain code at
9 # http://www.python.org/dev/peps/pep-0333/#the-server-gateway-side
9 # http://www.python.org/dev/peps/pep-0333/#the-server-gateway-side
10
10
11 import os, sys
11 import os, sys
12 from mercurial import util
12 from mercurial import util
13 from mercurial.hgweb import common
13 from mercurial.hgweb import common
14
14
15 def launch(application):
15 def launch(application):
16 util.set_binary(sys.stdin)
16 util.setbinary(sys.stdin)
17 util.set_binary(sys.stdout)
17 util.setbinary(sys.stdout)
18
18
19 environ = dict(os.environ.iteritems())
19 environ = dict(os.environ.iteritems())
20 environ.setdefault('PATH_INFO', '')
20 environ.setdefault('PATH_INFO', '')
21 if environ.get('SERVER_SOFTWARE', '').startswith('Microsoft-IIS'):
21 if environ.get('SERVER_SOFTWARE', '').startswith('Microsoft-IIS'):
22 # IIS includes script_name in path_info
22 # IIS includes script_name in path_info
23 scriptname = environ['SCRIPT_NAME']
23 scriptname = environ['SCRIPT_NAME']
24 if environ['PATH_INFO'].startswith(scriptname):
24 if environ['PATH_INFO'].startswith(scriptname):
25 environ['PATH_INFO'] = environ['PATH_INFO'][len(scriptname):]
25 environ['PATH_INFO'] = environ['PATH_INFO'][len(scriptname):]
26
26
27 stdin = sys.stdin
27 stdin = sys.stdin
28 if environ.get('HTTP_EXPECT', '').lower() == '100-continue':
28 if environ.get('HTTP_EXPECT', '').lower() == '100-continue':
29 stdin = common.continuereader(stdin, sys.stdout.write)
29 stdin = common.continuereader(stdin, sys.stdout.write)
30
30
31 environ['wsgi.input'] = stdin
31 environ['wsgi.input'] = stdin
32 environ['wsgi.errors'] = sys.stderr
32 environ['wsgi.errors'] = sys.stderr
33 environ['wsgi.version'] = (1, 0)
33 environ['wsgi.version'] = (1, 0)
34 environ['wsgi.multithread'] = False
34 environ['wsgi.multithread'] = False
35 environ['wsgi.multiprocess'] = True
35 environ['wsgi.multiprocess'] = True
36 environ['wsgi.run_once'] = True
36 environ['wsgi.run_once'] = True
37
37
38 if environ.get('HTTPS', 'off').lower() in ('on', '1', 'yes'):
38 if environ.get('HTTPS', 'off').lower() in ('on', '1', 'yes'):
39 environ['wsgi.url_scheme'] = 'https'
39 environ['wsgi.url_scheme'] = 'https'
40 else:
40 else:
41 environ['wsgi.url_scheme'] = 'http'
41 environ['wsgi.url_scheme'] = 'http'
42
42
43 headers_set = []
43 headers_set = []
44 headers_sent = []
44 headers_sent = []
45 out = sys.stdout
45 out = sys.stdout
46
46
47 def write(data):
47 def write(data):
48 if not headers_set:
48 if not headers_set:
49 raise AssertionError("write() before start_response()")
49 raise AssertionError("write() before start_response()")
50
50
51 elif not headers_sent:
51 elif not headers_sent:
52 # Before the first output, send the stored headers
52 # Before the first output, send the stored headers
53 status, response_headers = headers_sent[:] = headers_set
53 status, response_headers = headers_sent[:] = headers_set
54 out.write('Status: %s\r\n' % status)
54 out.write('Status: %s\r\n' % status)
55 for header in response_headers:
55 for header in response_headers:
56 out.write('%s: %s\r\n' % header)
56 out.write('%s: %s\r\n' % header)
57 out.write('\r\n')
57 out.write('\r\n')
58
58
59 out.write(data)
59 out.write(data)
60 out.flush()
60 out.flush()
61
61
62 def start_response(status, response_headers, exc_info=None):
62 def start_response(status, response_headers, exc_info=None):
63 if exc_info:
63 if exc_info:
64 try:
64 try:
65 if headers_sent:
65 if headers_sent:
66 # Re-raise original exception if headers sent
66 # Re-raise original exception if headers sent
67 raise exc_info[0](exc_info[1], exc_info[2])
67 raise exc_info[0](exc_info[1], exc_info[2])
68 finally:
68 finally:
69 exc_info = None # avoid dangling circular ref
69 exc_info = None # avoid dangling circular ref
70 elif headers_set:
70 elif headers_set:
71 raise AssertionError("Headers already set!")
71 raise AssertionError("Headers already set!")
72
72
73 headers_set[:] = [status, response_headers]
73 headers_set[:] = [status, response_headers]
74 return write
74 return write
75
75
76 content = application(environ, start_response)
76 content = application(environ, start_response)
77 try:
77 try:
78 for chunk in content:
78 for chunk in content:
79 write(chunk)
79 write(chunk)
80 finally:
80 finally:
81 if hasattr(content, 'close'):
81 if hasattr(content, 'close'):
82 content.close()
82 content.close()
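
launch() above adapts any WSGI application to CGI: it switches stdin/stdout to binary mode, builds the WSGI environ from os.environ, and streams the response after a Status: header. The usual caller is a CGI script along the lines of the hgweb.cgi shipped with Mercurial around this time; the repository path below is a placeholder:

    #!/usr/bin/env python
    # Sketch of an hgweb CGI wrapper; point it at a repository or an
    # hgweb config file.
    from mercurial import demandimport; demandimport.enable()
    from mercurial.hgweb import hgweb, wsgicgi

    application = hgweb("/path/to/repo/or/hgweb.config")
    wsgicgi.launch(application)
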
@@ -1,159 +1,159 @@
1 # hook.py - hook support for mercurial
1 # hook.py - hook support for mercurial
2 #
2 #
3 # Copyright 2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 import os, sys
9 import os, sys
10 import extensions, util
10 import extensions, util
11
11
12 def _pythonhook(ui, repo, name, hname, funcname, args, throw):
12 def _pythonhook(ui, repo, name, hname, funcname, args, throw):
13 '''call python hook. hook is callable object, looked up as
13 '''call python hook. hook is callable object, looked up as
14 name in python module. if callable returns "true", hook
14 name in python module. if callable returns "true", hook
15 fails, else passes. if hook raises exception, treated as
15 fails, else passes. if hook raises exception, treated as
16 hook failure. exception propagates if throw is "true".
16 hook failure. exception propagates if throw is "true".
17
17
18 reason for "true" meaning "hook failed" is so that
18 reason for "true" meaning "hook failed" is so that
19 unmodified commands (e.g. mercurial.commands.update) can
19 unmodified commands (e.g. mercurial.commands.update) can
20 be run as hooks without wrappers to convert return values.'''
20 be run as hooks without wrappers to convert return values.'''
21
21
22 ui.note(_("calling hook %s: %s\n") % (hname, funcname))
22 ui.note(_("calling hook %s: %s\n") % (hname, funcname))
23 obj = funcname
23 obj = funcname
24 if not hasattr(obj, '__call__'):
24 if not hasattr(obj, '__call__'):
25 d = funcname.rfind('.')
25 d = funcname.rfind('.')
26 if d == -1:
26 if d == -1:
27 raise util.Abort(_('%s hook is invalid ("%s" not in '
27 raise util.Abort(_('%s hook is invalid ("%s" not in '
28 'a module)') % (hname, funcname))
28 'a module)') % (hname, funcname))
29 modname = funcname[:d]
29 modname = funcname[:d]
30 oldpaths = sys.path
30 oldpaths = sys.path
31 if hasattr(sys, "frozen"):
31 if hasattr(sys, "frozen"):
32 # binary installs require sys.path manipulation
32 # binary installs require sys.path manipulation
33 modpath, modfile = os.path.split(modname)
33 modpath, modfile = os.path.split(modname)
34 if modpath and modfile:
34 if modpath and modfile:
35 sys.path = sys.path[:] + [modpath]
35 sys.path = sys.path[:] + [modpath]
36 modname = modfile
36 modname = modfile
37 try:
37 try:
38 obj = __import__(modname)
38 obj = __import__(modname)
39 except ImportError:
39 except ImportError:
40 e1 = sys.exc_type, sys.exc_value, sys.exc_traceback
40 e1 = sys.exc_type, sys.exc_value, sys.exc_traceback
41 try:
41 try:
42 # extensions are loaded with hgext_ prefix
42 # extensions are loaded with hgext_ prefix
43 obj = __import__("hgext_%s" % modname)
43 obj = __import__("hgext_%s" % modname)
44 except ImportError:
44 except ImportError:
45 e2 = sys.exc_type, sys.exc_value, sys.exc_traceback
45 e2 = sys.exc_type, sys.exc_value, sys.exc_traceback
46 if ui.tracebackflag:
46 if ui.tracebackflag:
47 ui.warn(_('exception from first failed import attempt:\n'))
47 ui.warn(_('exception from first failed import attempt:\n'))
48 ui.traceback(e1)
48 ui.traceback(e1)
49 if ui.tracebackflag:
49 if ui.tracebackflag:
50 ui.warn(_('exception from second failed import attempt:\n'))
50 ui.warn(_('exception from second failed import attempt:\n'))
51 ui.traceback(e2)
51 ui.traceback(e2)
52 raise util.Abort(_('%s hook is invalid '
52 raise util.Abort(_('%s hook is invalid '
53 '(import of "%s" failed)') %
53 '(import of "%s" failed)') %
54 (hname, modname))
54 (hname, modname))
55 sys.path = oldpaths
55 sys.path = oldpaths
56 try:
56 try:
57 for p in funcname.split('.')[1:]:
57 for p in funcname.split('.')[1:]:
58 obj = getattr(obj, p)
58 obj = getattr(obj, p)
59 except AttributeError:
59 except AttributeError:
60 raise util.Abort(_('%s hook is invalid '
60 raise util.Abort(_('%s hook is invalid '
61 '("%s" is not defined)') %
61 '("%s" is not defined)') %
62 (hname, funcname))
62 (hname, funcname))
63 if not hasattr(obj, '__call__'):
63 if not hasattr(obj, '__call__'):
64 raise util.Abort(_('%s hook is invalid '
64 raise util.Abort(_('%s hook is invalid '
65 '("%s" is not callable)') %
65 '("%s" is not callable)') %
66 (hname, funcname))
66 (hname, funcname))
67 try:
67 try:
68 r = obj(ui=ui, repo=repo, hooktype=name, **args)
68 r = obj(ui=ui, repo=repo, hooktype=name, **args)
69 except KeyboardInterrupt:
69 except KeyboardInterrupt:
70 raise
70 raise
71 except Exception, exc:
71 except Exception, exc:
72 if isinstance(exc, util.Abort):
72 if isinstance(exc, util.Abort):
73 ui.warn(_('error: %s hook failed: %s\n') %
73 ui.warn(_('error: %s hook failed: %s\n') %
74 (hname, exc.args[0]))
74 (hname, exc.args[0]))
75 else:
75 else:
76 ui.warn(_('error: %s hook raised an exception: '
76 ui.warn(_('error: %s hook raised an exception: '
77 '%s\n') % (hname, exc))
77 '%s\n') % (hname, exc))
78 if throw:
78 if throw:
79 raise
79 raise
80 ui.traceback()
80 ui.traceback()
81 return True
81 return True
82 if r:
82 if r:
83 if throw:
83 if throw:
84 raise util.Abort(_('%s hook failed') % hname)
84 raise util.Abort(_('%s hook failed') % hname)
85 ui.warn(_('warning: %s hook failed\n') % hname)
85 ui.warn(_('warning: %s hook failed\n') % hname)
86 return r
86 return r
87
87
88 def _exthook(ui, repo, name, cmd, args, throw):
88 def _exthook(ui, repo, name, cmd, args, throw):
89 ui.note(_("running hook %s: %s\n") % (name, cmd))
89 ui.note(_("running hook %s: %s\n") % (name, cmd))
90
90
91 env = {}
91 env = {}
92 for k, v in args.iteritems():
92 for k, v in args.iteritems():
93 if hasattr(v, '__call__'):
93 if hasattr(v, '__call__'):
94 v = v()
94 v = v()
95 if isinstance(v, dict):
95 if isinstance(v, dict):
96 # make the dictionary element order stable across Python
96 # make the dictionary element order stable across Python
97 # implementations
97 # implementations
98 v = ('{' +
98 v = ('{' +
99 ', '.join('%r: %r' % i for i in sorted(v.iteritems())) +
99 ', '.join('%r: %r' % i for i in sorted(v.iteritems())) +
100 '}')
100 '}')
101 env['HG_' + k.upper()] = v
101 env['HG_' + k.upper()] = v
102
102
103 if repo:
103 if repo:
104 cwd = repo.root
104 cwd = repo.root
105 else:
105 else:
106 cwd = os.getcwd()
106 cwd = os.getcwd()
107 if 'HG_URL' in env and env['HG_URL'].startswith('remote:http'):
107 if 'HG_URL' in env and env['HG_URL'].startswith('remote:http'):
108 r = util.system(cmd, environ=env, cwd=cwd, out=ui)
108 r = util.system(cmd, environ=env, cwd=cwd, out=ui)
109 else:
109 else:
110 r = util.system(cmd, environ=env, cwd=cwd)
110 r = util.system(cmd, environ=env, cwd=cwd)
111 if r:
111 if r:
112 desc, r = util.explain_exit(r)
112 desc, r = util.explainexit(r)
113 if throw:
113 if throw:
114 raise util.Abort(_('%s hook %s') % (name, desc))
114 raise util.Abort(_('%s hook %s') % (name, desc))
115 ui.warn(_('warning: %s hook %s\n') % (name, desc))
115 ui.warn(_('warning: %s hook %s\n') % (name, desc))
116 return r
116 return r
117
117
118 _redirect = False
118 _redirect = False
119 def redirect(state):
119 def redirect(state):
120 global _redirect
120 global _redirect
121 _redirect = state
121 _redirect = state
122
122
123 def hook(ui, repo, name, throw=False, **args):
123 def hook(ui, repo, name, throw=False, **args):
124 r = False
124 r = False
125
125
126 oldstdout = -1
126 oldstdout = -1
127 if _redirect:
127 if _redirect:
128 stdoutno = sys.__stdout__.fileno()
128 stdoutno = sys.__stdout__.fileno()
129 stderrno = sys.__stderr__.fileno()
129 stderrno = sys.__stderr__.fileno()
130 # temporarily redirect stdout to stderr, if possible
130 # temporarily redirect stdout to stderr, if possible
131 if stdoutno >= 0 and stderrno >= 0:
131 if stdoutno >= 0 and stderrno >= 0:
132 oldstdout = os.dup(stdoutno)
132 oldstdout = os.dup(stdoutno)
133 os.dup2(stderrno, stdoutno)
133 os.dup2(stderrno, stdoutno)
134
134
135 try:
135 try:
136 for hname, cmd in ui.configitems('hooks'):
136 for hname, cmd in ui.configitems('hooks'):
137 if hname.split('.')[0] != name or not cmd:
137 if hname.split('.')[0] != name or not cmd:
138 continue
138 continue
139 if hasattr(cmd, '__call__'):
139 if hasattr(cmd, '__call__'):
140 r = _pythonhook(ui, repo, name, hname, cmd, args, throw) or r
140 r = _pythonhook(ui, repo, name, hname, cmd, args, throw) or r
141 elif cmd.startswith('python:'):
141 elif cmd.startswith('python:'):
142 if cmd.count(':') >= 2:
142 if cmd.count(':') >= 2:
143 path, cmd = cmd[7:].rsplit(':', 1)
143 path, cmd = cmd[7:].rsplit(':', 1)
144 path = util.expandpath(path)
144 path = util.expandpath(path)
145 if repo:
145 if repo:
146 path = os.path.join(repo.root, path)
146 path = os.path.join(repo.root, path)
147 mod = extensions.loadpath(path, 'hghook.%s' % hname)
147 mod = extensions.loadpath(path, 'hghook.%s' % hname)
148 hookfn = getattr(mod, cmd)
148 hookfn = getattr(mod, cmd)
149 else:
149 else:
150 hookfn = cmd[7:].strip()
150 hookfn = cmd[7:].strip()
151 r = _pythonhook(ui, repo, name, hname, hookfn, args, throw) or r
151 r = _pythonhook(ui, repo, name, hname, hookfn, args, throw) or r
152 else:
152 else:
153 r = _exthook(ui, repo, hname, cmd, args, throw) or r
153 r = _exthook(ui, repo, hname, cmd, args, throw) or r
154 finally:
154 finally:
155 if _redirect and oldstdout >= 0:
155 if _redirect and oldstdout >= 0:
156 os.dup2(oldstdout, stdoutno)
156 os.dup2(oldstdout, stdoutno)
157 os.close(oldstdout)
157 os.close(oldstdout)
158
158
159 return r
159 return r
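
As the _pythonhook docstring above explains, an in-process hook is any callable reachable from the [hooks] section, called with ui, repo, hooktype and the hook's keyword arguments, and a truthy return value means the hook failed (so pre* hooks abort the operation). An illustrative hook module follows; the module path, hook name, and the check itself are invented:

    # myhooks.py -- wired up roughly as:
    #   [hooks]
    #   pretxncommit.nodebug = python:/path/to/myhooks.py:reject_debug_prints
    def reject_debug_prints(ui, repo, hooktype, node=None, **kwargs):
        ctx = repo[node]
        for fname in ctx.files():
            if fname in ctx and 'DEBUG' in ctx[fname].data():
                ui.warn("%s: stray DEBUG marker in %s\n" % (hooktype, fname))
                return True    # truthy return: hook fails, commit is rolled back
        return False           # falsy return: hook passes
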
@@ -1,1957 +1,1957 @@
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import bin, hex, nullid, nullrev, short
8 from node import bin, hex, nullid, nullrev, short
9 from i18n import _
9 from i18n import _
10 import repo, changegroup, subrepo, discovery, pushkey
10 import repo, changegroup, subrepo, discovery, pushkey
11 import changelog, dirstate, filelog, manifest, context, bookmarks
11 import changelog, dirstate, filelog, manifest, context, bookmarks
12 import lock, transaction, store, encoding
12 import lock, transaction, store, encoding
13 import scmutil, util, extensions, hook, error
13 import scmutil, util, extensions, hook, error
14 import match as matchmod
14 import match as matchmod
15 import merge as mergemod
15 import merge as mergemod
16 import tags as tagsmod
16 import tags as tagsmod
17 from lock import release
17 from lock import release
18 import weakref, errno, os, time, inspect
18 import weakref, errno, os, time, inspect
19 propertycache = util.propertycache
19 propertycache = util.propertycache
20
20
21 class localrepository(repo.repository):
21 class localrepository(repo.repository):
22 capabilities = set(('lookup', 'changegroupsubset', 'branchmap', 'pushkey',
22 capabilities = set(('lookup', 'changegroupsubset', 'branchmap', 'pushkey',
23 'known', 'getbundle'))
23 'known', 'getbundle'))
24 supportedformats = set(('revlogv1',))
24 supportedformats = set(('revlogv1',))
25 supported = supportedformats | set(('store', 'fncache', 'shared',
25 supported = supportedformats | set(('store', 'fncache', 'shared',
26 'dotencode'))
26 'dotencode'))
27
27
28 def __init__(self, baseui, path=None, create=0):
28 def __init__(self, baseui, path=None, create=0):
29 repo.repository.__init__(self)
29 repo.repository.__init__(self)
30 self.root = os.path.realpath(util.expandpath(path))
30 self.root = os.path.realpath(util.expandpath(path))
31 self.path = os.path.join(self.root, ".hg")
31 self.path = os.path.join(self.root, ".hg")
32 self.origroot = path
32 self.origroot = path
33 self.auditor = scmutil.pathauditor(self.root, self._checknested)
33 self.auditor = scmutil.pathauditor(self.root, self._checknested)
34 self.opener = scmutil.opener(self.path)
34 self.opener = scmutil.opener(self.path)
35 self.wopener = scmutil.opener(self.root)
35 self.wopener = scmutil.opener(self.root)
36 self.baseui = baseui
36 self.baseui = baseui
37 self.ui = baseui.copy()
37 self.ui = baseui.copy()
38
38
39 try:
39 try:
40 self.ui.readconfig(self.join("hgrc"), self.root)
40 self.ui.readconfig(self.join("hgrc"), self.root)
41 extensions.loadall(self.ui)
41 extensions.loadall(self.ui)
42 except IOError:
42 except IOError:
43 pass
43 pass
44
44
45 if not os.path.isdir(self.path):
45 if not os.path.isdir(self.path):
46 if create:
46 if create:
47 if not os.path.exists(path):
47 if not os.path.exists(path):
48 util.makedirs(path)
48 util.makedirs(path)
49 util.makedir(self.path, notindexed=True)
49 util.makedir(self.path, notindexed=True)
50 requirements = ["revlogv1"]
50 requirements = ["revlogv1"]
51 if self.ui.configbool('format', 'usestore', True):
51 if self.ui.configbool('format', 'usestore', True):
52 os.mkdir(os.path.join(self.path, "store"))
52 os.mkdir(os.path.join(self.path, "store"))
53 requirements.append("store")
53 requirements.append("store")
54 if self.ui.configbool('format', 'usefncache', True):
54 if self.ui.configbool('format', 'usefncache', True):
55 requirements.append("fncache")
55 requirements.append("fncache")
56 if self.ui.configbool('format', 'dotencode', True):
56 if self.ui.configbool('format', 'dotencode', True):
57 requirements.append('dotencode')
57 requirements.append('dotencode')
58 # create an invalid changelog
58 # create an invalid changelog
59 self.opener.append(
59 self.opener.append(
60 "00changelog.i",
60 "00changelog.i",
61 '\0\0\0\2' # represents revlogv2
61 '\0\0\0\2' # represents revlogv2
62 ' dummy changelog to prevent using the old repo layout'
62 ' dummy changelog to prevent using the old repo layout'
63 )
63 )
64 else:
64 else:
65 raise error.RepoError(_("repository %s not found") % path)
65 raise error.RepoError(_("repository %s not found") % path)
66 elif create:
66 elif create:
67 raise error.RepoError(_("repository %s already exists") % path)
67 raise error.RepoError(_("repository %s already exists") % path)
68 else:
68 else:
69 # find requirements
69 # find requirements
70 requirements = set()
70 requirements = set()
71 try:
71 try:
72 requirements = set(self.opener.read("requires").splitlines())
72 requirements = set(self.opener.read("requires").splitlines())
73 except IOError, inst:
73 except IOError, inst:
74 if inst.errno != errno.ENOENT:
74 if inst.errno != errno.ENOENT:
75 raise
75 raise
76 for r in requirements - self.supported:
76 for r in requirements - self.supported:
77 raise error.RequirementError(
77 raise error.RequirementError(
78 _("requirement '%s' not supported") % r)
78 _("requirement '%s' not supported") % r)
79
79
80 self.sharedpath = self.path
80 self.sharedpath = self.path
81 try:
81 try:
82 s = os.path.realpath(self.opener.read("sharedpath"))
82 s = os.path.realpath(self.opener.read("sharedpath"))
83 if not os.path.exists(s):
83 if not os.path.exists(s):
84 raise error.RepoError(
84 raise error.RepoError(
85 _('.hg/sharedpath points to nonexistent directory %s') % s)
85 _('.hg/sharedpath points to nonexistent directory %s') % s)
86 self.sharedpath = s
86 self.sharedpath = s
87 except IOError, inst:
87 except IOError, inst:
88 if inst.errno != errno.ENOENT:
88 if inst.errno != errno.ENOENT:
89 raise
89 raise
90
90
91 self.store = store.store(requirements, self.sharedpath, scmutil.opener)
91 self.store = store.store(requirements, self.sharedpath, scmutil.opener)
92 self.spath = self.store.path
92 self.spath = self.store.path
93 self.sopener = self.store.opener
93 self.sopener = self.store.opener
94 self.sjoin = self.store.join
94 self.sjoin = self.store.join
95 self.opener.createmode = self.store.createmode
95 self.opener.createmode = self.store.createmode
96 self._applyrequirements(requirements)
96 self._applyrequirements(requirements)
97 if create:
97 if create:
98 self._writerequirements()
98 self._writerequirements()
99
99
100 # These two define the set of tags for this repository. _tags
100 # These two define the set of tags for this repository. _tags
101 # maps tag name to node; _tagtypes maps tag name to 'global' or
101 # maps tag name to node; _tagtypes maps tag name to 'global' or
102 # 'local'. (Global tags are defined by .hgtags across all
102 # 'local'. (Global tags are defined by .hgtags across all
103 # heads, and local tags are defined in .hg/localtags.) They
103 # heads, and local tags are defined in .hg/localtags.) They
104 # constitute the in-memory cache of tags.
104 # constitute the in-memory cache of tags.
105 self._tags = None
105 self._tags = None
106 self._tagtypes = None
106 self._tagtypes = None
107
107
108 self._branchcache = None
108 self._branchcache = None
109 self._branchcachetip = None
109 self._branchcachetip = None
110 self.nodetagscache = None
110 self.nodetagscache = None
111 self.filterpats = {}
111 self.filterpats = {}
112 self._datafilters = {}
112 self._datafilters = {}
113 self._transref = self._lockref = self._wlockref = None
113 self._transref = self._lockref = self._wlockref = None
114
114
115 def _applyrequirements(self, requirements):
115 def _applyrequirements(self, requirements):
116 self.requirements = requirements
116 self.requirements = requirements
117 self.sopener.options = {}
117 self.sopener.options = {}
118
118
119 def _writerequirements(self):
119 def _writerequirements(self):
120 reqfile = self.opener("requires", "w")
120 reqfile = self.opener("requires", "w")
121 for r in self.requirements:
121 for r in self.requirements:
122 reqfile.write("%s\n" % r)
122 reqfile.write("%s\n" % r)
123 reqfile.close()
123 reqfile.close()
124
124
125 def _checknested(self, path):
125 def _checknested(self, path):
126 """Determine if path is a legal nested repository."""
126 """Determine if path is a legal nested repository."""
127 if not path.startswith(self.root):
127 if not path.startswith(self.root):
128 return False
128 return False
129 subpath = path[len(self.root) + 1:]
129 subpath = path[len(self.root) + 1:]
130
130
131 # XXX: Checking against the current working copy is wrong in
131 # XXX: Checking against the current working copy is wrong in
132 # the sense that it can reject things like
132 # the sense that it can reject things like
133 #
133 #
134 # $ hg cat -r 10 sub/x.txt
134 # $ hg cat -r 10 sub/x.txt
135 #
135 #
136 # if sub/ is no longer a subrepository in the working copy
136 # if sub/ is no longer a subrepository in the working copy
137 # parent revision.
137 # parent revision.
138 #
138 #
139 # However, it can of course also allow things that would have
139 # However, it can of course also allow things that would have
140 # been rejected before, such as the above cat command if sub/
140 # been rejected before, such as the above cat command if sub/
141 # is a subrepository now, but was a normal directory before.
141 # is a subrepository now, but was a normal directory before.
142 # The old path auditor would have rejected by mistake since it
142 # The old path auditor would have rejected by mistake since it
143 # panics when it sees sub/.hg/.
143 # panics when it sees sub/.hg/.
144 #
144 #
145 # All in all, checking against the working copy seems sensible
145 # All in all, checking against the working copy seems sensible
146 # since we want to prevent access to nested repositories on
146 # since we want to prevent access to nested repositories on
147 # the filesystem *now*.
147 # the filesystem *now*.
148 ctx = self[None]
148 ctx = self[None]
149 parts = util.splitpath(subpath)
149 parts = util.splitpath(subpath)
150 while parts:
150 while parts:
151 prefix = os.sep.join(parts)
151 prefix = os.sep.join(parts)
152 if prefix in ctx.substate:
152 if prefix in ctx.substate:
153 if prefix == subpath:
153 if prefix == subpath:
154 return True
154 return True
155 else:
155 else:
156 sub = ctx.sub(prefix)
156 sub = ctx.sub(prefix)
157 return sub.checknested(subpath[len(prefix) + 1:])
157 return sub.checknested(subpath[len(prefix) + 1:])
158 else:
158 else:
159 parts.pop()
159 parts.pop()
160 return False
160 return False
161
161
162 @util.propertycache
162 @util.propertycache
163 def _bookmarks(self):
163 def _bookmarks(self):
164 return bookmarks.read(self)
164 return bookmarks.read(self)
165
165
166 @util.propertycache
166 @util.propertycache
167 def _bookmarkcurrent(self):
167 def _bookmarkcurrent(self):
168 return bookmarks.readcurrent(self)
168 return bookmarks.readcurrent(self)
169
169
170 @propertycache
170 @propertycache
171 def changelog(self):
171 def changelog(self):
172 c = changelog.changelog(self.sopener)
172 c = changelog.changelog(self.sopener)
173 if 'HG_PENDING' in os.environ:
173 if 'HG_PENDING' in os.environ:
174 p = os.environ['HG_PENDING']
174 p = os.environ['HG_PENDING']
175 if p.startswith(self.root):
175 if p.startswith(self.root):
176 c.readpending('00changelog.i.a')
176 c.readpending('00changelog.i.a')
177 self.sopener.options['defversion'] = c.version
177 self.sopener.options['defversion'] = c.version
178 return c
178 return c
179
179
180 @propertycache
180 @propertycache
181 def manifest(self):
181 def manifest(self):
182 return manifest.manifest(self.sopener)
182 return manifest.manifest(self.sopener)
183
183
184 @propertycache
184 @propertycache
185 def dirstate(self):
185 def dirstate(self):
186 warned = [0]
186 warned = [0]
187 def validate(node):
187 def validate(node):
188 try:
188 try:
189 self.changelog.rev(node)
189 self.changelog.rev(node)
190 return node
190 return node
191 except error.LookupError:
191 except error.LookupError:
192 if not warned[0]:
192 if not warned[0]:
193 warned[0] = True
193 warned[0] = True
194 self.ui.warn(_("warning: ignoring unknown"
194 self.ui.warn(_("warning: ignoring unknown"
195 " working parent %s!\n") % short(node))
195 " working parent %s!\n") % short(node))
196 return nullid
196 return nullid
197
197
198 return dirstate.dirstate(self.opener, self.ui, self.root, validate)
198 return dirstate.dirstate(self.opener, self.ui, self.root, validate)
199
199
200 def __getitem__(self, changeid):
200 def __getitem__(self, changeid):
201 if changeid is None:
201 if changeid is None:
202 return context.workingctx(self)
202 return context.workingctx(self)
203 return context.changectx(self, changeid)
203 return context.changectx(self, changeid)
204
204
205 def __contains__(self, changeid):
205 def __contains__(self, changeid):
206 try:
206 try:
207 return bool(self.lookup(changeid))
207 return bool(self.lookup(changeid))
208 except error.RepoLookupError:
208 except error.RepoLookupError:
209 return False
209 return False
210
210
211 def __nonzero__(self):
211 def __nonzero__(self):
212 return True
212 return True
213
213
214 def __len__(self):
214 def __len__(self):
215 return len(self.changelog)
215 return len(self.changelog)
216
216
217 def __iter__(self):
217 def __iter__(self):
218 for i in xrange(len(self)):
218 for i in xrange(len(self)):
219 yield i
219 yield i
220
220
221 def url(self):
221 def url(self):
222 return 'file:' + self.root
222 return 'file:' + self.root
223
223
224 def hook(self, name, throw=False, **args):
224 def hook(self, name, throw=False, **args):
225 return hook.hook(self.ui, self, name, throw, **args)
225 return hook.hook(self.ui, self, name, throw, **args)
226
226
227 tag_disallowed = ':\r\n'
227 tag_disallowed = ':\r\n'
228
228
229 def _tag(self, names, node, message, local, user, date, extra={}):
229 def _tag(self, names, node, message, local, user, date, extra={}):
230 if isinstance(names, str):
230 if isinstance(names, str):
231 allchars = names
231 allchars = names
232 names = (names,)
232 names = (names,)
233 else:
233 else:
234 allchars = ''.join(names)
234 allchars = ''.join(names)
235 for c in self.tag_disallowed:
235 for c in self.tag_disallowed:
236 if c in allchars:
236 if c in allchars:
237 raise util.Abort(_('%r cannot be used in a tag name') % c)
237 raise util.Abort(_('%r cannot be used in a tag name') % c)
238
238
239 branches = self.branchmap()
239 branches = self.branchmap()
240 for name in names:
240 for name in names:
241 self.hook('pretag', throw=True, node=hex(node), tag=name,
241 self.hook('pretag', throw=True, node=hex(node), tag=name,
242 local=local)
242 local=local)
243 if name in branches:
243 if name in branches:
244 self.ui.warn(_("warning: tag %s conflicts with existing"
244 self.ui.warn(_("warning: tag %s conflicts with existing"
245 " branch name\n") % name)
245 " branch name\n") % name)
246
246
247 def writetags(fp, names, munge, prevtags):
247 def writetags(fp, names, munge, prevtags):
248 fp.seek(0, 2)
248 fp.seek(0, 2)
249 if prevtags and prevtags[-1] != '\n':
249 if prevtags and prevtags[-1] != '\n':
250 fp.write('\n')
250 fp.write('\n')
251 for name in names:
251 for name in names:
252 m = munge and munge(name) or name
252 m = munge and munge(name) or name
253 if self._tagtypes and name in self._tagtypes:
253 if self._tagtypes and name in self._tagtypes:
254 old = self._tags.get(name, nullid)
254 old = self._tags.get(name, nullid)
255 fp.write('%s %s\n' % (hex(old), m))
255 fp.write('%s %s\n' % (hex(old), m))
256 fp.write('%s %s\n' % (hex(node), m))
256 fp.write('%s %s\n' % (hex(node), m))
257 fp.close()
257 fp.close()
258
258
259 prevtags = ''
259 prevtags = ''
260 if local:
260 if local:
261 try:
261 try:
262 fp = self.opener('localtags', 'r+')
262 fp = self.opener('localtags', 'r+')
263 except IOError:
263 except IOError:
264 fp = self.opener('localtags', 'a')
264 fp = self.opener('localtags', 'a')
265 else:
265 else:
266 prevtags = fp.read()
266 prevtags = fp.read()
267
267
268 # local tags are stored in the current charset
268 # local tags are stored in the current charset
269 writetags(fp, names, None, prevtags)
269 writetags(fp, names, None, prevtags)
270 for name in names:
270 for name in names:
271 self.hook('tag', node=hex(node), tag=name, local=local)
271 self.hook('tag', node=hex(node), tag=name, local=local)
272 return
272 return
273
273
274 try:
274 try:
275 fp = self.wfile('.hgtags', 'rb+')
275 fp = self.wfile('.hgtags', 'rb+')
276 except IOError:
276 except IOError:
277 fp = self.wfile('.hgtags', 'ab')
277 fp = self.wfile('.hgtags', 'ab')
278 else:
278 else:
279 prevtags = fp.read()
279 prevtags = fp.read()
280
280
281 # committed tags are stored in UTF-8
281 # committed tags are stored in UTF-8
282 writetags(fp, names, encoding.fromlocal, prevtags)
282 writetags(fp, names, encoding.fromlocal, prevtags)
283
283
284 fp.close()
284 fp.close()
285
285
286 if '.hgtags' not in self.dirstate:
286 if '.hgtags' not in self.dirstate:
287 self[None].add(['.hgtags'])
287 self[None].add(['.hgtags'])
288
288
289 m = matchmod.exact(self.root, '', ['.hgtags'])
289 m = matchmod.exact(self.root, '', ['.hgtags'])
290 tagnode = self.commit(message, user, date, extra=extra, match=m)
290 tagnode = self.commit(message, user, date, extra=extra, match=m)
291
291
292 for name in names:
292 for name in names:
293 self.hook('tag', node=hex(node), tag=name, local=local)
293 self.hook('tag', node=hex(node), tag=name, local=local)
294
294
295 return tagnode
295 return tagnode
296
296
297 def tag(self, names, node, message, local, user, date):
297 def tag(self, names, node, message, local, user, date):
298 '''tag a revision with one or more symbolic names.
298 '''tag a revision with one or more symbolic names.
299
299
300 names is a list of strings or, when adding a single tag, names may be a
300 names is a list of strings or, when adding a single tag, names may be a
301 string.
301 string.
302
302
303 if local is True, the tags are stored in a per-repository file.
303 if local is True, the tags are stored in a per-repository file.
304 otherwise, they are stored in the .hgtags file, and a new
304 otherwise, they are stored in the .hgtags file, and a new
305 changeset is committed with the change.
305 changeset is committed with the change.
306
306
307 keyword arguments:
307 keyword arguments:
308
308
309 local: whether to store tags in non-version-controlled file
309 local: whether to store tags in non-version-controlled file
310 (default False)
310 (default False)
311
311
312 message: commit message to use if committing
312 message: commit message to use if committing
313
313
314 user: name of user to use if committing
314 user: name of user to use if committing
315
315
316 date: date tuple to use if committing'''
316 date: date tuple to use if committing'''
317
317
318 if not local:
318 if not local:
319 for x in self.status()[:5]:
319 for x in self.status()[:5]:
320 if '.hgtags' in x:
320 if '.hgtags' in x:
321 raise util.Abort(_('working copy of .hgtags is changed '
321 raise util.Abort(_('working copy of .hgtags is changed '
322 '(please commit .hgtags manually)'))
322 '(please commit .hgtags manually)'))
323
323
324 self.tags() # instantiate the cache
324 self.tags() # instantiate the cache
325 self._tag(names, node, message, local, user, date)
325 self._tag(names, node, message, local, user, date)
326
326
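A minimal sketch of driving this tagging API from a script, assuming a repository opened with mercurial.hg.repository; the tag name, the local=True choice, and the helper name are illustrative only:

from mercurial import ui as uimod, hg

def tag_tip(path):
    # hypothetical helper; 'nightly' is an invented tag name
    u = uimod.ui()
    repo = hg.repository(u, path)
    tip = repo['tip'].node()
    # a local tag lands in .hg/localtags; local=False would commit to .hgtags
    repo.tag(['nightly'], tip, 'Added tag nightly', True, None, None)
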
327 def tags(self):
327 def tags(self):
328 '''return a mapping of tag to node'''
328 '''return a mapping of tag to node'''
329 if self._tags is None:
329 if self._tags is None:
330 (self._tags, self._tagtypes) = self._findtags()
330 (self._tags, self._tagtypes) = self._findtags()
331
331
332 return self._tags
332 return self._tags
333
333
334 def _findtags(self):
334 def _findtags(self):
335 '''Do the hard work of finding tags. Return a pair of dicts
335 '''Do the hard work of finding tags. Return a pair of dicts
336 (tags, tagtypes) where tags maps tag name to node, and tagtypes
336 (tags, tagtypes) where tags maps tag name to node, and tagtypes
337 maps tag name to a string like \'global\' or \'local\'.
337 maps tag name to a string like \'global\' or \'local\'.
338 Subclasses or extensions are free to add their own tags, but
338 Subclasses or extensions are free to add their own tags, but
339 should be aware that the returned dicts will be retained for the
339 should be aware that the returned dicts will be retained for the
340 duration of the localrepo object.'''
340 duration of the localrepo object.'''
341
341
342 # XXX what tagtype should subclasses/extensions use? Currently
342 # XXX what tagtype should subclasses/extensions use? Currently
343 # mq and bookmarks add tags, but do not set the tagtype at all.
343 # mq and bookmarks add tags, but do not set the tagtype at all.
344 # Should each extension invent its own tag type? Should there
344 # Should each extension invent its own tag type? Should there
345 # be one tagtype for all such "virtual" tags? Or is the status
345 # be one tagtype for all such "virtual" tags? Or is the status
346 # quo fine?
346 # quo fine?
347
347
348 alltags = {} # map tag name to (node, hist)
348 alltags = {} # map tag name to (node, hist)
349 tagtypes = {}
349 tagtypes = {}
350
350
351 tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
351 tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
352 tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)
352 tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)
353
353
354 # Build the return dicts. Have to re-encode tag names because
354 # Build the return dicts. Have to re-encode tag names because
355 # the tags module always uses UTF-8 (in order not to lose info
355 # the tags module always uses UTF-8 (in order not to lose info
356 # writing to the cache), but the rest of Mercurial wants them in
356 # writing to the cache), but the rest of Mercurial wants them in
357 # local encoding.
357 # local encoding.
358 tags = {}
358 tags = {}
359 for (name, (node, hist)) in alltags.iteritems():
359 for (name, (node, hist)) in alltags.iteritems():
360 if node != nullid:
360 if node != nullid:
361 try:
361 try:
362 # ignore tags to unknown nodes
362 # ignore tags to unknown nodes
363 self.changelog.lookup(node)
363 self.changelog.lookup(node)
364 tags[encoding.tolocal(name)] = node
364 tags[encoding.tolocal(name)] = node
365 except error.LookupError:
365 except error.LookupError:
366 pass
366 pass
367 tags['tip'] = self.changelog.tip()
367 tags['tip'] = self.changelog.tip()
368 tagtypes = dict([(encoding.tolocal(name), value)
368 tagtypes = dict([(encoding.tolocal(name), value)
369 for (name, value) in tagtypes.iteritems()])
369 for (name, value) in tagtypes.iteritems()])
370 return (tags, tagtypes)
370 return (tags, tagtypes)
371
371
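As the docstring above notes, extensions may add their own entries; a hedged sketch of doing so by wrapping _findtags with mercurial.extensions.wrapfunction (the extension hook and the 'snapshot' tag are invented for illustration):

from mercurial import extensions, localrepo

def _findtags(orig, self):
    # add one invented tag on top of whatever the real lookup produced
    tags, tagtypes = orig(self)
    tags['snapshot'] = self.changelog.tip()
    tagtypes['snapshot'] = 'local'
    return tags, tagtypes

def uisetup(ui):
    extensions.wrapfunction(localrepo.localrepository, '_findtags', _findtags)
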
372 def tagtype(self, tagname):
372 def tagtype(self, tagname):
373 '''
373 '''
374 return the type of the given tag. result can be:
374 return the type of the given tag. result can be:
375
375
376 'local' : a local tag
376 'local' : a local tag
377 'global' : a global tag
377 'global' : a global tag
378 None : tag does not exist
378 None : tag does not exist
379 '''
379 '''
380
380
381 self.tags()
381 self.tags()
382
382
383 return self._tagtypes.get(tagname)
383 return self._tagtypes.get(tagname)
384
384
385 def tagslist(self):
385 def tagslist(self):
386 '''return a list of tags ordered by revision'''
386 '''return a list of tags ordered by revision'''
387 l = []
387 l = []
388 for t, n in self.tags().iteritems():
388 for t, n in self.tags().iteritems():
389 r = self.changelog.rev(n)
389 r = self.changelog.rev(n)
390 l.append((r, t, n))
390 l.append((r, t, n))
391 return [(t, n) for r, t, n in sorted(l)]
391 return [(t, n) for r, t, n in sorted(l)]
392
392
393 def nodetags(self, node):
393 def nodetags(self, node):
394 '''return the tags associated with a node'''
394 '''return the tags associated with a node'''
395 if not self.nodetagscache:
395 if not self.nodetagscache:
396 self.nodetagscache = {}
396 self.nodetagscache = {}
397 for t, n in self.tags().iteritems():
397 for t, n in self.tags().iteritems():
398 self.nodetagscache.setdefault(n, []).append(t)
398 self.nodetagscache.setdefault(n, []).append(t)
399 for tags in self.nodetagscache.itervalues():
399 for tags in self.nodetagscache.itervalues():
400 tags.sort()
400 tags.sort()
401 return self.nodetagscache.get(node, [])
401 return self.nodetagscache.get(node, [])
402
402
403 def nodebookmarks(self, node):
403 def nodebookmarks(self, node):
404 marks = []
404 marks = []
405 for bookmark, n in self._bookmarks.iteritems():
405 for bookmark, n in self._bookmarks.iteritems():
406 if n == node:
406 if n == node:
407 marks.append(bookmark)
407 marks.append(bookmark)
408 return sorted(marks)
408 return sorted(marks)
409
409
410 def _branchtags(self, partial, lrev):
410 def _branchtags(self, partial, lrev):
411 # TODO: rename this function?
411 # TODO: rename this function?
412 tiprev = len(self) - 1
412 tiprev = len(self) - 1
413 if lrev != tiprev:
413 if lrev != tiprev:
414 ctxgen = (self[r] for r in xrange(lrev + 1, tiprev + 1))
414 ctxgen = (self[r] for r in xrange(lrev + 1, tiprev + 1))
415 self._updatebranchcache(partial, ctxgen)
415 self._updatebranchcache(partial, ctxgen)
416 self._writebranchcache(partial, self.changelog.tip(), tiprev)
416 self._writebranchcache(partial, self.changelog.tip(), tiprev)
417
417
418 return partial
418 return partial
419
419
420 def updatebranchcache(self):
420 def updatebranchcache(self):
421 tip = self.changelog.tip()
421 tip = self.changelog.tip()
422 if self._branchcache is not None and self._branchcachetip == tip:
422 if self._branchcache is not None and self._branchcachetip == tip:
423 return self._branchcache
423 return self._branchcache
424
424
425 oldtip = self._branchcachetip
425 oldtip = self._branchcachetip
426 self._branchcachetip = tip
426 self._branchcachetip = tip
427 if oldtip is None or oldtip not in self.changelog.nodemap:
427 if oldtip is None or oldtip not in self.changelog.nodemap:
428 partial, last, lrev = self._readbranchcache()
428 partial, last, lrev = self._readbranchcache()
429 else:
429 else:
430 lrev = self.changelog.rev(oldtip)
430 lrev = self.changelog.rev(oldtip)
431 partial = self._branchcache
431 partial = self._branchcache
432
432
433 self._branchtags(partial, lrev)
433 self._branchtags(partial, lrev)
434 # this private cache holds all heads (not just tips)
434 # this private cache holds all heads (not just tips)
435 self._branchcache = partial
435 self._branchcache = partial
436
436
437 def branchmap(self):
437 def branchmap(self):
438 '''returns a dictionary {branch: [branchheads]}'''
438 '''returns a dictionary {branch: [branchheads]}'''
439 self.updatebranchcache()
439 self.updatebranchcache()
440 return self._branchcache
440 return self._branchcache
441
441
442 def branchtags(self):
442 def branchtags(self):
443 '''return a dict where branch names map to the tipmost head of
443 '''return a dict where branch names map to the tipmost head of
444 the branch, open heads come before closed'''
444 the branch, open heads come before closed'''
445 bt = {}
445 bt = {}
446 for bn, heads in self.branchmap().iteritems():
446 for bn, heads in self.branchmap().iteritems():
447 tip = heads[-1]
447 tip = heads[-1]
448 for h in reversed(heads):
448 for h in reversed(heads):
449 if 'close' not in self.changelog.read(h)[5]:
449 if 'close' not in self.changelog.read(h)[5]:
450 tip = h
450 tip = h
451 break
451 break
452 bt[bn] = tip
452 bt[bn] = tip
453 return bt
453 return bt
454
454
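branchtags() keeps the newest head whose changeset extra lacks the 'close' marker; the same test can be made on a single changectx, as a rough illustration:

ctx = repo['tip']                       # any changectx will do
branch_closed_here = 'close' in ctx.extra()
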
455 def _readbranchcache(self):
455 def _readbranchcache(self):
456 partial = {}
456 partial = {}
457 try:
457 try:
458 f = self.opener("cache/branchheads")
458 f = self.opener("cache/branchheads")
459 lines = f.read().split('\n')
459 lines = f.read().split('\n')
460 f.close()
460 f.close()
461 except (IOError, OSError):
461 except (IOError, OSError):
462 return {}, nullid, nullrev
462 return {}, nullid, nullrev
463
463
464 try:
464 try:
465 last, lrev = lines.pop(0).split(" ", 1)
465 last, lrev = lines.pop(0).split(" ", 1)
466 last, lrev = bin(last), int(lrev)
466 last, lrev = bin(last), int(lrev)
467 if lrev >= len(self) or self[lrev].node() != last:
467 if lrev >= len(self) or self[lrev].node() != last:
468 # invalidate the cache
468 # invalidate the cache
469 raise ValueError('invalidating branch cache (tip differs)')
469 raise ValueError('invalidating branch cache (tip differs)')
470 for l in lines:
470 for l in lines:
471 if not l:
471 if not l:
472 continue
472 continue
473 node, label = l.split(" ", 1)
473 node, label = l.split(" ", 1)
474 label = encoding.tolocal(label.strip())
474 label = encoding.tolocal(label.strip())
475 partial.setdefault(label, []).append(bin(node))
475 partial.setdefault(label, []).append(bin(node))
476 except KeyboardInterrupt:
476 except KeyboardInterrupt:
477 raise
477 raise
478 except Exception, inst:
478 except Exception, inst:
479 if self.ui.debugflag:
479 if self.ui.debugflag:
480 self.ui.warn(str(inst), '\n')
480 self.ui.warn(str(inst), '\n')
481 partial, last, lrev = {}, nullid, nullrev
481 partial, last, lrev = {}, nullid, nullrev
482 return partial, last, lrev
482 return partial, last, lrev
483
483
484 def _writebranchcache(self, branches, tip, tiprev):
484 def _writebranchcache(self, branches, tip, tiprev):
485 try:
485 try:
486 f = self.opener("cache/branchheads", "w", atomictemp=True)
486 f = self.opener("cache/branchheads", "w", atomictemp=True)
487 f.write("%s %s\n" % (hex(tip), tiprev))
487 f.write("%s %s\n" % (hex(tip), tiprev))
488 for label, nodes in branches.iteritems():
488 for label, nodes in branches.iteritems():
489 for node in nodes:
489 for node in nodes:
490 f.write("%s %s\n" % (hex(node), encoding.fromlocal(label)))
490 f.write("%s %s\n" % (hex(node), encoding.fromlocal(label)))
491 f.rename()
491 f.rename()
492 except (IOError, OSError):
492 except (IOError, OSError):
493 pass
493 pass
494
494
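Taken together, _readbranchcache and _writebranchcache fix the cache/branchheads format: a header line "<tiphex> <tiprev>" followed by one "<node> <branch>" line per head. A standalone reader along those lines, as a hedged sketch (path handling simplified):

def read_branchheads(path):
    # parse .hg/cache/branchheads written by _writebranchcache above
    f = open(path)
    try:
        tiphex, tiprev = f.readline().split(' ', 1)
        heads = {}
        for line in f:
            line = line.strip()
            if not line:
                continue
            nodehex, label = line.split(' ', 1)
            heads.setdefault(label, []).append(nodehex)
    finally:
        f.close()
    return tiphex, int(tiprev), heads
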
495 def _updatebranchcache(self, partial, ctxgen):
495 def _updatebranchcache(self, partial, ctxgen):
496 # collect new branch entries
496 # collect new branch entries
497 newbranches = {}
497 newbranches = {}
498 for c in ctxgen:
498 for c in ctxgen:
499 newbranches.setdefault(c.branch(), []).append(c.node())
499 newbranches.setdefault(c.branch(), []).append(c.node())
500 # if older branchheads are reachable from new ones, they aren't
500 # if older branchheads are reachable from new ones, they aren't
501 # really branchheads. Note checking parents is insufficient:
501 # really branchheads. Note checking parents is insufficient:
502 # 1 (branch a) -> 2 (branch b) -> 3 (branch a)
502 # 1 (branch a) -> 2 (branch b) -> 3 (branch a)
503 for branch, newnodes in newbranches.iteritems():
503 for branch, newnodes in newbranches.iteritems():
504 bheads = partial.setdefault(branch, [])
504 bheads = partial.setdefault(branch, [])
505 bheads.extend(newnodes)
505 bheads.extend(newnodes)
506 if len(bheads) <= 1:
506 if len(bheads) <= 1:
507 continue
507 continue
508 bheads = sorted(bheads, key=lambda x: self[x].rev())
508 bheads = sorted(bheads, key=lambda x: self[x].rev())
509 # starting from tip means fewer passes over reachable
509 # starting from tip means fewer passes over reachable
510 while newnodes:
510 while newnodes:
511 latest = newnodes.pop()
511 latest = newnodes.pop()
512 if latest not in bheads:
512 if latest not in bheads:
513 continue
513 continue
514 minbhrev = self[bheads[0]].node()
514 minbhrev = self[bheads[0]].node()
515 reachable = self.changelog.reachable(latest, minbhrev)
515 reachable = self.changelog.reachable(latest, minbhrev)
516 reachable.remove(latest)
516 reachable.remove(latest)
517 if reachable:
517 if reachable:
518 bheads = [b for b in bheads if b not in reachable]
518 bheads = [b for b in bheads if b not in reachable]
519 partial[branch] = bheads
519 partial[branch] = bheads
520
520
521 def lookup(self, key):
521 def lookup(self, key):
522 if isinstance(key, int):
522 if isinstance(key, int):
523 return self.changelog.node(key)
523 return self.changelog.node(key)
524 elif key == '.':
524 elif key == '.':
525 return self.dirstate.p1()
525 return self.dirstate.p1()
526 elif key == 'null':
526 elif key == 'null':
527 return nullid
527 return nullid
528 elif key == 'tip':
528 elif key == 'tip':
529 return self.changelog.tip()
529 return self.changelog.tip()
530 n = self.changelog._match(key)
530 n = self.changelog._match(key)
531 if n:
531 if n:
532 return n
532 return n
533 if key in self._bookmarks:
533 if key in self._bookmarks:
534 return self._bookmarks[key]
534 return self._bookmarks[key]
535 if key in self.tags():
535 if key in self.tags():
536 return self.tags()[key]
536 return self.tags()[key]
537 if key in self.branchtags():
537 if key in self.branchtags():
538 return self.branchtags()[key]
538 return self.branchtags()[key]
539 n = self.changelog._partialmatch(key)
539 n = self.changelog._partialmatch(key)
540 if n:
540 if n:
541 return n
541 return n
542
542
543 # can't find key, check if it might have come from damaged dirstate
543 # can't find key, check if it might have come from damaged dirstate
544 if key in self.dirstate.parents():
544 if key in self.dirstate.parents():
545 raise error.Abort(_("working directory has unknown parent '%s'!")
545 raise error.Abort(_("working directory has unknown parent '%s'!")
546 % short(key))
546 % short(key))
547 try:
547 try:
548 if len(key) == 20:
548 if len(key) == 20:
549 key = hex(key)
549 key = hex(key)
550 except TypeError:
550 except TypeError:
551 pass
551 pass
552 raise error.RepoLookupError(_("unknown revision '%s'") % key)
552 raise error.RepoLookupError(_("unknown revision '%s'") % key)
553
553
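lookup() tries, in order: an integer revision, '.', 'null', 'tip', an exact changelog match, bookmarks, tags, branch tips, and finally a unique hash prefix. A small usage sketch, with a hypothetical repository path:

from mercurial import ui as uimod, hg, node

u = uimod.ui()
repo = hg.repository(u, '/path/to/repo')   # path is hypothetical
for key in (0, '.', 'null', 'tip'):
    u.write('%s -> %s\n' % (key, node.short(repo.lookup(key))))
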
554 def lookupbranch(self, key, remote=None):
554 def lookupbranch(self, key, remote=None):
555 repo = remote or self
555 repo = remote or self
556 if key in repo.branchmap():
556 if key in repo.branchmap():
557 return key
557 return key
558
558
559 repo = (remote and remote.local()) and remote or self
559 repo = (remote and remote.local()) and remote or self
560 return repo[key].branch()
560 return repo[key].branch()
561
561
562 def known(self, nodes):
562 def known(self, nodes):
563 nm = self.changelog.nodemap
563 nm = self.changelog.nodemap
564 return [(n in nm) for n in nodes]
564 return [(n in nm) for n in nodes]
565
565
566 def local(self):
566 def local(self):
567 return True
567 return True
568
568
569 def join(self, f):
569 def join(self, f):
570 return os.path.join(self.path, f)
570 return os.path.join(self.path, f)
571
571
572 def wjoin(self, f):
572 def wjoin(self, f):
573 return os.path.join(self.root, f)
573 return os.path.join(self.root, f)
574
574
575 def file(self, f):
575 def file(self, f):
576 if f[0] == '/':
576 if f[0] == '/':
577 f = f[1:]
577 f = f[1:]
578 return filelog.filelog(self.sopener, f)
578 return filelog.filelog(self.sopener, f)
579
579
580 def changectx(self, changeid):
580 def changectx(self, changeid):
581 return self[changeid]
581 return self[changeid]
582
582
583 def parents(self, changeid=None):
583 def parents(self, changeid=None):
584 '''get list of changectxs for parents of changeid'''
584 '''get list of changectxs for parents of changeid'''
585 return self[changeid].parents()
585 return self[changeid].parents()
586
586
587 def filectx(self, path, changeid=None, fileid=None):
587 def filectx(self, path, changeid=None, fileid=None):
588 """changeid can be a changeset revision, node, or tag.
588 """changeid can be a changeset revision, node, or tag.
589 fileid can be a file revision or node."""
589 fileid can be a file revision or node."""
590 return context.filectx(self, path, changeid, fileid)
590 return context.filectx(self, path, changeid, fileid)
591
591
592 def getcwd(self):
592 def getcwd(self):
593 return self.dirstate.getcwd()
593 return self.dirstate.getcwd()
594
594
595 def pathto(self, f, cwd=None):
595 def pathto(self, f, cwd=None):
596 return self.dirstate.pathto(f, cwd)
596 return self.dirstate.pathto(f, cwd)
597
597
598 def wfile(self, f, mode='r'):
598 def wfile(self, f, mode='r'):
599 return self.wopener(f, mode)
599 return self.wopener(f, mode)
600
600
601 def _link(self, f):
601 def _link(self, f):
602 return os.path.islink(self.wjoin(f))
602 return os.path.islink(self.wjoin(f))
603
603
604 def _loadfilter(self, filter):
604 def _loadfilter(self, filter):
605 if filter not in self.filterpats:
605 if filter not in self.filterpats:
606 l = []
606 l = []
607 for pat, cmd in self.ui.configitems(filter):
607 for pat, cmd in self.ui.configitems(filter):
608 if cmd == '!':
608 if cmd == '!':
609 continue
609 continue
610 mf = matchmod.match(self.root, '', [pat])
610 mf = matchmod.match(self.root, '', [pat])
611 fn = None
611 fn = None
612 params = cmd
612 params = cmd
613 for name, filterfn in self._datafilters.iteritems():
613 for name, filterfn in self._datafilters.iteritems():
614 if cmd.startswith(name):
614 if cmd.startswith(name):
615 fn = filterfn
615 fn = filterfn
616 params = cmd[len(name):].lstrip()
616 params = cmd[len(name):].lstrip()
617 break
617 break
618 if not fn:
618 if not fn:
619 fn = lambda s, c, **kwargs: util.filter(s, c)
619 fn = lambda s, c, **kwargs: util.filter(s, c)
620 # Wrap old filters not supporting keyword arguments
620 # Wrap old filters not supporting keyword arguments
621 if not inspect.getargspec(fn)[2]:
621 if not inspect.getargspec(fn)[2]:
622 oldfn = fn
622 oldfn = fn
623 fn = lambda s, c, **kwargs: oldfn(s, c)
623 fn = lambda s, c, **kwargs: oldfn(s, c)
624 l.append((mf, fn, params))
624 l.append((mf, fn, params))
625 self.filterpats[filter] = l
625 self.filterpats[filter] = l
626 return self.filterpats[filter]
626 return self.filterpats[filter]
627
627
628 def _filter(self, filterpats, filename, data):
628 def _filter(self, filterpats, filename, data):
629 for mf, fn, cmd in filterpats:
629 for mf, fn, cmd in filterpats:
630 if mf(filename):
630 if mf(filename):
631 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
631 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
632 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
632 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
633 break
633 break
634
634
635 return data
635 return data
636
636
637 @propertycache
637 @propertycache
638 def _encodefilterpats(self):
638 def _encodefilterpats(self):
639 return self._loadfilter('encode')
639 return self._loadfilter('encode')
640
640
641 @propertycache
641 @propertycache
642 def _decodefilterpats(self):
642 def _decodefilterpats(self):
643 return self._loadfilter('decode')
643 return self._loadfilter('decode')
644
644
645 def adddatafilter(self, name, filter):
645 def adddatafilter(self, name, filter):
646 self._datafilters[name] = filter
646 self._datafilters[name] = filter
647
647
648 def wread(self, filename):
648 def wread(self, filename):
649 if self._link(filename):
649 if self._link(filename):
650 data = os.readlink(self.wjoin(filename))
650 data = os.readlink(self.wjoin(filename))
651 else:
651 else:
652 data = self.wopener.read(filename)
652 data = self.wopener.read(filename)
653 return self._filter(self._encodefilterpats, filename, data)
653 return self._filter(self._encodefilterpats, filename, data)
654
654
655 def wwrite(self, filename, data, flags):
655 def wwrite(self, filename, data, flags):
656 data = self._filter(self._decodefilterpats, filename, data)
656 data = self._filter(self._decodefilterpats, filename, data)
657 if 'l' in flags:
657 if 'l' in flags:
658 self.wopener.symlink(data, filename)
658 self.wopener.symlink(data, filename)
659 else:
659 else:
660 self.wopener.write(filename, data)
660 self.wopener.write(filename, data)
661 if 'x' in flags:
661 if 'x' in flags:
662 util.set_flags(self.wjoin(filename), False, True)
662 util.setflags(self.wjoin(filename), False, True)
663
663
664 def wwritedata(self, filename, data):
664 def wwritedata(self, filename, data):
665 return self._filter(self._decodefilterpats, filename, data)
665 return self._filter(self._decodefilterpats, filename, data)
666
666
667 def transaction(self, desc):
667 def transaction(self, desc):
668 tr = self._transref and self._transref() or None
668 tr = self._transref and self._transref() or None
669 if tr and tr.running():
669 if tr and tr.running():
670 return tr.nest()
670 return tr.nest()
671
671
672 # abort here if the journal already exists
672 # abort here if the journal already exists
673 if os.path.exists(self.sjoin("journal")):
673 if os.path.exists(self.sjoin("journal")):
674 raise error.RepoError(
674 raise error.RepoError(
675 _("abandoned transaction found - run hg recover"))
675 _("abandoned transaction found - run hg recover"))
676
676
677 # save dirstate for rollback
677 # save dirstate for rollback
678 try:
678 try:
679 ds = self.opener.read("dirstate")
679 ds = self.opener.read("dirstate")
680 except IOError:
680 except IOError:
681 ds = ""
681 ds = ""
682 self.opener.write("journal.dirstate", ds)
682 self.opener.write("journal.dirstate", ds)
683 self.opener.write("journal.branch",
683 self.opener.write("journal.branch",
684 encoding.fromlocal(self.dirstate.branch()))
684 encoding.fromlocal(self.dirstate.branch()))
685 self.opener.write("journal.desc",
685 self.opener.write("journal.desc",
686 "%d\n%s\n" % (len(self), desc))
686 "%d\n%s\n" % (len(self), desc))
687
687
688 renames = [(self.sjoin("journal"), self.sjoin("undo")),
688 renames = [(self.sjoin("journal"), self.sjoin("undo")),
689 (self.join("journal.dirstate"), self.join("undo.dirstate")),
689 (self.join("journal.dirstate"), self.join("undo.dirstate")),
690 (self.join("journal.branch"), self.join("undo.branch")),
690 (self.join("journal.branch"), self.join("undo.branch")),
691 (self.join("journal.desc"), self.join("undo.desc"))]
691 (self.join("journal.desc"), self.join("undo.desc"))]
692 tr = transaction.transaction(self.ui.warn, self.sopener,
692 tr = transaction.transaction(self.ui.warn, self.sopener,
693 self.sjoin("journal"),
693 self.sjoin("journal"),
694 aftertrans(renames),
694 aftertrans(renames),
695 self.store.createmode)
695 self.store.createmode)
696 self._transref = weakref.ref(tr)
696 self._transref = weakref.ref(tr)
697 return tr
697 return tr
698
698
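The journal.desc payload written above is just the pre-transaction changelog length and a description; rollback() below re-reads it from undo.desc. A hedged helper showing that interpretation (the helper name is invented):

def describe_undo(repo):
    # "<revision count before the transaction>\n<description>\n"
    args = repo.opener.read('undo.desc').splitlines()
    return int(args[0]) - 1, args[1]
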
699 def recover(self):
699 def recover(self):
700 lock = self.lock()
700 lock = self.lock()
701 try:
701 try:
702 if os.path.exists(self.sjoin("journal")):
702 if os.path.exists(self.sjoin("journal")):
703 self.ui.status(_("rolling back interrupted transaction\n"))
703 self.ui.status(_("rolling back interrupted transaction\n"))
704 transaction.rollback(self.sopener, self.sjoin("journal"),
704 transaction.rollback(self.sopener, self.sjoin("journal"),
705 self.ui.warn)
705 self.ui.warn)
706 self.invalidate()
706 self.invalidate()
707 return True
707 return True
708 else:
708 else:
709 self.ui.warn(_("no interrupted transaction available\n"))
709 self.ui.warn(_("no interrupted transaction available\n"))
710 return False
710 return False
711 finally:
711 finally:
712 lock.release()
712 lock.release()
713
713
714 def rollback(self, dryrun=False):
714 def rollback(self, dryrun=False):
715 wlock = lock = None
715 wlock = lock = None
716 try:
716 try:
717 wlock = self.wlock()
717 wlock = self.wlock()
718 lock = self.lock()
718 lock = self.lock()
719 if os.path.exists(self.sjoin("undo")):
719 if os.path.exists(self.sjoin("undo")):
720 try:
720 try:
721 args = self.opener.read("undo.desc").splitlines()
721 args = self.opener.read("undo.desc").splitlines()
722 if len(args) >= 3 and self.ui.verbose:
722 if len(args) >= 3 and self.ui.verbose:
723 desc = _("repository tip rolled back to revision %s"
723 desc = _("repository tip rolled back to revision %s"
724 " (undo %s: %s)\n") % (
724 " (undo %s: %s)\n") % (
725 int(args[0]) - 1, args[1], args[2])
725 int(args[0]) - 1, args[1], args[2])
726 elif len(args) >= 2:
726 elif len(args) >= 2:
727 desc = _("repository tip rolled back to revision %s"
727 desc = _("repository tip rolled back to revision %s"
728 " (undo %s)\n") % (
728 " (undo %s)\n") % (
729 int(args[0]) - 1, args[1])
729 int(args[0]) - 1, args[1])
730 except IOError:
730 except IOError:
731 desc = _("rolling back unknown transaction\n")
731 desc = _("rolling back unknown transaction\n")
732 self.ui.status(desc)
732 self.ui.status(desc)
733 if dryrun:
733 if dryrun:
734 return
734 return
735 transaction.rollback(self.sopener, self.sjoin("undo"),
735 transaction.rollback(self.sopener, self.sjoin("undo"),
736 self.ui.warn)
736 self.ui.warn)
737 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
737 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
738 if os.path.exists(self.join('undo.bookmarks')):
738 if os.path.exists(self.join('undo.bookmarks')):
739 util.rename(self.join('undo.bookmarks'),
739 util.rename(self.join('undo.bookmarks'),
740 self.join('bookmarks'))
740 self.join('bookmarks'))
741 try:
741 try:
742 branch = self.opener.read("undo.branch")
742 branch = self.opener.read("undo.branch")
743 self.dirstate.setbranch(branch)
743 self.dirstate.setbranch(branch)
744 except IOError:
744 except IOError:
745 self.ui.warn(_("named branch could not be reset, "
745 self.ui.warn(_("named branch could not be reset, "
746 "current branch is still: %s\n")
746 "current branch is still: %s\n")
747 % self.dirstate.branch())
747 % self.dirstate.branch())
748 self.invalidate()
748 self.invalidate()
749 self.dirstate.invalidate()
749 self.dirstate.invalidate()
750 self.destroyed()
750 self.destroyed()
751 parents = tuple([p.rev() for p in self.parents()])
751 parents = tuple([p.rev() for p in self.parents()])
752 if len(parents) > 1:
752 if len(parents) > 1:
753 self.ui.status(_("working directory now based on "
753 self.ui.status(_("working directory now based on "
754 "revisions %d and %d\n") % parents)
754 "revisions %d and %d\n") % parents)
755 else:
755 else:
756 self.ui.status(_("working directory now based on "
756 self.ui.status(_("working directory now based on "
757 "revision %d\n") % parents)
757 "revision %d\n") % parents)
758 else:
758 else:
759 self.ui.warn(_("no rollback information available\n"))
759 self.ui.warn(_("no rollback information available\n"))
760 return 1
760 return 1
761 finally:
761 finally:
762 release(lock, wlock)
762 release(lock, wlock)
763
763
764 def invalidatecaches(self):
764 def invalidatecaches(self):
765 self._tags = None
765 self._tags = None
766 self._tagtypes = None
766 self._tagtypes = None
767 self.nodetagscache = None
767 self.nodetagscache = None
768 self._branchcache = None # in UTF-8
768 self._branchcache = None # in UTF-8
769 self._branchcachetip = None
769 self._branchcachetip = None
770
770
771 def invalidate(self):
771 def invalidate(self):
772 for a in ("changelog", "manifest", "_bookmarks", "_bookmarkcurrent"):
772 for a in ("changelog", "manifest", "_bookmarks", "_bookmarkcurrent"):
773 if a in self.__dict__:
773 if a in self.__dict__:
774 delattr(self, a)
774 delattr(self, a)
775 self.invalidatecaches()
775 self.invalidatecaches()
776
776
777 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
777 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
778 try:
778 try:
779 l = lock.lock(lockname, 0, releasefn, desc=desc)
779 l = lock.lock(lockname, 0, releasefn, desc=desc)
780 except error.LockHeld, inst:
780 except error.LockHeld, inst:
781 if not wait:
781 if not wait:
782 raise
782 raise
783 self.ui.warn(_("waiting for lock on %s held by %r\n") %
783 self.ui.warn(_("waiting for lock on %s held by %r\n") %
784 (desc, inst.locker))
784 (desc, inst.locker))
785 # default to 600 seconds timeout
785 # default to 600 seconds timeout
786 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
786 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
787 releasefn, desc=desc)
787 releasefn, desc=desc)
788 if acquirefn:
788 if acquirefn:
789 acquirefn()
789 acquirefn()
790 return l
790 return l
791
791
792 def lock(self, wait=True):
792 def lock(self, wait=True):
793 '''Lock the repository store (.hg/store) and return a weak reference
793 '''Lock the repository store (.hg/store) and return a weak reference
794 to the lock. Use this before modifying the store (e.g. committing or
794 to the lock. Use this before modifying the store (e.g. committing or
795 stripping). If you are opening a transaction, get a lock as well.'''
795 stripping). If you are opening a transaction, get a lock as well.'''
796 l = self._lockref and self._lockref()
796 l = self._lockref and self._lockref()
797 if l is not None and l.held:
797 if l is not None and l.held:
798 l.lock()
798 l.lock()
799 return l
799 return l
800
800
801 l = self._lock(self.sjoin("lock"), wait, self.store.write,
801 l = self._lock(self.sjoin("lock"), wait, self.store.write,
802 self.invalidate, _('repository %s') % self.origroot)
802 self.invalidate, _('repository %s') % self.origroot)
803 self._lockref = weakref.ref(l)
803 self._lockref = weakref.ref(l)
804 return l
804 return l
805
805
806 def wlock(self, wait=True):
806 def wlock(self, wait=True):
807 '''Lock the non-store parts of the repository (everything under
807 '''Lock the non-store parts of the repository (everything under
808 .hg except .hg/store) and return a weak reference to the lock.
808 .hg except .hg/store) and return a weak reference to the lock.
809 Use this before modifying files in .hg.'''
809 Use this before modifying files in .hg.'''
810 l = self._wlockref and self._wlockref()
810 l = self._wlockref and self._wlockref()
811 if l is not None and l.held:
811 if l is not None and l.held:
812 l.lock()
812 l.lock()
813 return l
813 return l
814
814
815 l = self._lock(self.join("wlock"), wait, self.dirstate.write,
815 l = self._lock(self.join("wlock"), wait, self.dirstate.write,
816 self.dirstate.invalidate, _('working directory of %s') %
816 self.dirstate.invalidate, _('working directory of %s') %
817 self.origroot)
817 self.origroot)
818 self._wlockref = weakref.ref(l)
818 self._wlockref = weakref.ref(l)
819 return l
819 return l
820
820
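The two docstrings above imply the usual ordering: take wlock() before lock(), and open the transaction while the store lock is held, releasing in reverse order. A hedged sketch of that pattern, mirroring how commit() and commitctx() use it below:

def modify_store(repo):
    # hypothetical store modification following the locking order above
    wlock = repo.wlock()
    lock = repo.lock()
    try:
        tr = repo.transaction('example')
        try:
            # ... write store data through tr ...
            tr.close()
        finally:
            tr.release()
    finally:
        lock.release()
        wlock.release()
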
821 def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
821 def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
822 """
822 """
823 commit an individual file as part of a larger transaction
823 commit an individual file as part of a larger transaction
824 """
824 """
825
825
826 fname = fctx.path()
826 fname = fctx.path()
827 text = fctx.data()
827 text = fctx.data()
828 flog = self.file(fname)
828 flog = self.file(fname)
829 fparent1 = manifest1.get(fname, nullid)
829 fparent1 = manifest1.get(fname, nullid)
830 fparent2 = fparent2o = manifest2.get(fname, nullid)
830 fparent2 = fparent2o = manifest2.get(fname, nullid)
831
831
832 meta = {}
832 meta = {}
833 copy = fctx.renamed()
833 copy = fctx.renamed()
834 if copy and copy[0] != fname:
834 if copy and copy[0] != fname:
835 # Mark the new revision of this file as a copy of another
835 # Mark the new revision of this file as a copy of another
836 # file. This copy data will effectively act as a parent
836 # file. This copy data will effectively act as a parent
837 # of this new revision. If this is a merge, the first
837 # of this new revision. If this is a merge, the first
838 # parent will be the nullid (meaning "look up the copy data")
838 # parent will be the nullid (meaning "look up the copy data")
839 # and the second one will be the other parent. For example:
839 # and the second one will be the other parent. For example:
840 #
840 #
841 # 0 --- 1 --- 3 rev1 changes file foo
841 # 0 --- 1 --- 3 rev1 changes file foo
842 # \ / rev2 renames foo to bar and changes it
842 # \ / rev2 renames foo to bar and changes it
843 # \- 2 -/ rev3 should have bar with all changes and
843 # \- 2 -/ rev3 should have bar with all changes and
844 # should record that bar descends from
844 # should record that bar descends from
845 # bar in rev2 and foo in rev1
845 # bar in rev2 and foo in rev1
846 #
846 #
847 # this allows this merge to succeed:
847 # this allows this merge to succeed:
848 #
848 #
849 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
849 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
850 # \ / merging rev3 and rev4 should use bar@rev2
850 # \ / merging rev3 and rev4 should use bar@rev2
851 # \- 2 --- 4 as the merge base
851 # \- 2 --- 4 as the merge base
852 #
852 #
853
853
854 cfname = copy[0]
854 cfname = copy[0]
855 crev = manifest1.get(cfname)
855 crev = manifest1.get(cfname)
856 newfparent = fparent2
856 newfparent = fparent2
857
857
858 if manifest2: # branch merge
858 if manifest2: # branch merge
859 if fparent2 == nullid or crev is None: # copied on remote side
859 if fparent2 == nullid or crev is None: # copied on remote side
860 if cfname in manifest2:
860 if cfname in manifest2:
861 crev = manifest2[cfname]
861 crev = manifest2[cfname]
862 newfparent = fparent1
862 newfparent = fparent1
863
863
864 # find source in nearest ancestor if we've lost track
864 # find source in nearest ancestor if we've lost track
865 if not crev:
865 if not crev:
866 self.ui.debug(" %s: searching for copy revision for %s\n" %
866 self.ui.debug(" %s: searching for copy revision for %s\n" %
867 (fname, cfname))
867 (fname, cfname))
868 for ancestor in self[None].ancestors():
868 for ancestor in self[None].ancestors():
869 if cfname in ancestor:
869 if cfname in ancestor:
870 crev = ancestor[cfname].filenode()
870 crev = ancestor[cfname].filenode()
871 break
871 break
872
872
873 if crev:
873 if crev:
874 self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
874 self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
875 meta["copy"] = cfname
875 meta["copy"] = cfname
876 meta["copyrev"] = hex(crev)
876 meta["copyrev"] = hex(crev)
877 fparent1, fparent2 = nullid, newfparent
877 fparent1, fparent2 = nullid, newfparent
878 else:
878 else:
879 self.ui.warn(_("warning: can't find ancestor for '%s' "
879 self.ui.warn(_("warning: can't find ancestor for '%s' "
880 "copied from '%s'!\n") % (fname, cfname))
880 "copied from '%s'!\n") % (fname, cfname))
881
881
882 elif fparent2 != nullid:
882 elif fparent2 != nullid:
883 # is one parent an ancestor of the other?
883 # is one parent an ancestor of the other?
884 fparentancestor = flog.ancestor(fparent1, fparent2)
884 fparentancestor = flog.ancestor(fparent1, fparent2)
885 if fparentancestor == fparent1:
885 if fparentancestor == fparent1:
886 fparent1, fparent2 = fparent2, nullid
886 fparent1, fparent2 = fparent2, nullid
887 elif fparentancestor == fparent2:
887 elif fparentancestor == fparent2:
888 fparent2 = nullid
888 fparent2 = nullid
889
889
890 # is the file changed?
890 # is the file changed?
891 if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
891 if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
892 changelist.append(fname)
892 changelist.append(fname)
893 return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
893 return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
894
894
895 # are just the flags changed during merge?
895 # are just the flags changed during merge?
896 if fparent1 != fparent2o and manifest1.flags(fname) != fctx.flags():
896 if fparent1 != fparent2o and manifest1.flags(fname) != fctx.flags():
897 changelist.append(fname)
897 changelist.append(fname)
898
898
899 return fparent1
899 return fparent1
900
900
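The copy metadata recorded here ('copy' and 'copyrev') is what filectx.renamed() later reports; a rough illustration, with an invented filename:

fctx = repo['tip']['bar']          # 'bar' is an invented filename
renamed = fctx.renamed()           # None, or (source path, source filenode)
if renamed:
    source, sourcenode = renamed
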
901 def commit(self, text="", user=None, date=None, match=None, force=False,
901 def commit(self, text="", user=None, date=None, match=None, force=False,
902 editor=False, extra={}):
902 editor=False, extra={}):
903 """Add a new revision to current repository.
903 """Add a new revision to current repository.
904
904
905 Revision information is gathered from the working directory,
905 Revision information is gathered from the working directory,
906 match can be used to filter the committed files. If editor is
906 match can be used to filter the committed files. If editor is
907 supplied, it is called to get a commit message.
907 supplied, it is called to get a commit message.
908 """
908 """
909
909
910 def fail(f, msg):
910 def fail(f, msg):
911 raise util.Abort('%s: %s' % (f, msg))
911 raise util.Abort('%s: %s' % (f, msg))
912
912
913 if not match:
913 if not match:
914 match = matchmod.always(self.root, '')
914 match = matchmod.always(self.root, '')
915
915
916 if not force:
916 if not force:
917 vdirs = []
917 vdirs = []
918 match.dir = vdirs.append
918 match.dir = vdirs.append
919 match.bad = fail
919 match.bad = fail
920
920
921 wlock = self.wlock()
921 wlock = self.wlock()
922 try:
922 try:
923 wctx = self[None]
923 wctx = self[None]
924 merge = len(wctx.parents()) > 1
924 merge = len(wctx.parents()) > 1
925
925
926 if (not force and merge and match and
926 if (not force and merge and match and
927 (match.files() or match.anypats())):
927 (match.files() or match.anypats())):
928 raise util.Abort(_('cannot partially commit a merge '
928 raise util.Abort(_('cannot partially commit a merge '
929 '(do not specify files or patterns)'))
929 '(do not specify files or patterns)'))
930
930
931 changes = self.status(match=match, clean=force)
931 changes = self.status(match=match, clean=force)
932 if force:
932 if force:
933 changes[0].extend(changes[6]) # mq may commit unchanged files
933 changes[0].extend(changes[6]) # mq may commit unchanged files
934
934
935 # check subrepos
935 # check subrepos
936 subs = []
936 subs = []
937 removedsubs = set()
937 removedsubs = set()
938 for p in wctx.parents():
938 for p in wctx.parents():
939 removedsubs.update(s for s in p.substate if match(s))
939 removedsubs.update(s for s in p.substate if match(s))
940 for s in wctx.substate:
940 for s in wctx.substate:
941 removedsubs.discard(s)
941 removedsubs.discard(s)
942 if match(s) and wctx.sub(s).dirty():
942 if match(s) and wctx.sub(s).dirty():
943 subs.append(s)
943 subs.append(s)
944 if (subs or removedsubs):
944 if (subs or removedsubs):
945 if (not match('.hgsub') and
945 if (not match('.hgsub') and
946 '.hgsub' in (wctx.modified() + wctx.added())):
946 '.hgsub' in (wctx.modified() + wctx.added())):
947 raise util.Abort(_("can't commit subrepos without .hgsub"))
947 raise util.Abort(_("can't commit subrepos without .hgsub"))
948 if '.hgsubstate' not in changes[0]:
948 if '.hgsubstate' not in changes[0]:
949 changes[0].insert(0, '.hgsubstate')
949 changes[0].insert(0, '.hgsubstate')
950
950
951 if subs and not self.ui.configbool('ui', 'commitsubrepos', True):
951 if subs and not self.ui.configbool('ui', 'commitsubrepos', True):
952 changedsubs = [s for s in subs if wctx.sub(s).dirty(True)]
952 changedsubs = [s for s in subs if wctx.sub(s).dirty(True)]
953 if changedsubs:
953 if changedsubs:
954 raise util.Abort(_("uncommitted changes in subrepo %s")
954 raise util.Abort(_("uncommitted changes in subrepo %s")
955 % changedsubs[0])
955 % changedsubs[0])
956
956
957 # make sure all explicit patterns are matched
957 # make sure all explicit patterns are matched
958 if not force and match.files():
958 if not force and match.files():
959 matched = set(changes[0] + changes[1] + changes[2])
959 matched = set(changes[0] + changes[1] + changes[2])
960
960
961 for f in match.files():
961 for f in match.files():
962 if f == '.' or f in matched or f in wctx.substate:
962 if f == '.' or f in matched or f in wctx.substate:
963 continue
963 continue
964 if f in changes[3]: # missing
964 if f in changes[3]: # missing
965 fail(f, _('file not found!'))
965 fail(f, _('file not found!'))
966 if f in vdirs: # visited directory
966 if f in vdirs: # visited directory
967 d = f + '/'
967 d = f + '/'
968 for mf in matched:
968 for mf in matched:
969 if mf.startswith(d):
969 if mf.startswith(d):
970 break
970 break
971 else:
971 else:
972 fail(f, _("no match under directory!"))
972 fail(f, _("no match under directory!"))
973 elif f not in self.dirstate:
973 elif f not in self.dirstate:
974 fail(f, _("file not tracked!"))
974 fail(f, _("file not tracked!"))
975
975
976 if (not force and not extra.get("close") and not merge
976 if (not force and not extra.get("close") and not merge
977 and not (changes[0] or changes[1] or changes[2])
977 and not (changes[0] or changes[1] or changes[2])
978 and wctx.branch() == wctx.p1().branch()):
978 and wctx.branch() == wctx.p1().branch()):
979 return None
979 return None
980
980
981 ms = mergemod.mergestate(self)
981 ms = mergemod.mergestate(self)
982 for f in changes[0]:
982 for f in changes[0]:
983 if f in ms and ms[f] == 'u':
983 if f in ms and ms[f] == 'u':
984 raise util.Abort(_("unresolved merge conflicts "
984 raise util.Abort(_("unresolved merge conflicts "
985 "(see hg help resolve)"))
985 "(see hg help resolve)"))
986
986
987 cctx = context.workingctx(self, text, user, date, extra, changes)
987 cctx = context.workingctx(self, text, user, date, extra, changes)
988 if editor:
988 if editor:
989 cctx._text = editor(self, cctx, subs)
989 cctx._text = editor(self, cctx, subs)
990 edited = (text != cctx._text)
990 edited = (text != cctx._text)
991
991
992 # commit subs
992 # commit subs
993 if subs or removedsubs:
993 if subs or removedsubs:
994 state = wctx.substate.copy()
994 state = wctx.substate.copy()
995 for s in sorted(subs):
995 for s in sorted(subs):
996 sub = wctx.sub(s)
996 sub = wctx.sub(s)
997 self.ui.status(_('committing subrepository %s\n') %
997 self.ui.status(_('committing subrepository %s\n') %
998 subrepo.subrelpath(sub))
998 subrepo.subrelpath(sub))
999 sr = sub.commit(cctx._text, user, date)
999 sr = sub.commit(cctx._text, user, date)
1000 state[s] = (state[s][0], sr)
1000 state[s] = (state[s][0], sr)
1001 subrepo.writestate(self, state)
1001 subrepo.writestate(self, state)
1002
1002
1003 # Save commit message in case this transaction gets rolled back
1003 # Save commit message in case this transaction gets rolled back
1004 # (e.g. by a pretxncommit hook). Leave the content alone on
1004 # (e.g. by a pretxncommit hook). Leave the content alone on
1005 # the assumption that the user will use the same editor again.
1005 # the assumption that the user will use the same editor again.
1006 msgfile = self.opener('last-message.txt', 'wb')
1006 msgfile = self.opener('last-message.txt', 'wb')
1007 msgfile.write(cctx._text)
1007 msgfile.write(cctx._text)
1008 msgfile.close()
1008 msgfile.close()
1009
1009
1010 p1, p2 = self.dirstate.parents()
1010 p1, p2 = self.dirstate.parents()
1011 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
1011 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
1012 try:
1012 try:
1013 self.hook("precommit", throw=True, parent1=hookp1, parent2=hookp2)
1013 self.hook("precommit", throw=True, parent1=hookp1, parent2=hookp2)
1014 ret = self.commitctx(cctx, True)
1014 ret = self.commitctx(cctx, True)
1015 except:
1015 except:
1016 if edited:
1016 if edited:
1017 msgfn = self.pathto(msgfile.name[len(self.root)+1:])
1017 msgfn = self.pathto(msgfile.name[len(self.root)+1:])
1018 self.ui.write(
1018 self.ui.write(
1019 _('note: commit message saved in %s\n') % msgfn)
1019 _('note: commit message saved in %s\n') % msgfn)
1020 raise
1020 raise
1021
1021
1022 # update bookmarks, dirstate and mergestate
1022 # update bookmarks, dirstate and mergestate
1023 bookmarks.update(self, p1, ret)
1023 bookmarks.update(self, p1, ret)
1024 for f in changes[0] + changes[1]:
1024 for f in changes[0] + changes[1]:
1025 self.dirstate.normal(f)
1025 self.dirstate.normal(f)
1026 for f in changes[2]:
1026 for f in changes[2]:
1027 self.dirstate.forget(f)
1027 self.dirstate.forget(f)
1028 self.dirstate.setparents(ret)
1028 self.dirstate.setparents(ret)
1029 ms.reset()
1029 ms.reset()
1030 finally:
1030 finally:
1031 wlock.release()
1031 wlock.release()
1032
1032
1033 self.hook("commit", node=hex(ret), parent1=hookp1, parent2=hookp2)
1033 self.hook("commit", node=hex(ret), parent1=hookp1, parent2=hookp2)
1034 return ret
1034 return ret
1035
1035
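A hedged sketch of calling this entry point programmatically for a single file, using matchmod.exact the same way _tag() does above; the helper name and arguments are illustrative:

from mercurial import match as matchmod

def commit_one_file(repo, path, message):
    m = matchmod.exact(repo.root, '', [path])
    return repo.commit(message, user=None, date=None, match=m)
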
1036 def commitctx(self, ctx, error=False):
1036 def commitctx(self, ctx, error=False):
1037 """Add a new revision to current repository.
1037 """Add a new revision to current repository.
1038 Revision information is passed via the context argument.
1038 Revision information is passed via the context argument.
1039 """
1039 """
1040
1040
1041 tr = lock = None
1041 tr = lock = None
1042 removed = list(ctx.removed())
1042 removed = list(ctx.removed())
1043 p1, p2 = ctx.p1(), ctx.p2()
1043 p1, p2 = ctx.p1(), ctx.p2()
1044 user = ctx.user()
1044 user = ctx.user()
1045
1045
1046 lock = self.lock()
1046 lock = self.lock()
1047 try:
1047 try:
1048 tr = self.transaction("commit")
1048 tr = self.transaction("commit")
1049 trp = weakref.proxy(tr)
1049 trp = weakref.proxy(tr)
1050
1050
1051 if ctx.files():
1051 if ctx.files():
1052 m1 = p1.manifest().copy()
1052 m1 = p1.manifest().copy()
1053 m2 = p2.manifest()
1053 m2 = p2.manifest()
1054
1054
1055 # check in files
1055 # check in files
1056 new = {}
1056 new = {}
1057 changed = []
1057 changed = []
1058 linkrev = len(self)
1058 linkrev = len(self)
1059 for f in sorted(ctx.modified() + ctx.added()):
1059 for f in sorted(ctx.modified() + ctx.added()):
1060 self.ui.note(f + "\n")
1060 self.ui.note(f + "\n")
1061 try:
1061 try:
1062 fctx = ctx[f]
1062 fctx = ctx[f]
1063 new[f] = self._filecommit(fctx, m1, m2, linkrev, trp,
1063 new[f] = self._filecommit(fctx, m1, m2, linkrev, trp,
1064 changed)
1064 changed)
1065 m1.set(f, fctx.flags())
1065 m1.set(f, fctx.flags())
1066 except OSError, inst:
1066 except OSError, inst:
1067 self.ui.warn(_("trouble committing %s!\n") % f)
1067 self.ui.warn(_("trouble committing %s!\n") % f)
1068 raise
1068 raise
1069 except IOError, inst:
1069 except IOError, inst:
1070 errcode = getattr(inst, 'errno', errno.ENOENT)
1070 errcode = getattr(inst, 'errno', errno.ENOENT)
1071 if error or errcode and errcode != errno.ENOENT:
1071 if error or errcode and errcode != errno.ENOENT:
1072 self.ui.warn(_("trouble committing %s!\n") % f)
1072 self.ui.warn(_("trouble committing %s!\n") % f)
1073 raise
1073 raise
1074 else:
1074 else:
1075 removed.append(f)
1075 removed.append(f)
1076
1076
1077 # update manifest
1077 # update manifest
1078 m1.update(new)
1078 m1.update(new)
1079 removed = [f for f in sorted(removed) if f in m1 or f in m2]
1079 removed = [f for f in sorted(removed) if f in m1 or f in m2]
1080 drop = [f for f in removed if f in m1]
1080 drop = [f for f in removed if f in m1]
1081 for f in drop:
1081 for f in drop:
1082 del m1[f]
1082 del m1[f]
1083 mn = self.manifest.add(m1, trp, linkrev, p1.manifestnode(),
1083 mn = self.manifest.add(m1, trp, linkrev, p1.manifestnode(),
1084 p2.manifestnode(), (new, drop))
1084 p2.manifestnode(), (new, drop))
1085 files = changed + removed
1085 files = changed + removed
1086 else:
1086 else:
1087 mn = p1.manifestnode()
1087 mn = p1.manifestnode()
1088 files = []
1088 files = []
1089
1089
1090 # update changelog
1090 # update changelog
1091 self.changelog.delayupdate()
1091 self.changelog.delayupdate()
1092 n = self.changelog.add(mn, files, ctx.description(),
1092 n = self.changelog.add(mn, files, ctx.description(),
1093 trp, p1.node(), p2.node(),
1093 trp, p1.node(), p2.node(),
1094 user, ctx.date(), ctx.extra().copy())
1094 user, ctx.date(), ctx.extra().copy())
1095 p = lambda: self.changelog.writepending() and self.root or ""
1095 p = lambda: self.changelog.writepending() and self.root or ""
1096 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
1096 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
1097 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
1097 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
1098 parent2=xp2, pending=p)
1098 parent2=xp2, pending=p)
1099 self.changelog.finalize(trp)
1099 self.changelog.finalize(trp)
1100 tr.close()
1100 tr.close()
1101
1101
1102 if self._branchcache:
1102 if self._branchcache:
1103 self.updatebranchcache()
1103 self.updatebranchcache()
1104 return n
1104 return n
1105 finally:
1105 finally:
1106 if tr:
1106 if tr:
1107 tr.release()
1107 tr.release()
1108 lock.release()
1108 lock.release()
1109
1109
1110 def destroyed(self):
1110 def destroyed(self):
1111 '''Inform the repository that nodes have been destroyed.
1111 '''Inform the repository that nodes have been destroyed.
1112 Intended for use by strip and rollback, so there's a common
1112 Intended for use by strip and rollback, so there's a common
1113 place for anything that has to be done after destroying history.'''
1113 place for anything that has to be done after destroying history.'''
1114 # XXX it might be nice if we could take the list of destroyed
1114 # XXX it might be nice if we could take the list of destroyed
1115 # nodes, but I don't see an easy way for rollback() to do that
1115 # nodes, but I don't see an easy way for rollback() to do that
1116
1116
1117 # Ensure the persistent tag cache is updated. Doing it now
1117 # Ensure the persistent tag cache is updated. Doing it now
1118 # means that the tag cache only has to worry about destroyed
1118 # means that the tag cache only has to worry about destroyed
1119 # heads immediately after a strip/rollback. That in turn
1119 # heads immediately after a strip/rollback. That in turn
1120 # guarantees that "cachetip == currenttip" (comparing both rev
1120 # guarantees that "cachetip == currenttip" (comparing both rev
1121 # and node) always means no nodes have been added or destroyed.
1121 # and node) always means no nodes have been added or destroyed.
1122
1122
1123 # XXX this is suboptimal when qrefresh'ing: we strip the current
1123 # XXX this is suboptimal when qrefresh'ing: we strip the current
1124 # head, refresh the tag cache, then immediately add a new head.
1124 # head, refresh the tag cache, then immediately add a new head.
1125 # But I think doing it this way is necessary for the "instant
1125 # But I think doing it this way is necessary for the "instant
1126 # tag cache retrieval" case to work.
1126 # tag cache retrieval" case to work.
1127 self.invalidatecaches()
1127 self.invalidatecaches()
1128
1128
1129 def walk(self, match, node=None):
1129 def walk(self, match, node=None):
1130 '''
1130 '''
1131 walk recursively through the directory tree or a given
1131 walk recursively through the directory tree or a given
1132 changeset, finding all files matched by the match
1132 changeset, finding all files matched by the match
1133 function
1133 function
1134 '''
1134 '''
1135 return self[node].walk(match)
1135 return self[node].walk(match)
1136
1136
1137 def status(self, node1='.', node2=None, match=None,
1137 def status(self, node1='.', node2=None, match=None,
1138 ignored=False, clean=False, unknown=False,
1138 ignored=False, clean=False, unknown=False,
1139 listsubrepos=False):
1139 listsubrepos=False):
1140 """return status of files between two nodes or node and working directory
1140 """return status of files between two nodes or node and working directory
1141
1141
1142 If node1 is None, use the first dirstate parent instead.
1142 If node1 is None, use the first dirstate parent instead.
1143 If node2 is None, compare node1 with working directory.
1143 If node2 is None, compare node1 with working directory.
1144 """
1144 """
1145
1145
1146 def mfmatches(ctx):
1146 def mfmatches(ctx):
1147 mf = ctx.manifest().copy()
1147 mf = ctx.manifest().copy()
1148 for fn in mf.keys():
1148 for fn in mf.keys():
1149 if not match(fn):
1149 if not match(fn):
1150 del mf[fn]
1150 del mf[fn]
1151 return mf
1151 return mf
1152
1152
1153 if isinstance(node1, context.changectx):
1153 if isinstance(node1, context.changectx):
1154 ctx1 = node1
1154 ctx1 = node1
1155 else:
1155 else:
1156 ctx1 = self[node1]
1156 ctx1 = self[node1]
1157 if isinstance(node2, context.changectx):
1157 if isinstance(node2, context.changectx):
1158 ctx2 = node2
1158 ctx2 = node2
1159 else:
1159 else:
1160 ctx2 = self[node2]
1160 ctx2 = self[node2]
1161
1161
1162 working = ctx2.rev() is None
1162 working = ctx2.rev() is None
1163 parentworking = working and ctx1 == self['.']
1163 parentworking = working and ctx1 == self['.']
1164 match = match or matchmod.always(self.root, self.getcwd())
1164 match = match or matchmod.always(self.root, self.getcwd())
1165 listignored, listclean, listunknown = ignored, clean, unknown
1165 listignored, listclean, listunknown = ignored, clean, unknown
1166
1166
1167 # load earliest manifest first for caching reasons
1167 # load earliest manifest first for caching reasons
1168 if not working and ctx2.rev() < ctx1.rev():
1168 if not working and ctx2.rev() < ctx1.rev():
1169 ctx2.manifest()
1169 ctx2.manifest()
1170
1170
1171 if not parentworking:
1171 if not parentworking:
1172 def bad(f, msg):
1172 def bad(f, msg):
1173 if f not in ctx1:
1173 if f not in ctx1:
1174 self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
1174 self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
1175 match.bad = bad
1175 match.bad = bad
1176
1176
1177 if working: # we need to scan the working dir
1177 if working: # we need to scan the working dir
1178 subrepos = []
1178 subrepos = []
1179 if '.hgsub' in self.dirstate:
1179 if '.hgsub' in self.dirstate:
1180 subrepos = ctx1.substate.keys()
1180 subrepos = ctx1.substate.keys()
1181 s = self.dirstate.status(match, subrepos, listignored,
1181 s = self.dirstate.status(match, subrepos, listignored,
1182 listclean, listunknown)
1182 listclean, listunknown)
1183 cmp, modified, added, removed, deleted, unknown, ignored, clean = s
1183 cmp, modified, added, removed, deleted, unknown, ignored, clean = s
1184
1184
1185 # check for any possibly clean files
1185 # check for any possibly clean files
1186 if parentworking and cmp:
1186 if parentworking and cmp:
1187 fixup = []
1187 fixup = []
1188 # do a full compare of any files that might have changed
1188 # do a full compare of any files that might have changed
1189 for f in sorted(cmp):
1189 for f in sorted(cmp):
1190 if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
1190 if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
1191 or ctx1[f].cmp(ctx2[f])):
1191 or ctx1[f].cmp(ctx2[f])):
1192 modified.append(f)
1192 modified.append(f)
1193 else:
1193 else:
1194 fixup.append(f)
1194 fixup.append(f)
1195
1195
1196 # update dirstate for files that are actually clean
1196 # update dirstate for files that are actually clean
1197 if fixup:
1197 if fixup:
1198 if listclean:
1198 if listclean:
1199 clean += fixup
1199 clean += fixup
1200
1200
1201 try:
1201 try:
1202 # updating the dirstate is optional
1202 # updating the dirstate is optional
1203 # so we don't wait on the lock
1203 # so we don't wait on the lock
1204 wlock = self.wlock(False)
1204 wlock = self.wlock(False)
1205 try:
1205 try:
1206 for f in fixup:
1206 for f in fixup:
1207 self.dirstate.normal(f)
1207 self.dirstate.normal(f)
1208 finally:
1208 finally:
1209 wlock.release()
1209 wlock.release()
1210 except error.LockError:
1210 except error.LockError:
1211 pass
1211 pass
1212
1212
1213 if not parentworking:
1213 if not parentworking:
1214 mf1 = mfmatches(ctx1)
1214 mf1 = mfmatches(ctx1)
1215 if working:
1215 if working:
1216 # we are comparing working dir against non-parent
1216 # we are comparing working dir against non-parent
1217 # generate a pseudo-manifest for the working dir
1217 # generate a pseudo-manifest for the working dir
1218 mf2 = mfmatches(self['.'])
1218 mf2 = mfmatches(self['.'])
1219 for f in cmp + modified + added:
1219 for f in cmp + modified + added:
1220 mf2[f] = None
1220 mf2[f] = None
1221 mf2.set(f, ctx2.flags(f))
1221 mf2.set(f, ctx2.flags(f))
1222 for f in removed:
1222 for f in removed:
1223 if f in mf2:
1223 if f in mf2:
1224 del mf2[f]
1224 del mf2[f]
1225 else:
1225 else:
1226 # we are comparing two revisions
1226 # we are comparing two revisions
1227 deleted, unknown, ignored = [], [], []
1227 deleted, unknown, ignored = [], [], []
1228 mf2 = mfmatches(ctx2)
1228 mf2 = mfmatches(ctx2)
1229
1229
1230 modified, added, clean = [], [], []
1230 modified, added, clean = [], [], []
1231 for fn in mf2:
1231 for fn in mf2:
1232 if fn in mf1:
1232 if fn in mf1:
1233 if (fn not in deleted and
1233 if (fn not in deleted and
1234 (mf1.flags(fn) != mf2.flags(fn) or
1234 (mf1.flags(fn) != mf2.flags(fn) or
1235 (mf1[fn] != mf2[fn] and
1235 (mf1[fn] != mf2[fn] and
1236 (mf2[fn] or ctx1[fn].cmp(ctx2[fn]))))):
1236 (mf2[fn] or ctx1[fn].cmp(ctx2[fn]))))):
1237 modified.append(fn)
1237 modified.append(fn)
1238 elif listclean:
1238 elif listclean:
1239 clean.append(fn)
1239 clean.append(fn)
1240 del mf1[fn]
1240 del mf1[fn]
1241 elif fn not in deleted:
1241 elif fn not in deleted:
1242 added.append(fn)
1242 added.append(fn)
1243 removed = mf1.keys()
1243 removed = mf1.keys()
1244
1244
1245 r = modified, added, removed, deleted, unknown, ignored, clean
1245 r = modified, added, removed, deleted, unknown, ignored, clean
1246
1246
1247 if listsubrepos:
1247 if listsubrepos:
1248 for subpath, sub in subrepo.itersubrepos(ctx1, ctx2):
1248 for subpath, sub in subrepo.itersubrepos(ctx1, ctx2):
1249 if working:
1249 if working:
1250 rev2 = None
1250 rev2 = None
1251 else:
1251 else:
1252 rev2 = ctx2.substate[subpath][1]
1252 rev2 = ctx2.substate[subpath][1]
1253 try:
1253 try:
1254 submatch = matchmod.narrowmatcher(subpath, match)
1254 submatch = matchmod.narrowmatcher(subpath, match)
1255 s = sub.status(rev2, match=submatch, ignored=listignored,
1255 s = sub.status(rev2, match=submatch, ignored=listignored,
1256 clean=listclean, unknown=listunknown,
1256 clean=listclean, unknown=listunknown,
1257 listsubrepos=True)
1257 listsubrepos=True)
1258 for rfiles, sfiles in zip(r, s):
1258 for rfiles, sfiles in zip(r, s):
1259 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
1259 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
1260 except error.LookupError:
1260 except error.LookupError:
1261 self.ui.status(_("skipping missing subrepository: %s\n")
1261 self.ui.status(_("skipping missing subrepository: %s\n")
1262 % subpath)
1262 % subpath)
1263
1263
1264 for l in r:
1264 for l in r:
1265 l.sort()
1265 l.sort()
1266 return r
1266 return r
1267
1267
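For reference, status() returns a 7-tuple of sorted file lists in the order (modified, added, removed, deleted, unknown, ignored, clean); the last three stay empty unless explicitly requested. A minimal usage sketch, assuming a repository in the current directory:

    from mercurial import ui as uimod, hg

    repo = hg.repository(uimod.ui(), '.')
    # default arguments compare the first dirstate parent ('.') with the working dir
    st = repo.status(unknown=True, ignored=True, clean=True)
    modified, added, removed, deleted, unknown, ignored, clean = st
    print "modified:", modified
    print "unknown: ", unknown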
1268 def heads(self, start=None):
1268 def heads(self, start=None):
1269 heads = self.changelog.heads(start)
1269 heads = self.changelog.heads(start)
1270 # sort the output in rev descending order
1270 # sort the output in rev descending order
1271 return sorted(heads, key=self.changelog.rev, reverse=True)
1271 return sorted(heads, key=self.changelog.rev, reverse=True)
1272
1272
1273 def branchheads(self, branch=None, start=None, closed=False):
1273 def branchheads(self, branch=None, start=None, closed=False):
1274 '''return a (possibly filtered) list of heads for the given branch
1274 '''return a (possibly filtered) list of heads for the given branch
1275
1275
1276 Heads are returned in topological order, from newest to oldest.
1276 Heads are returned in topological order, from newest to oldest.
1277 If branch is None, use the dirstate branch.
1277 If branch is None, use the dirstate branch.
1278 If start is not None, return only heads reachable from start.
1278 If start is not None, return only heads reachable from start.
1279 If closed is True, return heads that are marked as closed as well.
1279 If closed is True, return heads that are marked as closed as well.
1280 '''
1280 '''
1281 if branch is None:
1281 if branch is None:
1282 branch = self[None].branch()
1282 branch = self[None].branch()
1283 branches = self.branchmap()
1283 branches = self.branchmap()
1284 if branch not in branches:
1284 if branch not in branches:
1285 return []
1285 return []
1286 # the cache returns heads ordered lowest to highest
1286 # the cache returns heads ordered lowest to highest
1287 bheads = list(reversed(branches[branch]))
1287 bheads = list(reversed(branches[branch]))
1288 if start is not None:
1288 if start is not None:
1289 # filter out the heads that cannot be reached from startrev
1289 # filter out the heads that cannot be reached from startrev
1290 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1290 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1291 bheads = [h for h in bheads if h in fbheads]
1291 bheads = [h for h in bheads if h in fbheads]
1292 if not closed:
1292 if not closed:
1293 bheads = [h for h in bheads if
1293 bheads = [h for h in bheads if
1294 ('close' not in self.changelog.read(h)[5])]
1294 ('close' not in self.changelog.read(h)[5])]
1295 return bheads
1295 return bheads
1296
1296
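A short usage sketch (the branch name is only an example): heads come back newest first, with closed heads omitted unless closed=True is passed.

    from mercurial import ui as uimod, hg, node

    repo = hg.repository(uimod.ui(), '.')
    for h in repo.branchheads('default'):
        print node.short(h)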
1297 def branches(self, nodes):
1297 def branches(self, nodes):
1298 if not nodes:
1298 if not nodes:
1299 nodes = [self.changelog.tip()]
1299 nodes = [self.changelog.tip()]
1300 b = []
1300 b = []
1301 for n in nodes:
1301 for n in nodes:
1302 t = n
1302 t = n
1303 while 1:
1303 while 1:
1304 p = self.changelog.parents(n)
1304 p = self.changelog.parents(n)
1305 if p[1] != nullid or p[0] == nullid:
1305 if p[1] != nullid or p[0] == nullid:
1306 b.append((t, n, p[0], p[1]))
1306 b.append((t, n, p[0], p[1]))
1307 break
1307 break
1308 n = p[0]
1308 n = p[0]
1309 return b
1309 return b
1310
1310
1311 def between(self, pairs):
1311 def between(self, pairs):
1312 r = []
1312 r = []
1313
1313
1314 for top, bottom in pairs:
1314 for top, bottom in pairs:
1315 n, l, i = top, [], 0
1315 n, l, i = top, [], 0
1316 f = 1
1316 f = 1
1317
1317
1318 while n != bottom and n != nullid:
1318 while n != bottom and n != nullid:
1319 p = self.changelog.parents(n)[0]
1319 p = self.changelog.parents(n)[0]
1320 if i == f:
1320 if i == f:
1321 l.append(n)
1321 l.append(n)
1322 f = f * 2
1322 f = f * 2
1323 n = p
1323 n = p
1324 i += 1
1324 i += 1
1325
1325
1326 r.append(l)
1326 r.append(l)
1327
1327
1328 return r
1328 return r
1329
1329
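between() backs the legacy 'between' wire command used by old discovery: for each (top, bottom) pair it walks first parents from top toward bottom and keeps the nodes at distances 1, 2, 4, 8, ... A standalone sketch of just that sampling pattern:

    def sample_distances(chainlength):
        picked, i, f = [], 0, 1
        while i < chainlength:          # mirrors 'while n != bottom and n != nullid'
            if i == f:
                picked.append(i)        # the node i steps below 'top' is kept
                f *= 2
            i += 1
        return picked

    print sample_distances(20)          # [1, 2, 4, 8, 16]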
1330 def pull(self, remote, heads=None, force=False):
1330 def pull(self, remote, heads=None, force=False):
1331 lock = self.lock()
1331 lock = self.lock()
1332 try:
1332 try:
1333 tmp = discovery.findcommonincoming(self, remote, heads=heads,
1333 tmp = discovery.findcommonincoming(self, remote, heads=heads,
1334 force=force)
1334 force=force)
1335 common, fetch, rheads = tmp
1335 common, fetch, rheads = tmp
1336 if not fetch:
1336 if not fetch:
1337 self.ui.status(_("no changes found\n"))
1337 self.ui.status(_("no changes found\n"))
1338 result = 0
1338 result = 0
1339 else:
1339 else:
1340 if heads is None and list(common) == [nullid]:
1340 if heads is None and list(common) == [nullid]:
1341 self.ui.status(_("requesting all changes\n"))
1341 self.ui.status(_("requesting all changes\n"))
1342 elif heads is None and remote.capable('changegroupsubset'):
1342 elif heads is None and remote.capable('changegroupsubset'):
1343 # issue1320, avoid a race if remote changed after discovery
1343 # issue1320, avoid a race if remote changed after discovery
1344 heads = rheads
1344 heads = rheads
1345
1345
1346 if remote.capable('getbundle'):
1346 if remote.capable('getbundle'):
1347 cg = remote.getbundle('pull', common=common,
1347 cg = remote.getbundle('pull', common=common,
1348 heads=heads or rheads)
1348 heads=heads or rheads)
1349 elif heads is None:
1349 elif heads is None:
1350 cg = remote.changegroup(fetch, 'pull')
1350 cg = remote.changegroup(fetch, 'pull')
1351 elif not remote.capable('changegroupsubset'):
1351 elif not remote.capable('changegroupsubset'):
1352 raise util.Abort(_("partial pull cannot be done because "
1352 raise util.Abort(_("partial pull cannot be done because "
1353 "other repository doesn't support "
1353 "other repository doesn't support "
1354 "changegroupsubset."))
1354 "changegroupsubset."))
1355 else:
1355 else:
1356 cg = remote.changegroupsubset(fetch, heads, 'pull')
1356 cg = remote.changegroupsubset(fetch, heads, 'pull')
1357 result = self.addchangegroup(cg, 'pull', remote.url(),
1357 result = self.addchangegroup(cg, 'pull', remote.url(),
1358 lock=lock)
1358 lock=lock)
1359 finally:
1359 finally:
1360 lock.release()
1360 lock.release()
1361
1361
1362 return result
1362 return result
1363
1363
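A minimal sketch of driving pull() directly (the source path is a placeholder); the return value is 0 when there is nothing to fetch, otherwise whatever addchangegroup() returned:

    from mercurial import ui as uimod, hg

    repo = hg.repository(uimod.ui(), '.')
    other = hg.repository(uimod.ui(), '../some-other-clone')
    ret = repo.pull(other)
    if ret == 0:
        print "no changes found"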
1364 def checkpush(self, force, revs):
1364 def checkpush(self, force, revs):
1365 """Extensions can override this function if additional checks have
1365 """Extensions can override this function if additional checks have
1366 to be performed before pushing, or call it if they override push
1366 to be performed before pushing, or call it if they override push
1367 command.
1367 command.
1368 """
1368 """
1369 pass
1369 pass
1370
1370
1371 def push(self, remote, force=False, revs=None, newbranch=False):
1371 def push(self, remote, force=False, revs=None, newbranch=False):
1372 '''Push outgoing changesets (limited by revs) from the current
1372 '''Push outgoing changesets (limited by revs) from the current
1373 repository to remote. Return an integer:
1373 repository to remote. Return an integer:
1374 - 0 means HTTP error *or* nothing to push
1374 - 0 means HTTP error *or* nothing to push
1375 - 1 means we pushed and remote head count is unchanged *or*
1375 - 1 means we pushed and remote head count is unchanged *or*
1376 we have outgoing changesets but refused to push
1376 we have outgoing changesets but refused to push
1377 - other values as described by addchangegroup()
1377 - other values as described by addchangegroup()
1378 '''
1378 '''
1379 # there are two ways to push to remote repo:
1379 # there are two ways to push to remote repo:
1380 #
1380 #
1381 # addchangegroup assumes local user can lock remote
1381 # addchangegroup assumes local user can lock remote
1382 # repo (local filesystem, old ssh servers).
1382 # repo (local filesystem, old ssh servers).
1383 #
1383 #
1384 # unbundle assumes local user cannot lock remote repo (new ssh
1384 # unbundle assumes local user cannot lock remote repo (new ssh
1385 # servers, http servers).
1385 # servers, http servers).
1386
1386
1387 self.checkpush(force, revs)
1387 self.checkpush(force, revs)
1388 lock = None
1388 lock = None
1389 unbundle = remote.capable('unbundle')
1389 unbundle = remote.capable('unbundle')
1390 if not unbundle:
1390 if not unbundle:
1391 lock = remote.lock()
1391 lock = remote.lock()
1392 try:
1392 try:
1393 cg, remote_heads = discovery.prepush(self, remote, force, revs,
1393 cg, remote_heads = discovery.prepush(self, remote, force, revs,
1394 newbranch)
1394 newbranch)
1395 ret = remote_heads
1395 ret = remote_heads
1396 if cg is not None:
1396 if cg is not None:
1397 if unbundle:
1397 if unbundle:
1398 # local repo finds heads on server, finds out what
1398 # local repo finds heads on server, finds out what
1399 # revs it must push. once revs transferred, if server
1399 # revs it must push. once revs transferred, if server
1400 # finds it has different heads (someone else won
1400 # finds it has different heads (someone else won
1401 # commit/push race), server aborts.
1401 # commit/push race), server aborts.
1402 if force:
1402 if force:
1403 remote_heads = ['force']
1403 remote_heads = ['force']
1404 # ssh: return remote's addchangegroup()
1404 # ssh: return remote's addchangegroup()
1405 # http: return remote's addchangegroup() or 0 for error
1405 # http: return remote's addchangegroup() or 0 for error
1406 ret = remote.unbundle(cg, remote_heads, 'push')
1406 ret = remote.unbundle(cg, remote_heads, 'push')
1407 else:
1407 else:
1408 # we return an integer indicating remote head count change
1408 # we return an integer indicating remote head count change
1409 ret = remote.addchangegroup(cg, 'push', self.url(),
1409 ret = remote.addchangegroup(cg, 'push', self.url(),
1410 lock=lock)
1410 lock=lock)
1411 finally:
1411 finally:
1412 if lock is not None:
1412 if lock is not None:
1413 lock.release()
1413 lock.release()
1414
1414
1415 self.ui.debug("checking for updated bookmarks\n")
1415 self.ui.debug("checking for updated bookmarks\n")
1416 rb = remote.listkeys('bookmarks')
1416 rb = remote.listkeys('bookmarks')
1417 for k in rb.keys():
1417 for k in rb.keys():
1418 if k in self._bookmarks:
1418 if k in self._bookmarks:
1419 nr, nl = rb[k], hex(self._bookmarks[k])
1419 nr, nl = rb[k], hex(self._bookmarks[k])
1420 if nr in self:
1420 if nr in self:
1421 cr = self[nr]
1421 cr = self[nr]
1422 cl = self[nl]
1422 cl = self[nl]
1423 if cl in cr.descendants():
1423 if cl in cr.descendants():
1424 r = remote.pushkey('bookmarks', k, nr, nl)
1424 r = remote.pushkey('bookmarks', k, nr, nl)
1425 if r:
1425 if r:
1426 self.ui.status(_("updating bookmark %s\n") % k)
1426 self.ui.status(_("updating bookmark %s\n") % k)
1427 else:
1427 else:
1428 self.ui.warn(_('updating bookmark %s'
1428 self.ui.warn(_('updating bookmark %s'
1429 ' failed!\n') % k)
1429 ' failed!\n') % k)
1430
1430
1431 return ret
1431 return ret
1432
1432
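Given the return convention documented above, a small sketch interpreting a push result (assumes repo and other are already-open repository objects, as in the pull sketch earlier):

    ret = repo.push(other)
    if ret == 0:
        print "push failed (HTTP error) or there was nothing to push"
    elif ret == 1:
        print "pushed; remote head count unchanged (or push was refused)"
    elif ret > 1:
        print "pushed; remote gained %d head(s)" % (ret - 1)
    else:
        print "pushed; remote lost %d head(s)" % (-ret - 1)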
1433 def changegroupinfo(self, nodes, source):
1433 def changegroupinfo(self, nodes, source):
1434 if self.ui.verbose or source == 'bundle':
1434 if self.ui.verbose or source == 'bundle':
1435 self.ui.status(_("%d changesets found\n") % len(nodes))
1435 self.ui.status(_("%d changesets found\n") % len(nodes))
1436 if self.ui.debugflag:
1436 if self.ui.debugflag:
1437 self.ui.debug("list of changesets:\n")
1437 self.ui.debug("list of changesets:\n")
1438 for node in nodes:
1438 for node in nodes:
1439 self.ui.debug("%s\n" % hex(node))
1439 self.ui.debug("%s\n" % hex(node))
1440
1440
1441 def changegroupsubset(self, bases, heads, source):
1441 def changegroupsubset(self, bases, heads, source):
1442 """Compute a changegroup consisting of all the nodes that are
1442 """Compute a changegroup consisting of all the nodes that are
1443 descendants of any of the bases and ancestors of any of the heads.
1443 descendants of any of the bases and ancestors of any of the heads.
1444 Return a chunkbuffer object whose read() method will return
1444 Return a chunkbuffer object whose read() method will return
1445 successive changegroup chunks.
1445 successive changegroup chunks.
1446
1446
1447 It is fairly complex as determining which filenodes and which
1447 It is fairly complex as determining which filenodes and which
1448 manifest nodes need to be included for the changeset to be complete
1448 manifest nodes need to be included for the changeset to be complete
1449 is non-trivial.
1449 is non-trivial.
1450
1450
1451 Another wrinkle is doing the reverse, figuring out which changeset in
1451 Another wrinkle is doing the reverse, figuring out which changeset in
1452 the changegroup a particular filenode or manifestnode belongs to.
1452 the changegroup a particular filenode or manifestnode belongs to.
1453 """
1453 """
1454 cl = self.changelog
1454 cl = self.changelog
1455 if not bases:
1455 if not bases:
1456 bases = [nullid]
1456 bases = [nullid]
1457 csets, bases, heads = cl.nodesbetween(bases, heads)
1457 csets, bases, heads = cl.nodesbetween(bases, heads)
1458 # We assume that all ancestors of bases are known
1458 # We assume that all ancestors of bases are known
1459 common = set(cl.ancestors(*[cl.rev(n) for n in bases]))
1459 common = set(cl.ancestors(*[cl.rev(n) for n in bases]))
1460 return self._changegroupsubset(common, csets, heads, source)
1460 return self._changegroupsubset(common, csets, heads, source)
1461
1461
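A minimal sketch that uses changegroupsubset() to capture the whole repository and write it out as an uncompressed bundle; the filename is a placeholder, and it assumes changegroup.writebundle() with its (cg, filename, bundletype) signature:

    from mercurial import ui as uimod, hg, changegroup
    from mercurial.node import nullid

    repo = hg.repository(uimod.ui(), '.')
    cg = repo.changegroupsubset([nullid], repo.heads(), 'bundle')
    changegroup.writebundle(cg, 'all.hg', 'HG10UN')   # 'UN' = uncompressed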
1462 def getbundle(self, source, heads=None, common=None):
1462 def getbundle(self, source, heads=None, common=None):
1463 """Like changegroupsubset, but returns the set difference between the
1463 """Like changegroupsubset, but returns the set difference between the
1464 ancestors of heads and the ancestors of common.
1464 ancestors of heads and the ancestors of common.
1465
1465
1466 If heads is None, use the local heads. If common is None, use [nullid].
1466 If heads is None, use the local heads. If common is None, use [nullid].
1467
1467
1468 The nodes in common might not all be known locally due to the way the
1468 The nodes in common might not all be known locally due to the way the
1469 current discovery protocol works.
1469 current discovery protocol works.
1470 """
1470 """
1471 cl = self.changelog
1471 cl = self.changelog
1472 if common:
1472 if common:
1473 nm = cl.nodemap
1473 nm = cl.nodemap
1474 common = [n for n in common if n in nm]
1474 common = [n for n in common if n in nm]
1475 else:
1475 else:
1476 common = [nullid]
1476 common = [nullid]
1477 if not heads:
1477 if not heads:
1478 heads = cl.heads()
1478 heads = cl.heads()
1479 common, missing = cl.findcommonmissing(common, heads)
1479 common, missing = cl.findcommonmissing(common, heads)
1480 if not missing:
1480 if not missing:
1481 return None
1481 return None
1482 return self._changegroupsubset(common, missing, heads, source)
1482 return self._changegroupsubset(common, missing, heads, source)
1483
1483
1484 def _changegroupsubset(self, commonrevs, csets, heads, source):
1484 def _changegroupsubset(self, commonrevs, csets, heads, source):
1485
1485
1486 cl = self.changelog
1486 cl = self.changelog
1487 mf = self.manifest
1487 mf = self.manifest
1488 mfs = {} # needed manifests
1488 mfs = {} # needed manifests
1489 fnodes = {} # needed file nodes
1489 fnodes = {} # needed file nodes
1490 changedfiles = set()
1490 changedfiles = set()
1491 fstate = ['', {}]
1491 fstate = ['', {}]
1492 count = [0]
1492 count = [0]
1493
1493
1494 # can we go through the fast path?
1494 # can we go through the fast path?
1495 heads.sort()
1495 heads.sort()
1496 if heads == sorted(self.heads()):
1496 if heads == sorted(self.heads()):
1497 return self._changegroup(csets, source)
1497 return self._changegroup(csets, source)
1498
1498
1499 # slow path
1499 # slow path
1500 self.hook('preoutgoing', throw=True, source=source)
1500 self.hook('preoutgoing', throw=True, source=source)
1501 self.changegroupinfo(csets, source)
1501 self.changegroupinfo(csets, source)
1502
1502
1503 # filter any nodes that claim to be part of the known set
1503 # filter any nodes that claim to be part of the known set
1504 def prune(revlog, missing):
1504 def prune(revlog, missing):
1505 for n in missing:
1505 for n in missing:
1506 if revlog.linkrev(revlog.rev(n)) not in commonrevs:
1506 if revlog.linkrev(revlog.rev(n)) not in commonrevs:
1507 yield n
1507 yield n
1508
1508
1509 def lookup(revlog, x):
1509 def lookup(revlog, x):
1510 if revlog == cl:
1510 if revlog == cl:
1511 c = cl.read(x)
1511 c = cl.read(x)
1512 changedfiles.update(c[3])
1512 changedfiles.update(c[3])
1513 mfs.setdefault(c[0], x)
1513 mfs.setdefault(c[0], x)
1514 count[0] += 1
1514 count[0] += 1
1515 self.ui.progress(_('bundling'), count[0], unit=_('changesets'))
1515 self.ui.progress(_('bundling'), count[0], unit=_('changesets'))
1516 return x
1516 return x
1517 elif revlog == mf:
1517 elif revlog == mf:
1518 clnode = mfs[x]
1518 clnode = mfs[x]
1519 mdata = mf.readfast(x)
1519 mdata = mf.readfast(x)
1520 for f in changedfiles:
1520 for f in changedfiles:
1521 if f in mdata:
1521 if f in mdata:
1522 fnodes.setdefault(f, {}).setdefault(mdata[f], clnode)
1522 fnodes.setdefault(f, {}).setdefault(mdata[f], clnode)
1523 count[0] += 1
1523 count[0] += 1
1524 self.ui.progress(_('bundling'), count[0],
1524 self.ui.progress(_('bundling'), count[0],
1525 unit=_('manifests'), total=len(mfs))
1525 unit=_('manifests'), total=len(mfs))
1526 return mfs[x]
1526 return mfs[x]
1527 else:
1527 else:
1528 self.ui.progress(
1528 self.ui.progress(
1529 _('bundling'), count[0], item=fstate[0],
1529 _('bundling'), count[0], item=fstate[0],
1530 unit=_('files'), total=len(changedfiles))
1530 unit=_('files'), total=len(changedfiles))
1531 return fstate[1][x]
1531 return fstate[1][x]
1532
1532
1533 bundler = changegroup.bundle10(lookup)
1533 bundler = changegroup.bundle10(lookup)
1534
1534
1535 def gengroup():
1535 def gengroup():
1536 # Create a changenode group generator that will call our functions
1536 # Create a changenode group generator that will call our functions
1537 # back to lookup the owning changenode and collect information.
1537 # back to lookup the owning changenode and collect information.
1538 for chunk in cl.group(csets, bundler):
1538 for chunk in cl.group(csets, bundler):
1539 yield chunk
1539 yield chunk
1540 self.ui.progress(_('bundling'), None)
1540 self.ui.progress(_('bundling'), None)
1541
1541
1542 # Create a generator for the manifestnodes that calls our lookup
1542 # Create a generator for the manifestnodes that calls our lookup
1543 # and data collection functions back.
1543 # and data collection functions back.
1544 count[0] = 0
1544 count[0] = 0
1545 for chunk in mf.group(prune(mf, mfs), bundler):
1545 for chunk in mf.group(prune(mf, mfs), bundler):
1546 yield chunk
1546 yield chunk
1547 self.ui.progress(_('bundling'), None)
1547 self.ui.progress(_('bundling'), None)
1548
1548
1549 mfs.clear()
1549 mfs.clear()
1550
1550
1551 # Go through all our files in order sorted by name.
1551 # Go through all our files in order sorted by name.
1552 count[0] = 0
1552 count[0] = 0
1553 for fname in sorted(changedfiles):
1553 for fname in sorted(changedfiles):
1554 filerevlog = self.file(fname)
1554 filerevlog = self.file(fname)
1555 if not len(filerevlog):
1555 if not len(filerevlog):
1556 raise util.Abort(_("empty or missing revlog for %s") % fname)
1556 raise util.Abort(_("empty or missing revlog for %s") % fname)
1557 fstate[0] = fname
1557 fstate[0] = fname
1558 fstate[1] = fnodes.pop(fname, {})
1558 fstate[1] = fnodes.pop(fname, {})
1559 first = True
1559 first = True
1560
1560
1561 for chunk in filerevlog.group(prune(filerevlog, fstate[1]),
1561 for chunk in filerevlog.group(prune(filerevlog, fstate[1]),
1562 bundler):
1562 bundler):
1563 if first:
1563 if first:
1564 if chunk == bundler.close():
1564 if chunk == bundler.close():
1565 break
1565 break
1566 count[0] += 1
1566 count[0] += 1
1567 yield bundler.fileheader(fname)
1567 yield bundler.fileheader(fname)
1568 first = False
1568 first = False
1569 yield chunk
1569 yield chunk
1570 # Signal that no more groups are left.
1570 # Signal that no more groups are left.
1571 yield bundler.close()
1571 yield bundler.close()
1572 self.ui.progress(_('bundling'), None)
1572 self.ui.progress(_('bundling'), None)
1573
1573
1574 if csets:
1574 if csets:
1575 self.hook('outgoing', node=hex(csets[0]), source=source)
1575 self.hook('outgoing', node=hex(csets[0]), source=source)
1576
1576
1577 return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
1577 return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
1578
1578
1579 def changegroup(self, basenodes, source):
1579 def changegroup(self, basenodes, source):
1580 # to avoid a race we use changegroupsubset() (issue1320)
1580 # to avoid a race we use changegroupsubset() (issue1320)
1581 return self.changegroupsubset(basenodes, self.heads(), source)
1581 return self.changegroupsubset(basenodes, self.heads(), source)
1582
1582
1583 def _changegroup(self, nodes, source):
1583 def _changegroup(self, nodes, source):
1584 """Compute the changegroup of all nodes that we have that a recipient
1584 """Compute the changegroup of all nodes that we have that a recipient
1585 doesn't. Return a chunkbuffer object whose read() method will return
1585 doesn't. Return a chunkbuffer object whose read() method will return
1586 successive changegroup chunks.
1586 successive changegroup chunks.
1587
1587
1588 This is much easier than the previous function as we can assume that
1588 This is much easier than the previous function as we can assume that
1589 the recipient has any changenode we aren't sending them.
1589 the recipient has any changenode we aren't sending them.
1590
1590
1591 nodes is the set of nodes to send"""
1591 nodes is the set of nodes to send"""
1592
1592
1593 cl = self.changelog
1593 cl = self.changelog
1594 mf = self.manifest
1594 mf = self.manifest
1595 mfs = {}
1595 mfs = {}
1596 changedfiles = set()
1596 changedfiles = set()
1597 fstate = ['']
1597 fstate = ['']
1598 count = [0]
1598 count = [0]
1599
1599
1600 self.hook('preoutgoing', throw=True, source=source)
1600 self.hook('preoutgoing', throw=True, source=source)
1601 self.changegroupinfo(nodes, source)
1601 self.changegroupinfo(nodes, source)
1602
1602
1603 revset = set([cl.rev(n) for n in nodes])
1603 revset = set([cl.rev(n) for n in nodes])
1604
1604
1605 def gennodelst(log):
1605 def gennodelst(log):
1606 for r in log:
1606 for r in log:
1607 if log.linkrev(r) in revset:
1607 if log.linkrev(r) in revset:
1608 yield log.node(r)
1608 yield log.node(r)
1609
1609
1610 def lookup(revlog, x):
1610 def lookup(revlog, x):
1611 if revlog == cl:
1611 if revlog == cl:
1612 c = cl.read(x)
1612 c = cl.read(x)
1613 changedfiles.update(c[3])
1613 changedfiles.update(c[3])
1614 mfs.setdefault(c[0], x)
1614 mfs.setdefault(c[0], x)
1615 count[0] += 1
1615 count[0] += 1
1616 self.ui.progress(_('bundling'), count[0], unit=_('changesets'))
1616 self.ui.progress(_('bundling'), count[0], unit=_('changesets'))
1617 return x
1617 return x
1618 elif revlog == mf:
1618 elif revlog == mf:
1619 count[0] += 1
1619 count[0] += 1
1620 self.ui.progress(_('bundling'), count[0],
1620 self.ui.progress(_('bundling'), count[0],
1621 unit=_('manifests'), total=len(mfs))
1621 unit=_('manifests'), total=len(mfs))
1622 return cl.node(revlog.linkrev(revlog.rev(x)))
1622 return cl.node(revlog.linkrev(revlog.rev(x)))
1623 else:
1623 else:
1624 self.ui.progress(
1624 self.ui.progress(
1625 _('bundling'), count[0], item=fstate[0],
1625 _('bundling'), count[0], item=fstate[0],
1626 total=len(changedfiles), unit=_('files'))
1626 total=len(changedfiles), unit=_('files'))
1627 return cl.node(revlog.linkrev(revlog.rev(x)))
1627 return cl.node(revlog.linkrev(revlog.rev(x)))
1628
1628
1629 bundler = changegroup.bundle10(lookup)
1629 bundler = changegroup.bundle10(lookup)
1630
1630
1631 def gengroup():
1631 def gengroup():
1632 '''yield a sequence of changegroup chunks (strings)'''
1632 '''yield a sequence of changegroup chunks (strings)'''
1633 # construct a list of all changed files
1633 # construct a list of all changed files
1634
1634
1635 for chunk in cl.group(nodes, bundler):
1635 for chunk in cl.group(nodes, bundler):
1636 yield chunk
1636 yield chunk
1637 self.ui.progress(_('bundling'), None)
1637 self.ui.progress(_('bundling'), None)
1638
1638
1639 count[0] = 0
1639 count[0] = 0
1640 for chunk in mf.group(gennodelst(mf), bundler):
1640 for chunk in mf.group(gennodelst(mf), bundler):
1641 yield chunk
1641 yield chunk
1642 self.ui.progress(_('bundling'), None)
1642 self.ui.progress(_('bundling'), None)
1643
1643
1644 count[0] = 0
1644 count[0] = 0
1645 for fname in sorted(changedfiles):
1645 for fname in sorted(changedfiles):
1646 filerevlog = self.file(fname)
1646 filerevlog = self.file(fname)
1647 if not len(filerevlog):
1647 if not len(filerevlog):
1648 raise util.Abort(_("empty or missing revlog for %s") % fname)
1648 raise util.Abort(_("empty or missing revlog for %s") % fname)
1649 fstate[0] = fname
1649 fstate[0] = fname
1650 first = True
1650 first = True
1651 for chunk in filerevlog.group(gennodelst(filerevlog), bundler):
1651 for chunk in filerevlog.group(gennodelst(filerevlog), bundler):
1652 if first:
1652 if first:
1653 if chunk == bundler.close():
1653 if chunk == bundler.close():
1654 break
1654 break
1655 count[0] += 1
1655 count[0] += 1
1656 yield bundler.fileheader(fname)
1656 yield bundler.fileheader(fname)
1657 first = False
1657 first = False
1658 yield chunk
1658 yield chunk
1659 yield bundler.close()
1659 yield bundler.close()
1660 self.ui.progress(_('bundling'), None)
1660 self.ui.progress(_('bundling'), None)
1661
1661
1662 if nodes:
1662 if nodes:
1663 self.hook('outgoing', node=hex(nodes[0]), source=source)
1663 self.hook('outgoing', node=hex(nodes[0]), source=source)
1664
1664
1665 return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
1665 return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
1666
1666
1667 def addchangegroup(self, source, srctype, url, emptyok=False, lock=None):
1667 def addchangegroup(self, source, srctype, url, emptyok=False, lock=None):
1668 """Add the changegroup returned by source.read() to this repo.
1668 """Add the changegroup returned by source.read() to this repo.
1669 srctype is a string like 'push', 'pull', or 'unbundle'. url is
1669 srctype is a string like 'push', 'pull', or 'unbundle'. url is
1670 the URL of the repo where this changegroup is coming from.
1670 the URL of the repo where this changegroup is coming from.
1671 If lock is not None, the function takes ownership of the lock
1671 If lock is not None, the function takes ownership of the lock
1672 and releases it after the changegroup is added.
1672 and releases it after the changegroup is added.
1673
1673
1674 Return an integer summarizing the change to this repo:
1674 Return an integer summarizing the change to this repo:
1675 - nothing changed or no source: 0
1675 - nothing changed or no source: 0
1676 - more heads than before: 1+added heads (2..n)
1676 - more heads than before: 1+added heads (2..n)
1677 - fewer heads than before: -1-removed heads (-2..-n)
1677 - fewer heads than before: -1-removed heads (-2..-n)
1678 - number of heads stays the same: 1
1678 - number of heads stays the same: 1
1679 """
1679 """
1680 def csmap(x):
1680 def csmap(x):
1681 self.ui.debug("add changeset %s\n" % short(x))
1681 self.ui.debug("add changeset %s\n" % short(x))
1682 return len(cl)
1682 return len(cl)
1683
1683
1684 def revmap(x):
1684 def revmap(x):
1685 return cl.rev(x)
1685 return cl.rev(x)
1686
1686
1687 if not source:
1687 if not source:
1688 return 0
1688 return 0
1689
1689
1690 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1690 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1691
1691
1692 changesets = files = revisions = 0
1692 changesets = files = revisions = 0
1693 efiles = set()
1693 efiles = set()
1694
1694
1695 # write changelog data to temp files so concurrent readers will not see
1695 # write changelog data to temp files so concurrent readers will not see
1696 # inconsistent view
1696 # inconsistent view
1697 cl = self.changelog
1697 cl = self.changelog
1698 cl.delayupdate()
1698 cl.delayupdate()
1699 oldheads = cl.heads()
1699 oldheads = cl.heads()
1700
1700
1701 tr = self.transaction("\n".join([srctype, util.hidepassword(url)]))
1701 tr = self.transaction("\n".join([srctype, util.hidepassword(url)]))
1702 try:
1702 try:
1703 trp = weakref.proxy(tr)
1703 trp = weakref.proxy(tr)
1704 # pull off the changeset group
1704 # pull off the changeset group
1705 self.ui.status(_("adding changesets\n"))
1705 self.ui.status(_("adding changesets\n"))
1706 clstart = len(cl)
1706 clstart = len(cl)
1707 class prog(object):
1707 class prog(object):
1708 step = _('changesets')
1708 step = _('changesets')
1709 count = 1
1709 count = 1
1710 ui = self.ui
1710 ui = self.ui
1711 total = None
1711 total = None
1712 def __call__(self):
1712 def __call__(self):
1713 self.ui.progress(self.step, self.count, unit=_('chunks'),
1713 self.ui.progress(self.step, self.count, unit=_('chunks'),
1714 total=self.total)
1714 total=self.total)
1715 self.count += 1
1715 self.count += 1
1716 pr = prog()
1716 pr = prog()
1717 source.callback = pr
1717 source.callback = pr
1718
1718
1719 source.changelogheader()
1719 source.changelogheader()
1720 if (cl.addgroup(source, csmap, trp) is None
1720 if (cl.addgroup(source, csmap, trp) is None
1721 and not emptyok):
1721 and not emptyok):
1722 raise util.Abort(_("received changelog group is empty"))
1722 raise util.Abort(_("received changelog group is empty"))
1723 clend = len(cl)
1723 clend = len(cl)
1724 changesets = clend - clstart
1724 changesets = clend - clstart
1725 for c in xrange(clstart, clend):
1725 for c in xrange(clstart, clend):
1726 efiles.update(self[c].files())
1726 efiles.update(self[c].files())
1727 efiles = len(efiles)
1727 efiles = len(efiles)
1728 self.ui.progress(_('changesets'), None)
1728 self.ui.progress(_('changesets'), None)
1729
1729
1730 # pull off the manifest group
1730 # pull off the manifest group
1731 self.ui.status(_("adding manifests\n"))
1731 self.ui.status(_("adding manifests\n"))
1732 pr.step = _('manifests')
1732 pr.step = _('manifests')
1733 pr.count = 1
1733 pr.count = 1
1734 pr.total = changesets # manifests <= changesets
1734 pr.total = changesets # manifests <= changesets
1735 # no need to check for empty manifest group here:
1735 # no need to check for empty manifest group here:
1736 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1736 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1737 # no new manifest will be created and the manifest group will
1737 # no new manifest will be created and the manifest group will
1738 # be empty during the pull
1738 # be empty during the pull
1739 source.manifestheader()
1739 source.manifestheader()
1740 self.manifest.addgroup(source, revmap, trp)
1740 self.manifest.addgroup(source, revmap, trp)
1741 self.ui.progress(_('manifests'), None)
1741 self.ui.progress(_('manifests'), None)
1742
1742
1743 needfiles = {}
1743 needfiles = {}
1744 if self.ui.configbool('server', 'validate', default=False):
1744 if self.ui.configbool('server', 'validate', default=False):
1745 # validate incoming csets have their manifests
1745 # validate incoming csets have their manifests
1746 for cset in xrange(clstart, clend):
1746 for cset in xrange(clstart, clend):
1747 mfest = self.changelog.read(self.changelog.node(cset))[0]
1747 mfest = self.changelog.read(self.changelog.node(cset))[0]
1748 mfest = self.manifest.readdelta(mfest)
1748 mfest = self.manifest.readdelta(mfest)
1749 # store file nodes we must see
1749 # store file nodes we must see
1750 for f, n in mfest.iteritems():
1750 for f, n in mfest.iteritems():
1751 needfiles.setdefault(f, set()).add(n)
1751 needfiles.setdefault(f, set()).add(n)
1752
1752
1753 # process the files
1753 # process the files
1754 self.ui.status(_("adding file changes\n"))
1754 self.ui.status(_("adding file changes\n"))
1755 pr.step = 'files'
1755 pr.step = 'files'
1756 pr.count = 1
1756 pr.count = 1
1757 pr.total = efiles
1757 pr.total = efiles
1758 source.callback = None
1758 source.callback = None
1759
1759
1760 while 1:
1760 while 1:
1761 chunkdata = source.filelogheader()
1761 chunkdata = source.filelogheader()
1762 if not chunkdata:
1762 if not chunkdata:
1763 break
1763 break
1764 f = chunkdata["filename"]
1764 f = chunkdata["filename"]
1765 self.ui.debug("adding %s revisions\n" % f)
1765 self.ui.debug("adding %s revisions\n" % f)
1766 pr()
1766 pr()
1767 fl = self.file(f)
1767 fl = self.file(f)
1768 o = len(fl)
1768 o = len(fl)
1769 if fl.addgroup(source, revmap, trp) is None:
1769 if fl.addgroup(source, revmap, trp) is None:
1770 raise util.Abort(_("received file revlog group is empty"))
1770 raise util.Abort(_("received file revlog group is empty"))
1771 revisions += len(fl) - o
1771 revisions += len(fl) - o
1772 files += 1
1772 files += 1
1773 if f in needfiles:
1773 if f in needfiles:
1774 needs = needfiles[f]
1774 needs = needfiles[f]
1775 for new in xrange(o, len(fl)):
1775 for new in xrange(o, len(fl)):
1776 n = fl.node(new)
1776 n = fl.node(new)
1777 if n in needs:
1777 if n in needs:
1778 needs.remove(n)
1778 needs.remove(n)
1779 if not needs:
1779 if not needs:
1780 del needfiles[f]
1780 del needfiles[f]
1781 self.ui.progress(_('files'), None)
1781 self.ui.progress(_('files'), None)
1782
1782
1783 for f, needs in needfiles.iteritems():
1783 for f, needs in needfiles.iteritems():
1784 fl = self.file(f)
1784 fl = self.file(f)
1785 for n in needs:
1785 for n in needs:
1786 try:
1786 try:
1787 fl.rev(n)
1787 fl.rev(n)
1788 except error.LookupError:
1788 except error.LookupError:
1789 raise util.Abort(
1789 raise util.Abort(
1790 _('missing file data for %s:%s - run hg verify') %
1790 _('missing file data for %s:%s - run hg verify') %
1791 (f, hex(n)))
1791 (f, hex(n)))
1792
1792
1793 dh = 0
1793 dh = 0
1794 if oldheads:
1794 if oldheads:
1795 heads = cl.heads()
1795 heads = cl.heads()
1796 dh = len(heads) - len(oldheads)
1796 dh = len(heads) - len(oldheads)
1797 for h in heads:
1797 for h in heads:
1798 if h not in oldheads and 'close' in self[h].extra():
1798 if h not in oldheads and 'close' in self[h].extra():
1799 dh -= 1
1799 dh -= 1
1800 htext = ""
1800 htext = ""
1801 if dh:
1801 if dh:
1802 htext = _(" (%+d heads)") % dh
1802 htext = _(" (%+d heads)") % dh
1803
1803
1804 self.ui.status(_("added %d changesets"
1804 self.ui.status(_("added %d changesets"
1805 " with %d changes to %d files%s\n")
1805 " with %d changes to %d files%s\n")
1806 % (changesets, revisions, files, htext))
1806 % (changesets, revisions, files, htext))
1807
1807
1808 if changesets > 0:
1808 if changesets > 0:
1809 p = lambda: cl.writepending() and self.root or ""
1809 p = lambda: cl.writepending() and self.root or ""
1810 self.hook('pretxnchangegroup', throw=True,
1810 self.hook('pretxnchangegroup', throw=True,
1811 node=hex(cl.node(clstart)), source=srctype,
1811 node=hex(cl.node(clstart)), source=srctype,
1812 url=url, pending=p)
1812 url=url, pending=p)
1813
1813
1814 # make changelog see real files again
1814 # make changelog see real files again
1815 cl.finalize(trp)
1815 cl.finalize(trp)
1816
1816
1817 tr.close()
1817 tr.close()
1818 finally:
1818 finally:
1819 tr.release()
1819 tr.release()
1820 if lock:
1820 if lock:
1821 lock.release()
1821 lock.release()
1822
1822
1823 if changesets > 0:
1823 if changesets > 0:
1824 # forcefully update the on-disk branch cache
1824 # forcefully update the on-disk branch cache
1825 self.ui.debug("updating the branch cache\n")
1825 self.ui.debug("updating the branch cache\n")
1826 self.updatebranchcache()
1826 self.updatebranchcache()
1827 self.hook("changegroup", node=hex(cl.node(clstart)),
1827 self.hook("changegroup", node=hex(cl.node(clstart)),
1828 source=srctype, url=url)
1828 source=srctype, url=url)
1829
1829
1830 for i in xrange(clstart, clend):
1830 for i in xrange(clstart, clend):
1831 self.hook("incoming", node=hex(cl.node(i)),
1831 self.hook("incoming", node=hex(cl.node(i)),
1832 source=srctype, url=url)
1832 source=srctype, url=url)
1833
1833
1834 # never return 0 here:
1834 # never return 0 here:
1835 if dh < 0:
1835 if dh < 0:
1836 return dh - 1
1836 return dh - 1
1837 else:
1837 else:
1838 return dh + 1
1838 return dh + 1
1839
1839
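Conversely, a sketch applying a bundle file back through addchangegroup(); the filename and url label are placeholders, and changegroup.readbundle() is assumed to take a file object and a name:

    from mercurial import ui as uimod, hg, changegroup

    repo = hg.repository(uimod.ui(), '.')
    fh = open('all.hg', 'rb')
    cg = changegroup.readbundle(fh, 'all.hg')
    lock = repo.lock()
    # addchangegroup() takes ownership of the lock and releases it when done
    ret = repo.addchangegroup(cg, 'unbundle', 'bundle:all.hg', lock=lock)
    print "head-count code:", ret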
1840 def stream_in(self, remote, requirements):
1840 def stream_in(self, remote, requirements):
1841 lock = self.lock()
1841 lock = self.lock()
1842 try:
1842 try:
1843 fp = remote.stream_out()
1843 fp = remote.stream_out()
1844 l = fp.readline()
1844 l = fp.readline()
1845 try:
1845 try:
1846 resp = int(l)
1846 resp = int(l)
1847 except ValueError:
1847 except ValueError:
1848 raise error.ResponseError(
1848 raise error.ResponseError(
1849 _('Unexpected response from remote server:'), l)
1849 _('Unexpected response from remote server:'), l)
1850 if resp == 1:
1850 if resp == 1:
1851 raise util.Abort(_('operation forbidden by server'))
1851 raise util.Abort(_('operation forbidden by server'))
1852 elif resp == 2:
1852 elif resp == 2:
1853 raise util.Abort(_('locking the remote repository failed'))
1853 raise util.Abort(_('locking the remote repository failed'))
1854 elif resp != 0:
1854 elif resp != 0:
1855 raise util.Abort(_('the server sent an unknown error code'))
1855 raise util.Abort(_('the server sent an unknown error code'))
1856 self.ui.status(_('streaming all changes\n'))
1856 self.ui.status(_('streaming all changes\n'))
1857 l = fp.readline()
1857 l = fp.readline()
1858 try:
1858 try:
1859 total_files, total_bytes = map(int, l.split(' ', 1))
1859 total_files, total_bytes = map(int, l.split(' ', 1))
1860 except (ValueError, TypeError):
1860 except (ValueError, TypeError):
1861 raise error.ResponseError(
1861 raise error.ResponseError(
1862 _('Unexpected response from remote server:'), l)
1862 _('Unexpected response from remote server:'), l)
1863 self.ui.status(_('%d files to transfer, %s of data\n') %
1863 self.ui.status(_('%d files to transfer, %s of data\n') %
1864 (total_files, util.bytecount(total_bytes)))
1864 (total_files, util.bytecount(total_bytes)))
1865 start = time.time()
1865 start = time.time()
1866 for i in xrange(total_files):
1866 for i in xrange(total_files):
1867 # XXX doesn't support '\n' or '\r' in filenames
1867 # XXX doesn't support '\n' or '\r' in filenames
1868 l = fp.readline()
1868 l = fp.readline()
1869 try:
1869 try:
1870 name, size = l.split('\0', 1)
1870 name, size = l.split('\0', 1)
1871 size = int(size)
1871 size = int(size)
1872 except (ValueError, TypeError):
1872 except (ValueError, TypeError):
1873 raise error.ResponseError(
1873 raise error.ResponseError(
1874 _('Unexpected response from remote server:'), l)
1874 _('Unexpected response from remote server:'), l)
1875 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1875 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1876 # for backwards compat, name was partially encoded
1876 # for backwards compat, name was partially encoded
1877 ofp = self.sopener(store.decodedir(name), 'w')
1877 ofp = self.sopener(store.decodedir(name), 'w')
1878 for chunk in util.filechunkiter(fp, limit=size):
1878 for chunk in util.filechunkiter(fp, limit=size):
1879 ofp.write(chunk)
1879 ofp.write(chunk)
1880 ofp.close()
1880 ofp.close()
1881 elapsed = time.time() - start
1881 elapsed = time.time() - start
1882 if elapsed <= 0:
1882 if elapsed <= 0:
1883 elapsed = 0.001
1883 elapsed = 0.001
1884 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1884 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1885 (util.bytecount(total_bytes), elapsed,
1885 (util.bytecount(total_bytes), elapsed,
1886 util.bytecount(total_bytes / elapsed)))
1886 util.bytecount(total_bytes / elapsed)))
1887
1887
1888 # new requirements = old non-format requirements + new format-related
1888 # new requirements = old non-format requirements + new format-related
1889 # requirements from the streamed-in repository
1889 # requirements from the streamed-in repository
1890 requirements.update(set(self.requirements) - self.supportedformats)
1890 requirements.update(set(self.requirements) - self.supportedformats)
1891 self._applyrequirements(requirements)
1891 self._applyrequirements(requirements)
1892 self._writerequirements()
1892 self._writerequirements()
1893
1893
1894 self.invalidate()
1894 self.invalidate()
1895 return len(self.heads()) + 1
1895 return len(self.heads()) + 1
1896 finally:
1896 finally:
1897 lock.release()
1897 lock.release()
1898
1898
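The stream format parsed above is line oriented: a status code, then a '<files> <bytes>' line, then one '<name>\0<size>' header per file followed by that many bytes of raw store data. A standalone sketch of just the header parsing:

    def parse_stream_header(fp):
        # 0 = ok, 1 = operation forbidden, 2 = could not lock the remote repo
        resp = int(fp.readline())
        if resp != 0:
            raise ValueError("streaming refused with code %d" % resp)
        total_files, total_bytes = map(int, fp.readline().split(' ', 1))
        return total_files, total_bytes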
1899 def clone(self, remote, heads=[], stream=False):
1899 def clone(self, remote, heads=[], stream=False):
1900 '''clone remote repository.
1900 '''clone remote repository.
1901
1901
1902 keyword arguments:
1902 keyword arguments:
1903 heads: list of revs to clone (forces use of pull)
1903 heads: list of revs to clone (forces use of pull)
1904 stream: use streaming clone if possible'''
1904 stream: use streaming clone if possible'''
1905
1905
1906 # now, all clients that can request uncompressed clones can
1906 # now, all clients that can request uncompressed clones can
1907 # read repo formats supported by all servers that can serve
1907 # read repo formats supported by all servers that can serve
1908 # them.
1908 # them.
1909
1909
1910 # if revlog format changes, client will have to check version
1910 # if revlog format changes, client will have to check version
1911 # and format flags on "stream" capability, and use
1911 # and format flags on "stream" capability, and use
1912 # uncompressed only if compatible.
1912 # uncompressed only if compatible.
1913
1913
1914 if stream and not heads:
1914 if stream and not heads:
1915 # 'stream' means remote revlog format is revlogv1 only
1915 # 'stream' means remote revlog format is revlogv1 only
1916 if remote.capable('stream'):
1916 if remote.capable('stream'):
1917 return self.stream_in(remote, set(('revlogv1',)))
1917 return self.stream_in(remote, set(('revlogv1',)))
1918 # otherwise, 'streamreqs' contains the remote revlog format
1918 # otherwise, 'streamreqs' contains the remote revlog format
1919 streamreqs = remote.capable('streamreqs')
1919 streamreqs = remote.capable('streamreqs')
1920 if streamreqs:
1920 if streamreqs:
1921 streamreqs = set(streamreqs.split(','))
1921 streamreqs = set(streamreqs.split(','))
1922 # if we support it, stream in and adjust our requirements
1922 # if we support it, stream in and adjust our requirements
1923 if not streamreqs - self.supportedformats:
1923 if not streamreqs - self.supportedformats:
1924 return self.stream_in(remote, streamreqs)
1924 return self.stream_in(remote, streamreqs)
1925 return self.pull(remote, heads)
1925 return self.pull(remote, heads)
1926
1926
1927 def pushkey(self, namespace, key, old, new):
1927 def pushkey(self, namespace, key, old, new):
1928 self.hook('prepushkey', throw=True, namespace=namespace, key=key,
1928 self.hook('prepushkey', throw=True, namespace=namespace, key=key,
1929 old=old, new=new)
1929 old=old, new=new)
1930 ret = pushkey.push(self, namespace, key, old, new)
1930 ret = pushkey.push(self, namespace, key, old, new)
1931 self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
1931 self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
1932 ret=ret)
1932 ret=ret)
1933 return ret
1933 return ret
1934
1934
1935 def listkeys(self, namespace):
1935 def listkeys(self, namespace):
1936 self.hook('prelistkeys', throw=True, namespace=namespace)
1936 self.hook('prelistkeys', throw=True, namespace=namespace)
1937 values = pushkey.list(self, namespace)
1937 values = pushkey.list(self, namespace)
1938 self.hook('listkeys', namespace=namespace, values=values)
1938 self.hook('listkeys', namespace=namespace, values=values)
1939 return values
1939 return values
1940
1940
1941 def debugwireargs(self, one, two, three=None, four=None, five=None):
1941 def debugwireargs(self, one, two, three=None, four=None, five=None):
1942 '''used to test argument passing over the wire'''
1942 '''used to test argument passing over the wire'''
1943 return "%s %s %s %s %s" % (one, two, three, four, five)
1943 return "%s %s %s %s %s" % (one, two, three, four, five)
1944
1944
1945 # used to avoid circular references so destructors work
1945 # used to avoid circular references so destructors work
1946 def aftertrans(files):
1946 def aftertrans(files):
1947 renamefiles = [tuple(t) for t in files]
1947 renamefiles = [tuple(t) for t in files]
1948 def a():
1948 def a():
1949 for src, dest in renamefiles:
1949 for src, dest in renamefiles:
1950 util.rename(src, dest)
1950 util.rename(src, dest)
1951 return a
1951 return a
1952
1952
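aftertrans() only captures the rename list in a closure (avoiding circular references, per the comment above) so it can be run once the transaction completes; a toy illustration with throwaway paths:

    from mercurial import localrepo

    open('journal.tmp', 'w').write('x')
    cb = localrepo.aftertrans([('journal.tmp', 'journal')])
    cb()    # performs util.rename('journal.tmp', 'journal')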
1953 def instance(ui, path, create):
1953 def instance(ui, path, create):
1954 return localrepository(ui, util.localpath(path), create)
1954 return localrepository(ui, util.localpath(path), create)
1955
1955
1956 def islocal(path):
1956 def islocal(path):
1957 return True
1957 return True
@@ -1,233 +1,233 b''
1 # mail.py - mail sending bits for mercurial
1 # mail.py - mail sending bits for mercurial
2 #
2 #
3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 import util, encoding
9 import util, encoding
10 import os, smtplib, socket, quopri
10 import os, smtplib, socket, quopri
11 import email.Header, email.MIMEText, email.Utils
11 import email.Header, email.MIMEText, email.Utils
12
12
13 _oldheaderinit = email.Header.Header.__init__
13 _oldheaderinit = email.Header.Header.__init__
14 def _unifiedheaderinit(self, *args, **kw):
14 def _unifiedheaderinit(self, *args, **kw):
15 """
15 """
16 Python 2.7 introduces a backwards incompatible change
16 Python 2.7 introduces a backwards incompatible change
17 (Python issue1974, r70772) in email.Generator.Generator code:
17 (Python issue1974, r70772) in email.Generator.Generator code:
18 pre-2.7 code passed "continuation_ws='\t'" to the Header
18 pre-2.7 code passed "continuation_ws='\t'" to the Header
19 constructor, and 2.7 removed this parameter.
19 constructor, and 2.7 removed this parameter.
20
20
21 Default argument is continuation_ws=' ', which means that the
21 Default argument is continuation_ws=' ', which means that the
22 behaviour is different in <2.7 and 2.7
22 behaviour is different in <2.7 and 2.7
23
23
24 We consider the 2.7 behaviour to be preferable, but need
24 We consider the 2.7 behaviour to be preferable, but need
25 to have a unified behaviour for versions 2.4 to 2.7
25 to have a unified behaviour for versions 2.4 to 2.7
26 """
26 """
27 # override continuation_ws
27 # override continuation_ws
28 kw['continuation_ws'] = ' '
28 kw['continuation_ws'] = ' '
29 _oldheaderinit(self, *args, **kw)
29 _oldheaderinit(self, *args, **kw)
30
30
31 email.Header.Header.__dict__['__init__'] = _unifiedheaderinit
31 email.Header.Header.__dict__['__init__'] = _unifiedheaderinit
32
32
33 def _smtp(ui):
33 def _smtp(ui):
34 '''build an smtp connection and return a function to send mail'''
34 '''build an smtp connection and return a function to send mail'''
35 local_hostname = ui.config('smtp', 'local_hostname')
35 local_hostname = ui.config('smtp', 'local_hostname')
36 tls = ui.config('smtp', 'tls', 'none')
36 tls = ui.config('smtp', 'tls', 'none')
37 # backward compatible: when tls = true, we use starttls.
37 # backward compatible: when tls = true, we use starttls.
38 starttls = tls == 'starttls' or util.parsebool(tls)
38 starttls = tls == 'starttls' or util.parsebool(tls)
39 smtps = tls == 'smtps'
39 smtps = tls == 'smtps'
40 if (starttls or smtps) and not hasattr(socket, 'ssl'):
40 if (starttls or smtps) and not hasattr(socket, 'ssl'):
41 raise util.Abort(_("can't use TLS: Python SSL support not installed"))
41 raise util.Abort(_("can't use TLS: Python SSL support not installed"))
42 if smtps:
42 if smtps:
43 ui.note(_('(using smtps)\n'))
43 ui.note(_('(using smtps)\n'))
44 s = smtplib.SMTP_SSL(local_hostname=local_hostname)
44 s = smtplib.SMTP_SSL(local_hostname=local_hostname)
45 else:
45 else:
46 s = smtplib.SMTP(local_hostname=local_hostname)
46 s = smtplib.SMTP(local_hostname=local_hostname)
47 mailhost = ui.config('smtp', 'host')
47 mailhost = ui.config('smtp', 'host')
48 if not mailhost:
48 if not mailhost:
49 raise util.Abort(_('smtp.host not configured - cannot send mail'))
49 raise util.Abort(_('smtp.host not configured - cannot send mail'))
50 mailport = util.getport(ui.config('smtp', 'port', 25))
50 mailport = util.getport(ui.config('smtp', 'port', 25))
51 ui.note(_('sending mail: smtp host %s, port %s\n') %
51 ui.note(_('sending mail: smtp host %s, port %s\n') %
52 (mailhost, mailport))
52 (mailhost, mailport))
53 s.connect(host=mailhost, port=mailport)
53 s.connect(host=mailhost, port=mailport)
54 if starttls:
54 if starttls:
55 ui.note(_('(using starttls)\n'))
55 ui.note(_('(using starttls)\n'))
56 s.ehlo()
56 s.ehlo()
57 s.starttls()
57 s.starttls()
58 s.ehlo()
58 s.ehlo()
59 username = ui.config('smtp', 'username')
59 username = ui.config('smtp', 'username')
60 password = ui.config('smtp', 'password')
60 password = ui.config('smtp', 'password')
61 if username and not password:
61 if username and not password:
62 password = ui.getpass()
62 password = ui.getpass()
63 if username and password:
63 if username and password:
64 ui.note(_('(authenticating to mail server as %s)\n') %
64 ui.note(_('(authenticating to mail server as %s)\n') %
65 (username))
65 (username))
66 try:
66 try:
67 s.login(username, password)
67 s.login(username, password)
68 except smtplib.SMTPException, inst:
68 except smtplib.SMTPException, inst:
69 raise util.Abort(inst)
69 raise util.Abort(inst)
70
70
71 def send(sender, recipients, msg):
71 def send(sender, recipients, msg):
72 try:
72 try:
73 return s.sendmail(sender, recipients, msg)
73 return s.sendmail(sender, recipients, msg)
74 except smtplib.SMTPRecipientsRefused, inst:
74 except smtplib.SMTPRecipientsRefused, inst:
75 recipients = [r[1] for r in inst.recipients.values()]
75 recipients = [r[1] for r in inst.recipients.values()]
76 raise util.Abort('\n' + '\n'.join(recipients))
76 raise util.Abort('\n' + '\n'.join(recipients))
77 except smtplib.SMTPException, inst:
77 except smtplib.SMTPException, inst:
78 raise util.Abort(inst)
78 raise util.Abort(inst)
79
79
80 return send
80 return send
81
81
82 def _sendmail(ui, sender, recipients, msg):
82 def _sendmail(ui, sender, recipients, msg):
83 '''send mail using sendmail.'''
83 '''send mail using sendmail.'''
84 program = ui.config('email', 'method')
84 program = ui.config('email', 'method')
85 cmdline = '%s -f %s %s' % (program, util.email(sender),
85 cmdline = '%s -f %s %s' % (program, util.email(sender),
86 ' '.join(map(util.email, recipients)))
86 ' '.join(map(util.email, recipients)))
87 ui.note(_('sending mail: %s\n') % cmdline)
87 ui.note(_('sending mail: %s\n') % cmdline)
88 fp = util.popen(cmdline, 'w')
88 fp = util.popen(cmdline, 'w')
89 fp.write(msg)
89 fp.write(msg)
90 ret = fp.close()
90 ret = fp.close()
91 if ret:
91 if ret:
92 raise util.Abort('%s %s' % (
92 raise util.Abort('%s %s' % (
93 os.path.basename(program.split(None, 1)[0]),
93 os.path.basename(program.split(None, 1)[0]),
94 util.explain_exit(ret)[0]))
94 util.explainexit(ret)[0]))
95
95
96 def connect(ui):
96 def connect(ui):
97 '''make a mail connection. return a function to send mail.
97 '''make a mail connection. return a function to send mail.
98 call as sendmail(sender, list-of-recipients, msg).'''
98 call as sendmail(sender, list-of-recipients, msg).'''
99 if ui.config('email', 'method', 'smtp') == 'smtp':
99 if ui.config('email', 'method', 'smtp') == 'smtp':
100 return _smtp(ui)
100 return _smtp(ui)
101 return lambda s, r, m: _sendmail(ui, s, r, m)
101 return lambda s, r, m: _sendmail(ui, s, r, m)
102
102
103 def sendmail(ui, sender, recipients, msg):
103 def sendmail(ui, sender, recipients, msg):
104 send = connect(ui)
104 send = connect(ui)
105 return send(sender, recipients, msg)
105 return send(sender, recipients, msg)
106
106
107 def validateconfig(ui):
107 def validateconfig(ui):
108 '''determine if we have enough config data to try sending email.'''
108 '''determine if we have enough config data to try sending email.'''
109 method = ui.config('email', 'method', 'smtp')
109 method = ui.config('email', 'method', 'smtp')
110 if method == 'smtp':
110 if method == 'smtp':
111 if not ui.config('smtp', 'host'):
111 if not ui.config('smtp', 'host'):
112 raise util.Abort(_('smtp specified as email transport, '
112 raise util.Abort(_('smtp specified as email transport, '
113 'but no smtp host configured'))
113 'but no smtp host configured'))
114 else:
114 else:
115 if not util.find_exe(method):
115 if not util.find_exe(method):
116 raise util.Abort(_('%r specified as email transport, '
116 raise util.Abort(_('%r specified as email transport, '
117 'but not in PATH') % method)
117 'but not in PATH') % method)
118
118
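For reference, a minimal hgrc sketch covering the options read above (validateconfig and _smtp use email.method plus the [smtp] keys); the host, account and values below are placeholders, not anything from this changeset:

    [email]
    method = smtp

    [smtp]
    host = mail.example.com
    port = 25
    # 'starttls', 'smtps', or a boolean for backward compatibility
    tls = starttls
    username = hguser
    # password = secret   (prompted for interactively if omitted)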
119 def mimetextpatch(s, subtype='plain', display=False):
119 def mimetextpatch(s, subtype='plain', display=False):
120 '''If patch in utf-8 transfer-encode it.'''
120 '''If patch in utf-8 transfer-encode it.'''
121
121
122 enc = None
122 enc = None
123 for line in s.splitlines():
123 for line in s.splitlines():
124 if len(line) > 950:
124 if len(line) > 950:
125 s = quopri.encodestring(s)
125 s = quopri.encodestring(s)
126 enc = "quoted-printable"
126 enc = "quoted-printable"
127 break
127 break
128
128
129 cs = 'us-ascii'
129 cs = 'us-ascii'
130 if not display:
130 if not display:
131 try:
131 try:
132 s.decode('us-ascii')
132 s.decode('us-ascii')
133 except UnicodeDecodeError:
133 except UnicodeDecodeError:
134 try:
134 try:
135 s.decode('utf-8')
135 s.decode('utf-8')
136 cs = 'utf-8'
136 cs = 'utf-8'
137 except UnicodeDecodeError:
137 except UnicodeDecodeError:
138 # We'll go with us-ascii as a fallback.
138 # We'll go with us-ascii as a fallback.
139 pass
139 pass
140
140
141 msg = email.MIMEText.MIMEText(s, subtype, cs)
141 msg = email.MIMEText.MIMEText(s, subtype, cs)
142 if enc:
142 if enc:
143 del msg['Content-Transfer-Encoding']
143 del msg['Content-Transfer-Encoding']
144 msg['Content-Transfer-Encoding'] = enc
144 msg['Content-Transfer-Encoding'] = enc
145 return msg
145 return msg
146
146
147 def _charsets(ui):
147 def _charsets(ui):
148 '''Obtains charsets to send mail parts not containing patches.'''
148 '''Obtains charsets to send mail parts not containing patches.'''
149 charsets = [cs.lower() for cs in ui.configlist('email', 'charsets')]
149 charsets = [cs.lower() for cs in ui.configlist('email', 'charsets')]
150 fallbacks = [encoding.fallbackencoding.lower(),
150 fallbacks = [encoding.fallbackencoding.lower(),
151 encoding.encoding.lower(), 'utf-8']
151 encoding.encoding.lower(), 'utf-8']
152 for cs in fallbacks: # find unique charsets while keeping order
152 for cs in fallbacks: # find unique charsets while keeping order
153 if cs not in charsets:
153 if cs not in charsets:
154 charsets.append(cs)
154 charsets.append(cs)
155 return [cs for cs in charsets if not cs.endswith('ascii')]
155 return [cs for cs in charsets if not cs.endswith('ascii')]
156
156
157 def _encode(ui, s, charsets):
157 def _encode(ui, s, charsets):
158 '''Returns (converted) string, charset tuple.
158 '''Returns (converted) string, charset tuple.
159 Finds out best charset by cycling through sendcharsets in descending
159 Finds out best charset by cycling through sendcharsets in descending
160 order. Tries both encoding and fallbackencoding for input. Only as
160 order. Tries both encoding and fallbackencoding for input. Only as
161 last resort send as is in fake ascii.
161 last resort send as is in fake ascii.
162 Caveat: Do not use for mail parts containing patches!'''
162 Caveat: Do not use for mail parts containing patches!'''
163 try:
163 try:
164 s.decode('ascii')
164 s.decode('ascii')
165 except UnicodeDecodeError:
165 except UnicodeDecodeError:
166 sendcharsets = charsets or _charsets(ui)
166 sendcharsets = charsets or _charsets(ui)
167 for ics in (encoding.encoding, encoding.fallbackencoding):
167 for ics in (encoding.encoding, encoding.fallbackencoding):
168 try:
168 try:
169 u = s.decode(ics)
169 u = s.decode(ics)
170 except UnicodeDecodeError:
170 except UnicodeDecodeError:
171 continue
171 continue
172 for ocs in sendcharsets:
172 for ocs in sendcharsets:
173 try:
173 try:
174 return u.encode(ocs), ocs
174 return u.encode(ocs), ocs
175 except UnicodeEncodeError:
175 except UnicodeEncodeError:
176 pass
176 pass
177 except LookupError:
177 except LookupError:
178 ui.warn(_('ignoring invalid sendcharset: %s\n') % ocs)
178 ui.warn(_('ignoring invalid sendcharset: %s\n') % ocs)
179 # if ascii, or all conversion attempts fail, send (broken) ascii
179 # if ascii, or all conversion attempts fail, send (broken) ascii
180 return s, 'us-ascii'
180 return s, 'us-ascii'
181
181
182 def headencode(ui, s, charsets=None, display=False):
182 def headencode(ui, s, charsets=None, display=False):
183 '''Returns RFC-2047 compliant header from given string.'''
183 '''Returns RFC-2047 compliant header from given string.'''
184 if not display:
184 if not display:
185 # split into words?
185 # split into words?
186 s, cs = _encode(ui, s, charsets)
186 s, cs = _encode(ui, s, charsets)
187 return str(email.Header.Header(s, cs))
187 return str(email.Header.Header(s, cs))
188 return s
188 return s
189
189
190 def _addressencode(ui, name, addr, charsets=None):
190 def _addressencode(ui, name, addr, charsets=None):
191 name = headencode(ui, name, charsets)
191 name = headencode(ui, name, charsets)
192 try:
192 try:
193 acc, dom = addr.split('@')
193 acc, dom = addr.split('@')
194 acc = acc.encode('ascii')
194 acc = acc.encode('ascii')
195 dom = dom.decode(encoding.encoding).encode('idna')
195 dom = dom.decode(encoding.encoding).encode('idna')
196 addr = '%s@%s' % (acc, dom)
196 addr = '%s@%s' % (acc, dom)
197 except UnicodeDecodeError:
197 except UnicodeDecodeError:
198 raise util.Abort(_('invalid email address: %s') % addr)
198 raise util.Abort(_('invalid email address: %s') % addr)
199 except ValueError:
199 except ValueError:
200 try:
200 try:
201 # too strict?
201 # too strict?
202 addr = addr.encode('ascii')
202 addr = addr.encode('ascii')
203 except UnicodeDecodeError:
203 except UnicodeDecodeError:
204 raise util.Abort(_('invalid local address: %s') % addr)
204 raise util.Abort(_('invalid local address: %s') % addr)
205 return email.Utils.formataddr((name, addr))
205 return email.Utils.formataddr((name, addr))
206
206
207 def addressencode(ui, address, charsets=None, display=False):
207 def addressencode(ui, address, charsets=None, display=False):
208 '''Turns address into RFC-2047 compliant header.'''
208 '''Turns address into RFC-2047 compliant header.'''
209 if display or not address:
209 if display or not address:
210 return address or ''
210 return address or ''
211 name, addr = email.Utils.parseaddr(address)
211 name, addr = email.Utils.parseaddr(address)
212 return _addressencode(ui, name, addr, charsets)
212 return _addressencode(ui, name, addr, charsets)
213
213
214 def addrlistencode(ui, addrs, charsets=None, display=False):
214 def addrlistencode(ui, addrs, charsets=None, display=False):
215 '''Turns a list of addresses into a list of RFC-2047 compliant headers.
215 '''Turns a list of addresses into a list of RFC-2047 compliant headers.
216 A single element of input list may contain multiple addresses, but output
216 A single element of input list may contain multiple addresses, but output
217 always has one address per item'''
217 always has one address per item'''
218 if display:
218 if display:
219 return [a.strip() for a in addrs if a.strip()]
219 return [a.strip() for a in addrs if a.strip()]
220
220
221 result = []
221 result = []
222 for name, addr in email.Utils.getaddresses(addrs):
222 for name, addr in email.Utils.getaddresses(addrs):
223 if name or addr:
223 if name or addr:
224 result.append(_addressencode(ui, name, addr, charsets))
224 result.append(_addressencode(ui, name, addr, charsets))
225 return result
225 return result
226
226
227 def mimeencode(ui, s, charsets=None, display=False):
227 def mimeencode(ui, s, charsets=None, display=False):
228 '''creates mime text object, encodes it if needed, and sets
228 '''creates mime text object, encodes it if needed, and sets
229 charset and transfer-encoding accordingly.'''
229 charset and transfer-encoding accordingly.'''
230 cs = 'us-ascii'
230 cs = 'us-ascii'
231 if not display:
231 if not display:
232 s, cs = _encode(ui, s, charsets)
232 s, cs = _encode(ui, s, charsets)
233 return email.MIMEText.MIMEText(s, 'plain', cs)
233 return email.MIMEText.MIMEText(s, 'plain', cs)
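Taken together, a hedged sketch (Python 2, matching this changeset) of how an extension might drive the helpers above; 'ui' is assumed to be a Mercurial ui object with [email]/[smtp] configured, and send_note is a hypothetical helper, not part of the module:

    from mercurial import mail

    def send_note(ui, sender, recipients, subject, body):
        # bail out early if neither smtp nor a sendmail program is usable
        mail.validateconfig(ui)
        msg = mail.mimeencode(ui, body)                # text/plain, charset negotiated
        msg['Subject'] = mail.headencode(ui, subject)  # RFC 2047 encoded header
        msg['From'] = mail.addressencode(ui, sender)
        msg['To'] = ', '.join(mail.addrlistencode(ui, recipients))
        # connect() behind sendmail() picks smtp or the external program
        # according to email.method
        return mail.sendmail(ui, sender, recipients, msg.as_string())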
@@ -1,561 +1,561 b''
1 # merge.py - directory-level update/merge handling for Mercurial
1 # merge.py - directory-level update/merge handling for Mercurial
2 #
2 #
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import nullid, nullrev, hex, bin
8 from node import nullid, nullrev, hex, bin
9 from i18n import _
9 from i18n import _
10 import scmutil, util, filemerge, copies, subrepo
10 import scmutil, util, filemerge, copies, subrepo
11 import errno, os, shutil
11 import errno, os, shutil
12
12
13 class mergestate(object):
13 class mergestate(object):
14 '''track 3-way merge state of individual files'''
14 '''track 3-way merge state of individual files'''
15 def __init__(self, repo):
15 def __init__(self, repo):
16 self._repo = repo
16 self._repo = repo
17 self._dirty = False
17 self._dirty = False
18 self._read()
18 self._read()
19 def reset(self, node=None):
19 def reset(self, node=None):
20 self._state = {}
20 self._state = {}
21 if node:
21 if node:
22 self._local = node
22 self._local = node
23 shutil.rmtree(self._repo.join("merge"), True)
23 shutil.rmtree(self._repo.join("merge"), True)
24 self._dirty = False
24 self._dirty = False
25 def _read(self):
25 def _read(self):
26 self._state = {}
26 self._state = {}
27 try:
27 try:
28 f = self._repo.opener("merge/state")
28 f = self._repo.opener("merge/state")
29 for i, l in enumerate(f):
29 for i, l in enumerate(f):
30 if i == 0:
30 if i == 0:
31 self._local = bin(l[:-1])
31 self._local = bin(l[:-1])
32 else:
32 else:
33 bits = l[:-1].split("\0")
33 bits = l[:-1].split("\0")
34 self._state[bits[0]] = bits[1:]
34 self._state[bits[0]] = bits[1:]
35 f.close()
35 f.close()
36 except IOError, err:
36 except IOError, err:
37 if err.errno != errno.ENOENT:
37 if err.errno != errno.ENOENT:
38 raise
38 raise
39 self._dirty = False
39 self._dirty = False
40 def commit(self):
40 def commit(self):
41 if self._dirty:
41 if self._dirty:
42 f = self._repo.opener("merge/state", "w")
42 f = self._repo.opener("merge/state", "w")
43 f.write(hex(self._local) + "\n")
43 f.write(hex(self._local) + "\n")
44 for d, v in self._state.iteritems():
44 for d, v in self._state.iteritems():
45 f.write("\0".join([d] + v) + "\n")
45 f.write("\0".join([d] + v) + "\n")
46 f.close()
46 f.close()
47 self._dirty = False
47 self._dirty = False
48 def add(self, fcl, fco, fca, fd, flags):
48 def add(self, fcl, fco, fca, fd, flags):
49 hash = util.sha1(fcl.path()).hexdigest()
49 hash = util.sha1(fcl.path()).hexdigest()
50 self._repo.opener.write("merge/" + hash, fcl.data())
50 self._repo.opener.write("merge/" + hash, fcl.data())
51 self._state[fd] = ['u', hash, fcl.path(), fca.path(),
51 self._state[fd] = ['u', hash, fcl.path(), fca.path(),
52 hex(fca.filenode()), fco.path(), flags]
52 hex(fca.filenode()), fco.path(), flags]
53 self._dirty = True
53 self._dirty = True
54 def __contains__(self, dfile):
54 def __contains__(self, dfile):
55 return dfile in self._state
55 return dfile in self._state
56 def __getitem__(self, dfile):
56 def __getitem__(self, dfile):
57 return self._state[dfile][0]
57 return self._state[dfile][0]
58 def __iter__(self):
58 def __iter__(self):
59 l = self._state.keys()
59 l = self._state.keys()
60 l.sort()
60 l.sort()
61 for f in l:
61 for f in l:
62 yield f
62 yield f
63 def mark(self, dfile, state):
63 def mark(self, dfile, state):
64 self._state[dfile][0] = state
64 self._state[dfile][0] = state
65 self._dirty = True
65 self._dirty = True
66 def resolve(self, dfile, wctx, octx):
66 def resolve(self, dfile, wctx, octx):
67 if self[dfile] == 'r':
67 if self[dfile] == 'r':
68 return 0
68 return 0
69 state, hash, lfile, afile, anode, ofile, flags = self._state[dfile]
69 state, hash, lfile, afile, anode, ofile, flags = self._state[dfile]
70 f = self._repo.opener("merge/" + hash)
70 f = self._repo.opener("merge/" + hash)
71 self._repo.wwrite(dfile, f.read(), flags)
71 self._repo.wwrite(dfile, f.read(), flags)
72 f.close()
72 f.close()
73 fcd = wctx[dfile]
73 fcd = wctx[dfile]
74 fco = octx[ofile]
74 fco = octx[ofile]
75 fca = self._repo.filectx(afile, fileid=anode)
75 fca = self._repo.filectx(afile, fileid=anode)
76 r = filemerge.filemerge(self._repo, self._local, lfile, fcd, fco, fca)
76 r = filemerge.filemerge(self._repo, self._local, lfile, fcd, fco, fca)
77 if r is None:
77 if r is None:
78 # no real conflict
78 # no real conflict
79 del self._state[dfile]
79 del self._state[dfile]
80 elif not r:
80 elif not r:
81 self.mark(dfile, 'r')
81 self.mark(dfile, 'r')
82 return r
82 return r
83
83
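As a hedged reading of _read(), commit() and add() above, the on-disk merge state looks roughly like this (field layout inferred from the code, not from separate documentation):

    .hg/merge/state
        line 1:            hex(node of the local parent)
        following lines:   dfile \0 state \0 hash \0 local path \0
                           ancestor path \0 hex(ancestor filenode) \0
                           other path \0 flags
    state is 'u' (unresolved) or 'r' (resolved); hash is sha1(local path)
    and names the pre-merge copy of the local file kept as .hg/merge/<hash>.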
84 def _checkunknown(wctx, mctx):
84 def _checkunknown(wctx, mctx):
85 "check for collisions between unknown files and files in mctx"
85 "check for collisions between unknown files and files in mctx"
86 for f in wctx.unknown():
86 for f in wctx.unknown():
87 if f in mctx and mctx[f].cmp(wctx[f]):
87 if f in mctx and mctx[f].cmp(wctx[f]):
88 raise util.Abort(_("untracked file in working directory differs"
88 raise util.Abort(_("untracked file in working directory differs"
89 " from file in requested revision: '%s'") % f)
89 " from file in requested revision: '%s'") % f)
90
90
91 def _checkcollision(mctx):
91 def _checkcollision(mctx):
92 "check for case folding collisions in the destination context"
92 "check for case folding collisions in the destination context"
93 folded = {}
93 folded = {}
94 for fn in mctx:
94 for fn in mctx:
95 fold = fn.lower()
95 fold = fn.lower()
96 if fold in folded:
96 if fold in folded:
97 raise util.Abort(_("case-folding collision between %s and %s")
97 raise util.Abort(_("case-folding collision between %s and %s")
98 % (fn, folded[fold]))
98 % (fn, folded[fold]))
99 folded[fold] = fn
99 folded[fold] = fn
100
100
101 def _forgetremoved(wctx, mctx, branchmerge):
101 def _forgetremoved(wctx, mctx, branchmerge):
102 """
102 """
103 Forget removed files
103 Forget removed files
104
104
105 If we're jumping between revisions (as opposed to merging), and if
105 If we're jumping between revisions (as opposed to merging), and if
106 neither the working directory nor the target rev has the file,
106 neither the working directory nor the target rev has the file,
107 then we need to remove it from the dirstate, to prevent the
107 then we need to remove it from the dirstate, to prevent the
108 dirstate from listing the file when it is no longer in the
108 dirstate from listing the file when it is no longer in the
109 manifest.
109 manifest.
110
110
111 If we're merging, and the other revision has removed a file
111 If we're merging, and the other revision has removed a file
112 that is not present in the working directory, we need to mark it
112 that is not present in the working directory, we need to mark it
113 as removed.
113 as removed.
114 """
114 """
115
115
116 action = []
116 action = []
117 state = branchmerge and 'r' or 'f'
117 state = branchmerge and 'r' or 'f'
118 for f in wctx.deleted():
118 for f in wctx.deleted():
119 if f not in mctx:
119 if f not in mctx:
120 action.append((f, state))
120 action.append((f, state))
121
121
122 if not branchmerge:
122 if not branchmerge:
123 for f in wctx.removed():
123 for f in wctx.removed():
124 if f not in mctx:
124 if f not in mctx:
125 action.append((f, "f"))
125 action.append((f, "f"))
126
126
127 return action
127 return action
128
128
129 def manifestmerge(repo, p1, p2, pa, overwrite, partial):
129 def manifestmerge(repo, p1, p2, pa, overwrite, partial):
130 """
130 """
131 Merge p1 and p2 with ancestor pa and generate merge action list
131 Merge p1 and p2 with ancestor pa and generate merge action list
132
132
133 overwrite = whether we clobber working files
133 overwrite = whether we clobber working files
134 partial = function to filter file lists
134 partial = function to filter file lists
135 """
135 """
136
136
137 def fmerge(f, f2, fa):
137 def fmerge(f, f2, fa):
138 """merge flags"""
138 """merge flags"""
139 a, m, n = ma.flags(fa), m1.flags(f), m2.flags(f2)
139 a, m, n = ma.flags(fa), m1.flags(f), m2.flags(f2)
140 if m == n: # flags agree
140 if m == n: # flags agree
141 return m # unchanged
141 return m # unchanged
142 if m and n and not a: # flags set, don't agree, differ from parent
142 if m and n and not a: # flags set, don't agree, differ from parent
143 r = repo.ui.promptchoice(
143 r = repo.ui.promptchoice(
144 _(" conflicting flags for %s\n"
144 _(" conflicting flags for %s\n"
145 "(n)one, e(x)ec or sym(l)ink?") % f,
145 "(n)one, e(x)ec or sym(l)ink?") % f,
146 (_("&None"), _("E&xec"), _("Sym&link")), 0)
146 (_("&None"), _("E&xec"), _("Sym&link")), 0)
147 if r == 1:
147 if r == 1:
148 return "x" # Exec
148 return "x" # Exec
149 if r == 2:
149 if r == 2:
150 return "l" # Symlink
150 return "l" # Symlink
151 return ""
151 return ""
152 if m and m != a: # changed from a to m
152 if m and m != a: # changed from a to m
153 return m
153 return m
154 if n and n != a: # changed from a to n
154 if n and n != a: # changed from a to n
155 return n
155 return n
156 return '' # flag was cleared
156 return '' # flag was cleared
157
157
158 def act(msg, m, f, *args):
158 def act(msg, m, f, *args):
159 repo.ui.debug(" %s: %s -> %s\n" % (f, msg, m))
159 repo.ui.debug(" %s: %s -> %s\n" % (f, msg, m))
160 action.append((f, m) + args)
160 action.append((f, m) + args)
161
161
162 action, copy = [], {}
162 action, copy = [], {}
163
163
164 if overwrite:
164 if overwrite:
165 pa = p1
165 pa = p1
166 elif pa == p2: # backwards
166 elif pa == p2: # backwards
167 pa = p1.p1()
167 pa = p1.p1()
168 elif pa and repo.ui.configbool("merge", "followcopies", True):
168 elif pa and repo.ui.configbool("merge", "followcopies", True):
169 dirs = repo.ui.configbool("merge", "followdirs", True)
169 dirs = repo.ui.configbool("merge", "followdirs", True)
170 copy, diverge = copies.copies(repo, p1, p2, pa, dirs)
170 copy, diverge = copies.copies(repo, p1, p2, pa, dirs)
171 for of, fl in diverge.iteritems():
171 for of, fl in diverge.iteritems():
172 act("divergent renames", "dr", of, fl)
172 act("divergent renames", "dr", of, fl)
173
173
174 repo.ui.note(_("resolving manifests\n"))
174 repo.ui.note(_("resolving manifests\n"))
175 repo.ui.debug(" overwrite %s partial %s\n" % (overwrite, bool(partial)))
175 repo.ui.debug(" overwrite %s partial %s\n" % (overwrite, bool(partial)))
176 repo.ui.debug(" ancestor %s local %s remote %s\n" % (pa, p1, p2))
176 repo.ui.debug(" ancestor %s local %s remote %s\n" % (pa, p1, p2))
177
177
178 m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()
178 m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()
179 copied = set(copy.values())
179 copied = set(copy.values())
180
180
181 if '.hgsubstate' in m1:
181 if '.hgsubstate' in m1:
182 # check whether sub state is modified
182 # check whether sub state is modified
183 for s in p1.substate:
183 for s in p1.substate:
184 if p1.sub(s).dirty():
184 if p1.sub(s).dirty():
185 m1['.hgsubstate'] += "+"
185 m1['.hgsubstate'] += "+"
186 break
186 break
187
187
188 # Compare manifests
188 # Compare manifests
189 for f, n in m1.iteritems():
189 for f, n in m1.iteritems():
190 if partial and not partial(f):
190 if partial and not partial(f):
191 continue
191 continue
192 if f in m2:
192 if f in m2:
193 rflags = fmerge(f, f, f)
193 rflags = fmerge(f, f, f)
194 a = ma.get(f, nullid)
194 a = ma.get(f, nullid)
195 if n == m2[f] or m2[f] == a: # same or local newer
195 if n == m2[f] or m2[f] == a: # same or local newer
196 # is file locally modified or flags need changing?
196 # is file locally modified or flags need changing?
197 # dirstate flags may need to be made current
197 # dirstate flags may need to be made current
198 if m1.flags(f) != rflags or n[20:]:
198 if m1.flags(f) != rflags or n[20:]:
199 act("update permissions", "e", f, rflags)
199 act("update permissions", "e", f, rflags)
200 elif n == a: # remote newer
200 elif n == a: # remote newer
201 act("remote is newer", "g", f, rflags)
201 act("remote is newer", "g", f, rflags)
202 else: # both changed
202 else: # both changed
203 act("versions differ", "m", f, f, f, rflags, False)
203 act("versions differ", "m", f, f, f, rflags, False)
204 elif f in copied: # files we'll deal with on m2 side
204 elif f in copied: # files we'll deal with on m2 side
205 pass
205 pass
206 elif f in copy:
206 elif f in copy:
207 f2 = copy[f]
207 f2 = copy[f]
208 if f2 not in m2: # directory rename
208 if f2 not in m2: # directory rename
209 act("remote renamed directory to " + f2, "d",
209 act("remote renamed directory to " + f2, "d",
210 f, None, f2, m1.flags(f))
210 f, None, f2, m1.flags(f))
211 else: # case 2 A,B/B/B or case 4,21 A/B/B
211 else: # case 2 A,B/B/B or case 4,21 A/B/B
212 act("local copied/moved to " + f2, "m",
212 act("local copied/moved to " + f2, "m",
213 f, f2, f, fmerge(f, f2, f2), False)
213 f, f2, f, fmerge(f, f2, f2), False)
214 elif f in ma: # clean, a different, no remote
214 elif f in ma: # clean, a different, no remote
215 if n != ma[f]:
215 if n != ma[f]:
216 if repo.ui.promptchoice(
216 if repo.ui.promptchoice(
217 _(" local changed %s which remote deleted\n"
217 _(" local changed %s which remote deleted\n"
218 "use (c)hanged version or (d)elete?") % f,
218 "use (c)hanged version or (d)elete?") % f,
219 (_("&Changed"), _("&Delete")), 0):
219 (_("&Changed"), _("&Delete")), 0):
220 act("prompt delete", "r", f)
220 act("prompt delete", "r", f)
221 else:
221 else:
222 act("prompt keep", "a", f)
222 act("prompt keep", "a", f)
223 elif n[20:] == "a": # added, no remote
223 elif n[20:] == "a": # added, no remote
224 act("remote deleted", "f", f)
224 act("remote deleted", "f", f)
225 elif n[20:] != "u":
225 elif n[20:] != "u":
226 act("other deleted", "r", f)
226 act("other deleted", "r", f)
227
227
228 for f, n in m2.iteritems():
228 for f, n in m2.iteritems():
229 if partial and not partial(f):
229 if partial and not partial(f):
230 continue
230 continue
231 if f in m1 or f in copied: # files already visited
231 if f in m1 or f in copied: # files already visited
232 continue
232 continue
233 if f in copy:
233 if f in copy:
234 f2 = copy[f]
234 f2 = copy[f]
235 if f2 not in m1: # directory rename
235 if f2 not in m1: # directory rename
236 act("local renamed directory to " + f2, "d",
236 act("local renamed directory to " + f2, "d",
237 None, f, f2, m2.flags(f))
237 None, f, f2, m2.flags(f))
238 elif f2 in m2: # rename case 1, A/A,B/A
238 elif f2 in m2: # rename case 1, A/A,B/A
239 act("remote copied to " + f, "m",
239 act("remote copied to " + f, "m",
240 f2, f, f, fmerge(f2, f, f2), False)
240 f2, f, f, fmerge(f2, f, f2), False)
241 else: # case 3,20 A/B/A
241 else: # case 3,20 A/B/A
242 act("remote moved to " + f, "m",
242 act("remote moved to " + f, "m",
243 f2, f, f, fmerge(f2, f, f2), True)
243 f2, f, f, fmerge(f2, f, f2), True)
244 elif f not in ma:
244 elif f not in ma:
245 act("remote created", "g", f, m2.flags(f))
245 act("remote created", "g", f, m2.flags(f))
246 elif n != ma[f]:
246 elif n != ma[f]:
247 if repo.ui.promptchoice(
247 if repo.ui.promptchoice(
248 _("remote changed %s which local deleted\n"
248 _("remote changed %s which local deleted\n"
249 "use (c)hanged version or leave (d)eleted?") % f,
249 "use (c)hanged version or leave (d)eleted?") % f,
250 (_("&Changed"), _("&Deleted")), 0) == 0:
250 (_("&Changed"), _("&Deleted")), 0) == 0:
251 act("prompt recreating", "g", f, m2.flags(f))
251 act("prompt recreating", "g", f, m2.flags(f))
252
252
253 return action
253 return action
254
254
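For orientation, a hedged summary of the action tuples that _forgetremoved() and manifestmerge() emit (shapes read off the act()/action.append() calls above; applyupdates() and recordupdates() below consume them):

    (f, "r")                         remove f
    (f, "g", flags)                  get f from the other revision
    (f, "m", f2, fd, flags, move)    merge f and f2 into fd
    (f, "d", f2, fd, flags)          directory rename; f or f2 may be None
    (f, "dr", fl)                    divergent renames, fl = list of new names
    (f, "e", flags)                  update exec/symlink flags only
    (f, "f")                         forget f
    (f, "a")                         re-add f (kept after prompt)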
255 def actionkey(a):
255 def actionkey(a):
256 return a[1] == 'r' and -1 or 0, a
256 return a[1] == 'r' and -1 or 0, a
257
257
258 def applyupdates(repo, action, wctx, mctx, actx, overwrite):
258 def applyupdates(repo, action, wctx, mctx, actx, overwrite):
259 """apply the merge action list to the working directory
259 """apply the merge action list to the working directory
260
260
261 wctx is the working copy context
261 wctx is the working copy context
262 mctx is the context to be merged into the working copy
262 mctx is the context to be merged into the working copy
263 actx is the context of the common ancestor
263 actx is the context of the common ancestor
264
264
265 Return a tuple of counts (updated, merged, removed, unresolved) that
265 Return a tuple of counts (updated, merged, removed, unresolved) that
266 describes how many files were affected by the update.
266 describes how many files were affected by the update.
267 """
267 """
268
268
269 updated, merged, removed, unresolved = 0, 0, 0, 0
269 updated, merged, removed, unresolved = 0, 0, 0, 0
270 ms = mergestate(repo)
270 ms = mergestate(repo)
271 ms.reset(wctx.p1().node())
271 ms.reset(wctx.p1().node())
272 moves = []
272 moves = []
273 action.sort(key=actionkey)
273 action.sort(key=actionkey)
274
274
275 # prescan for merges
275 # prescan for merges
276 u = repo.ui
276 u = repo.ui
277 for a in action:
277 for a in action:
278 f, m = a[:2]
278 f, m = a[:2]
279 if m == 'm': # merge
279 if m == 'm': # merge
280 f2, fd, flags, move = a[2:]
280 f2, fd, flags, move = a[2:]
281 if f == '.hgsubstate': # merged internally
281 if f == '.hgsubstate': # merged internally
282 continue
282 continue
283 repo.ui.debug("preserving %s for resolve of %s\n" % (f, fd))
283 repo.ui.debug("preserving %s for resolve of %s\n" % (f, fd))
284 fcl = wctx[f]
284 fcl = wctx[f]
285 fco = mctx[f2]
285 fco = mctx[f2]
286 if mctx == actx: # backwards, use working dir parent as ancestor
286 if mctx == actx: # backwards, use working dir parent as ancestor
287 if fcl.parents():
287 if fcl.parents():
288 fca = fcl.p1()
288 fca = fcl.p1()
289 else:
289 else:
290 fca = repo.filectx(f, fileid=nullrev)
290 fca = repo.filectx(f, fileid=nullrev)
291 else:
291 else:
292 fca = fcl.ancestor(fco, actx)
292 fca = fcl.ancestor(fco, actx)
293 if not fca:
293 if not fca:
294 fca = repo.filectx(f, fileid=nullrev)
294 fca = repo.filectx(f, fileid=nullrev)
295 ms.add(fcl, fco, fca, fd, flags)
295 ms.add(fcl, fco, fca, fd, flags)
296 if f != fd and move:
296 if f != fd and move:
297 moves.append(f)
297 moves.append(f)
298
298
299 # remove renamed files after safely stored
299 # remove renamed files after safely stored
300 for f in moves:
300 for f in moves:
301 if os.path.lexists(repo.wjoin(f)):
301 if os.path.lexists(repo.wjoin(f)):
302 repo.ui.debug("removing %s\n" % f)
302 repo.ui.debug("removing %s\n" % f)
303 os.unlink(repo.wjoin(f))
303 os.unlink(repo.wjoin(f))
304
304
305 audit_path = scmutil.pathauditor(repo.root)
305 audit_path = scmutil.pathauditor(repo.root)
306
306
307 numupdates = len(action)
307 numupdates = len(action)
308 for i, a in enumerate(action):
308 for i, a in enumerate(action):
309 f, m = a[:2]
309 f, m = a[:2]
310 u.progress(_('updating'), i + 1, item=f, total=numupdates,
310 u.progress(_('updating'), i + 1, item=f, total=numupdates,
311 unit=_('files'))
311 unit=_('files'))
312 if f and f[0] == "/":
312 if f and f[0] == "/":
313 continue
313 continue
314 if m == "r": # remove
314 if m == "r": # remove
315 repo.ui.note(_("removing %s\n") % f)
315 repo.ui.note(_("removing %s\n") % f)
316 audit_path(f)
316 audit_path(f)
317 if f == '.hgsubstate': # subrepo states need updating
317 if f == '.hgsubstate': # subrepo states need updating
318 subrepo.submerge(repo, wctx, mctx, wctx, overwrite)
318 subrepo.submerge(repo, wctx, mctx, wctx, overwrite)
319 try:
319 try:
320 util.unlinkpath(repo.wjoin(f))
320 util.unlinkpath(repo.wjoin(f))
321 except OSError, inst:
321 except OSError, inst:
322 if inst.errno != errno.ENOENT:
322 if inst.errno != errno.ENOENT:
323 repo.ui.warn(_("update failed to remove %s: %s!\n") %
323 repo.ui.warn(_("update failed to remove %s: %s!\n") %
324 (f, inst.strerror))
324 (f, inst.strerror))
325 removed += 1
325 removed += 1
326 elif m == "m": # merge
326 elif m == "m": # merge
327 if f == '.hgsubstate': # subrepo states need updating
327 if f == '.hgsubstate': # subrepo states need updating
328 subrepo.submerge(repo, wctx, mctx, wctx.ancestor(mctx), overwrite)
328 subrepo.submerge(repo, wctx, mctx, wctx.ancestor(mctx), overwrite)
329 continue
329 continue
330 f2, fd, flags, move = a[2:]
330 f2, fd, flags, move = a[2:]
331 r = ms.resolve(fd, wctx, mctx)
331 r = ms.resolve(fd, wctx, mctx)
332 if r is not None and r > 0:
332 if r is not None and r > 0:
333 unresolved += 1
333 unresolved += 1
334 else:
334 else:
335 if r is None:
335 if r is None:
336 updated += 1
336 updated += 1
337 else:
337 else:
338 merged += 1
338 merged += 1
339 util.set_flags(repo.wjoin(fd), 'l' in flags, 'x' in flags)
339 util.setflags(repo.wjoin(fd), 'l' in flags, 'x' in flags)
340 if (move and repo.dirstate.normalize(fd) != f
340 if (move and repo.dirstate.normalize(fd) != f
341 and os.path.lexists(repo.wjoin(f))):
341 and os.path.lexists(repo.wjoin(f))):
342 repo.ui.debug("removing %s\n" % f)
342 repo.ui.debug("removing %s\n" % f)
343 os.unlink(repo.wjoin(f))
343 os.unlink(repo.wjoin(f))
344 elif m == "g": # get
344 elif m == "g": # get
345 flags = a[2]
345 flags = a[2]
346 repo.ui.note(_("getting %s\n") % f)
346 repo.ui.note(_("getting %s\n") % f)
347 t = mctx.filectx(f).data()
347 t = mctx.filectx(f).data()
348 repo.wwrite(f, t, flags)
348 repo.wwrite(f, t, flags)
349 t = None
349 t = None
350 updated += 1
350 updated += 1
351 if f == '.hgsubstate': # subrepo states need updating
351 if f == '.hgsubstate': # subrepo states need updating
352 subrepo.submerge(repo, wctx, mctx, wctx, overwrite)
352 subrepo.submerge(repo, wctx, mctx, wctx, overwrite)
353 elif m == "d": # directory rename
353 elif m == "d": # directory rename
354 f2, fd, flags = a[2:]
354 f2, fd, flags = a[2:]
355 if f:
355 if f:
356 repo.ui.note(_("moving %s to %s\n") % (f, fd))
356 repo.ui.note(_("moving %s to %s\n") % (f, fd))
357 t = wctx.filectx(f).data()
357 t = wctx.filectx(f).data()
358 repo.wwrite(fd, t, flags)
358 repo.wwrite(fd, t, flags)
359 util.unlinkpath(repo.wjoin(f))
359 util.unlinkpath(repo.wjoin(f))
360 if f2:
360 if f2:
361 repo.ui.note(_("getting %s to %s\n") % (f2, fd))
361 repo.ui.note(_("getting %s to %s\n") % (f2, fd))
362 t = mctx.filectx(f2).data()
362 t = mctx.filectx(f2).data()
363 repo.wwrite(fd, t, flags)
363 repo.wwrite(fd, t, flags)
364 updated += 1
364 updated += 1
365 elif m == "dr": # divergent renames
365 elif m == "dr": # divergent renames
366 fl = a[2]
366 fl = a[2]
367 repo.ui.warn(_("note: possible conflict - %s was renamed "
367 repo.ui.warn(_("note: possible conflict - %s was renamed "
368 "multiple times to:\n") % f)
368 "multiple times to:\n") % f)
369 for nf in fl:
369 for nf in fl:
370 repo.ui.warn(" %s\n" % nf)
370 repo.ui.warn(" %s\n" % nf)
371 elif m == "e": # exec
371 elif m == "e": # exec
372 flags = a[2]
372 flags = a[2]
373 util.set_flags(repo.wjoin(f), 'l' in flags, 'x' in flags)
373 util.setflags(repo.wjoin(f), 'l' in flags, 'x' in flags)
374 ms.commit()
374 ms.commit()
375 u.progress(_('updating'), None, total=numupdates, unit=_('files'))
375 u.progress(_('updating'), None, total=numupdates, unit=_('files'))
376
376
377 return updated, merged, removed, unresolved
377 return updated, merged, removed, unresolved
378
378
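A small hedged sketch of reading the counts returned above; 'stats' is assumed to come from applyupdates() or update(), and the resolve hint reflects the usual workflow rather than anything in this diff:

    updated, merged, removed, unresolved = stats
    if unresolved:
        # files were left in the 'u' state of mergestate and typically
        # need 'hg resolve' before the merge can be committed
        ui.status("%d files unresolved\n" % unresolved)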
379 def recordupdates(repo, action, branchmerge):
379 def recordupdates(repo, action, branchmerge):
380 "record merge actions to the dirstate"
380 "record merge actions to the dirstate"
381
381
382 for a in action:
382 for a in action:
383 f, m = a[:2]
383 f, m = a[:2]
384 if m == "r": # remove
384 if m == "r": # remove
385 if branchmerge:
385 if branchmerge:
386 repo.dirstate.remove(f)
386 repo.dirstate.remove(f)
387 else:
387 else:
388 repo.dirstate.forget(f)
388 repo.dirstate.forget(f)
389 elif m == "a": # re-add
389 elif m == "a": # re-add
390 if not branchmerge:
390 if not branchmerge:
391 repo.dirstate.add(f)
391 repo.dirstate.add(f)
392 elif m == "f": # forget
392 elif m == "f": # forget
393 repo.dirstate.forget(f)
393 repo.dirstate.forget(f)
394 elif m == "e": # exec change
394 elif m == "e": # exec change
395 repo.dirstate.normallookup(f)
395 repo.dirstate.normallookup(f)
396 elif m == "g": # get
396 elif m == "g": # get
397 if branchmerge:
397 if branchmerge:
398 repo.dirstate.otherparent(f)
398 repo.dirstate.otherparent(f)
399 else:
399 else:
400 repo.dirstate.normal(f)
400 repo.dirstate.normal(f)
401 elif m == "m": # merge
401 elif m == "m": # merge
402 f2, fd, flag, move = a[2:]
402 f2, fd, flag, move = a[2:]
403 if branchmerge:
403 if branchmerge:
404 # We've done a branch merge, mark this file as merged
404 # We've done a branch merge, mark this file as merged
405 # so that we properly record the merger later
405 # so that we properly record the merger later
406 repo.dirstate.merge(fd)
406 repo.dirstate.merge(fd)
407 if f != f2: # copy/rename
407 if f != f2: # copy/rename
408 if move:
408 if move:
409 repo.dirstate.remove(f)
409 repo.dirstate.remove(f)
410 if f != fd:
410 if f != fd:
411 repo.dirstate.copy(f, fd)
411 repo.dirstate.copy(f, fd)
412 else:
412 else:
413 repo.dirstate.copy(f2, fd)
413 repo.dirstate.copy(f2, fd)
414 else:
414 else:
415 # We've update-merged a locally modified file, so
415 # We've update-merged a locally modified file, so
416 # we set the dirstate to emulate a normal checkout
416 # we set the dirstate to emulate a normal checkout
417 # of that file some time in the past. Thus our
417 # of that file some time in the past. Thus our
418 # merge will appear as a normal local file
418 # merge will appear as a normal local file
419 # modification.
419 # modification.
420 if f2 == fd: # file not locally copied/moved
420 if f2 == fd: # file not locally copied/moved
421 repo.dirstate.normallookup(fd)
421 repo.dirstate.normallookup(fd)
422 if move:
422 if move:
423 repo.dirstate.forget(f)
423 repo.dirstate.forget(f)
424 elif m == "d": # directory rename
424 elif m == "d": # directory rename
425 f2, fd, flag = a[2:]
425 f2, fd, flag = a[2:]
426 if not f2 and f not in repo.dirstate:
426 if not f2 and f not in repo.dirstate:
427 # untracked file moved
427 # untracked file moved
428 continue
428 continue
429 if branchmerge:
429 if branchmerge:
430 repo.dirstate.add(fd)
430 repo.dirstate.add(fd)
431 if f:
431 if f:
432 repo.dirstate.remove(f)
432 repo.dirstate.remove(f)
433 repo.dirstate.copy(f, fd)
433 repo.dirstate.copy(f, fd)
434 if f2:
434 if f2:
435 repo.dirstate.copy(f2, fd)
435 repo.dirstate.copy(f2, fd)
436 else:
436 else:
437 repo.dirstate.normal(fd)
437 repo.dirstate.normal(fd)
438 if f:
438 if f:
439 repo.dirstate.forget(f)
439 repo.dirstate.forget(f)
440
440
441 def update(repo, node, branchmerge, force, partial, ancestor=None):
441 def update(repo, node, branchmerge, force, partial, ancestor=None):
442 """
442 """
443 Perform a merge between the working directory and the given node
443 Perform a merge between the working directory and the given node
444
444
445 node = the node to update to, or None if unspecified
445 node = the node to update to, or None if unspecified
446 branchmerge = whether to merge between branches
446 branchmerge = whether to merge between branches
447 force = whether to force branch merging or file overwriting
447 force = whether to force branch merging or file overwriting
448 partial = a function to filter file lists (dirstate not updated)
448 partial = a function to filter file lists (dirstate not updated)
449
449
450 The table below shows all the behaviors of the update command
450 The table below shows all the behaviors of the update command
451 given the -c and -C or no options, whether the working directory
451 given the -c and -C or no options, whether the working directory
452 is dirty, whether a revision is specified, and the relationship of
452 is dirty, whether a revision is specified, and the relationship of
453 the parent rev to the target rev (linear, on the same named
453 the parent rev to the target rev (linear, on the same named
454 branch, or on another named branch).
454 branch, or on another named branch).
455
455
456 This logic is tested by test-update-branches.t.
456 This logic is tested by test-update-branches.t.
457
457
458 -c -C dirty rev | linear same cross
458 -c -C dirty rev | linear same cross
459 n n n n | ok (1) x
459 n n n n | ok (1) x
460 n n n y | ok ok ok
460 n n n y | ok ok ok
461 n n y * | merge (2) (2)
461 n n y * | merge (2) (2)
462 n y * * | --- discard ---
462 n y * * | --- discard ---
463 y n y * | --- (3) ---
463 y n y * | --- (3) ---
464 y n n * | --- ok ---
464 y n n * | --- ok ---
465 y y * * | --- (4) ---
465 y y * * | --- (4) ---
466
466
467 x = can't happen
467 x = can't happen
468 * = don't-care
468 * = don't-care
469 1 = abort: crosses branches (use 'hg merge' or 'hg update -c')
469 1 = abort: crosses branches (use 'hg merge' or 'hg update -c')
470 2 = abort: crosses branches (use 'hg merge' to merge or
470 2 = abort: crosses branches (use 'hg merge' to merge or
471 use 'hg update -C' to discard changes)
471 use 'hg update -C' to discard changes)
472 3 = abort: uncommitted local changes
472 3 = abort: uncommitted local changes
473 4 = incompatible options (checked in commands.py)
473 4 = incompatible options (checked in commands.py)
474
474
475 Return the same tuple as applyupdates().
475 Return the same tuple as applyupdates().
476 """
476 """
477
477
478 onode = node
478 onode = node
479 wlock = repo.wlock()
479 wlock = repo.wlock()
480 try:
480 try:
481 wc = repo[None]
481 wc = repo[None]
482 if node is None:
482 if node is None:
483 # tip of current branch
483 # tip of current branch
484 try:
484 try:
485 node = repo.branchtags()[wc.branch()]
485 node = repo.branchtags()[wc.branch()]
486 except KeyError:
486 except KeyError:
487 if wc.branch() == "default": # no default branch!
487 if wc.branch() == "default": # no default branch!
488 node = repo.lookup("tip") # update to tip
488 node = repo.lookup("tip") # update to tip
489 else:
489 else:
490 raise util.Abort(_("branch %s not found") % wc.branch())
490 raise util.Abort(_("branch %s not found") % wc.branch())
491 overwrite = force and not branchmerge
491 overwrite = force and not branchmerge
492 pl = wc.parents()
492 pl = wc.parents()
493 p1, p2 = pl[0], repo[node]
493 p1, p2 = pl[0], repo[node]
494 if ancestor:
494 if ancestor:
495 pa = repo[ancestor]
495 pa = repo[ancestor]
496 else:
496 else:
497 pa = p1.ancestor(p2)
497 pa = p1.ancestor(p2)
498
498
499 fp1, fp2, xp1, xp2 = p1.node(), p2.node(), str(p1), str(p2)
499 fp1, fp2, xp1, xp2 = p1.node(), p2.node(), str(p1), str(p2)
500
500
501 ### check phase
501 ### check phase
502 if not overwrite and len(pl) > 1:
502 if not overwrite and len(pl) > 1:
503 raise util.Abort(_("outstanding uncommitted merges"))
503 raise util.Abort(_("outstanding uncommitted merges"))
504 if branchmerge:
504 if branchmerge:
505 if pa == p2:
505 if pa == p2:
506 raise util.Abort(_("merging with a working directory ancestor"
506 raise util.Abort(_("merging with a working directory ancestor"
507 " has no effect"))
507 " has no effect"))
508 elif pa == p1:
508 elif pa == p1:
509 if p1.branch() == p2.branch():
509 if p1.branch() == p2.branch():
510 raise util.Abort(_("nothing to merge (use 'hg update'"
510 raise util.Abort(_("nothing to merge (use 'hg update'"
511 " or check 'hg heads')"))
511 " or check 'hg heads')"))
512 if not force and (wc.files() or wc.deleted()):
512 if not force and (wc.files() or wc.deleted()):
513 raise util.Abort(_("outstanding uncommitted changes "
513 raise util.Abort(_("outstanding uncommitted changes "
514 "(use 'hg status' to list changes)"))
514 "(use 'hg status' to list changes)"))
515 for s in wc.substate:
515 for s in wc.substate:
516 if wc.sub(s).dirty():
516 if wc.sub(s).dirty():
517 raise util.Abort(_("outstanding uncommitted changes in "
517 raise util.Abort(_("outstanding uncommitted changes in "
518 "subrepository '%s'") % s)
518 "subrepository '%s'") % s)
519
519
520 elif not overwrite:
520 elif not overwrite:
521 if pa == p1 or pa == p2: # linear
521 if pa == p1 or pa == p2: # linear
522 pass # all good
522 pass # all good
523 elif wc.files() or wc.deleted():
523 elif wc.files() or wc.deleted():
524 raise util.Abort(_("crosses branches (merge branches or use"
524 raise util.Abort(_("crosses branches (merge branches or use"
525 " --clean to discard changes)"))
525 " --clean to discard changes)"))
526 elif onode is None:
526 elif onode is None:
527 raise util.Abort(_("crosses branches (merge branches or use"
527 raise util.Abort(_("crosses branches (merge branches or use"
528 " --check to force update)"))
528 " --check to force update)"))
529 else:
529 else:
530 # Allow jumping branches if clean and specific rev given
530 # Allow jumping branches if clean and specific rev given
531 overwrite = True
531 overwrite = True
532
532
533 ### calculate phase
533 ### calculate phase
534 action = []
534 action = []
535 wc.status(unknown=True) # prime cache
535 wc.status(unknown=True) # prime cache
536 if not force:
536 if not force:
537 _checkunknown(wc, p2)
537 _checkunknown(wc, p2)
538 if not util.checkcase(repo.path):
538 if not util.checkcase(repo.path):
539 _checkcollision(p2)
539 _checkcollision(p2)
540 action += _forgetremoved(wc, p2, branchmerge)
540 action += _forgetremoved(wc, p2, branchmerge)
541 action += manifestmerge(repo, wc, p2, pa, overwrite, partial)
541 action += manifestmerge(repo, wc, p2, pa, overwrite, partial)
542
542
543 ### apply phase
543 ### apply phase
544 if not branchmerge: # just jump to the new rev
544 if not branchmerge: # just jump to the new rev
545 fp1, fp2, xp1, xp2 = fp2, nullid, xp2, ''
545 fp1, fp2, xp1, xp2 = fp2, nullid, xp2, ''
546 if not partial:
546 if not partial:
547 repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2)
547 repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2)
548
548
549 stats = applyupdates(repo, action, wc, p2, pa, overwrite)
549 stats = applyupdates(repo, action, wc, p2, pa, overwrite)
550
550
551 if not partial:
551 if not partial:
552 repo.dirstate.setparents(fp1, fp2)
552 repo.dirstate.setparents(fp1, fp2)
553 recordupdates(repo, action, branchmerge)
553 recordupdates(repo, action, branchmerge)
554 if not branchmerge:
554 if not branchmerge:
555 repo.dirstate.setbranch(p2.branch())
555 repo.dirstate.setbranch(p2.branch())
556 finally:
556 finally:
557 wlock.release()
557 wlock.release()
558
558
559 if not partial:
559 if not partial:
560 repo.hook('update', parent1=xp1, parent2=xp2, error=stats[3])
560 repo.hook('update', parent1=xp1, parent2=xp2, error=stats[3])
561 return stats
561 return stats
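Finally, a hedged sketch of how the common operations map onto update()'s parameters, based only on the parameter description in the docstring above (the real call sites live in hg.py/commands.py and are not part of this diff; repo and rev are placeholders):

    from mercurial import merge as mergemod

    # "hg update REV"    - linear update, keep local changes where possible
    stats = mergemod.update(repo, rev, branchmerge=False, force=False, partial=None)

    # "hg update -C REV" - overwrite: discard local changes
    stats = mergemod.update(repo, rev, branchmerge=False, force=True, partial=None)

    # "hg merge REV"     - merge the other head into the working directory
    stats = mergemod.update(repo, rev, branchmerge=True, force=False, partial=None)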