rebase: move updatedirstate into cmdutil so it can be shared
Matt Mackall
r15214:231aac52 default
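This changeset drops rebase's local updatedirstate helper and has rebase call a shared cmdutil.duplicatecopies instead; cmdutil gains the copies import the helper needs. A minimal sketch of the shared helper, assuming it keeps the body that the diff below removes from rebase.py:

    def duplicatecopies(repo, rev, p1, p2):
        '''reproduce the copies and renames of the rebased revision in the dirstate'''
        # simulate the copies and renames recorded in the source changeset
        cop, diver = copies.copies(repo, repo[rev], repo[p1], repo[p2], True)
        m1 = repo[rev].manifest()
        m2 = repo[p1].manifest()
        for k, v in cop.iteritems():
            if k in m1:
                if v in m1 or v in m2:
                    repo.dirstate.copy(v, k)
                    if v in m2 and v not in m1 and k in m2:
                        repo.dirstate.remove(v)

    # copies comes from cmdutil's own import line in the second hunk below;
    # standalone code of that era would use: from mercurial import copies

The call site in the rebase loop changes from updatedirstate(repo, rev, target, p2) to cmdutil.duplicatecopies(repo, rev, target, p2), so the copy-tracking logic can be reused outside the rebase extension.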
@@ -1,621 +1,607 b''
1 # rebase.py - rebasing feature for mercurial
1 # rebase.py - rebasing feature for mercurial
2 #
2 #
3 # Copyright 2008 Stefano Tortarolo <stefano.tortarolo at gmail dot com>
3 # Copyright 2008 Stefano Tortarolo <stefano.tortarolo at gmail dot com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''command to move sets of revisions to a different ancestor
8 '''command to move sets of revisions to a different ancestor
9
9
10 This extension lets you rebase changesets in an existing Mercurial
10 This extension lets you rebase changesets in an existing Mercurial
11 repository.
11 repository.
12
12
13 For more information:
13 For more information:
14 http://mercurial.selenic.com/wiki/RebaseExtension
14 http://mercurial.selenic.com/wiki/RebaseExtension
15 '''
15 '''
16
16
17 from mercurial import hg, util, repair, merge, cmdutil, commands, bookmarks
17 from mercurial import hg, util, repair, merge, cmdutil, commands, bookmarks
18 from mercurial import extensions, copies, patch
18 from mercurial import extensions, patch
19 from mercurial.commands import templateopts
19 from mercurial.commands import templateopts
20 from mercurial.node import nullrev
20 from mercurial.node import nullrev
21 from mercurial.lock import release
21 from mercurial.lock import release
22 from mercurial.i18n import _
22 from mercurial.i18n import _
23 import os, errno
23 import os, errno
24
24
25 nullmerge = -2
25 nullmerge = -2
26
26
27 cmdtable = {}
27 cmdtable = {}
28 command = cmdutil.command(cmdtable)
28 command = cmdutil.command(cmdtable)
29
29
30 @command('rebase',
30 @command('rebase',
31 [('s', 'source', '',
31 [('s', 'source', '',
32 _('rebase from the specified changeset'), _('REV')),
32 _('rebase from the specified changeset'), _('REV')),
33 ('b', 'base', '',
33 ('b', 'base', '',
34 _('rebase from the base of the specified changeset '
34 _('rebase from the base of the specified changeset '
35 '(up to greatest common ancestor of base and dest)'),
35 '(up to greatest common ancestor of base and dest)'),
36 _('REV')),
36 _('REV')),
37 ('d', 'dest', '',
37 ('d', 'dest', '',
38 _('rebase onto the specified changeset'), _('REV')),
38 _('rebase onto the specified changeset'), _('REV')),
39 ('', 'collapse', False, _('collapse the rebased changesets')),
39 ('', 'collapse', False, _('collapse the rebased changesets')),
40 ('m', 'message', '',
40 ('m', 'message', '',
41 _('use text as collapse commit message'), _('TEXT')),
41 _('use text as collapse commit message'), _('TEXT')),
42 ('l', 'logfile', '',
42 ('l', 'logfile', '',
43 _('read collapse commit message from file'), _('FILE')),
43 _('read collapse commit message from file'), _('FILE')),
44 ('', 'keep', False, _('keep original changesets')),
44 ('', 'keep', False, _('keep original changesets')),
45 ('', 'keepbranches', False, _('keep original branch names')),
45 ('', 'keepbranches', False, _('keep original branch names')),
46 ('', 'detach', False, _('force detaching of source from its original '
46 ('', 'detach', False, _('force detaching of source from its original '
47 'branch')),
47 'branch')),
48 ('t', 'tool', '', _('specify merge tool')),
48 ('t', 'tool', '', _('specify merge tool')),
49 ('c', 'continue', False, _('continue an interrupted rebase')),
49 ('c', 'continue', False, _('continue an interrupted rebase')),
50 ('a', 'abort', False, _('abort an interrupted rebase'))] +
50 ('a', 'abort', False, _('abort an interrupted rebase'))] +
51 templateopts,
51 templateopts,
52 _('hg rebase [-s REV | -b REV] [-d REV] [options]\n'
52 _('hg rebase [-s REV | -b REV] [-d REV] [options]\n'
53 'hg rebase {-a|-c}'))
53 'hg rebase {-a|-c}'))
54 def rebase(ui, repo, **opts):
54 def rebase(ui, repo, **opts):
55 """move changeset (and descendants) to a different branch
55 """move changeset (and descendants) to a different branch
56
56
57 Rebase uses repeated merging to graft changesets from one part of
57 Rebase uses repeated merging to graft changesets from one part of
58 history (the source) onto another (the destination). This can be
58 history (the source) onto another (the destination). This can be
59 useful for linearizing *local* changes relative to a master
59 useful for linearizing *local* changes relative to a master
60 development tree.
60 development tree.
61
61
62 You should not rebase changesets that have already been shared
62 You should not rebase changesets that have already been shared
63 with others. Doing so will force everybody else to perform the
63 with others. Doing so will force everybody else to perform the
64 same rebase or they will end up with duplicated changesets after
64 same rebase or they will end up with duplicated changesets after
65 pulling in your rebased changesets.
65 pulling in your rebased changesets.
66
66
67 If you don't specify a destination changeset (``-d/--dest``),
67 If you don't specify a destination changeset (``-d/--dest``),
68 rebase uses the tipmost head of the current named branch as the
68 rebase uses the tipmost head of the current named branch as the
69 destination. (The destination changeset is not modified by
69 destination. (The destination changeset is not modified by
70 rebasing, but new changesets are added as its descendants.)
70 rebasing, but new changesets are added as its descendants.)
71
71
72 You can specify which changesets to rebase in two ways: as a
72 You can specify which changesets to rebase in two ways: as a
73 "source" changeset or as a "base" changeset. Both are shorthand
73 "source" changeset or as a "base" changeset. Both are shorthand
74 for a topologically related set of changesets (the "source
74 for a topologically related set of changesets (the "source
75 branch"). If you specify source (``-s/--source``), rebase will
75 branch"). If you specify source (``-s/--source``), rebase will
76 rebase that changeset and all of its descendants onto dest. If you
76 rebase that changeset and all of its descendants onto dest. If you
77 specify base (``-b/--base``), rebase will select ancestors of base
77 specify base (``-b/--base``), rebase will select ancestors of base
78 back to but not including the common ancestor with dest. Thus,
78 back to but not including the common ancestor with dest. Thus,
79 ``-b`` is less precise but more convenient than ``-s``: you can
79 ``-b`` is less precise but more convenient than ``-s``: you can
80 specify any changeset in the source branch, and rebase will select
80 specify any changeset in the source branch, and rebase will select
81 the whole branch. If you specify neither ``-s`` nor ``-b``, rebase
81 the whole branch. If you specify neither ``-s`` nor ``-b``, rebase
82 uses the parent of the working directory as the base.
82 uses the parent of the working directory as the base.
83
83
84 By default, rebase recreates the changesets in the source branch
84 By default, rebase recreates the changesets in the source branch
85 as descendants of dest and then destroys the originals. Use
85 as descendants of dest and then destroys the originals. Use
86 ``--keep`` to preserve the original source changesets. Some
86 ``--keep`` to preserve the original source changesets. Some
87 changesets in the source branch (e.g. merges from the destination
87 changesets in the source branch (e.g. merges from the destination
88 branch) may be dropped if they no longer contribute any change.
88 branch) may be dropped if they no longer contribute any change.
89
89
90 One result of the rules for selecting the destination changeset
90 One result of the rules for selecting the destination changeset
91 and source branch is that, unlike ``merge``, rebase will do
91 and source branch is that, unlike ``merge``, rebase will do
92 nothing if you are at the latest (tipmost) head of a named branch
92 nothing if you are at the latest (tipmost) head of a named branch
93 with two heads. You need to explicitly specify source and/or
93 with two heads. You need to explicitly specify source and/or
94 destination (or ``update`` to the other head, if it's the head of
94 destination (or ``update`` to the other head, if it's the head of
95 the intended source branch).
95 the intended source branch).
96
96
97 If a rebase is interrupted to manually resolve a merge, it can be
97 If a rebase is interrupted to manually resolve a merge, it can be
98 continued with --continue/-c or aborted with --abort/-a.
98 continued with --continue/-c or aborted with --abort/-a.
99
99
100 Returns 0 on success, 1 if nothing to rebase.
100 Returns 0 on success, 1 if nothing to rebase.
101 """
101 """
102 originalwd = target = None
102 originalwd = target = None
103 external = nullrev
103 external = nullrev
104 state = {}
104 state = {}
105 skipped = set()
105 skipped = set()
106 targetancestors = set()
106 targetancestors = set()
107
107
108 lock = wlock = None
108 lock = wlock = None
109 try:
109 try:
110 lock = repo.lock()
110 lock = repo.lock()
111 wlock = repo.wlock()
111 wlock = repo.wlock()
112
112
113 # Validate input and define rebasing points
113 # Validate input and define rebasing points
114 destf = opts.get('dest', None)
114 destf = opts.get('dest', None)
115 srcf = opts.get('source', None)
115 srcf = opts.get('source', None)
116 basef = opts.get('base', None)
116 basef = opts.get('base', None)
117 contf = opts.get('continue')
117 contf = opts.get('continue')
118 abortf = opts.get('abort')
118 abortf = opts.get('abort')
119 collapsef = opts.get('collapse', False)
119 collapsef = opts.get('collapse', False)
120 collapsemsg = cmdutil.logmessage(ui, opts)
120 collapsemsg = cmdutil.logmessage(ui, opts)
121 extrafn = opts.get('extrafn') # internal, used by e.g. hgsubversion
121 extrafn = opts.get('extrafn') # internal, used by e.g. hgsubversion
122 keepf = opts.get('keep', False)
122 keepf = opts.get('keep', False)
123 keepbranchesf = opts.get('keepbranches', False)
123 keepbranchesf = opts.get('keepbranches', False)
124 detachf = opts.get('detach', False)
124 detachf = opts.get('detach', False)
125 # keepopen is not meant for use on the command line, but by
125 # keepopen is not meant for use on the command line, but by
126 # other extensions
126 # other extensions
127 keepopen = opts.get('keepopen', False)
127 keepopen = opts.get('keepopen', False)
128
128
129 if collapsemsg and not collapsef:
129 if collapsemsg and not collapsef:
130 raise util.Abort(
130 raise util.Abort(
131 _('message can only be specified with collapse'))
131 _('message can only be specified with collapse'))
132
132
133 if contf or abortf:
133 if contf or abortf:
134 if contf and abortf:
134 if contf and abortf:
135 raise util.Abort(_('cannot use both abort and continue'))
135 raise util.Abort(_('cannot use both abort and continue'))
136 if collapsef:
136 if collapsef:
137 raise util.Abort(
137 raise util.Abort(
138 _('cannot use collapse with continue or abort'))
138 _('cannot use collapse with continue or abort'))
139 if detachf:
139 if detachf:
140 raise util.Abort(_('cannot use detach with continue or abort'))
140 raise util.Abort(_('cannot use detach with continue or abort'))
141 if srcf or basef or destf:
141 if srcf or basef or destf:
142 raise util.Abort(
142 raise util.Abort(
143 _('abort and continue do not allow specifying revisions'))
143 _('abort and continue do not allow specifying revisions'))
144 if opts.get('tool', False):
144 if opts.get('tool', False):
145 ui.warn(_('tool option will be ignored\n'))
145 ui.warn(_('tool option will be ignored\n'))
146
146
147 (originalwd, target, state, skipped, collapsef, keepf,
147 (originalwd, target, state, skipped, collapsef, keepf,
148 keepbranchesf, external) = restorestatus(repo)
148 keepbranchesf, external) = restorestatus(repo)
149 if abortf:
149 if abortf:
150 return abort(repo, originalwd, target, state)
150 return abort(repo, originalwd, target, state)
151 else:
151 else:
152 if srcf and basef:
152 if srcf and basef:
153 raise util.Abort(_('cannot specify both a '
153 raise util.Abort(_('cannot specify both a '
154 'revision and a base'))
154 'revision and a base'))
155 if detachf:
155 if detachf:
156 if not srcf:
156 if not srcf:
157 raise util.Abort(
157 raise util.Abort(
158 _('detach requires a revision to be specified'))
158 _('detach requires a revision to be specified'))
159 if basef:
159 if basef:
160 raise util.Abort(_('cannot specify a base with detach'))
160 raise util.Abort(_('cannot specify a base with detach'))
161
161
162 cmdutil.bailifchanged(repo)
162 cmdutil.bailifchanged(repo)
163 result = buildstate(repo, destf, srcf, basef, detachf)
163 result = buildstate(repo, destf, srcf, basef, detachf)
164 if not result:
164 if not result:
165 # Empty state built, nothing to rebase
165 # Empty state built, nothing to rebase
166 ui.status(_('nothing to rebase\n'))
166 ui.status(_('nothing to rebase\n'))
167 return 1
167 return 1
168 else:
168 else:
169 originalwd, target, state = result
169 originalwd, target, state = result
170 if collapsef:
170 if collapsef:
171 targetancestors = set(repo.changelog.ancestors(target))
171 targetancestors = set(repo.changelog.ancestors(target))
172 external = checkexternal(repo, state, targetancestors)
172 external = checkexternal(repo, state, targetancestors)
173
173
174 if keepbranchesf:
174 if keepbranchesf:
175 assert not extrafn, 'cannot use both keepbranches and extrafn'
175 assert not extrafn, 'cannot use both keepbranches and extrafn'
176 def extrafn(ctx, extra):
176 def extrafn(ctx, extra):
177 extra['branch'] = ctx.branch()
177 extra['branch'] = ctx.branch()
178 if collapsef:
178 if collapsef:
179 branches = set()
179 branches = set()
180 for rev in state:
180 for rev in state:
181 branches.add(repo[rev].branch())
181 branches.add(repo[rev].branch())
182 if len(branches) > 1:
182 if len(branches) > 1:
183 raise util.Abort(_('cannot collapse multiple named '
183 raise util.Abort(_('cannot collapse multiple named '
184 'branches'))
184 'branches'))
185
185
186
186
187 # Rebase
187 # Rebase
188 if not targetancestors:
188 if not targetancestors:
189 targetancestors = set(repo.changelog.ancestors(target))
189 targetancestors = set(repo.changelog.ancestors(target))
190 targetancestors.add(target)
190 targetancestors.add(target)
191
191
192 # Keep track of the current bookmarks in order to reset them later
192 # Keep track of the current bookmarks in order to reset them later
193 currentbookmarks = repo._bookmarks.copy()
193 currentbookmarks = repo._bookmarks.copy()
194
194
195 sortedstate = sorted(state)
195 sortedstate = sorted(state)
196 total = len(sortedstate)
196 total = len(sortedstate)
197 pos = 0
197 pos = 0
198 for rev in sortedstate:
198 for rev in sortedstate:
199 pos += 1
199 pos += 1
200 if state[rev] == -1:
200 if state[rev] == -1:
201 ui.progress(_("rebasing"), pos, ("%d:%s" % (rev, repo[rev])),
201 ui.progress(_("rebasing"), pos, ("%d:%s" % (rev, repo[rev])),
202 _('changesets'), total)
202 _('changesets'), total)
203 storestatus(repo, originalwd, target, state, collapsef, keepf,
203 storestatus(repo, originalwd, target, state, collapsef, keepf,
204 keepbranchesf, external)
204 keepbranchesf, external)
205 p1, p2 = defineparents(repo, rev, target, state,
205 p1, p2 = defineparents(repo, rev, target, state,
206 targetancestors)
206 targetancestors)
207 if len(repo.parents()) == 2:
207 if len(repo.parents()) == 2:
208 repo.ui.debug('resuming interrupted rebase\n')
208 repo.ui.debug('resuming interrupted rebase\n')
209 else:
209 else:
210 try:
210 try:
211 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
211 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
212 stats = rebasenode(repo, rev, p1, state)
212 stats = rebasenode(repo, rev, p1, state)
213 if stats and stats[3] > 0:
213 if stats and stats[3] > 0:
214 raise util.Abort(_('unresolved conflicts (see hg '
214 raise util.Abort(_('unresolved conflicts (see hg '
215 'resolve, then hg rebase --continue)'))
215 'resolve, then hg rebase --continue)'))
216 finally:
216 finally:
217 ui.setconfig('ui', 'forcemerge', '')
217 ui.setconfig('ui', 'forcemerge', '')
218 updatedirstate(repo, rev, target, p2)
218 cmdutil.duplicatecopies(repo, rev, target, p2)
219 if not collapsef:
219 if not collapsef:
220 newrev = concludenode(repo, rev, p1, p2, extrafn=extrafn)
220 newrev = concludenode(repo, rev, p1, p2, extrafn=extrafn)
221 else:
221 else:
222 # Skip commit if we are collapsing
222 # Skip commit if we are collapsing
223 repo.dirstate.setparents(repo[p1].node())
223 repo.dirstate.setparents(repo[p1].node())
224 newrev = None
224 newrev = None
225 # Update the state
225 # Update the state
226 if newrev is not None:
226 if newrev is not None:
227 state[rev] = repo[newrev].rev()
227 state[rev] = repo[newrev].rev()
228 else:
228 else:
229 if not collapsef:
229 if not collapsef:
230 ui.note(_('no changes, revision %d skipped\n') % rev)
230 ui.note(_('no changes, revision %d skipped\n') % rev)
231 ui.debug('next revision set to %s\n' % p1)
231 ui.debug('next revision set to %s\n' % p1)
232 skipped.add(rev)
232 skipped.add(rev)
233 state[rev] = p1
233 state[rev] = p1
234
234
235 ui.progress(_('rebasing'), None)
235 ui.progress(_('rebasing'), None)
236 ui.note(_('rebase merging completed\n'))
236 ui.note(_('rebase merging completed\n'))
237
237
238 if collapsef and not keepopen:
238 if collapsef and not keepopen:
239 p1, p2 = defineparents(repo, min(state), target,
239 p1, p2 = defineparents(repo, min(state), target,
240 state, targetancestors)
240 state, targetancestors)
241 if collapsemsg:
241 if collapsemsg:
242 commitmsg = collapsemsg
242 commitmsg = collapsemsg
243 else:
243 else:
244 commitmsg = 'Collapsed revision'
244 commitmsg = 'Collapsed revision'
245 for rebased in state:
245 for rebased in state:
246 if rebased not in skipped and state[rebased] != nullmerge:
246 if rebased not in skipped and state[rebased] != nullmerge:
247 commitmsg += '\n* %s' % repo[rebased].description()
247 commitmsg += '\n* %s' % repo[rebased].description()
248 commitmsg = ui.edit(commitmsg, repo.ui.username())
248 commitmsg = ui.edit(commitmsg, repo.ui.username())
249 newrev = concludenode(repo, rev, p1, external, commitmsg=commitmsg,
249 newrev = concludenode(repo, rev, p1, external, commitmsg=commitmsg,
250 extrafn=extrafn)
250 extrafn=extrafn)
251
251
252 if 'qtip' in repo.tags():
252 if 'qtip' in repo.tags():
253 updatemq(repo, state, skipped, **opts)
253 updatemq(repo, state, skipped, **opts)
254
254
255 if currentbookmarks:
255 if currentbookmarks:
256 # Nodeids are needed to reset bookmarks
256 # Nodeids are needed to reset bookmarks
257 nstate = {}
257 nstate = {}
258 for k, v in state.iteritems():
258 for k, v in state.iteritems():
259 if v != nullmerge:
259 if v != nullmerge:
260 nstate[repo[k].node()] = repo[v].node()
260 nstate[repo[k].node()] = repo[v].node()
261
261
262 if not keepf:
262 if not keepf:
263 # Remove no more useful revisions
263 # Remove no more useful revisions
264 rebased = [rev for rev in state if state[rev] != nullmerge]
264 rebased = [rev for rev in state if state[rev] != nullmerge]
265 if rebased:
265 if rebased:
266 if set(repo.changelog.descendants(min(rebased))) - set(state):
266 if set(repo.changelog.descendants(min(rebased))) - set(state):
267 ui.warn(_("warning: new changesets detected "
267 ui.warn(_("warning: new changesets detected "
268 "on source branch, not stripping\n"))
268 "on source branch, not stripping\n"))
269 else:
269 else:
270 # backup the old csets by default
270 # backup the old csets by default
271 repair.strip(ui, repo, repo[min(rebased)].node(), "all")
271 repair.strip(ui, repo, repo[min(rebased)].node(), "all")
272
272
273 if currentbookmarks:
273 if currentbookmarks:
274 updatebookmarks(repo, nstate, currentbookmarks, **opts)
274 updatebookmarks(repo, nstate, currentbookmarks, **opts)
275
275
276 clearstatus(repo)
276 clearstatus(repo)
277 ui.note(_("rebase completed\n"))
277 ui.note(_("rebase completed\n"))
278 if os.path.exists(repo.sjoin('undo')):
278 if os.path.exists(repo.sjoin('undo')):
279 util.unlinkpath(repo.sjoin('undo'))
279 util.unlinkpath(repo.sjoin('undo'))
280 if skipped:
280 if skipped:
281 ui.note(_("%d revisions have been skipped\n") % len(skipped))
281 ui.note(_("%d revisions have been skipped\n") % len(skipped))
282 finally:
282 finally:
283 release(lock, wlock)
283 release(lock, wlock)
284
284
285 def checkexternal(repo, state, targetancestors):
285 def checkexternal(repo, state, targetancestors):
286 """Check whether one or more external revisions need to be taken in
286 """Check whether one or more external revisions need to be taken in
287 consideration. In the latter case, abort.
287 consideration. In the latter case, abort.
288 """
288 """
289 external = nullrev
289 external = nullrev
290 source = min(state)
290 source = min(state)
291 for rev in state:
291 for rev in state:
292 if rev == source:
292 if rev == source:
293 continue
293 continue
294 # Check externals and fail if there are more than one
294 # Check externals and fail if there are more than one
295 for p in repo[rev].parents():
295 for p in repo[rev].parents():
296 if (p.rev() not in state
296 if (p.rev() not in state
297 and p.rev() not in targetancestors):
297 and p.rev() not in targetancestors):
298 if external != nullrev:
298 if external != nullrev:
299 raise util.Abort(_('unable to collapse, there is more '
299 raise util.Abort(_('unable to collapse, there is more '
300 'than one external parent'))
300 'than one external parent'))
301 external = p.rev()
301 external = p.rev()
302 return external
302 return external
303
303
304 def updatedirstate(repo, rev, p1, p2):
305 """Keep track of renamed files in the revision that is going to be rebased
306 """
307 # Here we simulate the copies and renames in the source changeset
308 cop, diver = copies.copies(repo, repo[rev], repo[p1], repo[p2], True)
309 m1 = repo[rev].manifest()
310 m2 = repo[p1].manifest()
311 for k, v in cop.iteritems():
312 if k in m1:
313 if v in m1 or v in m2:
314 repo.dirstate.copy(v, k)
315 if v in m2 and v not in m1 and k in m2:
316 repo.dirstate.remove(v)
317
318 def concludenode(repo, rev, p1, p2, commitmsg=None, extrafn=None):
304 def concludenode(repo, rev, p1, p2, commitmsg=None, extrafn=None):
319 'Commit the changes and store useful information in extra'
305 'Commit the changes and store useful information in extra'
320 try:
306 try:
321 repo.dirstate.setparents(repo[p1].node(), repo[p2].node())
307 repo.dirstate.setparents(repo[p1].node(), repo[p2].node())
322 ctx = repo[rev]
308 ctx = repo[rev]
323 if commitmsg is None:
309 if commitmsg is None:
324 commitmsg = ctx.description()
310 commitmsg = ctx.description()
325 extra = {'rebase_source': ctx.hex()}
311 extra = {'rebase_source': ctx.hex()}
326 if extrafn:
312 if extrafn:
327 extrafn(ctx, extra)
313 extrafn(ctx, extra)
328 # Commit might fail if unresolved files exist
314 # Commit might fail if unresolved files exist
329 newrev = repo.commit(text=commitmsg, user=ctx.user(),
315 newrev = repo.commit(text=commitmsg, user=ctx.user(),
330 date=ctx.date(), extra=extra)
316 date=ctx.date(), extra=extra)
331 repo.dirstate.setbranch(repo[newrev].branch())
317 repo.dirstate.setbranch(repo[newrev].branch())
332 return newrev
318 return newrev
333 except util.Abort:
319 except util.Abort:
334 # Invalidate the previous setparents
320 # Invalidate the previous setparents
335 repo.dirstate.invalidate()
321 repo.dirstate.invalidate()
336 raise
322 raise
337
323
338 def rebasenode(repo, rev, p1, state):
324 def rebasenode(repo, rev, p1, state):
339 'Rebase a single revision'
325 'Rebase a single revision'
340 # Merge phase
326 # Merge phase
341 # Update to target and merge it with local
327 # Update to target and merge it with local
342 if repo['.'].rev() != repo[p1].rev():
328 if repo['.'].rev() != repo[p1].rev():
343 repo.ui.debug(" update to %d:%s\n" % (repo[p1].rev(), repo[p1]))
329 repo.ui.debug(" update to %d:%s\n" % (repo[p1].rev(), repo[p1]))
344 merge.update(repo, p1, False, True, False)
330 merge.update(repo, p1, False, True, False)
345 else:
331 else:
346 repo.ui.debug(" already in target\n")
332 repo.ui.debug(" already in target\n")
347 repo.dirstate.write()
333 repo.dirstate.write()
348 repo.ui.debug(" merge against %d:%s\n" % (repo[rev].rev(), repo[rev]))
334 repo.ui.debug(" merge against %d:%s\n" % (repo[rev].rev(), repo[rev]))
349 base = None
335 base = None
350 if repo[rev].rev() != repo[min(state)].rev():
336 if repo[rev].rev() != repo[min(state)].rev():
351 base = repo[rev].p1().node()
337 base = repo[rev].p1().node()
352 return merge.update(repo, rev, True, True, False, base)
338 return merge.update(repo, rev, True, True, False, base)
353
339
354 def defineparents(repo, rev, target, state, targetancestors):
340 def defineparents(repo, rev, target, state, targetancestors):
355 'Return the new parent relationship of the revision that will be rebased'
341 'Return the new parent relationship of the revision that will be rebased'
356 parents = repo[rev].parents()
342 parents = repo[rev].parents()
357 p1 = p2 = nullrev
343 p1 = p2 = nullrev
358
344
359 P1n = parents[0].rev()
345 P1n = parents[0].rev()
360 if P1n in targetancestors:
346 if P1n in targetancestors:
361 p1 = target
347 p1 = target
362 elif P1n in state:
348 elif P1n in state:
363 if state[P1n] == nullmerge:
349 if state[P1n] == nullmerge:
364 p1 = target
350 p1 = target
365 else:
351 else:
366 p1 = state[P1n]
352 p1 = state[P1n]
367 else: # P1n external
353 else: # P1n external
368 p1 = target
354 p1 = target
369 p2 = P1n
355 p2 = P1n
370
356
371 if len(parents) == 2 and parents[1].rev() not in targetancestors:
357 if len(parents) == 2 and parents[1].rev() not in targetancestors:
372 P2n = parents[1].rev()
358 P2n = parents[1].rev()
373 # interesting second parent
359 # interesting second parent
374 if P2n in state:
360 if P2n in state:
375 if p1 == target: # P1n in targetancestors or external
361 if p1 == target: # P1n in targetancestors or external
376 p1 = state[P2n]
362 p1 = state[P2n]
377 else:
363 else:
378 p2 = state[P2n]
364 p2 = state[P2n]
379 else: # P2n external
365 else: # P2n external
380 if p2 != nullrev: # P1n external too => rev is a merged revision
366 if p2 != nullrev: # P1n external too => rev is a merged revision
381 raise util.Abort(_('cannot use revision %d as base, result '
367 raise util.Abort(_('cannot use revision %d as base, result '
382 'would have 3 parents') % rev)
368 'would have 3 parents') % rev)
383 p2 = P2n
369 p2 = P2n
384 repo.ui.debug(" future parents are %d and %d\n" %
370 repo.ui.debug(" future parents are %d and %d\n" %
385 (repo[p1].rev(), repo[p2].rev()))
371 (repo[p1].rev(), repo[p2].rev()))
386 return p1, p2
372 return p1, p2
387
373
388 def isagitpatch(repo, patchname):
374 def isagitpatch(repo, patchname):
389 'Return true if the given patch is in git format'
375 'Return true if the given patch is in git format'
390 mqpatch = os.path.join(repo.mq.path, patchname)
376 mqpatch = os.path.join(repo.mq.path, patchname)
391 for line in patch.linereader(file(mqpatch, 'rb')):
377 for line in patch.linereader(file(mqpatch, 'rb')):
392 if line.startswith('diff --git'):
378 if line.startswith('diff --git'):
393 return True
379 return True
394 return False
380 return False
395
381
396 def updatemq(repo, state, skipped, **opts):
382 def updatemq(repo, state, skipped, **opts):
397 'Update rebased mq patches - finalize and then import them'
383 'Update rebased mq patches - finalize and then import them'
398 mqrebase = {}
384 mqrebase = {}
399 mq = repo.mq
385 mq = repo.mq
400 original_series = mq.fullseries[:]
386 original_series = mq.fullseries[:]
401
387
402 for p in mq.applied:
388 for p in mq.applied:
403 rev = repo[p.node].rev()
389 rev = repo[p.node].rev()
404 if rev in state:
390 if rev in state:
405 repo.ui.debug('revision %d is an mq patch (%s), finalize it.\n' %
391 repo.ui.debug('revision %d is an mq patch (%s), finalize it.\n' %
406 (rev, p.name))
392 (rev, p.name))
407 mqrebase[rev] = (p.name, isagitpatch(repo, p.name))
393 mqrebase[rev] = (p.name, isagitpatch(repo, p.name))
408
394
409 if mqrebase:
395 if mqrebase:
410 mq.finish(repo, mqrebase.keys())
396 mq.finish(repo, mqrebase.keys())
411
397
412 # We must start import from the newest revision
398 # We must start import from the newest revision
413 for rev in sorted(mqrebase, reverse=True):
399 for rev in sorted(mqrebase, reverse=True):
414 if rev not in skipped:
400 if rev not in skipped:
415 name, isgit = mqrebase[rev]
401 name, isgit = mqrebase[rev]
416 repo.ui.debug('import mq patch %d (%s)\n' % (state[rev], name))
402 repo.ui.debug('import mq patch %d (%s)\n' % (state[rev], name))
417 mq.qimport(repo, (), patchname=name, git=isgit,
403 mq.qimport(repo, (), patchname=name, git=isgit,
418 rev=[str(state[rev])])
404 rev=[str(state[rev])])
419
405
420 # restore old series to preserve guards
406 # restore old series to preserve guards
421 mq.fullseries = original_series
407 mq.fullseries = original_series
422 mq.series_dirty = True
408 mq.series_dirty = True
423 mq.savedirty()
409 mq.savedirty()
424
410
425 def updatebookmarks(repo, nstate, originalbookmarks, **opts):
411 def updatebookmarks(repo, nstate, originalbookmarks, **opts):
426 'Move bookmarks to their correct changesets'
412 'Move bookmarks to their correct changesets'
427 current = repo._bookmarkcurrent
413 current = repo._bookmarkcurrent
428 for k, v in originalbookmarks.iteritems():
414 for k, v in originalbookmarks.iteritems():
429 if v in nstate:
415 if v in nstate:
430 if nstate[v] != nullmerge:
416 if nstate[v] != nullmerge:
431 # reset the pointer if the bookmark was moved incorrectly
417 # reset the pointer if the bookmark was moved incorrectly
432 if k != current:
418 if k != current:
433 repo._bookmarks[k] = nstate[v]
419 repo._bookmarks[k] = nstate[v]
434
420
435 bookmarks.write(repo)
421 bookmarks.write(repo)
436
422
437 def storestatus(repo, originalwd, target, state, collapse, keep, keepbranches,
423 def storestatus(repo, originalwd, target, state, collapse, keep, keepbranches,
438 external):
424 external):
439 'Store the current status to allow recovery'
425 'Store the current status to allow recovery'
440 f = repo.opener("rebasestate", "w")
426 f = repo.opener("rebasestate", "w")
441 f.write(repo[originalwd].hex() + '\n')
427 f.write(repo[originalwd].hex() + '\n')
442 f.write(repo[target].hex() + '\n')
428 f.write(repo[target].hex() + '\n')
443 f.write(repo[external].hex() + '\n')
429 f.write(repo[external].hex() + '\n')
444 f.write('%d\n' % int(collapse))
430 f.write('%d\n' % int(collapse))
445 f.write('%d\n' % int(keep))
431 f.write('%d\n' % int(keep))
446 f.write('%d\n' % int(keepbranches))
432 f.write('%d\n' % int(keepbranches))
447 for d, v in state.iteritems():
433 for d, v in state.iteritems():
448 oldrev = repo[d].hex()
434 oldrev = repo[d].hex()
449 newrev = repo[v].hex()
435 newrev = repo[v].hex()
450 f.write("%s:%s\n" % (oldrev, newrev))
436 f.write("%s:%s\n" % (oldrev, newrev))
451 f.close()
437 f.close()
452 repo.ui.debug('rebase status stored\n')
438 repo.ui.debug('rebase status stored\n')
453
439
454 def clearstatus(repo):
440 def clearstatus(repo):
455 'Remove the status files'
441 'Remove the status files'
456 if os.path.exists(repo.join("rebasestate")):
442 if os.path.exists(repo.join("rebasestate")):
457 util.unlinkpath(repo.join("rebasestate"))
443 util.unlinkpath(repo.join("rebasestate"))
458
444
459 def restorestatus(repo):
445 def restorestatus(repo):
460 'Restore a previously stored status'
446 'Restore a previously stored status'
461 try:
447 try:
462 target = None
448 target = None
463 collapse = False
449 collapse = False
464 external = nullrev
450 external = nullrev
465 state = {}
451 state = {}
466 f = repo.opener("rebasestate")
452 f = repo.opener("rebasestate")
467 for i, l in enumerate(f.read().splitlines()):
453 for i, l in enumerate(f.read().splitlines()):
468 if i == 0:
454 if i == 0:
469 originalwd = repo[l].rev()
455 originalwd = repo[l].rev()
470 elif i == 1:
456 elif i == 1:
471 target = repo[l].rev()
457 target = repo[l].rev()
472 elif i == 2:
458 elif i == 2:
473 external = repo[l].rev()
459 external = repo[l].rev()
474 elif i == 3:
460 elif i == 3:
475 collapse = bool(int(l))
461 collapse = bool(int(l))
476 elif i == 4:
462 elif i == 4:
477 keep = bool(int(l))
463 keep = bool(int(l))
478 elif i == 5:
464 elif i == 5:
479 keepbranches = bool(int(l))
465 keepbranches = bool(int(l))
480 else:
466 else:
481 oldrev, newrev = l.split(':')
467 oldrev, newrev = l.split(':')
482 state[repo[oldrev].rev()] = repo[newrev].rev()
468 state[repo[oldrev].rev()] = repo[newrev].rev()
483 skipped = set()
469 skipped = set()
484 # recompute the set of skipped revs
470 # recompute the set of skipped revs
485 if not collapse:
471 if not collapse:
486 seen = set([target])
472 seen = set([target])
487 for old, new in sorted(state.items()):
473 for old, new in sorted(state.items()):
488 if new != nullrev and new in seen:
474 if new != nullrev and new in seen:
489 skipped.add(old)
475 skipped.add(old)
490 seen.add(new)
476 seen.add(new)
491 repo.ui.debug('computed skipped revs: %s\n' % skipped)
477 repo.ui.debug('computed skipped revs: %s\n' % skipped)
492 repo.ui.debug('rebase status resumed\n')
478 repo.ui.debug('rebase status resumed\n')
493 return (originalwd, target, state, skipped,
479 return (originalwd, target, state, skipped,
494 collapse, keep, keepbranches, external)
480 collapse, keep, keepbranches, external)
495 except IOError, err:
481 except IOError, err:
496 if err.errno != errno.ENOENT:
482 if err.errno != errno.ENOENT:
497 raise
483 raise
498 raise util.Abort(_('no rebase in progress'))
484 raise util.Abort(_('no rebase in progress'))
499
485
500 def abort(repo, originalwd, target, state):
486 def abort(repo, originalwd, target, state):
501 'Restore the repository to its original state'
487 'Restore the repository to its original state'
502 if set(repo.changelog.descendants(target)) - set(state.values()):
488 if set(repo.changelog.descendants(target)) - set(state.values()):
503 repo.ui.warn(_("warning: new changesets detected on target branch, "
489 repo.ui.warn(_("warning: new changesets detected on target branch, "
504 "can't abort\n"))
490 "can't abort\n"))
505 return -1
491 return -1
506 else:
492 else:
507 # Strip from the first rebased revision
493 # Strip from the first rebased revision
508 merge.update(repo, repo[originalwd].rev(), False, True, False)
494 merge.update(repo, repo[originalwd].rev(), False, True, False)
509 rebased = filter(lambda x: x > -1 and x != target, state.values())
495 rebased = filter(lambda x: x > -1 and x != target, state.values())
510 if rebased:
496 if rebased:
511 strippoint = min(rebased)
497 strippoint = min(rebased)
512 # no backup of rebased cset versions needed
498 # no backup of rebased cset versions needed
513 repair.strip(repo.ui, repo, repo[strippoint].node())
499 repair.strip(repo.ui, repo, repo[strippoint].node())
514 clearstatus(repo)
500 clearstatus(repo)
515 repo.ui.warn(_('rebase aborted\n'))
501 repo.ui.warn(_('rebase aborted\n'))
516 return 0
502 return 0
517
503
518 def buildstate(repo, dest, src, base, detach):
504 def buildstate(repo, dest, src, base, detach):
519 'Define which revisions are going to be rebased and where'
505 'Define which revisions are going to be rebased and where'
520 targetancestors = set()
506 targetancestors = set()
521 detachset = set()
507 detachset = set()
522
508
523 if not dest:
509 if not dest:
524 # Destination defaults to the latest revision in the current branch
510 # Destination defaults to the latest revision in the current branch
525 branch = repo[None].branch()
511 branch = repo[None].branch()
526 dest = repo[branch].rev()
512 dest = repo[branch].rev()
527 else:
513 else:
528 dest = repo[dest].rev()
514 dest = repo[dest].rev()
529
515
530 # This check isn't strictly necessary, since mq detects commits over an
516 # This check isn't strictly necessary, since mq detects commits over an
531 # applied patch. But it prevents messing up the working directory when
517 # applied patch. But it prevents messing up the working directory when
532 # a partially completed rebase is blocked by mq.
518 # a partially completed rebase is blocked by mq.
533 if 'qtip' in repo.tags() and (repo[dest].node() in
519 if 'qtip' in repo.tags() and (repo[dest].node() in
534 [s.node for s in repo.mq.applied]):
520 [s.node for s in repo.mq.applied]):
535 raise util.Abort(_('cannot rebase onto an applied mq patch'))
521 raise util.Abort(_('cannot rebase onto an applied mq patch'))
536
522
537 if src:
523 if src:
538 commonbase = repo[src].ancestor(repo[dest])
524 commonbase = repo[src].ancestor(repo[dest])
539 if commonbase == repo[src]:
525 if commonbase == repo[src]:
540 raise util.Abort(_('source is ancestor of destination'))
526 raise util.Abort(_('source is ancestor of destination'))
541 if commonbase == repo[dest]:
527 if commonbase == repo[dest]:
542 samebranch = repo[src].branch() == repo[dest].branch()
528 samebranch = repo[src].branch() == repo[dest].branch()
543 if samebranch and repo[src] in repo[dest].children():
529 if samebranch and repo[src] in repo[dest].children():
544 raise util.Abort(_('source is a child of destination'))
530 raise util.Abort(_('source is a child of destination'))
545 # rebase on ancestor, force detach
531 # rebase on ancestor, force detach
546 detach = True
532 detach = True
547 source = repo[src].rev()
533 source = repo[src].rev()
548 if detach:
534 if detach:
549 # We need to keep track of source's ancestors up to the common base
535 # We need to keep track of source's ancestors up to the common base
550 srcancestors = set(repo.changelog.ancestors(source))
536 srcancestors = set(repo.changelog.ancestors(source))
551 baseancestors = set(repo.changelog.ancestors(commonbase.rev()))
537 baseancestors = set(repo.changelog.ancestors(commonbase.rev()))
552 detachset = srcancestors - baseancestors
538 detachset = srcancestors - baseancestors
553 detachset.discard(commonbase.rev())
539 detachset.discard(commonbase.rev())
554 else:
540 else:
555 if base:
541 if base:
556 cwd = repo[base].rev()
542 cwd = repo[base].rev()
557 else:
543 else:
558 cwd = repo['.'].rev()
544 cwd = repo['.'].rev()
559
545
560 if cwd == dest:
546 if cwd == dest:
561 repo.ui.debug('source and destination are the same\n')
547 repo.ui.debug('source and destination are the same\n')
562 return None
548 return None
563
549
564 targetancestors = set(repo.changelog.ancestors(dest))
550 targetancestors = set(repo.changelog.ancestors(dest))
565 if cwd in targetancestors:
551 if cwd in targetancestors:
566 repo.ui.debug('source is ancestor of destination\n')
552 repo.ui.debug('source is ancestor of destination\n')
567 return None
553 return None
568
554
569 cwdancestors = set(repo.changelog.ancestors(cwd))
555 cwdancestors = set(repo.changelog.ancestors(cwd))
570 if dest in cwdancestors:
556 if dest in cwdancestors:
571 repo.ui.debug('source is descendant of destination\n')
557 repo.ui.debug('source is descendant of destination\n')
572 return None
558 return None
573
559
574 cwdancestors.add(cwd)
560 cwdancestors.add(cwd)
575 rebasingbranch = cwdancestors - targetancestors
561 rebasingbranch = cwdancestors - targetancestors
576 source = min(rebasingbranch)
562 source = min(rebasingbranch)
577
563
578 repo.ui.debug('rebase onto %d starting from %d\n' % (dest, source))
564 repo.ui.debug('rebase onto %d starting from %d\n' % (dest, source))
579 state = dict.fromkeys(repo.changelog.descendants(source), nullrev)
565 state = dict.fromkeys(repo.changelog.descendants(source), nullrev)
580 state.update(dict.fromkeys(detachset, nullmerge))
566 state.update(dict.fromkeys(detachset, nullmerge))
581 state[source] = nullrev
567 state[source] = nullrev
582 return repo['.'].rev(), repo[dest].rev(), state
568 return repo['.'].rev(), repo[dest].rev(), state
583
569
584 def pullrebase(orig, ui, repo, *args, **opts):
570 def pullrebase(orig, ui, repo, *args, **opts):
585 'Call rebase after pull if the latter has been invoked with --rebase'
571 'Call rebase after pull if the latter has been invoked with --rebase'
586 if opts.get('rebase'):
572 if opts.get('rebase'):
587 if opts.get('update'):
573 if opts.get('update'):
588 del opts['update']
574 del opts['update']
589 ui.debug('--update and --rebase are not compatible, ignoring '
575 ui.debug('--update and --rebase are not compatible, ignoring '
590 'the update flag\n')
576 'the update flag\n')
591
577
592 cmdutil.bailifchanged(repo)
578 cmdutil.bailifchanged(repo)
593 revsprepull = len(repo)
579 revsprepull = len(repo)
594 origpostincoming = commands.postincoming
580 origpostincoming = commands.postincoming
595 def _dummy(*args, **kwargs):
581 def _dummy(*args, **kwargs):
596 pass
582 pass
597 commands.postincoming = _dummy
583 commands.postincoming = _dummy
598 try:
584 try:
599 orig(ui, repo, *args, **opts)
585 orig(ui, repo, *args, **opts)
600 finally:
586 finally:
601 commands.postincoming = origpostincoming
587 commands.postincoming = origpostincoming
602 revspostpull = len(repo)
588 revspostpull = len(repo)
603 if revspostpull > revsprepull:
589 if revspostpull > revsprepull:
604 rebase(ui, repo, **opts)
590 rebase(ui, repo, **opts)
605 branch = repo[None].branch()
591 branch = repo[None].branch()
606 dest = repo[branch].rev()
592 dest = repo[branch].rev()
607 if dest != repo['.'].rev():
593 if dest != repo['.'].rev():
608 # there was nothing to rebase we force an update
594 # there was nothing to rebase we force an update
609 hg.update(repo, dest)
595 hg.update(repo, dest)
610 else:
596 else:
611 if opts.get('tool'):
597 if opts.get('tool'):
612 raise util.Abort(_('--tool can only be used with --rebase'))
598 raise util.Abort(_('--tool can only be used with --rebase'))
613 orig(ui, repo, *args, **opts)
599 orig(ui, repo, *args, **opts)
614
600
615 def uisetup(ui):
601 def uisetup(ui):
616 'Replace pull with a decorator to provide --rebase option'
602 'Replace pull with a decorator to provide --rebase option'
617 entry = extensions.wrapcommand(commands.table, 'pull', pullrebase)
603 entry = extensions.wrapcommand(commands.table, 'pull', pullrebase)
618 entry[1].append(('', 'rebase', None,
604 entry[1].append(('', 'rebase', None,
619 _("rebase working directory to branch head")))
605 _("rebase working directory to branch head")))
620 entry[1].append(('t', 'tool', '',
606 entry[1].append(('t', 'tool', '',
621 _("specify merge tool for rebase")))
607 _("specify merge tool for rebase")))
@@ -1,1247 +1,1260 b''
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import hex, nullid, nullrev, short
8 from node import hex, nullid, nullrev, short
9 from i18n import _
9 from i18n import _
10 import os, sys, errno, re, tempfile
10 import os, sys, errno, re, tempfile
11 import util, scmutil, templater, patch, error, templatekw, revlog
11 import util, scmutil, templater, patch, error, templatekw, revlog, copies
12 import match as matchmod
12 import match as matchmod
13 import subrepo
13 import subrepo
14
14
15 def parsealiases(cmd):
15 def parsealiases(cmd):
16 return cmd.lstrip("^").split("|")
16 return cmd.lstrip("^").split("|")
17
17
18 def findpossible(cmd, table, strict=False):
18 def findpossible(cmd, table, strict=False):
19 """
19 """
20 Return cmd -> (aliases, command table entry)
20 Return cmd -> (aliases, command table entry)
21 for each matching command.
21 for each matching command.
22 Return debug commands (or their aliases) only if no normal command matches.
22 Return debug commands (or their aliases) only if no normal command matches.
23 """
23 """
24 choice = {}
24 choice = {}
25 debugchoice = {}
25 debugchoice = {}
26 for e in table.keys():
26 for e in table.keys():
27 aliases = parsealiases(e)
27 aliases = parsealiases(e)
28 found = None
28 found = None
29 if cmd in aliases:
29 if cmd in aliases:
30 found = cmd
30 found = cmd
31 elif not strict:
31 elif not strict:
32 for a in aliases:
32 for a in aliases:
33 if a.startswith(cmd):
33 if a.startswith(cmd):
34 found = a
34 found = a
35 break
35 break
36 if found is not None:
36 if found is not None:
37 if aliases[0].startswith("debug") or found.startswith("debug"):
37 if aliases[0].startswith("debug") or found.startswith("debug"):
38 debugchoice[found] = (aliases, table[e])
38 debugchoice[found] = (aliases, table[e])
39 else:
39 else:
40 choice[found] = (aliases, table[e])
40 choice[found] = (aliases, table[e])
41
41
42 if not choice and debugchoice:
42 if not choice and debugchoice:
43 choice = debugchoice
43 choice = debugchoice
44
44
45 return choice
45 return choice
46
46
47 def findcmd(cmd, table, strict=True):
47 def findcmd(cmd, table, strict=True):
48 """Return (aliases, command table entry) for command string."""
48 """Return (aliases, command table entry) for command string."""
49 choice = findpossible(cmd, table, strict)
49 choice = findpossible(cmd, table, strict)
50
50
51 if cmd in choice:
51 if cmd in choice:
52 return choice[cmd]
52 return choice[cmd]
53
53
54 if len(choice) > 1:
54 if len(choice) > 1:
55 clist = choice.keys()
55 clist = choice.keys()
56 clist.sort()
56 clist.sort()
57 raise error.AmbiguousCommand(cmd, clist)
57 raise error.AmbiguousCommand(cmd, clist)
58
58
59 if choice:
59 if choice:
60 return choice.values()[0]
60 return choice.values()[0]
61
61
62 raise error.UnknownCommand(cmd)
62 raise error.UnknownCommand(cmd)
63
63
64 def findrepo(p):
64 def findrepo(p):
65 while not os.path.isdir(os.path.join(p, ".hg")):
65 while not os.path.isdir(os.path.join(p, ".hg")):
66 oldp, p = p, os.path.dirname(p)
66 oldp, p = p, os.path.dirname(p)
67 if p == oldp:
67 if p == oldp:
68 return None
68 return None
69
69
70 return p
70 return p
71
71
72 def bailifchanged(repo):
72 def bailifchanged(repo):
73 if repo.dirstate.p2() != nullid:
73 if repo.dirstate.p2() != nullid:
74 raise util.Abort(_('outstanding uncommitted merge'))
74 raise util.Abort(_('outstanding uncommitted merge'))
75 modified, added, removed, deleted = repo.status()[:4]
75 modified, added, removed, deleted = repo.status()[:4]
76 if modified or added or removed or deleted:
76 if modified or added or removed or deleted:
77 raise util.Abort(_("outstanding uncommitted changes"))
77 raise util.Abort(_("outstanding uncommitted changes"))
78
78
79 def logmessage(ui, opts):
79 def logmessage(ui, opts):
80 """ get the log message according to -m and -l option """
80 """ get the log message according to -m and -l option """
81 message = opts.get('message')
81 message = opts.get('message')
82 logfile = opts.get('logfile')
82 logfile = opts.get('logfile')
83
83
84 if message and logfile:
84 if message and logfile:
85 raise util.Abort(_('options --message and --logfile are mutually '
85 raise util.Abort(_('options --message and --logfile are mutually '
86 'exclusive'))
86 'exclusive'))
87 if not message and logfile:
87 if not message and logfile:
88 try:
88 try:
89 if logfile == '-':
89 if logfile == '-':
90 message = ui.fin.read()
90 message = ui.fin.read()
91 else:
91 else:
92 message = '\n'.join(util.readfile(logfile).splitlines())
92 message = '\n'.join(util.readfile(logfile).splitlines())
93 except IOError, inst:
93 except IOError, inst:
94 raise util.Abort(_("can't read commit message '%s': %s") %
94 raise util.Abort(_("can't read commit message '%s': %s") %
95 (logfile, inst.strerror))
95 (logfile, inst.strerror))
96 return message
96 return message
97
97
98 def loglimit(opts):
98 def loglimit(opts):
99 """get the log limit according to option -l/--limit"""
99 """get the log limit according to option -l/--limit"""
100 limit = opts.get('limit')
100 limit = opts.get('limit')
101 if limit:
101 if limit:
102 try:
102 try:
103 limit = int(limit)
103 limit = int(limit)
104 except ValueError:
104 except ValueError:
105 raise util.Abort(_('limit must be a positive integer'))
105 raise util.Abort(_('limit must be a positive integer'))
106 if limit <= 0:
106 if limit <= 0:
107 raise util.Abort(_('limit must be positive'))
107 raise util.Abort(_('limit must be positive'))
108 else:
108 else:
109 limit = None
109 limit = None
110 return limit
110 return limit
111
111
112 def makefilename(repo, pat, node, desc=None,
112 def makefilename(repo, pat, node, desc=None,
113 total=None, seqno=None, revwidth=None, pathname=None):
113 total=None, seqno=None, revwidth=None, pathname=None):
114 node_expander = {
114 node_expander = {
115 'H': lambda: hex(node),
115 'H': lambda: hex(node),
116 'R': lambda: str(repo.changelog.rev(node)),
116 'R': lambda: str(repo.changelog.rev(node)),
117 'h': lambda: short(node),
117 'h': lambda: short(node),
118 'm': lambda: re.sub('[^\w]', '_', str(desc))
118 'm': lambda: re.sub('[^\w]', '_', str(desc))
119 }
119 }
120 expander = {
120 expander = {
121 '%': lambda: '%',
121 '%': lambda: '%',
122 'b': lambda: os.path.basename(repo.root),
122 'b': lambda: os.path.basename(repo.root),
123 }
123 }
124
124
125 try:
125 try:
126 if node:
126 if node:
127 expander.update(node_expander)
127 expander.update(node_expander)
128 if node:
128 if node:
129 expander['r'] = (lambda:
129 expander['r'] = (lambda:
130 str(repo.changelog.rev(node)).zfill(revwidth or 0))
130 str(repo.changelog.rev(node)).zfill(revwidth or 0))
131 if total is not None:
131 if total is not None:
132 expander['N'] = lambda: str(total)
132 expander['N'] = lambda: str(total)
133 if seqno is not None:
133 if seqno is not None:
134 expander['n'] = lambda: str(seqno)
134 expander['n'] = lambda: str(seqno)
135 if total is not None and seqno is not None:
135 if total is not None and seqno is not None:
136 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
136 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
137 if pathname is not None:
137 if pathname is not None:
138 expander['s'] = lambda: os.path.basename(pathname)
138 expander['s'] = lambda: os.path.basename(pathname)
139 expander['d'] = lambda: os.path.dirname(pathname) or '.'
139 expander['d'] = lambda: os.path.dirname(pathname) or '.'
140 expander['p'] = lambda: pathname
140 expander['p'] = lambda: pathname
141
141
142 newname = []
142 newname = []
143 patlen = len(pat)
143 patlen = len(pat)
144 i = 0
144 i = 0
145 while i < patlen:
145 while i < patlen:
146 c = pat[i]
146 c = pat[i]
147 if c == '%':
147 if c == '%':
148 i += 1
148 i += 1
149 c = pat[i]
149 c = pat[i]
150 c = expander[c]()
150 c = expander[c]()
151 newname.append(c)
151 newname.append(c)
152 i += 1
152 i += 1
153 return ''.join(newname)
153 return ''.join(newname)
154 except KeyError, inst:
154 except KeyError, inst:
155 raise util.Abort(_("invalid format spec '%%%s' in output filename") %
155 raise util.Abort(_("invalid format spec '%%%s' in output filename") %
156 inst.args[0])
156 inst.args[0])
157
157
158 def makefileobj(repo, pat, node=None, desc=None, total=None,
158 def makefileobj(repo, pat, node=None, desc=None, total=None,
159 seqno=None, revwidth=None, mode='wb', pathname=None):
159 seqno=None, revwidth=None, mode='wb', pathname=None):
160
160
161 writable = mode not in ('r', 'rb')
161 writable = mode not in ('r', 'rb')
162
162
163 if not pat or pat == '-':
163 if not pat or pat == '-':
164 fp = writable and repo.ui.fout or repo.ui.fin
164 fp = writable and repo.ui.fout or repo.ui.fin
165 if util.safehasattr(fp, 'fileno'):
165 if util.safehasattr(fp, 'fileno'):
166 return os.fdopen(os.dup(fp.fileno()), mode)
166 return os.fdopen(os.dup(fp.fileno()), mode)
167 else:
167 else:
168 # if this fp can't be duped properly, return
168 # if this fp can't be duped properly, return
169 # a dummy object that can be closed
169 # a dummy object that can be closed
170 class wrappedfileobj(object):
170 class wrappedfileobj(object):
171 noop = lambda x: None
171 noop = lambda x: None
172 def __init__(self, f):
172 def __init__(self, f):
173 self.f = f
173 self.f = f
174 def __getattr__(self, attr):
174 def __getattr__(self, attr):
175 if attr == 'close':
175 if attr == 'close':
176 return self.noop
176 return self.noop
177 else:
177 else:
178 return getattr(self.f, attr)
178 return getattr(self.f, attr)
179
179
180 return wrappedfileobj(fp)
180 return wrappedfileobj(fp)
181 if util.safehasattr(pat, 'write') and writable:
181 if util.safehasattr(pat, 'write') and writable:
182 return pat
182 return pat
183 if util.safehasattr(pat, 'read') and 'r' in mode:
183 if util.safehasattr(pat, 'read') and 'r' in mode:
184 return pat
184 return pat
185 return open(makefilename(repo, pat, node, desc, total, seqno, revwidth,
185 return open(makefilename(repo, pat, node, desc, total, seqno, revwidth,
186 pathname),
186 pathname),
187 mode)
187 mode)
188
188
189 def openrevlog(repo, cmd, file_, opts):
189 def openrevlog(repo, cmd, file_, opts):
190 """opens the changelog, manifest, a filelog or a given revlog"""
190 """opens the changelog, manifest, a filelog or a given revlog"""
191 cl = opts['changelog']
191 cl = opts['changelog']
192 mf = opts['manifest']
192 mf = opts['manifest']
193 msg = None
193 msg = None
194 if cl and mf:
194 if cl and mf:
195 msg = _('cannot specify --changelog and --manifest at the same time')
195 msg = _('cannot specify --changelog and --manifest at the same time')
196 elif cl or mf:
196 elif cl or mf:
197 if file_:
197 if file_:
198 msg = _('cannot specify filename with --changelog or --manifest')
198 msg = _('cannot specify filename with --changelog or --manifest')
199 elif not repo:
199 elif not repo:
200 msg = _('cannot specify --changelog or --manifest '
200 msg = _('cannot specify --changelog or --manifest '
201 'without a repository')
201 'without a repository')
202 if msg:
202 if msg:
203 raise util.Abort(msg)
203 raise util.Abort(msg)
204
204
205 r = None
205 r = None
206 if repo:
206 if repo:
207 if cl:
207 if cl:
208 r = repo.changelog
208 r = repo.changelog
209 elif mf:
209 elif mf:
210 r = repo.manifest
210 r = repo.manifest
211 elif file_:
211 elif file_:
212 filelog = repo.file(file_)
212 filelog = repo.file(file_)
213 if len(filelog):
213 if len(filelog):
214 r = filelog
214 r = filelog
215 if not r:
215 if not r:
216 if not file_:
216 if not file_:
217 raise error.CommandError(cmd, _('invalid arguments'))
217 raise error.CommandError(cmd, _('invalid arguments'))
218 if not os.path.isfile(file_):
218 if not os.path.isfile(file_):
219 raise util.Abort(_("revlog '%s' not found") % file_)
219 raise util.Abort(_("revlog '%s' not found") % file_)
220 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False),
220 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False),
221 file_[:-2] + ".i")
221 file_[:-2] + ".i")
222 return r
222 return r
223
223
224 def copy(ui, repo, pats, opts, rename=False):
224 def copy(ui, repo, pats, opts, rename=False):
225 # called with the repo lock held
225 # called with the repo lock held
226 #
226 #
227 # hgsep => pathname that uses "/" to separate directories
227 # hgsep => pathname that uses "/" to separate directories
228 # ossep => pathname that uses os.sep to separate directories
228 # ossep => pathname that uses os.sep to separate directories
229 cwd = repo.getcwd()
229 cwd = repo.getcwd()
230 targets = {}
230 targets = {}
231 after = opts.get("after")
231 after = opts.get("after")
232 dryrun = opts.get("dry_run")
232 dryrun = opts.get("dry_run")
233 wctx = repo[None]
233 wctx = repo[None]
234
234
235 def walkpat(pat):
235 def walkpat(pat):
236 srcs = []
236 srcs = []
237 badstates = after and '?' or '?r'
237 badstates = after and '?' or '?r'
238 m = scmutil.match(repo[None], [pat], opts, globbed=True)
238 m = scmutil.match(repo[None], [pat], opts, globbed=True)
239 for abs in repo.walk(m):
239 for abs in repo.walk(m):
240 state = repo.dirstate[abs]
240 state = repo.dirstate[abs]
241 rel = m.rel(abs)
241 rel = m.rel(abs)
242 exact = m.exact(abs)
242 exact = m.exact(abs)
243 if state in badstates:
243 if state in badstates:
244 if exact and state == '?':
244 if exact and state == '?':
245 ui.warn(_('%s: not copying - file is not managed\n') % rel)
245 ui.warn(_('%s: not copying - file is not managed\n') % rel)
246 if exact and state == 'r':
246 if exact and state == 'r':
247 ui.warn(_('%s: not copying - file has been marked for'
247 ui.warn(_('%s: not copying - file has been marked for'
248 ' remove\n') % rel)
248 ' remove\n') % rel)
249 continue
249 continue
250 # abs: hgsep
250 # abs: hgsep
251 # rel: ossep
251 # rel: ossep
252 srcs.append((abs, rel, exact))
252 srcs.append((abs, rel, exact))
253 return srcs
253 return srcs
254
254
255 # abssrc: hgsep
255 # abssrc: hgsep
256 # relsrc: ossep
256 # relsrc: ossep
257 # otarget: ossep
257 # otarget: ossep
258 def copyfile(abssrc, relsrc, otarget, exact):
258 def copyfile(abssrc, relsrc, otarget, exact):
259 abstarget = scmutil.canonpath(repo.root, cwd, otarget)
259 abstarget = scmutil.canonpath(repo.root, cwd, otarget)
260 reltarget = repo.pathto(abstarget, cwd)
260 reltarget = repo.pathto(abstarget, cwd)
261 target = repo.wjoin(abstarget)
261 target = repo.wjoin(abstarget)
262 src = repo.wjoin(abssrc)
262 src = repo.wjoin(abssrc)
263 state = repo.dirstate[abstarget]
263 state = repo.dirstate[abstarget]
264
264
265 scmutil.checkportable(ui, abstarget)
265 scmutil.checkportable(ui, abstarget)
266
266
267 # check for collisions
267 # check for collisions
268 prevsrc = targets.get(abstarget)
268 prevsrc = targets.get(abstarget)
269 if prevsrc is not None:
269 if prevsrc is not None:
270 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
270 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
271 (reltarget, repo.pathto(abssrc, cwd),
271 (reltarget, repo.pathto(abssrc, cwd),
272 repo.pathto(prevsrc, cwd)))
272 repo.pathto(prevsrc, cwd)))
273 return
273 return
274
274
275 # check for overwrites
275 # check for overwrites
276 exists = os.path.lexists(target)
276 exists = os.path.lexists(target)
277 if not after and exists or after and state in 'mn':
277 if not after and exists or after and state in 'mn':
278 if not opts['force']:
278 if not opts['force']:
279 ui.warn(_('%s: not overwriting - file exists\n') %
279 ui.warn(_('%s: not overwriting - file exists\n') %
280 reltarget)
280 reltarget)
281 return
281 return
282
282
283 if after:
283 if after:
284 if not exists:
284 if not exists:
285 if rename:
285 if rename:
286 ui.warn(_('%s: not recording move - %s does not exist\n') %
286 ui.warn(_('%s: not recording move - %s does not exist\n') %
287 (relsrc, reltarget))
287 (relsrc, reltarget))
288 else:
288 else:
289 ui.warn(_('%s: not recording copy - %s does not exist\n') %
289 ui.warn(_('%s: not recording copy - %s does not exist\n') %
290 (relsrc, reltarget))
290 (relsrc, reltarget))
291 return
291 return
292 elif not dryrun:
292 elif not dryrun:
293 try:
293 try:
294 if exists:
294 if exists:
295 os.unlink(target)
295 os.unlink(target)
296 targetdir = os.path.dirname(target) or '.'
296 targetdir = os.path.dirname(target) or '.'
297 if not os.path.isdir(targetdir):
297 if not os.path.isdir(targetdir):
298 os.makedirs(targetdir)
298 os.makedirs(targetdir)
299 util.copyfile(src, target)
299 util.copyfile(src, target)
300 srcexists = True
300 srcexists = True
301 except IOError, inst:
301 except IOError, inst:
302 if inst.errno == errno.ENOENT:
302 if inst.errno == errno.ENOENT:
303 ui.warn(_('%s: deleted in working copy\n') % relsrc)
303 ui.warn(_('%s: deleted in working copy\n') % relsrc)
304 srcexists = False
304 srcexists = False
305 else:
305 else:
306 ui.warn(_('%s: cannot copy - %s\n') %
306 ui.warn(_('%s: cannot copy - %s\n') %
307 (relsrc, inst.strerror))
307 (relsrc, inst.strerror))
308 return True # report a failure
308 return True # report a failure
309
309
310 if ui.verbose or not exact:
310 if ui.verbose or not exact:
311 if rename:
311 if rename:
312 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
312 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
313 else:
313 else:
314 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
314 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
315
315
316 targets[abstarget] = abssrc
316 targets[abstarget] = abssrc
317
317
318 # fix up dirstate
318 # fix up dirstate
319 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
319 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
320 dryrun=dryrun, cwd=cwd)
320 dryrun=dryrun, cwd=cwd)
321 if rename and not dryrun:
321 if rename and not dryrun:
322 if not after and srcexists:
322 if not after and srcexists:
323 util.unlinkpath(repo.wjoin(abssrc))
323 util.unlinkpath(repo.wjoin(abssrc))
324 wctx.forget([abssrc])
324 wctx.forget([abssrc])
325
325
326 # pat: ossep
326 # pat: ossep
327 # dest: ossep
327 # dest: ossep
328 # srcs: list of (hgsep, hgsep, ossep, bool)
328 # srcs: list of (hgsep, hgsep, ossep, bool)
329 # return: function that takes hgsep and returns ossep
329 # return: function that takes hgsep and returns ossep
330 def targetpathfn(pat, dest, srcs):
330 def targetpathfn(pat, dest, srcs):
331 if os.path.isdir(pat):
331 if os.path.isdir(pat):
332 abspfx = scmutil.canonpath(repo.root, cwd, pat)
332 abspfx = scmutil.canonpath(repo.root, cwd, pat)
333 abspfx = util.localpath(abspfx)
333 abspfx = util.localpath(abspfx)
334 if destdirexists:
334 if destdirexists:
335 striplen = len(os.path.split(abspfx)[0])
335 striplen = len(os.path.split(abspfx)[0])
336 else:
336 else:
337 striplen = len(abspfx)
337 striplen = len(abspfx)
338 if striplen:
338 if striplen:
339 striplen += len(os.sep)
339 striplen += len(os.sep)
340 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
340 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
341 elif destdirexists:
341 elif destdirexists:
342 res = lambda p: os.path.join(dest,
342 res = lambda p: os.path.join(dest,
343 os.path.basename(util.localpath(p)))
343 os.path.basename(util.localpath(p)))
344 else:
344 else:
345 res = lambda p: dest
345 res = lambda p: dest
346 return res
346 return res
347
347
348 # pat: ossep
348 # pat: ossep
349 # dest: ossep
349 # dest: ossep
350 # srcs: list of (hgsep, hgsep, ossep, bool)
350 # srcs: list of (hgsep, hgsep, ossep, bool)
351 # return: function that takes hgsep and returns ossep
351 # return: function that takes hgsep and returns ossep
352 def targetpathafterfn(pat, dest, srcs):
352 def targetpathafterfn(pat, dest, srcs):
353 if matchmod.patkind(pat):
353 if matchmod.patkind(pat):
354 # a mercurial pattern
354 # a mercurial pattern
355 res = lambda p: os.path.join(dest,
355 res = lambda p: os.path.join(dest,
356 os.path.basename(util.localpath(p)))
356 os.path.basename(util.localpath(p)))
357 else:
357 else:
358 abspfx = scmutil.canonpath(repo.root, cwd, pat)
358 abspfx = scmutil.canonpath(repo.root, cwd, pat)
359 if len(abspfx) < len(srcs[0][0]):
359 if len(abspfx) < len(srcs[0][0]):
360 # A directory. Either the target path contains the last
360 # A directory. Either the target path contains the last
361 # component of the source path or it does not.
361 # component of the source path or it does not.
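                # editor-added worked example (hypothetical names): copying
                # directory 'src' into an existing 'dst' with --after.  If the
                # files already live at dst/src/a.txt, stripping only the
                # parent of 'src' scores higher and copies are recorded under
                # dst/src/; if they live at dst/a.txt, 'src' itself is
                # stripped instead.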
362 def evalpath(striplen):
362 def evalpath(striplen):
363 score = 0
363 score = 0
364 for s in srcs:
364 for s in srcs:
365 t = os.path.join(dest, util.localpath(s[0])[striplen:])
365 t = os.path.join(dest, util.localpath(s[0])[striplen:])
366 if os.path.lexists(t):
366 if os.path.lexists(t):
367 score += 1
367 score += 1
368 return score
368 return score
369
369
370 abspfx = util.localpath(abspfx)
370 abspfx = util.localpath(abspfx)
371 striplen = len(abspfx)
371 striplen = len(abspfx)
372 if striplen:
372 if striplen:
373 striplen += len(os.sep)
373 striplen += len(os.sep)
374 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
374 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
375 score = evalpath(striplen)
375 score = evalpath(striplen)
376 striplen1 = len(os.path.split(abspfx)[0])
376 striplen1 = len(os.path.split(abspfx)[0])
377 if striplen1:
377 if striplen1:
378 striplen1 += len(os.sep)
378 striplen1 += len(os.sep)
379 if evalpath(striplen1) > score:
379 if evalpath(striplen1) > score:
380 striplen = striplen1
380 striplen = striplen1
381 res = lambda p: os.path.join(dest,
381 res = lambda p: os.path.join(dest,
382 util.localpath(p)[striplen:])
382 util.localpath(p)[striplen:])
383 else:
383 else:
384 # a file
384 # a file
385 if destdirexists:
385 if destdirexists:
386 res = lambda p: os.path.join(dest,
386 res = lambda p: os.path.join(dest,
387 os.path.basename(util.localpath(p)))
387 os.path.basename(util.localpath(p)))
388 else:
388 else:
389 res = lambda p: dest
389 res = lambda p: dest
390 return res
390 return res
391
391
392
392
393 pats = scmutil.expandpats(pats)
393 pats = scmutil.expandpats(pats)
394 if not pats:
394 if not pats:
395 raise util.Abort(_('no source or destination specified'))
395 raise util.Abort(_('no source or destination specified'))
396 if len(pats) == 1:
396 if len(pats) == 1:
397 raise util.Abort(_('no destination specified'))
397 raise util.Abort(_('no destination specified'))
398 dest = pats.pop()
398 dest = pats.pop()
399 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
399 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
400 if not destdirexists:
400 if not destdirexists:
401 if len(pats) > 1 or matchmod.patkind(pats[0]):
401 if len(pats) > 1 or matchmod.patkind(pats[0]):
402 raise util.Abort(_('with multiple sources, destination must be an '
402 raise util.Abort(_('with multiple sources, destination must be an '
403 'existing directory'))
403 'existing directory'))
404 if util.endswithsep(dest):
404 if util.endswithsep(dest):
405 raise util.Abort(_('destination %s is not a directory') % dest)
405 raise util.Abort(_('destination %s is not a directory') % dest)
406
406
407 tfn = targetpathfn
407 tfn = targetpathfn
408 if after:
408 if after:
409 tfn = targetpathafterfn
409 tfn = targetpathafterfn
410 copylist = []
410 copylist = []
411 for pat in pats:
411 for pat in pats:
412 srcs = walkpat(pat)
412 srcs = walkpat(pat)
413 if not srcs:
413 if not srcs:
414 continue
414 continue
415 copylist.append((tfn(pat, dest, srcs), srcs))
415 copylist.append((tfn(pat, dest, srcs), srcs))
416 if not copylist:
416 if not copylist:
417 raise util.Abort(_('no files to copy'))
417 raise util.Abort(_('no files to copy'))
418
418
419 errors = 0
419 errors = 0
420 for targetpath, srcs in copylist:
420 for targetpath, srcs in copylist:
421 for abssrc, relsrc, exact in srcs:
421 for abssrc, relsrc, exact in srcs:
422 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
422 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
423 errors += 1
423 errors += 1
424
424
425 if errors:
425 if errors:
426 ui.warn(_('(consider using --after)\n'))
426 ui.warn(_('(consider using --after)\n'))
427
427
428 return errors != 0
428 return errors != 0
429
429
430 def service(opts, parentfn=None, initfn=None, runfn=None, logfile=None,
430 def service(opts, parentfn=None, initfn=None, runfn=None, logfile=None,
431 runargs=None, appendpid=False):
431 runargs=None, appendpid=False):
432 '''Run a command as a service.'''
432 '''Run a command as a service.'''
433
433
434 if opts['daemon'] and not opts['daemon_pipefds']:
434 if opts['daemon'] and not opts['daemon_pipefds']:
435 # Signal child process startup with file removal
435 # Signal child process startup with file removal
436 lockfd, lockpath = tempfile.mkstemp(prefix='hg-service-')
436 lockfd, lockpath = tempfile.mkstemp(prefix='hg-service-')
437 os.close(lockfd)
437 os.close(lockfd)
438 try:
438 try:
439 if not runargs:
439 if not runargs:
440 runargs = util.hgcmd() + sys.argv[1:]
440 runargs = util.hgcmd() + sys.argv[1:]
441 runargs.append('--daemon-pipefds=%s' % lockpath)
441 runargs.append('--daemon-pipefds=%s' % lockpath)
442 # Don't pass --cwd to the child process, because we've already
442 # Don't pass --cwd to the child process, because we've already
443 # changed directory.
443 # changed directory.
444 for i in xrange(1, len(runargs)):
444 for i in xrange(1, len(runargs)):
445 if runargs[i].startswith('--cwd='):
445 if runargs[i].startswith('--cwd='):
446 del runargs[i]
446 del runargs[i]
447 break
447 break
448 elif runargs[i].startswith('--cwd'):
448 elif runargs[i].startswith('--cwd'):
449 del runargs[i:i + 2]
449 del runargs[i:i + 2]
450 break
450 break
451 def condfn():
451 def condfn():
452 return not os.path.exists(lockpath)
452 return not os.path.exists(lockpath)
453 pid = util.rundetached(runargs, condfn)
453 pid = util.rundetached(runargs, condfn)
454 if pid < 0:
454 if pid < 0:
455 raise util.Abort(_('child process failed to start'))
455 raise util.Abort(_('child process failed to start'))
456 finally:
456 finally:
457 try:
457 try:
458 os.unlink(lockpath)
458 os.unlink(lockpath)
459 except OSError, e:
459 except OSError, e:
460 if e.errno != errno.ENOENT:
460 if e.errno != errno.ENOENT:
461 raise
461 raise
462 if parentfn:
462 if parentfn:
463 return parentfn(pid)
463 return parentfn(pid)
464 else:
464 else:
465 return
465 return
466
466
467 if initfn:
467 if initfn:
468 initfn()
468 initfn()
469
469
470 if opts['pid_file']:
470 if opts['pid_file']:
471 mode = appendpid and 'a' or 'w'
471 mode = appendpid and 'a' or 'w'
472 fp = open(opts['pid_file'], mode)
472 fp = open(opts['pid_file'], mode)
473 fp.write(str(os.getpid()) + '\n')
473 fp.write(str(os.getpid()) + '\n')
474 fp.close()
474 fp.close()
475
475
476 if opts['daemon_pipefds']:
476 if opts['daemon_pipefds']:
477 lockpath = opts['daemon_pipefds']
477 lockpath = opts['daemon_pipefds']
478 try:
478 try:
479 os.setsid()
479 os.setsid()
480 except AttributeError:
480 except AttributeError:
481 pass
481 pass
482 os.unlink(lockpath)
482 os.unlink(lockpath)
483 util.hidewindow()
483 util.hidewindow()
484 sys.stdout.flush()
484 sys.stdout.flush()
485 sys.stderr.flush()
485 sys.stderr.flush()
486
486
487 nullfd = os.open(util.nulldev, os.O_RDWR)
487 nullfd = os.open(util.nulldev, os.O_RDWR)
488 logfilefd = nullfd
488 logfilefd = nullfd
489 if logfile:
489 if logfile:
490 logfilefd = os.open(logfile, os.O_RDWR | os.O_CREAT | os.O_APPEND)
490 logfilefd = os.open(logfile, os.O_RDWR | os.O_CREAT | os.O_APPEND)
491 os.dup2(nullfd, 0)
491 os.dup2(nullfd, 0)
492 os.dup2(logfilefd, 1)
492 os.dup2(logfilefd, 1)
493 os.dup2(logfilefd, 2)
493 os.dup2(logfilefd, 2)
494 if nullfd not in (0, 1, 2):
494 if nullfd not in (0, 1, 2):
495 os.close(nullfd)
495 os.close(nullfd)
496 if logfile and logfilefd not in (0, 1, 2):
496 if logfile and logfilefd not in (0, 1, 2):
497 os.close(logfilefd)
497 os.close(logfilefd)
498
498
499 if runfn:
499 if runfn:
500 return runfn()
500 return runfn()
501
501
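# Editor's note: a hedged sketch of how a long-running command could call
# service() above.  The opts keys match the ones service() reads ('daemon',
# 'daemon_pipefds', 'pid_file'); the worker itself is purely hypothetical and
# not part of this changeset.
def _service_example(ui, opts):
    def init():
        ui.write("hypothetical worker initialized\n")
    def run():
        # a real caller (hg serve is one user of service()) would block here
        # serving requests
        pass
    service(opts, initfn=init, runfn=run, logfile=opts.get('logfile'))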
502 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
502 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
503 opts=None):
503 opts=None):
504 '''export changesets as hg patches.'''
504 '''export changesets as hg patches.'''
505
505
506 total = len(revs)
506 total = len(revs)
507 revwidth = max([len(str(rev)) for rev in revs])
507 revwidth = max([len(str(rev)) for rev in revs])
508
508
509 def single(rev, seqno, fp):
509 def single(rev, seqno, fp):
510 ctx = repo[rev]
510 ctx = repo[rev]
511 node = ctx.node()
511 node = ctx.node()
512 parents = [p.node() for p in ctx.parents() if p]
512 parents = [p.node() for p in ctx.parents() if p]
513 branch = ctx.branch()
513 branch = ctx.branch()
514 if switch_parent:
514 if switch_parent:
515 parents.reverse()
515 parents.reverse()
516 prev = (parents and parents[0]) or nullid
516 prev = (parents and parents[0]) or nullid
517
517
518 shouldclose = False
518 shouldclose = False
519 if not fp:
519 if not fp:
520 desc_lines = ctx.description().rstrip().split('\n')
520 desc_lines = ctx.description().rstrip().split('\n')
521 desc = desc_lines[0] # Commit always has a first line.
521 desc = desc_lines[0] # Commit always has a first line.
522 fp = makefileobj(repo, template, node, desc=desc, total=total,
522 fp = makefileobj(repo, template, node, desc=desc, total=total,
523 seqno=seqno, revwidth=revwidth, mode='ab')
523 seqno=seqno, revwidth=revwidth, mode='ab')
524 if fp != template:
524 if fp != template:
525 shouldclose = True
525 shouldclose = True
526 if fp != sys.stdout and util.safehasattr(fp, 'name'):
526 if fp != sys.stdout and util.safehasattr(fp, 'name'):
527 repo.ui.note("%s\n" % fp.name)
527 repo.ui.note("%s\n" % fp.name)
528
528
529 fp.write("# HG changeset patch\n")
529 fp.write("# HG changeset patch\n")
530 fp.write("# User %s\n" % ctx.user())
530 fp.write("# User %s\n" % ctx.user())
531 fp.write("# Date %d %d\n" % ctx.date())
531 fp.write("# Date %d %d\n" % ctx.date())
532 if branch and branch != 'default':
532 if branch and branch != 'default':
533 fp.write("# Branch %s\n" % branch)
533 fp.write("# Branch %s\n" % branch)
534 fp.write("# Node ID %s\n" % hex(node))
534 fp.write("# Node ID %s\n" % hex(node))
535 fp.write("# Parent %s\n" % hex(prev))
535 fp.write("# Parent %s\n" % hex(prev))
536 if len(parents) > 1:
536 if len(parents) > 1:
537 fp.write("# Parent %s\n" % hex(parents[1]))
537 fp.write("# Parent %s\n" % hex(parents[1]))
538 fp.write(ctx.description().rstrip())
538 fp.write(ctx.description().rstrip())
539 fp.write("\n\n")
539 fp.write("\n\n")
540
540
541 for chunk in patch.diff(repo, prev, node, opts=opts):
541 for chunk in patch.diff(repo, prev, node, opts=opts):
542 fp.write(chunk)
542 fp.write(chunk)
543
543
544 if shouldclose:
544 if shouldclose:
545 fp.close()
545 fp.close()
546
546
547 for seqno, rev in enumerate(revs):
547 for seqno, rev in enumerate(revs):
548 single(rev, seqno + 1, fp)
548 single(rev, seqno + 1, fp)
549
549
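# Editor's note: for reference, single() above writes a header of the shape
# below before the diff; the hashes and user are made up for illustration.
#
#   # HG changeset patch
#   # User Alice <alice@example.com>
#   # Date 1318500000 0
#   # Branch feature-x            (only emitted when not on 'default')
#   # Node ID 0123456789abcdef0123456789abcdef01234567
#   # Parent fedcba9876543210fedcba9876543210fedcba98
#   <commit message>
#
#   <unified diff from patch.diff() follows>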
550 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
550 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
551 changes=None, stat=False, fp=None, prefix='',
551 changes=None, stat=False, fp=None, prefix='',
552 listsubrepos=False):
552 listsubrepos=False):
553 '''show diff or diffstat.'''
553 '''show diff or diffstat.'''
554 if fp is None:
554 if fp is None:
555 write = ui.write
555 write = ui.write
556 else:
556 else:
557 def write(s, **kw):
557 def write(s, **kw):
558 fp.write(s)
558 fp.write(s)
559
559
560 if stat:
560 if stat:
561 diffopts = diffopts.copy(context=0)
561 diffopts = diffopts.copy(context=0)
562 width = 80
562 width = 80
563 if not ui.plain():
563 if not ui.plain():
564 width = ui.termwidth()
564 width = ui.termwidth()
565 chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
565 chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
566 prefix=prefix)
566 prefix=prefix)
567 for chunk, label in patch.diffstatui(util.iterlines(chunks),
567 for chunk, label in patch.diffstatui(util.iterlines(chunks),
568 width=width,
568 width=width,
569 git=diffopts.git):
569 git=diffopts.git):
570 write(chunk, label=label)
570 write(chunk, label=label)
571 else:
571 else:
572 for chunk, label in patch.diffui(repo, node1, node2, match,
572 for chunk, label in patch.diffui(repo, node1, node2, match,
573 changes, diffopts, prefix=prefix):
573 changes, diffopts, prefix=prefix):
574 write(chunk, label=label)
574 write(chunk, label=label)
575
575
576 if listsubrepos:
576 if listsubrepos:
577 ctx1 = repo[node1]
577 ctx1 = repo[node1]
578 ctx2 = repo[node2]
578 ctx2 = repo[node2]
579 for subpath, sub in subrepo.itersubrepos(ctx1, ctx2):
579 for subpath, sub in subrepo.itersubrepos(ctx1, ctx2):
580 if node2 is not None:
580 if node2 is not None:
581 node2 = ctx2.substate[subpath][1]
581 node2 = ctx2.substate[subpath][1]
582 submatch = matchmod.narrowmatcher(subpath, match)
582 submatch = matchmod.narrowmatcher(subpath, match)
583 sub.diff(diffopts, node2, submatch, changes=changes,
583 sub.diff(diffopts, node2, submatch, changes=changes,
584 stat=stat, fp=fp, prefix=prefix)
584 stat=stat, fp=fp, prefix=prefix)
585
585
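# Editor's note: a minimal, hypothetical call into diffordiffstat() above,
# exercising the stat=True branch; the diffopts construction mirrors
# showpatch() below and is not prescribed by this changeset.
def _diffstat_example(ui, repo):
    node2 = repo['.'].node()
    node1 = repo.changelog.parents(node2)[0]
    diffordiffstat(ui, repo, patch.diffopts(ui, {}), node1, node2,
                   match=scmutil.matchall(repo), stat=True)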
586 class changeset_printer(object):
586 class changeset_printer(object):
587 '''show changeset information when templating not requested.'''
587 '''show changeset information when templating not requested.'''
588
588
589 def __init__(self, ui, repo, patch, diffopts, buffered):
589 def __init__(self, ui, repo, patch, diffopts, buffered):
590 self.ui = ui
590 self.ui = ui
591 self.repo = repo
591 self.repo = repo
592 self.buffered = buffered
592 self.buffered = buffered
593 self.patch = patch
593 self.patch = patch
594 self.diffopts = diffopts
594 self.diffopts = diffopts
595 self.header = {}
595 self.header = {}
596 self.hunk = {}
596 self.hunk = {}
597 self.lastheader = None
597 self.lastheader = None
598 self.footer = None
598 self.footer = None
599
599
600 def flush(self, rev):
600 def flush(self, rev):
601 if rev in self.header:
601 if rev in self.header:
602 h = self.header[rev]
602 h = self.header[rev]
603 if h != self.lastheader:
603 if h != self.lastheader:
604 self.lastheader = h
604 self.lastheader = h
605 self.ui.write(h)
605 self.ui.write(h)
606 del self.header[rev]
606 del self.header[rev]
607 if rev in self.hunk:
607 if rev in self.hunk:
608 self.ui.write(self.hunk[rev])
608 self.ui.write(self.hunk[rev])
609 del self.hunk[rev]
609 del self.hunk[rev]
610 return 1
610 return 1
611 return 0
611 return 0
612
612
613 def close(self):
613 def close(self):
614 if self.footer:
614 if self.footer:
615 self.ui.write(self.footer)
615 self.ui.write(self.footer)
616
616
617 def show(self, ctx, copies=None, matchfn=None, **props):
617 def show(self, ctx, copies=None, matchfn=None, **props):
618 if self.buffered:
618 if self.buffered:
619 self.ui.pushbuffer()
619 self.ui.pushbuffer()
620 self._show(ctx, copies, matchfn, props)
620 self._show(ctx, copies, matchfn, props)
621 self.hunk[ctx.rev()] = self.ui.popbuffer(labeled=True)
621 self.hunk[ctx.rev()] = self.ui.popbuffer(labeled=True)
622 else:
622 else:
623 self._show(ctx, copies, matchfn, props)
623 self._show(ctx, copies, matchfn, props)
624
624
625 def _show(self, ctx, copies, matchfn, props):
625 def _show(self, ctx, copies, matchfn, props):
626 '''show a single changeset or file revision'''
626 '''show a single changeset or file revision'''
627 changenode = ctx.node()
627 changenode = ctx.node()
628 rev = ctx.rev()
628 rev = ctx.rev()
629
629
630 if self.ui.quiet:
630 if self.ui.quiet:
631 self.ui.write("%d:%s\n" % (rev, short(changenode)),
631 self.ui.write("%d:%s\n" % (rev, short(changenode)),
632 label='log.node')
632 label='log.node')
633 return
633 return
634
634
635 log = self.repo.changelog
635 log = self.repo.changelog
636 date = util.datestr(ctx.date())
636 date = util.datestr(ctx.date())
637
637
638 hexfunc = self.ui.debugflag and hex or short
638 hexfunc = self.ui.debugflag and hex or short
639
639
640 parents = [(p, hexfunc(log.node(p)))
640 parents = [(p, hexfunc(log.node(p)))
641 for p in self._meaningful_parentrevs(log, rev)]
641 for p in self._meaningful_parentrevs(log, rev)]
642
642
643 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)),
643 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)),
644 label='log.changeset')
644 label='log.changeset')
645
645
646 branch = ctx.branch()
646 branch = ctx.branch()
647 # don't show the default branch name
647 # don't show the default branch name
648 if branch != 'default':
648 if branch != 'default':
649 self.ui.write(_("branch: %s\n") % branch,
649 self.ui.write(_("branch: %s\n") % branch,
650 label='log.branch')
650 label='log.branch')
651 for bookmark in self.repo.nodebookmarks(changenode):
651 for bookmark in self.repo.nodebookmarks(changenode):
652 self.ui.write(_("bookmark: %s\n") % bookmark,
652 self.ui.write(_("bookmark: %s\n") % bookmark,
653 label='log.bookmark')
653 label='log.bookmark')
654 for tag in self.repo.nodetags(changenode):
654 for tag in self.repo.nodetags(changenode):
655 self.ui.write(_("tag: %s\n") % tag,
655 self.ui.write(_("tag: %s\n") % tag,
656 label='log.tag')
656 label='log.tag')
657 for parent in parents:
657 for parent in parents:
658 self.ui.write(_("parent: %d:%s\n") % parent,
658 self.ui.write(_("parent: %d:%s\n") % parent,
659 label='log.parent')
659 label='log.parent')
660
660
661 if self.ui.debugflag:
661 if self.ui.debugflag:
662 mnode = ctx.manifestnode()
662 mnode = ctx.manifestnode()
663 self.ui.write(_("manifest: %d:%s\n") %
663 self.ui.write(_("manifest: %d:%s\n") %
664 (self.repo.manifest.rev(mnode), hex(mnode)),
664 (self.repo.manifest.rev(mnode), hex(mnode)),
665 label='ui.debug log.manifest')
665 label='ui.debug log.manifest')
666 self.ui.write(_("user: %s\n") % ctx.user(),
666 self.ui.write(_("user: %s\n") % ctx.user(),
667 label='log.user')
667 label='log.user')
668 self.ui.write(_("date: %s\n") % date,
668 self.ui.write(_("date: %s\n") % date,
669 label='log.date')
669 label='log.date')
670
670
671 if self.ui.debugflag:
671 if self.ui.debugflag:
672 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
672 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
673 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
673 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
674 files):
674 files):
675 if value:
675 if value:
676 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
676 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
677 label='ui.debug log.files')
677 label='ui.debug log.files')
678 elif ctx.files() and self.ui.verbose:
678 elif ctx.files() and self.ui.verbose:
679 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
679 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
680 label='ui.note log.files')
680 label='ui.note log.files')
681 if copies and self.ui.verbose:
681 if copies and self.ui.verbose:
682 copies = ['%s (%s)' % c for c in copies]
682 copies = ['%s (%s)' % c for c in copies]
683 self.ui.write(_("copies: %s\n") % ' '.join(copies),
683 self.ui.write(_("copies: %s\n") % ' '.join(copies),
684 label='ui.note log.copies')
684 label='ui.note log.copies')
685
685
686 extra = ctx.extra()
686 extra = ctx.extra()
687 if extra and self.ui.debugflag:
687 if extra and self.ui.debugflag:
688 for key, value in sorted(extra.items()):
688 for key, value in sorted(extra.items()):
689 self.ui.write(_("extra: %s=%s\n")
689 self.ui.write(_("extra: %s=%s\n")
690 % (key, value.encode('string_escape')),
690 % (key, value.encode('string_escape')),
691 label='ui.debug log.extra')
691 label='ui.debug log.extra')
692
692
693 description = ctx.description().strip()
693 description = ctx.description().strip()
694 if description:
694 if description:
695 if self.ui.verbose:
695 if self.ui.verbose:
696 self.ui.write(_("description:\n"),
696 self.ui.write(_("description:\n"),
697 label='ui.note log.description')
697 label='ui.note log.description')
698 self.ui.write(description,
698 self.ui.write(description,
699 label='ui.note log.description')
699 label='ui.note log.description')
700 self.ui.write("\n\n")
700 self.ui.write("\n\n")
701 else:
701 else:
702 self.ui.write(_("summary: %s\n") %
702 self.ui.write(_("summary: %s\n") %
703 description.splitlines()[0],
703 description.splitlines()[0],
704 label='log.summary')
704 label='log.summary')
705 self.ui.write("\n")
705 self.ui.write("\n")
706
706
707 self.showpatch(changenode, matchfn)
707 self.showpatch(changenode, matchfn)
708
708
709 def showpatch(self, node, matchfn):
709 def showpatch(self, node, matchfn):
710 if not matchfn:
710 if not matchfn:
711 matchfn = self.patch
711 matchfn = self.patch
712 if matchfn:
712 if matchfn:
713 stat = self.diffopts.get('stat')
713 stat = self.diffopts.get('stat')
714 diff = self.diffopts.get('patch')
714 diff = self.diffopts.get('patch')
715 diffopts = patch.diffopts(self.ui, self.diffopts)
715 diffopts = patch.diffopts(self.ui, self.diffopts)
716 prev = self.repo.changelog.parents(node)[0]
716 prev = self.repo.changelog.parents(node)[0]
717 if stat:
717 if stat:
718 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
718 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
719 match=matchfn, stat=True)
719 match=matchfn, stat=True)
720 if diff:
720 if diff:
721 if stat:
721 if stat:
722 self.ui.write("\n")
722 self.ui.write("\n")
723 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
723 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
724 match=matchfn, stat=False)
724 match=matchfn, stat=False)
725 self.ui.write("\n")
725 self.ui.write("\n")
726
726
727 def _meaningful_parentrevs(self, log, rev):
727 def _meaningful_parentrevs(self, log, rev):
728 """Return list of meaningful (or all if debug) parentrevs for rev.
728 """Return list of meaningful (or all if debug) parentrevs for rev.
729
729
730 For merges (two non-nullrev revisions) both parents are meaningful.
730 For merges (two non-nullrev revisions) both parents are meaningful.
731 Otherwise the first parent revision is considered meaningful if it
731 Otherwise the first parent revision is considered meaningful if it
732 is not the preceding revision.
732 is not the preceding revision.
733 """
733 """
734 parents = log.parentrevs(rev)
734 parents = log.parentrevs(rev)
735 if not self.ui.debugflag and parents[1] == nullrev:
735 if not self.ui.debugflag and parents[1] == nullrev:
736 if parents[0] >= rev - 1:
736 if parents[0] >= rev - 1:
737 parents = []
737 parents = []
738 else:
738 else:
739 parents = [parents[0]]
739 parents = [parents[0]]
740 return parents
740 return parents
741
741
742
742
743 class changeset_templater(changeset_printer):
743 class changeset_templater(changeset_printer):
744 '''format changeset information.'''
744 '''format changeset information.'''
745
745
746 def __init__(self, ui, repo, patch, diffopts, mapfile, buffered):
746 def __init__(self, ui, repo, patch, diffopts, mapfile, buffered):
747 changeset_printer.__init__(self, ui, repo, patch, diffopts, buffered)
747 changeset_printer.__init__(self, ui, repo, patch, diffopts, buffered)
748 formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
748 formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
749 defaulttempl = {
749 defaulttempl = {
750 'parent': '{rev}:{node|formatnode} ',
750 'parent': '{rev}:{node|formatnode} ',
751 'manifest': '{rev}:{node|formatnode}',
751 'manifest': '{rev}:{node|formatnode}',
752 'file_copy': '{name} ({source})',
752 'file_copy': '{name} ({source})',
753 'extra': '{key}={value|stringescape}'
753 'extra': '{key}={value|stringescape}'
754 }
754 }
755 # filecopy is preserved for compatibility reasons
755 # filecopy is preserved for compatibility reasons
756 defaulttempl['filecopy'] = defaulttempl['file_copy']
756 defaulttempl['filecopy'] = defaulttempl['file_copy']
757 self.t = templater.templater(mapfile, {'formatnode': formatnode},
757 self.t = templater.templater(mapfile, {'formatnode': formatnode},
758 cache=defaulttempl)
758 cache=defaulttempl)
759 self.cache = {}
759 self.cache = {}
760
760
761 def use_template(self, t):
761 def use_template(self, t):
762 '''set template string to use'''
762 '''set template string to use'''
763 self.t.cache['changeset'] = t
763 self.t.cache['changeset'] = t
764
764
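    # Editor's note: an illustrative (not authoritative) use of use_template(),
    # mirroring what show_changeset() below does when --template is given:
    #
    #   t = changeset_templater(ui, repo, False, opts, None, buffered=False)
    #   t.use_template('{rev}:{node|short} {desc|firstline}\n')
    #   t.show(repo['tip'])
    #
    # 'rev', 'node', 'desc' and the 'short'/'firstline' filters are standard
    # template keywords/filters assumed available here.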
765 def _meaningful_parentrevs(self, ctx):
765 def _meaningful_parentrevs(self, ctx):
766 """Return list of meaningful (or all if debug) parentrevs for rev.
766 """Return list of meaningful (or all if debug) parentrevs for rev.
767 """
767 """
768 parents = ctx.parents()
768 parents = ctx.parents()
769 if len(parents) > 1:
769 if len(parents) > 1:
770 return parents
770 return parents
771 if self.ui.debugflag:
771 if self.ui.debugflag:
772 return [parents[0], self.repo['null']]
772 return [parents[0], self.repo['null']]
773 if parents[0].rev() >= ctx.rev() - 1:
773 if parents[0].rev() >= ctx.rev() - 1:
774 return []
774 return []
775 return parents
775 return parents
776
776
777 def _show(self, ctx, copies, matchfn, props):
777 def _show(self, ctx, copies, matchfn, props):
778 '''show a single changeset or file revision'''
778 '''show a single changeset or file revision'''
779
779
780 showlist = templatekw.showlist
780 showlist = templatekw.showlist
781
781
782 # showparents() behaviour depends on ui trace level which
782 # showparents() behaviour depends on ui trace level which
783 # causes unexpected behaviours at templating level and makes
783 # causes unexpected behaviours at templating level and makes
784 # it harder to extract it in a standalone function. Its
784 # it harder to extract it in a standalone function. Its
785 # behaviour cannot be changed so leave it here for now.
785 # behaviour cannot be changed so leave it here for now.
786 def showparents(**args):
786 def showparents(**args):
787 ctx = args['ctx']
787 ctx = args['ctx']
788 parents = [[('rev', p.rev()), ('node', p.hex())]
788 parents = [[('rev', p.rev()), ('node', p.hex())]
789 for p in self._meaningful_parentrevs(ctx)]
789 for p in self._meaningful_parentrevs(ctx)]
790 return showlist('parent', parents, **args)
790 return showlist('parent', parents, **args)
791
791
792 props = props.copy()
792 props = props.copy()
793 props.update(templatekw.keywords)
793 props.update(templatekw.keywords)
794 props['parents'] = showparents
794 props['parents'] = showparents
795 props['templ'] = self.t
795 props['templ'] = self.t
796 props['ctx'] = ctx
796 props['ctx'] = ctx
797 props['repo'] = self.repo
797 props['repo'] = self.repo
798 props['revcache'] = {'copies': copies}
798 props['revcache'] = {'copies': copies}
799 props['cache'] = self.cache
799 props['cache'] = self.cache
800
800
801 # find correct templates for current mode
801 # find correct templates for current mode
802
802
803 tmplmodes = [
803 tmplmodes = [
804 (True, None),
804 (True, None),
805 (self.ui.verbose, 'verbose'),
805 (self.ui.verbose, 'verbose'),
806 (self.ui.quiet, 'quiet'),
806 (self.ui.quiet, 'quiet'),
807 (self.ui.debugflag, 'debug'),
807 (self.ui.debugflag, 'debug'),
808 ]
808 ]
809
809
810 types = {'header': '', 'footer': '', 'changeset': 'changeset'}
810 types = {'header': '', 'footer': '', 'changeset': 'changeset'}
811 for mode, postfix in tmplmodes:
811 for mode, postfix in tmplmodes:
812 for type in types:
812 for type in types:
813 cur = postfix and ('%s_%s' % (type, postfix)) or type
813 cur = postfix and ('%s_%s' % (type, postfix)) or type
814 if mode and cur in self.t:
814 if mode and cur in self.t:
815 types[type] = cur
815 types[type] = cur
816
816
817 try:
817 try:
818
818
819 # write header
819 # write header
820 if types['header']:
820 if types['header']:
821 h = templater.stringify(self.t(types['header'], **props))
821 h = templater.stringify(self.t(types['header'], **props))
822 if self.buffered:
822 if self.buffered:
823 self.header[ctx.rev()] = h
823 self.header[ctx.rev()] = h
824 else:
824 else:
825 if self.lastheader != h:
825 if self.lastheader != h:
826 self.lastheader = h
826 self.lastheader = h
827 self.ui.write(h)
827 self.ui.write(h)
828
828
829 # write changeset metadata, then patch if requested
829 # write changeset metadata, then patch if requested
830 key = types['changeset']
830 key = types['changeset']
831 self.ui.write(templater.stringify(self.t(key, **props)))
831 self.ui.write(templater.stringify(self.t(key, **props)))
832 self.showpatch(ctx.node(), matchfn)
832 self.showpatch(ctx.node(), matchfn)
833
833
834 if types['footer']:
834 if types['footer']:
835 if not self.footer:
835 if not self.footer:
836 self.footer = templater.stringify(self.t(types['footer'],
836 self.footer = templater.stringify(self.t(types['footer'],
837 **props))
837 **props))
838
838
839 except KeyError, inst:
839 except KeyError, inst:
840 msg = _("%s: no key named '%s'")
840 msg = _("%s: no key named '%s'")
841 raise util.Abort(msg % (self.t.mapfile, inst.args[0]))
841 raise util.Abort(msg % (self.t.mapfile, inst.args[0]))
842 except SyntaxError, inst:
842 except SyntaxError, inst:
843 raise util.Abort('%s: %s' % (self.t.mapfile, inst.args[0]))
843 raise util.Abort('%s: %s' % (self.t.mapfile, inst.args[0]))
844
844
845 def show_changeset(ui, repo, opts, buffered=False):
845 def show_changeset(ui, repo, opts, buffered=False):
846 """show one changeset using template or regular display.
846 """show one changeset using template or regular display.
847
847
848 Display format will be the first non-empty hit of:
848 Display format will be the first non-empty hit of:
849 1. option 'template'
849 1. option 'template'
850 2. option 'style'
850 2. option 'style'
851 3. [ui] setting 'logtemplate'
851 3. [ui] setting 'logtemplate'
852 4. [ui] setting 'style'
852 4. [ui] setting 'style'
853 If all of these values are either unset or the empty string,
853 If all of these values are either unset or the empty string,
854 regular display via changeset_printer() is done.
854 regular display via changeset_printer() is done.
855 """
855 """
856 # options
856 # options
857 patch = False
857 patch = False
858 if opts.get('patch') or opts.get('stat'):
858 if opts.get('patch') or opts.get('stat'):
859 patch = scmutil.matchall(repo)
859 patch = scmutil.matchall(repo)
860
860
861 tmpl = opts.get('template')
861 tmpl = opts.get('template')
862 style = None
862 style = None
863 if tmpl:
863 if tmpl:
864 tmpl = templater.parsestring(tmpl, quoted=False)
864 tmpl = templater.parsestring(tmpl, quoted=False)
865 else:
865 else:
866 style = opts.get('style')
866 style = opts.get('style')
867
867
868 # ui settings
868 # ui settings
869 if not (tmpl or style):
869 if not (tmpl or style):
870 tmpl = ui.config('ui', 'logtemplate')
870 tmpl = ui.config('ui', 'logtemplate')
871 if tmpl:
871 if tmpl:
872 tmpl = templater.parsestring(tmpl)
872 tmpl = templater.parsestring(tmpl)
873 else:
873 else:
874 style = util.expandpath(ui.config('ui', 'style', ''))
874 style = util.expandpath(ui.config('ui', 'style', ''))
875
875
876 if not (tmpl or style):
876 if not (tmpl or style):
877 return changeset_printer(ui, repo, patch, opts, buffered)
877 return changeset_printer(ui, repo, patch, opts, buffered)
878
878
879 mapfile = None
879 mapfile = None
880 if style and not tmpl:
880 if style and not tmpl:
881 mapfile = style
881 mapfile = style
882 if not os.path.split(mapfile)[0]:
882 if not os.path.split(mapfile)[0]:
883 mapname = (templater.templatepath('map-cmdline.' + mapfile)
883 mapname = (templater.templatepath('map-cmdline.' + mapfile)
884 or templater.templatepath(mapfile))
884 or templater.templatepath(mapfile))
885 if mapname:
885 if mapname:
886 mapfile = mapname
886 mapfile = mapname
887
887
888 try:
888 try:
889 t = changeset_templater(ui, repo, patch, opts, mapfile, buffered)
889 t = changeset_templater(ui, repo, patch, opts, mapfile, buffered)
890 except SyntaxError, inst:
890 except SyntaxError, inst:
891 raise util.Abort(inst.args[0])
891 raise util.Abort(inst.args[0])
892 if tmpl:
892 if tmpl:
893 t.use_template(tmpl)
893 t.use_template(tmpl)
894 return t
894 return t
895
895
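# Editor's note: a hypothetical [ui] configuration illustrating the precedence
# show_changeset() documents -- an explicit --template always wins, and with
# both settings below present, 'logtemplate' is consulted before 'style':
#
#   [ui]
#   logtemplate = {rev} {node|short} {desc|firstline}\n
#   style = compact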
896 def finddate(ui, repo, date):
896 def finddate(ui, repo, date):
897 """Find the tipmost changeset that matches the given date spec"""
897 """Find the tipmost changeset that matches the given date spec"""
898
898
899 df = util.matchdate(date)
899 df = util.matchdate(date)
900 m = scmutil.matchall(repo)
900 m = scmutil.matchall(repo)
901 results = {}
901 results = {}
902
902
903 def prep(ctx, fns):
903 def prep(ctx, fns):
904 d = ctx.date()
904 d = ctx.date()
905 if df(d[0]):
905 if df(d[0]):
906 results[ctx.rev()] = d
906 results[ctx.rev()] = d
907
907
908 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
908 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
909 rev = ctx.rev()
909 rev = ctx.rev()
910 if rev in results:
910 if rev in results:
911 ui.status(_("Found revision %s from %s\n") %
911 ui.status(_("Found revision %s from %s\n") %
912 (rev, util.datestr(results[rev])))
912 (rev, util.datestr(results[rev])))
913 return str(rev)
913 return str(rev)
914
914
915 raise util.Abort(_("revision matching date not found"))
915 raise util.Abort(_("revision matching date not found"))
916
916
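# Editor's note: finddate() accepts whatever util.matchdate() understands; two
# commonly documented forms are shown below, but treat the exact spec syntax
# as an assumption of this note rather than of the changeset.
#
#   finddate(ui, repo, '2011-10-12')     # tipmost changeset on that day
#   finddate(ui, repo, '>2011-10-01')    # tipmost changeset on or after it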
917 def walkchangerevs(repo, match, opts, prepare):
917 def walkchangerevs(repo, match, opts, prepare):
918 '''Iterate over files and the revs in which they changed.
918 '''Iterate over files and the revs in which they changed.
919
919
920 Callers most commonly need to iterate backwards over the history
920 Callers most commonly need to iterate backwards over the history
921 in which they are interested. Doing so has awful (quadratic-looking)
921 in which they are interested. Doing so has awful (quadratic-looking)
922 performance, so we use iterators in a "windowed" way.
922 performance, so we use iterators in a "windowed" way.
923
923
924 We walk a window of revisions in the desired order. Within the
924 We walk a window of revisions in the desired order. Within the
925 window, we first walk forwards to gather data, then in the desired
925 window, we first walk forwards to gather data, then in the desired
926 order (usually backwards) to display it.
926 order (usually backwards) to display it.
927
927
928 This function returns an iterator yielding contexts. Before
928 This function returns an iterator yielding contexts. Before
929 yielding each context, the iterator will first call the prepare
929 yielding each context, the iterator will first call the prepare
930 function on each context in the window in forward order.'''
930 function on each context in the window in forward order.'''
931
931
932 def increasing_windows(start, end, windowsize=8, sizelimit=512):
932 def increasing_windows(start, end, windowsize=8, sizelimit=512):
933 if start < end:
933 if start < end:
934 while start < end:
934 while start < end:
935 yield start, min(windowsize, end - start)
935 yield start, min(windowsize, end - start)
936 start += windowsize
936 start += windowsize
937 if windowsize < sizelimit:
937 if windowsize < sizelimit:
938 windowsize *= 2
938 windowsize *= 2
939 else:
939 else:
940 while start > end:
940 while start > end:
941 yield start, min(windowsize, start - end - 1)
941 yield start, min(windowsize, start - end - 1)
942 start -= windowsize
942 start -= windowsize
943 if windowsize < sizelimit:
943 if windowsize < sizelimit:
944 windowsize *= 2
944 windowsize *= 2
945
945
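    # editor-added worked example: with the defaults above, walking 100
    # revisions forwards yields windows (0, 8), (8, 16), (24, 32), (56, 44) --
    # the window doubles each time until it reaches sizelimit, so history is
    # scanned in progressively larger chunks.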
946 follow = opts.get('follow') or opts.get('follow_first')
946 follow = opts.get('follow') or opts.get('follow_first')
947
947
948 if not len(repo):
948 if not len(repo):
949 return []
949 return []
950
950
951 if follow:
951 if follow:
952 defrange = '%s:0' % repo['.'].rev()
952 defrange = '%s:0' % repo['.'].rev()
953 else:
953 else:
954 defrange = '-1:0'
954 defrange = '-1:0'
955 revs = scmutil.revrange(repo, opts['rev'] or [defrange])
955 revs = scmutil.revrange(repo, opts['rev'] or [defrange])
956 if not revs:
956 if not revs:
957 return []
957 return []
958 wanted = set()
958 wanted = set()
959 slowpath = match.anypats() or (match.files() and opts.get('removed'))
959 slowpath = match.anypats() or (match.files() and opts.get('removed'))
960 fncache = {}
960 fncache = {}
961 change = util.cachefunc(repo.changectx)
961 change = util.cachefunc(repo.changectx)
962
962
963 # First step is to fill wanted, the set of revisions that we want to yield.
963 # First step is to fill wanted, the set of revisions that we want to yield.
964 # When it does not induce extra cost, we also fill fncache for revisions in
964 # When it does not induce extra cost, we also fill fncache for revisions in
965 # wanted: a cache of filenames that were changed (ctx.files()) and that
965 # wanted: a cache of filenames that were changed (ctx.files()) and that
966 # match the file filtering conditions.
966 # match the file filtering conditions.
967
967
968 if not slowpath and not match.files():
968 if not slowpath and not match.files():
969 # No files, no patterns. Display all revs.
969 # No files, no patterns. Display all revs.
970 wanted = set(revs)
970 wanted = set(revs)
971 copies = []
971 copies = []
972
972
973 if not slowpath:
973 if not slowpath:
974 # We only have to read through the filelog to find wanted revisions
974 # We only have to read through the filelog to find wanted revisions
975
975
976 minrev, maxrev = min(revs), max(revs)
976 minrev, maxrev = min(revs), max(revs)
977 def filerevgen(filelog, last):
977 def filerevgen(filelog, last):
978 """
978 """
979 Only files, no patterns. Check the history of each file.
979 Only files, no patterns. Check the history of each file.
980
980
981 Examines filelog entries within minrev, maxrev linkrev range
981 Examines filelog entries within minrev, maxrev linkrev range
982 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
982 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
983 tuples in backwards order
983 tuples in backwards order
984 """
984 """
985 cl_count = len(repo)
985 cl_count = len(repo)
986 revs = []
986 revs = []
987 for j in xrange(0, last + 1):
987 for j in xrange(0, last + 1):
988 linkrev = filelog.linkrev(j)
988 linkrev = filelog.linkrev(j)
989 if linkrev < minrev:
989 if linkrev < minrev:
990 continue
990 continue
991 # only yield rev for which we have the changelog, it can
991 # only yield rev for which we have the changelog, it can
992 # happen while doing "hg log" during a pull or commit
992 # happen while doing "hg log" during a pull or commit
993 if linkrev >= cl_count:
993 if linkrev >= cl_count:
994 break
994 break
995
995
996 parentlinkrevs = []
996 parentlinkrevs = []
997 for p in filelog.parentrevs(j):
997 for p in filelog.parentrevs(j):
998 if p != nullrev:
998 if p != nullrev:
999 parentlinkrevs.append(filelog.linkrev(p))
999 parentlinkrevs.append(filelog.linkrev(p))
1000 n = filelog.node(j)
1000 n = filelog.node(j)
1001 revs.append((linkrev, parentlinkrevs,
1001 revs.append((linkrev, parentlinkrevs,
1002 follow and filelog.renamed(n)))
1002 follow and filelog.renamed(n)))
1003
1003
1004 return reversed(revs)
1004 return reversed(revs)
1005 def iterfiles():
1005 def iterfiles():
1006 for filename in match.files():
1006 for filename in match.files():
1007 yield filename, None
1007 yield filename, None
1008 for filename_node in copies:
1008 for filename_node in copies:
1009 yield filename_node
1009 yield filename_node
1010 for file_, node in iterfiles():
1010 for file_, node in iterfiles():
1011 filelog = repo.file(file_)
1011 filelog = repo.file(file_)
1012 if not len(filelog):
1012 if not len(filelog):
1013 if node is None:
1013 if node is None:
1014 # A zero count may be a directory or deleted file, so
1014 # A zero count may be a directory or deleted file, so
1015 # try to find matching entries on the slow path.
1015 # try to find matching entries on the slow path.
1016 if follow:
1016 if follow:
1017 raise util.Abort(
1017 raise util.Abort(
1018 _('cannot follow nonexistent file: "%s"') % file_)
1018 _('cannot follow nonexistent file: "%s"') % file_)
1019 slowpath = True
1019 slowpath = True
1020 break
1020 break
1021 else:
1021 else:
1022 continue
1022 continue
1023
1023
1024 if node is None:
1024 if node is None:
1025 last = len(filelog) - 1
1025 last = len(filelog) - 1
1026 else:
1026 else:
1027 last = filelog.rev(node)
1027 last = filelog.rev(node)
1028
1028
1029
1029
1030 # keep track of all ancestors of the file
1030 # keep track of all ancestors of the file
1031 ancestors = set([filelog.linkrev(last)])
1031 ancestors = set([filelog.linkrev(last)])
1032
1032
1033 # iterate from latest to oldest revision
1033 # iterate from latest to oldest revision
1034 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1034 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1035 if not follow:
1035 if not follow:
1036 if rev > maxrev:
1036 if rev > maxrev:
1037 continue
1037 continue
1038 else:
1038 else:
1039 # Note that last might not be the first interesting
1039 # Note that last might not be the first interesting
1040 # rev to us:
1040 # rev to us:
1041 # if the file has been changed after maxrev, we'll
1041 # if the file has been changed after maxrev, we'll
1042 # have linkrev(last) > maxrev, and we still need
1042 # have linkrev(last) > maxrev, and we still need
1043 # to explore the file graph
1043 # to explore the file graph
1044 if rev not in ancestors:
1044 if rev not in ancestors:
1045 continue
1045 continue
1046 # XXX insert 1327 fix here
1046 # XXX insert 1327 fix here
1047 if flparentlinkrevs:
1047 if flparentlinkrevs:
1048 ancestors.update(flparentlinkrevs)
1048 ancestors.update(flparentlinkrevs)
1049
1049
1050 fncache.setdefault(rev, []).append(file_)
1050 fncache.setdefault(rev, []).append(file_)
1051 wanted.add(rev)
1051 wanted.add(rev)
1052 if copied:
1052 if copied:
1053 copies.append(copied)
1053 copies.append(copied)
1054 if slowpath:
1054 if slowpath:
1055 # We have to read the changelog to match filenames against
1055 # We have to read the changelog to match filenames against
1056 # changed files
1056 # changed files
1057
1057
1058 if follow:
1058 if follow:
1059 raise util.Abort(_('can only follow copies/renames for explicit '
1059 raise util.Abort(_('can only follow copies/renames for explicit '
1060 'filenames'))
1060 'filenames'))
1061
1061
1062 # The slow path checks files modified in every changeset.
1062 # The slow path checks files modified in every changeset.
1063 for i in sorted(revs):
1063 for i in sorted(revs):
1064 ctx = change(i)
1064 ctx = change(i)
1065 matches = filter(match, ctx.files())
1065 matches = filter(match, ctx.files())
1066 if matches:
1066 if matches:
1067 fncache[i] = matches
1067 fncache[i] = matches
1068 wanted.add(i)
1068 wanted.add(i)
1069
1069
1070 class followfilter(object):
1070 class followfilter(object):
1071 def __init__(self, onlyfirst=False):
1071 def __init__(self, onlyfirst=False):
1072 self.startrev = nullrev
1072 self.startrev = nullrev
1073 self.roots = set()
1073 self.roots = set()
1074 self.onlyfirst = onlyfirst
1074 self.onlyfirst = onlyfirst
1075
1075
1076 def match(self, rev):
1076 def match(self, rev):
1077 def realparents(rev):
1077 def realparents(rev):
1078 if self.onlyfirst:
1078 if self.onlyfirst:
1079 return repo.changelog.parentrevs(rev)[0:1]
1079 return repo.changelog.parentrevs(rev)[0:1]
1080 else:
1080 else:
1081 return filter(lambda x: x != nullrev,
1081 return filter(lambda x: x != nullrev,
1082 repo.changelog.parentrevs(rev))
1082 repo.changelog.parentrevs(rev))
1083
1083
1084 if self.startrev == nullrev:
1084 if self.startrev == nullrev:
1085 self.startrev = rev
1085 self.startrev = rev
1086 return True
1086 return True
1087
1087
1088 if rev > self.startrev:
1088 if rev > self.startrev:
1089 # forward: all descendants
1089 # forward: all descendants
1090 if not self.roots:
1090 if not self.roots:
1091 self.roots.add(self.startrev)
1091 self.roots.add(self.startrev)
1092 for parent in realparents(rev):
1092 for parent in realparents(rev):
1093 if parent in self.roots:
1093 if parent in self.roots:
1094 self.roots.add(rev)
1094 self.roots.add(rev)
1095 return True
1095 return True
1096 else:
1096 else:
1097 # backwards: all parents
1097 # backwards: all parents
1098 if not self.roots:
1098 if not self.roots:
1099 self.roots.update(realparents(self.startrev))
1099 self.roots.update(realparents(self.startrev))
1100 if rev in self.roots:
1100 if rev in self.roots:
1101 self.roots.remove(rev)
1101 self.roots.remove(rev)
1102 self.roots.update(realparents(rev))
1102 self.roots.update(realparents(rev))
1103 return True
1103 return True
1104
1104
1105 return False
1105 return False
1106
1106
1107 # it might be worthwhile to do this in the iterator if the rev range
1107 # it might be worthwhile to do this in the iterator if the rev range
1108 # is descending and the prune args are all within that range
1108 # is descending and the prune args are all within that range
1109 for rev in opts.get('prune', ()):
1109 for rev in opts.get('prune', ()):
1110 rev = repo.changelog.rev(repo.lookup(rev))
1110 rev = repo.changelog.rev(repo.lookup(rev))
1111 ff = followfilter()
1111 ff = followfilter()
1112 stop = min(revs[0], revs[-1])
1112 stop = min(revs[0], revs[-1])
1113 for x in xrange(rev, stop - 1, -1):
1113 for x in xrange(rev, stop - 1, -1):
1114 if ff.match(x):
1114 if ff.match(x):
1115 wanted.discard(x)
1115 wanted.discard(x)
1116
1116
1117 # Now that wanted is correctly initialized, we can iterate over the
1117 # Now that wanted is correctly initialized, we can iterate over the
1118 # revision range, yielding only revisions in wanted.
1118 # revision range, yielding only revisions in wanted.
1119 def iterate():
1119 def iterate():
1120 if follow and not match.files():
1120 if follow and not match.files():
1121 ff = followfilter(onlyfirst=opts.get('follow_first'))
1121 ff = followfilter(onlyfirst=opts.get('follow_first'))
1122 def want(rev):
1122 def want(rev):
1123 return ff.match(rev) and rev in wanted
1123 return ff.match(rev) and rev in wanted
1124 else:
1124 else:
1125 def want(rev):
1125 def want(rev):
1126 return rev in wanted
1126 return rev in wanted
1127
1127
1128 for i, window in increasing_windows(0, len(revs)):
1128 for i, window in increasing_windows(0, len(revs)):
1129 nrevs = [rev for rev in revs[i:i + window] if want(rev)]
1129 nrevs = [rev for rev in revs[i:i + window] if want(rev)]
1130 for rev in sorted(nrevs):
1130 for rev in sorted(nrevs):
1131 fns = fncache.get(rev)
1131 fns = fncache.get(rev)
1132 ctx = change(rev)
1132 ctx = change(rev)
1133 if not fns:
1133 if not fns:
1134 def fns_generator():
1134 def fns_generator():
1135 for f in ctx.files():
1135 for f in ctx.files():
1136 if match(f):
1136 if match(f):
1137 yield f
1137 yield f
1138 fns = fns_generator()
1138 fns = fns_generator()
1139 prepare(ctx, fns)
1139 prepare(ctx, fns)
1140 for rev in nrevs:
1140 for rev in nrevs:
1141 yield change(rev)
1141 yield change(rev)
1142 return iterate()
1142 return iterate()
1143
1143
1144 def add(ui, repo, match, dryrun, listsubrepos, prefix):
1144 def add(ui, repo, match, dryrun, listsubrepos, prefix):
1145 join = lambda f: os.path.join(prefix, f)
1145 join = lambda f: os.path.join(prefix, f)
1146 bad = []
1146 bad = []
1147 oldbad = match.bad
1147 oldbad = match.bad
1148 match.bad = lambda x, y: bad.append(x) or oldbad(x, y)
1148 match.bad = lambda x, y: bad.append(x) or oldbad(x, y)
1149 names = []
1149 names = []
1150 wctx = repo[None]
1150 wctx = repo[None]
1151 cca = None
1151 cca = None
1152 abort, warn = scmutil.checkportabilityalert(ui)
1152 abort, warn = scmutil.checkportabilityalert(ui)
1153 if abort or warn:
1153 if abort or warn:
1154 cca = scmutil.casecollisionauditor(ui, abort, wctx)
1154 cca = scmutil.casecollisionauditor(ui, abort, wctx)
1155 for f in repo.walk(match):
1155 for f in repo.walk(match):
1156 exact = match.exact(f)
1156 exact = match.exact(f)
1157 if exact or f not in repo.dirstate:
1157 if exact or f not in repo.dirstate:
1158 if cca:
1158 if cca:
1159 cca(f)
1159 cca(f)
1160 names.append(f)
1160 names.append(f)
1161 if ui.verbose or not exact:
1161 if ui.verbose or not exact:
1162 ui.status(_('adding %s\n') % match.rel(join(f)))
1162 ui.status(_('adding %s\n') % match.rel(join(f)))
1163
1163
1164 if listsubrepos:
1164 if listsubrepos:
1165 for subpath in wctx.substate:
1165 for subpath in wctx.substate:
1166 sub = wctx.sub(subpath)
1166 sub = wctx.sub(subpath)
1167 try:
1167 try:
1168 submatch = matchmod.narrowmatcher(subpath, match)
1168 submatch = matchmod.narrowmatcher(subpath, match)
1169 bad.extend(sub.add(ui, submatch, dryrun, prefix))
1169 bad.extend(sub.add(ui, submatch, dryrun, prefix))
1170 except error.LookupError:
1170 except error.LookupError:
1171 ui.status(_("skipping missing subrepository: %s\n")
1171 ui.status(_("skipping missing subrepository: %s\n")
1172 % join(subpath))
1172 % join(subpath))
1173
1173
1174 if not dryrun:
1174 if not dryrun:
1175 rejected = wctx.add(names, prefix)
1175 rejected = wctx.add(names, prefix)
1176 bad.extend(f for f in rejected if f in match.files())
1176 bad.extend(f for f in rejected if f in match.files())
1177 return bad
1177 return bad
1178
1178
1179 def duplicatecopies(repo, rev, p1, p2):
1180 "Reproduce copies found in the source revision in the dirstate for grafts"
1181 # Here we simulate the copies and renames in the source changeset
1182 cop, diver = copies.copies(repo, repo[rev], repo[p1], repo[p2], True)
1183 m1 = repo[rev].manifest()
1184 m2 = repo[p1].manifest()
1185 for k, v in cop.iteritems():
1186 if k in m1:
1187 if v in m1 or v in m2:
1188 repo.dirstate.copy(v, k)
1189 if v in m2 and v not in m1 and k in m2:
1190 repo.dirstate.remove(v)
1191
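# Editor's note (illustration only, not part of this changeset): if the source
# revision copied 'a' to 'b', duplicatecopies() replays that in the dirstate as
# repo.dirstate.copy('a', 'b'), provided 'b' is in the source manifest and 'a'
# exists in either the source or p1; when the source dropped 'a' (a rename),
# the repo.dirstate.remove(v) branch may also mark 'a' removed so the working
# copy ends up mirroring the original operation.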
def commit(ui, repo, commitfunc, pats, opts):
    '''commit the specified files or all outstanding changes'''
    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)
    message = logmessage(ui, opts)

    # extract addremove carefully -- this function can be called from a command
    # that doesn't support addremove
    if opts.get('addremove'):
        scmutil.addremove(repo, pats, opts)

    return commitfunc(ui, repo, message,
                      scmutil.match(repo[None], pats, opts), opts)

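# --- illustrative usage sketch, not part of this changeset ---
# commit() only prepares the message, date and optional addremove, then hands
# the actual commit to a caller-supplied commitfunc.  A minimal commitfunc in
# the common style (with the editor fallback) might look like this; the
# repository path and message are invented for the example.
from mercurial import ui as uimod, hg, cmdutil

def _commitfunc(ui, repo, message, match, opts):
    return repo.commit(message, opts.get('user'), opts.get('date'), match,
                       editor=cmdutil.commiteditor)

u = uimod.ui()
repo = hg.repository(u, '/path/to/repo')
node = cmdutil.commit(u, repo, _commitfunc, [], {'message': 'example commit'})
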
def commiteditor(repo, ctx, subs):
    if ctx.description():
        return ctx.description()
    return commitforceeditor(repo, ctx, subs)

def commitforceeditor(repo, ctx, subs):
    edittext = []
    modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
    if ctx.description():
        edittext.append(ctx.description())
    edittext.append("")
    edittext.append("") # Empty line between message and comments.
    edittext.append(_("HG: Enter commit message."
                      " Lines beginning with 'HG:' are removed."))
    edittext.append(_("HG: Leave message empty to abort commit."))
    edittext.append("HG: --")
    edittext.append(_("HG: user: %s") % ctx.user())
    if ctx.p2():
        edittext.append(_("HG: branch merge"))
    if ctx.branch():
        edittext.append(_("HG: branch '%s'") % ctx.branch())
    edittext.extend([_("HG: subrepo %s") % s for s in subs])
    edittext.extend([_("HG: added %s") % f for f in added])
    edittext.extend([_("HG: changed %s") % f for f in modified])
    edittext.extend([_("HG: removed %s") % f for f in removed])
    if not added and not modified and not removed:
        edittext.append(_("HG: no files changed"))
    edittext.append("")
    # run editor in the repository root
    olddir = os.getcwd()
    os.chdir(repo.root)
    text = repo.ui.edit("\n".join(edittext), ctx.user())
    text = re.sub("(?m)^HG:.*(\n|$)", "", text)
    os.chdir(olddir)

    if not text.strip():
        raise util.Abort(_("empty commit message"))

    return text

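# --- standalone illustration, not part of this changeset ---
# The 'HG:' stripping applied to whatever comes back from the editor: comment
# lines disappear, and a buffer containing nothing but comments therefore
# becomes empty and triggers the "empty commit message" abort above.
import re

raw = "fix the parser\nHG: Enter commit message.\nHG: changed parser.py\n"
print re.sub("(?m)^HG:.*(\n|$)", "", raw)   # prints just "fix the parser"
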
def command(table):
    '''returns a function object bound to table which can be used as
    a decorator for populating table as a command table'''

    def cmd(name, options, synopsis=None):
        def decorator(func):
            if synopsis:
                table[name] = func, options[:], synopsis
            else:
                table[name] = func, options[:]
            return func
        return decorator

    return cmd
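
# --- illustrative usage sketch, not part of this changeset ---
# An extension binds the returned decorator to its own table and registers a
# command with it.  The command name, option and synopsis are invented here;
# a real extension exposes the dict under the module-level name the extension
# loader expects so the new command becomes visible to hg.
from mercurial import cmdutil

mytable = {}
mycommand = cmdutil.command(mytable)

@mycommand('hello', [('g', 'greeting', 'Hello', 'greeting to use', 'TEXT')],
           'hg hello [-g TEXT]')
def hello(ui, repo, **opts):
    ui.write("%s from %s\n" % (opts['greeting'], repo.root))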