bookmarks: rename bookmarkcurrent to activebookmark (API)
Ryan McElroy
r24947:a02d293a default
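The only functional change in the file below is at line 361, where the repository attribute repo._bookmarkcurrent is replaced by its new name repo._activebookmark; the bookmarks.activate()/bookmarks.deactivate() calls that rebase.py already makes are untouched. A minimal sketch of how out-of-tree extension code could stay compatible across the rename (the helper and its getattr() fallback are assumptions for illustration, not part of this changeset):

def getactivebookmark(repo):
    # Prefer the post-rename attribute; fall back to the old spelling on
    # Mercurial versions that predate this change. The helper name and the
    # getattr() fallback are illustrative assumptions, not part of rebase.py.
    mark = getattr(repo, '_activebookmark', None)
    if mark is None:
        mark = getattr(repo, '_bookmarkcurrent', None)
    return mark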
@@ -1,1116 +1,1116 @@
1 # rebase.py - rebasing feature for mercurial
2 #
3 # Copyright 2008 Stefano Tortarolo <stefano.tortarolo at gmail dot com>
4 #
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
7
8 '''command to move sets of revisions to a different ancestor
9
10 This extension lets you rebase changesets in an existing Mercurial
11 repository.
12
13 For more information:
14 http://mercurial.selenic.com/wiki/RebaseExtension
15 '''
16
17 from mercurial import hg, util, repair, merge, cmdutil, commands, bookmarks
18 from mercurial import extensions, patch, scmutil, phases, obsolete, error
19 from mercurial import copies, repoview
20 from mercurial.commands import templateopts
21 from mercurial.node import nullrev, nullid, hex, short
22 from mercurial.lock import release
23 from mercurial.i18n import _
24 import os, errno
25
26 revtodo = -1
27 nullmerge = -2
28 revignored = -3
29
30 cmdtable = {}
31 command = cmdutil.command(cmdtable)
32 testedwith = 'internal'
33
34 def _savegraft(ctx, extra):
35 s = ctx.extra().get('source', None)
36 if s is not None:
37 extra['source'] = s
38
39 def _savebranch(ctx, extra):
40 extra['branch'] = ctx.branch()
41
42 def _makeextrafn(copiers):
43 """make an extrafn out of the given copy-functions.
44
45 A copy function takes a context and an extra dict, and mutates the
46 extra dict as needed based on the given context.
47 """
48 def extrafn(ctx, extra):
49 for c in copiers:
50 c(ctx, extra)
51 return extrafn
52
53 @command('rebase',
54 [('s', 'source', '',
55 _('rebase the specified changeset and descendants'), _('REV')),
56 ('b', 'base', '',
57 _('rebase everything from branching point of specified changeset'),
58 _('REV')),
59 ('r', 'rev', [],
60 _('rebase these revisions'),
61 _('REV')),
62 ('d', 'dest', '',
63 _('rebase onto the specified changeset'), _('REV')),
64 ('', 'collapse', False, _('collapse the rebased changesets')),
65 ('m', 'message', '',
66 _('use text as collapse commit message'), _('TEXT')),
67 ('e', 'edit', False, _('invoke editor on commit messages')),
68 ('l', 'logfile', '',
69 _('read collapse commit message from file'), _('FILE')),
70 ('', 'keep', False, _('keep original changesets')),
71 ('', 'keepbranches', False, _('keep original branch names')),
72 ('D', 'detach', False, _('(DEPRECATED)')),
73 ('i', 'interactive', False, _('(DEPRECATED)')),
74 ('t', 'tool', '', _('specify merge tool')),
75 ('c', 'continue', False, _('continue an interrupted rebase')),
76 ('a', 'abort', False, _('abort an interrupted rebase'))] +
77 templateopts,
78 _('[-s REV | -b REV] [-d REV] [OPTION]'))
79 def rebase(ui, repo, **opts):
80 """move changeset (and descendants) to a different branch
81
82 Rebase uses repeated merging to graft changesets from one part of
83 history (the source) onto another (the destination). This can be
84 useful for linearizing *local* changes relative to a master
85 development tree.
86
87 You should not rebase changesets that have already been shared
88 with others. Doing so will force everybody else to perform the
89 same rebase or they will end up with duplicated changesets after
90 pulling in your rebased changesets.
91
92 In its default configuration, Mercurial will prevent you from
93 rebasing published changes. See :hg:`help phases` for details.
94
95 If you don't specify a destination changeset (``-d/--dest``),
96 rebase uses the current branch tip as the destination. (The
97 destination changeset is not modified by rebasing, but new
98 changesets are added as its descendants.)
99
100 You can specify which changesets to rebase in two ways: as a
101 "source" changeset or as a "base" changeset. Both are shorthand
102 for a topologically related set of changesets (the "source
103 branch"). If you specify source (``-s/--source``), rebase will
104 rebase that changeset and all of its descendants onto dest. If you
105 specify base (``-b/--base``), rebase will select ancestors of base
106 back to but not including the common ancestor with dest. Thus,
107 ``-b`` is less precise but more convenient than ``-s``: you can
108 specify any changeset in the source branch, and rebase will select
109 the whole branch. If you specify neither ``-s`` nor ``-b``, rebase
110 uses the parent of the working directory as the base.
111
112 For advanced usage, a third way is available through the ``--rev``
113 option. It allows you to specify an arbitrary set of changesets to
114 rebase. Descendants of revs you specify with this option are not
115 automatically included in the rebase.
116
117 By default, rebase recreates the changesets in the source branch
118 as descendants of dest and then destroys the originals. Use
119 ``--keep`` to preserve the original source changesets. Some
120 changesets in the source branch (e.g. merges from the destination
121 branch) may be dropped if they no longer contribute any change.
122
123 One result of the rules for selecting the destination changeset
124 and source branch is that, unlike ``merge``, rebase will do
125 nothing if you are at the branch tip of a named branch
126 with two heads. You need to explicitly specify source and/or
127 destination (or ``update`` to the other head, if it's the head of
128 the intended source branch).
129
130 If a rebase is interrupted to manually resolve a merge, it can be
131 continued with --continue/-c or aborted with --abort/-a.
132
133 .. container:: verbose
134
135 Examples:
136
137 - move "local changes" (current commit back to branching point)
138 to the current branch tip after a pull::
139
140 hg rebase
141
142 - move a single changeset to the stable branch::
143
144 hg rebase -r 5f493448 -d stable
145
146 - splice a commit and all its descendants onto another part of history::
147
148 hg rebase --source c0c3 --dest 4cf9
149
150 - rebase everything on a branch marked by a bookmark onto the
151 default branch::
152
153 hg rebase --base myfeature --dest default
154
155 - collapse a sequence of changes into a single commit::
156
157 hg rebase --collapse -r 1520:1525 -d .
158
159 - move a named branch while preserving its name::
160
161 hg rebase -r "branch(featureX)" -d 1.3 --keepbranches
162
163 Returns 0 on success, 1 if nothing to rebase or there are
164 unresolved conflicts.
165
166 """
167 originalwd = target = None
168 activebookmark = None
169 external = nullrev
170 state = {}
171 skipped = set()
172 targetancestors = set()
173
174
175 lock = wlock = None
176 try:
177 wlock = repo.wlock()
178 lock = repo.lock()
179
180 # Validate input and define rebasing points
181 destf = opts.get('dest', None)
182 srcf = opts.get('source', None)
183 basef = opts.get('base', None)
184 revf = opts.get('rev', [])
185 contf = opts.get('continue')
186 abortf = opts.get('abort')
187 collapsef = opts.get('collapse', False)
188 collapsemsg = cmdutil.logmessage(ui, opts)
189 e = opts.get('extrafn') # internal, used by e.g. hgsubversion
190 extrafns = [_savegraft]
191 if e:
192 extrafns = [e]
193 keepf = opts.get('keep', False)
194 keepbranchesf = opts.get('keepbranches', False)
195 # keepopen is not meant for use on the command line, but by
196 # other extensions
197 keepopen = opts.get('keepopen', False)
198
199 if opts.get('interactive'):
200 msg = _("interactive history editing is supported by the "
201 "'histedit' extension (see \"hg help histedit\")")
202 raise util.Abort(msg)
203
204 if collapsemsg and not collapsef:
205 raise util.Abort(
206 _('message can only be specified with collapse'))
207
208 if contf or abortf:
209 if contf and abortf:
210 raise util.Abort(_('cannot use both abort and continue'))
211 if collapsef:
212 raise util.Abort(
213 _('cannot use collapse with continue or abort'))
214 if srcf or basef or destf:
215 raise util.Abort(
216 _('abort and continue do not allow specifying revisions'))
217 if opts.get('tool', False):
218 ui.warn(_('tool option will be ignored\n'))
219
220 try:
221 (originalwd, target, state, skipped, collapsef, keepf,
222 keepbranchesf, external, activebookmark) = restorestatus(repo)
223 except error.RepoLookupError:
224 if abortf:
225 clearstatus(repo)
226 repo.ui.warn(_('rebase aborted (no revision is removed,'
227 ' only broken state is cleared)\n'))
228 return 0
229 else:
230 msg = _('cannot continue inconsistent rebase')
231 hint = _('use "hg rebase --abort" to clear broken state')
232 raise util.Abort(msg, hint=hint)
233 if abortf:
234 return abort(repo, originalwd, target, state,
235 activebookmark=activebookmark)
236 else:
237 if srcf and basef:
238 raise util.Abort(_('cannot specify both a '
239 'source and a base'))
240 if revf and basef:
241 raise util.Abort(_('cannot specify both a '
242 'revision and a base'))
243 if revf and srcf:
244 raise util.Abort(_('cannot specify both a '
245 'revision and a source'))
246
247 cmdutil.checkunfinished(repo)
248 cmdutil.bailifchanged(repo)
249
250 if not destf:
251 # Destination defaults to the latest revision in the
252 # current branch
253 branch = repo[None].branch()
254 dest = repo[branch]
255 else:
256 dest = scmutil.revsingle(repo, destf)
257
258 if revf:
259 rebaseset = scmutil.revrange(repo, revf)
260 if not rebaseset:
261 ui.status(_('empty "rev" revision set - '
262 'nothing to rebase\n'))
263 return 1
264 elif srcf:
265 src = scmutil.revrange(repo, [srcf])
266 if not src:
267 ui.status(_('empty "source" revision set - '
268 'nothing to rebase\n'))
269 return 1
270 rebaseset = repo.revs('(%ld)::', src)
271 assert rebaseset
272 else:
273 base = scmutil.revrange(repo, [basef or '.'])
274 if not base:
275 ui.status(_('empty "base" revision set - '
276 "can't compute rebase set\n"))
277 return 1
278 commonanc = repo.revs('ancestor(%ld, %d)', base, dest).first()
279 if commonanc is not None:
280 rebaseset = repo.revs('(%d::(%ld) - %d)::',
281 commonanc, base, commonanc)
282 else:
283 rebaseset = []
284
285 if not rebaseset:
286 # transform to list because smartsets are not comparable to
287 # lists. This should be improved to honor laziness of
288 # smartset.
289 if list(base) == [dest.rev()]:
290 if basef:
291 ui.status(_('nothing to rebase - %s is both "base"'
292 ' and destination\n') % dest)
293 else:
294 ui.status(_('nothing to rebase - working directory '
295 'parent is also destination\n'))
296 elif not repo.revs('%ld - ::%d', base, dest):
297 if basef:
298 ui.status(_('nothing to rebase - "base" %s is '
299 'already an ancestor of destination '
300 '%s\n') %
301 ('+'.join(str(repo[r]) for r in base),
302 dest))
303 else:
304 ui.status(_('nothing to rebase - working '
305 'directory parent is already an '
306 'ancestor of destination %s\n') % dest)
307 else: # can it happen?
308 ui.status(_('nothing to rebase from %s to %s\n') %
309 ('+'.join(str(repo[r]) for r in base), dest))
310 return 1
311
312 allowunstable = obsolete.isenabled(repo, obsolete.allowunstableopt)
313 if (not (keepf or allowunstable)
314 and repo.revs('first(children(%ld) - %ld)',
315 rebaseset, rebaseset)):
316 raise util.Abort(
317 _("can't remove original changesets with"
318 " unrebased descendants"),
319 hint=_('use --keep to keep original changesets'))
320
321 result = buildstate(repo, dest, rebaseset, collapsef)
322 if not result:
323 # Empty state built, nothing to rebase
324 ui.status(_('nothing to rebase\n'))
325 return 1
326
327 root = min(rebaseset)
328 if not keepf and not repo[root].mutable():
329 raise util.Abort(_("can't rebase immutable changeset %s")
330 % repo[root],
331 hint=_('see "hg help phases" for details'))
332
333 originalwd, target, state = result
334 if collapsef:
335 targetancestors = repo.changelog.ancestors([target],
336 inclusive=True)
337 external = externalparent(repo, state, targetancestors)
338
339 if dest.closesbranch() and not keepbranchesf:
340 ui.status(_('reopening closed branch head %s\n') % dest)
341
342 if keepbranchesf:
343 # insert _savebranch at the start of extrafns so if
344 # there's a user-provided extrafn it can clobber branch if
345 # desired
346 extrafns.insert(0, _savebranch)
347 if collapsef:
348 branches = set()
349 for rev in state:
350 branches.add(repo[rev].branch())
351 if len(branches) > 1:
352 raise util.Abort(_('cannot collapse multiple named '
353 'branches'))
354
355 # Rebase
356 if not targetancestors:
357 targetancestors = repo.changelog.ancestors([target], inclusive=True)
358
359 # Keep track of the current bookmarks in order to reset them later
360 currentbookmarks = repo._bookmarks.copy()
- 361 activebookmark = activebookmark or repo._bookmarkcurrent
+ 361 activebookmark = activebookmark or repo._activebookmark
362 if activebookmark:
363 bookmarks.deactivate(repo)
364
365 extrafn = _makeextrafn(extrafns)
366
367 sortedstate = sorted(state)
368 total = len(sortedstate)
369 pos = 0
370 for rev in sortedstate:
371 ctx = repo[rev]
372 desc = '%d:%s "%s"' % (ctx.rev(), ctx,
373 ctx.description().split('\n', 1)[0])
374 names = repo.nodetags(ctx.node()) + repo.nodebookmarks(ctx.node())
375 if names:
376 desc += ' (%s)' % ' '.join(names)
377 pos += 1
378 if state[rev] == revtodo:
379 ui.status(_('rebasing %s\n') % desc)
380 ui.progress(_("rebasing"), pos, ("%d:%s" % (rev, ctx)),
381 _('changesets'), total)
382 p1, p2, base = defineparents(repo, rev, target, state,
383 targetancestors)
384 storestatus(repo, originalwd, target, state, collapsef, keepf,
385 keepbranchesf, external, activebookmark)
386 if len(repo.parents()) == 2:
387 repo.ui.debug('resuming interrupted rebase\n')
388 else:
389 try:
390 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
391 'rebase')
392 stats = rebasenode(repo, rev, p1, base, state,
393 collapsef, target)
394 if stats and stats[3] > 0:
395 raise error.InterventionRequired(
396 _('unresolved conflicts (see hg '
397 'resolve, then hg rebase --continue)'))
398 finally:
399 ui.setconfig('ui', 'forcemerge', '', 'rebase')
400 if not collapsef:
401 merging = p2 != nullrev
402 editform = cmdutil.mergeeditform(merging, 'rebase')
403 editor = cmdutil.getcommiteditor(editform=editform, **opts)
404 newnode = concludenode(repo, rev, p1, p2, extrafn=extrafn,
405 editor=editor)
406 else:
407 # Skip commit if we are collapsing
408 repo.dirstate.beginparentchange()
409 repo.setparents(repo[p1].node())
410 repo.dirstate.endparentchange()
411 newnode = None
412 # Update the state
413 if newnode is not None:
414 state[rev] = repo[newnode].rev()
415 ui.debug('rebased as %s\n' % short(newnode))
416 else:
417 ui.warn(_('note: rebase of %d:%s created no changes '
418 'to commit\n') % (rev, ctx))
419 if not collapsef:
420 skipped.add(rev)
421 state[rev] = p1
422 ui.debug('next revision set to %s\n' % p1)
423 elif state[rev] == nullmerge:
424 ui.debug('ignoring null merge rebase of %s\n' % rev)
425 elif state[rev] == revignored:
426 ui.status(_('not rebasing ignored %s\n') % desc)
427 else:
428 ui.status(_('already rebased %s as %s\n') %
429 (desc, repo[state[rev]]))
430
431 ui.progress(_('rebasing'), None)
432 ui.note(_('rebase merging completed\n'))
433
434 if collapsef and not keepopen:
435 p1, p2, _base = defineparents(repo, min(state), target,
436 state, targetancestors)
437 editopt = opts.get('edit')
438 editform = 'rebase.collapse'
439 if collapsemsg:
440 commitmsg = collapsemsg
441 else:
442 commitmsg = 'Collapsed revision'
443 for rebased in state:
444 if rebased not in skipped and state[rebased] > nullmerge:
445 commitmsg += '\n* %s' % repo[rebased].description()
446 editopt = True
447 editor = cmdutil.getcommiteditor(edit=editopt, editform=editform)
448 newnode = concludenode(repo, rev, p1, external, commitmsg=commitmsg,
449 extrafn=extrafn, editor=editor)
450 if newnode is None:
451 newrev = target
452 else:
453 newrev = repo[newnode].rev()
454 for oldrev in state.iterkeys():
455 if state[oldrev] > nullmerge:
456 state[oldrev] = newrev
457
458 if 'qtip' in repo.tags():
459 updatemq(repo, state, skipped, **opts)
460
461 if currentbookmarks:
462 # Nodeids are needed to reset bookmarks
463 nstate = {}
464 for k, v in state.iteritems():
465 if v > nullmerge:
466 nstate[repo[k].node()] = repo[v].node()
467 # XXX this is the same as dest.node() for the non-continue path --
468 # this should probably be cleaned up
469 targetnode = repo[target].node()
470
471 # restore original working directory
472 # (we do this before stripping)
473 newwd = state.get(originalwd, originalwd)
474 if newwd < 0:
475 # original directory is a parent of rebase set root or ignored
476 newwd = originalwd
477 if newwd not in [c.rev() for c in repo[None].parents()]:
478 ui.note(_("update back to initial working directory parent\n"))
479 hg.updaterepo(repo, newwd, False)
480
481 if not keepf:
482 collapsedas = None
483 if collapsef:
484 collapsedas = newnode
485 clearrebased(ui, repo, state, skipped, collapsedas)
486
487 if currentbookmarks:
488 updatebookmarks(repo, targetnode, nstate, currentbookmarks)
489 if activebookmark not in repo._bookmarks:
490 # active bookmark was divergent one and has been deleted
491 activebookmark = None
492
493 clearstatus(repo)
494 ui.note(_("rebase completed\n"))
495 util.unlinkpath(repo.sjoin('undo'), ignoremissing=True)
496 if skipped:
497 ui.note(_("%d revisions have been skipped\n") % len(skipped))
498
499 if (activebookmark and
500 repo['.'].node() == repo._bookmarks[activebookmark]):
501 bookmarks.activate(repo, activebookmark)
502
503 finally:
504 release(lock, wlock)
505
506 def externalparent(repo, state, targetancestors):
507 """Return the revision that should be used as the second parent
508 when the revisions in state is collapsed on top of targetancestors.
509 Abort if there is more than one parent.
510 """
511 parents = set()
512 source = min(state)
513 for rev in state:
514 if rev == source:
515 continue
516 for p in repo[rev].parents():
517 if (p.rev() not in state
518 and p.rev() not in targetancestors):
519 parents.add(p.rev())
520 if not parents:
521 return nullrev
522 if len(parents) == 1:
523 return parents.pop()
524 raise util.Abort(_('unable to collapse on top of %s, there is more '
525 'than one external parent: %s') %
526 (max(targetancestors),
527 ', '.join(str(p) for p in sorted(parents))))
528
529 def concludenode(repo, rev, p1, p2, commitmsg=None, editor=None, extrafn=None):
530 '''Commit the wd changes with parents p1 and p2. Reuse commit info from rev
531 but also store useful information in extra.
532 Return node of committed revision.'''
533 try:
534 repo.dirstate.beginparentchange()
535 repo.setparents(repo[p1].node(), repo[p2].node())
536 repo.dirstate.endparentchange()
537 ctx = repo[rev]
538 if commitmsg is None:
539 commitmsg = ctx.description()
540 extra = {'rebase_source': ctx.hex()}
541 if extrafn:
542 extrafn(ctx, extra)
543
544 backup = repo.ui.backupconfig('phases', 'new-commit')
545 try:
546 targetphase = max(ctx.phase(), phases.draft)
547 repo.ui.setconfig('phases', 'new-commit', targetphase, 'rebase')
548 # Commit might fail if unresolved files exist
549 newnode = repo.commit(text=commitmsg, user=ctx.user(),
550 date=ctx.date(), extra=extra, editor=editor)
551 finally:
552 repo.ui.restoreconfig(backup)
553
554 repo.dirstate.setbranch(repo[newnode].branch())
555 return newnode
556 except util.Abort:
557 # Invalidate the previous setparents
558 repo.dirstate.invalidate()
559 raise
560
561 def rebasenode(repo, rev, p1, base, state, collapse, target):
562 'Rebase a single revision rev on top of p1 using base as merge ancestor'
563 # Merge phase
564 # Update to target and merge it with local
565 if repo['.'].rev() != p1:
566 repo.ui.debug(" update to %d:%s\n" % (p1, repo[p1]))
567 merge.update(repo, p1, False, True, False)
568 else:
569 repo.ui.debug(" already in target\n")
570 repo.dirstate.write()
571 repo.ui.debug(" merge against %d:%s\n" % (rev, repo[rev]))
572 if base is not None:
573 repo.ui.debug(" detach base %d:%s\n" % (base, repo[base]))
574 # When collapsing in-place, the parent is the common ancestor, we
575 # have to allow merging with it.
576 stats = merge.update(repo, rev, True, True, False, base, collapse,
577 labels=['dest', 'source'])
578 if collapse:
579 copies.duplicatecopies(repo, rev, target)
580 else:
581 # If we're not using --collapse, we need to
582 # duplicate copies between the revision we're
583 # rebasing and its first parent, but *not*
584 # duplicate any copies that have already been
585 # performed in the destination.
586 p1rev = repo[rev].p1().rev()
587 copies.duplicatecopies(repo, rev, p1rev, skiprev=target)
588 return stats
589
590 def nearestrebased(repo, rev, state):
591 """return the nearest ancestors of rev in the rebase result"""
592 rebased = [r for r in state if state[r] > nullmerge]
593 candidates = repo.revs('max(%ld and (::%d))', rebased, rev)
594 if candidates:
595 return state[candidates.first()]
596 else:
597 return None
598
599 def defineparents(repo, rev, target, state, targetancestors):
600 'Return the new parent relationship of the revision that will be rebased'
601 parents = repo[rev].parents()
602 p1 = p2 = nullrev
603
604 p1n = parents[0].rev()
605 if p1n in targetancestors:
606 p1 = target
607 elif p1n in state:
608 if state[p1n] == nullmerge:
609 p1 = target
610 elif state[p1n] == revignored:
611 p1 = nearestrebased(repo, p1n, state)
612 if p1 is None:
613 p1 = target
614 else:
615 p1 = state[p1n]
616 else: # p1n external
617 p1 = target
618 p2 = p1n
619
620 if len(parents) == 2 and parents[1].rev() not in targetancestors:
621 p2n = parents[1].rev()
622 # interesting second parent
623 if p2n in state:
624 if p1 == target: # p1n in targetancestors or external
625 p1 = state[p2n]
626 elif state[p2n] == revignored:
627 p2 = nearestrebased(repo, p2n, state)
628 if p2 is None:
629 # no ancestors rebased yet, detach
630 p2 = target
631 else:
632 p2 = state[p2n]
633 else: # p2n external
634 if p2 != nullrev: # p1n external too => rev is a merged revision
635 raise util.Abort(_('cannot use revision %d as base, result '
636 'would have 3 parents') % rev)
637 p2 = p2n
638 repo.ui.debug(" future parents are %d and %d\n" %
639 (repo[p1].rev(), repo[p2].rev()))
640
641 if rev == min(state):
642 # Case (1) initial changeset of a non-detaching rebase.
643 # Let the merge mechanism find the base itself.
644 base = None
645 elif not repo[rev].p2():
646 # Case (2) detaching the node with a single parent, use this parent
647 base = repo[rev].p1().rev()
648 else:
649 # Assuming there is a p1, this is the case where there also is a p2.
650 # We are thus rebasing a merge and need to pick the right merge base.
651 #
652 # Imagine we have:
653 # - M: current rebase revision in this step
654 # - A: one parent of M
655 # - B: other parent of M
656 # - D: destination of this merge step (p1 var)
657 #
658 # Consider the case where D is a descendant of A or B and the other is
659 # 'outside'. In this case, the right merge base is the D ancestor.
660 #
661 # An informal proof, assuming A is 'outside' and B is the D ancestor:
662 #
663 # If we pick B as the base, the merge involves:
664 # - changes from B to M (actual changeset payload)
665 # - changes from B to D (induced by rebase) as D is a rebased
666 # version of B)
667 # Which exactly represent the rebase operation.
668 #
669 # If we pick A as the base, the merge involves:
670 # - changes from A to M (actual changeset payload)
671 # - changes from A to D (with include changes between unrelated A and B
672 # plus changes induced by rebase)
673 # Which does not represent anything sensible and creates a lot of
674 # conflicts. A is thus not the right choice - B is.
675 #
676 # Note: The base found in this 'proof' is only correct in the specified
677 # case. This base does not make sense if is not D a descendant of A or B
678 # or if the other is not parent 'outside' (especially not if the other
679 # parent has been rebased). The current implementation does not
680 # make it feasible to consider different cases separately. In these
681 # other cases we currently just leave it to the user to correctly
682 # resolve an impossible merge using a wrong ancestor.
683 for p in repo[rev].parents():
684 if state.get(p.rev()) == p1:
685 base = p.rev()
686 break
687 else: # fallback when base not found
688 base = None
689
690 # Raise because this function is called wrong (see issue 4106)
691 raise AssertionError('no base found to rebase on '
692 '(defineparents called wrong)')
693 return p1, p2, base
694
694
695 def isagitpatch(repo, patchname):
695 def isagitpatch(repo, patchname):
696 'Return true if the given patch is in git format'
696 'Return true if the given patch is in git format'
697 mqpatch = os.path.join(repo.mq.path, patchname)
697 mqpatch = os.path.join(repo.mq.path, patchname)
698 for line in patch.linereader(file(mqpatch, 'rb')):
698 for line in patch.linereader(file(mqpatch, 'rb')):
699 if line.startswith('diff --git'):
699 if line.startswith('diff --git'):
700 return True
700 return True
701 return False
701 return False
702
702
703 def updatemq(repo, state, skipped, **opts):
703 def updatemq(repo, state, skipped, **opts):
704 'Update rebased mq patches - finalize and then import them'
704 'Update rebased mq patches - finalize and then import them'
705 mqrebase = {}
705 mqrebase = {}
706 mq = repo.mq
706 mq = repo.mq
707 original_series = mq.fullseries[:]
707 original_series = mq.fullseries[:]
708 skippedpatches = set()
708 skippedpatches = set()
709
709
710 for p in mq.applied:
710 for p in mq.applied:
711 rev = repo[p.node].rev()
711 rev = repo[p.node].rev()
712 if rev in state:
712 if rev in state:
713 repo.ui.debug('revision %d is an mq patch (%s), finalize it.\n' %
713 repo.ui.debug('revision %d is an mq patch (%s), finalize it.\n' %
714 (rev, p.name))
714 (rev, p.name))
715 mqrebase[rev] = (p.name, isagitpatch(repo, p.name))
715 mqrebase[rev] = (p.name, isagitpatch(repo, p.name))
716 else:
716 else:
717 # Applied but not rebased, not sure this should happen
717 # Applied but not rebased, not sure this should happen
718 skippedpatches.add(p.name)
718 skippedpatches.add(p.name)
719
719
720 if mqrebase:
720 if mqrebase:
721 mq.finish(repo, mqrebase.keys())
721 mq.finish(repo, mqrebase.keys())
722
722
723 # We must start import from the newest revision
723 # We must start import from the newest revision
724 for rev in sorted(mqrebase, reverse=True):
724 for rev in sorted(mqrebase, reverse=True):
725 if rev not in skipped:
725 if rev not in skipped:
726 name, isgit = mqrebase[rev]
726 name, isgit = mqrebase[rev]
727 repo.ui.note(_('updating mq patch %s to %s:%s\n') %
727 repo.ui.note(_('updating mq patch %s to %s:%s\n') %
728 (name, state[rev], repo[state[rev]]))
728 (name, state[rev], repo[state[rev]]))
729 mq.qimport(repo, (), patchname=name, git=isgit,
729 mq.qimport(repo, (), patchname=name, git=isgit,
730 rev=[str(state[rev])])
730 rev=[str(state[rev])])
731 else:
731 else:
732 # Rebased and skipped
732 # Rebased and skipped
733 skippedpatches.add(mqrebase[rev][0])
733 skippedpatches.add(mqrebase[rev][0])
734
734
735 # Patches were either applied and rebased and imported in
735 # Patches were either applied and rebased and imported in
736 # order, applied and removed or unapplied. Discard the removed
736 # order, applied and removed or unapplied. Discard the removed
737 # ones while preserving the original series order and guards.
737 # ones while preserving the original series order and guards.
738 newseries = [s for s in original_series
738 newseries = [s for s in original_series
739 if mq.guard_re.split(s, 1)[0] not in skippedpatches]
739 if mq.guard_re.split(s, 1)[0] not in skippedpatches]
740 mq.fullseries[:] = newseries
740 mq.fullseries[:] = newseries
741 mq.seriesdirty = True
741 mq.seriesdirty = True
742 mq.savedirty()
742 mq.savedirty()
743
743
744 def updatebookmarks(repo, targetnode, nstate, originalbookmarks):
744 def updatebookmarks(repo, targetnode, nstate, originalbookmarks):
745 'Move bookmarks to their correct changesets, and delete divergent ones'
745 'Move bookmarks to their correct changesets, and delete divergent ones'
746 marks = repo._bookmarks
746 marks = repo._bookmarks
747 for k, v in originalbookmarks.iteritems():
747 for k, v in originalbookmarks.iteritems():
748 if v in nstate:
748 if v in nstate:
749 # update the bookmarks for revs that have moved
749 # update the bookmarks for revs that have moved
750 marks[k] = nstate[v]
750 marks[k] = nstate[v]
751 bookmarks.deletedivergent(repo, [targetnode], k)
751 bookmarks.deletedivergent(repo, [targetnode], k)
752
752
753 marks.write()
753 marks.write()
754
754
755 def storestatus(repo, originalwd, target, state, collapse, keep, keepbranches,
755 def storestatus(repo, originalwd, target, state, collapse, keep, keepbranches,
756 external, activebookmark):
756 external, activebookmark):
757 'Store the current status to allow recovery'
757 'Store the current status to allow recovery'
758 f = repo.vfs("rebasestate", "w")
758 f = repo.vfs("rebasestate", "w")
759 f.write(repo[originalwd].hex() + '\n')
759 f.write(repo[originalwd].hex() + '\n')
760 f.write(repo[target].hex() + '\n')
760 f.write(repo[target].hex() + '\n')
761 f.write(repo[external].hex() + '\n')
761 f.write(repo[external].hex() + '\n')
762 f.write('%d\n' % int(collapse))
762 f.write('%d\n' % int(collapse))
763 f.write('%d\n' % int(keep))
763 f.write('%d\n' % int(keep))
764 f.write('%d\n' % int(keepbranches))
764 f.write('%d\n' % int(keepbranches))
765 f.write('%s\n' % (activebookmark or ''))
765 f.write('%s\n' % (activebookmark or ''))
766 for d, v in state.iteritems():
766 for d, v in state.iteritems():
767 oldrev = repo[d].hex()
767 oldrev = repo[d].hex()
768 if v >= 0:
768 if v >= 0:
769 newrev = repo[v].hex()
769 newrev = repo[v].hex()
770 elif v == revtodo:
770 elif v == revtodo:
771 # To maintain format compatibility, we have to use nullid.
771 # To maintain format compatibility, we have to use nullid.
772 # Please do remove this special case when upgrading the format.
772 # Please do remove this special case when upgrading the format.
773 newrev = hex(nullid)
773 newrev = hex(nullid)
774 else:
774 else:
775 newrev = v
775 newrev = v
776 f.write("%s:%s\n" % (oldrev, newrev))
776 f.write("%s:%s\n" % (oldrev, newrev))
777 f.close()
777 f.close()
778 repo.ui.debug('rebase status stored\n')
778 repo.ui.debug('rebase status stored\n')
779
779
780 def clearstatus(repo):
780 def clearstatus(repo):
781 'Remove the status files'
781 'Remove the status files'
782 _clearrebasesetvisibiliy(repo)
782 _clearrebasesetvisibiliy(repo)
783 util.unlinkpath(repo.join("rebasestate"), ignoremissing=True)
783 util.unlinkpath(repo.join("rebasestate"), ignoremissing=True)
784
784
785 def restorestatus(repo):
785 def restorestatus(repo):
786 'Restore a previously stored status'
786 'Restore a previously stored status'
787 try:
787 try:
788 keepbranches = None
788 keepbranches = None
789 target = None
789 target = None
790 collapse = False
790 collapse = False
791 external = nullrev
791 external = nullrev
792 activebookmark = None
792 activebookmark = None
793 state = {}
793 state = {}
794 f = repo.vfs("rebasestate")
794 f = repo.vfs("rebasestate")
795 for i, l in enumerate(f.read().splitlines()):
795 for i, l in enumerate(f.read().splitlines()):
796 if i == 0:
796 if i == 0:
797 originalwd = repo[l].rev()
797 originalwd = repo[l].rev()
798 elif i == 1:
798 elif i == 1:
799 target = repo[l].rev()
799 target = repo[l].rev()
800 elif i == 2:
800 elif i == 2:
801 external = repo[l].rev()
801 external = repo[l].rev()
802 elif i == 3:
802 elif i == 3:
803 collapse = bool(int(l))
803 collapse = bool(int(l))
804 elif i == 4:
804 elif i == 4:
805 keep = bool(int(l))
805 keep = bool(int(l))
806 elif i == 5:
806 elif i == 5:
807 keepbranches = bool(int(l))
807 keepbranches = bool(int(l))
808 elif i == 6 and not (len(l) == 81 and ':' in l):
808 elif i == 6 and not (len(l) == 81 and ':' in l):
809 # line 6 is a recent addition, so for backwards compatibility
809 # line 6 is a recent addition, so for backwards compatibility
810 # check that the line doesn't look like the oldrev:newrev lines
810 # check that the line doesn't look like the oldrev:newrev lines
811 activebookmark = l
811 activebookmark = l
812 else:
812 else:
813 oldrev, newrev = l.split(':')
813 oldrev, newrev = l.split(':')
814 if newrev in (str(nullmerge), str(revignored)):
814 if newrev in (str(nullmerge), str(revignored)):
815 state[repo[oldrev].rev()] = int(newrev)
815 state[repo[oldrev].rev()] = int(newrev)
816 elif newrev == nullid:
816 elif newrev == nullid:
817 state[repo[oldrev].rev()] = revtodo
817 state[repo[oldrev].rev()] = revtodo
818 # Legacy compat special case
818 # Legacy compat special case
819 else:
819 else:
820 state[repo[oldrev].rev()] = repo[newrev].rev()
820 state[repo[oldrev].rev()] = repo[newrev].rev()
821
821
822 if keepbranches is None:
822 if keepbranches is None:
823 raise util.Abort(_('.hg/rebasestate is incomplete'))
823 raise util.Abort(_('.hg/rebasestate is incomplete'))
824
824
825 skipped = set()
825 skipped = set()
826 # recompute the set of skipped revs
826 # recompute the set of skipped revs
827 if not collapse:
827 if not collapse:
828 seen = set([target])
828 seen = set([target])
829 for old, new in sorted(state.items()):
829 for old, new in sorted(state.items()):
830 if new != revtodo and new in seen:
830 if new != revtodo and new in seen:
831 skipped.add(old)
831 skipped.add(old)
832 seen.add(new)
832 seen.add(new)
833 repo.ui.debug('computed skipped revs: %s\n' %
833 repo.ui.debug('computed skipped revs: %s\n' %
834 (' '.join(str(r) for r in sorted(skipped)) or None))
834 (' '.join(str(r) for r in sorted(skipped)) or None))
835 repo.ui.debug('rebase status resumed\n')
835 repo.ui.debug('rebase status resumed\n')
836 _setrebasesetvisibility(repo, state.keys())
836 _setrebasesetvisibility(repo, state.keys())
837 return (originalwd, target, state, skipped,
837 return (originalwd, target, state, skipped,
838 collapse, keep, keepbranches, external, activebookmark)
838 collapse, keep, keepbranches, external, activebookmark)
839 except IOError, err:
839 except IOError, err:
840 if err.errno != errno.ENOENT:
840 if err.errno != errno.ENOENT:
841 raise
841 raise
842 raise util.Abort(_('no rebase in progress'))
842 raise util.Abort(_('no rebase in progress'))
843
843
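# Editor's note (not part of the original source): the "len(l) == 81" check
# in restorestatus() above works because an oldrev:newrev line is two
# 40-character hex nodes joined by ':' (81 characters total), so any 6th line
# that does not match that shape is taken to be the stored active bookmark.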
844 def inrebase(repo, originalwd, state):
844 def inrebase(repo, originalwd, state):
845 '''check whether the working dir is in an interrupted rebase'''
845 '''check whether the working dir is in an interrupted rebase'''
846 parents = [p.rev() for p in repo.parents()]
846 parents = [p.rev() for p in repo.parents()]
847 if originalwd in parents:
847 if originalwd in parents:
848 return True
848 return True
849
849
850 for newrev in state.itervalues():
850 for newrev in state.itervalues():
851 if newrev in parents:
851 if newrev in parents:
852 return True
852 return True
853
853
854 return False
854 return False
855
855
856 def abort(repo, originalwd, target, state, activebookmark=None):
856 def abort(repo, originalwd, target, state, activebookmark=None):
857 '''Restore the repository to its original state. Additional args:
857 '''Restore the repository to its original state. Additional args:
858
858
859 activebookmark: the name of the bookmark that should be active after the
859 activebookmark: the name of the bookmark that should be active after the
860 restore'''
860 restore'''
861 dstates = [s for s in state.values() if s >= 0]
861 dstates = [s for s in state.values() if s >= 0]
862 immutable = [d for d in dstates if not repo[d].mutable()]
862 immutable = [d for d in dstates if not repo[d].mutable()]
863 cleanup = True
863 cleanup = True
864 if immutable:
864 if immutable:
865 repo.ui.warn(_("warning: can't clean up immutable changesets %s\n")
865 repo.ui.warn(_("warning: can't clean up immutable changesets %s\n")
866 % ', '.join(str(repo[r]) for r in immutable),
866 % ', '.join(str(repo[r]) for r in immutable),
867 hint=_('see "hg help phases" for details'))
867 hint=_('see "hg help phases" for details'))
868 cleanup = False
868 cleanup = False
869
869
870 descendants = set()
870 descendants = set()
871 if dstates:
871 if dstates:
872 descendants = set(repo.changelog.descendants(dstates))
872 descendants = set(repo.changelog.descendants(dstates))
873 if descendants - set(dstates):
873 if descendants - set(dstates):
874 repo.ui.warn(_("warning: new changesets detected on target branch, "
874 repo.ui.warn(_("warning: new changesets detected on target branch, "
875 "can't strip\n"))
875 "can't strip\n"))
876 cleanup = False
876 cleanup = False
877
877
878 if cleanup:
878 if cleanup:
879 # Update away from the rebase if necessary
879 # Update away from the rebase if necessary
880 if inrebase(repo, originalwd, state):
880 if inrebase(repo, originalwd, state):
881 merge.update(repo, originalwd, False, True, False)
881 merge.update(repo, originalwd, False, True, False)
882
882
883 # Strip from the first rebased revision
883 # Strip from the first rebased revision
884 rebased = filter(lambda x: x >= 0 and x != target, state.values())
884 rebased = filter(lambda x: x >= 0 and x != target, state.values())
885 if rebased:
885 if rebased:
886 strippoints = [c.node() for c in repo.set('roots(%ld)', rebased)]
886 strippoints = [c.node() for c in repo.set('roots(%ld)', rebased)]
887 # no backup of rebased cset versions needed
887 # no backup of rebased cset versions needed
888 repair.strip(repo.ui, repo, strippoints)
888 repair.strip(repo.ui, repo, strippoints)
889
889
890 if activebookmark:
890 if activebookmark:
891 bookmarks.activate(repo, activebookmark)
891 bookmarks.activate(repo, activebookmark)
892
892
893 clearstatus(repo)
893 clearstatus(repo)
894 repo.ui.warn(_('rebase aborted\n'))
894 repo.ui.warn(_('rebase aborted\n'))
895 return 0
895 return 0
896
896
897 def buildstate(repo, dest, rebaseset, collapse):
897 def buildstate(repo, dest, rebaseset, collapse):
898 '''Define which revisions are going to be rebased and where
898 '''Define which revisions are going to be rebased and where
899
899
900 repo: repo
900 repo: repo
901 dest: context
901 dest: context
902 rebaseset: set of rev
902 rebaseset: set of rev
903 '''
903 '''
904 _setrebasesetvisibility(repo, rebaseset)
904 _setrebasesetvisibility(repo, rebaseset)
905
905
906 # This check isn't strictly necessary, since mq detects commits over an
906 # This check isn't strictly necessary, since mq detects commits over an
907 # applied patch. But it prevents messing up the working directory when
907 # applied patch. But it prevents messing up the working directory when
908 # a partially completed rebase is blocked by mq.
908 # a partially completed rebase is blocked by mq.
909 if 'qtip' in repo.tags() and (dest.node() in
909 if 'qtip' in repo.tags() and (dest.node() in
910 [s.node for s in repo.mq.applied]):
910 [s.node for s in repo.mq.applied]):
911 raise util.Abort(_('cannot rebase onto an applied mq patch'))
911 raise util.Abort(_('cannot rebase onto an applied mq patch'))
912
912
913 roots = list(repo.set('roots(%ld)', rebaseset))
913 roots = list(repo.set('roots(%ld)', rebaseset))
914 if not roots:
914 if not roots:
915 raise util.Abort(_('no matching revisions'))
915 raise util.Abort(_('no matching revisions'))
916 roots.sort()
916 roots.sort()
917 state = {}
917 state = {}
918 detachset = set()
918 detachset = set()
919 for root in roots:
919 for root in roots:
920 commonbase = root.ancestor(dest)
920 commonbase = root.ancestor(dest)
921 if commonbase == root:
921 if commonbase == root:
922 raise util.Abort(_('source is ancestor of destination'))
922 raise util.Abort(_('source is ancestor of destination'))
923 if commonbase == dest:
923 if commonbase == dest:
924 samebranch = root.branch() == dest.branch()
924 samebranch = root.branch() == dest.branch()
925 if not collapse and samebranch and root in dest.children():
925 if not collapse and samebranch and root in dest.children():
926 repo.ui.debug('source is a child of destination\n')
926 repo.ui.debug('source is a child of destination\n')
927 return None
927 return None
928
928
929 repo.ui.debug('rebase onto %d starting from %s\n' % (dest, root))
929 repo.ui.debug('rebase onto %d starting from %s\n' % (dest, root))
930 state.update(dict.fromkeys(rebaseset, revtodo))
930 state.update(dict.fromkeys(rebaseset, revtodo))
931 # Rebase tries to turn <dest> into a parent of <root> while
931 # Rebase tries to turn <dest> into a parent of <root> while
932 # preserving the number of parents of rebased changesets:
932 # preserving the number of parents of rebased changesets:
933 #
933 #
934 # - A changeset with a single parent will always be rebased as a
934 # - A changeset with a single parent will always be rebased as a
935 # changeset with a single parent.
935 # changeset with a single parent.
936 #
936 #
937 # - A merge will be rebased as a merge unless its parents are both
937 # - A merge will be rebased as a merge unless its parents are both
938 # ancestors of <dest> or are themselves in the rebased set and
938 # ancestors of <dest> or are themselves in the rebased set and
939 # pruned while rebased.
939 # pruned while rebased.
940 #
940 #
941 # If one parent of <root> is an ancestor of <dest>, the rebased
941 # If one parent of <root> is an ancestor of <dest>, the rebased
942 # version of this parent will be <dest>. This is always true with
942 # version of this parent will be <dest>. This is always true with
943 # --base option.
943 # --base option.
944 #
944 #
945 # Otherwise, we need to *replace* the original parents with
945 # Otherwise, we need to *replace* the original parents with
946 # <dest>. This "detaches" the rebased set from its former location
946 # <dest>. This "detaches" the rebased set from its former location
947 # and rebases it onto <dest>. Changes introduced by ancestors of
947 # and rebases it onto <dest>. Changes introduced by ancestors of
948 # <root> not common with <dest> (the detachset, marked as
948 # <root> not common with <dest> (the detachset, marked as
949 # nullmerge) are "removed" from the rebased changesets.
949 # nullmerge) are "removed" from the rebased changesets.
950 #
950 #
951 # - If <root> has a single parent, set it to <dest>.
951 # - If <root> has a single parent, set it to <dest>.
952 #
952 #
953 # - If <root> is a merge, we cannot decide which parent to
953 # - If <root> is a merge, we cannot decide which parent to
954 # replace, the rebase operation is not clearly defined.
954 # replace, the rebase operation is not clearly defined.
955 #
955 #
956 # The table below sums up this behavior:
956 # The table below sums up this behavior:
957 #
957 #
958 # +------------------+----------------------+-------------------------+
958 # +------------------+----------------------+-------------------------+
959 # | | one parent | merge |
959 # | | one parent | merge |
960 # +------------------+----------------------+-------------------------+
960 # +------------------+----------------------+-------------------------+
961 # | parent in | new parent is <dest> | parents in ::<dest> are |
961 # | parent in | new parent is <dest> | parents in ::<dest> are |
962 # | ::<dest> | | remapped to <dest> |
962 # | ::<dest> | | remapped to <dest> |
963 # +------------------+----------------------+-------------------------+
963 # +------------------+----------------------+-------------------------+
964 # | unrelated source | new parent is <dest> | ambiguous, abort |
964 # | unrelated source | new parent is <dest> | ambiguous, abort |
965 # +------------------+----------------------+-------------------------+
965 # +------------------+----------------------+-------------------------+
966 #
966 #
967 # The actual abort is handled by `defineparents`
967 # The actual abort is handled by `defineparents`
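# Editor's example (sketch, not in the original source): a single-parent
# root whose parent is not an ancestor of <dest> lands in the left column
# above: it is simply re-parented onto <dest>, and the ancestors it loses
# become the detachset (nullmerge) computed just below. A merge root with
# no parent in ::<dest> hits the "ambiguous, abort" cell instead.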
968 if len(root.parents()) <= 1:
968 if len(root.parents()) <= 1:
969 # ancestors of <root> not ancestors of <dest>
969 # ancestors of <root> not ancestors of <dest>
970 detachset.update(repo.changelog.findmissingrevs([commonbase.rev()],
970 detachset.update(repo.changelog.findmissingrevs([commonbase.rev()],
971 [root.rev()]))
971 [root.rev()]))
972 for r in detachset:
972 for r in detachset:
973 if r not in state:
973 if r not in state:
974 state[r] = nullmerge
974 state[r] = nullmerge
975 if len(roots) > 1:
975 if len(roots) > 1:
976 # If we have multiple roots, we may have "holes" in the rebase set.
976 # If we have multiple roots, we may have "holes" in the rebase set.
977 # Rebase roots that descend from those "holes" should not be detached as
977 # Rebase roots that descend from those "holes" should not be detached as
978 # other roots are. We use the special `revignored` to inform rebase that
978 # other roots are. We use the special `revignored` to inform rebase that
979 # the revision should be ignored but that `defineparents` should search
979 # the revision should be ignored but that `defineparents` should search
980 # for a rebase destination that makes sense regarding the rebased topology.
980 # for a rebase destination that makes sense regarding the rebased topology.
981 rebasedomain = set(repo.revs('%ld::%ld', rebaseset, rebaseset))
981 rebasedomain = set(repo.revs('%ld::%ld', rebaseset, rebaseset))
982 for ignored in set(rebasedomain) - set(rebaseset):
982 for ignored in set(rebasedomain) - set(rebaseset):
983 state[ignored] = revignored
983 state[ignored] = revignored
984 return repo['.'].rev(), dest.rev(), state
984 return repo['.'].rev(), dest.rev(), state
985
985
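# Editor's summary (sketch, not part of the original source): buildstate()
# returns (originalwd, dest, state), where state maps each relevant revision
# to one of:
#   rev >= 0        already rebased to that revision
#   revtodo   (-1)  still to be rebased
#   nullmerge (-2)  detached ancestor, never rebased itself
#   revignored (-3) "hole" between multiple roots, skipped but considered
#                   when choosing rebase destinations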
986 def clearrebased(ui, repo, state, skipped, collapsedas=None):
986 def clearrebased(ui, repo, state, skipped, collapsedas=None):
987 """dispose of rebased revision at the end of the rebase
987 """dispose of rebased revision at the end of the rebase
988
988
989 If `collapsedas` is not None, the rebase was a collapse whose result if the
989 If `collapsedas` is not None, the rebase was a collapse whose result if the
990 `collapsedas` node."""
990 `collapsedas` node."""
991 if obsolete.isenabled(repo, obsolete.createmarkersopt):
991 if obsolete.isenabled(repo, obsolete.createmarkersopt):
992 markers = []
992 markers = []
993 for rev, newrev in sorted(state.items()):
993 for rev, newrev in sorted(state.items()):
994 if newrev >= 0:
994 if newrev >= 0:
995 if rev in skipped:
995 if rev in skipped:
996 succs = ()
996 succs = ()
997 elif collapsedas is not None:
997 elif collapsedas is not None:
998 succs = (repo[collapsedas],)
998 succs = (repo[collapsedas],)
999 else:
999 else:
1000 succs = (repo[newrev],)
1000 succs = (repo[newrev],)
1001 markers.append((repo[rev], succs))
1001 markers.append((repo[rev], succs))
1002 if markers:
1002 if markers:
1003 obsolete.createmarkers(repo, markers)
1003 obsolete.createmarkers(repo, markers)
1004 else:
1004 else:
1005 rebased = [rev for rev in state if state[rev] > nullmerge]
1005 rebased = [rev for rev in state if state[rev] > nullmerge]
1006 if rebased:
1006 if rebased:
1007 stripped = []
1007 stripped = []
1008 for root in repo.set('roots(%ld)', rebased):
1008 for root in repo.set('roots(%ld)', rebased):
1009 if set(repo.changelog.descendants([root.rev()])) - set(state):
1009 if set(repo.changelog.descendants([root.rev()])) - set(state):
1010 ui.warn(_("warning: new changesets detected "
1010 ui.warn(_("warning: new changesets detected "
1011 "on source branch, not stripping\n"))
1011 "on source branch, not stripping\n"))
1012 else:
1012 else:
1013 stripped.append(root.node())
1013 stripped.append(root.node())
1014 if stripped:
1014 if stripped:
1015 # backup the old csets by default
1015 # backup the old csets by default
1016 repair.strip(ui, repo, stripped, "all")
1016 repair.strip(ui, repo, stripped, "all")
1017
1017
1018
1018
1019 def pullrebase(orig, ui, repo, *args, **opts):
1019 def pullrebase(orig, ui, repo, *args, **opts):
1020 'Call rebase after pull if the latter has been invoked with --rebase'
1020 'Call rebase after pull if the latter has been invoked with --rebase'
1021 if opts.get('rebase'):
1021 if opts.get('rebase'):
1022 if opts.get('update'):
1022 if opts.get('update'):
1023 del opts['update']
1023 del opts['update']
1024 ui.debug('--update and --rebase are not compatible, ignoring '
1024 ui.debug('--update and --rebase are not compatible, ignoring '
1025 'the update flag\n')
1025 'the update flag\n')
1026
1026
1027 movemarkfrom = repo['.'].node()
1027 movemarkfrom = repo['.'].node()
1028 revsprepull = len(repo)
1028 revsprepull = len(repo)
1029 origpostincoming = commands.postincoming
1029 origpostincoming = commands.postincoming
1030 def _dummy(*args, **kwargs):
1030 def _dummy(*args, **kwargs):
1031 pass
1031 pass
1032 commands.postincoming = _dummy
1032 commands.postincoming = _dummy
1033 try:
1033 try:
1034 orig(ui, repo, *args, **opts)
1034 orig(ui, repo, *args, **opts)
1035 finally:
1035 finally:
1036 commands.postincoming = origpostincoming
1036 commands.postincoming = origpostincoming
1037 revspostpull = len(repo)
1037 revspostpull = len(repo)
1038 if revspostpull > revsprepull:
1038 if revspostpull > revsprepull:
1039 # the --rev option from pull conflicts with rebase's own --rev,
1039 # the --rev option from pull conflicts with rebase's own --rev,
1040 # so drop it
1040 # so drop it
1041 if 'rev' in opts:
1041 if 'rev' in opts:
1042 del opts['rev']
1042 del opts['rev']
1043 # positional argument from pull conflicts with rebase's own
1043 # positional argument from pull conflicts with rebase's own
1044 # --source.
1044 # --source.
1045 if 'source' in opts:
1045 if 'source' in opts:
1046 del opts['source']
1046 del opts['source']
1047 rebase(ui, repo, **opts)
1047 rebase(ui, repo, **opts)
1048 branch = repo[None].branch()
1048 branch = repo[None].branch()
1049 dest = repo[branch].rev()
1049 dest = repo[branch].rev()
1050 if dest != repo['.'].rev():
1050 if dest != repo['.'].rev():
1051 # there was nothing to rebase, so we force an update
1051 # there was nothing to rebase, so we force an update
1052 hg.update(repo, dest)
1052 hg.update(repo, dest)
1053 if bookmarks.update(repo, [movemarkfrom], repo['.'].node()):
1053 if bookmarks.update(repo, [movemarkfrom], repo['.'].node()):
1054 ui.status(_("updating bookmark %s\n")
1054 ui.status(_("updating bookmark %s\n")
1055 % repo._bookmarkcurrent)
1055 % repo._activebookmark)
1056 else:
1056 else:
1057 if opts.get('tool'):
1057 if opts.get('tool'):
1058 raise util.Abort(_('--tool can only be used with --rebase'))
1058 raise util.Abort(_('--tool can only be used with --rebase'))
1059 orig(ui, repo, *args, **opts)
1059 orig(ui, repo, *args, **opts)
1060
1060
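# Editor's usage sketch (not part of the original source):
#
#   $ hg pull --rebase                       # pull, then rebase local work
#                                            # onto the new branch head
#   $ hg pull --rebase --tool internal:merge # same, with an explicit tool
#
# --update is silently dropped when combined with --rebase, and --tool
# without --rebase aborts (both handled above).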
1061 def _setrebasesetvisibility(repo, revs):
1061 def _setrebasesetvisibility(repo, revs):
1062 """store the currently rebased set on the repo object
1062 """store the currently rebased set on the repo object
1063
1063
1064 This is used by another function to prevent rebased revisions from
1064 This is used by another function to prevent rebased revisions from
1065 becoming hidden (see issue4505)"""
1065 becoming hidden (see issue4505)"""
1066 repo = repo.unfiltered()
1066 repo = repo.unfiltered()
1067 revs = set(revs)
1067 revs = set(revs)
1068 repo._rebaseset = revs
1068 repo._rebaseset = revs
1069 # invalidate cache if visibility changes
1069 # invalidate cache if visibility changes
1070 hiddens = repo.filteredrevcache.get('visible', set())
1070 hiddens = repo.filteredrevcache.get('visible', set())
1071 if revs & hiddens:
1071 if revs & hiddens:
1072 repo.invalidatevolatilesets()
1072 repo.invalidatevolatilesets()
1073
1073
1074 def _clearrebasesetvisibiliy(repo):
1074 def _clearrebasesetvisibiliy(repo):
1075 """remove rebaseset data from the repo"""
1075 """remove rebaseset data from the repo"""
1076 repo = repo.unfiltered()
1076 repo = repo.unfiltered()
1077 if '_rebaseset' in vars(repo):
1077 if '_rebaseset' in vars(repo):
1078 del repo._rebaseset
1078 del repo._rebaseset
1079
1079
1080 def _rebasedvisible(orig, repo):
1080 def _rebasedvisible(orig, repo):
1081 """ensure rebased revs stay visible (see issue4505)"""
1081 """ensure rebased revs stay visible (see issue4505)"""
1082 blockers = orig(repo)
1082 blockers = orig(repo)
1083 blockers.update(getattr(repo, '_rebaseset', ()))
1083 blockers.update(getattr(repo, '_rebaseset', ()))
1084 return blockers
1084 return blockers
1085
1085
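# Editor's note (sketch, not part of the original source): uisetup() below
# wraps repoview._getdynamicblockers with _rebasedvisible, so as long as
# _rebaseset is stored on the unfiltered repo the revisions being rebased
# are reported as visibility blockers and cannot become hidden mid-rebase
# (issue4505).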
1086 def summaryhook(ui, repo):
1086 def summaryhook(ui, repo):
1087 if not os.path.exists(repo.join('rebasestate')):
1087 if not os.path.exists(repo.join('rebasestate')):
1088 return
1088 return
1089 try:
1089 try:
1090 state = restorestatus(repo)[2]
1090 state = restorestatus(repo)[2]
1091 except error.RepoLookupError:
1091 except error.RepoLookupError:
1092 # i18n: column positioning for "hg summary"
1092 # i18n: column positioning for "hg summary"
1093 msg = _('rebase: (use "hg rebase --abort" to clear broken state)\n')
1093 msg = _('rebase: (use "hg rebase --abort" to clear broken state)\n')
1094 ui.write(msg)
1094 ui.write(msg)
1095 return
1095 return
1096 numrebased = len([i for i in state.itervalues() if i >= 0])
1096 numrebased = len([i for i in state.itervalues() if i >= 0])
1097 # i18n: column positioning for "hg summary"
1097 # i18n: column positioning for "hg summary"
1098 ui.write(_('rebase: %s, %s (rebase --continue)\n') %
1098 ui.write(_('rebase: %s, %s (rebase --continue)\n') %
1099 (ui.label(_('%d rebased'), 'rebase.rebased') % numrebased,
1099 (ui.label(_('%d rebased'), 'rebase.rebased') % numrebased,
1100 ui.label(_('%d remaining'), 'rebase.remaining') %
1100 ui.label(_('%d remaining'), 'rebase.remaining') %
1101 (len(state) - numrebased)))
1101 (len(state) - numrebased)))
1102
1102
1103 def uisetup(ui):
1103 def uisetup(ui):
1104 # Replace pull with a decorator to provide the --rebase option
1104 # Replace pull with a decorator to provide the --rebase option
1105 entry = extensions.wrapcommand(commands.table, 'pull', pullrebase)
1105 entry = extensions.wrapcommand(commands.table, 'pull', pullrebase)
1106 entry[1].append(('', 'rebase', None,
1106 entry[1].append(('', 'rebase', None,
1107 _("rebase working directory to branch head")))
1107 _("rebase working directory to branch head")))
1108 entry[1].append(('t', 'tool', '',
1108 entry[1].append(('t', 'tool', '',
1109 _("specify merge tool for rebase")))
1109 _("specify merge tool for rebase")))
1110 cmdutil.summaryhooks.add('rebase', summaryhook)
1110 cmdutil.summaryhooks.add('rebase', summaryhook)
1111 cmdutil.unfinishedstates.append(
1111 cmdutil.unfinishedstates.append(
1112 ['rebasestate', False, False, _('rebase in progress'),
1112 ['rebasestate', False, False, _('rebase in progress'),
1113 _("use 'hg rebase --continue' or 'hg rebase --abort'")])
1113 _("use 'hg rebase --continue' or 'hg rebase --abort'")])
1114 # ensure rebased revs are not hidden
1114 # ensure rebased revs are not hidden
1115 extensions.wrapfunction(repoview, '_getdynamicblockers', _rebasedvisible)
1115 extensions.wrapfunction(repoview, '_getdynamicblockers', _rebasedvisible)
1116
1116
@@ -1,733 +1,733 b''
1 # shelve.py - save/restore working directory state
1 # shelve.py - save/restore working directory state
2 #
2 #
3 # Copyright 2013 Facebook, Inc.
3 # Copyright 2013 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 """save and restore changes to the working directory
8 """save and restore changes to the working directory
9
9
10 The "hg shelve" command saves changes made to the working directory
10 The "hg shelve" command saves changes made to the working directory
11 and reverts those changes, resetting the working directory to a clean
11 and reverts those changes, resetting the working directory to a clean
12 state.
12 state.
13
13
14 Later on, the "hg unshelve" command restores the changes saved by "hg
14 Later on, the "hg unshelve" command restores the changes saved by "hg
15 shelve". Changes can be restored even after updating to a different
15 shelve". Changes can be restored even after updating to a different
16 parent, in which case Mercurial's merge machinery will resolve any
16 parent, in which case Mercurial's merge machinery will resolve any
17 conflicts if necessary.
17 conflicts if necessary.
18
18
19 You can have more than one shelved change outstanding at a time; each
19 You can have more than one shelved change outstanding at a time; each
20 shelved change has a distinct name. For details, see the help for "hg
20 shelved change has a distinct name. For details, see the help for "hg
21 shelve".
21 shelve".
22 """
22 """
23
23
24 from mercurial.i18n import _
24 from mercurial.i18n import _
25 from mercurial.node import nullid, nullrev, bin, hex
25 from mercurial.node import nullid, nullrev, bin, hex
26 from mercurial import changegroup, cmdutil, scmutil, phases, commands
26 from mercurial import changegroup, cmdutil, scmutil, phases, commands
27 from mercurial import error, hg, mdiff, merge, patch, repair, util
27 from mercurial import error, hg, mdiff, merge, patch, repair, util
28 from mercurial import templatefilters, exchange, bundlerepo
28 from mercurial import templatefilters, exchange, bundlerepo
29 from mercurial import lock as lockmod
29 from mercurial import lock as lockmod
30 from hgext import rebase
30 from hgext import rebase
31 import errno
31 import errno
32
32
33 cmdtable = {}
33 cmdtable = {}
34 command = cmdutil.command(cmdtable)
34 command = cmdutil.command(cmdtable)
35 testedwith = 'internal'
35 testedwith = 'internal'
36
36
37 class shelvedfile(object):
37 class shelvedfile(object):
38 """Helper for the file storing a single shelve
38 """Helper for the file storing a single shelve
39
39
40 Handles common functions on shelve files (.hg/.patch) using
40 Handles common functions on shelve files (.hg/.patch) using
41 the vfs layer"""
41 the vfs layer"""
42 def __init__(self, repo, name, filetype=None):
42 def __init__(self, repo, name, filetype=None):
43 self.repo = repo
43 self.repo = repo
44 self.name = name
44 self.name = name
45 self.vfs = scmutil.vfs(repo.join('shelved'))
45 self.vfs = scmutil.vfs(repo.join('shelved'))
46 self.ui = self.repo.ui
46 self.ui = self.repo.ui
47 if filetype:
47 if filetype:
48 self.fname = name + '.' + filetype
48 self.fname = name + '.' + filetype
49 else:
49 else:
50 self.fname = name
50 self.fname = name
51
51
52 def exists(self):
52 def exists(self):
53 return self.vfs.exists(self.fname)
53 return self.vfs.exists(self.fname)
54
54
55 def filename(self):
55 def filename(self):
56 return self.vfs.join(self.fname)
56 return self.vfs.join(self.fname)
57
57
58 def unlink(self):
58 def unlink(self):
59 util.unlink(self.filename())
59 util.unlink(self.filename())
60
60
61 def stat(self):
61 def stat(self):
62 return self.vfs.stat(self.fname)
62 return self.vfs.stat(self.fname)
63
63
64 def opener(self, mode='rb'):
64 def opener(self, mode='rb'):
65 try:
65 try:
66 return self.vfs(self.fname, mode)
66 return self.vfs(self.fname, mode)
67 except IOError, err:
67 except IOError, err:
68 if err.errno != errno.ENOENT:
68 if err.errno != errno.ENOENT:
69 raise
69 raise
70 raise util.Abort(_("shelved change '%s' not found") % self.name)
70 raise util.Abort(_("shelved change '%s' not found") % self.name)
71
71
72 def applybundle(self):
72 def applybundle(self):
73 fp = self.opener()
73 fp = self.opener()
74 try:
74 try:
75 gen = exchange.readbundle(self.repo.ui, fp, self.fname, self.vfs)
75 gen = exchange.readbundle(self.repo.ui, fp, self.fname, self.vfs)
76 changegroup.addchangegroup(self.repo, gen, 'unshelve',
76 changegroup.addchangegroup(self.repo, gen, 'unshelve',
77 'bundle:' + self.vfs.join(self.fname),
77 'bundle:' + self.vfs.join(self.fname),
78 targetphase=phases.secret)
78 targetphase=phases.secret)
79 finally:
79 finally:
80 fp.close()
80 fp.close()
81
81
82 def bundlerepo(self):
82 def bundlerepo(self):
83 return bundlerepo.bundlerepository(self.repo.baseui, self.repo.root,
83 return bundlerepo.bundlerepository(self.repo.baseui, self.repo.root,
84 self.vfs.join(self.fname))
84 self.vfs.join(self.fname))
85 def writebundle(self, cg):
85 def writebundle(self, cg):
86 changegroup.writebundle(self.ui, cg, self.fname, 'HG10UN', self.vfs)
86 changegroup.writebundle(self.ui, cg, self.fname, 'HG10UN', self.vfs)
87
87
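# Editor's example (sketch, not part of the original source): each shelve is
# a pair of files under .hg/shelved/, addressed through shelvedfile, e.g.:
#
#   .hg/shelved/default-01.hg      changegroup bundle with the shelved commit
#   .hg/shelved/default-01.patch   exported patch, used for listing/display
#
# The name 'default-01' is only illustrative; see gennames() in createcmd()
# for how names are actually chosen.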
88 class shelvedstate(object):
88 class shelvedstate(object):
89 """Handle persistence during unshelving operations.
89 """Handle persistence during unshelving operations.
90
90
91 Handles saving and restoring a shelved state. Allows for different
91 Handles saving and restoring a shelved state. Allows for different
92 versions of the shelved state format and handles them appropriately.
92 versions of the shelved state format and handles them appropriately.
93 """
93 """
94 _version = 1
94 _version = 1
95 _filename = 'shelvedstate'
95 _filename = 'shelvedstate'
96
96
97 @classmethod
97 @classmethod
98 def load(cls, repo):
98 def load(cls, repo):
99 fp = repo.vfs(cls._filename)
99 fp = repo.vfs(cls._filename)
100 try:
100 try:
101 version = int(fp.readline().strip())
101 version = int(fp.readline().strip())
102
102
103 if version != cls._version:
103 if version != cls._version:
104 raise util.Abort(_('this version of shelve is incompatible '
104 raise util.Abort(_('this version of shelve is incompatible '
105 'with the version used in this repo'))
105 'with the version used in this repo'))
106 name = fp.readline().strip()
106 name = fp.readline().strip()
107 wctx = fp.readline().strip()
107 wctx = fp.readline().strip()
108 pendingctx = fp.readline().strip()
108 pendingctx = fp.readline().strip()
109 parents = [bin(h) for h in fp.readline().split()]
109 parents = [bin(h) for h in fp.readline().split()]
110 stripnodes = [bin(h) for h in fp.readline().split()]
110 stripnodes = [bin(h) for h in fp.readline().split()]
111 finally:
111 finally:
112 fp.close()
112 fp.close()
113
113
114 obj = cls()
114 obj = cls()
115 obj.name = name
115 obj.name = name
116 obj.wctx = repo[bin(wctx)]
116 obj.wctx = repo[bin(wctx)]
117 obj.pendingctx = repo[bin(pendingctx)]
117 obj.pendingctx = repo[bin(pendingctx)]
118 obj.parents = parents
118 obj.parents = parents
119 obj.stripnodes = stripnodes
119 obj.stripnodes = stripnodes
120
120
121 return obj
121 return obj
122
122
123 @classmethod
123 @classmethod
124 def save(cls, repo, name, originalwctx, pendingctx, stripnodes):
124 def save(cls, repo, name, originalwctx, pendingctx, stripnodes):
125 fp = repo.vfs(cls._filename, 'wb')
125 fp = repo.vfs(cls._filename, 'wb')
126 fp.write('%i\n' % cls._version)
126 fp.write('%i\n' % cls._version)
127 fp.write('%s\n' % name)
127 fp.write('%s\n' % name)
128 fp.write('%s\n' % hex(originalwctx.node()))
128 fp.write('%s\n' % hex(originalwctx.node()))
129 fp.write('%s\n' % hex(pendingctx.node()))
129 fp.write('%s\n' % hex(pendingctx.node()))
130 fp.write('%s\n' % ' '.join([hex(p) for p in repo.dirstate.parents()]))
130 fp.write('%s\n' % ' '.join([hex(p) for p in repo.dirstate.parents()]))
131 fp.write('%s\n' % ' '.join([hex(n) for n in stripnodes]))
131 fp.write('%s\n' % ' '.join([hex(n) for n in stripnodes]))
132 fp.close()
132 fp.close()
133
133
134 @classmethod
134 @classmethod
135 def clear(cls, repo):
135 def clear(cls, repo):
136 util.unlinkpath(repo.join(cls._filename), ignoremissing=True)
136 util.unlinkpath(repo.join(cls._filename), ignoremissing=True)
137
137
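# Editor's note (sketch, not part of the original source): the 'shelvedstate'
# file written by save() above is line oriented:
#
#   1                                  version (cls._version)
#   <shelve name>
#   <original wctx hex>
#   <pendingctx hex>
#   <dirstate parents, space-separated hex>
#   <stripnodes, space-separated hex>
#
# load() reads the lines back in the same order and aborts on a version
# mismatch.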
138 def createcmd(ui, repo, pats, opts):
138 def createcmd(ui, repo, pats, opts):
139 """subcommand that creates a new shelve"""
139 """subcommand that creates a new shelve"""
140
140
141 def publicancestors(ctx):
141 def publicancestors(ctx):
142 """Compute the public ancestors of a commit.
142 """Compute the public ancestors of a commit.
143
143
144 Much faster than the revset ancestors(ctx) & draft()"""
144 Much faster than the revset ancestors(ctx) & draft()"""
145 seen = set([nullrev])
145 seen = set([nullrev])
146 visit = util.deque()
146 visit = util.deque()
147 visit.append(ctx)
147 visit.append(ctx)
148 while visit:
148 while visit:
149 ctx = visit.popleft()
149 ctx = visit.popleft()
150 yield ctx.node()
150 yield ctx.node()
151 for parent in ctx.parents():
151 for parent in ctx.parents():
152 rev = parent.rev()
152 rev = parent.rev()
153 if rev not in seen:
153 if rev not in seen:
154 seen.add(rev)
154 seen.add(rev)
155 if parent.mutable():
155 if parent.mutable():
156 visit.append(parent)
156 visit.append(parent)
157
157
158 wctx = repo[None]
158 wctx = repo[None]
159 parents = wctx.parents()
159 parents = wctx.parents()
160 if len(parents) > 1:
160 if len(parents) > 1:
161 raise util.Abort(_('cannot shelve while merging'))
161 raise util.Abort(_('cannot shelve while merging'))
162 parent = parents[0]
162 parent = parents[0]
163
163
164 # we never need the user, so we use a generic user for all shelve operations
164 # we never need the user, so we use a generic user for all shelve operations
165 user = 'shelve@localhost'
165 user = 'shelve@localhost'
166 label = repo._bookmarkcurrent or parent.branch() or 'default'
166 label = repo._activebookmark or parent.branch() or 'default'
167
167
168 # slashes aren't allowed in filenames, so replace them in the label
168 # slashes aren't allowed in filenames, so replace them in the label
169 label = label.replace('/', '_')
169 label = label.replace('/', '_')
170
170
171 def gennames():
171 def gennames():
172 yield label
172 yield label
173 for i in xrange(1, 100):
173 for i in xrange(1, 100):
174 yield '%s-%02d' % (label, i)
174 yield '%s-%02d' % (label, i)
175
175
176 def commitfunc(ui, repo, message, match, opts):
176 def commitfunc(ui, repo, message, match, opts):
177 hasmq = util.safehasattr(repo, 'mq')
177 hasmq = util.safehasattr(repo, 'mq')
178 if hasmq:
178 if hasmq:
179 saved, repo.mq.checkapplied = repo.mq.checkapplied, False
179 saved, repo.mq.checkapplied = repo.mq.checkapplied, False
180 backup = repo.ui.backupconfig('phases', 'new-commit')
180 backup = repo.ui.backupconfig('phases', 'new-commit')
181 try:
181 try:
182 repo.ui. setconfig('phases', 'new-commit', phases.secret)
182 repo.ui. setconfig('phases', 'new-commit', phases.secret)
183 editor = cmdutil.getcommiteditor(editform='shelve.shelve', **opts)
183 editor = cmdutil.getcommiteditor(editform='shelve.shelve', **opts)
184 return repo.commit(message, user, opts.get('date'), match,
184 return repo.commit(message, user, opts.get('date'), match,
185 editor=editor)
185 editor=editor)
186 finally:
186 finally:
187 repo.ui.restoreconfig(backup)
187 repo.ui.restoreconfig(backup)
188 if hasmq:
188 if hasmq:
189 repo.mq.checkapplied = saved
189 repo.mq.checkapplied = saved
190
190
191 if parent.node() != nullid:
191 if parent.node() != nullid:
192 desc = "changes to '%s'" % parent.description().split('\n', 1)[0]
192 desc = "changes to '%s'" % parent.description().split('\n', 1)[0]
193 else:
193 else:
194 desc = '(changes in empty repository)'
194 desc = '(changes in empty repository)'
195
195
196 if not opts['message']:
196 if not opts['message']:
197 opts['message'] = desc
197 opts['message'] = desc
198
198
199 name = opts['name']
199 name = opts['name']
200
200
201 wlock = lock = tr = bms = None
201 wlock = lock = tr = bms = None
202 try:
202 try:
203 wlock = repo.wlock()
203 wlock = repo.wlock()
204 lock = repo.lock()
204 lock = repo.lock()
205
205
206 bms = repo._bookmarks.copy()
206 bms = repo._bookmarks.copy()
207 # use an uncommitted transaction to generate the bundle to avoid
207 # use an uncommitted transaction to generate the bundle to avoid
208 # pull races. ensure we don't print the abort message to stderr.
208 # pull races. ensure we don't print the abort message to stderr.
209 tr = repo.transaction('commit', report=lambda x: None)
209 tr = repo.transaction('commit', report=lambda x: None)
210
210
211 if name:
211 if name:
212 if shelvedfile(repo, name, 'hg').exists():
212 if shelvedfile(repo, name, 'hg').exists():
213 raise util.Abort(_("a shelved change named '%s' already exists")
213 raise util.Abort(_("a shelved change named '%s' already exists")
214 % name)
214 % name)
215 else:
215 else:
216 for n in gennames():
216 for n in gennames():
217 if not shelvedfile(repo, n, 'hg').exists():
217 if not shelvedfile(repo, n, 'hg').exists():
218 name = n
218 name = n
219 break
219 break
220 else:
220 else:
221 raise util.Abort(_("too many shelved changes named '%s'") %
221 raise util.Abort(_("too many shelved changes named '%s'") %
222 label)
222 label)
223
223
224 # ensure we are not creating a subdirectory or a hidden file
224 # ensure we are not creating a subdirectory or a hidden file
225 if '/' in name or '\\' in name:
225 if '/' in name or '\\' in name:
226 raise util.Abort(_('shelved change names may not contain slashes'))
226 raise util.Abort(_('shelved change names may not contain slashes'))
227 if name.startswith('.'):
227 if name.startswith('.'):
228 raise util.Abort(_("shelved change names may not start with '.'"))
228 raise util.Abort(_("shelved change names may not start with '.'"))
229 interactive = opts.get('interactive', False)
229 interactive = opts.get('interactive', False)
230
230
231 def interactivecommitfunc(ui, repo, *pats, **opts):
231 def interactivecommitfunc(ui, repo, *pats, **opts):
232 match = scmutil.match(repo['.'], pats, {})
232 match = scmutil.match(repo['.'], pats, {})
233 message = opts['message']
233 message = opts['message']
234 return commitfunc(ui, repo, message, match, opts)
234 return commitfunc(ui, repo, message, match, opts)
235 if not interactive:
235 if not interactive:
236 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
236 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
237 else:
237 else:
238 node = cmdutil.dorecord(ui, repo, interactivecommitfunc, 'commit',
238 node = cmdutil.dorecord(ui, repo, interactivecommitfunc, 'commit',
239 False, cmdutil.recordfilter, *pats, **opts)
239 False, cmdutil.recordfilter, *pats, **opts)
240 if not node:
240 if not node:
241 stat = repo.status(match=scmutil.match(repo[None], pats, opts))
241 stat = repo.status(match=scmutil.match(repo[None], pats, opts))
242 if stat.deleted:
242 if stat.deleted:
243 ui.status(_("nothing changed (%d missing files, see "
243 ui.status(_("nothing changed (%d missing files, see "
244 "'hg status')\n") % len(stat.deleted))
244 "'hg status')\n") % len(stat.deleted))
245 else:
245 else:
246 ui.status(_("nothing changed\n"))
246 ui.status(_("nothing changed\n"))
247 return 1
247 return 1
248
248
249 bases = list(publicancestors(repo[node]))
249 bases = list(publicancestors(repo[node]))
250 cg = changegroup.changegroupsubset(repo, bases, [node], 'shelve')
250 cg = changegroup.changegroupsubset(repo, bases, [node], 'shelve')
251 shelvedfile(repo, name, 'hg').writebundle(cg)
251 shelvedfile(repo, name, 'hg').writebundle(cg)
252 cmdutil.export(repo, [node],
252 cmdutil.export(repo, [node],
253 fp=shelvedfile(repo, name, 'patch').opener('wb'),
253 fp=shelvedfile(repo, name, 'patch').opener('wb'),
254 opts=mdiff.diffopts(git=True))
254 opts=mdiff.diffopts(git=True))
255
255
256
256
257 if ui.formatted():
257 if ui.formatted():
258 desc = util.ellipsis(desc, ui.termwidth())
258 desc = util.ellipsis(desc, ui.termwidth())
259 ui.status(_('shelved as %s\n') % name)
259 ui.status(_('shelved as %s\n') % name)
260 hg.update(repo, parent.node())
260 hg.update(repo, parent.node())
261 finally:
261 finally:
262 if bms:
262 if bms:
263 # restore old bookmarks
263 # restore old bookmarks
264 repo._bookmarks.update(bms)
264 repo._bookmarks.update(bms)
265 repo._bookmarks.write()
265 repo._bookmarks.write()
266 if tr:
266 if tr:
267 tr.abort()
267 tr.abort()
268 lockmod.release(lock, wlock)
268 lockmod.release(lock, wlock)
269
269
270 def cleanupcmd(ui, repo):
270 def cleanupcmd(ui, repo):
271 """subcommand that deletes all shelves"""
271 """subcommand that deletes all shelves"""
272
272
273 wlock = None
273 wlock = None
274 try:
274 try:
275 wlock = repo.wlock()
275 wlock = repo.wlock()
276 for (name, _type) in repo.vfs.readdir('shelved'):
276 for (name, _type) in repo.vfs.readdir('shelved'):
277 suffix = name.rsplit('.', 1)[-1]
277 suffix = name.rsplit('.', 1)[-1]
278 if suffix in ('hg', 'patch'):
278 if suffix in ('hg', 'patch'):
279 shelvedfile(repo, name).unlink()
279 shelvedfile(repo, name).unlink()
280 finally:
280 finally:
281 lockmod.release(wlock)
281 lockmod.release(wlock)
282
282
283 def deletecmd(ui, repo, pats):
283 def deletecmd(ui, repo, pats):
284 """subcommand that deletes a specific shelve"""
284 """subcommand that deletes a specific shelve"""
285 if not pats:
285 if not pats:
286 raise util.Abort(_('no shelved changes specified!'))
286 raise util.Abort(_('no shelved changes specified!'))
287 wlock = None
287 wlock = None
288 try:
288 try:
289 wlock = repo.wlock()
289 wlock = repo.wlock()
290 try:
290 try:
291 for name in pats:
291 for name in pats:
292 for suffix in 'hg patch'.split():
292 for suffix in 'hg patch'.split():
293 shelvedfile(repo, name, suffix).unlink()
293 shelvedfile(repo, name, suffix).unlink()
294 except OSError, err:
294 except OSError, err:
295 if err.errno != errno.ENOENT:
295 if err.errno != errno.ENOENT:
296 raise
296 raise
297 raise util.Abort(_("shelved change '%s' not found") % name)
297 raise util.Abort(_("shelved change '%s' not found") % name)
298 finally:
298 finally:
299 lockmod.release(wlock)
299 lockmod.release(wlock)
300
300
301 def listshelves(repo):
301 def listshelves(repo):
302 """return all shelves in repo as list of (time, filename)"""
302 """return all shelves in repo as list of (time, filename)"""
303 try:
303 try:
304 names = repo.vfs.readdir('shelved')
304 names = repo.vfs.readdir('shelved')
305 except OSError, err:
305 except OSError, err:
306 if err.errno != errno.ENOENT:
306 if err.errno != errno.ENOENT:
307 raise
307 raise
308 return []
308 return []
309 info = []
309 info = []
310 for (name, _type) in names:
310 for (name, _type) in names:
311 pfx, sfx = name.rsplit('.', 1)
311 pfx, sfx = name.rsplit('.', 1)
312 if not pfx or sfx != 'patch':
312 if not pfx or sfx != 'patch':
313 continue
313 continue
314 st = shelvedfile(repo, name).stat()
314 st = shelvedfile(repo, name).stat()
315 info.append((st.st_mtime, shelvedfile(repo, pfx).filename()))
315 info.append((st.st_mtime, shelvedfile(repo, pfx).filename()))
316 return sorted(info, reverse=True)
316 return sorted(info, reverse=True)
317
317
318 def listcmd(ui, repo, pats, opts):
318 def listcmd(ui, repo, pats, opts):
319 """subcommand that displays the list of shelves"""
319 """subcommand that displays the list of shelves"""
320 pats = set(pats)
320 pats = set(pats)
321 width = 80
321 width = 80
322 if not ui.plain():
322 if not ui.plain():
323 width = ui.termwidth()
323 width = ui.termwidth()
324 namelabel = 'shelve.newest'
324 namelabel = 'shelve.newest'
325 for mtime, name in listshelves(repo):
325 for mtime, name in listshelves(repo):
326 sname = util.split(name)[1]
326 sname = util.split(name)[1]
327 if pats and sname not in pats:
327 if pats and sname not in pats:
328 continue
328 continue
329 ui.write(sname, label=namelabel)
329 ui.write(sname, label=namelabel)
330 namelabel = 'shelve.name'
330 namelabel = 'shelve.name'
331 if ui.quiet:
331 if ui.quiet:
332 ui.write('\n')
332 ui.write('\n')
333 continue
333 continue
334 ui.write(' ' * (16 - len(sname)))
334 ui.write(' ' * (16 - len(sname)))
335 used = 16
335 used = 16
336 age = '(%s)' % templatefilters.age(util.makedate(mtime), abbrev=True)
336 age = '(%s)' % templatefilters.age(util.makedate(mtime), abbrev=True)
337 ui.write(age, label='shelve.age')
337 ui.write(age, label='shelve.age')
338 ui.write(' ' * (12 - len(age)))
338 ui.write(' ' * (12 - len(age)))
339 used += 12
339 used += 12
340 fp = open(name + '.patch', 'rb')
340 fp = open(name + '.patch', 'rb')
341 try:
341 try:
342 while True:
342 while True:
343 line = fp.readline()
343 line = fp.readline()
344 if not line:
344 if not line:
345 break
345 break
346 if not line.startswith('#'):
346 if not line.startswith('#'):
347 desc = line.rstrip()
347 desc = line.rstrip()
348 if ui.formatted():
348 if ui.formatted():
349 desc = util.ellipsis(desc, width - used)
349 desc = util.ellipsis(desc, width - used)
350 ui.write(desc)
350 ui.write(desc)
351 break
351 break
352 ui.write('\n')
352 ui.write('\n')
353 if not (opts['patch'] or opts['stat']):
353 if not (opts['patch'] or opts['stat']):
354 continue
354 continue
355 difflines = fp.readlines()
355 difflines = fp.readlines()
356 if opts['patch']:
356 if opts['patch']:
357 for chunk, label in patch.difflabel(iter, difflines):
357 for chunk, label in patch.difflabel(iter, difflines):
358 ui.write(chunk, label=label)
358 ui.write(chunk, label=label)
359 if opts['stat']:
359 if opts['stat']:
360 for chunk, label in patch.diffstatui(difflines, width=width,
360 for chunk, label in patch.diffstatui(difflines, width=width,
361 git=True):
361 git=True):
362 ui.write(chunk, label=label)
362 ui.write(chunk, label=label)
363 finally:
363 finally:
364 fp.close()
364 fp.close()
365
365
366 def checkparents(repo, state):
366 def checkparents(repo, state):
367 """check parent while resuming an unshelve"""
367 """check parent while resuming an unshelve"""
368 if state.parents != repo.dirstate.parents():
368 if state.parents != repo.dirstate.parents():
369 raise util.Abort(_('working directory parents do not match unshelve '
369 raise util.Abort(_('working directory parents do not match unshelve '
370 'state'))
370 'state'))
371
371
372 def pathtofiles(repo, files):
372 def pathtofiles(repo, files):
373 cwd = repo.getcwd()
373 cwd = repo.getcwd()
374 return [repo.pathto(f, cwd) for f in files]
374 return [repo.pathto(f, cwd) for f in files]
375
375
376 def unshelveabort(ui, repo, state, opts):
376 def unshelveabort(ui, repo, state, opts):
377 """subcommand that abort an in-progress unshelve"""
377 """subcommand that abort an in-progress unshelve"""
378 wlock = repo.wlock()
378 wlock = repo.wlock()
379 lock = None
379 lock = None
380 try:
380 try:
381 checkparents(repo, state)
381 checkparents(repo, state)
382
382
383 util.rename(repo.join('unshelverebasestate'),
383 util.rename(repo.join('unshelverebasestate'),
384 repo.join('rebasestate'))
384 repo.join('rebasestate'))
385 try:
385 try:
386 rebase.rebase(ui, repo, **{
386 rebase.rebase(ui, repo, **{
387 'abort' : True
387 'abort' : True
388 })
388 })
389 except Exception:
389 except Exception:
390 util.rename(repo.join('rebasestate'),
390 util.rename(repo.join('rebasestate'),
391 repo.join('unshelverebasestate'))
391 repo.join('unshelverebasestate'))
392 raise
392 raise
393
393
394 lock = repo.lock()
394 lock = repo.lock()
395
395
396 mergefiles(ui, repo, state.wctx, state.pendingctx)
396 mergefiles(ui, repo, state.wctx, state.pendingctx)
397
397
398 repair.strip(ui, repo, state.stripnodes, backup=False, topic='shelve')
398 repair.strip(ui, repo, state.stripnodes, backup=False, topic='shelve')
399 shelvedstate.clear(repo)
399 shelvedstate.clear(repo)
400 ui.warn(_("unshelve of '%s' aborted\n") % state.name)
400 ui.warn(_("unshelve of '%s' aborted\n") % state.name)
401 finally:
401 finally:
402 lockmod.release(lock, wlock)
402 lockmod.release(lock, wlock)
403
403
404 def mergefiles(ui, repo, wctx, shelvectx):
404 def mergefiles(ui, repo, wctx, shelvectx):
405 """updates to wctx and merges the changes from shelvectx into the
405 """updates to wctx and merges the changes from shelvectx into the
406 dirstate."""
406 dirstate."""
407 oldquiet = ui.quiet
407 oldquiet = ui.quiet
408 try:
408 try:
409 ui.quiet = True
409 ui.quiet = True
410 hg.update(repo, wctx.node())
410 hg.update(repo, wctx.node())
411 files = []
411 files = []
412 files.extend(shelvectx.files())
412 files.extend(shelvectx.files())
413 files.extend(shelvectx.parents()[0].files())
413 files.extend(shelvectx.parents()[0].files())
414
414
415 # revert will overwrite unknown files, so move them out of the way
415 # revert will overwrite unknown files, so move them out of the way
416 for file in repo.status(unknown=True).unknown:
416 for file in repo.status(unknown=True).unknown:
417 if file in files:
417 if file in files:
418 util.rename(file, file + ".orig")
418 util.rename(file, file + ".orig")
419 ui.pushbuffer(True)
419 ui.pushbuffer(True)
420 cmdutil.revert(ui, repo, shelvectx, repo.dirstate.parents(),
420 cmdutil.revert(ui, repo, shelvectx, repo.dirstate.parents(),
421 *pathtofiles(repo, files),
421 *pathtofiles(repo, files),
422 **{'no_backup': True})
422 **{'no_backup': True})
423 ui.popbuffer()
423 ui.popbuffer()
424 finally:
424 finally:
425 ui.quiet = oldquiet
425 ui.quiet = oldquiet
426
426
427 def unshelvecleanup(ui, repo, name, opts):
427 def unshelvecleanup(ui, repo, name, opts):
428 """remove related files after an unshelve"""
428 """remove related files after an unshelve"""
429 if not opts['keep']:
429 if not opts['keep']:
430 for filetype in 'hg patch'.split():
430 for filetype in 'hg patch'.split():
431 shelvedfile(repo, name, filetype).unlink()
431 shelvedfile(repo, name, filetype).unlink()
432
432
433 def unshelvecontinue(ui, repo, state, opts):
433 def unshelvecontinue(ui, repo, state, opts):
434 """subcommand to continue an in-progress unshelve"""
434 """subcommand to continue an in-progress unshelve"""
435 # We're finishing off a merge. First parent is our original
435 # We're finishing off a merge. First parent is our original
436 # parent, second is the temporary "fake" commit we're unshelving.
436 # parent, second is the temporary "fake" commit we're unshelving.
437 wlock = repo.wlock()
437 wlock = repo.wlock()
438 lock = None
438 lock = None
439 try:
439 try:
440 checkparents(repo, state)
440 checkparents(repo, state)
441 ms = merge.mergestate(repo)
441 ms = merge.mergestate(repo)
442 if [f for f in ms if ms[f] == 'u']:
442 if [f for f in ms if ms[f] == 'u']:
443 raise util.Abort(
443 raise util.Abort(
444 _("unresolved conflicts, can't continue"),
444 _("unresolved conflicts, can't continue"),
445 hint=_("see 'hg resolve', then 'hg unshelve --continue'"))
445 hint=_("see 'hg resolve', then 'hg unshelve --continue'"))
446
446
447 lock = repo.lock()
447 lock = repo.lock()
448
448
449 util.rename(repo.join('unshelverebasestate'),
449 util.rename(repo.join('unshelverebasestate'),
450 repo.join('rebasestate'))
450 repo.join('rebasestate'))
451 try:
451 try:
452 rebase.rebase(ui, repo, **{
452 rebase.rebase(ui, repo, **{
453 'continue' : True
453 'continue' : True
454 })
454 })
455 except Exception:
455 except Exception:
456 util.rename(repo.join('rebasestate'),
456 util.rename(repo.join('rebasestate'),
457 repo.join('unshelverebasestate'))
457 repo.join('unshelverebasestate'))
458 raise
458 raise
459
459
460 shelvectx = repo['tip']
460 shelvectx = repo['tip']
461 if shelvectx not in state.pendingctx.children():
461 if shelvectx not in state.pendingctx.children():
462 # rebase was a no-op, so it produced no child commit
462 # rebase was a no-op, so it produced no child commit
463 shelvectx = state.pendingctx
463 shelvectx = state.pendingctx
464 else:
464 else:
465 # only strip the shelvectx if the rebase produced it
465 # only strip the shelvectx if the rebase produced it
466 state.stripnodes.append(shelvectx.node())
466 state.stripnodes.append(shelvectx.node())
467
467
468 mergefiles(ui, repo, state.wctx, shelvectx)
468 mergefiles(ui, repo, state.wctx, shelvectx)
469
469
470 repair.strip(ui, repo, state.stripnodes, backup=False, topic='shelve')
470 repair.strip(ui, repo, state.stripnodes, backup=False, topic='shelve')
471 shelvedstate.clear(repo)
471 shelvedstate.clear(repo)
472 unshelvecleanup(ui, repo, state.name, opts)
472 unshelvecleanup(ui, repo, state.name, opts)
473 ui.status(_("unshelve of '%s' complete\n") % state.name)
473 ui.status(_("unshelve of '%s' complete\n") % state.name)
474 finally:
474 finally:
475 lockmod.release(lock, wlock)
475 lockmod.release(lock, wlock)
476
476
477 @command('unshelve',
477 @command('unshelve',
478 [('a', 'abort', None,
478 [('a', 'abort', None,
479 _('abort an incomplete unshelve operation')),
479 _('abort an incomplete unshelve operation')),
480 ('c', 'continue', None,
480 ('c', 'continue', None,
481 _('continue an incomplete unshelve operation')),
481 _('continue an incomplete unshelve operation')),
482 ('', 'keep', None,
482 ('', 'keep', None,
483 _('keep shelve after unshelving')),
483 _('keep shelve after unshelving')),
484 ('', 'date', '',
484 ('', 'date', '',
485 _('set date for temporary commits (DEPRECATED)'), _('DATE'))],
485 _('set date for temporary commits (DEPRECATED)'), _('DATE'))],
486 _('hg unshelve [SHELVED]'))
486 _('hg unshelve [SHELVED]'))
487 def unshelve(ui, repo, *shelved, **opts):
487 def unshelve(ui, repo, *shelved, **opts):
488 """restore a shelved change to the working directory
488 """restore a shelved change to the working directory
489
489
490 This command accepts an optional name of a shelved change to
490 This command accepts an optional name of a shelved change to
491 restore. If none is given, the most recent shelved change is used.
491 restore. If none is given, the most recent shelved change is used.
492
492
493 If a shelved change is applied successfully, the bundle that
493 If a shelved change is applied successfully, the bundle that
494 contains the shelved changes is deleted afterwards.
494 contains the shelved changes is deleted afterwards.
495
495
496 Since you can restore a shelved change on top of an arbitrary
496 Since you can restore a shelved change on top of an arbitrary
497 commit, it is possible that unshelving will result in a conflict
497 commit, it is possible that unshelving will result in a conflict
498 between your changes and the commits you are unshelving onto. If
498 between your changes and the commits you are unshelving onto. If
499 this occurs, you must resolve the conflict, then use
499 this occurs, you must resolve the conflict, then use
500 ``--continue`` to complete the unshelve operation. (The bundle
500 ``--continue`` to complete the unshelve operation. (The bundle
501 will not be deleted until you successfully complete the unshelve.)
501 will not be deleted until you successfully complete the unshelve.)
502
502
503 (Alternatively, you can use ``--abort`` to abandon an unshelve
503 (Alternatively, you can use ``--abort`` to abandon an unshelve
504 that causes a conflict. This reverts the unshelved changes, and
504 that causes a conflict. This reverts the unshelved changes, and
505 does not delete the bundle.)
505 does not delete the bundle.)
506 """
506 """
507 abortf = opts['abort']
507 abortf = opts['abort']
508 continuef = opts['continue']
508 continuef = opts['continue']
509 if not abortf and not continuef:
509 if not abortf and not continuef:
510 cmdutil.checkunfinished(repo)
510 cmdutil.checkunfinished(repo)
511
511
512 if abortf or continuef:
512 if abortf or continuef:
513 if abortf and continuef:
513 if abortf and continuef:
514 raise util.Abort(_('cannot use both abort and continue'))
514 raise util.Abort(_('cannot use both abort and continue'))
515 if shelved:
515 if shelved:
516 raise util.Abort(_('cannot combine abort/continue with '
516 raise util.Abort(_('cannot combine abort/continue with '
517 'naming a shelved change'))
517 'naming a shelved change'))
518
518
519 try:
519 try:
520 state = shelvedstate.load(repo)
520 state = shelvedstate.load(repo)
521 except IOError, err:
521 except IOError, err:
522 if err.errno != errno.ENOENT:
522 if err.errno != errno.ENOENT:
523 raise
523 raise
524 raise util.Abort(_('no unshelve operation underway'))
524 raise util.Abort(_('no unshelve operation underway'))
525
525
526 if abortf:
526 if abortf:
527 return unshelveabort(ui, repo, state, opts)
527 return unshelveabort(ui, repo, state, opts)
528 elif continuef:
528 elif continuef:
529 return unshelvecontinue(ui, repo, state, opts)
529 return unshelvecontinue(ui, repo, state, opts)
530 elif len(shelved) > 1:
530 elif len(shelved) > 1:
531 raise util.Abort(_('can only unshelve one change at a time'))
531 raise util.Abort(_('can only unshelve one change at a time'))
532 elif not shelved:
532 elif not shelved:
533 shelved = listshelves(repo)
533 shelved = listshelves(repo)
534 if not shelved:
534 if not shelved:
535 raise util.Abort(_('no shelved changes to apply!'))
535 raise util.Abort(_('no shelved changes to apply!'))
536 basename = util.split(shelved[0][1])[1]
536 basename = util.split(shelved[0][1])[1]
537 ui.status(_("unshelving change '%s'\n") % basename)
537 ui.status(_("unshelving change '%s'\n") % basename)
538 else:
538 else:
539 basename = shelved[0]
539 basename = shelved[0]
540
540
541 if not shelvedfile(repo, basename, 'patch').exists():
541 if not shelvedfile(repo, basename, 'patch').exists():
542 raise util.Abort(_("shelved change '%s' not found") % basename)
542 raise util.Abort(_("shelved change '%s' not found") % basename)
543
543
544 oldquiet = ui.quiet
544 oldquiet = ui.quiet
545 wlock = lock = tr = None
545 wlock = lock = tr = None
546 try:
546 try:
547 wlock = repo.wlock()
547 wlock = repo.wlock()
548 lock = repo.lock()
548 lock = repo.lock()
549
549
550 tr = repo.transaction('unshelve', report=lambda x: None)
550 tr = repo.transaction('unshelve', report=lambda x: None)
551 oldtiprev = len(repo)
551 oldtiprev = len(repo)
552
552
553 pctx = repo['.']
553 pctx = repo['.']
554 tmpwctx = pctx
554 tmpwctx = pctx
555 # The goal is to have a commit structure like so:
555 # The goal is to have a commit structure like so:
556 # ...-> pctx -> tmpwctx -> shelvectx
556 # ...-> pctx -> tmpwctx -> shelvectx
557 # where tmpwctx is an optional commit with the user's pending changes
557 # where tmpwctx is an optional commit with the user's pending changes
558 # and shelvectx is the unshelved changes. Then we merge it all down
558 # and shelvectx is the unshelved changes. Then we merge it all down
559 # to the original pctx.
559 # to the original pctx.
560
560
561 # Store pending changes in a commit
561 # Store pending changes in a commit
562 s = repo.status()
562 s = repo.status()
563 if s.modified or s.added or s.removed or s.deleted:
563 if s.modified or s.added or s.removed or s.deleted:
564 ui.status(_("temporarily committing pending changes "
564 ui.status(_("temporarily committing pending changes "
565 "(restore with 'hg unshelve --abort')\n"))
565 "(restore with 'hg unshelve --abort')\n"))
566 def commitfunc(ui, repo, message, match, opts):
566 def commitfunc(ui, repo, message, match, opts):
567 hasmq = util.safehasattr(repo, 'mq')
567 hasmq = util.safehasattr(repo, 'mq')
568 if hasmq:
568 if hasmq:
569 saved, repo.mq.checkapplied = repo.mq.checkapplied, False
569 saved, repo.mq.checkapplied = repo.mq.checkapplied, False
570
570
571 backup = repo.ui.backupconfig('phases', 'new-commit')
571 backup = repo.ui.backupconfig('phases', 'new-commit')
572 try:
572 try:
573 repo.ui.setconfig('phases', 'new-commit', phases.secret)
573 repo.ui.setconfig('phases', 'new-commit', phases.secret)
574 return repo.commit(message, 'shelve@localhost',
574 return repo.commit(message, 'shelve@localhost',
575 opts.get('date'), match)
575 opts.get('date'), match)
576 finally:
576 finally:
577 repo.ui.restoreconfig(backup)
577 repo.ui.restoreconfig(backup)
578 if hasmq:
578 if hasmq:
579 repo.mq.checkapplied = saved
579 repo.mq.checkapplied = saved
580
580
581 tempopts = {}
581 tempopts = {}
582 tempopts['message'] = "pending changes temporary commit"
582 tempopts['message'] = "pending changes temporary commit"
583 tempopts['date'] = opts.get('date')
583 tempopts['date'] = opts.get('date')
584 ui.quiet = True
584 ui.quiet = True
585 node = cmdutil.commit(ui, repo, commitfunc, [], tempopts)
585 node = cmdutil.commit(ui, repo, commitfunc, [], tempopts)
586 tmpwctx = repo[node]
586 tmpwctx = repo[node]
587
587
588 ui.quiet = True
588 ui.quiet = True
589 shelvedfile(repo, basename, 'hg').applybundle()
589 shelvedfile(repo, basename, 'hg').applybundle()
590
590
591 ui.quiet = oldquiet
591 ui.quiet = oldquiet
592
592
593 shelvectx = repo['tip']
593 shelvectx = repo['tip']
594
594
595 # If the shelve is not immediately on top of the commit
595 # If the shelve is not immediately on top of the commit
596 # we'll be merging with, rebase it to be on top.
596 # we'll be merging with, rebase it to be on top.
597 if tmpwctx.node() != shelvectx.parents()[0].node():
597 if tmpwctx.node() != shelvectx.parents()[0].node():
598 ui.status(_('rebasing shelved changes\n'))
598 ui.status(_('rebasing shelved changes\n'))
599 try:
599 try:
600 rebase.rebase(ui, repo, **{
600 rebase.rebase(ui, repo, **{
601 'rev' : [shelvectx.rev()],
601 'rev' : [shelvectx.rev()],
602 'dest' : str(tmpwctx.rev()),
602 'dest' : str(tmpwctx.rev()),
603 'keep' : True,
603 'keep' : True,
604 })
604 })
605 except error.InterventionRequired:
605 except error.InterventionRequired:
606 tr.close()
606 tr.close()
607
607
608 stripnodes = [repo.changelog.node(rev)
608 stripnodes = [repo.changelog.node(rev)
609 for rev in xrange(oldtiprev, len(repo))]
609 for rev in xrange(oldtiprev, len(repo))]
610 shelvedstate.save(repo, basename, pctx, tmpwctx, stripnodes)
610 shelvedstate.save(repo, basename, pctx, tmpwctx, stripnodes)
611
611
612 util.rename(repo.join('rebasestate'),
612 util.rename(repo.join('rebasestate'),
613 repo.join('unshelverebasestate'))
613 repo.join('unshelverebasestate'))
614 raise error.InterventionRequired(
614 raise error.InterventionRequired(
615 _("unresolved conflicts (see 'hg resolve', then "
615 _("unresolved conflicts (see 'hg resolve', then "
616 "'hg unshelve --continue')"))
616 "'hg unshelve --continue')"))
617
617
618 # refresh ctx after rebase completes
618 # refresh ctx after rebase completes
619 shelvectx = repo['tip']
619 shelvectx = repo['tip']
620
620
621 if shelvectx not in tmpwctx.children():
621 if shelvectx not in tmpwctx.children():
622 # rebase was a no-op, so it produced no child commit
622 # rebase was a no-op, so it produced no child commit
623 shelvectx = tmpwctx
623 shelvectx = tmpwctx
624
624
625 mergefiles(ui, repo, pctx, shelvectx)
625 mergefiles(ui, repo, pctx, shelvectx)
626 shelvedstate.clear(repo)
626 shelvedstate.clear(repo)
627
627
628 # The transaction aborting will strip all the commits for us,
628 # The transaction aborting will strip all the commits for us,
629 # but it doesn't update the inmemory structures, so addchangegroup
629 # but it doesn't update the inmemory structures, so addchangegroup
630 # hooks still fire and try to operate on the missing commits.
630 # hooks still fire and try to operate on the missing commits.
631 # Clean up manually to prevent this.
631 # Clean up manually to prevent this.
632 repo.unfiltered().changelog.strip(oldtiprev, tr)
632 repo.unfiltered().changelog.strip(oldtiprev, tr)
633
633
634 unshelvecleanup(ui, repo, basename, opts)
634 unshelvecleanup(ui, repo, basename, opts)
635 finally:
635 finally:
636 ui.quiet = oldquiet
636 ui.quiet = oldquiet
637 if tr:
637 if tr:
638 tr.release()
638 tr.release()
639 lockmod.release(lock, wlock)
639 lockmod.release(lock, wlock)
640
640
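
The conflict handling above follows a checkpoint pattern: record which new revisions would have to be stripped, park the in-progress rebase state under a distinct file name, and raise InterventionRequired so the command exits and the user can run --continue or --abort (the saved state is what the unshelveabort/unshelvecontinue dispatch at the top of the function consumes). A minimal standalone sketch of that pattern; the JSON layout and file names are invented for illustration and are not Mercurial's shelvedstate format:

import json
import os

class InterventionRequired(Exception):
    """Raised to stop cleanly and ask the user to resolve, then re-run."""

def savestate(statedir, shelfname, stripnodes):
    # Persist just enough to resume or abort later (hypothetical format).
    with open(os.path.join(statedir, 'shelvedstate'), 'w') as f:
        json.dump({'name': shelfname, 'stripnodes': stripnodes}, f)

def rebase_with_checkpoint(statedir, shelfname, stripnodes, dorebase):
    try:
        dorebase()
    except RuntimeError:  # stand-in for a merge conflict during the rebase
        savestate(statedir, shelfname, stripnodes)
        # Park the rebase state under a different name so a bare
        # 'rebase --continue' cannot pick it up by mistake.
        os.rename(os.path.join(statedir, 'rebasestate'),
                  os.path.join(statedir, 'unshelverebasestate'))
        raise InterventionRequired(
            "unresolved conflicts (resolve them, then run --continue)")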
641 @command('shelve',
641 @command('shelve',
642 [('A', 'addremove', None,
642 [('A', 'addremove', None,
643 _('mark new/missing files as added/removed before shelving')),
643 _('mark new/missing files as added/removed before shelving')),
644 ('', 'cleanup', None,
644 ('', 'cleanup', None,
645 _('delete all shelved changes')),
645 _('delete all shelved changes')),
646 ('', 'date', '',
646 ('', 'date', '',
647 _('shelve with the specified commit date'), _('DATE')),
647 _('shelve with the specified commit date'), _('DATE')),
648 ('d', 'delete', None,
648 ('d', 'delete', None,
649 _('delete the named shelved change(s)')),
649 _('delete the named shelved change(s)')),
650 ('e', 'edit', False,
650 ('e', 'edit', False,
651 _('invoke editor on commit messages')),
651 _('invoke editor on commit messages')),
652 ('l', 'list', None,
652 ('l', 'list', None,
653 _('list current shelves')),
653 _('list current shelves')),
654 ('m', 'message', '',
654 ('m', 'message', '',
655 _('use text as shelve message'), _('TEXT')),
655 _('use text as shelve message'), _('TEXT')),
656 ('n', 'name', '',
656 ('n', 'name', '',
657 _('use the given name for the shelved commit'), _('NAME')),
657 _('use the given name for the shelved commit'), _('NAME')),
658 ('p', 'patch', None,
658 ('p', 'patch', None,
659 _('show patch')),
659 _('show patch')),
660 ('i', 'interactive', None,
660 ('i', 'interactive', None,
661 _('interactive mode, only works while creating a shelve '
661 _('interactive mode, only works while creating a shelve '
662 '(EXPERIMENTAL)')),
662 '(EXPERIMENTAL)')),
663 ('', 'stat', None,
663 ('', 'stat', None,
664 _('output diffstat-style summary of changes'))] + commands.walkopts,
664 _('output diffstat-style summary of changes'))] + commands.walkopts,
665 _('hg shelve [OPTION]... [FILE]...'))
665 _('hg shelve [OPTION]... [FILE]...'))
666 def shelvecmd(ui, repo, *pats, **opts):
666 def shelvecmd(ui, repo, *pats, **opts):
667 '''save and set aside changes from the working directory
667 '''save and set aside changes from the working directory
668
668
669 Shelving takes files that "hg status" reports as not clean, saves
669 Shelving takes files that "hg status" reports as not clean, saves
670 the modifications to a bundle (a shelved change), and reverts the
670 the modifications to a bundle (a shelved change), and reverts the
671 files so that their state in the working directory becomes clean.
671 files so that their state in the working directory becomes clean.
672
672
673 To restore these changes to the working directory, use "hg
673 To restore these changes to the working directory, use "hg
674 unshelve"; this will work even if you switch to a different
674 unshelve"; this will work even if you switch to a different
675 commit.
675 commit.
676
676
677 When no files are specified, "hg shelve" saves all not-clean
677 When no files are specified, "hg shelve" saves all not-clean
678 files. If specific files or directories are named, only changes to
678 files. If specific files or directories are named, only changes to
679 those files are shelved.
679 those files are shelved.
680
680
681 Each shelved change has a name that makes it easier to find later.
681 Each shelved change has a name that makes it easier to find later.
682 The name of a shelved change defaults to being based on the active
682 The name of a shelved change defaults to being based on the active
683 bookmark, or if there is no active bookmark, the current named
683 bookmark, or if there is no active bookmark, the current named
684 branch. To specify a different name, use ``--name``.
684 branch. To specify a different name, use ``--name``.
685
685
686 To see a list of existing shelved changes, use the ``--list``
686 To see a list of existing shelved changes, use the ``--list``
687 option. For each shelved change, this will print its name, age,
687 option. For each shelved change, this will print its name, age,
688 and description; use ``--patch`` or ``--stat`` for more details.
688 and description; use ``--patch`` or ``--stat`` for more details.
689
689
690 To delete specific shelved changes, use ``--delete``. To delete
690 To delete specific shelved changes, use ``--delete``. To delete
691 all shelved changes, use ``--cleanup``.
691 all shelved changes, use ``--cleanup``.
692 '''
692 '''
693 cmdutil.checkunfinished(repo)
693 cmdutil.checkunfinished(repo)
694
694
695 allowables = [
695 allowables = [
696 ('addremove', 'create'), # 'create' is pseudo action
696 ('addremove', 'create'), # 'create' is pseudo action
697 ('cleanup', 'cleanup'),
697 ('cleanup', 'cleanup'),
698 # ('date', 'create'), # ignored for passing '--date "0 0"' in tests
698 # ('date', 'create'), # ignored for passing '--date "0 0"' in tests
699 ('delete', 'delete'),
699 ('delete', 'delete'),
700 ('edit', 'create'),
700 ('edit', 'create'),
701 ('list', 'list'),
701 ('list', 'list'),
702 ('message', 'create'),
702 ('message', 'create'),
703 ('name', 'create'),
703 ('name', 'create'),
704 ('patch', 'list'),
704 ('patch', 'list'),
705 ('stat', 'list'),
705 ('stat', 'list'),
706 ]
706 ]
707 def checkopt(opt):
707 def checkopt(opt):
708 if opts[opt]:
708 if opts[opt]:
709 for i, allowable in allowables:
709 for i, allowable in allowables:
710 if opts[i] and opt != allowable:
710 if opts[i] and opt != allowable:
711 raise util.Abort(_("options '--%s' and '--%s' may not be "
711 raise util.Abort(_("options '--%s' and '--%s' may not be "
712 "used together") % (opt, i))
712 "used together") % (opt, i))
713 return True
713 return True
714 if checkopt('cleanup'):
714 if checkopt('cleanup'):
715 if pats:
715 if pats:
716 raise util.Abort(_("cannot specify names when using '--cleanup'"))
716 raise util.Abort(_("cannot specify names when using '--cleanup'"))
717 return cleanupcmd(ui, repo)
717 return cleanupcmd(ui, repo)
718 elif checkopt('delete'):
718 elif checkopt('delete'):
719 return deletecmd(ui, repo, pats)
719 return deletecmd(ui, repo, pats)
720 elif checkopt('list'):
720 elif checkopt('list'):
721 return listcmd(ui, repo, pats, opts)
721 return listcmd(ui, repo, pats, opts)
722 else:
722 else:
723 for i in ('patch', 'stat'):
723 for i in ('patch', 'stat'):
724 if opts[i]:
724 if opts[i]:
725 raise util.Abort(_("option '--%s' may not be "
725 raise util.Abort(_("option '--%s' may not be "
726 "used when shelving a change") % (i,))
726 "used when shelving a change") % (i,))
727 return createcmd(ui, repo, pats, opts)
727 return createcmd(ui, repo, pats, opts)
728
728
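
checkopt above is table-driven: each option is tied to one pseudo-action ('create', 'cleanup', 'delete', 'list'), and mixing options that belong to different actions aborts. The same validation reduced to plain dicts and exceptions, a sketch rather than the extension's ui/opts machinery:

ALLOWABLES = [
    ('addremove', 'create'), ('cleanup', 'cleanup'), ('delete', 'delete'),
    ('edit', 'create'), ('list', 'list'), ('message', 'create'),
    ('name', 'create'), ('patch', 'list'), ('stat', 'list'),
]

def checkopt(opts, action):
    """If the option named 'action' is set, require every other set option
    to belong to that action; mirrors the loop above, minus i18n."""
    if not opts.get(action):
        return False
    for name, allowed_for in ALLOWABLES:
        if opts.get(name) and allowed_for != action:
            raise ValueError("options '--%s' and '--%s' may not be used "
                             "together" % (action, name))
    return True

checkopt({'list': True, 'patch': True}, 'list')      # fine: both are 'list'
# checkopt({'cleanup': True, 'name': 'x'}, 'cleanup')  # would raise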
729 def extsetup(ui):
729 def extsetup(ui):
730 cmdutil.unfinishedstates.append(
730 cmdutil.unfinishedstates.append(
731 [shelvedstate._filename, False, False,
731 [shelvedstate._filename, False, False,
732 _('unshelve already in progress'),
732 _('unshelve already in progress'),
733 _("use 'hg unshelve --continue' or 'hg unshelve --abort'")])
733 _("use 'hg unshelve --continue' or 'hg unshelve --abort'")])
@@ -1,221 +1,221 b''
1 """strip changesets and their descendants from history
1 """strip changesets and their descendants from history
2
2
3 This extension allows you to strip changesets and all their descendants from the
3 This extension allows you to strip changesets and all their descendants from the
4 repository. See the command help for details.
4 repository. See the command help for details.
5 """
5 """
6 from mercurial.i18n import _
6 from mercurial.i18n import _
7 from mercurial.node import nullid
7 from mercurial.node import nullid
8 from mercurial.lock import release
8 from mercurial.lock import release
9 from mercurial import cmdutil, hg, scmutil, util
9 from mercurial import cmdutil, hg, scmutil, util
10 from mercurial import repair, bookmarks, merge
10 from mercurial import repair, bookmarks, merge
11
11
12 cmdtable = {}
12 cmdtable = {}
13 command = cmdutil.command(cmdtable)
13 command = cmdutil.command(cmdtable)
14 testedwith = 'internal'
14 testedwith = 'internal'
15
15
16 def checksubstate(repo, baserev=None):
16 def checksubstate(repo, baserev=None):
17 '''return list of subrepos at a different revision than substate.
17 '''return list of subrepos at a different revision than substate.
18 Abort if any subrepos have uncommitted changes.'''
18 Abort if any subrepos have uncommitted changes.'''
19 inclsubs = []
19 inclsubs = []
20 wctx = repo[None]
20 wctx = repo[None]
21 if baserev:
21 if baserev:
22 bctx = repo[baserev]
22 bctx = repo[baserev]
23 else:
23 else:
24 bctx = wctx.parents()[0]
24 bctx = wctx.parents()[0]
25 for s in sorted(wctx.substate):
25 for s in sorted(wctx.substate):
26 wctx.sub(s).bailifchanged(True)
26 wctx.sub(s).bailifchanged(True)
27 if s not in bctx.substate or bctx.sub(s).dirty():
27 if s not in bctx.substate or bctx.sub(s).dirty():
28 inclsubs.append(s)
28 inclsubs.append(s)
29 return inclsubs
29 return inclsubs
30
30
31 def checklocalchanges(repo, force=False, excsuffix=''):
31 def checklocalchanges(repo, force=False, excsuffix=''):
32 cmdutil.checkunfinished(repo)
32 cmdutil.checkunfinished(repo)
33 s = repo.status()
33 s = repo.status()
34 if not force:
34 if not force:
35 if s.modified or s.added or s.removed or s.deleted:
35 if s.modified or s.added or s.removed or s.deleted:
36 _("local changes found") # i18n tool detection
36 _("local changes found") # i18n tool detection
37 raise util.Abort(_("local changes found" + excsuffix))
37 raise util.Abort(_("local changes found" + excsuffix))
38 if checksubstate(repo):
38 if checksubstate(repo):
39 _("local changed subrepos found") # i18n tool detection
39 _("local changed subrepos found") # i18n tool detection
40 raise util.Abort(_("local changed subrepos found" + excsuffix))
40 raise util.Abort(_("local changed subrepos found" + excsuffix))
41 return s
41 return s
42
42
43 def strip(ui, repo, revs, update=True, backup=True, force=None, bookmark=None):
43 def strip(ui, repo, revs, update=True, backup=True, force=None, bookmark=None):
44 wlock = lock = None
44 wlock = lock = None
45 try:
45 try:
46 wlock = repo.wlock()
46 wlock = repo.wlock()
47 lock = repo.lock()
47 lock = repo.lock()
48
48
49 if update:
49 if update:
50 checklocalchanges(repo, force=force)
50 checklocalchanges(repo, force=force)
51 urev, p2 = repo.changelog.parents(revs[0])
51 urev, p2 = repo.changelog.parents(revs[0])
52 if (util.safehasattr(repo, 'mq') and
52 if (util.safehasattr(repo, 'mq') and
53 p2 != nullid
53 p2 != nullid
54 and p2 in [x.node for x in repo.mq.applied]):
54 and p2 in [x.node for x in repo.mq.applied]):
55 urev = p2
55 urev = p2
56 hg.clean(repo, urev)
56 hg.clean(repo, urev)
57 repo.dirstate.write()
57 repo.dirstate.write()
58
58
59 repair.strip(ui, repo, revs, backup)
59 repair.strip(ui, repo, revs, backup)
60
60
61 marks = repo._bookmarks
61 marks = repo._bookmarks
62 if bookmark:
62 if bookmark:
63 if bookmark == repo._bookmarkcurrent:
63 if bookmark == repo._activebookmark:
64 bookmarks.deactivate(repo)
64 bookmarks.deactivate(repo)
65 del marks[bookmark]
65 del marks[bookmark]
66 marks.write()
66 marks.write()
67 ui.write(_("bookmark '%s' deleted\n") % bookmark)
67 ui.write(_("bookmark '%s' deleted\n") % bookmark)
68 finally:
68 finally:
69 release(lock, wlock)
69 release(lock, wlock)
70
70
71
71
72 @command("strip",
72 @command("strip",
73 [
73 [
74 ('r', 'rev', [], _('strip specified revision (optional, '
74 ('r', 'rev', [], _('strip specified revision (optional, '
75 'can specify revisions without this '
75 'can specify revisions without this '
76 'option)'), _('REV')),
76 'option)'), _('REV')),
77 ('f', 'force', None, _('force removal of changesets, discard '
77 ('f', 'force', None, _('force removal of changesets, discard '
78 'uncommitted changes (no backup)')),
78 'uncommitted changes (no backup)')),
79 ('', 'no-backup', None, _('no backups')),
79 ('', 'no-backup', None, _('no backups')),
80 ('', 'nobackup', None, _('no backups (DEPRECATED)')),
80 ('', 'nobackup', None, _('no backups (DEPRECATED)')),
81 ('n', '', None, _('ignored (DEPRECATED)')),
81 ('n', '', None, _('ignored (DEPRECATED)')),
82 ('k', 'keep', None, _("do not modify working directory during "
82 ('k', 'keep', None, _("do not modify working directory during "
83 "strip")),
83 "strip")),
84 ('B', 'bookmark', '', _("remove revs only reachable from given"
84 ('B', 'bookmark', '', _("remove revs only reachable from given"
85 " bookmark"))],
85 " bookmark"))],
86 _('hg strip [-k] [-f] [-n] [-B bookmark] [-r] REV...'))
86 _('hg strip [-k] [-f] [-n] [-B bookmark] [-r] REV...'))
87 def stripcmd(ui, repo, *revs, **opts):
87 def stripcmd(ui, repo, *revs, **opts):
88 """strip changesets and all their descendants from the repository
88 """strip changesets and all their descendants from the repository
89
89
90 The strip command removes the specified changesets and all their
90 The strip command removes the specified changesets and all their
91 descendants. If the working directory has uncommitted changes, the
91 descendants. If the working directory has uncommitted changes, the
92 operation is aborted unless the --force flag is supplied, in which
92 operation is aborted unless the --force flag is supplied, in which
93 case changes will be discarded.
93 case changes will be discarded.
94
94
95 If a parent of the working directory is stripped, then the working
95 If a parent of the working directory is stripped, then the working
96 directory will automatically be updated to the most recent
96 directory will automatically be updated to the most recent
97 available ancestor of the stripped parent after the operation
97 available ancestor of the stripped parent after the operation
98 completes.
98 completes.
99
99
100 Any stripped changesets are stored in ``.hg/strip-backup`` as a
100 Any stripped changesets are stored in ``.hg/strip-backup`` as a
101 bundle (see :hg:`help bundle` and :hg:`help unbundle`). They can
101 bundle (see :hg:`help bundle` and :hg:`help unbundle`). They can
102 be restored by running :hg:`unbundle .hg/strip-backup/BUNDLE`,
102 be restored by running :hg:`unbundle .hg/strip-backup/BUNDLE`,
103 where BUNDLE is the bundle file created by the strip. Note that
103 where BUNDLE is the bundle file created by the strip. Note that
104 the local revision numbers will in general be different after the
104 the local revision numbers will in general be different after the
105 restore.
105 restore.
106
106
107 Use the --no-backup option to discard the backup bundle once the
107 Use the --no-backup option to discard the backup bundle once the
108 operation completes.
108 operation completes.
109
109
110 Strip is not a history-rewriting operation and can be used on
110 Strip is not a history-rewriting operation and can be used on
111 changesets in the public phase. But if the stripped changesets have
111 changesets in the public phase. But if the stripped changesets have
112 been pushed to a remote repository you will likely pull them again.
112 been pushed to a remote repository you will likely pull them again.
113
113
114 Return 0 on success.
114 Return 0 on success.
115 """
115 """
116 backup = True
116 backup = True
117 if opts.get('no_backup') or opts.get('nobackup'):
117 if opts.get('no_backup') or opts.get('nobackup'):
118 backup = False
118 backup = False
119
119
120 cl = repo.changelog
120 cl = repo.changelog
121 revs = list(revs) + opts.get('rev')
121 revs = list(revs) + opts.get('rev')
122 revs = set(scmutil.revrange(repo, revs))
122 revs = set(scmutil.revrange(repo, revs))
123
123
124 wlock = repo.wlock()
124 wlock = repo.wlock()
125 try:
125 try:
126 if opts.get('bookmark'):
126 if opts.get('bookmark'):
127 mark = opts.get('bookmark')
127 mark = opts.get('bookmark')
128 marks = repo._bookmarks
128 marks = repo._bookmarks
129 if mark not in marks:
129 if mark not in marks:
130 raise util.Abort(_("bookmark '%s' not found") % mark)
130 raise util.Abort(_("bookmark '%s' not found") % mark)
131
131
132 # If the requested bookmark is not the only one pointing to a
132 # If the requested bookmark is not the only one pointing to a
133 # revision, we have to only delete the bookmark and not strip
133 # revision, we have to only delete the bookmark and not strip
134 # anything. revsets cannot detect that case.
134 # anything. revsets cannot detect that case.
135 uniquebm = True
135 uniquebm = True
136 for m, n in marks.iteritems():
136 for m, n in marks.iteritems():
137 if m != mark and n == repo[mark].node():
137 if m != mark and n == repo[mark].node():
138 uniquebm = False
138 uniquebm = False
139 break
139 break
140 if uniquebm:
140 if uniquebm:
141 rsrevs = repo.revs("ancestors(bookmark(%s)) - "
141 rsrevs = repo.revs("ancestors(bookmark(%s)) - "
142 "ancestors(head() and not bookmark(%s)) - "
142 "ancestors(head() and not bookmark(%s)) - "
143 "ancestors(bookmark() and not bookmark(%s))",
143 "ancestors(bookmark() and not bookmark(%s))",
144 mark, mark, mark)
144 mark, mark, mark)
145 revs.update(set(rsrevs))
145 revs.update(set(rsrevs))
146 if not revs:
146 if not revs:
147 del marks[mark]
147 del marks[mark]
148 marks.write()
148 marks.write()
149 ui.write(_("bookmark '%s' deleted\n") % mark)
149 ui.write(_("bookmark '%s' deleted\n") % mark)
150
150
151 if not revs:
151 if not revs:
152 raise util.Abort(_('empty revision set'))
152 raise util.Abort(_('empty revision set'))
153
153
154 descendants = set(cl.descendants(revs))
154 descendants = set(cl.descendants(revs))
155 strippedrevs = revs.union(descendants)
155 strippedrevs = revs.union(descendants)
156 roots = revs.difference(descendants)
156 roots = revs.difference(descendants)
157
157
158 update = False
158 update = False
159 # if one of the wdir parents is stripped we'll need
159 # if one of the wdir parents is stripped we'll need
160 # to update away to an earlier revision
160 # to update away to an earlier revision
161 for p in repo.dirstate.parents():
161 for p in repo.dirstate.parents():
162 if p != nullid and cl.rev(p) in strippedrevs:
162 if p != nullid and cl.rev(p) in strippedrevs:
163 update = True
163 update = True
164 break
164 break
165
165
166 rootnodes = set(cl.node(r) for r in roots)
166 rootnodes = set(cl.node(r) for r in roots)
167
167
168 q = getattr(repo, 'mq', None)
168 q = getattr(repo, 'mq', None)
169 if q is not None and q.applied:
169 if q is not None and q.applied:
170 # refresh queue state if we're about to strip
170 # refresh queue state if we're about to strip
171 # applied patches
171 # applied patches
172 if cl.rev(repo.lookup('qtip')) in strippedrevs:
172 if cl.rev(repo.lookup('qtip')) in strippedrevs:
173 q.applieddirty = True
173 q.applieddirty = True
174 start = 0
174 start = 0
175 end = len(q.applied)
175 end = len(q.applied)
176 for i, statusentry in enumerate(q.applied):
176 for i, statusentry in enumerate(q.applied):
177 if statusentry.node in rootnodes:
177 if statusentry.node in rootnodes:
178 # if one of the stripped roots is an applied
178 # if one of the stripped roots is an applied
179 # patch, only part of the queue is stripped
179 # patch, only part of the queue is stripped
180 start = i
180 start = i
181 break
181 break
182 del q.applied[start:end]
182 del q.applied[start:end]
183 q.savedirty()
183 q.savedirty()
184
184
185 revs = sorted(rootnodes)
185 revs = sorted(rootnodes)
186 if update and opts.get('keep'):
186 if update and opts.get('keep'):
187 urev, p2 = repo.changelog.parents(revs[0])
187 urev, p2 = repo.changelog.parents(revs[0])
188 if (util.safehasattr(repo, 'mq') and p2 != nullid
188 if (util.safehasattr(repo, 'mq') and p2 != nullid
189 and p2 in [x.node for x in repo.mq.applied]):
189 and p2 in [x.node for x in repo.mq.applied]):
190 urev = p2
190 urev = p2
191 uctx = repo[urev]
191 uctx = repo[urev]
192
192
193 # only reset the dirstate for files that would actually change
193 # only reset the dirstate for files that would actually change
194 # between the working context and uctx
194 # between the working context and uctx
195 descendantrevs = repo.revs("%s::." % uctx.rev())
195 descendantrevs = repo.revs("%s::." % uctx.rev())
196 changedfiles = []
196 changedfiles = []
197 for rev in descendantrevs:
197 for rev in descendantrevs:
198 # blindly reset the files, regardless of what actually changed
198 # blindly reset the files, regardless of what actually changed
199 changedfiles.extend(repo[rev].files())
199 changedfiles.extend(repo[rev].files())
200
200
201 # reset files that only changed in the dirstate too
201 # reset files that only changed in the dirstate too
202 dirstate = repo.dirstate
202 dirstate = repo.dirstate
203 dirchanges = [f for f in dirstate if dirstate[f] != 'n']
203 dirchanges = [f for f in dirstate if dirstate[f] != 'n']
204 changedfiles.extend(dirchanges)
204 changedfiles.extend(dirchanges)
205
205
206 repo.dirstate.rebuild(urev, uctx.manifest(), changedfiles)
206 repo.dirstate.rebuild(urev, uctx.manifest(), changedfiles)
207 repo.dirstate.write()
207 repo.dirstate.write()
208
208
209 # clear resolve state
209 # clear resolve state
210 ms = merge.mergestate(repo)
210 ms = merge.mergestate(repo)
211 ms.reset(repo['.'].node())
211 ms.reset(repo['.'].node())
212
212
213 update = False
213 update = False
214
214
215
215
216 strip(ui, repo, revs, backup=backup, update=update,
216 strip(ui, repo, revs, backup=backup, update=update,
217 force=opts.get('force'), bookmark=opts.get('bookmark'))
217 force=opts.get('force'), bookmark=opts.get('bookmark'))
218 finally:
218 finally:
219 wlock.release()
219 wlock.release()
220
220
221 return 0
221 return 0
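
In the --bookmark branch above, revisions are only stripped when the named bookmark is the sole one pointing at its changeset; otherwise just the bookmark is deleted, since a revset alone cannot distinguish the two cases. The uniqueness test is a straight scan over the bookmark map; with a plain {name: node} dict standing in for repo._bookmarks:

def bookmark_is_unique(marks, mark):
    """True if no other bookmark points at the same node as 'mark'
    (the 'uniquebm' loop above, written over a plain dict)."""
    node = marks[mark]
    return all(n != node for m, n in marks.items() if m != mark)

marks = {'feature': 'abc1', 'backup': 'abc1', 'stable': 'def2'}
assert not bookmark_is_unique(marks, 'feature')  # 'backup' shares the node
assert bookmark_is_unique(marks, 'stable')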
@@ -1,558 +1,558 b''
1 # Mercurial bookmark support code
1 # Mercurial bookmark support code
2 #
2 #
3 # Copyright 2008 David Soria Parra <dsp@php.net>
3 # Copyright 2008 David Soria Parra <dsp@php.net>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 import os
8 import os
9 from mercurial.i18n import _
9 from mercurial.i18n import _
10 from mercurial.node import hex, bin
10 from mercurial.node import hex, bin
11 from mercurial import encoding, error, util, obsolete, lock as lockmod
11 from mercurial import encoding, error, util, obsolete, lock as lockmod
12 import errno
12 import errno
13
13
14 class bmstore(dict):
14 class bmstore(dict):
15 """Storage for bookmarks.
15 """Storage for bookmarks.
16
16
17 This object should do all bookmark reads and writes, so that it's
17 This object should do all bookmark reads and writes, so that it's
18 fairly simple to replace the storage underlying bookmarks without
18 fairly simple to replace the storage underlying bookmarks without
19 having to clone the logic surrounding bookmarks.
19 having to clone the logic surrounding bookmarks.
20
20
21 This particular bmstore implementation stores bookmarks as
21 This particular bmstore implementation stores bookmarks as
22 {hash}\s{name}\n (the same format as localtags) in
22 {hash}\s{name}\n (the same format as localtags) in
23 .hg/bookmarks. The mapping is stored as {name: nodeid}.
23 .hg/bookmarks. The mapping is stored as {name: nodeid}.
24
24
25 This class does NOT handle the "current" bookmark state at this
25 This class does NOT handle the "current" bookmark state at this
26 time.
26 time.
27 """
27 """
28
28
29 def __init__(self, repo):
29 def __init__(self, repo):
30 dict.__init__(self)
30 dict.__init__(self)
31 self._repo = repo
31 self._repo = repo
32 try:
32 try:
33 bkfile = self.getbkfile(repo)
33 bkfile = self.getbkfile(repo)
34 for line in bkfile:
34 for line in bkfile:
35 line = line.strip()
35 line = line.strip()
36 if not line:
36 if not line:
37 continue
37 continue
38 if ' ' not in line:
38 if ' ' not in line:
39 repo.ui.warn(_('malformed line in .hg/bookmarks: %r\n')
39 repo.ui.warn(_('malformed line in .hg/bookmarks: %r\n')
40 % line)
40 % line)
41 continue
41 continue
42 sha, refspec = line.split(' ', 1)
42 sha, refspec = line.split(' ', 1)
43 refspec = encoding.tolocal(refspec)
43 refspec = encoding.tolocal(refspec)
44 try:
44 try:
45 self[refspec] = repo.changelog.lookup(sha)
45 self[refspec] = repo.changelog.lookup(sha)
46 except LookupError:
46 except LookupError:
47 pass
47 pass
48 except IOError, inst:
48 except IOError, inst:
49 if inst.errno != errno.ENOENT:
49 if inst.errno != errno.ENOENT:
50 raise
50 raise
51
51
52 def getbkfile(self, repo):
52 def getbkfile(self, repo):
53 bkfile = None
53 bkfile = None
54 if 'HG_PENDING' in os.environ:
54 if 'HG_PENDING' in os.environ:
55 try:
55 try:
56 bkfile = repo.vfs('bookmarks.pending')
56 bkfile = repo.vfs('bookmarks.pending')
57 except IOError, inst:
57 except IOError, inst:
58 if inst.errno != errno.ENOENT:
58 if inst.errno != errno.ENOENT:
59 raise
59 raise
60 if bkfile is None:
60 if bkfile is None:
61 bkfile = repo.vfs('bookmarks')
61 bkfile = repo.vfs('bookmarks')
62 return bkfile
62 return bkfile
63
63
64 def recordchange(self, tr):
64 def recordchange(self, tr):
65 """record that bookmarks have been changed in a transaction
65 """record that bookmarks have been changed in a transaction
66
66
67 The transaction is then responsible for updating the file content."""
67 The transaction is then responsible for updating the file content."""
68 tr.addfilegenerator('bookmarks', ('bookmarks',), self._write,
68 tr.addfilegenerator('bookmarks', ('bookmarks',), self._write,
69 location='plain')
69 location='plain')
70 tr.hookargs['bookmark_moved'] = '1'
70 tr.hookargs['bookmark_moved'] = '1'
71
71
72 def write(self):
72 def write(self):
73 '''Write bookmarks
73 '''Write bookmarks
74
74
75 Write the given bookmark => hash dictionary to the .hg/bookmarks file
75 Write the given bookmark => hash dictionary to the .hg/bookmarks file
76 in a format equal to those of localtags.
76 in a format equal to those of localtags.
77
77
78 We also store a backup of the previous state in undo.bookmarks that
78 We also store a backup of the previous state in undo.bookmarks that
79 can be copied back on rollback.
79 can be copied back on rollback.
80 '''
80 '''
81 repo = self._repo
81 repo = self._repo
82 self._writerepo(repo)
82 self._writerepo(repo)
83
83
84 def _writerepo(self, repo):
84 def _writerepo(self, repo):
85 """Factored out for extensibility"""
85 """Factored out for extensibility"""
86 if repo._bookmarkcurrent not in self:
86 if repo._activebookmark not in self:
87 deactivate(repo)
87 deactivate(repo)
88
88
89 wlock = repo.wlock()
89 wlock = repo.wlock()
90 try:
90 try:
91
91
92 file = repo.vfs('bookmarks', 'w', atomictemp=True)
92 file = repo.vfs('bookmarks', 'w', atomictemp=True)
93 self._write(file)
93 self._write(file)
94 file.close()
94 file.close()
95
95
96 # touch 00changelog.i so hgweb reloads bookmarks (no lock needed)
96 # touch 00changelog.i so hgweb reloads bookmarks (no lock needed)
97 try:
97 try:
98 repo.svfs.utime('00changelog.i', None)
98 repo.svfs.utime('00changelog.i', None)
99 except OSError:
99 except OSError:
100 pass
100 pass
101
101
102 finally:
102 finally:
103 wlock.release()
103 wlock.release()
104
104
105 def _write(self, fp):
105 def _write(self, fp):
106 for name, node in self.iteritems():
106 for name, node in self.iteritems():
107 fp.write("%s %s\n" % (hex(node), encoding.fromlocal(name)))
107 fp.write("%s %s\n" % (hex(node), encoding.fromlocal(name)))
108
108
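
Per the class docstring, .hg/bookmarks holds one '{hash} {name}' line per bookmark and bmstore keeps the inverse {name: node} mapping. A round-trip of just the text format, leaving out the hex/binary conversion and the changelog lookup the real loader performs:

def parsebookmarks(text):
    """Parse '{hexnode} {name}' lines into {name: hexnode}, skipping blank
    or malformed lines the way the constructor above does."""
    marks = {}
    for line in text.splitlines():
        line = line.strip()
        if not line or ' ' not in line:
            continue
        sha, name = line.split(' ', 1)
        marks[name] = sha
    return marks

def writebookmarks(marks):
    return ''.join('%s %s\n' % (sha, name)
                   for name, sha in sorted(marks.items()))

text = '%s main\n%s feature/x\n' % ('a' * 40, 'b' * 40)
assert parsebookmarks(writebookmarks(parsebookmarks(text))) == parsebookmarks(text)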
109 def readactive(repo):
109 def readactive(repo):
110 """
110 """
111 Get the active bookmark. We can have an active bookmark that updates
111 Get the active bookmark. We can have an active bookmark that updates
112 itself as we commit. This function returns the name of that bookmark.
112 itself as we commit. This function returns the name of that bookmark.
113 It is stored in .hg/bookmarks.current
113 It is stored in .hg/bookmarks.current
114 """
114 """
115 mark = None
115 mark = None
116 try:
116 try:
117 file = repo.vfs('bookmarks.current')
117 file = repo.vfs('bookmarks.current')
118 except IOError, inst:
118 except IOError, inst:
119 if inst.errno != errno.ENOENT:
119 if inst.errno != errno.ENOENT:
120 raise
120 raise
121 return None
121 return None
122 try:
122 try:
123 # No readline() in osutil.posixfile, reading everything is cheap
123 # No readline() in osutil.posixfile, reading everything is cheap
124 mark = encoding.tolocal((file.readlines() or [''])[0])
124 mark = encoding.tolocal((file.readlines() or [''])[0])
125 if mark == '' or mark not in repo._bookmarks:
125 if mark == '' or mark not in repo._bookmarks:
126 mark = None
126 mark = None
127 finally:
127 finally:
128 file.close()
128 file.close()
129 return mark
129 return mark
130
130
131 def activate(repo, mark):
131 def activate(repo, mark):
132 """
132 """
133 Set the given bookmark to be 'active', meaning that this bookmark will
133 Set the given bookmark to be 'active', meaning that this bookmark will
134 follow new commits that are made.
134 follow new commits that are made.
135 The name is recorded in .hg/bookmarks.current
135 The name is recorded in .hg/bookmarks.current
136 """
136 """
137 if mark not in repo._bookmarks:
137 if mark not in repo._bookmarks:
138 raise AssertionError('bookmark %s does not exist!' % mark)
138 raise AssertionError('bookmark %s does not exist!' % mark)
139
139
140 current = repo._bookmarkcurrent
140 current = repo._activebookmark
141 if current == mark:
141 if current == mark:
142 return
142 return
143
143
144 wlock = repo.wlock()
144 wlock = repo.wlock()
145 try:
145 try:
146 file = repo.vfs('bookmarks.current', 'w', atomictemp=True)
146 file = repo.vfs('bookmarks.current', 'w', atomictemp=True)
147 file.write(encoding.fromlocal(mark))
147 file.write(encoding.fromlocal(mark))
148 file.close()
148 file.close()
149 finally:
149 finally:
150 wlock.release()
150 wlock.release()
151 repo._bookmarkcurrent = mark
151 repo._activebookmark = mark
152
152
153 def deactivate(repo):
153 def deactivate(repo):
154 """
154 """
155 Unset the active bookmark in this repository.
155 Unset the active bookmark in this repository.
156 """
156 """
157 wlock = repo.wlock()
157 wlock = repo.wlock()
158 try:
158 try:
159 try:
159 try:
160 repo.vfs.unlink('bookmarks.current')
160 repo.vfs.unlink('bookmarks.current')
161 repo._bookmarkcurrent = None
161 repo._activebookmark = None
162 except OSError, inst:
162 except OSError, inst:
163 if inst.errno != errno.ENOENT:
163 if inst.errno != errno.ENOENT:
164 raise
164 raise
165 finally:
165 finally:
166 wlock.release()
166 wlock.release()
167
167
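
readactive, activate and deactivate manage one small extra file, .hg/bookmarks.current, holding the name of the bookmark that follows new commits. With the locking and vfs layer stripped away, the life cycle reduces to the sketch below (plain file I/O; the paths are illustrative):

import errno
import os

def readactive(hgdir, knownmarks):
    try:
        with open(os.path.join(hgdir, 'bookmarks.current')) as f:
            mark = f.read()
    except IOError as err:
        if err.errno != errno.ENOENT:
            raise
        return None
    return mark if mark and mark in knownmarks else None

def activate(hgdir, mark):
    with open(os.path.join(hgdir, 'bookmarks.current'), 'w') as f:
        f.write(mark)

def deactivate(hgdir):
    try:
        os.unlink(os.path.join(hgdir, 'bookmarks.current'))
    except OSError as err:
        if err.errno != errno.ENOENT:
            raise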
168 def iscurrent(repo, mark=None, parents=None):
168 def iscurrent(repo, mark=None, parents=None):
169 '''Tell whether the active bookmark is also current
169 '''Tell whether the active bookmark is also current
170
170
171 I.e., the bookmark listed in .hg/bookmarks.current also points to a
171 I.e., the bookmark listed in .hg/bookmarks.current also points to a
172 parent of the working directory.
172 parent of the working directory.
173 '''
173 '''
174 if not mark:
174 if not mark:
175 mark = repo._bookmarkcurrent
175 mark = repo._activebookmark
176 if not parents:
176 if not parents:
177 parents = [p.node() for p in repo[None].parents()]
177 parents = [p.node() for p in repo[None].parents()]
178 marks = repo._bookmarks
178 marks = repo._bookmarks
179 return (mark in marks and marks[mark] in parents)
179 return (mark in marks and marks[mark] in parents)
180
180
181 def updatecurrentbookmark(repo, oldnode, curbranch):
181 def updatecurrentbookmark(repo, oldnode, curbranch):
182 try:
182 try:
183 return update(repo, oldnode, repo.branchtip(curbranch))
183 return update(repo, oldnode, repo.branchtip(curbranch))
184 except error.RepoLookupError:
184 except error.RepoLookupError:
185 if curbranch == "default": # no default branch!
185 if curbranch == "default": # no default branch!
186 return update(repo, oldnode, repo.lookup("tip"))
186 return update(repo, oldnode, repo.lookup("tip"))
187 else:
187 else:
188 raise util.Abort(_("branch %s not found") % curbranch)
188 raise util.Abort(_("branch %s not found") % curbranch)
189
189
190 def deletedivergent(repo, deletefrom, bm):
190 def deletedivergent(repo, deletefrom, bm):
191 '''Delete divergent versions of bm on nodes in deletefrom.
191 '''Delete divergent versions of bm on nodes in deletefrom.
192
192
193 Return True if at least one bookmark was deleted, False otherwise.'''
193 Return True if at least one bookmark was deleted, False otherwise.'''
194 deleted = False
194 deleted = False
195 marks = repo._bookmarks
195 marks = repo._bookmarks
196 divergent = [b for b in marks if b.split('@', 1)[0] == bm.split('@', 1)[0]]
196 divergent = [b for b in marks if b.split('@', 1)[0] == bm.split('@', 1)[0]]
197 for mark in divergent:
197 for mark in divergent:
198 if mark == '@' or '@' not in mark:
198 if mark == '@' or '@' not in mark:
199 # can't be divergent by definition
199 # can't be divergent by definition
200 continue
200 continue
201 if mark and marks[mark] in deletefrom:
201 if mark and marks[mark] in deletefrom:
202 if mark != bm:
202 if mark != bm:
203 del marks[mark]
203 del marks[mark]
204 deleted = True
204 deleted = True
205 return deleted
205 return deleted
206
206
207 def calculateupdate(ui, repo, checkout):
207 def calculateupdate(ui, repo, checkout):
208 '''Return a tuple (targetrev, movemarkfrom) indicating the rev to
208 '''Return a tuple (targetrev, movemarkfrom) indicating the rev to
209 check out and where to move the active bookmark from, if needed.'''
209 check out and where to move the active bookmark from, if needed.'''
210 movemarkfrom = None
210 movemarkfrom = None
211 if checkout is None:
211 if checkout is None:
212 curmark = repo._bookmarkcurrent
212 curmark = repo._activebookmark
213 if iscurrent(repo):
213 if iscurrent(repo):
214 movemarkfrom = repo['.'].node()
214 movemarkfrom = repo['.'].node()
215 elif curmark:
215 elif curmark:
216 ui.status(_("updating to active bookmark %s\n") % curmark)
216 ui.status(_("updating to active bookmark %s\n") % curmark)
217 checkout = curmark
217 checkout = curmark
218 return (checkout, movemarkfrom)
218 return (checkout, movemarkfrom)
219
219
220 def update(repo, parents, node):
220 def update(repo, parents, node):
221 deletefrom = parents
221 deletefrom = parents
222 marks = repo._bookmarks
222 marks = repo._bookmarks
223 update = False
223 update = False
224 cur = repo._bookmarkcurrent
224 cur = repo._activebookmark
225 if not cur:
225 if not cur:
226 return False
226 return False
227
227
228 if marks[cur] in parents:
228 if marks[cur] in parents:
229 new = repo[node]
229 new = repo[node]
230 divs = [repo[b] for b in marks
230 divs = [repo[b] for b in marks
231 if b.split('@', 1)[0] == cur.split('@', 1)[0]]
231 if b.split('@', 1)[0] == cur.split('@', 1)[0]]
232 anc = repo.changelog.ancestors([new.rev()])
232 anc = repo.changelog.ancestors([new.rev()])
233 deletefrom = [b.node() for b in divs if b.rev() in anc or b == new]
233 deletefrom = [b.node() for b in divs if b.rev() in anc or b == new]
234 if validdest(repo, repo[marks[cur]], new):
234 if validdest(repo, repo[marks[cur]], new):
235 marks[cur] = new.node()
235 marks[cur] = new.node()
236 update = True
236 update = True
237
237
238 if deletedivergent(repo, deletefrom, cur):
238 if deletedivergent(repo, deletefrom, cur):
239 update = True
239 update = True
240
240
241 if update:
241 if update:
242 marks.write()
242 marks.write()
243 return update
243 return update
244
244
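
update() moves the active bookmark forward when it currently sits on a working-directory parent and the destination passes validdest; divergent '@'-suffixed variants that fall into the new ancestry are dropped as a side effect. The core decision, with the successor test injected as a predicate and the divergence cleanup omitted, might be sketched as:

def advance_active(marks, active, parents, new_node, is_valid_successor):
    """Move 'active' to new_node if it currently points at a
    working-directory parent and the move is a fast-forward according to
    the supplied predicate. Returns True if the mark moved."""
    if not active or marks.get(active) not in parents:
        return False
    if is_valid_successor(marks[active], new_node):
        marks[active] = new_node
        return True
    return False

marks = {'dev': 'n1'}
moved = advance_active(marks, 'dev', {'n1'}, 'n2', lambda old, new: True)
assert moved and marks['dev'] == 'n2'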
245 def listbookmarks(repo):
245 def listbookmarks(repo):
246 # We may try to list bookmarks on a repo type that does not
246 # We may try to list bookmarks on a repo type that does not
247 # support it (e.g., statichttprepository).
247 # support it (e.g., statichttprepository).
248 marks = getattr(repo, '_bookmarks', {})
248 marks = getattr(repo, '_bookmarks', {})
249
249
250 d = {}
250 d = {}
251 hasnode = repo.changelog.hasnode
251 hasnode = repo.changelog.hasnode
252 for k, v in marks.iteritems():
252 for k, v in marks.iteritems():
253 # don't expose local divergent bookmarks
253 # don't expose local divergent bookmarks
254 if hasnode(v) and ('@' not in k or k.endswith('@')):
254 if hasnode(v) and ('@' not in k or k.endswith('@')):
255 d[k] = hex(v)
255 d[k] = hex(v)
256 return d
256 return d
257
257
258 def pushbookmark(repo, key, old, new):
258 def pushbookmark(repo, key, old, new):
259 w = l = tr = None
259 w = l = tr = None
260 try:
260 try:
261 w = repo.wlock()
261 w = repo.wlock()
262 l = repo.lock()
262 l = repo.lock()
263 tr = repo.transaction('bookmarks')
263 tr = repo.transaction('bookmarks')
264 marks = repo._bookmarks
264 marks = repo._bookmarks
265 existing = hex(marks.get(key, ''))
265 existing = hex(marks.get(key, ''))
266 if existing != old and existing != new:
266 if existing != old and existing != new:
267 return False
267 return False
268 if new == '':
268 if new == '':
269 del marks[key]
269 del marks[key]
270 else:
270 else:
271 if new not in repo:
271 if new not in repo:
272 return False
272 return False
273 marks[key] = repo[new].node()
273 marks[key] = repo[new].node()
274 marks.recordchange(tr)
274 marks.recordchange(tr)
275 tr.close()
275 tr.close()
276 return True
276 return True
277 finally:
277 finally:
278 lockmod.release(tr, l, w)
278 lockmod.release(tr, l, w)
279
279
280 def compare(repo, srcmarks, dstmarks,
280 def compare(repo, srcmarks, dstmarks,
281 srchex=None, dsthex=None, targets=None):
281 srchex=None, dsthex=None, targets=None):
282 '''Compare bookmarks between srcmarks and dstmarks
282 '''Compare bookmarks between srcmarks and dstmarks
283
283
284 This returns a tuple "(addsrc, adddst, advsrc, advdst, diverge,
284 This returns a tuple "(addsrc, adddst, advsrc, advdst, diverge,
285 differ, invalid, same)", where each is a list of bookmarks, described below:
285 differ, invalid, same)", where each is a list of bookmarks, described below:
286
286
287 :addsrc: added on src side (removed on dst side, perhaps)
287 :addsrc: added on src side (removed on dst side, perhaps)
288 :adddst: added on dst side (removed on src side, perhaps)
288 :adddst: added on dst side (removed on src side, perhaps)
289 :advsrc: advanced on src side
289 :advsrc: advanced on src side
290 :advdst: advanced on dst side
290 :advdst: advanced on dst side
291 :diverge: diverge
291 :diverge: diverge
292 :differ: changed, but changeset referred on src is unknown on dst
292 :differ: changed, but changeset referred on src is unknown on dst
293 :invalid: unknown on both side
293 :invalid: unknown on both side
294 :same: same on both side
294 :same: same on both side
295
295
296 Each element of the lists in the result tuple is a tuple "(bookmark name,
296 Each element of the lists in the result tuple is a tuple "(bookmark name,
297 changeset ID on source side, changeset ID on destination
297 changeset ID on source side, changeset ID on destination
298 side)". Each changeset IDs are 40 hexadecimal digit string or
298 side)". Each changeset IDs are 40 hexadecimal digit string or
299 None.
299 None.
300
300
301 Changeset IDs of tuples in "addsrc", "adddst", "differ" or
301 Changeset IDs of tuples in "addsrc", "adddst", "differ" or
302 "invalid" list may be unknown for repo.
302 "invalid" list may be unknown for repo.
303
303
304 This function expects "srcmarks" and "dstmarks" to return the
304 This function expects "srcmarks" and "dstmarks" to return the
305 changeset ID as a 40-hexadecimal-digit string for a given
305 changeset ID as a 40-hexadecimal-digit string for a given
306 bookmark. If they do not (e.g. bmstore "repo._bookmarks" returns
306 bookmark. If they do not (e.g. bmstore "repo._bookmarks" returns
307 binary values), "srchex" or "dsthex" should be specified to convert
307 binary values), "srchex" or "dsthex" should be specified to convert
308 them into that form.
308 them into that form.
309
309
310 If "targets" is specified, only bookmarks listed in it are
310 If "targets" is specified, only bookmarks listed in it are
311 examined.
311 examined.
312 '''
312 '''
313 if not srchex:
313 if not srchex:
314 srchex = lambda x: x
314 srchex = lambda x: x
315 if not dsthex:
315 if not dsthex:
316 dsthex = lambda x: x
316 dsthex = lambda x: x
317
317
318 if targets:
318 if targets:
319 bset = set(targets)
319 bset = set(targets)
320 else:
320 else:
321 srcmarkset = set(srcmarks)
321 srcmarkset = set(srcmarks)
322 dstmarkset = set(dstmarks)
322 dstmarkset = set(dstmarks)
323 bset = srcmarkset | dstmarkset
323 bset = srcmarkset | dstmarkset
324
324
325 results = ([], [], [], [], [], [], [], [])
325 results = ([], [], [], [], [], [], [], [])
326 addsrc = results[0].append
326 addsrc = results[0].append
327 adddst = results[1].append
327 adddst = results[1].append
328 advsrc = results[2].append
328 advsrc = results[2].append
329 advdst = results[3].append
329 advdst = results[3].append
330 diverge = results[4].append
330 diverge = results[4].append
331 differ = results[5].append
331 differ = results[5].append
332 invalid = results[6].append
332 invalid = results[6].append
333 same = results[7].append
333 same = results[7].append
334
334
335 for b in sorted(bset):
335 for b in sorted(bset):
336 if b not in srcmarks:
336 if b not in srcmarks:
337 if b in dstmarks:
337 if b in dstmarks:
338 adddst((b, None, dsthex(dstmarks[b])))
338 adddst((b, None, dsthex(dstmarks[b])))
339 else:
339 else:
340 invalid((b, None, None))
340 invalid((b, None, None))
341 elif b not in dstmarks:
341 elif b not in dstmarks:
342 addsrc((b, srchex(srcmarks[b]), None))
342 addsrc((b, srchex(srcmarks[b]), None))
343 else:
343 else:
344 scid = srchex(srcmarks[b])
344 scid = srchex(srcmarks[b])
345 dcid = dsthex(dstmarks[b])
345 dcid = dsthex(dstmarks[b])
346 if scid == dcid:
346 if scid == dcid:
347 same((b, scid, dcid))
347 same((b, scid, dcid))
348 elif scid in repo and dcid in repo:
348 elif scid in repo and dcid in repo:
349 sctx = repo[scid]
349 sctx = repo[scid]
350 dctx = repo[dcid]
350 dctx = repo[dcid]
351 if sctx.rev() < dctx.rev():
351 if sctx.rev() < dctx.rev():
352 if validdest(repo, sctx, dctx):
352 if validdest(repo, sctx, dctx):
353 advdst((b, scid, dcid))
353 advdst((b, scid, dcid))
354 else:
354 else:
355 diverge((b, scid, dcid))
355 diverge((b, scid, dcid))
356 else:
356 else:
357 if validdest(repo, dctx, sctx):
357 if validdest(repo, dctx, sctx):
358 advsrc((b, scid, dcid))
358 advsrc((b, scid, dcid))
359 else:
359 else:
360 diverge((b, scid, dcid))
360 diverge((b, scid, dcid))
361 else:
361 else:
362 # it is too expensive to examine in detail, in this case
362 # it is too expensive to examine in detail, in this case
363 differ((b, scid, dcid))
363 differ((b, scid, dcid))
364
364
365 return results
365 return results
366
366
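
compare() sorts every bookmark into the eight result lists spelled out in its docstring. Most of the branching is set membership plus an ancestry check; a trimmed-down standalone version that classifies only added/same/differ (the advanced/diverged split needs changelog ancestry, so it is left out of this sketch):

def compare_marks(srcmarks, dstmarks):
    """Classify bookmarks held in two {name: id} dicts (simplified)."""
    addsrc, adddst, same, differ = [], [], [], []
    for b in sorted(set(srcmarks) | set(dstmarks)):
        if b not in dstmarks:
            addsrc.append((b, srcmarks[b], None))
        elif b not in srcmarks:
            adddst.append((b, None, dstmarks[b]))
        elif srcmarks[b] == dstmarks[b]:
            same.append((b, srcmarks[b], dstmarks[b]))
        else:
            differ.append((b, srcmarks[b], dstmarks[b]))
    return addsrc, adddst, same, differ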
367 def _diverge(ui, b, path, localmarks, remotenode):
367 def _diverge(ui, b, path, localmarks, remotenode):
368 '''Return appropriate diverged bookmark for specified ``path``
368 '''Return appropriate diverged bookmark for specified ``path``
369
369
370 This returns None if it fails to assign any divergent
370 This returns None if it fails to assign any divergent
371 bookmark name.
371 bookmark name.
372
372
373 This reuses an already existing bookmark with an "@number" suffix
373 This reuses an already existing bookmark with an "@number" suffix
374 if that bookmark refers to ``remotenode``.
374 if that bookmark refers to ``remotenode``.
375 '''
375 '''
376 if b == '@':
376 if b == '@':
377 b = ''
377 b = ''
378 # try to use an @pathalias suffix
378 # try to use an @pathalias suffix
379 # if an @pathalias already exists, we overwrite (update) it
379 # if an @pathalias already exists, we overwrite (update) it
380 if path.startswith("file:"):
380 if path.startswith("file:"):
381 path = util.url(path).path
381 path = util.url(path).path
382 for p, u in ui.configitems("paths"):
382 for p, u in ui.configitems("paths"):
383 if u.startswith("file:"):
383 if u.startswith("file:"):
384 u = util.url(u).path
384 u = util.url(u).path
385 if path == u:
385 if path == u:
386 return '%s@%s' % (b, p)
386 return '%s@%s' % (b, p)
387
387
388 # assign a unique "@number" suffix newly
388 # assign a unique "@number" suffix newly
389 for x in range(1, 100):
389 for x in range(1, 100):
390 n = '%s@%d' % (b, x)
390 n = '%s@%d' % (b, x)
391 if n not in localmarks or localmarks[n] == remotenode:
391 if n not in localmarks or localmarks[n] == remotenode:
392 return n
392 return n
393
393
394 return None
394 return None
395
395
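
_diverge names a divergent bookmark: it prefers an '@pathalias' suffix when the remote path has an alias in [paths], and otherwise takes the lowest free '@N' suffix, reusing a taken one only if it already points at the remote node. The numbered fallback in isolation:

def divergent_name(base, localmarks, remotenode, limit=100):
    """Return 'base@N' for the first N whose name is unused or already
    points at remotenode; None if 1..limit-1 are all taken (mirrors the
    numbered-suffix loop above)."""
    if base == '@':
        base = ''
    for x in range(1, limit):
        n = '%s@%d' % (base, x)
        if n not in localmarks or localmarks[n] == remotenode:
            return n
    return None

assert divergent_name('feature', {'feature@1': 'other'}, 'node') == 'feature@2'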
396 def updatefromremote(ui, repo, remotemarks, path, trfunc, explicit=()):
396 def updatefromremote(ui, repo, remotemarks, path, trfunc, explicit=()):
397 ui.debug("checking for updated bookmarks\n")
397 ui.debug("checking for updated bookmarks\n")
398 localmarks = repo._bookmarks
398 localmarks = repo._bookmarks
399 (addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same
399 (addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same
400 ) = compare(repo, remotemarks, localmarks, dsthex=hex)
400 ) = compare(repo, remotemarks, localmarks, dsthex=hex)
401
401
402 status = ui.status
402 status = ui.status
403 warn = ui.warn
403 warn = ui.warn
404 if ui.configbool('ui', 'quietbookmarkmove', False):
404 if ui.configbool('ui', 'quietbookmarkmove', False):
405 status = warn = ui.debug
405 status = warn = ui.debug
406
406
407 explicit = set(explicit)
407 explicit = set(explicit)
408 changed = []
408 changed = []
409 for b, scid, dcid in addsrc:
409 for b, scid, dcid in addsrc:
410 if scid in repo: # add remote bookmarks for changes we already have
410 if scid in repo: # add remote bookmarks for changes we already have
411 changed.append((b, bin(scid), status,
411 changed.append((b, bin(scid), status,
412 _("adding remote bookmark %s\n") % (b)))
412 _("adding remote bookmark %s\n") % (b)))
413 for b, scid, dcid in advsrc:
413 for b, scid, dcid in advsrc:
414 changed.append((b, bin(scid), status,
414 changed.append((b, bin(scid), status,
415 _("updating bookmark %s\n") % (b)))
415 _("updating bookmark %s\n") % (b)))
416 # remove normal movement from explicit set
416 # remove normal movement from explicit set
417 explicit.difference_update(d[0] for d in changed)
417 explicit.difference_update(d[0] for d in changed)
418
418
419 for b, scid, dcid in diverge:
419 for b, scid, dcid in diverge:
420 if b in explicit:
420 if b in explicit:
421 explicit.discard(b)
421 explicit.discard(b)
422 changed.append((b, bin(scid), status,
422 changed.append((b, bin(scid), status,
423 _("importing bookmark %s\n") % (b)))
423 _("importing bookmark %s\n") % (b)))
424 else:
424 else:
425 snode = bin(scid)
425 snode = bin(scid)
426 db = _diverge(ui, b, path, localmarks, snode)
426 db = _diverge(ui, b, path, localmarks, snode)
427 if db:
427 if db:
428 changed.append((db, snode, warn,
428 changed.append((db, snode, warn,
429 _("divergent bookmark %s stored as %s\n") %
429 _("divergent bookmark %s stored as %s\n") %
430 (b, db)))
430 (b, db)))
431 else:
431 else:
432 warn(_("warning: failed to assign numbered name "
432 warn(_("warning: failed to assign numbered name "
433 "to divergent bookmark %s\n") % (b))
433 "to divergent bookmark %s\n") % (b))
434 for b, scid, dcid in adddst + advdst:
434 for b, scid, dcid in adddst + advdst:
435 if b in explicit:
435 if b in explicit:
436 explicit.discard(b)
436 explicit.discard(b)
437 changed.append((b, bin(scid), status,
437 changed.append((b, bin(scid), status,
438 _("importing bookmark %s\n") % (b)))
438 _("importing bookmark %s\n") % (b)))
439
439
440 if changed:
440 if changed:
441 tr = trfunc()
441 tr = trfunc()
442 for b, node, writer, msg in sorted(changed):
442 for b, node, writer, msg in sorted(changed):
443 localmarks[b] = node
443 localmarks[b] = node
444 writer(msg)
444 writer(msg)
445 localmarks.recordchange(tr)
445 localmarks.recordchange(tr)
446
446
447 def incoming(ui, repo, other):
447 def incoming(ui, repo, other):
448 '''Show bookmarks incoming from other to repo
448 '''Show bookmarks incoming from other to repo
449 '''
449 '''
450 ui.status(_("searching for changed bookmarks\n"))
450 ui.status(_("searching for changed bookmarks\n"))
451
451
452 r = compare(repo, other.listkeys('bookmarks'), repo._bookmarks,
452 r = compare(repo, other.listkeys('bookmarks'), repo._bookmarks,
453 dsthex=hex)
453 dsthex=hex)
454 addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = r
454 addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = r
455
455
456 incomings = []
456 incomings = []
457 if ui.debugflag:
457 if ui.debugflag:
458 getid = lambda id: id
458 getid = lambda id: id
459 else:
459 else:
460 getid = lambda id: id[:12]
460 getid = lambda id: id[:12]
461 if ui.verbose:
461 if ui.verbose:
462 def add(b, id, st):
462 def add(b, id, st):
463 incomings.append(" %-25s %s %s\n" % (b, getid(id), st))
463 incomings.append(" %-25s %s %s\n" % (b, getid(id), st))
464 else:
464 else:
465 def add(b, id, st):
465 def add(b, id, st):
466 incomings.append(" %-25s %s\n" % (b, getid(id)))
466 incomings.append(" %-25s %s\n" % (b, getid(id)))
467 for b, scid, dcid in addsrc:
467 for b, scid, dcid in addsrc:
468 # i18n: "added" refers to a bookmark
468 # i18n: "added" refers to a bookmark
469 add(b, scid, _('added'))
469 add(b, scid, _('added'))
470 for b, scid, dcid in advsrc:
470 for b, scid, dcid in advsrc:
471 # i18n: "advanced" refers to a bookmark
471 # i18n: "advanced" refers to a bookmark
472 add(b, scid, _('advanced'))
472 add(b, scid, _('advanced'))
473 for b, scid, dcid in diverge:
473 for b, scid, dcid in diverge:
474 # i18n: "diverged" refers to a bookmark
474 # i18n: "diverged" refers to a bookmark
475 add(b, scid, _('diverged'))
475 add(b, scid, _('diverged'))
476 for b, scid, dcid in differ:
476 for b, scid, dcid in differ:
477 # i18n: "changed" refers to a bookmark
477 # i18n: "changed" refers to a bookmark
478 add(b, scid, _('changed'))
478 add(b, scid, _('changed'))
479
479
480 if not incomings:
480 if not incomings:
481 ui.status(_("no changed bookmarks found\n"))
481 ui.status(_("no changed bookmarks found\n"))
482 return 1
482 return 1
483
483
484 for s in sorted(incomings):
484 for s in sorted(incomings):
485 ui.write(s)
485 ui.write(s)
486
486
487 return 0
487 return 0
488
488
489 def outgoing(ui, repo, other):
489 def outgoing(ui, repo, other):
490 '''Show bookmarks outgoing from repo to other
490 '''Show bookmarks outgoing from repo to other
491 '''
491 '''
492 ui.status(_("searching for changed bookmarks\n"))
492 ui.status(_("searching for changed bookmarks\n"))
493
493
494 r = compare(repo, repo._bookmarks, other.listkeys('bookmarks'),
494 r = compare(repo, repo._bookmarks, other.listkeys('bookmarks'),
495 srchex=hex)
495 srchex=hex)
496 addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = r
496 addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = r
497
497
498 outgoings = []
498 outgoings = []
499 if ui.debugflag:
499 if ui.debugflag:
500 getid = lambda id: id
500 getid = lambda id: id
501 else:
501 else:
502 getid = lambda id: id[:12]
502 getid = lambda id: id[:12]
503 if ui.verbose:
503 if ui.verbose:
504 def add(b, id, st):
504 def add(b, id, st):
505 outgoings.append(" %-25s %s %s\n" % (b, getid(id), st))
505 outgoings.append(" %-25s %s %s\n" % (b, getid(id), st))
506 else:
506 else:
507 def add(b, id, st):
507 def add(b, id, st):
508 outgoings.append(" %-25s %s\n" % (b, getid(id)))
508 outgoings.append(" %-25s %s\n" % (b, getid(id)))
509 for b, scid, dcid in addsrc:
509 for b, scid, dcid in addsrc:
510 # i18n: "added" refers to a bookmark
510 # i18n: "added" refers to a bookmark
511 add(b, scid, _('added'))
511 add(b, scid, _('added'))
512 for b, scid, dcid in adddst:
512 for b, scid, dcid in adddst:
513 # i18n: "deleted" refers to a bookmark
513 # i18n: "deleted" refers to a bookmark
514 add(b, ' ' * 40, _('deleted'))
514 add(b, ' ' * 40, _('deleted'))
515 for b, scid, dcid in advsrc:
515 for b, scid, dcid in advsrc:
516 # i18n: "advanced" refers to a bookmark
516 # i18n: "advanced" refers to a bookmark
517 add(b, scid, _('advanced'))
517 add(b, scid, _('advanced'))
518 for b, scid, dcid in diverge:
518 for b, scid, dcid in diverge:
519 # i18n: "diverged" refers to a bookmark
519 # i18n: "diverged" refers to a bookmark
520 add(b, scid, _('diverged'))
520 add(b, scid, _('diverged'))
521 for b, scid, dcid in differ:
521 for b, scid, dcid in differ:
522 # i18n: "changed" refers to a bookmark
522 # i18n: "changed" refers to a bookmark
523 add(b, scid, _('changed'))
523 add(b, scid, _('changed'))
524
524
525 if not outgoings:
525 if not outgoings:
526 ui.status(_("no changed bookmarks found\n"))
526 ui.status(_("no changed bookmarks found\n"))
527 return 1
527 return 1
528
528
529 for s in sorted(outgoings):
529 for s in sorted(outgoings):
530 ui.write(s)
530 ui.write(s)
531
531
532 return 0
532 return 0
533
533
534 def summary(repo, other):
534 def summary(repo, other):
535 '''Compare bookmarks between repo and other for "hg summary" output
535 '''Compare bookmarks between repo and other for "hg summary" output
536
536
537 This returns "(# of incoming, # of outgoing)" tuple.
537 This returns "(# of incoming, # of outgoing)" tuple.
538 '''
538 '''
539 r = compare(repo, other.listkeys('bookmarks'), repo._bookmarks,
539 r = compare(repo, other.listkeys('bookmarks'), repo._bookmarks,
540 dsthex=hex)
540 dsthex=hex)
541 addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = r
541 addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = r
542 return (len(addsrc), len(adddst))
542 return (len(addsrc), len(adddst))
543
543
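# Hypothetical usage sketch (the URL and variable names are illustrative):
#
#     from mercurial import ui as uimod, hg
#     u = uimod.ui()
#     repo = hg.repository(u, '.')
#     other = hg.peer(repo, {}, 'http://example.com/repo')
#     nin, nout = summary(repo, other)
#     u.status("%d incoming, %d outgoing bookmarks\n" % (nin, nout))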
544 def validdest(repo, old, new):
544 def validdest(repo, old, new):
545 """Is the new bookmark destination a valid update from the old one"""
545 """Is the new bookmark destination a valid update from the old one"""
546 repo = repo.unfiltered()
546 repo = repo.unfiltered()
547 if old == new:
547 if old == new:
548 # Old == new -> nothing to update.
548 # Old == new -> nothing to update.
549 return False
549 return False
550 elif not old:
550 elif not old:
551 # old is nullrev, anything is valid.
551 # old is nullrev, anything is valid.
552 # (new != nullrev has been excluded by the previous check)
552 # (new != nullrev has been excluded by the previous check)
553 return True
553 return True
554 elif repo.obsstore:
554 elif repo.obsstore:
555 return new.node() in obsolete.foreground(repo, [old.node()])
555 return new.node() in obsolete.foreground(repo, [old.node()])
556 else:
556 else:
557 # still an independent clause as it is lazier (and therefore faster)
557 # still an independent clause as it is lazier (and therefore faster)
558 return old.descendant(new)
558 return old.descendant(new)
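# Illustrative sketch of how a caller might use validdest() to decide whether
# a bookmark may be moved ('mybook' is a hypothetical bookmark name):
#
#     old = repo[repo._bookmarks['mybook']]
#     new = repo['tip']
#     if validdest(repo, old, new):
#         # safe: fast-forward (descendant) or obsolescence-aware successor
#         ...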
@@ -1,3261 +1,3261 b''
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import hex, nullid, nullrev, short
8 from node import hex, nullid, nullrev, short
9 from i18n import _
9 from i18n import _
10 import os, sys, errno, re, tempfile, cStringIO, shutil
10 import os, sys, errno, re, tempfile, cStringIO, shutil
11 import util, scmutil, templater, patch, error, templatekw, revlog, copies
11 import util, scmutil, templater, patch, error, templatekw, revlog, copies
12 import match as matchmod
12 import match as matchmod
13 import context, repair, graphmod, revset, phases, obsolete, pathutil
13 import context, repair, graphmod, revset, phases, obsolete, pathutil
14 import changelog
14 import changelog
15 import bookmarks
15 import bookmarks
16 import encoding
16 import encoding
17 import crecord as crecordmod
17 import crecord as crecordmod
18 import lock as lockmod
18 import lock as lockmod
19
19
20 def parsealiases(cmd):
20 def parsealiases(cmd):
21 return cmd.lstrip("^").split("|")
21 return cmd.lstrip("^").split("|")
22
22
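# For example, command table keys look like "^log|history", so
#
#     parsealiases("^log|history")  =>  ['log', 'history']
#
# where the first entry is the canonical command name.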
23 def setupwrapcolorwrite(ui):
23 def setupwrapcolorwrite(ui):
24 # wrap ui.write so diff output can be labeled/colorized
24 # wrap ui.write so diff output can be labeled/colorized
25 def wrapwrite(orig, *args, **kw):
25 def wrapwrite(orig, *args, **kw):
26 label = kw.pop('label', '')
26 label = kw.pop('label', '')
27 for chunk, l in patch.difflabel(lambda: args):
27 for chunk, l in patch.difflabel(lambda: args):
28 orig(chunk, label=label + l)
28 orig(chunk, label=label + l)
29
29
30 oldwrite = ui.write
30 oldwrite = ui.write
31 def wrap(*args, **kwargs):
31 def wrap(*args, **kwargs):
32 return wrapwrite(oldwrite, *args, **kwargs)
32 return wrapwrite(oldwrite, *args, **kwargs)
33 setattr(ui, 'write', wrap)
33 setattr(ui, 'write', wrap)
34 return oldwrite
34 return oldwrite
35
35
36 def filterchunks(ui, originalhunks, usecurses, testfile):
36 def filterchunks(ui, originalhunks, usecurses, testfile):
37 if usecurses:
37 if usecurses:
38 if testfile:
38 if testfile:
39 recordfn = crecordmod.testdecorator(testfile,
39 recordfn = crecordmod.testdecorator(testfile,
40 crecordmod.testchunkselector)
40 crecordmod.testchunkselector)
41 else:
41 else:
42 recordfn = crecordmod.chunkselector
42 recordfn = crecordmod.chunkselector
43
43
44 return crecordmod.filterpatch(ui, originalhunks, recordfn)
44 return crecordmod.filterpatch(ui, originalhunks, recordfn)
45
45
46 else:
46 else:
47 return patch.filterpatch(ui, originalhunks)
47 return patch.filterpatch(ui, originalhunks)
48
48
49 def recordfilter(ui, originalhunks):
49 def recordfilter(ui, originalhunks):
50 usecurses = ui.configbool('experimental', 'crecord', False)
50 usecurses = ui.configbool('experimental', 'crecord', False)
51 testfile = ui.config('experimental', 'crecordtest', None)
51 testfile = ui.config('experimental', 'crecordtest', None)
52 oldwrite = setupwrapcolorwrite(ui)
52 oldwrite = setupwrapcolorwrite(ui)
53 try:
53 try:
54 newchunks = filterchunks(ui, originalhunks, usecurses, testfile)
54 newchunks = filterchunks(ui, originalhunks, usecurses, testfile)
55 finally:
55 finally:
56 ui.write = oldwrite
56 ui.write = oldwrite
57 return newchunks
57 return newchunks
58
58
59 def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
59 def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
60 filterfn, *pats, **opts):
60 filterfn, *pats, **opts):
61 import merge as mergemod
61 import merge as mergemod
62 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
62 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
63 ishunk = lambda x: isinstance(x, hunkclasses)
63 ishunk = lambda x: isinstance(x, hunkclasses)
64
64
65 if not ui.interactive():
65 if not ui.interactive():
66 raise util.Abort(_('running non-interactively, use %s instead') %
66 raise util.Abort(_('running non-interactively, use %s instead') %
67 cmdsuggest)
67 cmdsuggest)
68
68
69 # make sure username is set before going interactive
69 # make sure username is set before going interactive
70 if not opts.get('user'):
70 if not opts.get('user'):
71 ui.username() # raise exception, username not provided
71 ui.username() # raise exception, username not provided
72
72
73 def recordfunc(ui, repo, message, match, opts):
73 def recordfunc(ui, repo, message, match, opts):
74 """This is generic record driver.
74 """This is generic record driver.
75
75
76 Its job is to interactively filter local changes, and
76 Its job is to interactively filter local changes, and
77 accordingly prepare the working directory into a state in which the
77 accordingly prepare the working directory into a state in which the
78 job can be delegated to a non-interactive commit command such as
78 job can be delegated to a non-interactive commit command such as
79 'commit' or 'qrefresh'.
79 'commit' or 'qrefresh'.
80
80
81 After the actual job is done by a non-interactive command, the
81 After the actual job is done by a non-interactive command, the
82 working directory is restored to its original state.
82 working directory is restored to its original state.
83
83
84 In the end we'll record interesting changes, and everything else
84 In the end we'll record interesting changes, and everything else
85 will be left in place, so the user can continue working.
85 will be left in place, so the user can continue working.
86 """
86 """
87
87
88 checkunfinished(repo, commit=True)
88 checkunfinished(repo, commit=True)
89 merge = len(repo[None].parents()) > 1
89 merge = len(repo[None].parents()) > 1
90 if merge:
90 if merge:
91 raise util.Abort(_('cannot partially commit a merge '
91 raise util.Abort(_('cannot partially commit a merge '
92 '(use "hg commit" instead)'))
92 '(use "hg commit" instead)'))
93
93
94 status = repo.status(match=match)
94 status = repo.status(match=match)
95 diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
95 diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
96 diffopts.nodates = True
96 diffopts.nodates = True
97 diffopts.git = True
97 diffopts.git = True
98 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
98 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
99 originalchunks = patch.parsepatch(originaldiff)
99 originalchunks = patch.parsepatch(originaldiff)
100
100
101 # 1. filter the patch, so we have the intended-to-apply subset of it
101 # 1. filter the patch, so we have the intended-to-apply subset of it
102 try:
102 try:
103 chunks = filterfn(ui, originalchunks)
103 chunks = filterfn(ui, originalchunks)
104 except patch.PatchError, err:
104 except patch.PatchError, err:
105 raise util.Abort(_('error parsing patch: %s') % err)
105 raise util.Abort(_('error parsing patch: %s') % err)
106
106
107 # We need to keep a backup of files that have been newly added and
107 # We need to keep a backup of files that have been newly added and
108 # modified during the recording process because there is a previous
108 # modified during the recording process because there is a previous
109 # version without the edit in the workdir
109 # version without the edit in the workdir
110 newlyaddedandmodifiedfiles = set()
110 newlyaddedandmodifiedfiles = set()
111 for chunk in chunks:
111 for chunk in chunks:
112 if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \
112 if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \
113 originalchunks:
113 originalchunks:
114 newlyaddedandmodifiedfiles.add(chunk.header.filename())
114 newlyaddedandmodifiedfiles.add(chunk.header.filename())
115 contenders = set()
115 contenders = set()
116 for h in chunks:
116 for h in chunks:
117 try:
117 try:
118 contenders.update(set(h.files()))
118 contenders.update(set(h.files()))
119 except AttributeError:
119 except AttributeError:
120 pass
120 pass
121
121
122 changed = status.modified + status.added + status.removed
122 changed = status.modified + status.added + status.removed
123 newfiles = [f for f in changed if f in contenders]
123 newfiles = [f for f in changed if f in contenders]
124 if not newfiles:
124 if not newfiles:
125 ui.status(_('no changes to record\n'))
125 ui.status(_('no changes to record\n'))
126 return 0
126 return 0
127
127
128 modified = set(status.modified)
128 modified = set(status.modified)
129
129
130 # 2. backup changed files, so we can restore them in the end
130 # 2. backup changed files, so we can restore them in the end
131
131
132 if backupall:
132 if backupall:
133 tobackup = changed
133 tobackup = changed
134 else:
134 else:
135 tobackup = [f for f in newfiles if f in modified or f in \
135 tobackup = [f for f in newfiles if f in modified or f in \
136 newlyaddedandmodifiedfiles]
136 newlyaddedandmodifiedfiles]
137 backups = {}
137 backups = {}
138 if tobackup:
138 if tobackup:
139 backupdir = repo.join('record-backups')
139 backupdir = repo.join('record-backups')
140 try:
140 try:
141 os.mkdir(backupdir)
141 os.mkdir(backupdir)
142 except OSError, err:
142 except OSError, err:
143 if err.errno != errno.EEXIST:
143 if err.errno != errno.EEXIST:
144 raise
144 raise
145 try:
145 try:
146 # backup continues
146 # backup continues
147 for f in tobackup:
147 for f in tobackup:
148 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
148 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
149 dir=backupdir)
149 dir=backupdir)
150 os.close(fd)
150 os.close(fd)
151 ui.debug('backup %r as %r\n' % (f, tmpname))
151 ui.debug('backup %r as %r\n' % (f, tmpname))
152 util.copyfile(repo.wjoin(f), tmpname)
152 util.copyfile(repo.wjoin(f), tmpname)
153 shutil.copystat(repo.wjoin(f), tmpname)
153 shutil.copystat(repo.wjoin(f), tmpname)
154 backups[f] = tmpname
154 backups[f] = tmpname
155
155
156 fp = cStringIO.StringIO()
156 fp = cStringIO.StringIO()
157 for c in chunks:
157 for c in chunks:
158 fname = c.filename()
158 fname = c.filename()
159 if fname in backups:
159 if fname in backups:
160 c.write(fp)
160 c.write(fp)
161 dopatch = fp.tell()
161 dopatch = fp.tell()
162 fp.seek(0)
162 fp.seek(0)
163
163
164 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
164 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
165 # 3a. apply filtered patch to clean repo (clean)
165 # 3a. apply filtered patch to clean repo (clean)
166 if backups:
166 if backups:
167 # Equivalent to hg.revert
167 # Equivalent to hg.revert
168 choices = lambda key: key in backups
168 choices = lambda key: key in backups
169 mergemod.update(repo, repo.dirstate.p1(),
169 mergemod.update(repo, repo.dirstate.p1(),
170 False, True, choices)
170 False, True, choices)
171
171
172 # 3b. (apply)
172 # 3b. (apply)
173 if dopatch:
173 if dopatch:
174 try:
174 try:
175 ui.debug('applying patch\n')
175 ui.debug('applying patch\n')
176 ui.debug(fp.getvalue())
176 ui.debug(fp.getvalue())
177 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
177 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
178 except patch.PatchError, err:
178 except patch.PatchError, err:
179 raise util.Abort(str(err))
179 raise util.Abort(str(err))
180 del fp
180 del fp
181
181
182 # 4. We prepared working directory according to filtered
182 # 4. We prepared working directory according to filtered
183 # patch. Now is the time to delegate the job to
183 # patch. Now is the time to delegate the job to
184 # commit/qrefresh or the like!
184 # commit/qrefresh or the like!
185
185
186 # Make all of the pathnames absolute.
186 # Make all of the pathnames absolute.
187 newfiles = [repo.wjoin(nf) for nf in newfiles]
187 newfiles = [repo.wjoin(nf) for nf in newfiles]
188 return commitfunc(ui, repo, *newfiles, **opts)
188 return commitfunc(ui, repo, *newfiles, **opts)
189 finally:
189 finally:
190 # 5. finally restore backed-up files
190 # 5. finally restore backed-up files
191 try:
191 try:
192 for realname, tmpname in backups.iteritems():
192 for realname, tmpname in backups.iteritems():
193 ui.debug('restoring %r to %r\n' % (tmpname, realname))
193 ui.debug('restoring %r to %r\n' % (tmpname, realname))
194 util.copyfile(tmpname, repo.wjoin(realname))
194 util.copyfile(tmpname, repo.wjoin(realname))
195 # Our calls to copystat() here and above are a
195 # Our calls to copystat() here and above are a
196 # hack to trick any editors that have f open that
196 # hack to trick any editors that have f open that
197 # we haven't modified them.
197 # we haven't modified them.
198 #
198 #
199 # Also note that this is racy, as an editor could
199 # Also note that this is racy, as an editor could
200 # notice the file's mtime before we've finished
200 # notice the file's mtime before we've finished
201 # writing it.
201 # writing it.
202 shutil.copystat(tmpname, repo.wjoin(realname))
202 shutil.copystat(tmpname, repo.wjoin(realname))
203 os.unlink(tmpname)
203 os.unlink(tmpname)
204 if tobackup:
204 if tobackup:
205 os.rmdir(backupdir)
205 os.rmdir(backupdir)
206 except OSError:
206 except OSError:
207 pass
207 pass
208
208
209 return commit(ui, repo, recordfunc, pats, opts)
209 return commit(ui, repo, recordfunc, pats, opts)
210
210
211 def findpossible(cmd, table, strict=False):
211 def findpossible(cmd, table, strict=False):
212 """
212 """
213 Return cmd -> (aliases, command table entry)
213 Return cmd -> (aliases, command table entry)
214 for each matching command.
214 for each matching command.
215 Return debug commands (or their aliases) only if no normal command matches.
215 Return debug commands (or their aliases) only if no normal command matches.
216 """
216 """
217 choice = {}
217 choice = {}
218 debugchoice = {}
218 debugchoice = {}
219
219
220 if cmd in table:
220 if cmd in table:
221 # short-circuit exact matches, "log" alias beats "^log|history"
221 # short-circuit exact matches, "log" alias beats "^log|history"
222 keys = [cmd]
222 keys = [cmd]
223 else:
223 else:
224 keys = table.keys()
224 keys = table.keys()
225
225
226 allcmds = []
226 allcmds = []
227 for e in keys:
227 for e in keys:
228 aliases = parsealiases(e)
228 aliases = parsealiases(e)
229 allcmds.extend(aliases)
229 allcmds.extend(aliases)
230 found = None
230 found = None
231 if cmd in aliases:
231 if cmd in aliases:
232 found = cmd
232 found = cmd
233 elif not strict:
233 elif not strict:
234 for a in aliases:
234 for a in aliases:
235 if a.startswith(cmd):
235 if a.startswith(cmd):
236 found = a
236 found = a
237 break
237 break
238 if found is not None:
238 if found is not None:
239 if aliases[0].startswith("debug") or found.startswith("debug"):
239 if aliases[0].startswith("debug") or found.startswith("debug"):
240 debugchoice[found] = (aliases, table[e])
240 debugchoice[found] = (aliases, table[e])
241 else:
241 else:
242 choice[found] = (aliases, table[e])
242 choice[found] = (aliases, table[e])
243
243
244 if not choice and debugchoice:
244 if not choice and debugchoice:
245 choice = debugchoice
245 choice = debugchoice
246
246
247 return choice, allcmds
247 return choice, allcmds
248
248
249 def findcmd(cmd, table, strict=True):
249 def findcmd(cmd, table, strict=True):
250 """Return (aliases, command table entry) for command string."""
250 """Return (aliases, command table entry) for command string."""
251 choice, allcmds = findpossible(cmd, table, strict)
251 choice, allcmds = findpossible(cmd, table, strict)
252
252
253 if cmd in choice:
253 if cmd in choice:
254 return choice[cmd]
254 return choice[cmd]
255
255
256 if len(choice) > 1:
256 if len(choice) > 1:
257 clist = choice.keys()
257 clist = choice.keys()
258 clist.sort()
258 clist.sort()
259 raise error.AmbiguousCommand(cmd, clist)
259 raise error.AmbiguousCommand(cmd, clist)
260
260
261 if choice:
261 if choice:
262 return choice.values()[0]
262 return choice.values()[0]
263
263
264 raise error.UnknownCommand(cmd, allcmds)
264 raise error.UnknownCommand(cmd, allcmds)
265
265
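# Hypothetical lookup sketch: with strict=False an unambiguous prefix is
# accepted, e.g.
#
#     from mercurial import commands
#     aliases, entry = findcmd('stat', commands.table, strict=False)
#     # aliases[0] would be the canonical name ('status'), and entry the
#     # table entry registered for it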
266 def findrepo(p):
266 def findrepo(p):
267 while not os.path.isdir(os.path.join(p, ".hg")):
267 while not os.path.isdir(os.path.join(p, ".hg")):
268 oldp, p = p, os.path.dirname(p)
268 oldp, p = p, os.path.dirname(p)
269 if p == oldp:
269 if p == oldp:
270 return None
270 return None
271
271
272 return p
272 return p
273
273
274 def bailifchanged(repo, merge=True):
274 def bailifchanged(repo, merge=True):
275 if merge and repo.dirstate.p2() != nullid:
275 if merge and repo.dirstate.p2() != nullid:
276 raise util.Abort(_('outstanding uncommitted merge'))
276 raise util.Abort(_('outstanding uncommitted merge'))
277 modified, added, removed, deleted = repo.status()[:4]
277 modified, added, removed, deleted = repo.status()[:4]
278 if modified or added or removed or deleted:
278 if modified or added or removed or deleted:
279 raise util.Abort(_('uncommitted changes'))
279 raise util.Abort(_('uncommitted changes'))
280 ctx = repo[None]
280 ctx = repo[None]
281 for s in sorted(ctx.substate):
281 for s in sorted(ctx.substate):
282 ctx.sub(s).bailifchanged()
282 ctx.sub(s).bailifchanged()
283
283
284 def logmessage(ui, opts):
284 def logmessage(ui, opts):
285 """ get the log message according to -m and -l option """
285 """ get the log message according to -m and -l option """
286 message = opts.get('message')
286 message = opts.get('message')
287 logfile = opts.get('logfile')
287 logfile = opts.get('logfile')
288
288
289 if message and logfile:
289 if message and logfile:
290 raise util.Abort(_('options --message and --logfile are mutually '
290 raise util.Abort(_('options --message and --logfile are mutually '
291 'exclusive'))
291 'exclusive'))
292 if not message and logfile:
292 if not message and logfile:
293 try:
293 try:
294 if logfile == '-':
294 if logfile == '-':
295 message = ui.fin.read()
295 message = ui.fin.read()
296 else:
296 else:
297 message = '\n'.join(util.readfile(logfile).splitlines())
297 message = '\n'.join(util.readfile(logfile).splitlines())
298 except IOError, inst:
298 except IOError, inst:
299 raise util.Abort(_("can't read commit message '%s': %s") %
299 raise util.Abort(_("can't read commit message '%s': %s") %
300 (logfile, inst.strerror))
300 (logfile, inst.strerror))
301 return message
301 return message
302
302
303 def mergeeditform(ctxorbool, baseformname):
303 def mergeeditform(ctxorbool, baseformname):
304 """return appropriate editform name (referencing a committemplate)
304 """return appropriate editform name (referencing a committemplate)
305
305
306 'ctxorbool' is either a ctx to be committed, or a bool indicating whether
306 'ctxorbool' is either a ctx to be committed, or a bool indicating whether
307 a merge is being committed.
307 a merge is being committed.
308
308
309 This returns baseformname with '.merge' appended if it is a merge,
309 This returns baseformname with '.merge' appended if it is a merge,
310 otherwise '.normal' is appended.
310 otherwise '.normal' is appended.
311 """
311 """
312 if isinstance(ctxorbool, bool):
312 if isinstance(ctxorbool, bool):
313 if ctxorbool:
313 if ctxorbool:
314 return baseformname + ".merge"
314 return baseformname + ".merge"
315 elif 1 < len(ctxorbool.parents()):
315 elif 1 < len(ctxorbool.parents()):
316 return baseformname + ".merge"
316 return baseformname + ".merge"
317
317
318 return baseformname + ".normal"
318 return baseformname + ".normal"
319
319
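# Examples, following the docstring above:
#
#     mergeeditform(True, 'import')        =>  'import.merge'
#     mergeeditform(False, 'import')       =>  'import.normal'
#     mergeeditform(repo[None], 'commit')  =>  'commit.merge' only when the
#                                              working context has two parents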
320 def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
320 def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
321 editform='', **opts):
321 editform='', **opts):
322 """get appropriate commit message editor according to '--edit' option
322 """get appropriate commit message editor according to '--edit' option
323
323
324 'finishdesc' is a function to be called with edited commit message
324 'finishdesc' is a function to be called with edited commit message
325 (= 'description' of the new changeset) just after editing, but
325 (= 'description' of the new changeset) just after editing, but
326 before checking emptiness. It should return the actual text to be
326 before checking emptiness. It should return the actual text to be
327 stored into history. This allows changing the description before
327 stored into history. This allows changing the description before
328 storing.
328 storing.
329
329
330 'extramsg' is an extra message to be shown in the editor instead of
330 'extramsg' is an extra message to be shown in the editor instead of
331 'Leave message empty to abort commit' line. 'HG: ' prefix and EOL
331 'Leave message empty to abort commit' line. 'HG: ' prefix and EOL
332 are automatically added.
332 are automatically added.
333
333
334 'editform' is a dot-separated list of names, to distinguish
334 'editform' is a dot-separated list of names, to distinguish
335 the purpose of commit text editing.
335 the purpose of commit text editing.
336
336
337 'getcommiteditor' returns 'commitforceeditor' regardless of
337 'getcommiteditor' returns 'commitforceeditor' regardless of
338 'edit', if one of 'finishdesc' or 'extramsg' is specified, because
338 'edit', if one of 'finishdesc' or 'extramsg' is specified, because
339 they are specifically for use in MQ.
339 they are specifically for use in MQ.
340 """
340 """
341 if edit or finishdesc or extramsg:
341 if edit or finishdesc or extramsg:
342 return lambda r, c, s: commitforceeditor(r, c, s,
342 return lambda r, c, s: commitforceeditor(r, c, s,
343 finishdesc=finishdesc,
343 finishdesc=finishdesc,
344 extramsg=extramsg,
344 extramsg=extramsg,
345 editform=editform)
345 editform=editform)
346 elif editform:
346 elif editform:
347 return lambda r, c, s: commiteditor(r, c, s, editform=editform)
347 return lambda r, c, s: commiteditor(r, c, s, editform=editform)
348 else:
348 else:
349 return commiteditor
349 return commiteditor
350
350
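# Hypothetical caller sketch ('m', 'message', etc. are illustrative names):
#
#     editor = getcommiteditor(edit=opts.get('edit'), editform='commit.normal')
#     node = repo.commit(message, opts.get('user'), opts.get('date'),
#                        match=m, editor=editor)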
351 def loglimit(opts):
351 def loglimit(opts):
352 """get the log limit according to option -l/--limit"""
352 """get the log limit according to option -l/--limit"""
353 limit = opts.get('limit')
353 limit = opts.get('limit')
354 if limit:
354 if limit:
355 try:
355 try:
356 limit = int(limit)
356 limit = int(limit)
357 except ValueError:
357 except ValueError:
358 raise util.Abort(_('limit must be a positive integer'))
358 raise util.Abort(_('limit must be a positive integer'))
359 if limit <= 0:
359 if limit <= 0:
360 raise util.Abort(_('limit must be positive'))
360 raise util.Abort(_('limit must be positive'))
361 else:
361 else:
362 limit = None
362 limit = None
363 return limit
363 return limit
364
364
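# Examples of the parsing above:
#
#     loglimit({'limit': '10'})  =>  10
#     loglimit({'limit': ''})    =>  None   (no limit requested)
#     loglimit({'limit': '0'})   =>  raises util.Abort('limit must be positive')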
365 def makefilename(repo, pat, node, desc=None,
365 def makefilename(repo, pat, node, desc=None,
366 total=None, seqno=None, revwidth=None, pathname=None):
366 total=None, seqno=None, revwidth=None, pathname=None):
367 node_expander = {
367 node_expander = {
368 'H': lambda: hex(node),
368 'H': lambda: hex(node),
369 'R': lambda: str(repo.changelog.rev(node)),
369 'R': lambda: str(repo.changelog.rev(node)),
370 'h': lambda: short(node),
370 'h': lambda: short(node),
371 'm': lambda: re.sub('[^\w]', '_', str(desc))
371 'm': lambda: re.sub('[^\w]', '_', str(desc))
372 }
372 }
373 expander = {
373 expander = {
374 '%': lambda: '%',
374 '%': lambda: '%',
375 'b': lambda: os.path.basename(repo.root),
375 'b': lambda: os.path.basename(repo.root),
376 }
376 }
377
377
378 try:
378 try:
379 if node:
379 if node:
380 expander.update(node_expander)
380 expander.update(node_expander)
381 if node:
381 if node:
382 expander['r'] = (lambda:
382 expander['r'] = (lambda:
383 str(repo.changelog.rev(node)).zfill(revwidth or 0))
383 str(repo.changelog.rev(node)).zfill(revwidth or 0))
384 if total is not None:
384 if total is not None:
385 expander['N'] = lambda: str(total)
385 expander['N'] = lambda: str(total)
386 if seqno is not None:
386 if seqno is not None:
387 expander['n'] = lambda: str(seqno)
387 expander['n'] = lambda: str(seqno)
388 if total is not None and seqno is not None:
388 if total is not None and seqno is not None:
389 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
389 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
390 if pathname is not None:
390 if pathname is not None:
391 expander['s'] = lambda: os.path.basename(pathname)
391 expander['s'] = lambda: os.path.basename(pathname)
392 expander['d'] = lambda: os.path.dirname(pathname) or '.'
392 expander['d'] = lambda: os.path.dirname(pathname) or '.'
393 expander['p'] = lambda: pathname
393 expander['p'] = lambda: pathname
394
394
395 newname = []
395 newname = []
396 patlen = len(pat)
396 patlen = len(pat)
397 i = 0
397 i = 0
398 while i < patlen:
398 while i < patlen:
399 c = pat[i]
399 c = pat[i]
400 if c == '%':
400 if c == '%':
401 i += 1
401 i += 1
402 c = pat[i]
402 c = pat[i]
403 c = expander[c]()
403 c = expander[c]()
404 newname.append(c)
404 newname.append(c)
405 i += 1
405 i += 1
406 return ''.join(newname)
406 return ''.join(newname)
407 except KeyError, inst:
407 except KeyError, inst:
408 raise util.Abort(_("invalid format spec '%%%s' in output filename") %
408 raise util.Abort(_("invalid format spec '%%%s' in output filename") %
409 inst.args[0])
409 inst.args[0])
410
410
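# Hypothetical expansion sketch, for a changeset whose short hash is
# 1234567890ab and whose revision number is 42:
#
#     makefilename(repo, 'hg-%h.patch', node)          =>  'hg-1234567890ab.patch'
#     makefilename(repo, 'r%R-%n-of-%N.patch', node,
#                  seqno=3, total=10)                  =>  'r42-03-of-10.patch'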
411 def makefileobj(repo, pat, node=None, desc=None, total=None,
411 def makefileobj(repo, pat, node=None, desc=None, total=None,
412 seqno=None, revwidth=None, mode='wb', modemap=None,
412 seqno=None, revwidth=None, mode='wb', modemap=None,
413 pathname=None):
413 pathname=None):
414
414
415 writable = mode not in ('r', 'rb')
415 writable = mode not in ('r', 'rb')
416
416
417 if not pat or pat == '-':
417 if not pat or pat == '-':
418 if writable:
418 if writable:
419 fp = repo.ui.fout
419 fp = repo.ui.fout
420 else:
420 else:
421 fp = repo.ui.fin
421 fp = repo.ui.fin
422 if util.safehasattr(fp, 'fileno'):
422 if util.safehasattr(fp, 'fileno'):
423 return os.fdopen(os.dup(fp.fileno()), mode)
423 return os.fdopen(os.dup(fp.fileno()), mode)
424 else:
424 else:
425 # if this fp can't be duped properly, return
425 # if this fp can't be duped properly, return
426 # a dummy object that can be closed
426 # a dummy object that can be closed
427 class wrappedfileobj(object):
427 class wrappedfileobj(object):
428 noop = lambda x: None
428 noop = lambda x: None
429 def __init__(self, f):
429 def __init__(self, f):
430 self.f = f
430 self.f = f
431 def __getattr__(self, attr):
431 def __getattr__(self, attr):
432 if attr == 'close':
432 if attr == 'close':
433 return self.noop
433 return self.noop
434 else:
434 else:
435 return getattr(self.f, attr)
435 return getattr(self.f, attr)
436
436
437 return wrappedfileobj(fp)
437 return wrappedfileobj(fp)
438 if util.safehasattr(pat, 'write') and writable:
438 if util.safehasattr(pat, 'write') and writable:
439 return pat
439 return pat
440 if util.safehasattr(pat, 'read') and 'r' in mode:
440 if util.safehasattr(pat, 'read') and 'r' in mode:
441 return pat
441 return pat
442 fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
442 fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
443 if modemap is not None:
443 if modemap is not None:
444 mode = modemap.get(fn, mode)
444 mode = modemap.get(fn, mode)
445 if mode == 'wb':
445 if mode == 'wb':
446 modemap[fn] = 'ab'
446 modemap[fn] = 'ab'
447 return open(fn, mode)
447 return open(fn, mode)
448
448
449 def openrevlog(repo, cmd, file_, opts):
449 def openrevlog(repo, cmd, file_, opts):
450 """opens the changelog, manifest, a filelog or a given revlog"""
450 """opens the changelog, manifest, a filelog or a given revlog"""
451 cl = opts['changelog']
451 cl = opts['changelog']
452 mf = opts['manifest']
452 mf = opts['manifest']
453 msg = None
453 msg = None
454 if cl and mf:
454 if cl and mf:
455 msg = _('cannot specify --changelog and --manifest at the same time')
455 msg = _('cannot specify --changelog and --manifest at the same time')
456 elif cl or mf:
456 elif cl or mf:
457 if file_:
457 if file_:
458 msg = _('cannot specify filename with --changelog or --manifest')
458 msg = _('cannot specify filename with --changelog or --manifest')
459 elif not repo:
459 elif not repo:
460 msg = _('cannot specify --changelog or --manifest '
460 msg = _('cannot specify --changelog or --manifest '
461 'without a repository')
461 'without a repository')
462 if msg:
462 if msg:
463 raise util.Abort(msg)
463 raise util.Abort(msg)
464
464
465 r = None
465 r = None
466 if repo:
466 if repo:
467 if cl:
467 if cl:
468 r = repo.unfiltered().changelog
468 r = repo.unfiltered().changelog
469 elif mf:
469 elif mf:
470 r = repo.manifest
470 r = repo.manifest
471 elif file_:
471 elif file_:
472 filelog = repo.file(file_)
472 filelog = repo.file(file_)
473 if len(filelog):
473 if len(filelog):
474 r = filelog
474 r = filelog
475 if not r:
475 if not r:
476 if not file_:
476 if not file_:
477 raise error.CommandError(cmd, _('invalid arguments'))
477 raise error.CommandError(cmd, _('invalid arguments'))
478 if not os.path.isfile(file_):
478 if not os.path.isfile(file_):
479 raise util.Abort(_("revlog '%s' not found") % file_)
479 raise util.Abort(_("revlog '%s' not found") % file_)
480 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False),
480 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False),
481 file_[:-2] + ".i")
481 file_[:-2] + ".i")
482 return r
482 return r
483
483
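# Hypothetical call sketch (mirroring how debug commands pass their flags):
#
#     r = openrevlog(repo, 'debugdata', 'foo.py',
#                    {'changelog': False, 'manifest': False})
#     # r is repo.file('foo.py') when that filelog has revisions; otherwise
#     # the named revlog index is opened directly from the current directory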
484 def copy(ui, repo, pats, opts, rename=False):
484 def copy(ui, repo, pats, opts, rename=False):
485 # called with the repo lock held
485 # called with the repo lock held
486 #
486 #
487 # hgsep => pathname that uses "/" to separate directories
487 # hgsep => pathname that uses "/" to separate directories
488 # ossep => pathname that uses os.sep to separate directories
488 # ossep => pathname that uses os.sep to separate directories
489 cwd = repo.getcwd()
489 cwd = repo.getcwd()
490 targets = {}
490 targets = {}
491 after = opts.get("after")
491 after = opts.get("after")
492 dryrun = opts.get("dry_run")
492 dryrun = opts.get("dry_run")
493 wctx = repo[None]
493 wctx = repo[None]
494
494
495 def walkpat(pat):
495 def walkpat(pat):
496 srcs = []
496 srcs = []
497 if after:
497 if after:
498 badstates = '?'
498 badstates = '?'
499 else:
499 else:
500 badstates = '?r'
500 badstates = '?r'
501 m = scmutil.match(repo[None], [pat], opts, globbed=True)
501 m = scmutil.match(repo[None], [pat], opts, globbed=True)
502 for abs in repo.walk(m):
502 for abs in repo.walk(m):
503 state = repo.dirstate[abs]
503 state = repo.dirstate[abs]
504 rel = m.rel(abs)
504 rel = m.rel(abs)
505 exact = m.exact(abs)
505 exact = m.exact(abs)
506 if state in badstates:
506 if state in badstates:
507 if exact and state == '?':
507 if exact and state == '?':
508 ui.warn(_('%s: not copying - file is not managed\n') % rel)
508 ui.warn(_('%s: not copying - file is not managed\n') % rel)
509 if exact and state == 'r':
509 if exact and state == 'r':
510 ui.warn(_('%s: not copying - file has been marked for'
510 ui.warn(_('%s: not copying - file has been marked for'
511 ' remove\n') % rel)
511 ' remove\n') % rel)
512 continue
512 continue
513 # abs: hgsep
513 # abs: hgsep
514 # rel: ossep
514 # rel: ossep
515 srcs.append((abs, rel, exact))
515 srcs.append((abs, rel, exact))
516 return srcs
516 return srcs
517
517
518 # abssrc: hgsep
518 # abssrc: hgsep
519 # relsrc: ossep
519 # relsrc: ossep
520 # otarget: ossep
520 # otarget: ossep
521 def copyfile(abssrc, relsrc, otarget, exact):
521 def copyfile(abssrc, relsrc, otarget, exact):
522 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
522 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
523 if '/' in abstarget:
523 if '/' in abstarget:
524 # We cannot normalize abstarget itself, this would prevent
524 # We cannot normalize abstarget itself, this would prevent
525 # case only renames, like a => A.
525 # case only renames, like a => A.
526 abspath, absname = abstarget.rsplit('/', 1)
526 abspath, absname = abstarget.rsplit('/', 1)
527 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
527 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
528 reltarget = repo.pathto(abstarget, cwd)
528 reltarget = repo.pathto(abstarget, cwd)
529 target = repo.wjoin(abstarget)
529 target = repo.wjoin(abstarget)
530 src = repo.wjoin(abssrc)
530 src = repo.wjoin(abssrc)
531 state = repo.dirstate[abstarget]
531 state = repo.dirstate[abstarget]
532
532
533 scmutil.checkportable(ui, abstarget)
533 scmutil.checkportable(ui, abstarget)
534
534
535 # check for collisions
535 # check for collisions
536 prevsrc = targets.get(abstarget)
536 prevsrc = targets.get(abstarget)
537 if prevsrc is not None:
537 if prevsrc is not None:
538 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
538 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
539 (reltarget, repo.pathto(abssrc, cwd),
539 (reltarget, repo.pathto(abssrc, cwd),
540 repo.pathto(prevsrc, cwd)))
540 repo.pathto(prevsrc, cwd)))
541 return
541 return
542
542
543 # check for overwrites
543 # check for overwrites
544 exists = os.path.lexists(target)
544 exists = os.path.lexists(target)
545 samefile = False
545 samefile = False
546 if exists and abssrc != abstarget:
546 if exists and abssrc != abstarget:
547 if (repo.dirstate.normalize(abssrc) ==
547 if (repo.dirstate.normalize(abssrc) ==
548 repo.dirstate.normalize(abstarget)):
548 repo.dirstate.normalize(abstarget)):
549 if not rename:
549 if not rename:
550 ui.warn(_("%s: can't copy - same file\n") % reltarget)
550 ui.warn(_("%s: can't copy - same file\n") % reltarget)
551 return
551 return
552 exists = False
552 exists = False
553 samefile = True
553 samefile = True
554
554
555 if not after and exists or after and state in 'mn':
555 if not after and exists or after and state in 'mn':
556 if not opts['force']:
556 if not opts['force']:
557 ui.warn(_('%s: not overwriting - file exists\n') %
557 ui.warn(_('%s: not overwriting - file exists\n') %
558 reltarget)
558 reltarget)
559 return
559 return
560
560
561 if after:
561 if after:
562 if not exists:
562 if not exists:
563 if rename:
563 if rename:
564 ui.warn(_('%s: not recording move - %s does not exist\n') %
564 ui.warn(_('%s: not recording move - %s does not exist\n') %
565 (relsrc, reltarget))
565 (relsrc, reltarget))
566 else:
566 else:
567 ui.warn(_('%s: not recording copy - %s does not exist\n') %
567 ui.warn(_('%s: not recording copy - %s does not exist\n') %
568 (relsrc, reltarget))
568 (relsrc, reltarget))
569 return
569 return
570 elif not dryrun:
570 elif not dryrun:
571 try:
571 try:
572 if exists:
572 if exists:
573 os.unlink(target)
573 os.unlink(target)
574 targetdir = os.path.dirname(target) or '.'
574 targetdir = os.path.dirname(target) or '.'
575 if not os.path.isdir(targetdir):
575 if not os.path.isdir(targetdir):
576 os.makedirs(targetdir)
576 os.makedirs(targetdir)
577 if samefile:
577 if samefile:
578 tmp = target + "~hgrename"
578 tmp = target + "~hgrename"
579 os.rename(src, tmp)
579 os.rename(src, tmp)
580 os.rename(tmp, target)
580 os.rename(tmp, target)
581 else:
581 else:
582 util.copyfile(src, target)
582 util.copyfile(src, target)
583 srcexists = True
583 srcexists = True
584 except IOError, inst:
584 except IOError, inst:
585 if inst.errno == errno.ENOENT:
585 if inst.errno == errno.ENOENT:
586 ui.warn(_('%s: deleted in working directory\n') % relsrc)
586 ui.warn(_('%s: deleted in working directory\n') % relsrc)
587 srcexists = False
587 srcexists = False
588 else:
588 else:
589 ui.warn(_('%s: cannot copy - %s\n') %
589 ui.warn(_('%s: cannot copy - %s\n') %
590 (relsrc, inst.strerror))
590 (relsrc, inst.strerror))
591 return True # report a failure
591 return True # report a failure
592
592
593 if ui.verbose or not exact:
593 if ui.verbose or not exact:
594 if rename:
594 if rename:
595 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
595 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
596 else:
596 else:
597 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
597 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
598
598
599 targets[abstarget] = abssrc
599 targets[abstarget] = abssrc
600
600
601 # fix up dirstate
601 # fix up dirstate
602 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
602 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
603 dryrun=dryrun, cwd=cwd)
603 dryrun=dryrun, cwd=cwd)
604 if rename and not dryrun:
604 if rename and not dryrun:
605 if not after and srcexists and not samefile:
605 if not after and srcexists and not samefile:
606 util.unlinkpath(repo.wjoin(abssrc))
606 util.unlinkpath(repo.wjoin(abssrc))
607 wctx.forget([abssrc])
607 wctx.forget([abssrc])
608
608
609 # pat: ossep
609 # pat: ossep
610 # dest: ossep
610 # dest: ossep
611 # srcs: list of (hgsep, hgsep, ossep, bool)
611 # srcs: list of (hgsep, hgsep, ossep, bool)
612 # return: function that takes hgsep and returns ossep
612 # return: function that takes hgsep and returns ossep
613 def targetpathfn(pat, dest, srcs):
613 def targetpathfn(pat, dest, srcs):
614 if os.path.isdir(pat):
614 if os.path.isdir(pat):
615 abspfx = pathutil.canonpath(repo.root, cwd, pat)
615 abspfx = pathutil.canonpath(repo.root, cwd, pat)
616 abspfx = util.localpath(abspfx)
616 abspfx = util.localpath(abspfx)
617 if destdirexists:
617 if destdirexists:
618 striplen = len(os.path.split(abspfx)[0])
618 striplen = len(os.path.split(abspfx)[0])
619 else:
619 else:
620 striplen = len(abspfx)
620 striplen = len(abspfx)
621 if striplen:
621 if striplen:
622 striplen += len(os.sep)
622 striplen += len(os.sep)
623 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
623 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
624 elif destdirexists:
624 elif destdirexists:
625 res = lambda p: os.path.join(dest,
625 res = lambda p: os.path.join(dest,
626 os.path.basename(util.localpath(p)))
626 os.path.basename(util.localpath(p)))
627 else:
627 else:
628 res = lambda p: dest
628 res = lambda p: dest
629 return res
629 return res
630
630
631 # pat: ossep
631 # pat: ossep
632 # dest: ossep
632 # dest: ossep
633 # srcs: list of (hgsep, hgsep, ossep, bool)
633 # srcs: list of (hgsep, hgsep, ossep, bool)
634 # return: function that takes hgsep and returns ossep
634 # return: function that takes hgsep and returns ossep
635 def targetpathafterfn(pat, dest, srcs):
635 def targetpathafterfn(pat, dest, srcs):
636 if matchmod.patkind(pat):
636 if matchmod.patkind(pat):
637 # a mercurial pattern
637 # a mercurial pattern
638 res = lambda p: os.path.join(dest,
638 res = lambda p: os.path.join(dest,
639 os.path.basename(util.localpath(p)))
639 os.path.basename(util.localpath(p)))
640 else:
640 else:
641 abspfx = pathutil.canonpath(repo.root, cwd, pat)
641 abspfx = pathutil.canonpath(repo.root, cwd, pat)
642 if len(abspfx) < len(srcs[0][0]):
642 if len(abspfx) < len(srcs[0][0]):
643 # A directory. Either the target path contains the last
643 # A directory. Either the target path contains the last
644 # component of the source path or it does not.
644 # component of the source path or it does not.
645 def evalpath(striplen):
645 def evalpath(striplen):
646 score = 0
646 score = 0
647 for s in srcs:
647 for s in srcs:
648 t = os.path.join(dest, util.localpath(s[0])[striplen:])
648 t = os.path.join(dest, util.localpath(s[0])[striplen:])
649 if os.path.lexists(t):
649 if os.path.lexists(t):
650 score += 1
650 score += 1
651 return score
651 return score
652
652
653 abspfx = util.localpath(abspfx)
653 abspfx = util.localpath(abspfx)
654 striplen = len(abspfx)
654 striplen = len(abspfx)
655 if striplen:
655 if striplen:
656 striplen += len(os.sep)
656 striplen += len(os.sep)
657 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
657 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
658 score = evalpath(striplen)
658 score = evalpath(striplen)
659 striplen1 = len(os.path.split(abspfx)[0])
659 striplen1 = len(os.path.split(abspfx)[0])
660 if striplen1:
660 if striplen1:
661 striplen1 += len(os.sep)
661 striplen1 += len(os.sep)
662 if evalpath(striplen1) > score:
662 if evalpath(striplen1) > score:
663 striplen = striplen1
663 striplen = striplen1
664 res = lambda p: os.path.join(dest,
664 res = lambda p: os.path.join(dest,
665 util.localpath(p)[striplen:])
665 util.localpath(p)[striplen:])
666 else:
666 else:
667 # a file
667 # a file
668 if destdirexists:
668 if destdirexists:
669 res = lambda p: os.path.join(dest,
669 res = lambda p: os.path.join(dest,
670 os.path.basename(util.localpath(p)))
670 os.path.basename(util.localpath(p)))
671 else:
671 else:
672 res = lambda p: dest
672 res = lambda p: dest
673 return res
673 return res
674
674
675 pats = scmutil.expandpats(pats)
675 pats = scmutil.expandpats(pats)
676 if not pats:
676 if not pats:
677 raise util.Abort(_('no source or destination specified'))
677 raise util.Abort(_('no source or destination specified'))
678 if len(pats) == 1:
678 if len(pats) == 1:
679 raise util.Abort(_('no destination specified'))
679 raise util.Abort(_('no destination specified'))
680 dest = pats.pop()
680 dest = pats.pop()
681 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
681 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
682 if not destdirexists:
682 if not destdirexists:
683 if len(pats) > 1 or matchmod.patkind(pats[0]):
683 if len(pats) > 1 or matchmod.patkind(pats[0]):
684 raise util.Abort(_('with multiple sources, destination must be an '
684 raise util.Abort(_('with multiple sources, destination must be an '
685 'existing directory'))
685 'existing directory'))
686 if util.endswithsep(dest):
686 if util.endswithsep(dest):
687 raise util.Abort(_('destination %s is not a directory') % dest)
687 raise util.Abort(_('destination %s is not a directory') % dest)
688
688
689 tfn = targetpathfn
689 tfn = targetpathfn
690 if after:
690 if after:
691 tfn = targetpathafterfn
691 tfn = targetpathafterfn
692 copylist = []
692 copylist = []
693 for pat in pats:
693 for pat in pats:
694 srcs = walkpat(pat)
694 srcs = walkpat(pat)
695 if not srcs:
695 if not srcs:
696 continue
696 continue
697 copylist.append((tfn(pat, dest, srcs), srcs))
697 copylist.append((tfn(pat, dest, srcs), srcs))
698 if not copylist:
698 if not copylist:
699 raise util.Abort(_('no files to copy'))
699 raise util.Abort(_('no files to copy'))
700
700
701 errors = 0
701 errors = 0
702 for targetpath, srcs in copylist:
702 for targetpath, srcs in copylist:
703 for abssrc, relsrc, exact in srcs:
703 for abssrc, relsrc, exact in srcs:
704 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
704 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
705 errors += 1
705 errors += 1
706
706
707 if errors:
707 if errors:
708 ui.warn(_('(consider using --after)\n'))
708 ui.warn(_('(consider using --after)\n'))
709
709
710 return errors != 0
710 return errors != 0
711
711
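# Hypothetical call sketch (paths are illustrative; the caller is expected to
# hold the repo wlock, as noted above):
#
#     errs = copy(ui, repo, ['src/a.py', 'src/b.py', 'lib/'], opts, rename=True)
#     # returns True when at least one copy/move failed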
712 def service(opts, parentfn=None, initfn=None, runfn=None, logfile=None,
712 def service(opts, parentfn=None, initfn=None, runfn=None, logfile=None,
713 runargs=None, appendpid=False):
713 runargs=None, appendpid=False):
714 '''Run a command as a service.'''
714 '''Run a command as a service.'''
715
715
716 def writepid(pid):
716 def writepid(pid):
717 if opts['pid_file']:
717 if opts['pid_file']:
718 if appendpid:
718 if appendpid:
719 mode = 'a'
719 mode = 'a'
720 else:
720 else:
721 mode = 'w'
721 mode = 'w'
722 fp = open(opts['pid_file'], mode)
722 fp = open(opts['pid_file'], mode)
723 fp.write(str(pid) + '\n')
723 fp.write(str(pid) + '\n')
724 fp.close()
724 fp.close()
725
725
726 if opts['daemon'] and not opts['daemon_pipefds']:
726 if opts['daemon'] and not opts['daemon_pipefds']:
727 # Signal child process startup with file removal
727 # Signal child process startup with file removal
728 lockfd, lockpath = tempfile.mkstemp(prefix='hg-service-')
728 lockfd, lockpath = tempfile.mkstemp(prefix='hg-service-')
729 os.close(lockfd)
729 os.close(lockfd)
730 try:
730 try:
731 if not runargs:
731 if not runargs:
732 runargs = util.hgcmd() + sys.argv[1:]
732 runargs = util.hgcmd() + sys.argv[1:]
733 runargs.append('--daemon-pipefds=%s' % lockpath)
733 runargs.append('--daemon-pipefds=%s' % lockpath)
734 # Don't pass --cwd to the child process, because we've already
734 # Don't pass --cwd to the child process, because we've already
735 # changed directory.
735 # changed directory.
736 for i in xrange(1, len(runargs)):
736 for i in xrange(1, len(runargs)):
737 if runargs[i].startswith('--cwd='):
737 if runargs[i].startswith('--cwd='):
738 del runargs[i]
738 del runargs[i]
739 break
739 break
740 elif runargs[i].startswith('--cwd'):
740 elif runargs[i].startswith('--cwd'):
741 del runargs[i:i + 2]
741 del runargs[i:i + 2]
742 break
742 break
743 def condfn():
743 def condfn():
744 return not os.path.exists(lockpath)
744 return not os.path.exists(lockpath)
745 pid = util.rundetached(runargs, condfn)
745 pid = util.rundetached(runargs, condfn)
746 if pid < 0:
746 if pid < 0:
747 raise util.Abort(_('child process failed to start'))
747 raise util.Abort(_('child process failed to start'))
748 writepid(pid)
748 writepid(pid)
749 finally:
749 finally:
750 try:
750 try:
751 os.unlink(lockpath)
751 os.unlink(lockpath)
752 except OSError, e:
752 except OSError, e:
753 if e.errno != errno.ENOENT:
753 if e.errno != errno.ENOENT:
754 raise
754 raise
755 if parentfn:
755 if parentfn:
756 return parentfn(pid)
756 return parentfn(pid)
757 else:
757 else:
758 return
758 return
759
759
760 if initfn:
760 if initfn:
761 initfn()
761 initfn()
762
762
763 if not opts['daemon']:
763 if not opts['daemon']:
764 writepid(os.getpid())
764 writepid(os.getpid())
765
765
766 if opts['daemon_pipefds']:
766 if opts['daemon_pipefds']:
767 lockpath = opts['daemon_pipefds']
767 lockpath = opts['daemon_pipefds']
768 try:
768 try:
769 os.setsid()
769 os.setsid()
770 except AttributeError:
770 except AttributeError:
771 pass
771 pass
772 os.unlink(lockpath)
772 os.unlink(lockpath)
773 util.hidewindow()
773 util.hidewindow()
774 sys.stdout.flush()
774 sys.stdout.flush()
775 sys.stderr.flush()
775 sys.stderr.flush()
776
776
777 nullfd = os.open(os.devnull, os.O_RDWR)
777 nullfd = os.open(os.devnull, os.O_RDWR)
778 logfilefd = nullfd
778 logfilefd = nullfd
779 if logfile:
779 if logfile:
780 logfilefd = os.open(logfile, os.O_RDWR | os.O_CREAT | os.O_APPEND)
780 logfilefd = os.open(logfile, os.O_RDWR | os.O_CREAT | os.O_APPEND)
781 os.dup2(nullfd, 0)
781 os.dup2(nullfd, 0)
782 os.dup2(logfilefd, 1)
782 os.dup2(logfilefd, 1)
783 os.dup2(logfilefd, 2)
783 os.dup2(logfilefd, 2)
784 if nullfd not in (0, 1, 2):
784 if nullfd not in (0, 1, 2):
785 os.close(nullfd)
785 os.close(nullfd)
786 if logfile and logfilefd not in (0, 1, 2):
786 if logfile and logfilefd not in (0, 1, 2):
787 os.close(logfilefd)
787 os.close(logfilefd)
788
788
789 if runfn:
789 if runfn:
790 return runfn()
790 return runfn()
791
791
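# Hypothetical daemonizing sketch ('setupfn' and 'mainloop' are illustrative
# callables supplied by the command):
#
#     opts = {'daemon': True, 'daemon_pipefds': '', 'pid_file': 'hg.pid'}
#     service(opts, initfn=setupfn, runfn=mainloop, logfile='serve.log')
#     # the parent re-execs itself with --daemon-pipefds and waits for the
#     # lock file to disappear before writing the child's pid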
792 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
792 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
793 """Utility function used by commands.import to import a single patch
793 """Utility function used by commands.import to import a single patch
794
794
795 This function is explicitly defined here to help the evolve extension to
795 This function is explicitly defined here to help the evolve extension to
796 wrap this part of the import logic.
796 wrap this part of the import logic.
797
797
798 The API is currently a bit ugly because it is a simple code translation from
798 The API is currently a bit ugly because it is a simple code translation from
799 the import command. Feel free to make it better.
799 the import command. Feel free to make it better.
800
800
801 :hunk: a patch (as a binary string)
801 :hunk: a patch (as a binary string)
802 :parents: nodes that will be the parents of the created commit
802 :parents: nodes that will be the parents of the created commit
803 :opts: the full dict of options passed to the import command
803 :opts: the full dict of options passed to the import command
804 :msgs: list to save the commit message to.
804 :msgs: list to save the commit message to.
805 (used in case we need to save it when failing)
805 (used in case we need to save it when failing)
806 :updatefunc: a function that updates a repo to a given node
806 :updatefunc: a function that updates a repo to a given node
807 updatefunc(<repo>, <node>)
807 updatefunc(<repo>, <node>)
808 """
808 """
809 tmpname, message, user, date, branch, nodeid, p1, p2 = \
809 tmpname, message, user, date, branch, nodeid, p1, p2 = \
810 patch.extract(ui, hunk)
810 patch.extract(ui, hunk)
811
811
812 update = not opts.get('bypass')
812 update = not opts.get('bypass')
813 strip = opts["strip"]
813 strip = opts["strip"]
814 prefix = opts["prefix"]
814 prefix = opts["prefix"]
815 sim = float(opts.get('similarity') or 0)
815 sim = float(opts.get('similarity') or 0)
816 if not tmpname:
816 if not tmpname:
817 return (None, None, False)
817 return (None, None, False)
818 msg = _('applied to working directory')
818 msg = _('applied to working directory')
819
819
820 rejects = False
820 rejects = False
821
821
822 try:
822 try:
823 cmdline_message = logmessage(ui, opts)
823 cmdline_message = logmessage(ui, opts)
824 if cmdline_message:
824 if cmdline_message:
825 # pickup the cmdline msg
825 # pickup the cmdline msg
826 message = cmdline_message
826 message = cmdline_message
827 elif message:
827 elif message:
828 # pickup the patch msg
828 # pickup the patch msg
829 message = message.strip()
829 message = message.strip()
830 else:
830 else:
831 # launch the editor
831 # launch the editor
832 message = None
832 message = None
833 ui.debug('message:\n%s\n' % message)
833 ui.debug('message:\n%s\n' % message)
834
834
835 if len(parents) == 1:
835 if len(parents) == 1:
836 parents.append(repo[nullid])
836 parents.append(repo[nullid])
837 if opts.get('exact'):
837 if opts.get('exact'):
838 if not nodeid or not p1:
838 if not nodeid or not p1:
839 raise util.Abort(_('not a Mercurial patch'))
839 raise util.Abort(_('not a Mercurial patch'))
840 p1 = repo[p1]
840 p1 = repo[p1]
841 p2 = repo[p2 or nullid]
841 p2 = repo[p2 or nullid]
842 elif p2:
842 elif p2:
843 try:
843 try:
844 p1 = repo[p1]
844 p1 = repo[p1]
845 p2 = repo[p2]
845 p2 = repo[p2]
846 # Without any options, consider p2 only if the
846 # Without any options, consider p2 only if the
847 # patch is being applied on top of the recorded
847 # patch is being applied on top of the recorded
848 # first parent.
848 # first parent.
849 if p1 != parents[0]:
849 if p1 != parents[0]:
850 p1 = parents[0]
850 p1 = parents[0]
851 p2 = repo[nullid]
851 p2 = repo[nullid]
852 except error.RepoError:
852 except error.RepoError:
853 p1, p2 = parents
853 p1, p2 = parents
854 if p2.node() == nullid:
854 if p2.node() == nullid:
855 ui.warn(_("warning: import the patch as a normal revision\n"
855 ui.warn(_("warning: import the patch as a normal revision\n"
856 "(use --exact to import the patch as a merge)\n"))
856 "(use --exact to import the patch as a merge)\n"))
857 else:
857 else:
858 p1, p2 = parents
858 p1, p2 = parents
859
859
860 n = None
860 n = None
861 if update:
861 if update:
862 repo.dirstate.beginparentchange()
862 repo.dirstate.beginparentchange()
863 if p1 != parents[0]:
863 if p1 != parents[0]:
864 updatefunc(repo, p1.node())
864 updatefunc(repo, p1.node())
865 if p2 != parents[1]:
865 if p2 != parents[1]:
866 repo.setparents(p1.node(), p2.node())
866 repo.setparents(p1.node(), p2.node())
867
867
868 if opts.get('exact') or opts.get('import_branch'):
868 if opts.get('exact') or opts.get('import_branch'):
869 repo.dirstate.setbranch(branch or 'default')
869 repo.dirstate.setbranch(branch or 'default')
870
870
871 partial = opts.get('partial', False)
871 partial = opts.get('partial', False)
872 files = set()
872 files = set()
873 try:
873 try:
874 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
874 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
875 files=files, eolmode=None, similarity=sim / 100.0)
875 files=files, eolmode=None, similarity=sim / 100.0)
876 except patch.PatchError, e:
876 except patch.PatchError, e:
877 if not partial:
877 if not partial:
878 raise util.Abort(str(e))
878 raise util.Abort(str(e))
879 if partial:
879 if partial:
880 rejects = True
880 rejects = True
881
881
882 files = list(files)
882 files = list(files)
883 if opts.get('no_commit'):
883 if opts.get('no_commit'):
884 if message:
884 if message:
885 msgs.append(message)
885 msgs.append(message)
886 else:
886 else:
887 if opts.get('exact') or p2:
887 if opts.get('exact') or p2:
888 # If you got here, you either use --force and know what
888 # If you got here, you either use --force and know what
889 # you are doing or used --exact or a merge patch while
889 # you are doing or used --exact or a merge patch while
890 # being updated to its first parent.
890 # being updated to its first parent.
891 m = None
891 m = None
892 else:
892 else:
893 m = scmutil.matchfiles(repo, files or [])
893 m = scmutil.matchfiles(repo, files or [])
894 editform = mergeeditform(repo[None], 'import.normal')
894 editform = mergeeditform(repo[None], 'import.normal')
895 if opts.get('exact'):
895 if opts.get('exact'):
896 editor = None
896 editor = None
897 else:
897 else:
898 editor = getcommiteditor(editform=editform, **opts)
898 editor = getcommiteditor(editform=editform, **opts)
899 n = repo.commit(message, opts.get('user') or user,
899 n = repo.commit(message, opts.get('user') or user,
900 opts.get('date') or date, match=m,
900 opts.get('date') or date, match=m,
901 editor=editor, force=partial)
901 editor=editor, force=partial)
902 repo.dirstate.endparentchange()
902 repo.dirstate.endparentchange()
903 else:
903 else:
904 if opts.get('exact') or opts.get('import_branch'):
904 if opts.get('exact') or opts.get('import_branch'):
905 branch = branch or 'default'
905 branch = branch or 'default'
906 else:
906 else:
907 branch = p1.branch()
907 branch = p1.branch()
908 store = patch.filestore()
908 store = patch.filestore()
909 try:
909 try:
910 files = set()
910 files = set()
911 try:
911 try:
912 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
912 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
913 files, eolmode=None)
913 files, eolmode=None)
914 except patch.PatchError, e:
914 except patch.PatchError, e:
915 raise util.Abort(str(e))
915 raise util.Abort(str(e))
916 if opts.get('exact'):
916 if opts.get('exact'):
917 editor = None
917 editor = None
918 else:
918 else:
919 editor = getcommiteditor(editform='import.bypass')
919 editor = getcommiteditor(editform='import.bypass')
920 memctx = context.makememctx(repo, (p1.node(), p2.node()),
920 memctx = context.makememctx(repo, (p1.node(), p2.node()),
921 message,
921 message,
922 opts.get('user') or user,
922 opts.get('user') or user,
923 opts.get('date') or date,
923 opts.get('date') or date,
924 branch, files, store,
924 branch, files, store,
925 editor=editor)
925 editor=editor)
926 n = memctx.commit()
926 n = memctx.commit()
927 finally:
927 finally:
928 store.close()
928 store.close()
929 if opts.get('exact') and opts.get('no_commit'):
929 if opts.get('exact') and opts.get('no_commit'):
930 # --exact with --no-commit is still useful in that it does merge
930 # --exact with --no-commit is still useful in that it does merge
931 # and branch bits
931 # and branch bits
932 ui.warn(_("warning: can't check exact import with --no-commit\n"))
932 ui.warn(_("warning: can't check exact import with --no-commit\n"))
933 elif opts.get('exact') and hex(n) != nodeid:
933 elif opts.get('exact') and hex(n) != nodeid:
934 raise util.Abort(_('patch is damaged or loses information'))
934 raise util.Abort(_('patch is damaged or loses information'))
935 if n:
935 if n:
936 # i18n: refers to a short changeset id
936 # i18n: refers to a short changeset id
937 msg = _('created %s') % short(n)
937 msg = _('created %s') % short(n)
938 return (msg, n, rejects)
938 return (msg, n, rejects)
939 finally:
939 finally:
940 os.unlink(tmpname)
940 os.unlink(tmpname)
941
941
942 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
942 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
943 opts=None):
943 opts=None):
944 '''export changesets as hg patches.'''
944 '''export changesets as hg patches.'''
945
945
946 total = len(revs)
946 total = len(revs)
947 revwidth = max([len(str(rev)) for rev in revs])
947 revwidth = max([len(str(rev)) for rev in revs])
948 filemode = {}
948 filemode = {}
949
949
950 def single(rev, seqno, fp):
950 def single(rev, seqno, fp):
951 ctx = repo[rev]
951 ctx = repo[rev]
952 node = ctx.node()
952 node = ctx.node()
953 parents = [p.node() for p in ctx.parents() if p]
953 parents = [p.node() for p in ctx.parents() if p]
954 branch = ctx.branch()
954 branch = ctx.branch()
955 if switch_parent:
955 if switch_parent:
956 parents.reverse()
956 parents.reverse()
957
957
958 if parents:
958 if parents:
959 prev = parents[0]
959 prev = parents[0]
960 else:
960 else:
961 prev = nullid
961 prev = nullid
962
962
963 shouldclose = False
963 shouldclose = False
964 if not fp and len(template) > 0:
964 if not fp and len(template) > 0:
965 desc_lines = ctx.description().rstrip().split('\n')
965 desc_lines = ctx.description().rstrip().split('\n')
966 desc = desc_lines[0] #Commit always has a first line.
966 desc = desc_lines[0] #Commit always has a first line.
967 fp = makefileobj(repo, template, node, desc=desc, total=total,
967 fp = makefileobj(repo, template, node, desc=desc, total=total,
968 seqno=seqno, revwidth=revwidth, mode='wb',
968 seqno=seqno, revwidth=revwidth, mode='wb',
969 modemap=filemode)
969 modemap=filemode)
970 if fp != template:
970 if fp != template:
971 shouldclose = True
971 shouldclose = True
972 if fp and fp != sys.stdout and util.safehasattr(fp, 'name'):
972 if fp and fp != sys.stdout and util.safehasattr(fp, 'name'):
973 repo.ui.note("%s\n" % fp.name)
973 repo.ui.note("%s\n" % fp.name)
974
974
975 if not fp:
975 if not fp:
976 write = repo.ui.write
976 write = repo.ui.write
977 else:
977 else:
978 def write(s, **kw):
978 def write(s, **kw):
979 fp.write(s)
979 fp.write(s)
980
980
981 write("# HG changeset patch\n")
981 write("# HG changeset patch\n")
982 write("# User %s\n" % ctx.user())
982 write("# User %s\n" % ctx.user())
983 write("# Date %d %d\n" % ctx.date())
983 write("# Date %d %d\n" % ctx.date())
984 write("# %s\n" % util.datestr(ctx.date()))
984 write("# %s\n" % util.datestr(ctx.date()))
985 if branch and branch != 'default':
985 if branch and branch != 'default':
986 write("# Branch %s\n" % branch)
986 write("# Branch %s\n" % branch)
987 write("# Node ID %s\n" % hex(node))
987 write("# Node ID %s\n" % hex(node))
988 write("# Parent %s\n" % hex(prev))
988 write("# Parent %s\n" % hex(prev))
989 if len(parents) > 1:
989 if len(parents) > 1:
990 write("# Parent %s\n" % hex(parents[1]))
990 write("# Parent %s\n" % hex(parents[1]))
991 write(ctx.description().rstrip())
991 write(ctx.description().rstrip())
992 write("\n\n")
992 write("\n\n")
993
993
994 for chunk, label in patch.diffui(repo, prev, node, opts=opts):
994 for chunk, label in patch.diffui(repo, prev, node, opts=opts):
995 write(chunk, label=label)
995 write(chunk, label=label)
996
996
997 if shouldclose:
997 if shouldclose:
998 fp.close()
998 fp.close()
999
999
1000 for seqno, rev in enumerate(revs):
1000 for seqno, rev in enumerate(revs):
1001 single(rev, seqno + 1, fp)
1001 single(rev, seqno + 1, fp)
1002
1002
1003 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
1003 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
1004 changes=None, stat=False, fp=None, prefix='',
1004 changes=None, stat=False, fp=None, prefix='',
1005 root='', listsubrepos=False):
1005 root='', listsubrepos=False):
1006 '''show diff or diffstat.'''
1006 '''show diff or diffstat.'''
1007 if fp is None:
1007 if fp is None:
1008 write = ui.write
1008 write = ui.write
1009 else:
1009 else:
1010 def write(s, **kw):
1010 def write(s, **kw):
1011 fp.write(s)
1011 fp.write(s)
1012
1012
1013 if root:
1013 if root:
1014 relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
1014 relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
1015 else:
1015 else:
1016 relroot = ''
1016 relroot = ''
1017 if relroot != '':
1017 if relroot != '':
1018 # XXX relative roots currently don't work if the root is within a
1018 # XXX relative roots currently don't work if the root is within a
1019 # subrepo
1019 # subrepo
1020 uirelroot = match.uipath(relroot)
1020 uirelroot = match.uipath(relroot)
1021 relroot += '/'
1021 relroot += '/'
1022 for matchroot in match.files():
1022 for matchroot in match.files():
1023 if not matchroot.startswith(relroot):
1023 if not matchroot.startswith(relroot):
1024 ui.warn(_('warning: %s not inside relative root %s\n') % (
1024 ui.warn(_('warning: %s not inside relative root %s\n') % (
1025 match.uipath(matchroot), uirelroot))
1025 match.uipath(matchroot), uirelroot))
1026
1026
1027 if stat:
1027 if stat:
1028 diffopts = diffopts.copy(context=0)
1028 diffopts = diffopts.copy(context=0)
1029 width = 80
1029 width = 80
1030 if not ui.plain():
1030 if not ui.plain():
1031 width = ui.termwidth()
1031 width = ui.termwidth()
1032 chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
1032 chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
1033 prefix=prefix, relroot=relroot)
1033 prefix=prefix, relroot=relroot)
1034 for chunk, label in patch.diffstatui(util.iterlines(chunks),
1034 for chunk, label in patch.diffstatui(util.iterlines(chunks),
1035 width=width,
1035 width=width,
1036 git=diffopts.git):
1036 git=diffopts.git):
1037 write(chunk, label=label)
1037 write(chunk, label=label)
1038 else:
1038 else:
1039 for chunk, label in patch.diffui(repo, node1, node2, match,
1039 for chunk, label in patch.diffui(repo, node1, node2, match,
1040 changes, diffopts, prefix=prefix,
1040 changes, diffopts, prefix=prefix,
1041 relroot=relroot):
1041 relroot=relroot):
1042 write(chunk, label=label)
1042 write(chunk, label=label)
1043
1043
1044 if listsubrepos:
1044 if listsubrepos:
1045 ctx1 = repo[node1]
1045 ctx1 = repo[node1]
1046 ctx2 = repo[node2]
1046 ctx2 = repo[node2]
1047 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
1047 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
1048 tempnode2 = node2
1048 tempnode2 = node2
1049 try:
1049 try:
1050 if node2 is not None:
1050 if node2 is not None:
1051 tempnode2 = ctx2.substate[subpath][1]
1051 tempnode2 = ctx2.substate[subpath][1]
1052 except KeyError:
1052 except KeyError:
1053 # A subrepo that existed in node1 was deleted between node1 and
1053 # A subrepo that existed in node1 was deleted between node1 and
1054 # node2 (inclusive). Thus, ctx2's substate won't contain that
1054 # node2 (inclusive). Thus, ctx2's substate won't contain that
1055 # subpath. The best we can do is to ignore it.
1055 # subpath. The best we can do is to ignore it.
1056 tempnode2 = None
1056 tempnode2 = None
1057 submatch = matchmod.narrowmatcher(subpath, match)
1057 submatch = matchmod.narrowmatcher(subpath, match)
1058 sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
1058 sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
1059 stat=stat, fp=fp, prefix=prefix)
1059 stat=stat, fp=fp, prefix=prefix)
1060
1060
1061 class changeset_printer(object):
1061 class changeset_printer(object):
1062 '''show changeset information when templating not requested.'''
1062 '''show changeset information when templating not requested.'''
1063
1063
1064 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1064 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1065 self.ui = ui
1065 self.ui = ui
1066 self.repo = repo
1066 self.repo = repo
1067 self.buffered = buffered
1067 self.buffered = buffered
1068 self.matchfn = matchfn
1068 self.matchfn = matchfn
1069 self.diffopts = diffopts
1069 self.diffopts = diffopts
1070 self.header = {}
1070 self.header = {}
1071 self.hunk = {}
1071 self.hunk = {}
1072 self.lastheader = None
1072 self.lastheader = None
1073 self.footer = None
1073 self.footer = None
1074
1074
1075 def flush(self, rev):
1075 def flush(self, rev):
1076 if rev in self.header:
1076 if rev in self.header:
1077 h = self.header[rev]
1077 h = self.header[rev]
1078 if h != self.lastheader:
1078 if h != self.lastheader:
1079 self.lastheader = h
1079 self.lastheader = h
1080 self.ui.write(h)
1080 self.ui.write(h)
1081 del self.header[rev]
1081 del self.header[rev]
1082 if rev in self.hunk:
1082 if rev in self.hunk:
1083 self.ui.write(self.hunk[rev])
1083 self.ui.write(self.hunk[rev])
1084 del self.hunk[rev]
1084 del self.hunk[rev]
1085 return 1
1085 return 1
1086 return 0
1086 return 0
1087
1087
1088 def close(self):
1088 def close(self):
1089 if self.footer:
1089 if self.footer:
1090 self.ui.write(self.footer)
1090 self.ui.write(self.footer)
1091
1091
1092 def show(self, ctx, copies=None, matchfn=None, **props):
1092 def show(self, ctx, copies=None, matchfn=None, **props):
1093 if self.buffered:
1093 if self.buffered:
1094 self.ui.pushbuffer()
1094 self.ui.pushbuffer()
1095 self._show(ctx, copies, matchfn, props)
1095 self._show(ctx, copies, matchfn, props)
1096 self.hunk[ctx.rev()] = self.ui.popbuffer(labeled=True)
1096 self.hunk[ctx.rev()] = self.ui.popbuffer(labeled=True)
1097 else:
1097 else:
1098 self._show(ctx, copies, matchfn, props)
1098 self._show(ctx, copies, matchfn, props)
1099
1099
1100 def _show(self, ctx, copies, matchfn, props):
1100 def _show(self, ctx, copies, matchfn, props):
1101 '''show a single changeset or file revision'''
1101 '''show a single changeset or file revision'''
1102 changenode = ctx.node()
1102 changenode = ctx.node()
1103 rev = ctx.rev()
1103 rev = ctx.rev()
1104 if self.ui.debugflag:
1104 if self.ui.debugflag:
1105 hexfunc = hex
1105 hexfunc = hex
1106 else:
1106 else:
1107 hexfunc = short
1107 hexfunc = short
1108 if rev is None:
1108 if rev is None:
1109 pctx = ctx.p1()
1109 pctx = ctx.p1()
1110 revnode = (pctx.rev(), hexfunc(pctx.node()) + '+')
1110 revnode = (pctx.rev(), hexfunc(pctx.node()) + '+')
1111 else:
1111 else:
1112 revnode = (rev, hexfunc(changenode))
1112 revnode = (rev, hexfunc(changenode))
1113
1113
1114 if self.ui.quiet:
1114 if self.ui.quiet:
1115 self.ui.write("%d:%s\n" % revnode, label='log.node')
1115 self.ui.write("%d:%s\n" % revnode, label='log.node')
1116 return
1116 return
1117
1117
1118 date = util.datestr(ctx.date())
1118 date = util.datestr(ctx.date())
1119
1119
1120 # i18n: column positioning for "hg log"
1120 # i18n: column positioning for "hg log"
1121 self.ui.write(_("changeset: %d:%s\n") % revnode,
1121 self.ui.write(_("changeset: %d:%s\n") % revnode,
1122 label='log.changeset changeset.%s' % ctx.phasestr())
1122 label='log.changeset changeset.%s' % ctx.phasestr())
1123
1123
1124 # branches are shown first before any other names due to backwards
1124 # branches are shown first before any other names due to backwards
1125 # compatibility
1125 # compatibility
1126 branch = ctx.branch()
1126 branch = ctx.branch()
1127 # don't show the default branch name
1127 # don't show the default branch name
1128 if branch != 'default':
1128 if branch != 'default':
1129 # i18n: column positioning for "hg log"
1129 # i18n: column positioning for "hg log"
1130 self.ui.write(_("branch: %s\n") % branch,
1130 self.ui.write(_("branch: %s\n") % branch,
1131 label='log.branch')
1131 label='log.branch')
1132
1132
1133 for name, ns in self.repo.names.iteritems():
1133 for name, ns in self.repo.names.iteritems():
1134 # branches has special logic already handled above, so here we just
1134 # branches has special logic already handled above, so here we just
1135 # skip it
1135 # skip it
1136 if name == 'branches':
1136 if name == 'branches':
1137 continue
1137 continue
1138 # we will use the templatename as the color name since those two
1138 # we will use the templatename as the color name since those two
1139 # should be the same
1139 # should be the same
1140 for name in ns.names(self.repo, changenode):
1140 for name in ns.names(self.repo, changenode):
1141 self.ui.write(ns.logfmt % name,
1141 self.ui.write(ns.logfmt % name,
1142 label='log.%s' % ns.colorname)
1142 label='log.%s' % ns.colorname)
1143 if self.ui.debugflag:
1143 if self.ui.debugflag:
1144 # i18n: column positioning for "hg log"
1144 # i18n: column positioning for "hg log"
1145 self.ui.write(_("phase: %s\n") % ctx.phasestr(),
1145 self.ui.write(_("phase: %s\n") % ctx.phasestr(),
1146 label='log.phase')
1146 label='log.phase')
1147 for pctx in self._meaningful_parentrevs(ctx):
1147 for pctx in self._meaningful_parentrevs(ctx):
1148 label = 'log.parent changeset.%s' % pctx.phasestr()
1148 label = 'log.parent changeset.%s' % pctx.phasestr()
1149 # i18n: column positioning for "hg log"
1149 # i18n: column positioning for "hg log"
1150 self.ui.write(_("parent: %d:%s\n")
1150 self.ui.write(_("parent: %d:%s\n")
1151 % (pctx.rev(), hexfunc(pctx.node())),
1151 % (pctx.rev(), hexfunc(pctx.node())),
1152 label=label)
1152 label=label)
1153
1153
1154 if self.ui.debugflag and rev is not None:
1154 if self.ui.debugflag and rev is not None:
1155 mnode = ctx.manifestnode()
1155 mnode = ctx.manifestnode()
1156 # i18n: column positioning for "hg log"
1156 # i18n: column positioning for "hg log"
1157 self.ui.write(_("manifest: %d:%s\n") %
1157 self.ui.write(_("manifest: %d:%s\n") %
1158 (self.repo.manifest.rev(mnode), hex(mnode)),
1158 (self.repo.manifest.rev(mnode), hex(mnode)),
1159 label='ui.debug log.manifest')
1159 label='ui.debug log.manifest')
1160 # i18n: column positioning for "hg log"
1160 # i18n: column positioning for "hg log"
1161 self.ui.write(_("user: %s\n") % ctx.user(),
1161 self.ui.write(_("user: %s\n") % ctx.user(),
1162 label='log.user')
1162 label='log.user')
1163 # i18n: column positioning for "hg log"
1163 # i18n: column positioning for "hg log"
1164 self.ui.write(_("date: %s\n") % date,
1164 self.ui.write(_("date: %s\n") % date,
1165 label='log.date')
1165 label='log.date')
1166
1166
1167 if self.ui.debugflag:
1167 if self.ui.debugflag:
1168 files = ctx.p1().status(ctx)[:3]
1168 files = ctx.p1().status(ctx)[:3]
1169 for key, value in zip([# i18n: column positioning for "hg log"
1169 for key, value in zip([# i18n: column positioning for "hg log"
1170 _("files:"),
1170 _("files:"),
1171 # i18n: column positioning for "hg log"
1171 # i18n: column positioning for "hg log"
1172 _("files+:"),
1172 _("files+:"),
1173 # i18n: column positioning for "hg log"
1173 # i18n: column positioning for "hg log"
1174 _("files-:")], files):
1174 _("files-:")], files):
1175 if value:
1175 if value:
1176 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
1176 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
1177 label='ui.debug log.files')
1177 label='ui.debug log.files')
1178 elif ctx.files() and self.ui.verbose:
1178 elif ctx.files() and self.ui.verbose:
1179 # i18n: column positioning for "hg log"
1179 # i18n: column positioning for "hg log"
1180 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
1180 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
1181 label='ui.note log.files')
1181 label='ui.note log.files')
1182 if copies and self.ui.verbose:
1182 if copies and self.ui.verbose:
1183 copies = ['%s (%s)' % c for c in copies]
1183 copies = ['%s (%s)' % c for c in copies]
1184 # i18n: column positioning for "hg log"
1184 # i18n: column positioning for "hg log"
1185 self.ui.write(_("copies: %s\n") % ' '.join(copies),
1185 self.ui.write(_("copies: %s\n") % ' '.join(copies),
1186 label='ui.note log.copies')
1186 label='ui.note log.copies')
1187
1187
1188 extra = ctx.extra()
1188 extra = ctx.extra()
1189 if extra and self.ui.debugflag:
1189 if extra and self.ui.debugflag:
1190 for key, value in sorted(extra.items()):
1190 for key, value in sorted(extra.items()):
1191 # i18n: column positioning for "hg log"
1191 # i18n: column positioning for "hg log"
1192 self.ui.write(_("extra: %s=%s\n")
1192 self.ui.write(_("extra: %s=%s\n")
1193 % (key, value.encode('string_escape')),
1193 % (key, value.encode('string_escape')),
1194 label='ui.debug log.extra')
1194 label='ui.debug log.extra')
1195
1195
1196 description = ctx.description().strip()
1196 description = ctx.description().strip()
1197 if description:
1197 if description:
1198 if self.ui.verbose:
1198 if self.ui.verbose:
1199 self.ui.write(_("description:\n"),
1199 self.ui.write(_("description:\n"),
1200 label='ui.note log.description')
1200 label='ui.note log.description')
1201 self.ui.write(description,
1201 self.ui.write(description,
1202 label='ui.note log.description')
1202 label='ui.note log.description')
1203 self.ui.write("\n\n")
1203 self.ui.write("\n\n")
1204 else:
1204 else:
1205 # i18n: column positioning for "hg log"
1205 # i18n: column positioning for "hg log"
1206 self.ui.write(_("summary: %s\n") %
1206 self.ui.write(_("summary: %s\n") %
1207 description.splitlines()[0],
1207 description.splitlines()[0],
1208 label='log.summary')
1208 label='log.summary')
1209 self.ui.write("\n")
1209 self.ui.write("\n")
1210
1210
1211 self.showpatch(changenode, matchfn)
1211 self.showpatch(changenode, matchfn)
1212
1212
1213 def showpatch(self, node, matchfn):
1213 def showpatch(self, node, matchfn):
1214 if not matchfn:
1214 if not matchfn:
1215 matchfn = self.matchfn
1215 matchfn = self.matchfn
1216 if matchfn:
1216 if matchfn:
1217 stat = self.diffopts.get('stat')
1217 stat = self.diffopts.get('stat')
1218 diff = self.diffopts.get('patch')
1218 diff = self.diffopts.get('patch')
1219 diffopts = patch.diffallopts(self.ui, self.diffopts)
1219 diffopts = patch.diffallopts(self.ui, self.diffopts)
1220 prev = self.repo.changelog.parents(node)[0]
1220 prev = self.repo.changelog.parents(node)[0]
1221 if stat:
1221 if stat:
1222 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1222 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1223 match=matchfn, stat=True)
1223 match=matchfn, stat=True)
1224 if diff:
1224 if diff:
1225 if stat:
1225 if stat:
1226 self.ui.write("\n")
1226 self.ui.write("\n")
1227 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1227 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1228 match=matchfn, stat=False)
1228 match=matchfn, stat=False)
1229 self.ui.write("\n")
1229 self.ui.write("\n")
1230
1230
1231 def _meaningful_parentrevs(self, ctx):
1231 def _meaningful_parentrevs(self, ctx):
1232 """Return list of meaningful (or all if debug) parentrevs for rev.
1232 """Return list of meaningful (or all if debug) parentrevs for rev.
1233
1233
1234 For merges (two non-nullrev revisions) both parents are meaningful.
1234 For merges (two non-nullrev revisions) both parents are meaningful.
1235 Otherwise the first parent revision is considered meaningful if it
1235 Otherwise the first parent revision is considered meaningful if it
1236 is not the preceding revision.
1236 is not the preceding revision.
1237 """
1237 """
1238 parents = ctx.parents()
1238 parents = ctx.parents()
1239 if len(parents) > 1:
1239 if len(parents) > 1:
1240 return parents
1240 return parents
1241 if self.ui.debugflag:
1241 if self.ui.debugflag:
1242 return [parents[0], self.repo['null']]
1242 return [parents[0], self.repo['null']]
1243 if parents[0].rev() >= scmutil.intrev(self.repo, ctx.rev()) - 1:
1243 if parents[0].rev() >= scmutil.intrev(self.repo, ctx.rev()) - 1:
1244 return []
1244 return []
1245 return parents
1245 return parents
1246
1246
1247 class jsonchangeset(changeset_printer):
1247 class jsonchangeset(changeset_printer):
1248 '''format changeset information.'''
1248 '''format changeset information.'''
1249
1249
1250 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1250 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1251 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1251 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1252 self.cache = {}
1252 self.cache = {}
1253 self._first = True
1253 self._first = True
1254
1254
1255 def close(self):
1255 def close(self):
1256 if not self._first:
1256 if not self._first:
1257 self.ui.write("\n]\n")
1257 self.ui.write("\n]\n")
1258 else:
1258 else:
1259 self.ui.write("[]\n")
1259 self.ui.write("[]\n")
1260
1260
1261 def _show(self, ctx, copies, matchfn, props):
1261 def _show(self, ctx, copies, matchfn, props):
1262 '''show a single changeset or file revision'''
1262 '''show a single changeset or file revision'''
1263 rev = ctx.rev()
1263 rev = ctx.rev()
1264 if rev is None:
1264 if rev is None:
1265 jrev = jnode = 'null'
1265 jrev = jnode = 'null'
1266 else:
1266 else:
1267 jrev = str(rev)
1267 jrev = str(rev)
1268 jnode = '"%s"' % hex(ctx.node())
1268 jnode = '"%s"' % hex(ctx.node())
1269 j = encoding.jsonescape
1269 j = encoding.jsonescape
1270
1270
1271 if self._first:
1271 if self._first:
1272 self.ui.write("[\n {")
1272 self.ui.write("[\n {")
1273 self._first = False
1273 self._first = False
1274 else:
1274 else:
1275 self.ui.write(",\n {")
1275 self.ui.write(",\n {")
1276
1276
1277 if self.ui.quiet:
1277 if self.ui.quiet:
1278 self.ui.write('\n "rev": %s' % jrev)
1278 self.ui.write('\n "rev": %s' % jrev)
1279 self.ui.write(',\n "node": %s' % jnode)
1279 self.ui.write(',\n "node": %s' % jnode)
1280 self.ui.write('\n }')
1280 self.ui.write('\n }')
1281 return
1281 return
1282
1282
1283 self.ui.write('\n "rev": %s' % jrev)
1283 self.ui.write('\n "rev": %s' % jrev)
1284 self.ui.write(',\n "node": %s' % jnode)
1284 self.ui.write(',\n "node": %s' % jnode)
1285 self.ui.write(',\n "branch": "%s"' % j(ctx.branch()))
1285 self.ui.write(',\n "branch": "%s"' % j(ctx.branch()))
1286 self.ui.write(',\n "phase": "%s"' % ctx.phasestr())
1286 self.ui.write(',\n "phase": "%s"' % ctx.phasestr())
1287 self.ui.write(',\n "user": "%s"' % j(ctx.user()))
1287 self.ui.write(',\n "user": "%s"' % j(ctx.user()))
1288 self.ui.write(',\n "date": [%d, %d]' % ctx.date())
1288 self.ui.write(',\n "date": [%d, %d]' % ctx.date())
1289 self.ui.write(',\n "desc": "%s"' % j(ctx.description()))
1289 self.ui.write(',\n "desc": "%s"' % j(ctx.description()))
1290
1290
1291 self.ui.write(',\n "bookmarks": [%s]' %
1291 self.ui.write(',\n "bookmarks": [%s]' %
1292 ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
1292 ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
1293 self.ui.write(',\n "tags": [%s]' %
1293 self.ui.write(',\n "tags": [%s]' %
1294 ", ".join('"%s"' % j(t) for t in ctx.tags()))
1294 ", ".join('"%s"' % j(t) for t in ctx.tags()))
1295 self.ui.write(',\n "parents": [%s]' %
1295 self.ui.write(',\n "parents": [%s]' %
1296 ", ".join('"%s"' % c.hex() for c in ctx.parents()))
1296 ", ".join('"%s"' % c.hex() for c in ctx.parents()))
1297
1297
1298 if self.ui.debugflag:
1298 if self.ui.debugflag:
1299 if rev is None:
1299 if rev is None:
1300 jmanifestnode = 'null'
1300 jmanifestnode = 'null'
1301 else:
1301 else:
1302 jmanifestnode = '"%s"' % hex(ctx.manifestnode())
1302 jmanifestnode = '"%s"' % hex(ctx.manifestnode())
1303 self.ui.write(',\n "manifest": %s' % jmanifestnode)
1303 self.ui.write(',\n "manifest": %s' % jmanifestnode)
1304
1304
1305 self.ui.write(',\n "extra": {%s}' %
1305 self.ui.write(',\n "extra": {%s}' %
1306 ", ".join('"%s": "%s"' % (j(k), j(v))
1306 ", ".join('"%s": "%s"' % (j(k), j(v))
1307 for k, v in ctx.extra().items()))
1307 for k, v in ctx.extra().items()))
1308
1308
1309 files = ctx.p1().status(ctx)
1309 files = ctx.p1().status(ctx)
1310 self.ui.write(',\n "modified": [%s]' %
1310 self.ui.write(',\n "modified": [%s]' %
1311 ", ".join('"%s"' % j(f) for f in files[0]))
1311 ", ".join('"%s"' % j(f) for f in files[0]))
1312 self.ui.write(',\n "added": [%s]' %
1312 self.ui.write(',\n "added": [%s]' %
1313 ", ".join('"%s"' % j(f) for f in files[1]))
1313 ", ".join('"%s"' % j(f) for f in files[1]))
1314 self.ui.write(',\n "removed": [%s]' %
1314 self.ui.write(',\n "removed": [%s]' %
1315 ", ".join('"%s"' % j(f) for f in files[2]))
1315 ", ".join('"%s"' % j(f) for f in files[2]))
1316
1316
1317 elif self.ui.verbose:
1317 elif self.ui.verbose:
1318 self.ui.write(',\n "files": [%s]' %
1318 self.ui.write(',\n "files": [%s]' %
1319 ", ".join('"%s"' % j(f) for f in ctx.files()))
1319 ", ".join('"%s"' % j(f) for f in ctx.files()))
1320
1320
1321 if copies:
1321 if copies:
1322 self.ui.write(',\n "copies": {%s}' %
1322 self.ui.write(',\n "copies": {%s}' %
1323 ", ".join('"%s": "%s"' % (j(k), j(v))
1323 ", ".join('"%s": "%s"' % (j(k), j(v))
1324 for k, v in copies))
1324 for k, v in copies))
1325
1325
1326 matchfn = self.matchfn
1326 matchfn = self.matchfn
1327 if matchfn:
1327 if matchfn:
1328 stat = self.diffopts.get('stat')
1328 stat = self.diffopts.get('stat')
1329 diff = self.diffopts.get('patch')
1329 diff = self.diffopts.get('patch')
1330 diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
1330 diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
1331 node, prev = ctx.node(), ctx.p1().node()
1331 node, prev = ctx.node(), ctx.p1().node()
1332 if stat:
1332 if stat:
1333 self.ui.pushbuffer()
1333 self.ui.pushbuffer()
1334 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1334 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1335 match=matchfn, stat=True)
1335 match=matchfn, stat=True)
1336 self.ui.write(',\n "diffstat": "%s"' % j(self.ui.popbuffer()))
1336 self.ui.write(',\n "diffstat": "%s"' % j(self.ui.popbuffer()))
1337 if diff:
1337 if diff:
1338 self.ui.pushbuffer()
1338 self.ui.pushbuffer()
1339 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1339 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1340 match=matchfn, stat=False)
1340 match=matchfn, stat=False)
1341 self.ui.write(',\n "diff": "%s"' % j(self.ui.popbuffer()))
1341 self.ui.write(',\n "diff": "%s"' % j(self.ui.popbuffer()))
1342
1342
1343 self.ui.write("\n }")
1343 self.ui.write("\n }")
1344
1344
1345 class changeset_templater(changeset_printer):
1345 class changeset_templater(changeset_printer):
1346 '''format changeset information.'''
1346 '''format changeset information.'''
1347
1347
1348 def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered):
1348 def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered):
1349 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1349 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1350 formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
1350 formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
1351 defaulttempl = {
1351 defaulttempl = {
1352 'parent': '{rev}:{node|formatnode} ',
1352 'parent': '{rev}:{node|formatnode} ',
1353 'manifest': '{rev}:{node|formatnode}',
1353 'manifest': '{rev}:{node|formatnode}',
1354 'file_copy': '{name} ({source})',
1354 'file_copy': '{name} ({source})',
1355 'extra': '{key}={value|stringescape}'
1355 'extra': '{key}={value|stringescape}'
1356 }
1356 }
1357 # filecopy is preserved for compatibility reasons
1357 # filecopy is preserved for compatibility reasons
1358 defaulttempl['filecopy'] = defaulttempl['file_copy']
1358 defaulttempl['filecopy'] = defaulttempl['file_copy']
1359 self.t = templater.templater(mapfile, {'formatnode': formatnode},
1359 self.t = templater.templater(mapfile, {'formatnode': formatnode},
1360 cache=defaulttempl)
1360 cache=defaulttempl)
1361 if tmpl:
1361 if tmpl:
1362 self.t.cache['changeset'] = tmpl
1362 self.t.cache['changeset'] = tmpl
1363
1363
1364 self.cache = {}
1364 self.cache = {}
1365
1365
1366 def _show(self, ctx, copies, matchfn, props):
1366 def _show(self, ctx, copies, matchfn, props):
1367 '''show a single changeset or file revision'''
1367 '''show a single changeset or file revision'''
1368
1368
1369 showlist = templatekw.showlist
1369 showlist = templatekw.showlist
1370
1370
1371 # showparents() behaviour depends on ui trace level which
1371 # showparents() behaviour depends on ui trace level which
1372 # causes unexpected behaviours at templating level and makes
1372 # causes unexpected behaviours at templating level and makes
1373 # it harder to extract it in a standalone function. Its
1373 # it harder to extract it in a standalone function. Its
1374 # behaviour cannot be changed so leave it here for now.
1374 # behaviour cannot be changed so leave it here for now.
1375 def showparents(**args):
1375 def showparents(**args):
1376 ctx = args['ctx']
1376 ctx = args['ctx']
1377 parents = [[('rev', p.rev()),
1377 parents = [[('rev', p.rev()),
1378 ('node', p.hex()),
1378 ('node', p.hex()),
1379 ('phase', p.phasestr())]
1379 ('phase', p.phasestr())]
1380 for p in self._meaningful_parentrevs(ctx)]
1380 for p in self._meaningful_parentrevs(ctx)]
1381 return showlist('parent', parents, **args)
1381 return showlist('parent', parents, **args)
1382
1382
1383 props = props.copy()
1383 props = props.copy()
1384 props.update(templatekw.keywords)
1384 props.update(templatekw.keywords)
1385 props['parents'] = showparents
1385 props['parents'] = showparents
1386 props['templ'] = self.t
1386 props['templ'] = self.t
1387 props['ctx'] = ctx
1387 props['ctx'] = ctx
1388 props['repo'] = self.repo
1388 props['repo'] = self.repo
1389 props['revcache'] = {'copies': copies}
1389 props['revcache'] = {'copies': copies}
1390 props['cache'] = self.cache
1390 props['cache'] = self.cache
1391
1391
1392 # find correct templates for current mode
1392 # find correct templates for current mode
1393
1393
1394 tmplmodes = [
1394 tmplmodes = [
1395 (True, None),
1395 (True, None),
1396 (self.ui.verbose, 'verbose'),
1396 (self.ui.verbose, 'verbose'),
1397 (self.ui.quiet, 'quiet'),
1397 (self.ui.quiet, 'quiet'),
1398 (self.ui.debugflag, 'debug'),
1398 (self.ui.debugflag, 'debug'),
1399 ]
1399 ]
1400
1400
1401 types = {'header': '', 'footer':'', 'changeset': 'changeset'}
1401 types = {'header': '', 'footer':'', 'changeset': 'changeset'}
1402 for mode, postfix in tmplmodes:
1402 for mode, postfix in tmplmodes:
1403 for type in types:
1403 for type in types:
1404 cur = postfix and ('%s_%s' % (type, postfix)) or type
1404 cur = postfix and ('%s_%s' % (type, postfix)) or type
1405 if mode and cur in self.t:
1405 if mode and cur in self.t:
1406 types[type] = cur
1406 types[type] = cur
1407
1407
1408 try:
1408 try:
1409
1409
1410 # write header
1410 # write header
1411 if types['header']:
1411 if types['header']:
1412 h = templater.stringify(self.t(types['header'], **props))
1412 h = templater.stringify(self.t(types['header'], **props))
1413 if self.buffered:
1413 if self.buffered:
1414 self.header[ctx.rev()] = h
1414 self.header[ctx.rev()] = h
1415 else:
1415 else:
1416 if self.lastheader != h:
1416 if self.lastheader != h:
1417 self.lastheader = h
1417 self.lastheader = h
1418 self.ui.write(h)
1418 self.ui.write(h)
1419
1419
1420 # write changeset metadata, then patch if requested
1420 # write changeset metadata, then patch if requested
1421 key = types['changeset']
1421 key = types['changeset']
1422 self.ui.write(templater.stringify(self.t(key, **props)))
1422 self.ui.write(templater.stringify(self.t(key, **props)))
1423 self.showpatch(ctx.node(), matchfn)
1423 self.showpatch(ctx.node(), matchfn)
1424
1424
1425 if types['footer']:
1425 if types['footer']:
1426 if not self.footer:
1426 if not self.footer:
1427 self.footer = templater.stringify(self.t(types['footer'],
1427 self.footer = templater.stringify(self.t(types['footer'],
1428 **props))
1428 **props))
1429
1429
1430 except KeyError, inst:
1430 except KeyError, inst:
1431 msg = _("%s: no key named '%s'")
1431 msg = _("%s: no key named '%s'")
1432 raise util.Abort(msg % (self.t.mapfile, inst.args[0]))
1432 raise util.Abort(msg % (self.t.mapfile, inst.args[0]))
1433 except SyntaxError, inst:
1433 except SyntaxError, inst:
1434 raise util.Abort('%s: %s' % (self.t.mapfile, inst.args[0]))
1434 raise util.Abort('%s: %s' % (self.t.mapfile, inst.args[0]))
1435
1435
1436 def gettemplate(ui, tmpl, style):
1436 def gettemplate(ui, tmpl, style):
1437 """
1437 """
1438 Find the template matching the given template spec or style.
1438 Find the template matching the given template spec or style.
1439 """
1439 """
1440
1440
1441 # ui settings
1441 # ui settings
1442 if not tmpl and not style: # template are stronger than style
1442 if not tmpl and not style: # template are stronger than style
1443 tmpl = ui.config('ui', 'logtemplate')
1443 tmpl = ui.config('ui', 'logtemplate')
1444 if tmpl:
1444 if tmpl:
1445 try:
1445 try:
1446 tmpl = templater.parsestring(tmpl)
1446 tmpl = templater.parsestring(tmpl)
1447 except SyntaxError:
1447 except SyntaxError:
1448 tmpl = templater.parsestring(tmpl, quoted=False)
1448 tmpl = templater.parsestring(tmpl, quoted=False)
1449 return tmpl, None
1449 return tmpl, None
1450 else:
1450 else:
1451 style = util.expandpath(ui.config('ui', 'style', ''))
1451 style = util.expandpath(ui.config('ui', 'style', ''))
1452
1452
1453 if not tmpl and style:
1453 if not tmpl and style:
1454 mapfile = style
1454 mapfile = style
1455 if not os.path.split(mapfile)[0]:
1455 if not os.path.split(mapfile)[0]:
1456 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1456 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1457 or templater.templatepath(mapfile))
1457 or templater.templatepath(mapfile))
1458 if mapname:
1458 if mapname:
1459 mapfile = mapname
1459 mapfile = mapname
1460 return None, mapfile
1460 return None, mapfile
1461
1461
1462 if not tmpl:
1462 if not tmpl:
1463 return None, None
1463 return None, None
1464
1464
1465 # looks like a literal template?
1465 # looks like a literal template?
1466 if '{' in tmpl:
1466 if '{' in tmpl:
1467 return tmpl, None
1467 return tmpl, None
1468
1468
1469 # perhaps a stock style?
1469 # perhaps a stock style?
1470 if not os.path.split(tmpl)[0]:
1470 if not os.path.split(tmpl)[0]:
1471 mapname = (templater.templatepath('map-cmdline.' + tmpl)
1471 mapname = (templater.templatepath('map-cmdline.' + tmpl)
1472 or templater.templatepath(tmpl))
1472 or templater.templatepath(tmpl))
1473 if mapname and os.path.isfile(mapname):
1473 if mapname and os.path.isfile(mapname):
1474 return None, mapname
1474 return None, mapname
1475
1475
1476 # perhaps it's a reference to [templates]
1476 # perhaps it's a reference to [templates]
1477 t = ui.config('templates', tmpl)
1477 t = ui.config('templates', tmpl)
1478 if t:
1478 if t:
1479 try:
1479 try:
1480 tmpl = templater.parsestring(t)
1480 tmpl = templater.parsestring(t)
1481 except SyntaxError:
1481 except SyntaxError:
1482 tmpl = templater.parsestring(t, quoted=False)
1482 tmpl = templater.parsestring(t, quoted=False)
1483 return tmpl, None
1483 return tmpl, None
1484
1484
1485 if tmpl == 'list':
1485 if tmpl == 'list':
1486 ui.write(_("available styles: %s\n") % templater.stylelist())
1486 ui.write(_("available styles: %s\n") % templater.stylelist())
1487 raise util.Abort(_("specify a template"))
1487 raise util.Abort(_("specify a template"))
1488
1488
1489 # perhaps it's a path to a map or a template
1489 # perhaps it's a path to a map or a template
1490 if ('/' in tmpl or '\\' in tmpl) and os.path.isfile(tmpl):
1490 if ('/' in tmpl or '\\' in tmpl) and os.path.isfile(tmpl):
1491 # is it a mapfile for a style?
1491 # is it a mapfile for a style?
1492 if os.path.basename(tmpl).startswith("map-"):
1492 if os.path.basename(tmpl).startswith("map-"):
1493 return None, os.path.realpath(tmpl)
1493 return None, os.path.realpath(tmpl)
1494 tmpl = open(tmpl).read()
1494 tmpl = open(tmpl).read()
1495 return tmpl, None
1495 return tmpl, None
1496
1496
1497 # constant string?
1497 # constant string?
1498 return tmpl, None
1498 return tmpl, None
1499
1499
1500 def show_changeset(ui, repo, opts, buffered=False):
1500 def show_changeset(ui, repo, opts, buffered=False):
1501 """show one changeset using template or regular display.
1501 """show one changeset using template or regular display.
1502
1502
1503 Display format will be the first non-empty hit of:
1503 Display format will be the first non-empty hit of:
1504 1. option 'template'
1504 1. option 'template'
1505 2. option 'style'
1505 2. option 'style'
1506 3. [ui] setting 'logtemplate'
1506 3. [ui] setting 'logtemplate'
1507 4. [ui] setting 'style'
1507 4. [ui] setting 'style'
1508 If all of these values are either the unset or the empty string,
1508 If all of these values are either the unset or the empty string,
1509 regular display via changeset_printer() is done.
1509 regular display via changeset_printer() is done.
1510 """
1510 """
1511 # options
1511 # options
1512 matchfn = None
1512 matchfn = None
1513 if opts.get('patch') or opts.get('stat'):
1513 if opts.get('patch') or opts.get('stat'):
1514 matchfn = scmutil.matchall(repo)
1514 matchfn = scmutil.matchall(repo)
1515
1515
1516 if opts.get('template') == 'json':
1516 if opts.get('template') == 'json':
1517 return jsonchangeset(ui, repo, matchfn, opts, buffered)
1517 return jsonchangeset(ui, repo, matchfn, opts, buffered)
1518
1518
1519 tmpl, mapfile = gettemplate(ui, opts.get('template'), opts.get('style'))
1519 tmpl, mapfile = gettemplate(ui, opts.get('template'), opts.get('style'))
1520
1520
1521 if not tmpl and not mapfile:
1521 if not tmpl and not mapfile:
1522 return changeset_printer(ui, repo, matchfn, opts, buffered)
1522 return changeset_printer(ui, repo, matchfn, opts, buffered)
1523
1523
1524 try:
1524 try:
1525 t = changeset_templater(ui, repo, matchfn, opts, tmpl, mapfile,
1525 t = changeset_templater(ui, repo, matchfn, opts, tmpl, mapfile,
1526 buffered)
1526 buffered)
1527 except SyntaxError, inst:
1527 except SyntaxError, inst:
1528 raise util.Abort(inst.args[0])
1528 raise util.Abort(inst.args[0])
1529 return t
1529 return t
1530
1530
1531 def showmarker(ui, marker):
1531 def showmarker(ui, marker):
1532 """utility function to display obsolescence marker in a readable way
1532 """utility function to display obsolescence marker in a readable way
1533
1533
1534 To be used by debug function."""
1534 To be used by debug function."""
1535 ui.write(hex(marker.precnode()))
1535 ui.write(hex(marker.precnode()))
1536 for repl in marker.succnodes():
1536 for repl in marker.succnodes():
1537 ui.write(' ')
1537 ui.write(' ')
1538 ui.write(hex(repl))
1538 ui.write(hex(repl))
1539 ui.write(' %X ' % marker.flags())
1539 ui.write(' %X ' % marker.flags())
1540 parents = marker.parentnodes()
1540 parents = marker.parentnodes()
1541 if parents is not None:
1541 if parents is not None:
1542 ui.write('{%s} ' % ', '.join(hex(p) for p in parents))
1542 ui.write('{%s} ' % ', '.join(hex(p) for p in parents))
1543 ui.write('(%s) ' % util.datestr(marker.date()))
1543 ui.write('(%s) ' % util.datestr(marker.date()))
1544 ui.write('{%s}' % (', '.join('%r: %r' % t for t in
1544 ui.write('{%s}' % (', '.join('%r: %r' % t for t in
1545 sorted(marker.metadata().items())
1545 sorted(marker.metadata().items())
1546 if t[0] != 'date')))
1546 if t[0] != 'date')))
1547 ui.write('\n')
1547 ui.write('\n')
1548
1548
1549 def finddate(ui, repo, date):
1549 def finddate(ui, repo, date):
1550 """Find the tipmost changeset that matches the given date spec"""
1550 """Find the tipmost changeset that matches the given date spec"""
1551
1551
1552 df = util.matchdate(date)
1552 df = util.matchdate(date)
1553 m = scmutil.matchall(repo)
1553 m = scmutil.matchall(repo)
1554 results = {}
1554 results = {}
1555
1555
1556 def prep(ctx, fns):
1556 def prep(ctx, fns):
1557 d = ctx.date()
1557 d = ctx.date()
1558 if df(d[0]):
1558 if df(d[0]):
1559 results[ctx.rev()] = d
1559 results[ctx.rev()] = d
1560
1560
1561 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1561 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1562 rev = ctx.rev()
1562 rev = ctx.rev()
1563 if rev in results:
1563 if rev in results:
1564 ui.status(_("found revision %s from %s\n") %
1564 ui.status(_("found revision %s from %s\n") %
1565 (rev, util.datestr(results[rev])))
1565 (rev, util.datestr(results[rev])))
1566 return str(rev)
1566 return str(rev)
1567
1567
1568 raise util.Abort(_("revision matching date not found"))
1568 raise util.Abort(_("revision matching date not found"))
1569
1569
1570 def increasingwindows(windowsize=8, sizelimit=512):
1570 def increasingwindows(windowsize=8, sizelimit=512):
1571 while True:
1571 while True:
1572 yield windowsize
1572 yield windowsize
1573 if windowsize < sizelimit:
1573 if windowsize < sizelimit:
1574 windowsize *= 2
1574 windowsize *= 2
1575
1575
1576 class FileWalkError(Exception):
1576 class FileWalkError(Exception):
1577 pass
1577 pass
1578
1578
1579 def walkfilerevs(repo, match, follow, revs, fncache):
1579 def walkfilerevs(repo, match, follow, revs, fncache):
1580 '''Walks the file history for the matched files.
1580 '''Walks the file history for the matched files.
1581
1581
1582 Returns the changeset revs that are involved in the file history.
1582 Returns the changeset revs that are involved in the file history.
1583
1583
1584 Throws FileWalkError if the file history can't be walked using
1584 Throws FileWalkError if the file history can't be walked using
1585 filelogs alone.
1585 filelogs alone.
1586 '''
1586 '''
1587 wanted = set()
1587 wanted = set()
1588 copies = []
1588 copies = []
1589 minrev, maxrev = min(revs), max(revs)
1589 minrev, maxrev = min(revs), max(revs)
1590 def filerevgen(filelog, last):
1590 def filerevgen(filelog, last):
1591 """
1591 """
1592 Only files, no patterns. Check the history of each file.
1592 Only files, no patterns. Check the history of each file.
1593
1593
1594 Examines filelog entries within minrev, maxrev linkrev range
1594 Examines filelog entries within minrev, maxrev linkrev range
1595 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1595 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1596 tuples in backwards order
1596 tuples in backwards order
1597 """
1597 """
1598 cl_count = len(repo)
1598 cl_count = len(repo)
1599 revs = []
1599 revs = []
1600 for j in xrange(0, last + 1):
1600 for j in xrange(0, last + 1):
1601 linkrev = filelog.linkrev(j)
1601 linkrev = filelog.linkrev(j)
1602 if linkrev < minrev:
1602 if linkrev < minrev:
1603 continue
1603 continue
1604 # only yield rev for which we have the changelog, it can
1604 # only yield rev for which we have the changelog, it can
1605 # happen while doing "hg log" during a pull or commit
1605 # happen while doing "hg log" during a pull or commit
1606 if linkrev >= cl_count:
1606 if linkrev >= cl_count:
1607 break
1607 break
1608
1608
1609 parentlinkrevs = []
1609 parentlinkrevs = []
1610 for p in filelog.parentrevs(j):
1610 for p in filelog.parentrevs(j):
1611 if p != nullrev:
1611 if p != nullrev:
1612 parentlinkrevs.append(filelog.linkrev(p))
1612 parentlinkrevs.append(filelog.linkrev(p))
1613 n = filelog.node(j)
1613 n = filelog.node(j)
1614 revs.append((linkrev, parentlinkrevs,
1614 revs.append((linkrev, parentlinkrevs,
1615 follow and filelog.renamed(n)))
1615 follow and filelog.renamed(n)))
1616
1616
1617 return reversed(revs)
1617 return reversed(revs)
1618 def iterfiles():
1618 def iterfiles():
1619 pctx = repo['.']
1619 pctx = repo['.']
1620 for filename in match.files():
1620 for filename in match.files():
1621 if follow:
1621 if follow:
1622 if filename not in pctx:
1622 if filename not in pctx:
1623 raise util.Abort(_('cannot follow file not in parent '
1623 raise util.Abort(_('cannot follow file not in parent '
1624 'revision: "%s"') % filename)
1624 'revision: "%s"') % filename)
1625 yield filename, pctx[filename].filenode()
1625 yield filename, pctx[filename].filenode()
1626 else:
1626 else:
1627 yield filename, None
1627 yield filename, None
1628 for filename_node in copies:
1628 for filename_node in copies:
1629 yield filename_node
1629 yield filename_node
1630
1630
1631 for file_, node in iterfiles():
1631 for file_, node in iterfiles():
1632 filelog = repo.file(file_)
1632 filelog = repo.file(file_)
1633 if not len(filelog):
1633 if not len(filelog):
1634 if node is None:
1634 if node is None:
1635 # A zero count may be a directory or deleted file, so
1635 # A zero count may be a directory or deleted file, so
1636 # try to find matching entries on the slow path.
1636 # try to find matching entries on the slow path.
1637 if follow:
1637 if follow:
1638 raise util.Abort(
1638 raise util.Abort(
1639 _('cannot follow nonexistent file: "%s"') % file_)
1639 _('cannot follow nonexistent file: "%s"') % file_)
1640 raise FileWalkError("Cannot walk via filelog")
1640 raise FileWalkError("Cannot walk via filelog")
1641 else:
1641 else:
1642 continue
1642 continue
1643
1643
1644 if node is None:
1644 if node is None:
1645 last = len(filelog) - 1
1645 last = len(filelog) - 1
1646 else:
1646 else:
1647 last = filelog.rev(node)
1647 last = filelog.rev(node)
1648
1648
1649 # keep track of all ancestors of the file
1649 # keep track of all ancestors of the file
1650 ancestors = set([filelog.linkrev(last)])
1650 ancestors = set([filelog.linkrev(last)])
1651
1651
1652 # iterate from latest to oldest revision
1652 # iterate from latest to oldest revision
1653 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1653 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1654 if not follow:
1654 if not follow:
1655 if rev > maxrev:
1655 if rev > maxrev:
1656 continue
1656 continue
1657 else:
1657 else:
1658 # Note that last might not be the first interesting
1658 # Note that last might not be the first interesting
1659 # rev to us:
1659 # rev to us:
1660 # if the file has been changed after maxrev, we'll
1660 # if the file has been changed after maxrev, we'll
1661 # have linkrev(last) > maxrev, and we still need
1661 # have linkrev(last) > maxrev, and we still need
1662 # to explore the file graph
1662 # to explore the file graph
1663 if rev not in ancestors:
1663 if rev not in ancestors:
1664 continue
1664 continue
1665 # XXX insert 1327 fix here
1665 # XXX insert 1327 fix here
1666 if flparentlinkrevs:
1666 if flparentlinkrevs:
1667 ancestors.update(flparentlinkrevs)
1667 ancestors.update(flparentlinkrevs)
1668
1668
1669 fncache.setdefault(rev, []).append(file_)
1669 fncache.setdefault(rev, []).append(file_)
1670 wanted.add(rev)
1670 wanted.add(rev)
1671 if copied:
1671 if copied:
1672 copies.append(copied)
1672 copies.append(copied)
1673
1673
1674 return wanted
1674 return wanted
1675
1675
1676 class _followfilter(object):
1676 class _followfilter(object):
1677 def __init__(self, repo, onlyfirst=False):
1677 def __init__(self, repo, onlyfirst=False):
1678 self.repo = repo
1678 self.repo = repo
1679 self.startrev = nullrev
1679 self.startrev = nullrev
1680 self.roots = set()
1680 self.roots = set()
1681 self.onlyfirst = onlyfirst
1681 self.onlyfirst = onlyfirst
1682
1682
1683 def match(self, rev):
1683 def match(self, rev):
1684 def realparents(rev):
1684 def realparents(rev):
1685 if self.onlyfirst:
1685 if self.onlyfirst:
1686 return self.repo.changelog.parentrevs(rev)[0:1]
1686 return self.repo.changelog.parentrevs(rev)[0:1]
1687 else:
1687 else:
1688 return filter(lambda x: x != nullrev,
1688 return filter(lambda x: x != nullrev,
1689 self.repo.changelog.parentrevs(rev))
1689 self.repo.changelog.parentrevs(rev))
1690
1690
1691 if self.startrev == nullrev:
1691 if self.startrev == nullrev:
1692 self.startrev = rev
1692 self.startrev = rev
1693 return True
1693 return True
1694
1694
1695 if rev > self.startrev:
1695 if rev > self.startrev:
1696 # forward: all descendants
1696 # forward: all descendants
1697 if not self.roots:
1697 if not self.roots:
1698 self.roots.add(self.startrev)
1698 self.roots.add(self.startrev)
1699 for parent in realparents(rev):
1699 for parent in realparents(rev):
1700 if parent in self.roots:
1700 if parent in self.roots:
1701 self.roots.add(rev)
1701 self.roots.add(rev)
1702 return True
1702 return True
1703 else:
1703 else:
1704 # backwards: all parents
1704 # backwards: all parents
1705 if not self.roots:
1705 if not self.roots:
1706 self.roots.update(realparents(self.startrev))
1706 self.roots.update(realparents(self.startrev))
1707 if rev in self.roots:
1707 if rev in self.roots:
1708 self.roots.remove(rev)
1708 self.roots.remove(rev)
1709 self.roots.update(realparents(rev))
1709 self.roots.update(realparents(rev))
1710 return True
1710 return True
1711
1711
1712 return False
1712 return False
1713
1713
1714 def walkchangerevs(repo, match, opts, prepare):
1714 def walkchangerevs(repo, match, opts, prepare):
1715 '''Iterate over files and the revs in which they changed.
1715 '''Iterate over files and the revs in which they changed.
1716
1716
1717 Callers most commonly need to iterate backwards over the history
1717 Callers most commonly need to iterate backwards over the history
1718 in which they are interested. Doing so has awful (quadratic-looking)
1718 in which they are interested. Doing so has awful (quadratic-looking)
1719 performance, so we use iterators in a "windowed" way.
1719 performance, so we use iterators in a "windowed" way.
1720
1720
1721 We walk a window of revisions in the desired order. Within the
1721 We walk a window of revisions in the desired order. Within the
1722 window, we first walk forwards to gather data, then in the desired
1722 window, we first walk forwards to gather data, then in the desired
1723 order (usually backwards) to display it.
1723 order (usually backwards) to display it.
1724
1724
1725 This function returns an iterator yielding contexts. Before
1725 This function returns an iterator yielding contexts. Before
1726 yielding each context, the iterator will first call the prepare
1726 yielding each context, the iterator will first call the prepare
1727 function on each context in the window in forward order.'''
1727 function on each context in the window in forward order.'''
1728
1728
1729 follow = opts.get('follow') or opts.get('follow_first')
1729 follow = opts.get('follow') or opts.get('follow_first')
1730 revs = _logrevs(repo, opts)
1730 revs = _logrevs(repo, opts)
1731 if not revs:
1731 if not revs:
1732 return []
1732 return []
1733 wanted = set()
1733 wanted = set()
1734 slowpath = match.anypats() or (match.files() and opts.get('removed'))
1734 slowpath = match.anypats() or (match.files() and opts.get('removed'))
1735 fncache = {}
1735 fncache = {}
1736 change = repo.changectx
1736 change = repo.changectx
1737
1737
1738 # First step is to fill wanted, the set of revisions that we want to yield.
1738 # First step is to fill wanted, the set of revisions that we want to yield.
1739 # When it does not induce extra cost, we also fill fncache for revisions in
1739 # When it does not induce extra cost, we also fill fncache for revisions in
1740 # wanted: a cache of filenames that were changed (ctx.files()) and that
1740 # wanted: a cache of filenames that were changed (ctx.files()) and that
1741 # match the file filtering conditions.
1741 # match the file filtering conditions.
1742
1742
1743 if match.always():
1743 if match.always():
1744 # No files, no patterns. Display all revs.
1744 # No files, no patterns. Display all revs.
1745 wanted = revs
1745 wanted = revs
1746
1746
1747 if not slowpath and match.files():
1747 if not slowpath and match.files():
1748 # We only have to read through the filelog to find wanted revisions
1748 # We only have to read through the filelog to find wanted revisions
1749
1749
1750 try:
1750 try:
1751 wanted = walkfilerevs(repo, match, follow, revs, fncache)
1751 wanted = walkfilerevs(repo, match, follow, revs, fncache)
1752 except FileWalkError:
1752 except FileWalkError:
1753 slowpath = True
1753 slowpath = True
1754
1754
1755 # We decided to fall back to the slowpath because at least one
1755 # We decided to fall back to the slowpath because at least one
1756 # of the paths was not a file. Check to see if at least one of them
1756 # of the paths was not a file. Check to see if at least one of them
1757 # existed in history, otherwise simply return
1757 # existed in history, otherwise simply return
1758 for path in match.files():
1758 for path in match.files():
1759 if path == '.' or path in repo.store:
1759 if path == '.' or path in repo.store:
1760 break
1760 break
1761 else:
1761 else:
1762 return []
1762 return []
1763
1763
1764 if slowpath:
1764 if slowpath:
1765 # We have to read the changelog to match filenames against
1765 # We have to read the changelog to match filenames against
1766 # changed files
1766 # changed files
1767
1767
1768 if follow:
1768 if follow:
1769 raise util.Abort(_('can only follow copies/renames for explicit '
1769 raise util.Abort(_('can only follow copies/renames for explicit '
1770 'filenames'))
1770 'filenames'))
1771
1771
1772 # The slow path checks files modified in every changeset.
1772 # The slow path checks files modified in every changeset.
1773 # This is really slow on large repos, so compute the set lazily.
1773 # This is really slow on large repos, so compute the set lazily.
1774 class lazywantedset(object):
1774 class lazywantedset(object):
1775 def __init__(self):
1775 def __init__(self):
1776 self.set = set()
1776 self.set = set()
1777 self.revs = set(revs)
1777 self.revs = set(revs)
1778
1778
1779 # No need to worry about locality here because it will be accessed
1779 # No need to worry about locality here because it will be accessed
1780 # in the same order as the increasing window below.
1780 # in the same order as the increasing window below.
1781 def __contains__(self, value):
1781 def __contains__(self, value):
1782 if value in self.set:
1782 if value in self.set:
1783 return True
1783 return True
1784 elif not value in self.revs:
1784 elif not value in self.revs:
1785 return False
1785 return False
1786 else:
1786 else:
1787 self.revs.discard(value)
1787 self.revs.discard(value)
1788 ctx = change(value)
1788 ctx = change(value)
1789 matches = filter(match, ctx.files())
1789 matches = filter(match, ctx.files())
1790 if matches:
1790 if matches:
1791 fncache[value] = matches
1791 fncache[value] = matches
1792 self.set.add(value)
1792 self.set.add(value)
1793 return True
1793 return True
1794 return False
1794 return False
1795
1795
1796 def discard(self, value):
1796 def discard(self, value):
1797 self.revs.discard(value)
1797 self.revs.discard(value)
1798 self.set.discard(value)
1798 self.set.discard(value)
1799
1799
1800 wanted = lazywantedset()
1800 wanted = lazywantedset()
1801
1801
1802 # it might be worthwhile to do this in the iterator if the rev range
1802 # it might be worthwhile to do this in the iterator if the rev range
1803 # is descending and the prune args are all within that range
1803 # is descending and the prune args are all within that range
1804 for rev in opts.get('prune', ()):
1804 for rev in opts.get('prune', ()):
1805 rev = repo[rev].rev()
1805 rev = repo[rev].rev()
1806 ff = _followfilter(repo)
1806 ff = _followfilter(repo)
1807 stop = min(revs[0], revs[-1])
1807 stop = min(revs[0], revs[-1])
1808 for x in xrange(rev, stop - 1, -1):
1808 for x in xrange(rev, stop - 1, -1):
1809 if ff.match(x):
1809 if ff.match(x):
1810 wanted.discard(x)
1810 wanted.discard(x)
1811
1811
1812 # Now that wanted is correctly initialized, we can iterate over the
1812 # Now that wanted is correctly initialized, we can iterate over the
1813 # revision range, yielding only revisions in wanted.
1813 # revision range, yielding only revisions in wanted.
1814 def iterate():
1814 def iterate():
1815 if follow and not match.files():
1815 if follow and not match.files():
1816 ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
1816 ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
1817 def want(rev):
1817 def want(rev):
1818 return ff.match(rev) and rev in wanted
1818 return ff.match(rev) and rev in wanted
1819 else:
1819 else:
1820 def want(rev):
1820 def want(rev):
1821 return rev in wanted
1821 return rev in wanted
1822
1822
1823 it = iter(revs)
1823 it = iter(revs)
1824 stopiteration = False
1824 stopiteration = False
1825 for windowsize in increasingwindows():
1825 for windowsize in increasingwindows():
1826 nrevs = []
1826 nrevs = []
1827 for i in xrange(windowsize):
1827 for i in xrange(windowsize):
1828 try:
1828 try:
1829 rev = it.next()
1829 rev = it.next()
1830 if want(rev):
1830 if want(rev):
1831 nrevs.append(rev)
1831 nrevs.append(rev)
1832 except StopIteration:
1832 except StopIteration:
1833 stopiteration = True
1833 stopiteration = True
1834 break
1834 break
1835 for rev in sorted(nrevs):
1835 for rev in sorted(nrevs):
1836 fns = fncache.get(rev)
1836 fns = fncache.get(rev)
1837 ctx = change(rev)
1837 ctx = change(rev)
1838 if not fns:
1838 if not fns:
1839 def fns_generator():
1839 def fns_generator():
1840 for f in ctx.files():
1840 for f in ctx.files():
1841 if match(f):
1841 if match(f):
1842 yield f
1842 yield f
1843 fns = fns_generator()
1843 fns = fns_generator()
1844 prepare(ctx, fns)
1844 prepare(ctx, fns)
1845 for rev in nrevs:
1845 for rev in nrevs:
1846 yield change(rev)
1846 yield change(rev)
1847
1847
1848 if stopiteration:
1848 if stopiteration:
1849 break
1849 break
1850
1850
1851 return iterate()
1851 return iterate()
1852
1852
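# A standalone sketch of the increasing-window consumption performed by
# iterate() above: items are pulled from the source in growing batches so the
# first results appear quickly while later batches amortize per-window
# overhead. Window sizes and names here are illustrative assumptions only.
def increasingwindows(start=8, ceiling=512):
    size = start
    while True:
        yield size
        size = min(size * 2, ceiling)

def windowed(source, want):
    it = iter(source)
    exhausted = False
    for windowsize in increasingwindows():
        batch = []
        for _ in range(windowsize):
            try:
                item = next(it)
            except StopIteration:
                exhausted = True
                break
            if want(item):
                batch.append(item)
        for item in batch:
            yield item
        if exhausted:
            break

# usage: multiples of 3 below 30, yielded window by window
assert list(windowed(range(30), lambda n: n % 3 == 0))[:4] == [0, 3, 6, 9]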
1853 def _makefollowlogfilematcher(repo, files, followfirst):
1853 def _makefollowlogfilematcher(repo, files, followfirst):
1854 # When displaying a revision with --patch --follow FILE, we have
1854 # When displaying a revision with --patch --follow FILE, we have
1855 # to know which file of the revision must be diffed. With
1855 # to know which file of the revision must be diffed. With
1856 # --follow, we want the names of the ancestors of FILE in the
1856 # --follow, we want the names of the ancestors of FILE in the
1857 # revision, stored in "fcache". "fcache" is populated by
1857 # revision, stored in "fcache". "fcache" is populated by
1858 # reproducing the graph traversal already done by --follow revset
1858 # reproducing the graph traversal already done by --follow revset
1859 # and relating linkrevs to file names (which is not "correct" but
1859 # and relating linkrevs to file names (which is not "correct" but
1860 # good enough).
1860 # good enough).
1861 fcache = {}
1861 fcache = {}
1862 fcacheready = [False]
1862 fcacheready = [False]
1863 pctx = repo['.']
1863 pctx = repo['.']
1864
1864
1865 def populate():
1865 def populate():
1866 for fn in files:
1866 for fn in files:
1867 for i in ((pctx[fn],), pctx[fn].ancestors(followfirst=followfirst)):
1867 for i in ((pctx[fn],), pctx[fn].ancestors(followfirst=followfirst)):
1868 for c in i:
1868 for c in i:
1869 fcache.setdefault(c.linkrev(), set()).add(c.path())
1869 fcache.setdefault(c.linkrev(), set()).add(c.path())
1870
1870
1871 def filematcher(rev):
1871 def filematcher(rev):
1872 if not fcacheready[0]:
1872 if not fcacheready[0]:
1873 # Lazy initialization
1873 # Lazy initialization
1874 fcacheready[0] = True
1874 fcacheready[0] = True
1875 populate()
1875 populate()
1876 return scmutil.matchfiles(repo, fcache.get(rev, []))
1876 return scmutil.matchfiles(repo, fcache.get(rev, []))
1877
1877
1878 return filematcher
1878 return filematcher
1879
1879
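# The fcacheready = [False] flag above is the usual Python 2 workaround for a
# writable closure variable (there is no "nonlocal"). A hypothetical
# standalone version of the same one-shot lazy initialization:
def makelazylookup(loader):
    cache = {}
    ready = [False]           # single mutable cell shared with the closure
    def lookup(key, default=None):
        if not ready[0]:
            ready[0] = True   # populate exactly once, on first use
            cache.update(loader())
        return cache.get(key, default)
    return lookup

lookup = makelazylookup(lambda: {1: 'a', 2: 'b'})
assert lookup(2) == 'b' and lookup(99) is None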
1880 def _makenofollowlogfilematcher(repo, pats, opts):
1880 def _makenofollowlogfilematcher(repo, pats, opts):
1881 '''hook for extensions to override the filematcher for non-follow cases'''
1881 '''hook for extensions to override the filematcher for non-follow cases'''
1882 return None
1882 return None
1883
1883
1884 def _makelogrevset(repo, pats, opts, revs):
1884 def _makelogrevset(repo, pats, opts, revs):
1885 """Return (expr, filematcher) where expr is a revset string built
1885 """Return (expr, filematcher) where expr is a revset string built
1886 from log options and file patterns or None. If --stat or --patch
1886 from log options and file patterns or None. If --stat or --patch
1887 are not passed filematcher is None. Otherwise it is a callable
1887 are not passed filematcher is None. Otherwise it is a callable
1888 taking a revision number and returning a match object filtering
1888 taking a revision number and returning a match object filtering
1889 the files to be detailed when displaying the revision.
1889 the files to be detailed when displaying the revision.
1890 """
1890 """
1891 opt2revset = {
1891 opt2revset = {
1892 'no_merges': ('not merge()', None),
1892 'no_merges': ('not merge()', None),
1893 'only_merges': ('merge()', None),
1893 'only_merges': ('merge()', None),
1894 '_ancestors': ('ancestors(%(val)s)', None),
1894 '_ancestors': ('ancestors(%(val)s)', None),
1895 '_fancestors': ('_firstancestors(%(val)s)', None),
1895 '_fancestors': ('_firstancestors(%(val)s)', None),
1896 '_descendants': ('descendants(%(val)s)', None),
1896 '_descendants': ('descendants(%(val)s)', None),
1897 '_fdescendants': ('_firstdescendants(%(val)s)', None),
1897 '_fdescendants': ('_firstdescendants(%(val)s)', None),
1898 '_matchfiles': ('_matchfiles(%(val)s)', None),
1898 '_matchfiles': ('_matchfiles(%(val)s)', None),
1899 'date': ('date(%(val)r)', None),
1899 'date': ('date(%(val)r)', None),
1900 'branch': ('branch(%(val)r)', ' or '),
1900 'branch': ('branch(%(val)r)', ' or '),
1901 '_patslog': ('filelog(%(val)r)', ' or '),
1901 '_patslog': ('filelog(%(val)r)', ' or '),
1902 '_patsfollow': ('follow(%(val)r)', ' or '),
1902 '_patsfollow': ('follow(%(val)r)', ' or '),
1903 '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
1903 '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
1904 'keyword': ('keyword(%(val)r)', ' or '),
1904 'keyword': ('keyword(%(val)r)', ' or '),
1905 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
1905 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
1906 'user': ('user(%(val)r)', ' or '),
1906 'user': ('user(%(val)r)', ' or '),
1907 }
1907 }
1908
1908
1909 opts = dict(opts)
1909 opts = dict(opts)
1910 # follow or not follow?
1910 # follow or not follow?
1911 follow = opts.get('follow') or opts.get('follow_first')
1911 follow = opts.get('follow') or opts.get('follow_first')
1912 if opts.get('follow_first'):
1912 if opts.get('follow_first'):
1913 followfirst = 1
1913 followfirst = 1
1914 else:
1914 else:
1915 followfirst = 0
1915 followfirst = 0
1916 # --follow with FILE behaviour depends on revs...
1916 # --follow with FILE behaviour depends on revs...
1917 it = iter(revs)
1917 it = iter(revs)
1918 startrev = it.next()
1918 startrev = it.next()
1919 try:
1919 try:
1920 followdescendants = startrev < it.next()
1920 followdescendants = startrev < it.next()
1921 except StopIteration:
1921 except StopIteration:
1922 followdescendants = False
1922 followdescendants = False
1923
1923
1924 # branch and only_branch are really aliases and must be handled at
1924 # branch and only_branch are really aliases and must be handled at
1925 # the same time
1925 # the same time
1926 opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
1926 opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
1927 opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
1927 opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
1928 # pats/include/exclude are passed to match.match() directly in
1928 # pats/include/exclude are passed to match.match() directly in
1929 # _matchfiles() revset but walkchangerevs() builds its matcher with
1929 # _matchfiles() revset but walkchangerevs() builds its matcher with
1930 # scmutil.match(). The difference is input pats are globbed on
1930 # scmutil.match(). The difference is input pats are globbed on
1931 # platforms without shell expansion (windows).
1931 # platforms without shell expansion (windows).
1932 wctx = repo[None]
1932 wctx = repo[None]
1933 match, pats = scmutil.matchandpats(wctx, pats, opts)
1933 match, pats = scmutil.matchandpats(wctx, pats, opts)
1934 slowpath = match.anypats() or (match.files() and opts.get('removed'))
1934 slowpath = match.anypats() or (match.files() and opts.get('removed'))
1935 if not slowpath:
1935 if not slowpath:
1936 for f in match.files():
1936 for f in match.files():
1937 if follow and f not in wctx:
1937 if follow and f not in wctx:
1938 # If the file exists, it may be a directory, so let it
1938 # If the file exists, it may be a directory, so let it
1939 # take the slow path.
1939 # take the slow path.
1940 if os.path.exists(repo.wjoin(f)):
1940 if os.path.exists(repo.wjoin(f)):
1941 slowpath = True
1941 slowpath = True
1942 continue
1942 continue
1943 else:
1943 else:
1944 raise util.Abort(_('cannot follow file not in parent '
1944 raise util.Abort(_('cannot follow file not in parent '
1945 'revision: "%s"') % f)
1945 'revision: "%s"') % f)
1946 filelog = repo.file(f)
1946 filelog = repo.file(f)
1947 if not filelog:
1947 if not filelog:
1948 # A zero count may be a directory or deleted file, so
1948 # A zero count may be a directory or deleted file, so
1949 # try to find matching entries on the slow path.
1949 # try to find matching entries on the slow path.
1950 if follow:
1950 if follow:
1951 raise util.Abort(
1951 raise util.Abort(
1952 _('cannot follow nonexistent file: "%s"') % f)
1952 _('cannot follow nonexistent file: "%s"') % f)
1953 slowpath = True
1953 slowpath = True
1954
1954
1955 # We decided to fall back to the slowpath because at least one
1955 # We decided to fall back to the slowpath because at least one
1956 # of the paths was not a file. Check to see if at least one of them
1956 # of the paths was not a file. Check to see if at least one of them
1957 # existed in history - in that case, we'll continue down the
1957 # existed in history - in that case, we'll continue down the
1958 # slowpath; otherwise, we can turn off the slowpath
1958 # slowpath; otherwise, we can turn off the slowpath
1959 if slowpath:
1959 if slowpath:
1960 for path in match.files():
1960 for path in match.files():
1961 if path == '.' or path in repo.store:
1961 if path == '.' or path in repo.store:
1962 break
1962 break
1963 else:
1963 else:
1964 slowpath = False
1964 slowpath = False
1965
1965
1966 fpats = ('_patsfollow', '_patsfollowfirst')
1966 fpats = ('_patsfollow', '_patsfollowfirst')
1967 fnopats = (('_ancestors', '_fancestors'),
1967 fnopats = (('_ancestors', '_fancestors'),
1968 ('_descendants', '_fdescendants'))
1968 ('_descendants', '_fdescendants'))
1969 if slowpath:
1969 if slowpath:
1970 # See walkchangerevs() slow path.
1970 # See walkchangerevs() slow path.
1971 #
1971 #
1972 # pats/include/exclude cannot be represented as separate
1972 # pats/include/exclude cannot be represented as separate
1973 # revset expressions as their filtering logic applies at file
1973 # revset expressions as their filtering logic applies at file
1974 # level. For instance "-I a -X b" matches a revision touching
1974 # level. For instance "-I a -X b" matches a revision touching
1975 # "a" and "b" while "file(a) and not file(b)" does
1975 # "a" and "b" while "file(a) and not file(b)" does
1976 # not. Besides, filesets are evaluated against the working
1976 # not. Besides, filesets are evaluated against the working
1977 # directory.
1977 # directory.
1978 matchargs = ['r:', 'd:relpath']
1978 matchargs = ['r:', 'd:relpath']
1979 for p in pats:
1979 for p in pats:
1980 matchargs.append('p:' + p)
1980 matchargs.append('p:' + p)
1981 for p in opts.get('include', []):
1981 for p in opts.get('include', []):
1982 matchargs.append('i:' + p)
1982 matchargs.append('i:' + p)
1983 for p in opts.get('exclude', []):
1983 for p in opts.get('exclude', []):
1984 matchargs.append('x:' + p)
1984 matchargs.append('x:' + p)
1985 matchargs = ','.join(('%r' % p) for p in matchargs)
1985 matchargs = ','.join(('%r' % p) for p in matchargs)
1986 opts['_matchfiles'] = matchargs
1986 opts['_matchfiles'] = matchargs
1987 if follow:
1987 if follow:
1988 opts[fnopats[0][followfirst]] = '.'
1988 opts[fnopats[0][followfirst]] = '.'
1989 else:
1989 else:
1990 if follow:
1990 if follow:
1991 if pats:
1991 if pats:
1992 # follow() revset interprets its file argument as a
1992 # follow() revset interprets its file argument as a
1993 # manifest entry, so use match.files(), not pats.
1993 # manifest entry, so use match.files(), not pats.
1994 opts[fpats[followfirst]] = list(match.files())
1994 opts[fpats[followfirst]] = list(match.files())
1995 else:
1995 else:
1996 op = fnopats[followdescendants][followfirst]
1996 op = fnopats[followdescendants][followfirst]
1997 opts[op] = 'rev(%d)' % startrev
1997 opts[op] = 'rev(%d)' % startrev
1998 else:
1998 else:
1999 opts['_patslog'] = list(pats)
1999 opts['_patslog'] = list(pats)
2000
2000
2001 filematcher = None
2001 filematcher = None
2002 if opts.get('patch') or opts.get('stat'):
2002 if opts.get('patch') or opts.get('stat'):
2003 # When following files, track renames via a special matcher.
2003 # When following files, track renames via a special matcher.
2004 # If we're forced to take the slowpath it means we're following
2004 # If we're forced to take the slowpath it means we're following
2005 # at least one pattern/directory, so don't bother with rename tracking.
2005 # at least one pattern/directory, so don't bother with rename tracking.
2006 if follow and not match.always() and not slowpath:
2006 if follow and not match.always() and not slowpath:
2007 # _makefollowlogfilematcher expects its files argument to be
2007 # _makefollowlogfilematcher expects its files argument to be
2008 # relative to the repo root, so use match.files(), not pats.
2008 # relative to the repo root, so use match.files(), not pats.
2009 filematcher = _makefollowlogfilematcher(repo, match.files(),
2009 filematcher = _makefollowlogfilematcher(repo, match.files(),
2010 followfirst)
2010 followfirst)
2011 else:
2011 else:
2012 filematcher = _makenofollowlogfilematcher(repo, pats, opts)
2012 filematcher = _makenofollowlogfilematcher(repo, pats, opts)
2013 if filematcher is None:
2013 if filematcher is None:
2014 filematcher = lambda rev: match
2014 filematcher = lambda rev: match
2015
2015
2016 expr = []
2016 expr = []
2017 for op, val in sorted(opts.iteritems()):
2017 for op, val in sorted(opts.iteritems()):
2018 if not val:
2018 if not val:
2019 continue
2019 continue
2020 if op not in opt2revset:
2020 if op not in opt2revset:
2021 continue
2021 continue
2022 revop, andor = opt2revset[op]
2022 revop, andor = opt2revset[op]
2023 if '%(val)' not in revop:
2023 if '%(val)' not in revop:
2024 expr.append(revop)
2024 expr.append(revop)
2025 else:
2025 else:
2026 if not isinstance(val, list):
2026 if not isinstance(val, list):
2027 e = revop % {'val': val}
2027 e = revop % {'val': val}
2028 else:
2028 else:
2029 e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
2029 e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
2030 expr.append(e)
2030 expr.append(e)
2031
2031
2032 if expr:
2032 if expr:
2033 expr = '(' + ' and '.join(expr) + ')'
2033 expr = '(' + ' and '.join(expr) + ')'
2034 else:
2034 else:
2035 expr = None
2035 expr = None
2036 return expr, filematcher
2036 return expr, filematcher
2037
2037
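# A toy model of the option-to-revset translation done by _makelogrevset()
# above: each option maps to a template plus a joiner for list values, and
# the resulting terms are and-ed into one expression. The table and option
# names below are illustrative assumptions, not Mercurial's real mapping.
OPT2REVSET = {
    'no_merges': ('not merge()', None),
    'user': ('user(%r)', ' or '),
    'keyword': ('keyword(%r)', ' or '),
}

def buildexpr(opts):
    terms = []
    for name in sorted(opts):
        val = opts[name]
        if not val or name not in OPT2REVSET:
            continue
        template, joiner = OPT2REVSET[name]
        if '%r' not in template:
            terms.append(template)
        elif isinstance(val, list):
            terms.append('(' + joiner.join(template % v for v in val) + ')')
        else:
            terms.append(template % val)
    return '(' + ' and '.join(terms) + ')' if terms else None

# usage
assert buildexpr({'no_merges': True, 'user': ['alice', 'bob']}) == \
    "(not merge() and (user('alice') or user('bob')))"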
2038 def _logrevs(repo, opts):
2038 def _logrevs(repo, opts):
2039 # Default --rev value depends on --follow but --follow behaviour
2039 # Default --rev value depends on --follow but --follow behaviour
2040 # depends on revisions resolved from --rev...
2040 # depends on revisions resolved from --rev...
2041 follow = opts.get('follow') or opts.get('follow_first')
2041 follow = opts.get('follow') or opts.get('follow_first')
2042 if opts.get('rev'):
2042 if opts.get('rev'):
2043 revs = scmutil.revrange(repo, opts['rev'])
2043 revs = scmutil.revrange(repo, opts['rev'])
2044 elif follow and repo.dirstate.p1() == nullid:
2044 elif follow and repo.dirstate.p1() == nullid:
2045 revs = revset.baseset()
2045 revs = revset.baseset()
2046 elif follow:
2046 elif follow:
2047 revs = repo.revs('reverse(:.)')
2047 revs = repo.revs('reverse(:.)')
2048 else:
2048 else:
2049 revs = revset.spanset(repo)
2049 revs = revset.spanset(repo)
2050 revs.reverse()
2050 revs.reverse()
2051 return revs
2051 return revs
2052
2052
2053 def getgraphlogrevs(repo, pats, opts):
2053 def getgraphlogrevs(repo, pats, opts):
2054 """Return (revs, expr, filematcher) where revs is an iterable of
2054 """Return (revs, expr, filematcher) where revs is an iterable of
2055 revision numbers, expr is a revset string built from log options
2055 revision numbers, expr is a revset string built from log options
2056 and file patterns or None, and used to filter 'revs'. If --stat or
2056 and file patterns or None, and used to filter 'revs'. If --stat or
2057 --patch are not passed filematcher is None. Otherwise it is a
2057 --patch are not passed filematcher is None. Otherwise it is a
2058 callable taking a revision number and returning a match object
2058 callable taking a revision number and returning a match object
2059 filtering the files to be detailed when displaying the revision.
2059 filtering the files to be detailed when displaying the revision.
2060 """
2060 """
2061 limit = loglimit(opts)
2061 limit = loglimit(opts)
2062 revs = _logrevs(repo, opts)
2062 revs = _logrevs(repo, opts)
2063 if not revs:
2063 if not revs:
2064 return revset.baseset(), None, None
2064 return revset.baseset(), None, None
2065 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2065 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2066 if opts.get('rev'):
2066 if opts.get('rev'):
2067 # User-specified revs might be unsorted, but don't sort before
2067 # User-specified revs might be unsorted, but don't sort before
2068 # _makelogrevset because it might depend on the order of revs
2068 # _makelogrevset because it might depend on the order of revs
2069 revs.sort(reverse=True)
2069 revs.sort(reverse=True)
2070 if expr:
2070 if expr:
2071 # Revset matchers often operate faster on revisions in changelog
2071 # Revset matchers often operate faster on revisions in changelog
2072 # order, because most filters deal with the changelog.
2072 # order, because most filters deal with the changelog.
2073 revs.reverse()
2073 revs.reverse()
2074 matcher = revset.match(repo.ui, expr)
2074 matcher = revset.match(repo.ui, expr)
2075 # Revset matches can reorder revisions. "A or B" typically returns
2075 # Revset matches can reorder revisions. "A or B" typically returns
2076 # the revision matching A then the revision matching B. Sort
2076 # the revision matching A then the revision matching B. Sort
2077 # again to fix that.
2077 # again to fix that.
2078 revs = matcher(repo, revs)
2078 revs = matcher(repo, revs)
2079 revs.sort(reverse=True)
2079 revs.sort(reverse=True)
2080 if limit is not None:
2080 if limit is not None:
2081 limitedrevs = []
2081 limitedrevs = []
2082 for idx, rev in enumerate(revs):
2082 for idx, rev in enumerate(revs):
2083 if idx >= limit:
2083 if idx >= limit:
2084 break
2084 break
2085 limitedrevs.append(rev)
2085 limitedrevs.append(rev)
2086 revs = revset.baseset(limitedrevs)
2086 revs = revset.baseset(limitedrevs)
2087
2087
2088 return revs, expr, filematcher
2088 return revs, expr, filematcher
2089
2089
2090 def getlogrevs(repo, pats, opts):
2090 def getlogrevs(repo, pats, opts):
2091 """Return (revs, expr, filematcher) where revs is an iterable of
2091 """Return (revs, expr, filematcher) where revs is an iterable of
2092 revision numbers, expr is a revset string built from log options
2092 revision numbers, expr is a revset string built from log options
2093 and file patterns or None, and used to filter 'revs'. If --stat or
2093 and file patterns or None, and used to filter 'revs'. If --stat or
2094 --patch are not passed filematcher is None. Otherwise it is a
2094 --patch are not passed filematcher is None. Otherwise it is a
2095 callable taking a revision number and returning a match objects
2095 callable taking a revision number and returning a match objects
2096 filtering the files to be detailed when displaying the revision.
2096 filtering the files to be detailed when displaying the revision.
2097 """
2097 """
2098 limit = loglimit(opts)
2098 limit = loglimit(opts)
2099 revs = _logrevs(repo, opts)
2099 revs = _logrevs(repo, opts)
2100 if not revs:
2100 if not revs:
2101 return revset.baseset([]), None, None
2101 return revset.baseset([]), None, None
2102 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2102 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2103 if expr:
2103 if expr:
2104 # Revset matchers often operate faster on revisions in changelog
2104 # Revset matchers often operate faster on revisions in changelog
2105 # order, because most filters deal with the changelog.
2105 # order, because most filters deal with the changelog.
2106 if not opts.get('rev'):
2106 if not opts.get('rev'):
2107 revs.reverse()
2107 revs.reverse()
2108 matcher = revset.match(repo.ui, expr)
2108 matcher = revset.match(repo.ui, expr)
2109 # Revset matches can reorder revisions. "A or B" typically returns
2109 # Revset matches can reorder revisions. "A or B" typically returns
2110 # the revision matching A then the revision matching B. Sort
2110 # the revision matching A then the revision matching B. Sort
2111 # again to fix that.
2111 # again to fix that.
2112 revs = matcher(repo, revs)
2112 revs = matcher(repo, revs)
2113 if not opts.get('rev'):
2113 if not opts.get('rev'):
2114 revs.sort(reverse=True)
2114 revs.sort(reverse=True)
2115 if limit is not None:
2115 if limit is not None:
2116 count = 0
2116 count = 0
2117 limitedrevs = []
2117 limitedrevs = []
2118 it = iter(revs)
2118 it = iter(revs)
2119 while count < limit:
2119 while count < limit:
2120 try:
2120 try:
2121 limitedrevs.append(it.next())
2121 limitedrevs.append(it.next())
2122 except StopIteration:
2122 except StopIteration:
2123 break
2123 break
2124 count += 1
2124 count += 1
2125 revs = revset.baseset(limitedrevs)
2125 revs = revset.baseset(limitedrevs)
2126
2126
2127 return revs, expr, filematcher
2127 return revs, expr, filematcher
2128
2128
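# The limit handling above truncates the revision iterator by hand; outside
# of this code the same idea is just itertools.islice. A hypothetical
# equivalent for plain iterables:
from itertools import islice

def limited(items, limit):
    if limit is None:
        return list(items)
    return list(islice(items, limit))

assert limited(iter(range(100)), 3) == [0, 1, 2]
assert limited(range(5), None) == [0, 1, 2, 3, 4]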
2129 def displaygraph(ui, dag, displayer, showparents, edgefn, getrenamed=None,
2129 def displaygraph(ui, dag, displayer, showparents, edgefn, getrenamed=None,
2130 filematcher=None):
2130 filematcher=None):
2131 seen, state = [], graphmod.asciistate()
2131 seen, state = [], graphmod.asciistate()
2132 for rev, type, ctx, parents in dag:
2132 for rev, type, ctx, parents in dag:
2133 char = 'o'
2133 char = 'o'
2134 if ctx.node() in showparents:
2134 if ctx.node() in showparents:
2135 char = '@'
2135 char = '@'
2136 elif ctx.obsolete():
2136 elif ctx.obsolete():
2137 char = 'x'
2137 char = 'x'
2138 elif ctx.closesbranch():
2138 elif ctx.closesbranch():
2139 char = '_'
2139 char = '_'
2140 copies = None
2140 copies = None
2141 if getrenamed and ctx.rev():
2141 if getrenamed and ctx.rev():
2142 copies = []
2142 copies = []
2143 for fn in ctx.files():
2143 for fn in ctx.files():
2144 rename = getrenamed(fn, ctx.rev())
2144 rename = getrenamed(fn, ctx.rev())
2145 if rename:
2145 if rename:
2146 copies.append((fn, rename[0]))
2146 copies.append((fn, rename[0]))
2147 revmatchfn = None
2147 revmatchfn = None
2148 if filematcher is not None:
2148 if filematcher is not None:
2149 revmatchfn = filematcher(ctx.rev())
2149 revmatchfn = filematcher(ctx.rev())
2150 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
2150 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
2151 lines = displayer.hunk.pop(rev).split('\n')
2151 lines = displayer.hunk.pop(rev).split('\n')
2152 if not lines[-1]:
2152 if not lines[-1]:
2153 del lines[-1]
2153 del lines[-1]
2154 displayer.flush(rev)
2154 displayer.flush(rev)
2155 edges = edgefn(type, char, lines, seen, rev, parents)
2155 edges = edgefn(type, char, lines, seen, rev, parents)
2156 for type, char, lines, coldata in edges:
2156 for type, char, lines, coldata in edges:
2157 graphmod.ascii(ui, state, type, char, lines, coldata)
2157 graphmod.ascii(ui, state, type, char, lines, coldata)
2158 displayer.close()
2158 displayer.close()
2159
2159
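# The glyph chosen for each node above follows a fixed precedence: working
# directory parent, then obsolete, then branch-closing, then the default. A
# hypothetical standalone helper with the same ordering:
def graphchar(isparent, isobsolete, closesbranch):
    if isparent:
        return '@'
    if isobsolete:
        return 'x'
    if closesbranch:
        return '_'
    return 'o'

assert graphchar(True, True, False) == '@'   # parent wins over obsolete
assert graphchar(False, False, True) == '_'
assert graphchar(False, False, False) == 'o'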
2160 def graphlog(ui, repo, *pats, **opts):
2160 def graphlog(ui, repo, *pats, **opts):
2161 # Parameters are identical to log command ones
2161 # Parameters are identical to log command ones
2162 revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
2162 revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
2163 revdag = graphmod.dagwalker(repo, revs)
2163 revdag = graphmod.dagwalker(repo, revs)
2164
2164
2165 getrenamed = None
2165 getrenamed = None
2166 if opts.get('copies'):
2166 if opts.get('copies'):
2167 endrev = None
2167 endrev = None
2168 if opts.get('rev'):
2168 if opts.get('rev'):
2169 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
2169 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
2170 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2170 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2171 displayer = show_changeset(ui, repo, opts, buffered=True)
2171 displayer = show_changeset(ui, repo, opts, buffered=True)
2172 showparents = [ctx.node() for ctx in repo[None].parents()]
2172 showparents = [ctx.node() for ctx in repo[None].parents()]
2173 displaygraph(ui, revdag, displayer, showparents,
2173 displaygraph(ui, revdag, displayer, showparents,
2174 graphmod.asciiedges, getrenamed, filematcher)
2174 graphmod.asciiedges, getrenamed, filematcher)
2175
2175
2176 def checkunsupportedgraphflags(pats, opts):
2176 def checkunsupportedgraphflags(pats, opts):
2177 for op in ["newest_first"]:
2177 for op in ["newest_first"]:
2178 if op in opts and opts[op]:
2178 if op in opts and opts[op]:
2179 raise util.Abort(_("-G/--graph option is incompatible with --%s")
2179 raise util.Abort(_("-G/--graph option is incompatible with --%s")
2180 % op.replace("_", "-"))
2180 % op.replace("_", "-"))
2181
2181
2182 def graphrevs(repo, nodes, opts):
2182 def graphrevs(repo, nodes, opts):
2183 limit = loglimit(opts)
2183 limit = loglimit(opts)
2184 nodes.reverse()
2184 nodes.reverse()
2185 if limit is not None:
2185 if limit is not None:
2186 nodes = nodes[:limit]
2186 nodes = nodes[:limit]
2187 return graphmod.nodes(repo, nodes)
2187 return graphmod.nodes(repo, nodes)
2188
2188
2189 def add(ui, repo, match, prefix, explicitonly, **opts):
2189 def add(ui, repo, match, prefix, explicitonly, **opts):
2190 join = lambda f: os.path.join(prefix, f)
2190 join = lambda f: os.path.join(prefix, f)
2191 bad = []
2191 bad = []
2192 oldbad = match.bad
2192 oldbad = match.bad
2193 match.bad = lambda x, y: bad.append(x) or oldbad(x, y)
2193 match.bad = lambda x, y: bad.append(x) or oldbad(x, y)
2194 names = []
2194 names = []
2195 wctx = repo[None]
2195 wctx = repo[None]
2196 cca = None
2196 cca = None
2197 abort, warn = scmutil.checkportabilityalert(ui)
2197 abort, warn = scmutil.checkportabilityalert(ui)
2198 if abort or warn:
2198 if abort or warn:
2199 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
2199 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
2200 for f in wctx.walk(match):
2200 for f in wctx.walk(match):
2201 exact = match.exact(f)
2201 exact = match.exact(f)
2202 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
2202 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
2203 if cca:
2203 if cca:
2204 cca(f)
2204 cca(f)
2205 names.append(f)
2205 names.append(f)
2206 if ui.verbose or not exact:
2206 if ui.verbose or not exact:
2207 ui.status(_('adding %s\n') % match.rel(f))
2207 ui.status(_('adding %s\n') % match.rel(f))
2208
2208
2209 for subpath in sorted(wctx.substate):
2209 for subpath in sorted(wctx.substate):
2210 sub = wctx.sub(subpath)
2210 sub = wctx.sub(subpath)
2211 try:
2211 try:
2212 submatch = matchmod.narrowmatcher(subpath, match)
2212 submatch = matchmod.narrowmatcher(subpath, match)
2213 if opts.get('subrepos'):
2213 if opts.get('subrepos'):
2214 bad.extend(sub.add(ui, submatch, prefix, False, **opts))
2214 bad.extend(sub.add(ui, submatch, prefix, False, **opts))
2215 else:
2215 else:
2216 bad.extend(sub.add(ui, submatch, prefix, True, **opts))
2216 bad.extend(sub.add(ui, submatch, prefix, True, **opts))
2217 except error.LookupError:
2217 except error.LookupError:
2218 ui.status(_("skipping missing subrepository: %s\n")
2218 ui.status(_("skipping missing subrepository: %s\n")
2219 % join(subpath))
2219 % join(subpath))
2220
2220
2221 if not opts.get('dry_run'):
2221 if not opts.get('dry_run'):
2222 rejected = wctx.add(names, prefix)
2222 rejected = wctx.add(names, prefix)
2223 bad.extend(f for f in rejected if f in match.files())
2223 bad.extend(f for f in rejected if f in match.files())
2224 return bad
2224 return bad
2225
2225
2226 def forget(ui, repo, match, prefix, explicitonly):
2226 def forget(ui, repo, match, prefix, explicitonly):
2227 join = lambda f: os.path.join(prefix, f)
2227 join = lambda f: os.path.join(prefix, f)
2228 bad = []
2228 bad = []
2229 oldbad = match.bad
2229 oldbad = match.bad
2230 match.bad = lambda x, y: bad.append(x) or oldbad(x, y)
2230 match.bad = lambda x, y: bad.append(x) or oldbad(x, y)
2231 wctx = repo[None]
2231 wctx = repo[None]
2232 forgot = []
2232 forgot = []
2233 s = repo.status(match=match, clean=True)
2233 s = repo.status(match=match, clean=True)
2234 forget = sorted(s[0] + s[1] + s[3] + s[6])
2234 forget = sorted(s[0] + s[1] + s[3] + s[6])
2235 if explicitonly:
2235 if explicitonly:
2236 forget = [f for f in forget if match.exact(f)]
2236 forget = [f for f in forget if match.exact(f)]
2237
2237
2238 for subpath in sorted(wctx.substate):
2238 for subpath in sorted(wctx.substate):
2239 sub = wctx.sub(subpath)
2239 sub = wctx.sub(subpath)
2240 try:
2240 try:
2241 submatch = matchmod.narrowmatcher(subpath, match)
2241 submatch = matchmod.narrowmatcher(subpath, match)
2242 subbad, subforgot = sub.forget(submatch, prefix)
2242 subbad, subforgot = sub.forget(submatch, prefix)
2243 bad.extend([subpath + '/' + f for f in subbad])
2243 bad.extend([subpath + '/' + f for f in subbad])
2244 forgot.extend([subpath + '/' + f for f in subforgot])
2244 forgot.extend([subpath + '/' + f for f in subforgot])
2245 except error.LookupError:
2245 except error.LookupError:
2246 ui.status(_("skipping missing subrepository: %s\n")
2246 ui.status(_("skipping missing subrepository: %s\n")
2247 % join(subpath))
2247 % join(subpath))
2248
2248
2249 if not explicitonly:
2249 if not explicitonly:
2250 for f in match.files():
2250 for f in match.files():
2251 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2251 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2252 if f not in forgot:
2252 if f not in forgot:
2253 if repo.wvfs.exists(f):
2253 if repo.wvfs.exists(f):
2254 # Don't complain if the exact case match wasn't given.
2254 # Don't complain if the exact case match wasn't given.
2255 # But don't do this until after checking 'forgot', so
2255 # But don't do this until after checking 'forgot', so
2256 # that subrepo files aren't normalized, and this op is
2256 # that subrepo files aren't normalized, and this op is
2257 # purely from data cached by the status walk above.
2257 # purely from data cached by the status walk above.
2258 if repo.dirstate.normalize(f) in repo.dirstate:
2258 if repo.dirstate.normalize(f) in repo.dirstate:
2259 continue
2259 continue
2260 ui.warn(_('not removing %s: '
2260 ui.warn(_('not removing %s: '
2261 'file is already untracked\n')
2261 'file is already untracked\n')
2262 % match.rel(f))
2262 % match.rel(f))
2263 bad.append(f)
2263 bad.append(f)
2264
2264
2265 for f in forget:
2265 for f in forget:
2266 if ui.verbose or not match.exact(f):
2266 if ui.verbose or not match.exact(f):
2267 ui.status(_('removing %s\n') % match.rel(f))
2267 ui.status(_('removing %s\n') % match.rel(f))
2268
2268
2269 rejected = wctx.forget(forget, prefix)
2269 rejected = wctx.forget(forget, prefix)
2270 bad.extend(f for f in rejected if f in match.files())
2270 bad.extend(f for f in rejected if f in match.files())
2271 forgot.extend(f for f in forget if f not in rejected)
2271 forgot.extend(f for f in forget if f not in rejected)
2272 return bad, forgot
2272 return bad, forgot
2273
2273
2274 def files(ui, ctx, m, fm, fmt, subrepos):
2274 def files(ui, ctx, m, fm, fmt, subrepos):
2275 rev = ctx.rev()
2275 rev = ctx.rev()
2276 ret = 1
2276 ret = 1
2277 ds = ctx.repo().dirstate
2277 ds = ctx.repo().dirstate
2278
2278
2279 for f in ctx.matches(m):
2279 for f in ctx.matches(m):
2280 if rev is None and ds[f] == 'r':
2280 if rev is None and ds[f] == 'r':
2281 continue
2281 continue
2282 fm.startitem()
2282 fm.startitem()
2283 if ui.verbose:
2283 if ui.verbose:
2284 fc = ctx[f]
2284 fc = ctx[f]
2285 fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
2285 fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
2286 fm.data(abspath=f)
2286 fm.data(abspath=f)
2287 fm.write('path', fmt, m.rel(f))
2287 fm.write('path', fmt, m.rel(f))
2288 ret = 0
2288 ret = 0
2289
2289
2290 if subrepos:
2290 if subrepos:
2291 for subpath in sorted(ctx.substate):
2291 for subpath in sorted(ctx.substate):
2292 sub = ctx.sub(subpath)
2292 sub = ctx.sub(subpath)
2293 try:
2293 try:
2294 submatch = matchmod.narrowmatcher(subpath, m)
2294 submatch = matchmod.narrowmatcher(subpath, m)
2295 if sub.printfiles(ui, submatch, fm, fmt) == 0:
2295 if sub.printfiles(ui, submatch, fm, fmt) == 0:
2296 ret = 0
2296 ret = 0
2297 except error.LookupError:
2297 except error.LookupError:
2298 ui.status(_("skipping missing subrepository: %s\n")
2298 ui.status(_("skipping missing subrepository: %s\n")
2299 % m.abs(subpath))
2299 % m.abs(subpath))
2300
2300
2301 return ret
2301 return ret
2302
2302
2303 def remove(ui, repo, m, prefix, after, force, subrepos):
2303 def remove(ui, repo, m, prefix, after, force, subrepos):
2304 join = lambda f: os.path.join(prefix, f)
2304 join = lambda f: os.path.join(prefix, f)
2305 ret = 0
2305 ret = 0
2306 s = repo.status(match=m, clean=True)
2306 s = repo.status(match=m, clean=True)
2307 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2307 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2308
2308
2309 wctx = repo[None]
2309 wctx = repo[None]
2310
2310
2311 for subpath in sorted(wctx.substate):
2311 for subpath in sorted(wctx.substate):
2312 def matchessubrepo(matcher, subpath):
2312 def matchessubrepo(matcher, subpath):
2313 if matcher.exact(subpath):
2313 if matcher.exact(subpath):
2314 return True
2314 return True
2315 for f in matcher.files():
2315 for f in matcher.files():
2316 if f.startswith(subpath):
2316 if f.startswith(subpath):
2317 return True
2317 return True
2318 return False
2318 return False
2319
2319
2320 if subrepos or matchessubrepo(m, subpath):
2320 if subrepos or matchessubrepo(m, subpath):
2321 sub = wctx.sub(subpath)
2321 sub = wctx.sub(subpath)
2322 try:
2322 try:
2323 submatch = matchmod.narrowmatcher(subpath, m)
2323 submatch = matchmod.narrowmatcher(subpath, m)
2324 if sub.removefiles(submatch, prefix, after, force, subrepos):
2324 if sub.removefiles(submatch, prefix, after, force, subrepos):
2325 ret = 1
2325 ret = 1
2326 except error.LookupError:
2326 except error.LookupError:
2327 ui.status(_("skipping missing subrepository: %s\n")
2327 ui.status(_("skipping missing subrepository: %s\n")
2328 % join(subpath))
2328 % join(subpath))
2329
2329
2330 # warn about failure to delete explicit files/dirs
2330 # warn about failure to delete explicit files/dirs
2331 deleteddirs = util.dirs(deleted)
2331 deleteddirs = util.dirs(deleted)
2332 for f in m.files():
2332 for f in m.files():
2333 def insubrepo():
2333 def insubrepo():
2334 for subpath in wctx.substate:
2334 for subpath in wctx.substate:
2335 if f.startswith(subpath):
2335 if f.startswith(subpath):
2336 return True
2336 return True
2337 return False
2337 return False
2338
2338
2339 isdir = f in deleteddirs or f in wctx.dirs()
2339 isdir = f in deleteddirs or f in wctx.dirs()
2340 if f in repo.dirstate or isdir or f == '.' or insubrepo():
2340 if f in repo.dirstate or isdir or f == '.' or insubrepo():
2341 continue
2341 continue
2342
2342
2343 if repo.wvfs.exists(f):
2343 if repo.wvfs.exists(f):
2344 if repo.wvfs.isdir(f):
2344 if repo.wvfs.isdir(f):
2345 ui.warn(_('not removing %s: no tracked files\n')
2345 ui.warn(_('not removing %s: no tracked files\n')
2346 % m.rel(f))
2346 % m.rel(f))
2347 else:
2347 else:
2348 ui.warn(_('not removing %s: file is untracked\n')
2348 ui.warn(_('not removing %s: file is untracked\n')
2349 % m.rel(f))
2349 % m.rel(f))
2350 # missing files will generate a warning elsewhere
2350 # missing files will generate a warning elsewhere
2351 ret = 1
2351 ret = 1
2352
2352
2353 if force:
2353 if force:
2354 list = modified + deleted + clean + added
2354 list = modified + deleted + clean + added
2355 elif after:
2355 elif after:
2356 list = deleted
2356 list = deleted
2357 for f in modified + added + clean:
2357 for f in modified + added + clean:
2358 ui.warn(_('not removing %s: file still exists\n') % m.rel(f))
2358 ui.warn(_('not removing %s: file still exists\n') % m.rel(f))
2359 ret = 1
2359 ret = 1
2360 else:
2360 else:
2361 list = deleted + clean
2361 list = deleted + clean
2362 for f in modified:
2362 for f in modified:
2363 ui.warn(_('not removing %s: file is modified (use -f'
2363 ui.warn(_('not removing %s: file is modified (use -f'
2364 ' to force removal)\n') % m.rel(f))
2364 ' to force removal)\n') % m.rel(f))
2365 ret = 1
2365 ret = 1
2366 for f in added:
2366 for f in added:
2367 ui.warn(_('not removing %s: file has been marked for add'
2367 ui.warn(_('not removing %s: file has been marked for add'
2368 ' (use forget to undo)\n') % m.rel(f))
2368 ' (use forget to undo)\n') % m.rel(f))
2369 ret = 1
2369 ret = 1
2370
2370
2371 for f in sorted(list):
2371 for f in sorted(list):
2372 if ui.verbose or not m.exact(f):
2372 if ui.verbose or not m.exact(f):
2373 ui.status(_('removing %s\n') % m.rel(f))
2373 ui.status(_('removing %s\n') % m.rel(f))
2374
2374
2375 wlock = repo.wlock()
2375 wlock = repo.wlock()
2376 try:
2376 try:
2377 if not after:
2377 if not after:
2378 for f in list:
2378 for f in list:
2379 if f in added:
2379 if f in added:
2380 continue # we never unlink added files on remove
2380 continue # we never unlink added files on remove
2381 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
2381 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
2382 repo[None].forget(list)
2382 repo[None].forget(list)
2383 finally:
2383 finally:
2384 wlock.release()
2384 wlock.release()
2385
2385
2386 return ret
2386 return ret
2387
2387
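# remove() above decides which files may be dropped from tracking based on
# the --force/--after flags: without --force, modified and added files are
# refused. A hypothetical distillation of that selection (status lists are
# plain lists here, not Mercurial status objects):
def selectremovals(modified, added, deleted, clean, force=False, after=False):
    if force:
        return modified + deleted + clean + added, []
    if after:
        return deleted, modified + added + clean
    return deleted + clean, modified + added

removable, refused = selectremovals(['m'], ['a'], ['d'], ['c'])
assert removable == ['d', 'c'] and refused == ['m', 'a']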
2388 def cat(ui, repo, ctx, matcher, prefix, **opts):
2388 def cat(ui, repo, ctx, matcher, prefix, **opts):
2389 err = 1
2389 err = 1
2390
2390
2391 def write(path):
2391 def write(path):
2392 fp = makefileobj(repo, opts.get('output'), ctx.node(),
2392 fp = makefileobj(repo, opts.get('output'), ctx.node(),
2393 pathname=os.path.join(prefix, path))
2393 pathname=os.path.join(prefix, path))
2394 data = ctx[path].data()
2394 data = ctx[path].data()
2395 if opts.get('decode'):
2395 if opts.get('decode'):
2396 data = repo.wwritedata(path, data)
2396 data = repo.wwritedata(path, data)
2397 fp.write(data)
2397 fp.write(data)
2398 fp.close()
2398 fp.close()
2399
2399
2400 # Automation often uses hg cat on single files, so special case it
2400 # Automation often uses hg cat on single files, so special case it
2401 # for performance to avoid the cost of parsing the manifest.
2401 # for performance to avoid the cost of parsing the manifest.
2402 if len(matcher.files()) == 1 and not matcher.anypats():
2402 if len(matcher.files()) == 1 and not matcher.anypats():
2403 file = matcher.files()[0]
2403 file = matcher.files()[0]
2404 mf = repo.manifest
2404 mf = repo.manifest
2405 mfnode = ctx.manifestnode()
2405 mfnode = ctx.manifestnode()
2406 if mfnode and mf.find(mfnode, file)[0]:
2406 if mfnode and mf.find(mfnode, file)[0]:
2407 write(file)
2407 write(file)
2408 return 0
2408 return 0
2409
2409
2410 # Don't warn about "missing" files that are really in subrepos
2410 # Don't warn about "missing" files that are really in subrepos
2411 bad = matcher.bad
2411 bad = matcher.bad
2412
2412
2413 def badfn(path, msg):
2413 def badfn(path, msg):
2414 for subpath in ctx.substate:
2414 for subpath in ctx.substate:
2415 if path.startswith(subpath):
2415 if path.startswith(subpath):
2416 return
2416 return
2417 bad(path, msg)
2417 bad(path, msg)
2418
2418
2419 matcher.bad = badfn
2419 matcher.bad = badfn
2420
2420
2421 for abs in ctx.walk(matcher):
2421 for abs in ctx.walk(matcher):
2422 write(abs)
2422 write(abs)
2423 err = 0
2423 err = 0
2424
2424
2425 matcher.bad = bad
2425 matcher.bad = bad
2426
2426
2427 for subpath in sorted(ctx.substate):
2427 for subpath in sorted(ctx.substate):
2428 sub = ctx.sub(subpath)
2428 sub = ctx.sub(subpath)
2429 try:
2429 try:
2430 submatch = matchmod.narrowmatcher(subpath, matcher)
2430 submatch = matchmod.narrowmatcher(subpath, matcher)
2431
2431
2432 if not sub.cat(submatch, os.path.join(prefix, sub._path),
2432 if not sub.cat(submatch, os.path.join(prefix, sub._path),
2433 **opts):
2433 **opts):
2434 err = 0
2434 err = 0
2435 except error.RepoLookupError:
2435 except error.RepoLookupError:
2436 ui.status(_("skipping missing subrepository: %s\n")
2436 ui.status(_("skipping missing subrepository: %s\n")
2437 % os.path.join(prefix, subpath))
2437 % os.path.join(prefix, subpath))
2438
2438
2439 return err
2439 return err
2440
2440
2441 def commit(ui, repo, commitfunc, pats, opts):
2441 def commit(ui, repo, commitfunc, pats, opts):
2442 '''commit the specified files or all outstanding changes'''
2442 '''commit the specified files or all outstanding changes'''
2443 date = opts.get('date')
2443 date = opts.get('date')
2444 if date:
2444 if date:
2445 opts['date'] = util.parsedate(date)
2445 opts['date'] = util.parsedate(date)
2446 message = logmessage(ui, opts)
2446 message = logmessage(ui, opts)
2447 matcher = scmutil.match(repo[None], pats, opts)
2447 matcher = scmutil.match(repo[None], pats, opts)
2448
2448
2449 # extract addremove carefully -- this function can be called from a command
2449 # extract addremove carefully -- this function can be called from a command
2450 # that doesn't support addremove
2450 # that doesn't support addremove
2451 if opts.get('addremove'):
2451 if opts.get('addremove'):
2452 if scmutil.addremove(repo, matcher, "", opts) != 0:
2452 if scmutil.addremove(repo, matcher, "", opts) != 0:
2453 raise util.Abort(
2453 raise util.Abort(
2454 _("failed to mark all new/missing files as added/removed"))
2454 _("failed to mark all new/missing files as added/removed"))
2455
2455
2456 return commitfunc(ui, repo, message, matcher, opts)
2456 return commitfunc(ui, repo, message, matcher, opts)
2457
2457
2458 def amend(ui, repo, commitfunc, old, extra, pats, opts):
2458 def amend(ui, repo, commitfunc, old, extra, pats, opts):
2459 # amend will reuse the existing user if not specified, but the obsolete
2459 # amend will reuse the existing user if not specified, but the obsolete
2460 # marker creation requires that the current user's name is specified.
2460 # marker creation requires that the current user's name is specified.
2461 if obsolete.isenabled(repo, obsolete.createmarkersopt):
2461 if obsolete.isenabled(repo, obsolete.createmarkersopt):
2462 ui.username() # raise exception if username not set
2462 ui.username() # raise exception if username not set
2463
2463
2464 ui.note(_('amending changeset %s\n') % old)
2464 ui.note(_('amending changeset %s\n') % old)
2465 base = old.p1()
2465 base = old.p1()
2466
2466
2467 wlock = lock = newid = None
2467 wlock = lock = newid = None
2468 try:
2468 try:
2469 wlock = repo.wlock()
2469 wlock = repo.wlock()
2470 lock = repo.lock()
2470 lock = repo.lock()
2471 tr = repo.transaction('amend')
2471 tr = repo.transaction('amend')
2472 try:
2472 try:
2473 # See if we got a message from -m or -l, if not, open the editor
2473 # See if we got a message from -m or -l, if not, open the editor
2474 # with the message of the changeset to amend
2474 # with the message of the changeset to amend
2475 message = logmessage(ui, opts)
2475 message = logmessage(ui, opts)
2476 # ensure logfile does not conflict with later enforcement of the
2476 # ensure logfile does not conflict with later enforcement of the
2477 # message. potential logfile content has been processed by
2477 # message. potential logfile content has been processed by
2478 # `logmessage` anyway.
2478 # `logmessage` anyway.
2479 opts.pop('logfile')
2479 opts.pop('logfile')
2480 # First, do a regular commit to record all changes in the working
2480 # First, do a regular commit to record all changes in the working
2481 # directory (if there are any)
2481 # directory (if there are any)
2482 ui.callhooks = False
2482 ui.callhooks = False
2483 currentbookmark = repo._bookmarkcurrent
2483 currentbookmark = repo._activebookmark
2484 try:
2484 try:
2485 repo._bookmarkcurrent = None
2485 repo._activebookmark = None
2486 opts['message'] = 'temporary amend commit for %s' % old
2486 opts['message'] = 'temporary amend commit for %s' % old
2487 node = commit(ui, repo, commitfunc, pats, opts)
2487 node = commit(ui, repo, commitfunc, pats, opts)
2488 finally:
2488 finally:
2489 repo._bookmarkcurrent = currentbookmark
2489 repo._activebookmark = currentbookmark
2490 ui.callhooks = True
2490 ui.callhooks = True
2491 ctx = repo[node]
2491 ctx = repo[node]
2492
2492
2493 # Participating changesets:
2493 # Participating changesets:
2494 #
2494 #
2495 # node/ctx o - new (intermediate) commit that contains changes
2495 # node/ctx o - new (intermediate) commit that contains changes
2496 # | from working dir to go into amending commit
2496 # | from working dir to go into amending commit
2497 # | (or a workingctx if there were no changes)
2497 # | (or a workingctx if there were no changes)
2498 # |
2498 # |
2499 # old o - changeset to amend
2499 # old o - changeset to amend
2500 # |
2500 # |
2501 # base o - parent of amending changeset
2501 # base o - parent of amending changeset
2502
2502
2503 # Update extra dict from amended commit (e.g. to preserve graft
2503 # Update extra dict from amended commit (e.g. to preserve graft
2504 # source)
2504 # source)
2505 extra.update(old.extra())
2505 extra.update(old.extra())
2506
2506
2507 # Also update it from the intermediate commit or from the wctx
2507 # Also update it from the intermediate commit or from the wctx
2508 extra.update(ctx.extra())
2508 extra.update(ctx.extra())
2509
2509
2510 if len(old.parents()) > 1:
2510 if len(old.parents()) > 1:
2511 # ctx.files() isn't reliable for merges, so fall back to the
2511 # ctx.files() isn't reliable for merges, so fall back to the
2512 # slower repo.status() method
2512 # slower repo.status() method
2513 files = set([fn for st in repo.status(base, old)[:3]
2513 files = set([fn for st in repo.status(base, old)[:3]
2514 for fn in st])
2514 for fn in st])
2515 else:
2515 else:
2516 files = set(old.files())
2516 files = set(old.files())
2517
2517
2518 # Second, we use either the commit we just did, or if there were no
2518 # Second, we use either the commit we just did, or if there were no
2519 # changes the parent of the working directory as the version of the
2519 # changes the parent of the working directory as the version of the
2520 # files in the final amend commit
2520 # files in the final amend commit
2521 if node:
2521 if node:
2522 ui.note(_('copying changeset %s to %s\n') % (ctx, base))
2522 ui.note(_('copying changeset %s to %s\n') % (ctx, base))
2523
2523
2524 user = ctx.user()
2524 user = ctx.user()
2525 date = ctx.date()
2525 date = ctx.date()
2526 # Recompute copies (avoid recording a -> b -> a)
2526 # Recompute copies (avoid recording a -> b -> a)
2527 copied = copies.pathcopies(base, ctx)
2527 copied = copies.pathcopies(base, ctx)
2528 if old.p2():
2528 if old.p2():
2529 copied.update(copies.pathcopies(old.p2(), ctx))
2529 copied.update(copies.pathcopies(old.p2(), ctx))
2530
2530
2531 # Prune files which were reverted by the updates: if old
2531 # Prune files which were reverted by the updates: if old
2532 # introduced file X and our intermediate commit, node,
2532 # introduced file X and our intermediate commit, node,
2533 # renamed that file, then those two files are the same and
2533 # renamed that file, then those two files are the same and
2534 # we can discard X from our list of files. Likewise if X
2534 # we can discard X from our list of files. Likewise if X
2535 # was deleted, it's no longer relevant
2535 # was deleted, it's no longer relevant
2536 files.update(ctx.files())
2536 files.update(ctx.files())
2537
2537
2538 def samefile(f):
2538 def samefile(f):
2539 if f in ctx.manifest():
2539 if f in ctx.manifest():
2540 a = ctx.filectx(f)
2540 a = ctx.filectx(f)
2541 if f in base.manifest():
2541 if f in base.manifest():
2542 b = base.filectx(f)
2542 b = base.filectx(f)
2543 return (not a.cmp(b)
2543 return (not a.cmp(b)
2544 and a.flags() == b.flags())
2544 and a.flags() == b.flags())
2545 else:
2545 else:
2546 return False
2546 return False
2547 else:
2547 else:
2548 return f not in base.manifest()
2548 return f not in base.manifest()
2549 files = [f for f in files if not samefile(f)]
2549 files = [f for f in files if not samefile(f)]
2550
2550
2551 def filectxfn(repo, ctx_, path):
2551 def filectxfn(repo, ctx_, path):
2552 try:
2552 try:
2553 fctx = ctx[path]
2553 fctx = ctx[path]
2554 flags = fctx.flags()
2554 flags = fctx.flags()
2555 mctx = context.memfilectx(repo,
2555 mctx = context.memfilectx(repo,
2556 fctx.path(), fctx.data(),
2556 fctx.path(), fctx.data(),
2557 islink='l' in flags,
2557 islink='l' in flags,
2558 isexec='x' in flags,
2558 isexec='x' in flags,
2559 copied=copied.get(path))
2559 copied=copied.get(path))
2560 return mctx
2560 return mctx
2561 except KeyError:
2561 except KeyError:
2562 return None
2562 return None
2563 else:
2563 else:
2564 ui.note(_('copying changeset %s to %s\n') % (old, base))
2564 ui.note(_('copying changeset %s to %s\n') % (old, base))
2565
2565
2566 # Use version of files as in the old cset
2566 # Use version of files as in the old cset
2567 def filectxfn(repo, ctx_, path):
2567 def filectxfn(repo, ctx_, path):
2568 try:
2568 try:
2569 return old.filectx(path)
2569 return old.filectx(path)
2570 except KeyError:
2570 except KeyError:
2571 return None
2571 return None
2572
2572
2573 user = opts.get('user') or old.user()
2573 user = opts.get('user') or old.user()
2574 date = opts.get('date') or old.date()
2574 date = opts.get('date') or old.date()
2575 editform = mergeeditform(old, 'commit.amend')
2575 editform = mergeeditform(old, 'commit.amend')
2576 editor = getcommiteditor(editform=editform, **opts)
2576 editor = getcommiteditor(editform=editform, **opts)
2577 if not message:
2577 if not message:
2578 editor = getcommiteditor(edit=True, editform=editform)
2578 editor = getcommiteditor(edit=True, editform=editform)
2579 message = old.description()
2579 message = old.description()
2580
2580
2581 pureextra = extra.copy()
2581 pureextra = extra.copy()
2582 extra['amend_source'] = old.hex()
2582 extra['amend_source'] = old.hex()
2583
2583
2584 new = context.memctx(repo,
2584 new = context.memctx(repo,
2585 parents=[base.node(), old.p2().node()],
2585 parents=[base.node(), old.p2().node()],
2586 text=message,
2586 text=message,
2587 files=files,
2587 files=files,
2588 filectxfn=filectxfn,
2588 filectxfn=filectxfn,
2589 user=user,
2589 user=user,
2590 date=date,
2590 date=date,
2591 extra=extra,
2591 extra=extra,
2592 editor=editor)
2592 editor=editor)
2593
2593
2594 newdesc = changelog.stripdesc(new.description())
2594 newdesc = changelog.stripdesc(new.description())
2595 if ((not node)
2595 if ((not node)
2596 and newdesc == old.description()
2596 and newdesc == old.description()
2597 and user == old.user()
2597 and user == old.user()
2598 and date == old.date()
2598 and date == old.date()
2599 and pureextra == old.extra()):
2599 and pureextra == old.extra()):
2600 # nothing changed. continuing here would create a new node
2600 # nothing changed. continuing here would create a new node
2601 # anyway because of the amend_source noise.
2601 # anyway because of the amend_source noise.
2602 #
2602 #
2603 # This is not what we expect from amend.
2603 # This is not what we expect from amend.
2604 return old.node()
2604 return old.node()
2605
2605
2606 ph = repo.ui.config('phases', 'new-commit', phases.draft)
2606 ph = repo.ui.config('phases', 'new-commit', phases.draft)
2607 try:
2607 try:
2608 if opts.get('secret'):
2608 if opts.get('secret'):
2609 commitphase = 'secret'
2609 commitphase = 'secret'
2610 else:
2610 else:
2611 commitphase = old.phase()
2611 commitphase = old.phase()
2612 repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
2612 repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
2613 newid = repo.commitctx(new)
2613 newid = repo.commitctx(new)
2614 finally:
2614 finally:
2615 repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
2615 repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
2616 if newid != old.node():
2616 if newid != old.node():
2617 # Reroute the working copy parent to the new changeset
2617 # Reroute the working copy parent to the new changeset
2618 repo.setparents(newid, nullid)
2618 repo.setparents(newid, nullid)
2619
2619
2620 # Move bookmarks from old parent to amend commit
2620 # Move bookmarks from old parent to amend commit
2621 bms = repo.nodebookmarks(old.node())
2621 bms = repo.nodebookmarks(old.node())
2622 if bms:
2622 if bms:
2623 marks = repo._bookmarks
2623 marks = repo._bookmarks
2624 for bm in bms:
2624 for bm in bms:
2625 marks[bm] = newid
2625 marks[bm] = newid
2626 marks.write()
2626 marks.write()
2627 # commit the whole amend process
2627 # commit the whole amend process
2628 createmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)
2628 createmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)
2629 if createmarkers and newid != old.node():
2629 if createmarkers and newid != old.node():
2630 # mark the new changeset as successor of the rewritten one
2630 # mark the new changeset as successor of the rewritten one
2631 new = repo[newid]
2631 new = repo[newid]
2632 obs = [(old, (new,))]
2632 obs = [(old, (new,))]
2633 if node:
2633 if node:
2634 obs.append((ctx, ()))
2634 obs.append((ctx, ()))
2635
2635
2636 obsolete.createmarkers(repo, obs)
2636 obsolete.createmarkers(repo, obs)
2637 tr.close()
2637 tr.close()
2638 finally:
2638 finally:
2639 tr.release()
2639 tr.release()
2640 if not createmarkers and newid != old.node():
2640 if not createmarkers and newid != old.node():
2641 # Strip the intermediate commit (if there was one) and the amended
2641 # Strip the intermediate commit (if there was one) and the amended
2642 # commit
2642 # commit
2643 if node:
2643 if node:
2644 ui.note(_('stripping intermediate changeset %s\n') % ctx)
2644 ui.note(_('stripping intermediate changeset %s\n') % ctx)
2645 ui.note(_('stripping amended changeset %s\n') % old)
2645 ui.note(_('stripping amended changeset %s\n') % old)
2646 repair.strip(ui, repo, old.node(), topic='amend-backup')
2646 repair.strip(ui, repo, old.node(), topic='amend-backup')
2647 finally:
2647 finally:
2648 if newid is None:
2648 if newid is None:
2649 repo.dirstate.invalidate()
2649 repo.dirstate.invalidate()
2650 lockmod.release(lock, wlock)
2650 lockmod.release(lock, wlock)
2651 return newid
2651 return newid
2652
2652
2653 def commiteditor(repo, ctx, subs, editform=''):
2653 def commiteditor(repo, ctx, subs, editform=''):
2654 if ctx.description():
2654 if ctx.description():
2655 return ctx.description()
2655 return ctx.description()
2656 return commitforceeditor(repo, ctx, subs, editform=editform)
2656 return commitforceeditor(repo, ctx, subs, editform=editform)
2657
2657
2658 def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
2658 def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
2659 editform=''):
2659 editform=''):
2660 if not extramsg:
2660 if not extramsg:
2661 extramsg = _("Leave message empty to abort commit.")
2661 extramsg = _("Leave message empty to abort commit.")
2662
2662
2663 forms = [e for e in editform.split('.') if e]
2663 forms = [e for e in editform.split('.') if e]
2664 forms.insert(0, 'changeset')
2664 forms.insert(0, 'changeset')
2665 while forms:
2665 while forms:
2666 tmpl = repo.ui.config('committemplate', '.'.join(forms))
2666 tmpl = repo.ui.config('committemplate', '.'.join(forms))
2667 if tmpl:
2667 if tmpl:
2668 committext = buildcommittemplate(repo, ctx, subs, extramsg, tmpl)
2668 committext = buildcommittemplate(repo, ctx, subs, extramsg, tmpl)
2669 break
2669 break
2670 forms.pop()
2670 forms.pop()
2671 else:
2671 else:
2672 committext = buildcommittext(repo, ctx, subs, extramsg)
2672 committext = buildcommittext(repo, ctx, subs, extramsg)
2673
2673
2674 # run editor in the repository root
2674 # run editor in the repository root
2675 olddir = os.getcwd()
2675 olddir = os.getcwd()
2676 os.chdir(repo.root)
2676 os.chdir(repo.root)
2677 text = repo.ui.edit(committext, ctx.user(), ctx.extra(), editform=editform)
2677 text = repo.ui.edit(committext, ctx.user(), ctx.extra(), editform=editform)
2678 text = re.sub("(?m)^HG:.*(\n|$)", "", text)
2678 text = re.sub("(?m)^HG:.*(\n|$)", "", text)
2679 os.chdir(olddir)
2679 os.chdir(olddir)
2680
2680
2681 if finishdesc:
2681 if finishdesc:
2682 text = finishdesc(text)
2682 text = finishdesc(text)
2683 if not text.strip():
2683 if not text.strip():
2684 raise util.Abort(_("empty commit message"))
2684 raise util.Abort(_("empty commit message"))
2685
2685
2686 return text
2686 return text
2687
2687
2688 def buildcommittemplate(repo, ctx, subs, extramsg, tmpl):
2688 def buildcommittemplate(repo, ctx, subs, extramsg, tmpl):
2689 ui = repo.ui
2689 ui = repo.ui
2690 tmpl, mapfile = gettemplate(ui, tmpl, None)
2690 tmpl, mapfile = gettemplate(ui, tmpl, None)
2691
2691
2692 try:
2692 try:
2693 t = changeset_templater(ui, repo, None, {}, tmpl, mapfile, False)
2693 t = changeset_templater(ui, repo, None, {}, tmpl, mapfile, False)
2694 except SyntaxError, inst:
2694 except SyntaxError, inst:
2695 raise util.Abort(inst.args[0])
2695 raise util.Abort(inst.args[0])
2696
2696
2697 for k, v in repo.ui.configitems('committemplate'):
2697 for k, v in repo.ui.configitems('committemplate'):
2698 if k != 'changeset':
2698 if k != 'changeset':
2699 t.t.cache[k] = v
2699 t.t.cache[k] = v
2700
2700
2701 if not extramsg:
2701 if not extramsg:
2702 extramsg = '' # ensure that extramsg is string
2702 extramsg = '' # ensure that extramsg is string
2703
2703
2704 ui.pushbuffer()
2704 ui.pushbuffer()
2705 t.show(ctx, extramsg=extramsg)
2705 t.show(ctx, extramsg=extramsg)
2706 return ui.popbuffer()
2706 return ui.popbuffer()
2707
2707
2708 def buildcommittext(repo, ctx, subs, extramsg):
2708 def buildcommittext(repo, ctx, subs, extramsg):
2709 edittext = []
2709 edittext = []
2710 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
2710 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
2711 if ctx.description():
2711 if ctx.description():
2712 edittext.append(ctx.description())
2712 edittext.append(ctx.description())
2713 edittext.append("")
2713 edittext.append("")
2714 edittext.append("") # Empty line between message and comments.
2714 edittext.append("") # Empty line between message and comments.
2715 edittext.append(_("HG: Enter commit message."
2715 edittext.append(_("HG: Enter commit message."
2716 " Lines beginning with 'HG:' are removed."))
2716 " Lines beginning with 'HG:' are removed."))
2717 edittext.append("HG: %s" % extramsg)
2717 edittext.append("HG: %s" % extramsg)
2718 edittext.append("HG: --")
2718 edittext.append("HG: --")
2719 edittext.append(_("HG: user: %s") % ctx.user())
2719 edittext.append(_("HG: user: %s") % ctx.user())
2720 if ctx.p2():
2720 if ctx.p2():
2721 edittext.append(_("HG: branch merge"))
2721 edittext.append(_("HG: branch merge"))
2722 if ctx.branch():
2722 if ctx.branch():
2723 edittext.append(_("HG: branch '%s'") % ctx.branch())
2723 edittext.append(_("HG: branch '%s'") % ctx.branch())
2724 if bookmarks.iscurrent(repo):
2724 if bookmarks.iscurrent(repo):
2725 edittext.append(_("HG: bookmark '%s'") % repo._bookmarkcurrent)
2725 edittext.append(_("HG: bookmark '%s'") % repo._activebookmark)
2726 edittext.extend([_("HG: subrepo %s") % s for s in subs])
2726 edittext.extend([_("HG: subrepo %s") % s for s in subs])
2727 edittext.extend([_("HG: added %s") % f for f in added])
2727 edittext.extend([_("HG: added %s") % f for f in added])
2728 edittext.extend([_("HG: changed %s") % f for f in modified])
2728 edittext.extend([_("HG: changed %s") % f for f in modified])
2729 edittext.extend([_("HG: removed %s") % f for f in removed])
2729 edittext.extend([_("HG: removed %s") % f for f in removed])
2730 if not added and not modified and not removed:
2730 if not added and not modified and not removed:
2731 edittext.append(_("HG: no files changed"))
2731 edittext.append(_("HG: no files changed"))
2732 edittext.append("")
2732 edittext.append("")
2733
2733
2734 return "\n".join(edittext)
2734 return "\n".join(edittext)
2735
2735
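# Illustrative aside (not part of the upstream file): for a clean working copy
# with a single modified file on the default branch, buildcommittext() returns
# a skeleton roughly like the one below (user name and file are hypothetical,
# and the exact lines depend on the context):
#
#     HG: Enter commit message. Lines beginning with 'HG:' are removed.
#     HG: Leave message empty to abort commit.
#     HG: --
#     HG: user: Jane Doe <jane@example.com>
#     HG: branch 'default'
#     HG: changed foo.c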
2736 def commitstatus(repo, node, branch, bheads=None, opts={}):
2736 def commitstatus(repo, node, branch, bheads=None, opts={}):
2737 ctx = repo[node]
2737 ctx = repo[node]
2738 parents = ctx.parents()
2738 parents = ctx.parents()
2739
2739
2740 if (not opts.get('amend') and bheads and node not in bheads and not
2740 if (not opts.get('amend') and bheads and node not in bheads and not
2741 [x for x in parents if x.node() in bheads and x.branch() == branch]):
2741 [x for x in parents if x.node() in bheads and x.branch() == branch]):
2742 repo.ui.status(_('created new head\n'))
2742 repo.ui.status(_('created new head\n'))
2743 # The message is not printed for initial roots. For the other
2743 # The message is not printed for initial roots. For the other
2744 # changesets, it is printed in the following situations:
2744 # changesets, it is printed in the following situations:
2745 #
2745 #
2746 # Par column: for the 2 parents with ...
2746 # Par column: for the 2 parents with ...
2747 # N: null or no parent
2747 # N: null or no parent
2748 # B: parent is on another named branch
2748 # B: parent is on another named branch
2749 # C: parent is a regular non head changeset
2749 # C: parent is a regular non head changeset
2750 # H: parent was a branch head of the current branch
2750 # H: parent was a branch head of the current branch
2751 # Msg column: whether we print "created new head" message
2751 # Msg column: whether we print "created new head" message
2752 # In the following, it is assumed that there already exists some
2752 # In the following, it is assumed that there already exists some
2753 # initial branch heads of the current branch, otherwise nothing is
2753 # initial branch heads of the current branch, otherwise nothing is
2754 # printed anyway.
2754 # printed anyway.
2755 #
2755 #
2756 # Par Msg Comment
2756 # Par Msg Comment
2757 # N N y additional topo root
2757 # N N y additional topo root
2758 #
2758 #
2759 # B N y additional branch root
2759 # B N y additional branch root
2760 # C N y additional topo head
2760 # C N y additional topo head
2761 # H N n usual case
2761 # H N n usual case
2762 #
2762 #
2763 # B B y weird additional branch root
2763 # B B y weird additional branch root
2764 # C B y branch merge
2764 # C B y branch merge
2765 # H B n merge with named branch
2765 # H B n merge with named branch
2766 #
2766 #
2767 # C C y additional head from merge
2767 # C C y additional head from merge
2768 # C H n merge with a head
2768 # C H n merge with a head
2769 #
2769 #
2770 # H H n head merge: head count decreases
2770 # H H n head merge: head count decreases
2771
2771
2772 if not opts.get('close_branch'):
2772 if not opts.get('close_branch'):
2773 for r in parents:
2773 for r in parents:
2774 if r.closesbranch() and r.branch() == branch:
2774 if r.closesbranch() and r.branch() == branch:
2775 repo.ui.status(_('reopening closed branch head %d\n') % r)
2775 repo.ui.status(_('reopening closed branch head %d\n') % r)
2776
2776
2777 if repo.ui.debugflag:
2777 if repo.ui.debugflag:
2778 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
2778 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
2779 elif repo.ui.verbose:
2779 elif repo.ui.verbose:
2780 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
2780 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
2781
2781
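# Illustrative aside (not part of the upstream file): the "created new head"
# check above reduces to a small predicate. A self-contained sketch mirroring
# that condition; all names below are hypothetical:
def _examplecreatesnewhead(node, parents, bheads, branch, amending=False):
    """Return True if committing `node` would print 'created new head'.

    `parents` is a list of (parentnode, parentbranch) pairs and `bheads`
    holds the branch heads that existed before the commit.
    """
    if amending or not bheads or node in bheads:
        return False
    # a new head appears when no parent was a head of the same named branch
    return not [p for p, b in parents if p in bheads and b == branch]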
2782 def revert(ui, repo, ctx, parents, *pats, **opts):
2782 def revert(ui, repo, ctx, parents, *pats, **opts):
2783 parent, p2 = parents
2783 parent, p2 = parents
2784 node = ctx.node()
2784 node = ctx.node()
2785
2785
2786 mf = ctx.manifest()
2786 mf = ctx.manifest()
2787 if node == p2:
2787 if node == p2:
2788 parent = p2
2788 parent = p2
2789 if node == parent:
2789 if node == parent:
2790 pmf = mf
2790 pmf = mf
2791 else:
2791 else:
2792 pmf = None
2792 pmf = None
2793
2793
2794 # need all matching names in dirstate and manifest of target rev,
2794 # need all matching names in dirstate and manifest of target rev,
2795 # so have to walk both. do not print errors if files exist in one
2795 # so have to walk both. do not print errors if files exist in one
2796 # but not the other. in both cases, filesets should be evaluated against
2796 # but not the other. in both cases, filesets should be evaluated against
2797 # workingctx to get consistent result (issue4497). this means 'set:**'
2797 # workingctx to get consistent result (issue4497). this means 'set:**'
2798 # cannot be used to select missing files from target rev.
2798 # cannot be used to select missing files from target rev.
2799
2799
2800 # `names` is a mapping for all elements in working copy and target revision
2800 # `names` is a mapping for all elements in working copy and target revision
2801 # The mapping is in the form:
2801 # The mapping is in the form:
2802 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
2802 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
2803 names = {}
2803 names = {}
2804
2804
2805 wlock = repo.wlock()
2805 wlock = repo.wlock()
2806 try:
2806 try:
2807 ## filling of the `names` mapping
2807 ## filling of the `names` mapping
2808 # walk dirstate to fill `names`
2808 # walk dirstate to fill `names`
2809
2809
2810 interactive = opts.get('interactive', False)
2810 interactive = opts.get('interactive', False)
2811 wctx = repo[None]
2811 wctx = repo[None]
2812 m = scmutil.match(wctx, pats, opts)
2812 m = scmutil.match(wctx, pats, opts)
2813
2813
2814 # we'll need this later
2814 # we'll need this later
2815 targetsubs = sorted(s for s in wctx.substate if m(s))
2815 targetsubs = sorted(s for s in wctx.substate if m(s))
2816
2816
2817 if not m.always():
2817 if not m.always():
2818 m.bad = lambda x, y: False
2818 m.bad = lambda x, y: False
2819 for abs in repo.walk(m):
2819 for abs in repo.walk(m):
2820 names[abs] = m.rel(abs), m.exact(abs)
2820 names[abs] = m.rel(abs), m.exact(abs)
2821
2821
2822 # walk target manifest to fill `names`
2822 # walk target manifest to fill `names`
2823
2823
2824 def badfn(path, msg):
2824 def badfn(path, msg):
2825 if path in names:
2825 if path in names:
2826 return
2826 return
2827 if path in ctx.substate:
2827 if path in ctx.substate:
2828 return
2828 return
2829 path_ = path + '/'
2829 path_ = path + '/'
2830 for f in names:
2830 for f in names:
2831 if f.startswith(path_):
2831 if f.startswith(path_):
2832 return
2832 return
2833 ui.warn("%s: %s\n" % (m.rel(path), msg))
2833 ui.warn("%s: %s\n" % (m.rel(path), msg))
2834
2834
2835 m.bad = badfn
2835 m.bad = badfn
2836 for abs in ctx.walk(m):
2836 for abs in ctx.walk(m):
2837 if abs not in names:
2837 if abs not in names:
2838 names[abs] = m.rel(abs), m.exact(abs)
2838 names[abs] = m.rel(abs), m.exact(abs)
2839
2839
2840 # Find status of all files in `names`.
2840 # Find status of all files in `names`.
2841 m = scmutil.matchfiles(repo, names)
2841 m = scmutil.matchfiles(repo, names)
2842
2842
2843 changes = repo.status(node1=node, match=m,
2843 changes = repo.status(node1=node, match=m,
2844 unknown=True, ignored=True, clean=True)
2844 unknown=True, ignored=True, clean=True)
2845 else:
2845 else:
2846 changes = repo.status(node1=node, match=m)
2846 changes = repo.status(node1=node, match=m)
2847 for kind in changes:
2847 for kind in changes:
2848 for abs in kind:
2848 for abs in kind:
2849 names[abs] = m.rel(abs), m.exact(abs)
2849 names[abs] = m.rel(abs), m.exact(abs)
2850
2850
2851 m = scmutil.matchfiles(repo, names)
2851 m = scmutil.matchfiles(repo, names)
2852
2852
2853 modified = set(changes.modified)
2853 modified = set(changes.modified)
2854 added = set(changes.added)
2854 added = set(changes.added)
2855 removed = set(changes.removed)
2855 removed = set(changes.removed)
2856 _deleted = set(changes.deleted)
2856 _deleted = set(changes.deleted)
2857 unknown = set(changes.unknown)
2857 unknown = set(changes.unknown)
2858 unknown.update(changes.ignored)
2858 unknown.update(changes.ignored)
2859 clean = set(changes.clean)
2859 clean = set(changes.clean)
2860 modadded = set()
2860 modadded = set()
2861
2861
2862 # split between files known in target manifest and the others
2862 # split between files known in target manifest and the others
2863 smf = set(mf)
2863 smf = set(mf)
2864
2864
2865 # determine the exact nature of the deleted files
2865 # determine the exact nature of the deleted files
2866 deladded = _deleted - smf
2866 deladded = _deleted - smf
2867 deleted = _deleted - deladded
2867 deleted = _deleted - deladded
2868
2868
2869 # We need to account for the state of the file in the dirstate,
2869 # We need to account for the state of the file in the dirstate,
2870 # even when we revert against something other than the parent. This will
2870 # even when we revert against something other than the parent. This will
2871 # slightly alter the behavior of revert (backing up or not, deleting
2871 # slightly alter the behavior of revert (backing up or not, deleting
2872 # or just forgetting, etc.).
2872 # or just forgetting, etc.).
2873 if parent == node:
2873 if parent == node:
2874 dsmodified = modified
2874 dsmodified = modified
2875 dsadded = added
2875 dsadded = added
2876 dsremoved = removed
2876 dsremoved = removed
2877 # store all local modifications, useful later for rename detection
2877 # store all local modifications, useful later for rename detection
2878 localchanges = dsmodified | dsadded
2878 localchanges = dsmodified | dsadded
2879 modified, added, removed = set(), set(), set()
2879 modified, added, removed = set(), set(), set()
2880 else:
2880 else:
2881 changes = repo.status(node1=parent, match=m)
2881 changes = repo.status(node1=parent, match=m)
2882 dsmodified = set(changes.modified)
2882 dsmodified = set(changes.modified)
2883 dsadded = set(changes.added)
2883 dsadded = set(changes.added)
2884 dsremoved = set(changes.removed)
2884 dsremoved = set(changes.removed)
2885 # store all local modifications, useful later for rename detection
2885 # store all local modifications, useful later for rename detection
2886 localchanges = dsmodified | dsadded
2886 localchanges = dsmodified | dsadded
2887
2887
2888 # only take removes between wc and target into account
2888 # only take removes between wc and target into account
2889 clean |= dsremoved - removed
2889 clean |= dsremoved - removed
2890 dsremoved &= removed
2890 dsremoved &= removed
2891 # distinguish between dirstate removes and others
2891 # distinguish between dirstate removes and others
2892 removed -= dsremoved
2892 removed -= dsremoved
2893
2893
2894 modadded = added & dsmodified
2894 modadded = added & dsmodified
2895 added -= modadded
2895 added -= modadded
2896
2896
2897 # tell newly modified files apart.
2897 # tell newly modified files apart.
2898 dsmodified &= modified
2898 dsmodified &= modified
2899 dsmodified |= modified & dsadded # dirstate added may need backup
2899 dsmodified |= modified & dsadded # dirstate added may need backup
2900 modified -= dsmodified
2900 modified -= dsmodified
2901
2901
2902 # We need to wait for some post-processing to update this set
2902 # We need to wait for some post-processing to update this set
2903 # before making the distinction. The dirstate will be used for
2903 # before making the distinction. The dirstate will be used for
2904 # that purpose.
2904 # that purpose.
2905 dsadded = added
2905 dsadded = added
2906
2906
2907 # in case of merge, files that are actually added can be reported as
2907 # in case of merge, files that are actually added can be reported as
2908 # modified, we need to post process the result
2908 # modified, we need to post process the result
2909 if p2 != nullid:
2909 if p2 != nullid:
2910 if pmf is None:
2910 if pmf is None:
2911 # only need parent manifest in the merge case,
2911 # only need parent manifest in the merge case,
2912 # so do not read by default
2912 # so do not read by default
2913 pmf = repo[parent].manifest()
2913 pmf = repo[parent].manifest()
2914 mergeadd = dsmodified - set(pmf)
2914 mergeadd = dsmodified - set(pmf)
2915 dsadded |= mergeadd
2915 dsadded |= mergeadd
2916 dsmodified -= mergeadd
2916 dsmodified -= mergeadd
2917
2917
2918 # if f is a rename, update `names` to also revert the source
2918 # if f is a rename, update `names` to also revert the source
2919 cwd = repo.getcwd()
2919 cwd = repo.getcwd()
2920 for f in localchanges:
2920 for f in localchanges:
2921 src = repo.dirstate.copied(f)
2921 src = repo.dirstate.copied(f)
2922 # XXX should we check for rename down to target node?
2922 # XXX should we check for rename down to target node?
2923 if src and src not in names and repo.dirstate[src] == 'r':
2923 if src and src not in names and repo.dirstate[src] == 'r':
2924 dsremoved.add(src)
2924 dsremoved.add(src)
2925 names[src] = (repo.pathto(src, cwd), True)
2925 names[src] = (repo.pathto(src, cwd), True)
2926
2926
2927 # distinguish between files to forget and the others
2927 # distinguish between files to forget and the others
2928 added = set()
2928 added = set()
2929 for abs in dsadded:
2929 for abs in dsadded:
2930 if repo.dirstate[abs] != 'a':
2930 if repo.dirstate[abs] != 'a':
2931 added.add(abs)
2931 added.add(abs)
2932 dsadded -= added
2932 dsadded -= added
2933
2933
2934 for abs in deladded:
2934 for abs in deladded:
2935 if repo.dirstate[abs] == 'a':
2935 if repo.dirstate[abs] == 'a':
2936 dsadded.add(abs)
2936 dsadded.add(abs)
2937 deladded -= dsadded
2937 deladded -= dsadded
2938
2938
2939 # For files marked as removed, we check if an unknown file is present at
2939 # For files marked as removed, we check if an unknown file is present at
2940 # the same path. If such a file exists, it may need to be backed up.
2940 # the same path. If such a file exists, it may need to be backed up.
2941 # Making the distinction at this stage helps have simpler backup
2941 # Making the distinction at this stage helps have simpler backup
2942 # logic.
2942 # logic.
2943 removunk = set()
2943 removunk = set()
2944 for abs in removed:
2944 for abs in removed:
2945 target = repo.wjoin(abs)
2945 target = repo.wjoin(abs)
2946 if os.path.lexists(target):
2946 if os.path.lexists(target):
2947 removunk.add(abs)
2947 removunk.add(abs)
2948 removed -= removunk
2948 removed -= removunk
2949
2949
2950 dsremovunk = set()
2950 dsremovunk = set()
2951 for abs in dsremoved:
2951 for abs in dsremoved:
2952 target = repo.wjoin(abs)
2952 target = repo.wjoin(abs)
2953 if os.path.lexists(target):
2953 if os.path.lexists(target):
2954 dsremovunk.add(abs)
2954 dsremovunk.add(abs)
2955 dsremoved -= dsremovunk
2955 dsremoved -= dsremovunk
2956
2956
2957 # action to be actually performed by revert
2957 # action to be actually performed by revert
2958 # (<list of files>, <message>) tuple
2958 # (<list of files>, <message>) tuple
2959 actions = {'revert': ([], _('reverting %s\n')),
2959 actions = {'revert': ([], _('reverting %s\n')),
2960 'add': ([], _('adding %s\n')),
2960 'add': ([], _('adding %s\n')),
2961 'remove': ([], _('removing %s\n')),
2961 'remove': ([], _('removing %s\n')),
2962 'drop': ([], _('removing %s\n')),
2962 'drop': ([], _('removing %s\n')),
2963 'forget': ([], _('forgetting %s\n')),
2963 'forget': ([], _('forgetting %s\n')),
2964 'undelete': ([], _('undeleting %s\n')),
2964 'undelete': ([], _('undeleting %s\n')),
2965 'noop': (None, _('no changes needed to %s\n')),
2965 'noop': (None, _('no changes needed to %s\n')),
2966 'unknown': (None, _('file not managed: %s\n')),
2966 'unknown': (None, _('file not managed: %s\n')),
2967 }
2967 }
2968
2968
2969 # "constant" that convey the backup strategy.
2969 # "constant" that convey the backup strategy.
2970 # All set to `discard` if `no-backup` is set do avoid checking
2970 # All set to `discard` if `no-backup` is set do avoid checking
2971 # no_backup lower in the code.
2971 # no_backup lower in the code.
2972 # These values are ordered for comparison purposes
2972 # These values are ordered for comparison purposes
2973 backup = 2 # unconditionally do backup
2973 backup = 2 # unconditionally do backup
2974 check = 1 # check if the existing file differs from target
2974 check = 1 # check if the existing file differs from target
2975 discard = 0 # never do backup
2975 discard = 0 # never do backup
2976 if opts.get('no_backup'):
2976 if opts.get('no_backup'):
2977 backup = check = discard
2977 backup = check = discard
2978
2978
2979 backupanddel = actions['remove']
2979 backupanddel = actions['remove']
2980 if not opts.get('no_backup'):
2980 if not opts.get('no_backup'):
2981 backupanddel = actions['drop']
2981 backupanddel = actions['drop']
2982
2982
2983 disptable = (
2983 disptable = (
2984 # dispatch table:
2984 # dispatch table:
2985 # file state
2985 # file state
2986 # action
2986 # action
2987 # make backup
2987 # make backup
2988
2988
2989 ## Sets that will result in changes to files on disk
2989 ## Sets that will result in changes to files on disk
2990 # Modified compared to target, no local change
2990 # Modified compared to target, no local change
2991 (modified, actions['revert'], discard),
2991 (modified, actions['revert'], discard),
2992 # Modified compared to target, but local file is deleted
2992 # Modified compared to target, but local file is deleted
2993 (deleted, actions['revert'], discard),
2993 (deleted, actions['revert'], discard),
2994 # Modified compared to target, local change
2994 # Modified compared to target, local change
2995 (dsmodified, actions['revert'], backup),
2995 (dsmodified, actions['revert'], backup),
2996 # Added since target
2996 # Added since target
2997 (added, actions['remove'], discard),
2997 (added, actions['remove'], discard),
2998 # Added in working directory
2998 # Added in working directory
2999 (dsadded, actions['forget'], discard),
2999 (dsadded, actions['forget'], discard),
3000 # Added since target, have local modification
3000 # Added since target, have local modification
3001 (modadded, backupanddel, backup),
3001 (modadded, backupanddel, backup),
3002 # Added since target but file is missing in working directory
3002 # Added since target but file is missing in working directory
3003 (deladded, actions['drop'], discard),
3003 (deladded, actions['drop'], discard),
3004 # Removed since target, before working copy parent
3004 # Removed since target, before working copy parent
3005 (removed, actions['add'], discard),
3005 (removed, actions['add'], discard),
3006 # Same as `removed` but an unknown file exists at the same path
3006 # Same as `removed` but an unknown file exists at the same path
3007 (removunk, actions['add'], check),
3007 (removunk, actions['add'], check),
3008 # Removed since target, marked as such in working copy parent
3008 # Removed since target, marked as such in working copy parent
3009 (dsremoved, actions['undelete'], discard),
3009 (dsremoved, actions['undelete'], discard),
3010 # Same as `dsremoved` but an unknown file exists at the same path
3010 # Same as `dsremoved` but an unknown file exists at the same path
3011 (dsremovunk, actions['undelete'], check),
3011 (dsremovunk, actions['undelete'], check),
3012 ## the following sets do not result in any file changes
3012 ## the following sets do not result in any file changes
3013 # File with no modification
3013 # File with no modification
3014 (clean, actions['noop'], discard),
3014 (clean, actions['noop'], discard),
3015 # Existing file, not tracked anywhere
3015 # Existing file, not tracked anywhere
3016 (unknown, actions['unknown'], discard),
3016 (unknown, actions['unknown'], discard),
3017 )
3017 )
3018
3018
3019 for abs, (rel, exact) in sorted(names.items()):
3019 for abs, (rel, exact) in sorted(names.items()):
3020 # target file to be touched on disk (relative to cwd)
3020 # target file to be touched on disk (relative to cwd)
3021 target = repo.wjoin(abs)
3021 target = repo.wjoin(abs)
3022 # search the entry in the dispatch table.
3022 # search the entry in the dispatch table.
3023 # if the file is in any of these sets, it was touched in the working
3023 # if the file is in any of these sets, it was touched in the working
3024 # directory parent and we are sure it needs to be reverted.
3024 # directory parent and we are sure it needs to be reverted.
3025 for table, (xlist, msg), dobackup in disptable:
3025 for table, (xlist, msg), dobackup in disptable:
3026 if abs not in table:
3026 if abs not in table:
3027 continue
3027 continue
3028 if xlist is not None:
3028 if xlist is not None:
3029 xlist.append(abs)
3029 xlist.append(abs)
3030 if dobackup and (backup <= dobackup
3030 if dobackup and (backup <= dobackup
3031 or wctx[abs].cmp(ctx[abs])):
3031 or wctx[abs].cmp(ctx[abs])):
3032 bakname = "%s.orig" % rel
3032 bakname = "%s.orig" % rel
3033 ui.note(_('saving current version of %s as %s\n') %
3033 ui.note(_('saving current version of %s as %s\n') %
3034 (rel, bakname))
3034 (rel, bakname))
3035 if not opts.get('dry_run'):
3035 if not opts.get('dry_run'):
3036 if interactive:
3036 if interactive:
3037 util.copyfile(target, bakname)
3037 util.copyfile(target, bakname)
3038 else:
3038 else:
3039 util.rename(target, bakname)
3039 util.rename(target, bakname)
3040 if ui.verbose or not exact:
3040 if ui.verbose or not exact:
3041 if not isinstance(msg, basestring):
3041 if not isinstance(msg, basestring):
3042 msg = msg(abs)
3042 msg = msg(abs)
3043 ui.status(msg % rel)
3043 ui.status(msg % rel)
3044 elif exact:
3044 elif exact:
3045 ui.warn(msg % rel)
3045 ui.warn(msg % rel)
3046 break
3046 break
3047
3047
3048 if not opts.get('dry_run'):
3048 if not opts.get('dry_run'):
3049 needdata = ('revert', 'add', 'undelete')
3049 needdata = ('revert', 'add', 'undelete')
3050 _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
3050 _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
3051 _performrevert(repo, parents, ctx, actions, interactive)
3051 _performrevert(repo, parents, ctx, actions, interactive)
3052
3052
3053 if targetsubs:
3053 if targetsubs:
3054 # Revert the subrepos on the revert list
3054 # Revert the subrepos on the revert list
3055 for sub in targetsubs:
3055 for sub in targetsubs:
3056 try:
3056 try:
3057 wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
3057 wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
3058 except KeyError:
3058 except KeyError:
3059 raise util.Abort("subrepository '%s' does not exist in %s!"
3059 raise util.Abort("subrepository '%s' does not exist in %s!"
3060 % (sub, short(ctx.node())))
3060 % (sub, short(ctx.node())))
3061 finally:
3061 finally:
3062 wlock.release()
3062 wlock.release()
3063
3063
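# Illustrative aside (not part of the upstream file): the disptable scan above
# is a plain "first matching set wins" dispatch. A stripped-down sketch of the
# same pattern with hypothetical sets and action names:
def _exampledispatch(path, modified, added, removed):
    backup = 2    # unconditionally back up
    discard = 0   # never back up
    disptable = (
        (modified, 'revert', backup),
        (added, 'forget', discard),
        (removed, 'add', discard),
    )
    for fileset, action, dobackup in disptable:
        if path in fileset:
            return action, dobackup
    return 'noop', discard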
3064 def _revertprefetch(repo, ctx, *files):
3064 def _revertprefetch(repo, ctx, *files):
3065 """Let extension changing the storage layer prefetch content"""
3065 """Let extension changing the storage layer prefetch content"""
3066 pass
3066 pass
3067
3067
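# Illustrative aside (not part of the upstream file): an extension that keeps
# file contents in remote storage could hook this prefetch point. A sketch of
# the shape such an override might take; `myfetch` and the extension layout
# are hypothetical:
#
#     from mercurial import cmdutil, extensions
#
#     def prefetchwrapper(orig, repo, ctx, *files):
#         # grab all needed file contents in one batch before revert touches
#         # the working directory
#         myfetch(repo, ctx, files)
#         return orig(repo, ctx, *files)
#
#     def uisetup(ui):
#         extensions.wrapfunction(cmdutil, '_revertprefetch', prefetchwrapper)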
3068 def _performrevert(repo, parents, ctx, actions, interactive=False):
3068 def _performrevert(repo, parents, ctx, actions, interactive=False):
3069 """function that actually perform all the actions computed for revert
3069 """function that actually perform all the actions computed for revert
3070
3070
3071 This is an independent function to let extensions plug in and react to
3071 This is an independent function to let extensions plug in and react to
3072 the imminent revert.
3072 the imminent revert.
3073
3073
3074 Make sure you have the working directory locked when calling this function.
3074 Make sure you have the working directory locked when calling this function.
3075 """
3075 """
3076 parent, p2 = parents
3076 parent, p2 = parents
3077 node = ctx.node()
3077 node = ctx.node()
3078 def checkout(f):
3078 def checkout(f):
3079 fc = ctx[f]
3079 fc = ctx[f]
3080 return repo.wwrite(f, fc.data(), fc.flags())
3080 return repo.wwrite(f, fc.data(), fc.flags())
3081
3081
3082 audit_path = pathutil.pathauditor(repo.root)
3082 audit_path = pathutil.pathauditor(repo.root)
3083 for f in actions['forget'][0]:
3083 for f in actions['forget'][0]:
3084 repo.dirstate.drop(f)
3084 repo.dirstate.drop(f)
3085 for f in actions['remove'][0]:
3085 for f in actions['remove'][0]:
3086 audit_path(f)
3086 audit_path(f)
3087 try:
3087 try:
3088 util.unlinkpath(repo.wjoin(f))
3088 util.unlinkpath(repo.wjoin(f))
3089 except OSError:
3089 except OSError:
3090 pass
3090 pass
3091 repo.dirstate.remove(f)
3091 repo.dirstate.remove(f)
3092 for f in actions['drop'][0]:
3092 for f in actions['drop'][0]:
3093 audit_path(f)
3093 audit_path(f)
3094 repo.dirstate.remove(f)
3094 repo.dirstate.remove(f)
3095
3095
3096 normal = None
3096 normal = None
3097 if node == parent:
3097 if node == parent:
3098 # We're reverting to our parent. If possible, we'd like status
3098 # We're reverting to our parent. If possible, we'd like status
3099 # to report the file as clean. We have to use normallookup for
3099 # to report the file as clean. We have to use normallookup for
3100 # merges to avoid losing information about merged/dirty files.
3100 # merges to avoid losing information about merged/dirty files.
3101 if p2 != nullid:
3101 if p2 != nullid:
3102 normal = repo.dirstate.normallookup
3102 normal = repo.dirstate.normallookup
3103 else:
3103 else:
3104 normal = repo.dirstate.normal
3104 normal = repo.dirstate.normal
3105
3105
3106 if interactive:
3106 if interactive:
3107 # Prompt the user for changes to revert
3107 # Prompt the user for changes to revert
3108 torevert = [repo.wjoin(f) for f in actions['revert'][0]]
3108 torevert = [repo.wjoin(f) for f in actions['revert'][0]]
3109 m = scmutil.match(ctx, torevert, {})
3109 m = scmutil.match(ctx, torevert, {})
3110 diff = patch.diff(repo, None, ctx.node(), m)
3110 diff = patch.diff(repo, None, ctx.node(), m)
3111 originalchunks = patch.parsepatch(diff)
3111 originalchunks = patch.parsepatch(diff)
3112 try:
3112 try:
3113 chunks = recordfilter(repo.ui, originalchunks)
3113 chunks = recordfilter(repo.ui, originalchunks)
3114 except patch.PatchError, err:
3114 except patch.PatchError, err:
3115 raise util.Abort(_('error parsing patch: %s') % err)
3115 raise util.Abort(_('error parsing patch: %s') % err)
3116
3116
3117 # Apply changes
3117 # Apply changes
3118 fp = cStringIO.StringIO()
3118 fp = cStringIO.StringIO()
3119 for c in chunks:
3119 for c in chunks:
3120 c.write(fp)
3120 c.write(fp)
3121 dopatch = fp.tell()
3121 dopatch = fp.tell()
3122 fp.seek(0)
3122 fp.seek(0)
3123 if dopatch:
3123 if dopatch:
3124 try:
3124 try:
3125 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3125 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3126 except patch.PatchError, err:
3126 except patch.PatchError, err:
3127 raise util.Abort(str(err))
3127 raise util.Abort(str(err))
3128 del fp
3128 del fp
3129 else:
3129 else:
3130 for f in actions['revert'][0]:
3130 for f in actions['revert'][0]:
3131 wsize = checkout(f)
3131 wsize = checkout(f)
3132 if normal:
3132 if normal:
3133 normal(f)
3133 normal(f)
3134 elif wsize == repo.dirstate._map[f][2]:
3134 elif wsize == repo.dirstate._map[f][2]:
3135 # changes may be overlooked without normallookup,
3135 # changes may be overlooked without normallookup,
3136 # if size isn't changed at reverting
3136 # if size isn't changed at reverting
3137 repo.dirstate.normallookup(f)
3137 repo.dirstate.normallookup(f)
3138
3138
3139 for f in actions['add'][0]:
3139 for f in actions['add'][0]:
3140 checkout(f)
3140 checkout(f)
3141 repo.dirstate.add(f)
3141 repo.dirstate.add(f)
3142
3142
3143 normal = repo.dirstate.normallookup
3143 normal = repo.dirstate.normallookup
3144 if node == parent and p2 == nullid:
3144 if node == parent and p2 == nullid:
3145 normal = repo.dirstate.normal
3145 normal = repo.dirstate.normal
3146 for f in actions['undelete'][0]:
3146 for f in actions['undelete'][0]:
3147 checkout(f)
3147 checkout(f)
3148 normal(f)
3148 normal(f)
3149
3149
3150 copied = copies.pathcopies(repo[parent], ctx)
3150 copied = copies.pathcopies(repo[parent], ctx)
3151
3151
3152 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3152 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3153 if f in copied:
3153 if f in copied:
3154 repo.dirstate.copy(copied[f], f)
3154 repo.dirstate.copy(copied[f], f)
3155
3155
3156 def command(table):
3156 def command(table):
3157 """Returns a function object to be used as a decorator for making commands.
3157 """Returns a function object to be used as a decorator for making commands.
3158
3158
3159 This function receives a command table as its argument. The table should
3159 This function receives a command table as its argument. The table should
3160 be a dict.
3160 be a dict.
3161
3161
3162 The returned function can be used as a decorator for adding commands
3162 The returned function can be used as a decorator for adding commands
3163 to that command table. This function accepts multiple arguments to define
3163 to that command table. This function accepts multiple arguments to define
3164 a command.
3164 a command.
3165
3165
3166 The first argument is the command name.
3166 The first argument is the command name.
3167
3167
3168 The options argument is an iterable of tuples defining command arguments.
3168 The options argument is an iterable of tuples defining command arguments.
3169 See ``mercurial.fancyopts.fancyopts()`` for the format of each tuple.
3169 See ``mercurial.fancyopts.fancyopts()`` for the format of each tuple.
3170
3170
3171 The synopsis argument defines a short, one line summary of how to use the
3171 The synopsis argument defines a short, one line summary of how to use the
3172 command. This shows up in the help output.
3172 command. This shows up in the help output.
3173
3173
3174 The norepo argument defines whether the command does not require a
3174 The norepo argument defines whether the command does not require a
3175 local repository. Most commands operate against a repository, thus the
3175 local repository. Most commands operate against a repository, thus the
3176 default is False.
3176 default is False.
3177
3177
3178 The optionalrepo argument defines whether the command optionally requires
3178 The optionalrepo argument defines whether the command optionally requires
3179 a local repository.
3179 a local repository.
3180
3180
3181 The inferrepo argument defines whether to try to find a repository from the
3181 The inferrepo argument defines whether to try to find a repository from the
3182 command line arguments. If True, arguments will be examined for potential
3182 command line arguments. If True, arguments will be examined for potential
3183 repository locations. See ``findrepo()``. If a repository is found, it
3183 repository locations. See ``findrepo()``. If a repository is found, it
3184 will be used.
3184 will be used.
3185 """
3185 """
3186 def cmd(name, options=(), synopsis=None, norepo=False, optionalrepo=False,
3186 def cmd(name, options=(), synopsis=None, norepo=False, optionalrepo=False,
3187 inferrepo=False):
3187 inferrepo=False):
3188 def decorator(func):
3188 def decorator(func):
3189 if synopsis:
3189 if synopsis:
3190 table[name] = func, list(options), synopsis
3190 table[name] = func, list(options), synopsis
3191 else:
3191 else:
3192 table[name] = func, list(options)
3192 table[name] = func, list(options)
3193
3193
3194 if norepo:
3194 if norepo:
3195 # Avoid import cycle.
3195 # Avoid import cycle.
3196 import commands
3196 import commands
3197 commands.norepo += ' %s' % ' '.join(parsealiases(name))
3197 commands.norepo += ' %s' % ' '.join(parsealiases(name))
3198
3198
3199 if optionalrepo:
3199 if optionalrepo:
3200 import commands
3200 import commands
3201 commands.optionalrepo += ' %s' % ' '.join(parsealiases(name))
3201 commands.optionalrepo += ' %s' % ' '.join(parsealiases(name))
3202
3202
3203 if inferrepo:
3203 if inferrepo:
3204 import commands
3204 import commands
3205 commands.inferrepo += ' %s' % ' '.join(parsealiases(name))
3205 commands.inferrepo += ' %s' % ' '.join(parsealiases(name))
3206
3206
3207 return func
3207 return func
3208 return decorator
3208 return decorator
3209
3209
3210 return cmd
3210 return cmd
3211
3211
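# Illustrative aside (not part of the upstream file): a minimal sketch of how
# the decorator returned by command() is used. The table, command name,
# option and function below are all hypothetical.
_examplecmdtable = {}
_examplecommand = command(_examplecmdtable)

@_examplecommand('example-hello',
                 [('g', 'greeting', 'hello', 'greeting to print')],
                 '[-g TEXT]')
def _examplehello(ui, repo, **opts):
    """print a greeting (illustration only)"""
    ui.write("%s from %s\n" % (opts.get('greeting'), repo.root))
    return 0
# after decoration, _examplecmdtable['example-hello'] holds
# (_examplehello, [option tuples], '[-g TEXT]')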
3212 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3212 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3213 # commands.outgoing. "missing" is "missing" of the result of
3213 # commands.outgoing. "missing" is "missing" of the result of
3214 # "findcommonoutgoing()"
3214 # "findcommonoutgoing()"
3215 outgoinghooks = util.hooks()
3215 outgoinghooks = util.hooks()
3216
3216
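# Illustrative aside (not part of the upstream file): extensions register
# outgoing hooks via util.hooks.add(), matching the signature documented
# above. The hook body and extension layout are hypothetical:
#
#     def myoutgoinghook(ui, repo, other, opts, missing):
#         if missing:
#             ui.status('about to push %d changesets\n' % len(missing))
#
#     def uisetup(ui):
#         cmdutil.outgoinghooks.add('myext', myoutgoinghook)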
3217 # a list of (ui, repo) functions called by commands.summary
3217 # a list of (ui, repo) functions called by commands.summary
3218 summaryhooks = util.hooks()
3218 summaryhooks = util.hooks()
3219
3219
3220 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3220 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3221 #
3221 #
3222 # functions should return tuple of booleans below, if 'changes' is None:
3222 # functions should return tuple of booleans below, if 'changes' is None:
3223 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3223 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3224 #
3224 #
3225 # otherwise, 'changes' is a tuple of tuples below:
3225 # otherwise, 'changes' is a tuple of tuples below:
3226 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3226 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3227 # - (desturl, destbranch, destpeer, outgoing)
3227 # - (desturl, destbranch, destpeer, outgoing)
3228 summaryremotehooks = util.hooks()
3228 summaryremotehooks = util.hooks()
3229
3229
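# Illustrative aside (not part of the upstream file): a hypothetical hook
# honouring the contract documented above -- when 'changes' is None it only
# reports which of incoming/outgoing it needs, otherwise it prints a line:
#
#     def mysummaryremotehook(ui, repo, opts, changes):
#         if changes is None:
#             return (False, True)     # outgoing info needed, incoming not
#         (srcurl, srcbranch, srcpeer, incoming), \
#             (dsturl, dstbranch, dstpeer, outgoing) = changes
#         if outgoing:
#             ui.write('myext: outgoing changes present\n')
#
#     def uisetup(ui):
#         cmdutil.summaryremotehooks.add('myext', mysummaryremotehook)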
3230 # A list of state files kept by multistep operations like graft.
3230 # A list of state files kept by multistep operations like graft.
3231 # Since graft cannot be aborted, it is considered 'clearable' by update.
3231 # Since graft cannot be aborted, it is considered 'clearable' by update.
3232 # note: bisect is intentionally excluded
3232 # note: bisect is intentionally excluded
3233 # (state file, clearable, allowcommit, error, hint)
3233 # (state file, clearable, allowcommit, error, hint)
3234 unfinishedstates = [
3234 unfinishedstates = [
3235 ('graftstate', True, False, _('graft in progress'),
3235 ('graftstate', True, False, _('graft in progress'),
3236 _("use 'hg graft --continue' or 'hg update' to abort")),
3236 _("use 'hg graft --continue' or 'hg update' to abort")),
3237 ('updatestate', True, False, _('last update was interrupted'),
3237 ('updatestate', True, False, _('last update was interrupted'),
3238 _("use 'hg update' to get a consistent checkout"))
3238 _("use 'hg update' to get a consistent checkout"))
3239 ]
3239 ]
3240
3240
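# Illustrative aside (not part of the upstream file): an extension that adds
# its own multistep operation can append an entry of the same shape. The
# state file and command names below are hypothetical:
#
#     cmdutil.unfinishedstates.append(
#         ('mystate', False, False, _('my operation in progress'),
#          _("use 'hg myop --continue' or 'hg myop --abort'")))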
3241 def checkunfinished(repo, commit=False):
3241 def checkunfinished(repo, commit=False):
3242 '''Look for an unfinished multistep operation, like graft, and abort
3242 '''Look for an unfinished multistep operation, like graft, and abort
3243 if found. It's probably good to check this right before
3243 if found. It's probably good to check this right before
3244 bailifchanged().
3244 bailifchanged().
3245 '''
3245 '''
3246 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3246 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3247 if commit and allowcommit:
3247 if commit and allowcommit:
3248 continue
3248 continue
3249 if repo.vfs.exists(f):
3249 if repo.vfs.exists(f):
3250 raise util.Abort(msg, hint=hint)
3250 raise util.Abort(msg, hint=hint)
3251
3251
3252 def clearunfinished(repo):
3252 def clearunfinished(repo):
3253 '''Check for unfinished operations (as above), and clear the ones
3253 '''Check for unfinished operations (as above), and clear the ones
3254 that are clearable.
3254 that are clearable.
3255 '''
3255 '''
3256 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3256 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3257 if not clearable and repo.vfs.exists(f):
3257 if not clearable and repo.vfs.exists(f):
3258 raise util.Abort(msg, hint=hint)
3258 raise util.Abort(msg, hint=hint)
3259 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3259 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3260 if clearable and repo.vfs.exists(f):
3260 if clearable and repo.vfs.exists(f):
3261 util.unlink(repo.join(f))
3261 util.unlink(repo.join(f))
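# Illustrative aside (not part of the upstream file): a sketch of how a
# history-rewriting command would typically use the helpers above, calling
# checkunfinished() right before bailifchanged() as the docstring suggests.
# The guard function and its options are hypothetical.
def _exampleguard(ui, repo, opts):
    if not opts.get('continue'):
        checkunfinished(repo)   # abort if e.g. a graft is in progress
        bailifchanged(repo)     # abort if the working copy has changes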
@@ -1,6463 +1,6463 @@
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import hex, bin, nullid, nullrev, short
8 from node import hex, bin, nullid, nullrev, short
9 from lock import release
9 from lock import release
10 from i18n import _
10 from i18n import _
11 import os, re, difflib, time, tempfile, errno, shlex
11 import os, re, difflib, time, tempfile, errno, shlex
12 import sys, socket
12 import sys, socket
13 import hg, scmutil, util, revlog, copies, error, bookmarks
13 import hg, scmutil, util, revlog, copies, error, bookmarks
14 import patch, help, encoding, templatekw, discovery
14 import patch, help, encoding, templatekw, discovery
15 import archival, changegroup, cmdutil, hbisect
15 import archival, changegroup, cmdutil, hbisect
16 import sshserver, hgweb, commandserver
16 import sshserver, hgweb, commandserver
17 import extensions
17 import extensions
18 from hgweb import server as hgweb_server
18 from hgweb import server as hgweb_server
19 import merge as mergemod
19 import merge as mergemod
20 import minirst, revset, fileset
20 import minirst, revset, fileset
21 import dagparser, context, simplemerge, graphmod, copies
21 import dagparser, context, simplemerge, graphmod, copies
22 import random
22 import random
23 import setdiscovery, treediscovery, dagutil, pvec, localrepo
23 import setdiscovery, treediscovery, dagutil, pvec, localrepo
24 import phases, obsolete, exchange, bundle2
24 import phases, obsolete, exchange, bundle2
25 import ui as uimod
25 import ui as uimod
26
26
27 table = {}
27 table = {}
28
28
29 command = cmdutil.command(table)
29 command = cmdutil.command(table)
30
30
31 # Space delimited list of commands that don't require local repositories.
31 # Space delimited list of commands that don't require local repositories.
32 # This should be populated by passing norepo=True into the @command decorator.
32 # This should be populated by passing norepo=True into the @command decorator.
33 norepo = ''
33 norepo = ''
34 # Space delimited list of commands that optionally require local repositories.
34 # Space delimited list of commands that optionally require local repositories.
35 # This should be populated by passing optionalrepo=True into the @command
35 # This should be populated by passing optionalrepo=True into the @command
36 # decorator.
36 # decorator.
37 optionalrepo = ''
37 optionalrepo = ''
38 # Space delimited list of commands that will examine arguments looking for
38 # Space delimited list of commands that will examine arguments looking for
39 # a repository. This should be populated by passing inferrepo=True into the
39 # a repository. This should be populated by passing inferrepo=True into the
40 # @command decorator.
40 # @command decorator.
41 inferrepo = ''
41 inferrepo = ''
42
42
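# Illustrative aside (not part of the upstream file): these strings are filled
# in by the cmdutil.command() decorator when a command is declared with the
# matching flag. A hypothetical declaration:
#
#     @command('example-version', [], norepo=True)
#     def exampleversion(ui):
#         ui.write('example 1.0\n')
#
#     # afterwards, 'example-version' has been appended to `norepo`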
43 # common command options
43 # common command options
44
44
45 globalopts = [
45 globalopts = [
46 ('R', 'repository', '',
46 ('R', 'repository', '',
47 _('repository root directory or name of overlay bundle file'),
47 _('repository root directory or name of overlay bundle file'),
48 _('REPO')),
48 _('REPO')),
49 ('', 'cwd', '',
49 ('', 'cwd', '',
50 _('change working directory'), _('DIR')),
50 _('change working directory'), _('DIR')),
51 ('y', 'noninteractive', None,
51 ('y', 'noninteractive', None,
52 _('do not prompt, automatically pick the first choice for all prompts')),
52 _('do not prompt, automatically pick the first choice for all prompts')),
53 ('q', 'quiet', None, _('suppress output')),
53 ('q', 'quiet', None, _('suppress output')),
54 ('v', 'verbose', None, _('enable additional output')),
54 ('v', 'verbose', None, _('enable additional output')),
55 ('', 'config', [],
55 ('', 'config', [],
56 _('set/override config option (use \'section.name=value\')'),
56 _('set/override config option (use \'section.name=value\')'),
57 _('CONFIG')),
57 _('CONFIG')),
58 ('', 'debug', None, _('enable debugging output')),
58 ('', 'debug', None, _('enable debugging output')),
59 ('', 'debugger', None, _('start debugger')),
59 ('', 'debugger', None, _('start debugger')),
60 ('', 'encoding', encoding.encoding, _('set the charset encoding'),
60 ('', 'encoding', encoding.encoding, _('set the charset encoding'),
61 _('ENCODE')),
61 _('ENCODE')),
62 ('', 'encodingmode', encoding.encodingmode,
62 ('', 'encodingmode', encoding.encodingmode,
63 _('set the charset encoding mode'), _('MODE')),
63 _('set the charset encoding mode'), _('MODE')),
64 ('', 'traceback', None, _('always print a traceback on exception')),
64 ('', 'traceback', None, _('always print a traceback on exception')),
65 ('', 'time', None, _('time how long the command takes')),
65 ('', 'time', None, _('time how long the command takes')),
66 ('', 'profile', None, _('print command execution profile')),
66 ('', 'profile', None, _('print command execution profile')),
67 ('', 'version', None, _('output version information and exit')),
67 ('', 'version', None, _('output version information and exit')),
68 ('h', 'help', None, _('display help and exit')),
68 ('h', 'help', None, _('display help and exit')),
69 ('', 'hidden', False, _('consider hidden changesets')),
69 ('', 'hidden', False, _('consider hidden changesets')),
70 ]
70 ]
71
71
72 dryrunopts = [('n', 'dry-run', None,
72 dryrunopts = [('n', 'dry-run', None,
73 _('do not perform actions, just print output'))]
73 _('do not perform actions, just print output'))]
74
74
75 remoteopts = [
75 remoteopts = [
76 ('e', 'ssh', '',
76 ('e', 'ssh', '',
77 _('specify ssh command to use'), _('CMD')),
77 _('specify ssh command to use'), _('CMD')),
78 ('', 'remotecmd', '',
78 ('', 'remotecmd', '',
79 _('specify hg command to run on the remote side'), _('CMD')),
79 _('specify hg command to run on the remote side'), _('CMD')),
80 ('', 'insecure', None,
80 ('', 'insecure', None,
81 _('do not verify server certificate (ignoring web.cacerts config)')),
81 _('do not verify server certificate (ignoring web.cacerts config)')),
82 ]
82 ]
83
83
84 walkopts = [
84 walkopts = [
85 ('I', 'include', [],
85 ('I', 'include', [],
86 _('include names matching the given patterns'), _('PATTERN')),
86 _('include names matching the given patterns'), _('PATTERN')),
87 ('X', 'exclude', [],
87 ('X', 'exclude', [],
88 _('exclude names matching the given patterns'), _('PATTERN')),
88 _('exclude names matching the given patterns'), _('PATTERN')),
89 ]
89 ]
90
90
91 commitopts = [
91 commitopts = [
92 ('m', 'message', '',
92 ('m', 'message', '',
93 _('use text as commit message'), _('TEXT')),
93 _('use text as commit message'), _('TEXT')),
94 ('l', 'logfile', '',
94 ('l', 'logfile', '',
95 _('read commit message from file'), _('FILE')),
95 _('read commit message from file'), _('FILE')),
96 ]
96 ]
97
97
98 commitopts2 = [
98 commitopts2 = [
99 ('d', 'date', '',
99 ('d', 'date', '',
100 _('record the specified date as commit date'), _('DATE')),
100 _('record the specified date as commit date'), _('DATE')),
101 ('u', 'user', '',
101 ('u', 'user', '',
102 _('record the specified user as committer'), _('USER')),
102 _('record the specified user as committer'), _('USER')),
103 ]
103 ]
104
104
105 # hidden for now
105 # hidden for now
106 formatteropts = [
106 formatteropts = [
107 ('T', 'template', '',
107 ('T', 'template', '',
108 _('display with template (EXPERIMENTAL)'), _('TEMPLATE')),
108 _('display with template (EXPERIMENTAL)'), _('TEMPLATE')),
109 ]
109 ]
110
110
111 templateopts = [
111 templateopts = [
112 ('', 'style', '',
112 ('', 'style', '',
113 _('display using template map file (DEPRECATED)'), _('STYLE')),
113 _('display using template map file (DEPRECATED)'), _('STYLE')),
114 ('T', 'template', '',
114 ('T', 'template', '',
115 _('display with template'), _('TEMPLATE')),
115 _('display with template'), _('TEMPLATE')),
116 ]
116 ]
117
117
118 logopts = [
118 logopts = [
119 ('p', 'patch', None, _('show patch')),
119 ('p', 'patch', None, _('show patch')),
120 ('g', 'git', None, _('use git extended diff format')),
120 ('g', 'git', None, _('use git extended diff format')),
121 ('l', 'limit', '',
121 ('l', 'limit', '',
122 _('limit number of changes displayed'), _('NUM')),
122 _('limit number of changes displayed'), _('NUM')),
123 ('M', 'no-merges', None, _('do not show merges')),
123 ('M', 'no-merges', None, _('do not show merges')),
124 ('', 'stat', None, _('output diffstat-style summary of changes')),
124 ('', 'stat', None, _('output diffstat-style summary of changes')),
125 ('G', 'graph', None, _("show the revision DAG")),
125 ('G', 'graph', None, _("show the revision DAG")),
126 ] + templateopts
126 ] + templateopts
127
127
128 diffopts = [
128 diffopts = [
129 ('a', 'text', None, _('treat all files as text')),
129 ('a', 'text', None, _('treat all files as text')),
130 ('g', 'git', None, _('use git extended diff format')),
130 ('g', 'git', None, _('use git extended diff format')),
131 ('', 'nodates', None, _('omit dates from diff headers'))
131 ('', 'nodates', None, _('omit dates from diff headers'))
132 ]
132 ]
133
133
134 diffwsopts = [
134 diffwsopts = [
135 ('w', 'ignore-all-space', None,
135 ('w', 'ignore-all-space', None,
136 _('ignore white space when comparing lines')),
136 _('ignore white space when comparing lines')),
137 ('b', 'ignore-space-change', None,
137 ('b', 'ignore-space-change', None,
138 _('ignore changes in the amount of white space')),
138 _('ignore changes in the amount of white space')),
139 ('B', 'ignore-blank-lines', None,
139 ('B', 'ignore-blank-lines', None,
140 _('ignore changes whose lines are all blank')),
140 _('ignore changes whose lines are all blank')),
141 ]
141 ]
142
142
143 diffopts2 = [
143 diffopts2 = [
144 ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
144 ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
145 ('p', 'show-function', None, _('show which function each change is in')),
145 ('p', 'show-function', None, _('show which function each change is in')),
146 ('', 'reverse', None, _('produce a diff that undoes the changes')),
146 ('', 'reverse', None, _('produce a diff that undoes the changes')),
147 ] + diffwsopts + [
147 ] + diffwsopts + [
148 ('U', 'unified', '',
148 ('U', 'unified', '',
149 _('number of lines of context to show'), _('NUM')),
149 _('number of lines of context to show'), _('NUM')),
150 ('', 'stat', None, _('output diffstat-style summary of changes')),
150 ('', 'stat', None, _('output diffstat-style summary of changes')),
151 ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
151 ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
152 ]
152 ]
153
153
154 mergetoolopts = [
154 mergetoolopts = [
155 ('t', 'tool', '', _('specify merge tool')),
155 ('t', 'tool', '', _('specify merge tool')),
156 ]
156 ]
157
157
158 similarityopts = [
158 similarityopts = [
159 ('s', 'similarity', '',
159 ('s', 'similarity', '',
160 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
160 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
161 ]
161 ]
162
162
163 subrepoopts = [
163 subrepoopts = [
164 ('S', 'subrepos', None,
164 ('S', 'subrepos', None,
165 _('recurse into subrepositories'))
165 _('recurse into subrepositories'))
166 ]
166 ]
167
167
168 # Commands start here, listed alphabetically
168 # Commands start here, listed alphabetically
169
169
170 @command('^add',
170 @command('^add',
171 walkopts + subrepoopts + dryrunopts,
171 walkopts + subrepoopts + dryrunopts,
172 _('[OPTION]... [FILE]...'),
172 _('[OPTION]... [FILE]...'),
173 inferrepo=True)
173 inferrepo=True)
174 def add(ui, repo, *pats, **opts):
174 def add(ui, repo, *pats, **opts):
175 """add the specified files on the next commit
175 """add the specified files on the next commit
176
176
177 Schedule files to be version controlled and added to the
177 Schedule files to be version controlled and added to the
178 repository.
178 repository.
179
179
180 The files will be added to the repository at the next commit. To
180 The files will be added to the repository at the next commit. To
181 undo an add before that, see :hg:`forget`.
181 undo an add before that, see :hg:`forget`.
182
182
183 If no names are given, add all files to the repository.
183 If no names are given, add all files to the repository.
184
184
185 .. container:: verbose
185 .. container:: verbose
186
186
187 An example showing how new (unknown) files are added
187 An example showing how new (unknown) files are added
188 automatically by :hg:`add`::
188 automatically by :hg:`add`::
189
189
190 $ ls
190 $ ls
191 foo.c
191 foo.c
192 $ hg status
192 $ hg status
193 ? foo.c
193 ? foo.c
194 $ hg add
194 $ hg add
195 adding foo.c
195 adding foo.c
196 $ hg status
196 $ hg status
197 A foo.c
197 A foo.c
198
198
199 Returns 0 if all files are successfully added.
199 Returns 0 if all files are successfully added.
200 """
200 """
201
201
202 m = scmutil.match(repo[None], pats, opts)
202 m = scmutil.match(repo[None], pats, opts)
203 rejected = cmdutil.add(ui, repo, m, "", False, **opts)
203 rejected = cmdutil.add(ui, repo, m, "", False, **opts)
204 return rejected and 1 or 0
204 return rejected and 1 or 0
205
205
206 @command('addremove',
206 @command('addremove',
207 similarityopts + subrepoopts + walkopts + dryrunopts,
207 similarityopts + subrepoopts + walkopts + dryrunopts,
208 _('[OPTION]... [FILE]...'),
208 _('[OPTION]... [FILE]...'),
209 inferrepo=True)
209 inferrepo=True)
210 def addremove(ui, repo, *pats, **opts):
210 def addremove(ui, repo, *pats, **opts):
211 """add all new files, delete all missing files
211 """add all new files, delete all missing files
212
212
213 Add all new files and remove all missing files from the
213 Add all new files and remove all missing files from the
214 repository.
214 repository.
215
215
216 New files are ignored if they match any of the patterns in
216 New files are ignored if they match any of the patterns in
217 ``.hgignore``. As with add, these changes take effect at the next
217 ``.hgignore``. As with add, these changes take effect at the next
218 commit.
218 commit.
219
219
220 Use the -s/--similarity option to detect renamed files. This
220 Use the -s/--similarity option to detect renamed files. This
221 option takes a percentage between 0 (disabled) and 100 (files must
221 option takes a percentage between 0 (disabled) and 100 (files must
222 be identical) as its parameter. With a parameter greater than 0,
222 be identical) as its parameter. With a parameter greater than 0,
223 this compares every removed file with every added file and records
223 this compares every removed file with every added file and records
224 those similar enough as renames. Detecting renamed files this way
224 those similar enough as renames. Detecting renamed files this way
225 can be expensive. After using this option, :hg:`status -C` can be
225 can be expensive. After using this option, :hg:`status -C` can be
226 used to check which files were identified as moved or renamed. If
226 used to check which files were identified as moved or renamed. If
227 not specified, -s/--similarity defaults to 100 and only renames of
227 not specified, -s/--similarity defaults to 100 and only renames of
228 identical files are detected.
228 identical files are detected.
229
229
230 Returns 0 if all files are successfully added.
230 Returns 0 if all files are successfully added.
231 """
231 """
232 try:
232 try:
233 sim = float(opts.get('similarity') or 100)
233 sim = float(opts.get('similarity') or 100)
234 except ValueError:
234 except ValueError:
235 raise util.Abort(_('similarity must be a number'))
235 raise util.Abort(_('similarity must be a number'))
236 if sim < 0 or sim > 100:
236 if sim < 0 or sim > 100:
237 raise util.Abort(_('similarity must be between 0 and 100'))
237 raise util.Abort(_('similarity must be between 0 and 100'))
238 matcher = scmutil.match(repo[None], pats, opts)
238 matcher = scmutil.match(repo[None], pats, opts)
239 return scmutil.addremove(repo, matcher, "", opts, similarity=sim / 100.0)
239 return scmutil.addremove(repo, matcher, "", opts, similarity=sim / 100.0)
240
240
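# Minimal, self-contained sketch of the --similarity handling shown above.
# The helper name is illustrative, not Mercurial API: the option is parsed as
# a float, validated to lie in [0, 100], and passed on as a fraction.

def _parse_similarity(value):
    """Return the similarity threshold as a fraction in [0.0, 1.0]."""
    try:
        sim = float(value or 100)          # empty/None falls back to 100
    except ValueError:
        raise ValueError('similarity must be a number')
    if sim < 0 or sim > 100:
        raise ValueError('similarity must be between 0 and 100')
    return sim / 100.0

# _parse_similarity('90') -> 0.9    _parse_similarity(None) -> 1.0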
241 @command('^annotate|blame',
241 @command('^annotate|blame',
242 [('r', 'rev', '', _('annotate the specified revision'), _('REV')),
242 [('r', 'rev', '', _('annotate the specified revision'), _('REV')),
243 ('', 'follow', None,
243 ('', 'follow', None,
244 _('follow copies/renames and list the filename (DEPRECATED)')),
244 _('follow copies/renames and list the filename (DEPRECATED)')),
245 ('', 'no-follow', None, _("don't follow copies and renames")),
245 ('', 'no-follow', None, _("don't follow copies and renames")),
246 ('a', 'text', None, _('treat all files as text')),
246 ('a', 'text', None, _('treat all files as text')),
247 ('u', 'user', None, _('list the author (long with -v)')),
247 ('u', 'user', None, _('list the author (long with -v)')),
248 ('f', 'file', None, _('list the filename')),
248 ('f', 'file', None, _('list the filename')),
249 ('d', 'date', None, _('list the date (short with -q)')),
249 ('d', 'date', None, _('list the date (short with -q)')),
250 ('n', 'number', None, _('list the revision number (default)')),
250 ('n', 'number', None, _('list the revision number (default)')),
251 ('c', 'changeset', None, _('list the changeset')),
251 ('c', 'changeset', None, _('list the changeset')),
252 ('l', 'line-number', None, _('show line number at the first appearance'))
252 ('l', 'line-number', None, _('show line number at the first appearance'))
253 ] + diffwsopts + walkopts + formatteropts,
253 ] + diffwsopts + walkopts + formatteropts,
254 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'),
254 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'),
255 inferrepo=True)
255 inferrepo=True)
256 def annotate(ui, repo, *pats, **opts):
256 def annotate(ui, repo, *pats, **opts):
257 """show changeset information by line for each file
257 """show changeset information by line for each file
258
258
259 List changes in files, showing the revision id responsible for
259 List changes in files, showing the revision id responsible for
260 each line.
260 each line.
261
261
262 This command is useful for discovering when a change was made and
262 This command is useful for discovering when a change was made and
263 by whom.
263 by whom.
264
264
265 Without the -a/--text option, annotate will avoid processing files
265 Without the -a/--text option, annotate will avoid processing files
266 it detects as binary. With -a, annotate will annotate the file
266 it detects as binary. With -a, annotate will annotate the file
267 anyway, although the results will probably be neither useful
267 anyway, although the results will probably be neither useful
268 nor desirable.
268 nor desirable.
269
269
270 Returns 0 on success.
270 Returns 0 on success.
271 """
271 """
272 if not pats:
272 if not pats:
273 raise util.Abort(_('at least one filename or pattern is required'))
273 raise util.Abort(_('at least one filename or pattern is required'))
274
274
275 if opts.get('follow'):
275 if opts.get('follow'):
276 # --follow is deprecated and now just an alias for -f/--file
276 # --follow is deprecated and now just an alias for -f/--file
277 # to mimic the behavior of Mercurial before version 1.5
277 # to mimic the behavior of Mercurial before version 1.5
278 opts['file'] = True
278 opts['file'] = True
279
279
280 ctx = scmutil.revsingle(repo, opts.get('rev'))
280 ctx = scmutil.revsingle(repo, opts.get('rev'))
281
281
282 fm = ui.formatter('annotate', opts)
282 fm = ui.formatter('annotate', opts)
283 if ui.quiet:
283 if ui.quiet:
284 datefunc = util.shortdate
284 datefunc = util.shortdate
285 else:
285 else:
286 datefunc = util.datestr
286 datefunc = util.datestr
287 if ctx.rev() is None:
287 if ctx.rev() is None:
288 def hexfn(node):
288 def hexfn(node):
289 if node is None:
289 if node is None:
290 return None
290 return None
291 else:
291 else:
292 return fm.hexfunc(node)
292 return fm.hexfunc(node)
293 if opts.get('changeset'):
293 if opts.get('changeset'):
294 # omit "+" suffix which is appended to node hex
294 # omit "+" suffix which is appended to node hex
295 def formatrev(rev):
295 def formatrev(rev):
296 if rev is None:
296 if rev is None:
297 return '%d' % ctx.p1().rev()
297 return '%d' % ctx.p1().rev()
298 else:
298 else:
299 return '%d' % rev
299 return '%d' % rev
300 else:
300 else:
301 def formatrev(rev):
301 def formatrev(rev):
302 if rev is None:
302 if rev is None:
303 return '%d+' % ctx.p1().rev()
303 return '%d+' % ctx.p1().rev()
304 else:
304 else:
305 return '%d ' % rev
305 return '%d ' % rev
306 def formathex(hex):
306 def formathex(hex):
307 if hex is None:
307 if hex is None:
308 return '%s+' % fm.hexfunc(ctx.p1().node())
308 return '%s+' % fm.hexfunc(ctx.p1().node())
309 else:
309 else:
310 return '%s ' % hex
310 return '%s ' % hex
311 else:
311 else:
312 hexfn = fm.hexfunc
312 hexfn = fm.hexfunc
313 formatrev = formathex = str
313 formatrev = formathex = str
314
314
315 opmap = [('user', ' ', lambda x: x[0].user(), ui.shortuser),
315 opmap = [('user', ' ', lambda x: x[0].user(), ui.shortuser),
316 ('number', ' ', lambda x: x[0].rev(), formatrev),
316 ('number', ' ', lambda x: x[0].rev(), formatrev),
317 ('changeset', ' ', lambda x: hexfn(x[0].node()), formathex),
317 ('changeset', ' ', lambda x: hexfn(x[0].node()), formathex),
318 ('date', ' ', lambda x: x[0].date(), util.cachefunc(datefunc)),
318 ('date', ' ', lambda x: x[0].date(), util.cachefunc(datefunc)),
319 ('file', ' ', lambda x: x[0].path(), str),
319 ('file', ' ', lambda x: x[0].path(), str),
320 ('line_number', ':', lambda x: x[1], str),
320 ('line_number', ':', lambda x: x[1], str),
321 ]
321 ]
322 fieldnamemap = {'number': 'rev', 'changeset': 'node'}
322 fieldnamemap = {'number': 'rev', 'changeset': 'node'}
323
323
324 if (not opts.get('user') and not opts.get('changeset')
324 if (not opts.get('user') and not opts.get('changeset')
325 and not opts.get('date') and not opts.get('file')):
325 and not opts.get('date') and not opts.get('file')):
326 opts['number'] = True
326 opts['number'] = True
327
327
328 linenumber = opts.get('line_number') is not None
328 linenumber = opts.get('line_number') is not None
329 if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
329 if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
330 raise util.Abort(_('at least one of -n/-c is required for -l'))
330 raise util.Abort(_('at least one of -n/-c is required for -l'))
331
331
332 if fm:
332 if fm:
333 def makefunc(get, fmt):
333 def makefunc(get, fmt):
334 return get
334 return get
335 else:
335 else:
336 def makefunc(get, fmt):
336 def makefunc(get, fmt):
337 return lambda x: fmt(get(x))
337 return lambda x: fmt(get(x))
338 funcmap = [(makefunc(get, fmt), sep) for op, sep, get, fmt in opmap
338 funcmap = [(makefunc(get, fmt), sep) for op, sep, get, fmt in opmap
339 if opts.get(op)]
339 if opts.get(op)]
340 funcmap[0] = (funcmap[0][0], '') # no separator in front of first column
340 funcmap[0] = (funcmap[0][0], '') # no separator in front of first column
341 fields = ' '.join(fieldnamemap.get(op, op) for op, sep, get, fmt in opmap
341 fields = ' '.join(fieldnamemap.get(op, op) for op, sep, get, fmt in opmap
342 if opts.get(op))
342 if opts.get(op))
343
343
344 def bad(x, y):
344 def bad(x, y):
345 raise util.Abort("%s: %s" % (x, y))
345 raise util.Abort("%s: %s" % (x, y))
346
346
347 m = scmutil.match(ctx, pats, opts)
347 m = scmutil.match(ctx, pats, opts)
348 m.bad = bad
348 m.bad = bad
349 follow = not opts.get('no_follow')
349 follow = not opts.get('no_follow')
350 diffopts = patch.difffeatureopts(ui, opts, section='annotate',
350 diffopts = patch.difffeatureopts(ui, opts, section='annotate',
351 whitespace=True)
351 whitespace=True)
352 for abs in ctx.walk(m):
352 for abs in ctx.walk(m):
353 fctx = ctx[abs]
353 fctx = ctx[abs]
354 if not opts.get('text') and util.binary(fctx.data()):
354 if not opts.get('text') and util.binary(fctx.data()):
355 fm.plain(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
355 fm.plain(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
356 continue
356 continue
357
357
358 lines = fctx.annotate(follow=follow, linenumber=linenumber,
358 lines = fctx.annotate(follow=follow, linenumber=linenumber,
359 diffopts=diffopts)
359 diffopts=diffopts)
360 formats = []
360 formats = []
361 pieces = []
361 pieces = []
362
362
363 for f, sep in funcmap:
363 for f, sep in funcmap:
364 l = [f(n) for n, dummy in lines]
364 l = [f(n) for n, dummy in lines]
365 if l:
365 if l:
366 if fm:
366 if fm:
367 formats.append(['%s' for x in l])
367 formats.append(['%s' for x in l])
368 else:
368 else:
369 sizes = [encoding.colwidth(x) for x in l]
369 sizes = [encoding.colwidth(x) for x in l]
370 ml = max(sizes)
370 ml = max(sizes)
371 formats.append([sep + ' ' * (ml - w) + '%s' for w in sizes])
371 formats.append([sep + ' ' * (ml - w) + '%s' for w in sizes])
372 pieces.append(l)
372 pieces.append(l)
373
373
374 for f, p, l in zip(zip(*formats), zip(*pieces), lines):
374 for f, p, l in zip(zip(*formats), zip(*pieces), lines):
375 fm.startitem()
375 fm.startitem()
376 fm.write(fields, "".join(f), *p)
376 fm.write(fields, "".join(f), *p)
377 fm.write('line', ": %s", l[1])
377 fm.write('line', ": %s", l[1])
378
378
379 if lines and not lines[-1][1].endswith('\n'):
379 if lines and not lines[-1][1].endswith('\n'):
380 fm.plain('\n')
380 fm.plain('\n')
381
381
382 fm.end()
382 fm.end()
383
383
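# Illustrative sketch (standalone helpers, not Mercurial API) of two bits of
# plain-text formatting used in annotate above: right-aligning a column to its
# widest entry, and marking lines from uncommitted changes with a '+' suffix
# on the parent revision. Display width is approximated with len() here; the
# real code uses encoding.colwidth() to handle wide characters.

def _pad_column(values, sep=' '):
    """Right-align string values to the width of the widest one."""
    widest = max(len(v) for v in values)
    return [sep + ' ' * (widest - len(v)) + v for v in values]

def _format_rev(rev, parent_rev):
    """Working-directory lines have no revision yet: show 'PARENT+'."""
    if rev is None:
        return '%d+' % parent_rev
    return '%d ' % rev

# _pad_column(['7', '12', '104']) -> ['   7', '  12', ' 104']
# _format_rev(None, 41)           -> '41+'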
384 @command('archive',
384 @command('archive',
385 [('', 'no-decode', None, _('do not pass files through decoders')),
385 [('', 'no-decode', None, _('do not pass files through decoders')),
386 ('p', 'prefix', '', _('directory prefix for files in archive'),
386 ('p', 'prefix', '', _('directory prefix for files in archive'),
387 _('PREFIX')),
387 _('PREFIX')),
388 ('r', 'rev', '', _('revision to distribute'), _('REV')),
388 ('r', 'rev', '', _('revision to distribute'), _('REV')),
389 ('t', 'type', '', _('type of distribution to create'), _('TYPE')),
389 ('t', 'type', '', _('type of distribution to create'), _('TYPE')),
390 ] + subrepoopts + walkopts,
390 ] + subrepoopts + walkopts,
391 _('[OPTION]... DEST'))
391 _('[OPTION]... DEST'))
392 def archive(ui, repo, dest, **opts):
392 def archive(ui, repo, dest, **opts):
393 '''create an unversioned archive of a repository revision
393 '''create an unversioned archive of a repository revision
394
394
395 By default, the revision used is the parent of the working
395 By default, the revision used is the parent of the working
396 directory; use -r/--rev to specify a different revision.
396 directory; use -r/--rev to specify a different revision.
397
397
398 The archive type is automatically detected based on file
398 The archive type is automatically detected based on file
399 extension (or override using -t/--type).
399 extension (or override using -t/--type).
400
400
401 .. container:: verbose
401 .. container:: verbose
402
402
403 Examples:
403 Examples:
404
404
405 - create a zip file containing the 1.0 release::
405 - create a zip file containing the 1.0 release::
406
406
407 hg archive -r 1.0 project-1.0.zip
407 hg archive -r 1.0 project-1.0.zip
408
408
409 - create a tarball excluding .hg files::
409 - create a tarball excluding .hg files::
410
410
411 hg archive project.tar.gz -X ".hg*"
411 hg archive project.tar.gz -X ".hg*"
412
412
413 Valid types are:
413 Valid types are:
414
414
415 :``files``: a directory full of files (default)
415 :``files``: a directory full of files (default)
416 :``tar``: tar archive, uncompressed
416 :``tar``: tar archive, uncompressed
417 :``tbz2``: tar archive, compressed using bzip2
417 :``tbz2``: tar archive, compressed using bzip2
418 :``tgz``: tar archive, compressed using gzip
418 :``tgz``: tar archive, compressed using gzip
419 :``uzip``: zip archive, uncompressed
419 :``uzip``: zip archive, uncompressed
420 :``zip``: zip archive, compressed using deflate
420 :``zip``: zip archive, compressed using deflate
421
421
422 The exact name of the destination archive or directory is given
422 The exact name of the destination archive or directory is given
423 using a format string; see :hg:`help export` for details.
423 using a format string; see :hg:`help export` for details.
424
424
425 Each member added to an archive file has a directory prefix
425 Each member added to an archive file has a directory prefix
426 prepended. Use -p/--prefix to specify a format string for the
426 prepended. Use -p/--prefix to specify a format string for the
427 prefix. The default is the basename of the archive, with suffixes
427 prefix. The default is the basename of the archive, with suffixes
428 removed.
428 removed.
429
429
430 Returns 0 on success.
430 Returns 0 on success.
431 '''
431 '''
432
432
433 ctx = scmutil.revsingle(repo, opts.get('rev'))
433 ctx = scmutil.revsingle(repo, opts.get('rev'))
434 if not ctx:
434 if not ctx:
435 raise util.Abort(_('no working directory: please specify a revision'))
435 raise util.Abort(_('no working directory: please specify a revision'))
436 node = ctx.node()
436 node = ctx.node()
437 dest = cmdutil.makefilename(repo, dest, node)
437 dest = cmdutil.makefilename(repo, dest, node)
438 if os.path.realpath(dest) == repo.root:
438 if os.path.realpath(dest) == repo.root:
439 raise util.Abort(_('repository root cannot be destination'))
439 raise util.Abort(_('repository root cannot be destination'))
440
440
441 kind = opts.get('type') or archival.guesskind(dest) or 'files'
441 kind = opts.get('type') or archival.guesskind(dest) or 'files'
442 prefix = opts.get('prefix')
442 prefix = opts.get('prefix')
443
443
444 if dest == '-':
444 if dest == '-':
445 if kind == 'files':
445 if kind == 'files':
446 raise util.Abort(_('cannot archive plain files to stdout'))
446 raise util.Abort(_('cannot archive plain files to stdout'))
447 dest = cmdutil.makefileobj(repo, dest)
447 dest = cmdutil.makefileobj(repo, dest)
448 if not prefix:
448 if not prefix:
449 prefix = os.path.basename(repo.root) + '-%h'
449 prefix = os.path.basename(repo.root) + '-%h'
450
450
451 prefix = cmdutil.makefilename(repo, prefix, node)
451 prefix = cmdutil.makefilename(repo, prefix, node)
452 matchfn = scmutil.match(ctx, [], opts)
452 matchfn = scmutil.match(ctx, [], opts)
453 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
453 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
454 matchfn, prefix, subrepos=opts.get('subrepos'))
454 matchfn, prefix, subrepos=opts.get('subrepos'))
455
455
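# Hedged, standalone sketch of the idea behind archival.guesskind() used
# above: choosing an archive type from the destination's file extension. The
# mapping below is illustrative only; the authoritative table lives in
# mercurial/archival.py.

_ARCHIVE_EXTENSIONS = {
    '.zip': 'zip',
    '.tar': 'tar',
    '.tgz': 'tgz',
    '.tar.gz': 'tgz',
    '.tbz2': 'tbz2',
    '.tar.bz2': 'tbz2',
}

def _guess_archive_kind(dest):
    """Return an archive type for dest, or None to fall back to 'files'."""
    lowered = dest.lower()
    for ext, kind in sorted(_ARCHIVE_EXTENSIONS.items(),
                            key=lambda e: -len(e[0])):
        if lowered.endswith(ext):
            return kind
    return None

# _guess_archive_kind('project-1.0.zip')  -> 'zip'
# _guess_archive_kind('project.tar.gz')   -> 'tgz'
# _guess_archive_kind('project-dir')      -> None ('files' is used)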
456 @command('backout',
456 @command('backout',
457 [('', 'merge', None, _('merge with old dirstate parent after backout')),
457 [('', 'merge', None, _('merge with old dirstate parent after backout')),
458 ('', 'commit', None, _('commit if no conflicts were encountered')),
458 ('', 'commit', None, _('commit if no conflicts were encountered')),
459 ('', 'parent', '',
459 ('', 'parent', '',
460 _('parent to choose when backing out merge (DEPRECATED)'), _('REV')),
460 _('parent to choose when backing out merge (DEPRECATED)'), _('REV')),
461 ('r', 'rev', '', _('revision to backout'), _('REV')),
461 ('r', 'rev', '', _('revision to backout'), _('REV')),
462 ('e', 'edit', False, _('invoke editor on commit messages')),
462 ('e', 'edit', False, _('invoke editor on commit messages')),
463 ] + mergetoolopts + walkopts + commitopts + commitopts2,
463 ] + mergetoolopts + walkopts + commitopts + commitopts2,
464 _('[OPTION]... [-r] REV'))
464 _('[OPTION]... [-r] REV'))
465 def backout(ui, repo, node=None, rev=None, commit=False, **opts):
465 def backout(ui, repo, node=None, rev=None, commit=False, **opts):
466 '''reverse effect of earlier changeset
466 '''reverse effect of earlier changeset
467
467
468 Prepare a new changeset with the effect of REV undone in the
468 Prepare a new changeset with the effect of REV undone in the
469 current working directory.
469 current working directory.
470
470
471 If REV is the parent of the working directory, then this new changeset
471 If REV is the parent of the working directory, then this new changeset
472 is committed automatically. Otherwise, hg needs to merge the
472 is committed automatically. Otherwise, hg needs to merge the
473 changes and the merged result is left uncommitted.
473 changes and the merged result is left uncommitted.
474
474
475 .. note::
475 .. note::
476
476
477 backout cannot be used to fix either an unwanted or
477 backout cannot be used to fix either an unwanted or
478 incorrect merge.
478 incorrect merge.
479
479
480 .. container:: verbose
480 .. container:: verbose
481
481
482 By default, the pending changeset will have one parent,
482 By default, the pending changeset will have one parent,
483 maintaining a linear history. With --merge, the pending
483 maintaining a linear history. With --merge, the pending
484 changeset will instead have two parents: the old parent of the
484 changeset will instead have two parents: the old parent of the
485 working directory and a new child of REV that simply undoes REV.
485 working directory and a new child of REV that simply undoes REV.
486
486
487 Before version 1.7, the behavior without --merge was equivalent
487 Before version 1.7, the behavior without --merge was equivalent
488 to specifying --merge followed by :hg:`update --clean .` to
488 to specifying --merge followed by :hg:`update --clean .` to
489 cancel the merge and leave the child of REV as a head to be
489 cancel the merge and leave the child of REV as a head to be
490 merged separately.
490 merged separately.
491
491
492 See :hg:`help dates` for a list of formats valid for -d/--date.
492 See :hg:`help dates` for a list of formats valid for -d/--date.
493
493
494 Returns 0 on success, 1 if nothing to backout or there are unresolved
494 Returns 0 on success, 1 if nothing to backout or there are unresolved
495 files.
495 files.
496 '''
496 '''
497 if rev and node:
497 if rev and node:
498 raise util.Abort(_("please specify just one revision"))
498 raise util.Abort(_("please specify just one revision"))
499
499
500 if not rev:
500 if not rev:
501 rev = node
501 rev = node
502
502
503 if not rev:
503 if not rev:
504 raise util.Abort(_("please specify a revision to backout"))
504 raise util.Abort(_("please specify a revision to backout"))
505
505
506 date = opts.get('date')
506 date = opts.get('date')
507 if date:
507 if date:
508 opts['date'] = util.parsedate(date)
508 opts['date'] = util.parsedate(date)
509
509
510 cmdutil.checkunfinished(repo)
510 cmdutil.checkunfinished(repo)
511 cmdutil.bailifchanged(repo)
511 cmdutil.bailifchanged(repo)
512 node = scmutil.revsingle(repo, rev).node()
512 node = scmutil.revsingle(repo, rev).node()
513
513
514 op1, op2 = repo.dirstate.parents()
514 op1, op2 = repo.dirstate.parents()
515 if not repo.changelog.isancestor(node, op1):
515 if not repo.changelog.isancestor(node, op1):
516 raise util.Abort(_('cannot backout change that is not an ancestor'))
516 raise util.Abort(_('cannot backout change that is not an ancestor'))
517
517
518 p1, p2 = repo.changelog.parents(node)
518 p1, p2 = repo.changelog.parents(node)
519 if p1 == nullid:
519 if p1 == nullid:
520 raise util.Abort(_('cannot backout a change with no parents'))
520 raise util.Abort(_('cannot backout a change with no parents'))
521 if p2 != nullid:
521 if p2 != nullid:
522 if not opts.get('parent'):
522 if not opts.get('parent'):
523 raise util.Abort(_('cannot backout a merge changeset'))
523 raise util.Abort(_('cannot backout a merge changeset'))
524 p = repo.lookup(opts['parent'])
524 p = repo.lookup(opts['parent'])
525 if p not in (p1, p2):
525 if p not in (p1, p2):
526 raise util.Abort(_('%s is not a parent of %s') %
526 raise util.Abort(_('%s is not a parent of %s') %
527 (short(p), short(node)))
527 (short(p), short(node)))
528 parent = p
528 parent = p
529 else:
529 else:
530 if opts.get('parent'):
530 if opts.get('parent'):
531 raise util.Abort(_('cannot use --parent on non-merge changeset'))
531 raise util.Abort(_('cannot use --parent on non-merge changeset'))
532 parent = p1
532 parent = p1
533
533
534 # the backout should appear on the same branch
534 # the backout should appear on the same branch
535 wlock = repo.wlock()
535 wlock = repo.wlock()
536 try:
536 try:
537 branch = repo.dirstate.branch()
537 branch = repo.dirstate.branch()
538 bheads = repo.branchheads(branch)
538 bheads = repo.branchheads(branch)
539 rctx = scmutil.revsingle(repo, hex(parent))
539 rctx = scmutil.revsingle(repo, hex(parent))
540 if not opts.get('merge') and op1 != node:
540 if not opts.get('merge') and op1 != node:
541 try:
541 try:
542 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
542 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
543 'backout')
543 'backout')
544 repo.dirstate.beginparentchange()
544 repo.dirstate.beginparentchange()
545 stats = mergemod.update(repo, parent, True, True, False,
545 stats = mergemod.update(repo, parent, True, True, False,
546 node, False)
546 node, False)
547 repo.setparents(op1, op2)
547 repo.setparents(op1, op2)
548 repo.dirstate.endparentchange()
548 repo.dirstate.endparentchange()
549 hg._showstats(repo, stats)
549 hg._showstats(repo, stats)
550 if stats[3]:
550 if stats[3]:
551 repo.ui.status(_("use 'hg resolve' to retry unresolved "
551 repo.ui.status(_("use 'hg resolve' to retry unresolved "
552 "file merges\n"))
552 "file merges\n"))
553 return 1
553 return 1
554 elif not commit:
554 elif not commit:
555 msg = _("changeset %s backed out, "
555 msg = _("changeset %s backed out, "
556 "don't forget to commit.\n")
556 "don't forget to commit.\n")
557 ui.status(msg % short(node))
557 ui.status(msg % short(node))
558 return 0
558 return 0
559 finally:
559 finally:
560 ui.setconfig('ui', 'forcemerge', '', '')
560 ui.setconfig('ui', 'forcemerge', '', '')
561 else:
561 else:
562 hg.clean(repo, node, show_stats=False)
562 hg.clean(repo, node, show_stats=False)
563 repo.dirstate.setbranch(branch)
563 repo.dirstate.setbranch(branch)
564 cmdutil.revert(ui, repo, rctx, repo.dirstate.parents())
564 cmdutil.revert(ui, repo, rctx, repo.dirstate.parents())
565
565
566
566
567 def commitfunc(ui, repo, message, match, opts):
567 def commitfunc(ui, repo, message, match, opts):
568 editform = 'backout'
568 editform = 'backout'
569 e = cmdutil.getcommiteditor(editform=editform, **opts)
569 e = cmdutil.getcommiteditor(editform=editform, **opts)
570 if not message:
570 if not message:
571 # we don't translate commit messages
571 # we don't translate commit messages
572 message = "Backed out changeset %s" % short(node)
572 message = "Backed out changeset %s" % short(node)
573 e = cmdutil.getcommiteditor(edit=True, editform=editform)
573 e = cmdutil.getcommiteditor(edit=True, editform=editform)
574 return repo.commit(message, opts.get('user'), opts.get('date'),
574 return repo.commit(message, opts.get('user'), opts.get('date'),
575 match, editor=e)
575 match, editor=e)
576 newnode = cmdutil.commit(ui, repo, commitfunc, [], opts)
576 newnode = cmdutil.commit(ui, repo, commitfunc, [], opts)
577 if not newnode:
577 if not newnode:
578 ui.status(_("nothing changed\n"))
578 ui.status(_("nothing changed\n"))
579 return 1
579 return 1
580 cmdutil.commitstatus(repo, newnode, branch, bheads)
580 cmdutil.commitstatus(repo, newnode, branch, bheads)
581
581
582 def nice(node):
582 def nice(node):
583 return '%d:%s' % (repo.changelog.rev(node), short(node))
583 return '%d:%s' % (repo.changelog.rev(node), short(node))
584 ui.status(_('changeset %s backs out changeset %s\n') %
584 ui.status(_('changeset %s backs out changeset %s\n') %
585 (nice(repo.changelog.tip()), nice(node)))
585 (nice(repo.changelog.tip()), nice(node)))
586 if opts.get('merge') and op1 != node:
586 if opts.get('merge') and op1 != node:
587 hg.clean(repo, op1, show_stats=False)
587 hg.clean(repo, op1, show_stats=False)
588 ui.status(_('merging with changeset %s\n')
588 ui.status(_('merging with changeset %s\n')
589 % nice(repo.changelog.tip()))
589 % nice(repo.changelog.tip()))
590 try:
590 try:
591 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
591 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
592 'backout')
592 'backout')
593 return hg.merge(repo, hex(repo.changelog.tip()))
593 return hg.merge(repo, hex(repo.changelog.tip()))
594 finally:
594 finally:
595 ui.setconfig('ui', 'forcemerge', '', '')
595 ui.setconfig('ui', 'forcemerge', '', '')
596 finally:
596 finally:
597 wlock.release()
597 wlock.release()
598 return 0
598 return 0
599
599
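# Illustrative decision helper, not Mercurial API, summarizing the behaviour
# documented in the backout docstring above. The --commit and --tool paths are
# deliberately omitted for brevity.

def _backout_outcome(rev_is_wdir_parent, merge_requested):
    """Roughly what happens after 'hg backout REV' (ignoring --commit)."""
    if rev_is_wdir_parent:
        return 'new changeset committed automatically'
    if merge_requested:
        return 'backout committed, then merged with the old dirstate parent'
    return 'changes left in the working directory, uncommitted'

# _backout_outcome(True, False)  -> committed automatically
# _backout_outcome(False, True)  -> committed and merged with old parent
# _backout_outcome(False, False) -> left uncommitted ("don't forget to commit")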
600 @command('bisect',
600 @command('bisect',
601 [('r', 'reset', False, _('reset bisect state')),
601 [('r', 'reset', False, _('reset bisect state')),
602 ('g', 'good', False, _('mark changeset good')),
602 ('g', 'good', False, _('mark changeset good')),
603 ('b', 'bad', False, _('mark changeset bad')),
603 ('b', 'bad', False, _('mark changeset bad')),
604 ('s', 'skip', False, _('skip testing changeset')),
604 ('s', 'skip', False, _('skip testing changeset')),
605 ('e', 'extend', False, _('extend the bisect range')),
605 ('e', 'extend', False, _('extend the bisect range')),
606 ('c', 'command', '', _('use command to check changeset state'), _('CMD')),
606 ('c', 'command', '', _('use command to check changeset state'), _('CMD')),
607 ('U', 'noupdate', False, _('do not update to target'))],
607 ('U', 'noupdate', False, _('do not update to target'))],
608 _("[-gbsr] [-U] [-c CMD] [REV]"))
608 _("[-gbsr] [-U] [-c CMD] [REV]"))
609 def bisect(ui, repo, rev=None, extra=None, command=None,
609 def bisect(ui, repo, rev=None, extra=None, command=None,
610 reset=None, good=None, bad=None, skip=None, extend=None,
610 reset=None, good=None, bad=None, skip=None, extend=None,
611 noupdate=None):
611 noupdate=None):
612 """subdivision search of changesets
612 """subdivision search of changesets
613
613
614 This command helps to find changesets which introduce problems. To
614 This command helps to find changesets which introduce problems. To
615 use, mark the earliest changeset you know exhibits the problem as
615 use, mark the earliest changeset you know exhibits the problem as
616 bad, then mark the latest changeset which is free from the problem
616 bad, then mark the latest changeset which is free from the problem
617 as good. Bisect will update your working directory to a revision
617 as good. Bisect will update your working directory to a revision
618 for testing (unless the -U/--noupdate option is specified). Once
618 for testing (unless the -U/--noupdate option is specified). Once
619 you have performed tests, mark the working directory as good or
619 you have performed tests, mark the working directory as good or
620 bad, and bisect will either update to another candidate changeset
620 bad, and bisect will either update to another candidate changeset
621 or announce that it has found the bad revision.
621 or announce that it has found the bad revision.
622
622
623 As a shortcut, you can also use the revision argument to mark a
623 As a shortcut, you can also use the revision argument to mark a
624 revision as good or bad without checking it out first.
624 revision as good or bad without checking it out first.
625
625
626 If you supply a command, it will be used for automatic bisection.
626 If you supply a command, it will be used for automatic bisection.
627 The environment variable HG_NODE will contain the ID of the
627 The environment variable HG_NODE will contain the ID of the
628 changeset being tested. The exit status of the command will be
628 changeset being tested. The exit status of the command will be
629 used to mark revisions as good or bad: status 0 means good, 125
629 used to mark revisions as good or bad: status 0 means good, 125
630 means to skip the revision, 127 (command not found) will abort the
630 means to skip the revision, 127 (command not found) will abort the
631 bisection, and any other non-zero exit status means the revision
631 bisection, and any other non-zero exit status means the revision
632 is bad.
632 is bad.
633
633
634 .. container:: verbose
634 .. container:: verbose
635
635
636 Some examples:
636 Some examples:
637
637
638 - start a bisection with known bad revision 34, and good revision 12::
638 - start a bisection with known bad revision 34, and good revision 12::
639
639
640 hg bisect --bad 34
640 hg bisect --bad 34
641 hg bisect --good 12
641 hg bisect --good 12
642
642
643 - advance the current bisection by marking current revision as good or
643 - advance the current bisection by marking current revision as good or
644 bad::
644 bad::
645
645
646 hg bisect --good
646 hg bisect --good
647 hg bisect --bad
647 hg bisect --bad
648
648
649 - mark the current revision, or a known revision, to be skipped (e.g. if
649 - mark the current revision, or a known revision, to be skipped (e.g. if
650 that revision is not usable because of another issue)::
650 that revision is not usable because of another issue)::
651
651
652 hg bisect --skip
652 hg bisect --skip
653 hg bisect --skip 23
653 hg bisect --skip 23
654
654
655 - skip all revisions that do not touch directories ``foo`` or ``bar``::
655 - skip all revisions that do not touch directories ``foo`` or ``bar``::
656
656
657 hg bisect --skip "!( file('path:foo') & file('path:bar') )"
657 hg bisect --skip "!( file('path:foo') & file('path:bar') )"
658
658
659 - forget the current bisection::
659 - forget the current bisection::
660
660
661 hg bisect --reset
661 hg bisect --reset
662
662
663 - use 'make && make tests' to automatically find the first broken
663 - use 'make && make tests' to automatically find the first broken
664 revision::
664 revision::
665
665
666 hg bisect --reset
666 hg bisect --reset
667 hg bisect --bad 34
667 hg bisect --bad 34
668 hg bisect --good 12
668 hg bisect --good 12
669 hg bisect --command "make && make tests"
669 hg bisect --command "make && make tests"
670
670
671 - see all changesets whose states are already known in the current
671 - see all changesets whose states are already known in the current
672 bisection::
672 bisection::
673
673
674 hg log -r "bisect(pruned)"
674 hg log -r "bisect(pruned)"
675
675
676 - see the changeset currently being bisected (especially useful
676 - see the changeset currently being bisected (especially useful
677 if running with -U/--noupdate)::
677 if running with -U/--noupdate)::
678
678
679 hg log -r "bisect(current)"
679 hg log -r "bisect(current)"
680
680
681 - see all changesets that took part in the current bisection::
681 - see all changesets that took part in the current bisection::
682
682
683 hg log -r "bisect(range)"
683 hg log -r "bisect(range)"
684
684
685 - you can even get a nice graph::
685 - you can even get a nice graph::
686
686
687 hg log --graph -r "bisect(range)"
687 hg log --graph -r "bisect(range)"
688
688
689 See :hg:`help revsets` for more about the `bisect()` keyword.
689 See :hg:`help revsets` for more about the `bisect()` keyword.
690
690
691 Returns 0 on success.
691 Returns 0 on success.
692 """
692 """
693 def extendbisectrange(nodes, good):
693 def extendbisectrange(nodes, good):
694 # bisect is incomplete when it ends on a merge node and
694 # bisect is incomplete when it ends on a merge node and
695 # one of the parents was not checked.
695 # one of the parents was not checked.
696 parents = repo[nodes[0]].parents()
696 parents = repo[nodes[0]].parents()
697 if len(parents) > 1:
697 if len(parents) > 1:
698 if good:
698 if good:
699 side = state['bad']
699 side = state['bad']
700 else:
700 else:
701 side = state['good']
701 side = state['good']
702 num = len(set(i.node() for i in parents) & set(side))
702 num = len(set(i.node() for i in parents) & set(side))
703 if num == 1:
703 if num == 1:
704 return parents[0].ancestor(parents[1])
704 return parents[0].ancestor(parents[1])
705 return None
705 return None
706
706
707 def print_result(nodes, good):
707 def print_result(nodes, good):
708 displayer = cmdutil.show_changeset(ui, repo, {})
708 displayer = cmdutil.show_changeset(ui, repo, {})
709 if len(nodes) == 1:
709 if len(nodes) == 1:
710 # narrowed it down to a single revision
710 # narrowed it down to a single revision
711 if good:
711 if good:
712 ui.write(_("The first good revision is:\n"))
712 ui.write(_("The first good revision is:\n"))
713 else:
713 else:
714 ui.write(_("The first bad revision is:\n"))
714 ui.write(_("The first bad revision is:\n"))
715 displayer.show(repo[nodes[0]])
715 displayer.show(repo[nodes[0]])
716 extendnode = extendbisectrange(nodes, good)
716 extendnode = extendbisectrange(nodes, good)
717 if extendnode is not None:
717 if extendnode is not None:
718 ui.write(_('Not all ancestors of this changeset have been'
718 ui.write(_('Not all ancestors of this changeset have been'
719 ' checked.\nUse bisect --extend to continue the '
719 ' checked.\nUse bisect --extend to continue the '
720 'bisection from\nthe common ancestor, %s.\n')
720 'bisection from\nthe common ancestor, %s.\n')
721 % extendnode)
721 % extendnode)
722 else:
722 else:
723 # multiple possible revisions
723 # multiple possible revisions
724 if good:
724 if good:
725 ui.write(_("Due to skipped revisions, the first "
725 ui.write(_("Due to skipped revisions, the first "
726 "good revision could be any of:\n"))
726 "good revision could be any of:\n"))
727 else:
727 else:
728 ui.write(_("Due to skipped revisions, the first "
728 ui.write(_("Due to skipped revisions, the first "
729 "bad revision could be any of:\n"))
729 "bad revision could be any of:\n"))
730 for n in nodes:
730 for n in nodes:
731 displayer.show(repo[n])
731 displayer.show(repo[n])
732 displayer.close()
732 displayer.close()
733
733
734 def check_state(state, interactive=True):
734 def check_state(state, interactive=True):
735 if not state['good'] or not state['bad']:
735 if not state['good'] or not state['bad']:
736 if (good or bad or skip or reset) and interactive:
736 if (good or bad or skip or reset) and interactive:
737 return
737 return
738 if not state['good']:
738 if not state['good']:
739 raise util.Abort(_('cannot bisect (no known good revisions)'))
739 raise util.Abort(_('cannot bisect (no known good revisions)'))
740 else:
740 else:
741 raise util.Abort(_('cannot bisect (no known bad revisions)'))
741 raise util.Abort(_('cannot bisect (no known bad revisions)'))
742 return True
742 return True
743
743
744 # backward compatibility
744 # backward compatibility
745 if rev in "good bad reset init".split():
745 if rev in "good bad reset init".split():
746 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
746 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
747 cmd, rev, extra = rev, extra, None
747 cmd, rev, extra = rev, extra, None
748 if cmd == "good":
748 if cmd == "good":
749 good = True
749 good = True
750 elif cmd == "bad":
750 elif cmd == "bad":
751 bad = True
751 bad = True
752 else:
752 else:
753 reset = True
753 reset = True
754 elif extra or good + bad + skip + reset + extend + bool(command) > 1:
754 elif extra or good + bad + skip + reset + extend + bool(command) > 1:
755 raise util.Abort(_('incompatible arguments'))
755 raise util.Abort(_('incompatible arguments'))
756
756
757 cmdutil.checkunfinished(repo)
757 cmdutil.checkunfinished(repo)
758
758
759 if reset:
759 if reset:
760 p = repo.join("bisect.state")
760 p = repo.join("bisect.state")
761 if os.path.exists(p):
761 if os.path.exists(p):
762 os.unlink(p)
762 os.unlink(p)
763 return
763 return
764
764
765 state = hbisect.load_state(repo)
765 state = hbisect.load_state(repo)
766
766
767 if command:
767 if command:
768 changesets = 1
768 changesets = 1
769 if noupdate:
769 if noupdate:
770 try:
770 try:
771 node = state['current'][0]
771 node = state['current'][0]
772 except LookupError:
772 except LookupError:
773 raise util.Abort(_('current bisect revision is unknown - '
773 raise util.Abort(_('current bisect revision is unknown - '
774 'start a new bisect to fix'))
774 'start a new bisect to fix'))
775 else:
775 else:
776 node, p2 = repo.dirstate.parents()
776 node, p2 = repo.dirstate.parents()
777 if p2 != nullid:
777 if p2 != nullid:
778 raise util.Abort(_('current bisect revision is a merge'))
778 raise util.Abort(_('current bisect revision is a merge'))
779 try:
779 try:
780 while changesets:
780 while changesets:
781 # update state
781 # update state
782 state['current'] = [node]
782 state['current'] = [node]
783 hbisect.save_state(repo, state)
783 hbisect.save_state(repo, state)
784 status = ui.system(command, environ={'HG_NODE': hex(node)})
784 status = ui.system(command, environ={'HG_NODE': hex(node)})
785 if status == 125:
785 if status == 125:
786 transition = "skip"
786 transition = "skip"
787 elif status == 0:
787 elif status == 0:
788 transition = "good"
788 transition = "good"
789 # status < 0 means process was killed
789 # status < 0 means process was killed
790 elif status == 127:
790 elif status == 127:
791 raise util.Abort(_("failed to execute %s") % command)
791 raise util.Abort(_("failed to execute %s") % command)
792 elif status < 0:
792 elif status < 0:
793 raise util.Abort(_("%s killed") % command)
793 raise util.Abort(_("%s killed") % command)
794 else:
794 else:
795 transition = "bad"
795 transition = "bad"
796 ctx = scmutil.revsingle(repo, rev, node)
796 ctx = scmutil.revsingle(repo, rev, node)
797 rev = None # clear for future iterations
797 rev = None # clear for future iterations
798 state[transition].append(ctx.node())
798 state[transition].append(ctx.node())
799 ui.status(_('changeset %d:%s: %s\n') % (ctx, ctx, transition))
799 ui.status(_('changeset %d:%s: %s\n') % (ctx, ctx, transition))
800 check_state(state, interactive=False)
800 check_state(state, interactive=False)
801 # bisect
801 # bisect
802 nodes, changesets, bgood = hbisect.bisect(repo.changelog, state)
802 nodes, changesets, bgood = hbisect.bisect(repo.changelog, state)
803 # update to next check
803 # update to next check
804 node = nodes[0]
804 node = nodes[0]
805 if not noupdate:
805 if not noupdate:
806 cmdutil.bailifchanged(repo)
806 cmdutil.bailifchanged(repo)
807 hg.clean(repo, node, show_stats=False)
807 hg.clean(repo, node, show_stats=False)
808 finally:
808 finally:
809 state['current'] = [node]
809 state['current'] = [node]
810 hbisect.save_state(repo, state)
810 hbisect.save_state(repo, state)
811 print_result(nodes, bgood)
811 print_result(nodes, bgood)
812 return
812 return
813
813
814 # update state
814 # update state
815
815
816 if rev:
816 if rev:
817 nodes = [repo.lookup(i) for i in scmutil.revrange(repo, [rev])]
817 nodes = [repo.lookup(i) for i in scmutil.revrange(repo, [rev])]
818 else:
818 else:
819 nodes = [repo.lookup('.')]
819 nodes = [repo.lookup('.')]
820
820
821 if good or bad or skip:
821 if good or bad or skip:
822 if good:
822 if good:
823 state['good'] += nodes
823 state['good'] += nodes
824 elif bad:
824 elif bad:
825 state['bad'] += nodes
825 state['bad'] += nodes
826 elif skip:
826 elif skip:
827 state['skip'] += nodes
827 state['skip'] += nodes
828 hbisect.save_state(repo, state)
828 hbisect.save_state(repo, state)
829
829
830 if not check_state(state):
830 if not check_state(state):
831 return
831 return
832
832
833 # actually bisect
833 # actually bisect
834 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
834 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
835 if extend:
835 if extend:
836 if not changesets:
836 if not changesets:
837 extendnode = extendbisectrange(nodes, good)
837 extendnode = extendbisectrange(nodes, good)
838 if extendnode is not None:
838 if extendnode is not None:
839 ui.write(_("Extending search to changeset %d:%s\n")
839 ui.write(_("Extending search to changeset %d:%s\n")
840 % (extendnode.rev(), extendnode))
840 % (extendnode.rev(), extendnode))
841 state['current'] = [extendnode.node()]
841 state['current'] = [extendnode.node()]
842 hbisect.save_state(repo, state)
842 hbisect.save_state(repo, state)
843 if noupdate:
843 if noupdate:
844 return
844 return
845 cmdutil.bailifchanged(repo)
845 cmdutil.bailifchanged(repo)
846 return hg.clean(repo, extendnode.node())
846 return hg.clean(repo, extendnode.node())
847 raise util.Abort(_("nothing to extend"))
847 raise util.Abort(_("nothing to extend"))
848
848
849 if changesets == 0:
849 if changesets == 0:
850 print_result(nodes, good)
850 print_result(nodes, good)
851 else:
851 else:
852 assert len(nodes) == 1 # only a single node can be tested next
852 assert len(nodes) == 1 # only a single node can be tested next
853 node = nodes[0]
853 node = nodes[0]
854 # compute the approximate number of remaining tests
854 # compute the approximate number of remaining tests
855 tests, size = 0, 2
855 tests, size = 0, 2
856 while size <= changesets:
856 while size <= changesets:
857 tests, size = tests + 1, size * 2
857 tests, size = tests + 1, size * 2
858 rev = repo.changelog.rev(node)
858 rev = repo.changelog.rev(node)
859 ui.write(_("Testing changeset %d:%s "
859 ui.write(_("Testing changeset %d:%s "
860 "(%d changesets remaining, ~%d tests)\n")
860 "(%d changesets remaining, ~%d tests)\n")
861 % (rev, short(node), changesets, tests))
861 % (rev, short(node), changesets, tests))
862 state['current'] = [node]
862 state['current'] = [node]
863 hbisect.save_state(repo, state)
863 hbisect.save_state(repo, state)
864 if not noupdate:
864 if not noupdate:
865 cmdutil.bailifchanged(repo)
865 cmdutil.bailifchanged(repo)
866 return hg.clean(repo, node)
866 return hg.clean(repo, node)
867
867
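# Two small standalone sketches (illustrative names, not Mercurial API) of
# logic used in bisect above: how `hg bisect --command` maps the test
# command's exit status to a verdict, and how the "~N tests" estimate counts
# the halvings needed to cover the remaining changesets.

def _status_to_transition(status):
    """Mirror the exit-status convention documented for --command."""
    if status == 125:
        return 'skip'
    if status == 0:
        return 'good'
    if status == 127:
        raise RuntimeError('command not found - aborting bisection')
    if status < 0:
        raise RuntimeError('command was killed - aborting bisection')
    return 'bad'

def _estimated_tests(changesets):
    """Smallest t with 2**(t+1) > changesets, as in the loop above."""
    tests, size = 0, 2
    while size <= changesets:
        tests, size = tests + 1, size * 2
    return tests

# _status_to_transition(0) -> 'good'    _status_to_transition(1) -> 'bad'
# _estimated_tests(100)    -> 6         (roughly log2 of the remaining range)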
868 @command('bookmarks|bookmark',
868 @command('bookmarks|bookmark',
869 [('f', 'force', False, _('force')),
869 [('f', 'force', False, _('force')),
870 ('r', 'rev', '', _('revision'), _('REV')),
870 ('r', 'rev', '', _('revision'), _('REV')),
871 ('d', 'delete', False, _('delete a given bookmark')),
871 ('d', 'delete', False, _('delete a given bookmark')),
872 ('m', 'rename', '', _('rename a given bookmark'), _('NAME')),
872 ('m', 'rename', '', _('rename a given bookmark'), _('NAME')),
873 ('i', 'inactive', False, _('mark a bookmark inactive')),
873 ('i', 'inactive', False, _('mark a bookmark inactive')),
874 ] + formatteropts,
874 ] + formatteropts,
875 _('hg bookmarks [OPTIONS]... [NAME]...'))
875 _('hg bookmarks [OPTIONS]... [NAME]...'))
876 def bookmark(ui, repo, *names, **opts):
876 def bookmark(ui, repo, *names, **opts):
877 '''create a new bookmark or list existing bookmarks
877 '''create a new bookmark or list existing bookmarks
878
878
879 Bookmarks are labels on changesets to help track lines of development.
879 Bookmarks are labels on changesets to help track lines of development.
880 Bookmarks are unversioned and can be moved, renamed and deleted.
880 Bookmarks are unversioned and can be moved, renamed and deleted.
881 Deleting or moving a bookmark has no effect on the associated changesets.
881 Deleting or moving a bookmark has no effect on the associated changesets.
882
882
883 Creating or updating to a bookmark causes it to be marked as 'active'.
883 Creating or updating to a bookmark causes it to be marked as 'active'.
884 The active bookmark is indicated with a '*'.
884 The active bookmark is indicated with a '*'.
885 When a commit is made, the active bookmark will advance to the new commit.
885 When a commit is made, the active bookmark will advance to the new commit.
886 A plain :hg:`update` will also advance an active bookmark, if possible.
886 A plain :hg:`update` will also advance an active bookmark, if possible.
887 Updating away from a bookmark will cause it to be deactivated.
887 Updating away from a bookmark will cause it to be deactivated.
888
888
889 Bookmarks can be pushed and pulled between repositories (see
889 Bookmarks can be pushed and pulled between repositories (see
890 :hg:`help push` and :hg:`help pull`). If a shared bookmark has
890 :hg:`help push` and :hg:`help pull`). If a shared bookmark has
891 diverged, a new 'divergent bookmark' of the form 'name@path' will
891 diverged, a new 'divergent bookmark' of the form 'name@path' will
892 be created. Using :hg:`merge` will resolve the divergence.
892 be created. Using :hg:`merge` will resolve the divergence.
893
893
894 A bookmark named '@' has the special property that :hg:`clone` will
894 A bookmark named '@' has the special property that :hg:`clone` will
895 check it out by default if it exists.
895 check it out by default if it exists.
896
896
897 .. container:: verbose
897 .. container:: verbose
898
898
899 Examples:
899 Examples:
900
900
901 - create an active bookmark for a new line of development::
901 - create an active bookmark for a new line of development::
902
902
903 hg book new-feature
903 hg book new-feature
904
904
905 - create an inactive bookmark as a place marker::
905 - create an inactive bookmark as a place marker::
906
906
907 hg book -i reviewed
907 hg book -i reviewed
908
908
909 - create an inactive bookmark on another changeset::
909 - create an inactive bookmark on another changeset::
910
910
911 hg book -r .^ tested
911 hg book -r .^ tested
912
912
913 - move the '@' bookmark from another branch::
913 - move the '@' bookmark from another branch::
914
914
915 hg book -f @
915 hg book -f @
916 '''
916 '''
917 force = opts.get('force')
917 force = opts.get('force')
918 rev = opts.get('rev')
918 rev = opts.get('rev')
919 delete = opts.get('delete')
919 delete = opts.get('delete')
920 rename = opts.get('rename')
920 rename = opts.get('rename')
921 inactive = opts.get('inactive')
921 inactive = opts.get('inactive')
922
922
923 def checkformat(mark):
923 def checkformat(mark):
924 mark = mark.strip()
924 mark = mark.strip()
925 if not mark:
925 if not mark:
926 raise util.Abort(_("bookmark names cannot consist entirely of "
926 raise util.Abort(_("bookmark names cannot consist entirely of "
927 "whitespace"))
927 "whitespace"))
928 scmutil.checknewlabel(repo, mark, 'bookmark')
928 scmutil.checknewlabel(repo, mark, 'bookmark')
929 return mark
929 return mark
930
930
931 def checkconflict(repo, mark, cur, force=False, target=None):
931 def checkconflict(repo, mark, cur, force=False, target=None):
932 if mark in marks and not force:
932 if mark in marks and not force:
933 if target:
933 if target:
934 if marks[mark] == target and target == cur:
934 if marks[mark] == target and target == cur:
935 # re-activating a bookmark
935 # re-activating a bookmark
936 return
936 return
937 anc = repo.changelog.ancestors([repo[target].rev()])
937 anc = repo.changelog.ancestors([repo[target].rev()])
938 bmctx = repo[marks[mark]]
938 bmctx = repo[marks[mark]]
939 divs = [repo[b].node() for b in marks
939 divs = [repo[b].node() for b in marks
940 if b.split('@', 1)[0] == mark.split('@', 1)[0]]
940 if b.split('@', 1)[0] == mark.split('@', 1)[0]]
941
941
942 # allow resolving a single divergent bookmark even if moving
942 # allow resolving a single divergent bookmark even if moving
943 # the bookmark across branches when a revision is specified
943 # the bookmark across branches when a revision is specified
944 # that contains a divergent bookmark
944 # that contains a divergent bookmark
945 if bmctx.rev() not in anc and target in divs:
945 if bmctx.rev() not in anc and target in divs:
946 bookmarks.deletedivergent(repo, [target], mark)
946 bookmarks.deletedivergent(repo, [target], mark)
947 return
947 return
948
948
949 deletefrom = [b for b in divs
949 deletefrom = [b for b in divs
950 if repo[b].rev() in anc or b == target]
950 if repo[b].rev() in anc or b == target]
951 bookmarks.deletedivergent(repo, deletefrom, mark)
951 bookmarks.deletedivergent(repo, deletefrom, mark)
952 if bookmarks.validdest(repo, bmctx, repo[target]):
952 if bookmarks.validdest(repo, bmctx, repo[target]):
953 ui.status(_("moving bookmark '%s' forward from %s\n") %
953 ui.status(_("moving bookmark '%s' forward from %s\n") %
954 (mark, short(bmctx.node())))
954 (mark, short(bmctx.node())))
955 return
955 return
956 raise util.Abort(_("bookmark '%s' already exists "
956 raise util.Abort(_("bookmark '%s' already exists "
957 "(use -f to force)") % mark)
957 "(use -f to force)") % mark)
958 if ((mark in repo.branchmap() or mark == repo.dirstate.branch())
958 if ((mark in repo.branchmap() or mark == repo.dirstate.branch())
959 and not force):
959 and not force):
960 raise util.Abort(
960 raise util.Abort(
961 _("a bookmark cannot have the name of an existing branch"))
961 _("a bookmark cannot have the name of an existing branch"))
962
962
963 if delete and rename:
963 if delete and rename:
964 raise util.Abort(_("--delete and --rename are incompatible"))
964 raise util.Abort(_("--delete and --rename are incompatible"))
965 if delete and rev:
965 if delete and rev:
966 raise util.Abort(_("--rev is incompatible with --delete"))
966 raise util.Abort(_("--rev is incompatible with --delete"))
967 if rename and rev:
967 if rename and rev:
968 raise util.Abort(_("--rev is incompatible with --rename"))
968 raise util.Abort(_("--rev is incompatible with --rename"))
969 if not names and (delete or rev):
969 if not names and (delete or rev):
970 raise util.Abort(_("bookmark name required"))
970 raise util.Abort(_("bookmark name required"))
971
971
972 if delete or rename or names or inactive:
972 if delete or rename or names or inactive:
973 wlock = repo.wlock()
973 wlock = repo.wlock()
974 try:
974 try:
975 cur = repo.changectx('.').node()
975 cur = repo.changectx('.').node()
976 marks = repo._bookmarks
976 marks = repo._bookmarks
977 if delete:
977 if delete:
978 for mark in names:
978 for mark in names:
979 if mark not in marks:
979 if mark not in marks:
980 raise util.Abort(_("bookmark '%s' does not exist") %
980 raise util.Abort(_("bookmark '%s' does not exist") %
981 mark)
981 mark)
982 if mark == repo._bookmarkcurrent:
982 if mark == repo._activebookmark:
983 bookmarks.deactivate(repo)
983 bookmarks.deactivate(repo)
984 del marks[mark]
984 del marks[mark]
985 marks.write()
985 marks.write()
986
986
987 elif rename:
987 elif rename:
988 if not names:
988 if not names:
989 raise util.Abort(_("new bookmark name required"))
989 raise util.Abort(_("new bookmark name required"))
990 elif len(names) > 1:
990 elif len(names) > 1:
991 raise util.Abort(_("only one new bookmark name allowed"))
991 raise util.Abort(_("only one new bookmark name allowed"))
992 mark = checkformat(names[0])
992 mark = checkformat(names[0])
993 if rename not in marks:
993 if rename not in marks:
994 raise util.Abort(_("bookmark '%s' does not exist") % rename)
994 raise util.Abort(_("bookmark '%s' does not exist") % rename)
995 checkconflict(repo, mark, cur, force)
995 checkconflict(repo, mark, cur, force)
996 marks[mark] = marks[rename]
996 marks[mark] = marks[rename]
997 if repo._bookmarkcurrent == rename and not inactive:
997 if repo._activebookmark == rename and not inactive:
998 bookmarks.activate(repo, mark)
998 bookmarks.activate(repo, mark)
999 del marks[rename]
999 del marks[rename]
1000 marks.write()
1000 marks.write()
1001
1001
1002 elif names:
1002 elif names:
1003 newact = None
1003 newact = None
1004 for mark in names:
1004 for mark in names:
1005 mark = checkformat(mark)
1005 mark = checkformat(mark)
1006 if newact is None:
1006 if newact is None:
1007 newact = mark
1007 newact = mark
1008 if inactive and mark == repo._bookmarkcurrent:
1008 if inactive and mark == repo._activebookmark:
1009 bookmarks.deactivate(repo)
1009 bookmarks.deactivate(repo)
1010 return
1010 return
1011 tgt = cur
1011 tgt = cur
1012 if rev:
1012 if rev:
1013 tgt = scmutil.revsingle(repo, rev).node()
1013 tgt = scmutil.revsingle(repo, rev).node()
1014 checkconflict(repo, mark, cur, force, tgt)
1014 checkconflict(repo, mark, cur, force, tgt)
1015 marks[mark] = tgt
1015 marks[mark] = tgt
1016 if not inactive and cur == marks[newact] and not rev:
1016 if not inactive and cur == marks[newact] and not rev:
1017 bookmarks.activate(repo, newact)
1017 bookmarks.activate(repo, newact)
1018 elif cur != tgt and newact == repo._bookmarkcurrent:
1018 elif cur != tgt and newact == repo._activebookmark:
1019 bookmarks.deactivate(repo)
1019 bookmarks.deactivate(repo)
1020 marks.write()
1020 marks.write()
1021
1021
1022 elif inactive:
1022 elif inactive:
1023 if len(marks) == 0:
1023 if len(marks) == 0:
1024 ui.status(_("no bookmarks set\n"))
1024 ui.status(_("no bookmarks set\n"))
1025 elif not repo._bookmarkcurrent:
1025 elif not repo._activebookmark:
1026 ui.status(_("no active bookmark\n"))
1026 ui.status(_("no active bookmark\n"))
1027 else:
1027 else:
1028 bookmarks.deactivate(repo)
1028 bookmarks.deactivate(repo)
1029 finally:
1029 finally:
1030 wlock.release()
1030 wlock.release()
1031 else: # show bookmarks
1031 else: # show bookmarks
1032 fm = ui.formatter('bookmarks', opts)
1032 fm = ui.formatter('bookmarks', opts)
1033 hexfn = fm.hexfunc
1033 hexfn = fm.hexfunc
1034 marks = repo._bookmarks
1034 marks = repo._bookmarks
1035 if len(marks) == 0 and not fm:
1035 if len(marks) == 0 and not fm:
1036 ui.status(_("no bookmarks set\n"))
1036 ui.status(_("no bookmarks set\n"))
1037 for bmark, n in sorted(marks.iteritems()):
1037 for bmark, n in sorted(marks.iteritems()):
1038 current = repo._bookmarkcurrent
1038 current = repo._activebookmark
1039 if bmark == current:
1039 if bmark == current:
1040 prefix, label = '*', 'bookmarks.current'
1040 prefix, label = '*', 'bookmarks.current'
1041 else:
1041 else:
1042 prefix, label = ' ', ''
1042 prefix, label = ' ', ''
1043
1043
1044 fm.startitem()
1044 fm.startitem()
1045 if not ui.quiet:
1045 if not ui.quiet:
1046 fm.plain(' %s ' % prefix, label=label)
1046 fm.plain(' %s ' % prefix, label=label)
1047 fm.write('bookmark', '%s', bmark, label=label)
1047 fm.write('bookmark', '%s', bmark, label=label)
1048 pad = " " * (25 - encoding.colwidth(bmark))
1048 pad = " " * (25 - encoding.colwidth(bmark))
1049 fm.condwrite(not ui.quiet, 'rev node', pad + ' %d:%s',
1049 fm.condwrite(not ui.quiet, 'rev node', pad + ' %d:%s',
1050 repo.changelog.rev(n), hexfn(n), label=label)
1050 repo.changelog.rev(n), hexfn(n), label=label)
1051 fm.data(active=(bmark == current))
1051 fm.data(active=(bmark == current))
1052 fm.plain('\n')
1052 fm.plain('\n')
1053 fm.end()
1053 fm.end()
1054
1054
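# Illustrative helper, not Mercurial API, for the divergent-bookmark handling
# above. Divergent bookmarks take the form 'name@path', so two marks belong to
# the same logical bookmark when the part before the first '@' matches --
# exactly the `split('@', 1)[0]` comparison used in checkconflict().

def _same_logical_bookmark(mark_a, mark_b):
    return mark_a.split('@', 1)[0] == mark_b.split('@', 1)[0]

# _same_logical_bookmark('feature', 'feature@default') -> True
# _same_logical_bookmark('feature', 'release')         -> False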
1055 @command('branch',
1055 @command('branch',
1056 [('f', 'force', None,
1056 [('f', 'force', None,
1057 _('set branch name even if it shadows an existing branch')),
1057 _('set branch name even if it shadows an existing branch')),
1058 ('C', 'clean', None, _('reset branch name to parent branch name'))],
1058 ('C', 'clean', None, _('reset branch name to parent branch name'))],
1059 _('[-fC] [NAME]'))
1059 _('[-fC] [NAME]'))
1060 def branch(ui, repo, label=None, **opts):
1060 def branch(ui, repo, label=None, **opts):
1061 """set or show the current branch name
1061 """set or show the current branch name
1062
1062
1063 .. note::
1063 .. note::
1064
1064
1065 Branch names are permanent and global. Use :hg:`bookmark` to create a
1065 Branch names are permanent and global. Use :hg:`bookmark` to create a
1066 lightweight bookmark instead. See :hg:`help glossary` for more
1066 lightweight bookmark instead. See :hg:`help glossary` for more
1067 information about named branches and bookmarks.
1067 information about named branches and bookmarks.
1068
1068
1069 With no argument, show the current branch name. With one argument,
1069 With no argument, show the current branch name. With one argument,
1070 set the working directory branch name (the branch will not exist
1070 set the working directory branch name (the branch will not exist
1071 in the repository until the next commit). Standard practice
1071 in the repository until the next commit). Standard practice
1072 recommends that primary development take place on the 'default'
1072 recommends that primary development take place on the 'default'
1073 branch.
1073 branch.
1074
1074
1075 Unless -f/--force is specified, branch will not let you set a
1075 Unless -f/--force is specified, branch will not let you set a
1076 branch name that already exists.
1076 branch name that already exists.
1077
1077
1078 Use -C/--clean to reset the working directory branch to that of
1078 Use -C/--clean to reset the working directory branch to that of
1079 the parent of the working directory, negating a previous branch
1079 the parent of the working directory, negating a previous branch
1080 change.
1080 change.
1081
1081
1082 Use the command :hg:`update` to switch to an existing branch. Use
1082 Use the command :hg:`update` to switch to an existing branch. Use
1083 :hg:`commit --close-branch` to mark this branch as closed.
1083 :hg:`commit --close-branch` to mark this branch as closed.
1084
1084
1085 Returns 0 on success.
1085 Returns 0 on success.
1086 """
1086 """
1087 if label:
1087 if label:
1088 label = label.strip()
1088 label = label.strip()
1089
1089
1090 if not opts.get('clean') and not label:
1090 if not opts.get('clean') and not label:
1091 ui.write("%s\n" % repo.dirstate.branch())
1091 ui.write("%s\n" % repo.dirstate.branch())
1092 return
1092 return
1093
1093
1094 wlock = repo.wlock()
1094 wlock = repo.wlock()
1095 try:
1095 try:
1096 if opts.get('clean'):
1096 if opts.get('clean'):
1097 label = repo[None].p1().branch()
1097 label = repo[None].p1().branch()
1098 repo.dirstate.setbranch(label)
1098 repo.dirstate.setbranch(label)
1099 ui.status(_('reset working directory to branch %s\n') % label)
1099 ui.status(_('reset working directory to branch %s\n') % label)
1100 elif label:
1100 elif label:
1101 if not opts.get('force') and label in repo.branchmap():
1101 if not opts.get('force') and label in repo.branchmap():
1102 if label not in [p.branch() for p in repo.parents()]:
1102 if label not in [p.branch() for p in repo.parents()]:
1103 raise util.Abort(_('a branch of the same name already'
1103 raise util.Abort(_('a branch of the same name already'
1104 ' exists'),
1104 ' exists'),
1105 # i18n: "it" refers to an existing branch
1105 # i18n: "it" refers to an existing branch
1106 hint=_("use 'hg update' to switch to it"))
1106 hint=_("use 'hg update' to switch to it"))
1107 scmutil.checknewlabel(repo, label, 'branch')
1107 scmutil.checknewlabel(repo, label, 'branch')
1108 repo.dirstate.setbranch(label)
1108 repo.dirstate.setbranch(label)
1109 ui.status(_('marked working directory as branch %s\n') % label)
1109 ui.status(_('marked working directory as branch %s\n') % label)
1110 ui.status(_('(branches are permanent and global, '
1110 ui.status(_('(branches are permanent and global, '
1111 'did you want a bookmark?)\n'))
1111 'did you want a bookmark?)\n'))
1112 finally:
1112 finally:
1113 wlock.release()
1113 wlock.release()
1114
1114
1115 @command('branches',
1115 @command('branches',
1116 [('a', 'active', False,
1116 [('a', 'active', False,
1117 _('show only branches that have unmerged heads (DEPRECATED)')),
1117 _('show only branches that have unmerged heads (DEPRECATED)')),
1118 ('c', 'closed', False, _('show normal and closed branches')),
1118 ('c', 'closed', False, _('show normal and closed branches')),
1119 ] + formatteropts,
1119 ] + formatteropts,
1120 _('[-ac]'))
1120 _('[-ac]'))
1121 def branches(ui, repo, active=False, closed=False, **opts):
1121 def branches(ui, repo, active=False, closed=False, **opts):
1122 """list repository named branches
1122 """list repository named branches
1123
1123
1124 List the repository's named branches, indicating which ones are
1124 List the repository's named branches, indicating which ones are
1125 inactive. If -c/--closed is specified, also list branches which have
1125 inactive. If -c/--closed is specified, also list branches which have
1126 been marked closed (see :hg:`commit --close-branch`).
1126 been marked closed (see :hg:`commit --close-branch`).
1127
1127
1128 Use the command :hg:`update` to switch to an existing branch.
1128 Use the command :hg:`update` to switch to an existing branch.
1129
1129
1130 Returns 0.
1130 Returns 0.
1131 """
1131 """
1132
1132
1133 fm = ui.formatter('branches', opts)
1133 fm = ui.formatter('branches', opts)
1134 hexfunc = fm.hexfunc
1134 hexfunc = fm.hexfunc
1135
1135
1136 allheads = set(repo.heads())
1136 allheads = set(repo.heads())
1137 branches = []
1137 branches = []
1138 for tag, heads, tip, isclosed in repo.branchmap().iterbranches():
1138 for tag, heads, tip, isclosed in repo.branchmap().iterbranches():
1139 isactive = not isclosed and bool(set(heads) & allheads)
1139 isactive = not isclosed and bool(set(heads) & allheads)
1140 branches.append((tag, repo[tip], isactive, not isclosed))
1140 branches.append((tag, repo[tip], isactive, not isclosed))
1141 branches.sort(key=lambda i: (i[2], i[1].rev(), i[0], i[3]),
1141 branches.sort(key=lambda i: (i[2], i[1].rev(), i[0], i[3]),
1142 reverse=True)
1142 reverse=True)
1143
1143
1144 for tag, ctx, isactive, isopen in branches:
1144 for tag, ctx, isactive, isopen in branches:
1145 if active and not isactive:
1145 if active and not isactive:
1146 continue
1146 continue
1147 if isactive:
1147 if isactive:
1148 label = 'branches.active'
1148 label = 'branches.active'
1149 notice = ''
1149 notice = ''
1150 elif not isopen:
1150 elif not isopen:
1151 if not closed:
1151 if not closed:
1152 continue
1152 continue
1153 label = 'branches.closed'
1153 label = 'branches.closed'
1154 notice = _(' (closed)')
1154 notice = _(' (closed)')
1155 else:
1155 else:
1156 label = 'branches.inactive'
1156 label = 'branches.inactive'
1157 notice = _(' (inactive)')
1157 notice = _(' (inactive)')
1158 current = (tag == repo.dirstate.branch())
1158 current = (tag == repo.dirstate.branch())
1159 if current:
1159 if current:
1160 label = 'branches.current'
1160 label = 'branches.current'
1161
1161
1162 fm.startitem()
1162 fm.startitem()
1163 fm.write('branch', '%s', tag, label=label)
1163 fm.write('branch', '%s', tag, label=label)
1164 rev = ctx.rev()
1164 rev = ctx.rev()
1165 padsize = max(31 - len(str(rev)) - encoding.colwidth(tag), 0)
1165 padsize = max(31 - len(str(rev)) - encoding.colwidth(tag), 0)
1166 fmt = ' ' * padsize + ' %d:%s'
1166 fmt = ' ' * padsize + ' %d:%s'
1167 fm.condwrite(not ui.quiet, 'rev node', fmt, rev, hexfunc(ctx.node()),
1167 fm.condwrite(not ui.quiet, 'rev node', fmt, rev, hexfunc(ctx.node()),
1168 label='log.changeset changeset.%s' % ctx.phasestr())
1168 label='log.changeset changeset.%s' % ctx.phasestr())
1169 fm.data(active=isactive, closed=not isopen, current=current)
1169 fm.data(active=isactive, closed=not isopen, current=current)
1170 if not ui.quiet:
1170 if not ui.quiet:
1171 fm.plain(notice)
1171 fm.plain(notice)
1172 fm.plain('\n')
1172 fm.plain('\n')
1173 fm.end()
1173 fm.end()
1174
1174
1175 @command('bundle',
1175 @command('bundle',
1176 [('f', 'force', None, _('run even when the destination is unrelated')),
1176 [('f', 'force', None, _('run even when the destination is unrelated')),
1177 ('r', 'rev', [], _('a changeset intended to be added to the destination'),
1177 ('r', 'rev', [], _('a changeset intended to be added to the destination'),
1178 _('REV')),
1178 _('REV')),
1179 ('b', 'branch', [], _('a specific branch you would like to bundle'),
1179 ('b', 'branch', [], _('a specific branch you would like to bundle'),
1180 _('BRANCH')),
1180 _('BRANCH')),
1181 ('', 'base', [],
1181 ('', 'base', [],
1182 _('a base changeset assumed to be available at the destination'),
1182 _('a base changeset assumed to be available at the destination'),
1183 _('REV')),
1183 _('REV')),
1184 ('a', 'all', None, _('bundle all changesets in the repository')),
1184 ('a', 'all', None, _('bundle all changesets in the repository')),
1185 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE')),
1185 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE')),
1186 ] + remoteopts,
1186 ] + remoteopts,
1187 _('[-f] [-t TYPE] [-a] [-r REV]... [--base REV]... FILE [DEST]'))
1187 _('[-f] [-t TYPE] [-a] [-r REV]... [--base REV]... FILE [DEST]'))
1188 def bundle(ui, repo, fname, dest=None, **opts):
1188 def bundle(ui, repo, fname, dest=None, **opts):
1189 """create a changegroup file
1189 """create a changegroup file
1190
1190
1191 Generate a compressed changegroup file collecting changesets not
1191 Generate a compressed changegroup file collecting changesets not
1192 known to be in another repository.
1192 known to be in another repository.
1193
1193
1194 If you omit the destination repository, then hg assumes the
1194 If you omit the destination repository, then hg assumes the
1195 destination will have all the nodes you specify with --base
1195 destination will have all the nodes you specify with --base
1196 parameters. To create a bundle containing all changesets, use
1196 parameters. To create a bundle containing all changesets, use
1197 -a/--all (or --base null).
1197 -a/--all (or --base null).
1198
1198
1199 You can change compression method with the -t/--type option.
1199 You can change compression method with the -t/--type option.
1200 The available compression methods are: none, bzip2, and
1200 The available compression methods are: none, bzip2, and
1201 gzip (by default, bundles are compressed using bzip2).
1201 gzip (by default, bundles are compressed using bzip2).
1202
1202
1203 The bundle file can then be transferred using conventional means
1203 The bundle file can then be transferred using conventional means
1204 and applied to another repository with the unbundle or pull
1204 and applied to another repository with the unbundle or pull
1205 command. This is useful when direct push and pull are not
1205 command. This is useful when direct push and pull are not
1206 available or when exporting an entire repository is undesirable.
1206 available or when exporting an entire repository is undesirable.
1207
1207
1208 Applying bundles preserves all changeset contents including
1208 Applying bundles preserves all changeset contents including
1209 permissions, copy/rename information, and revision history.
1209 permissions, copy/rename information, and revision history.
1210
1210
1211 Returns 0 on success, 1 if no changes found.
1211 Returns 0 on success, 1 if no changes found.
1212 """
1212 """
1213 revs = None
1213 revs = None
1214 if 'rev' in opts:
1214 if 'rev' in opts:
1215 revs = scmutil.revrange(repo, opts['rev'])
1215 revs = scmutil.revrange(repo, opts['rev'])
1216
1216
1217 bundletype = opts.get('type', 'bzip2').lower()
1217 bundletype = opts.get('type', 'bzip2').lower()
1218 btypes = {'none': 'HG10UN',
1218 btypes = {'none': 'HG10UN',
1219 'bzip2': 'HG10BZ',
1219 'bzip2': 'HG10BZ',
1220 'gzip': 'HG10GZ',
1220 'gzip': 'HG10GZ',
1221 'bundle2': 'HG20'}
1221 'bundle2': 'HG20'}
1222 bundletype = btypes.get(bundletype)
1222 bundletype = btypes.get(bundletype)
1223 if bundletype not in changegroup.bundletypes:
1223 if bundletype not in changegroup.bundletypes:
1224 raise util.Abort(_('unknown bundle type specified with --type'))
1224 raise util.Abort(_('unknown bundle type specified with --type'))
1225
1225
1226 if opts.get('all'):
1226 if opts.get('all'):
1227 base = ['null']
1227 base = ['null']
1228 else:
1228 else:
1229 base = scmutil.revrange(repo, opts.get('base'))
1229 base = scmutil.revrange(repo, opts.get('base'))
1230 # TODO: get desired bundlecaps from command line.
1230 # TODO: get desired bundlecaps from command line.
1231 bundlecaps = None
1231 bundlecaps = None
1232 if base:
1232 if base:
1233 if dest:
1233 if dest:
1234 raise util.Abort(_("--base is incompatible with specifying "
1234 raise util.Abort(_("--base is incompatible with specifying "
1235 "a destination"))
1235 "a destination"))
1236 common = [repo.lookup(rev) for rev in base]
1236 common = [repo.lookup(rev) for rev in base]
1237 heads = revs and map(repo.lookup, revs) or revs
1237 heads = revs and map(repo.lookup, revs) or revs
1238 cg = changegroup.getchangegroup(repo, 'bundle', heads=heads,
1238 cg = changegroup.getchangegroup(repo, 'bundle', heads=heads,
1239 common=common, bundlecaps=bundlecaps)
1239 common=common, bundlecaps=bundlecaps)
1240 outgoing = None
1240 outgoing = None
1241 else:
1241 else:
1242 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1242 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1243 dest, branches = hg.parseurl(dest, opts.get('branch'))
1243 dest, branches = hg.parseurl(dest, opts.get('branch'))
1244 other = hg.peer(repo, opts, dest)
1244 other = hg.peer(repo, opts, dest)
1245 revs, checkout = hg.addbranchrevs(repo, repo, branches, revs)
1245 revs, checkout = hg.addbranchrevs(repo, repo, branches, revs)
1246 heads = revs and map(repo.lookup, revs) or revs
1246 heads = revs and map(repo.lookup, revs) or revs
1247 outgoing = discovery.findcommonoutgoing(repo, other,
1247 outgoing = discovery.findcommonoutgoing(repo, other,
1248 onlyheads=heads,
1248 onlyheads=heads,
1249 force=opts.get('force'),
1249 force=opts.get('force'),
1250 portable=True)
1250 portable=True)
1251 cg = changegroup.getlocalchangegroup(repo, 'bundle', outgoing,
1251 cg = changegroup.getlocalchangegroup(repo, 'bundle', outgoing,
1252 bundlecaps)
1252 bundlecaps)
1253 if not cg:
1253 if not cg:
1254 scmutil.nochangesfound(ui, repo, outgoing and outgoing.excluded)
1254 scmutil.nochangesfound(ui, repo, outgoing and outgoing.excluded)
1255 return 1
1255 return 1
1256
1256
1257 changegroup.writebundle(ui, cg, fname, bundletype)
1257 changegroup.writebundle(ui, cg, fname, bundletype)
1258
1258
1259 @command('cat',
1259 @command('cat',
1260 [('o', 'output', '',
1260 [('o', 'output', '',
1261 _('print output to file with formatted name'), _('FORMAT')),
1261 _('print output to file with formatted name'), _('FORMAT')),
1262 ('r', 'rev', '', _('print the given revision'), _('REV')),
1262 ('r', 'rev', '', _('print the given revision'), _('REV')),
1263 ('', 'decode', None, _('apply any matching decode filter')),
1263 ('', 'decode', None, _('apply any matching decode filter')),
1264 ] + walkopts,
1264 ] + walkopts,
1265 _('[OPTION]... FILE...'),
1265 _('[OPTION]... FILE...'),
1266 inferrepo=True)
1266 inferrepo=True)
1267 def cat(ui, repo, file1, *pats, **opts):
1267 def cat(ui, repo, file1, *pats, **opts):
1268 """output the current or given revision of files
1268 """output the current or given revision of files
1269
1269
1270 Print the specified files as they were at the given revision. If
1270 Print the specified files as they were at the given revision. If
1271 no revision is given, the parent of the working directory is used.
1271 no revision is given, the parent of the working directory is used.
1272
1272
1273 Output may be to a file, in which case the name of the file is
1273 Output may be to a file, in which case the name of the file is
1274 given using a format string. The formatting rules are as follows:
1274 given using a format string. The formatting rules are as follows:
1275
1275
1276 :``%%``: literal "%" character
1276 :``%%``: literal "%" character
1277 :``%s``: basename of file being printed
1277 :``%s``: basename of file being printed
1278 :``%d``: dirname of file being printed, or '.' if in repository root
1278 :``%d``: dirname of file being printed, or '.' if in repository root
1279 :``%p``: root-relative path name of file being printed
1279 :``%p``: root-relative path name of file being printed
1280 :``%H``: changeset hash (40 hexadecimal digits)
1280 :``%H``: changeset hash (40 hexadecimal digits)
1281 :``%R``: changeset revision number
1281 :``%R``: changeset revision number
1282 :``%h``: short-form changeset hash (12 hexadecimal digits)
1282 :``%h``: short-form changeset hash (12 hexadecimal digits)
1283 :``%r``: zero-padded changeset revision number
1283 :``%r``: zero-padded changeset revision number
1284 :``%b``: basename of the exporting repository
1284 :``%b``: basename of the exporting repository
1285
1285
1286 Returns 0 on success.
1286 Returns 0 on success.
1287 """
1287 """
1288 ctx = scmutil.revsingle(repo, opts.get('rev'))
1288 ctx = scmutil.revsingle(repo, opts.get('rev'))
1289 m = scmutil.match(ctx, (file1,) + pats, opts)
1289 m = scmutil.match(ctx, (file1,) + pats, opts)
1290
1290
1291 return cmdutil.cat(ui, repo, ctx, m, '', **opts)
1291 return cmdutil.cat(ui, repo, ctx, m, '', **opts)
1292
1292
1293 @command('^clone',
1293 @command('^clone',
1294 [('U', 'noupdate', None, _('the clone will include an empty working '
1294 [('U', 'noupdate', None, _('the clone will include an empty working '
1295 'directory (only a repository)')),
1295 'directory (only a repository)')),
1296 ('u', 'updaterev', '', _('revision, tag or branch to check out'), _('REV')),
1296 ('u', 'updaterev', '', _('revision, tag or branch to check out'), _('REV')),
1297 ('r', 'rev', [], _('include the specified changeset'), _('REV')),
1297 ('r', 'rev', [], _('include the specified changeset'), _('REV')),
1298 ('b', 'branch', [], _('clone only the specified branch'), _('BRANCH')),
1298 ('b', 'branch', [], _('clone only the specified branch'), _('BRANCH')),
1299 ('', 'pull', None, _('use pull protocol to copy metadata')),
1299 ('', 'pull', None, _('use pull protocol to copy metadata')),
1300 ('', 'uncompressed', None, _('use uncompressed transfer (fast over LAN)')),
1300 ('', 'uncompressed', None, _('use uncompressed transfer (fast over LAN)')),
1301 ] + remoteopts,
1301 ] + remoteopts,
1302 _('[OPTION]... SOURCE [DEST]'),
1302 _('[OPTION]... SOURCE [DEST]'),
1303 norepo=True)
1303 norepo=True)
1304 def clone(ui, source, dest=None, **opts):
1304 def clone(ui, source, dest=None, **opts):
1305 """make a copy of an existing repository
1305 """make a copy of an existing repository
1306
1306
1307 Create a copy of an existing repository in a new directory.
1307 Create a copy of an existing repository in a new directory.
1308
1308
1309 If no destination directory name is specified, it defaults to the
1309 If no destination directory name is specified, it defaults to the
1310 basename of the source.
1310 basename of the source.
1311
1311
1312 The location of the source is added to the new repository's
1312 The location of the source is added to the new repository's
1313 ``.hg/hgrc`` file, as the default to be used for future pulls.
1313 ``.hg/hgrc`` file, as the default to be used for future pulls.
1314
1314
1315 Only local paths and ``ssh://`` URLs are supported as
1315 Only local paths and ``ssh://`` URLs are supported as
1316 destinations. For ``ssh://`` destinations, no working directory or
1316 destinations. For ``ssh://`` destinations, no working directory or
1317 ``.hg/hgrc`` will be created on the remote side.
1317 ``.hg/hgrc`` will be created on the remote side.
1318
1318
1319 To pull only a subset of changesets, specify one or more revision
1319 To pull only a subset of changesets, specify one or more revision
1320 identifiers with -r/--rev or branches with -b/--branch. The
1320 identifiers with -r/--rev or branches with -b/--branch. The
1321 resulting clone will contain only the specified changesets and
1321 resulting clone will contain only the specified changesets and
1322 their ancestors. These options (or 'clone src#rev dest') imply
1322 their ancestors. These options (or 'clone src#rev dest') imply
1323 --pull, even for local source repositories. Note that specifying a
1323 --pull, even for local source repositories. Note that specifying a
1324 tag will include the tagged changeset but not the changeset
1324 tag will include the tagged changeset but not the changeset
1325 containing the tag.
1325 containing the tag.
1326
1326
1327 If the source repository has a bookmark called '@' set, that
1327 If the source repository has a bookmark called '@' set, that
1328 revision will be checked out in the new repository by default.
1328 revision will be checked out in the new repository by default.
1329
1329
1330 To check out a particular version, use -u/--update, or
1330 To check out a particular version, use -u/--update, or
1331 -U/--noupdate to create a clone with no working directory.
1331 -U/--noupdate to create a clone with no working directory.
1332
1332
1333 .. container:: verbose
1333 .. container:: verbose
1334
1334
1335 For efficiency, hardlinks are used for cloning whenever the
1335 For efficiency, hardlinks are used for cloning whenever the
1336 source and destination are on the same filesystem (note this
1336 source and destination are on the same filesystem (note this
1337 applies only to the repository data, not to the working
1337 applies only to the repository data, not to the working
1338 directory). Some filesystems, such as AFS, implement hardlinking
1338 directory). Some filesystems, such as AFS, implement hardlinking
1339 incorrectly, but do not report errors. In these cases, use the
1339 incorrectly, but do not report errors. In these cases, use the
1340 --pull option to avoid hardlinking.
1340 --pull option to avoid hardlinking.
1341
1341
1342 In some cases, you can clone repositories and the working
1342 In some cases, you can clone repositories and the working
1343 directory using full hardlinks with ::
1343 directory using full hardlinks with ::
1344
1344
1345 $ cp -al REPO REPOCLONE
1345 $ cp -al REPO REPOCLONE
1346
1346
1347 This is the fastest way to clone, but it is not always safe. The
1347 This is the fastest way to clone, but it is not always safe. The
1348 operation is not atomic (making sure REPO is not modified during
1348 operation is not atomic (making sure REPO is not modified during
1349 the operation is up to you) and you have to make sure your
1349 the operation is up to you) and you have to make sure your
1350 editor breaks hardlinks (Emacs and most Linux Kernel tools do
1350 editor breaks hardlinks (Emacs and most Linux Kernel tools do
1351 so). Also, this is not compatible with certain extensions that
1351 so). Also, this is not compatible with certain extensions that
1352 place their metadata under the .hg directory, such as mq.
1352 place their metadata under the .hg directory, such as mq.
1353
1353
1354 Mercurial will update the working directory to the first applicable
1354 Mercurial will update the working directory to the first applicable
1355 revision from this list:
1355 revision from this list:
1356
1356
1357 a) null if -U or the source repository has no changesets
1357 a) null if -U or the source repository has no changesets
1358 b) if -u . and the source repository is local, the first parent of
1358 b) if -u . and the source repository is local, the first parent of
1359 the source repository's working directory
1359 the source repository's working directory
1360 c) the changeset specified with -u (if a branch name, this means the
1360 c) the changeset specified with -u (if a branch name, this means the
1361 latest head of that branch)
1361 latest head of that branch)
1362 d) the changeset specified with -r
1362 d) the changeset specified with -r
1363 e) the tipmost head specified with -b
1363 e) the tipmost head specified with -b
1364 f) the tipmost head specified with the url#branch source syntax
1364 f) the tipmost head specified with the url#branch source syntax
1365 g) the revision marked with the '@' bookmark, if present
1365 g) the revision marked with the '@' bookmark, if present
1366 h) the tipmost head of the default branch
1366 h) the tipmost head of the default branch
1367 i) tip
1367 i) tip
1368
1368
1369 Examples:
1369 Examples:
1370
1370
1371 - clone a remote repository to a new directory named hg/::
1371 - clone a remote repository to a new directory named hg/::
1372
1372
1373 hg clone http://selenic.com/hg
1373 hg clone http://selenic.com/hg
1374
1374
1375 - create a lightweight local clone::
1375 - create a lightweight local clone::
1376
1376
1377 hg clone project/ project-feature/
1377 hg clone project/ project-feature/
1378
1378
1379 - clone from an absolute path on an ssh server (note double-slash)::
1379 - clone from an absolute path on an ssh server (note double-slash)::
1380
1380
1381 hg clone ssh://user@server//home/projects/alpha/
1381 hg clone ssh://user@server//home/projects/alpha/
1382
1382
1383 - do a high-speed clone over a LAN while checking out a
1383 - do a high-speed clone over a LAN while checking out a
1384 specified version::
1384 specified version::
1385
1385
1386 hg clone --uncompressed http://server/repo -u 1.5
1386 hg clone --uncompressed http://server/repo -u 1.5
1387
1387
1388 - create a repository without changesets after a particular revision::
1388 - create a repository without changesets after a particular revision::
1389
1389
1390 hg clone -r 04e544 experimental/ good/
1390 hg clone -r 04e544 experimental/ good/
1391
1391
1392 - clone (and track) a particular named branch::
1392 - clone (and track) a particular named branch::
1393
1393
1394 hg clone http://selenic.com/hg#stable
1394 hg clone http://selenic.com/hg#stable
1395
1395
1396 See :hg:`help urls` for details on specifying URLs.
1396 See :hg:`help urls` for details on specifying URLs.
1397
1397
1398 Returns 0 on success.
1398 Returns 0 on success.
1399 """
1399 """
1400 if opts.get('noupdate') and opts.get('updaterev'):
1400 if opts.get('noupdate') and opts.get('updaterev'):
1401 raise util.Abort(_("cannot specify both --noupdate and --updaterev"))
1401 raise util.Abort(_("cannot specify both --noupdate and --updaterev"))
1402
1402
1403 r = hg.clone(ui, opts, source, dest,
1403 r = hg.clone(ui, opts, source, dest,
1404 pull=opts.get('pull'),
1404 pull=opts.get('pull'),
1405 stream=opts.get('uncompressed'),
1405 stream=opts.get('uncompressed'),
1406 rev=opts.get('rev'),
1406 rev=opts.get('rev'),
1407 update=opts.get('updaterev') or not opts.get('noupdate'),
1407 update=opts.get('updaterev') or not opts.get('noupdate'),
1408 branch=opts.get('branch'))
1408 branch=opts.get('branch'))
1409
1409
1410 return r is None
1410 return r is None
1411
1411
1412 @command('^commit|ci',
1412 @command('^commit|ci',
1413 [('A', 'addremove', None,
1413 [('A', 'addremove', None,
1414 _('mark new/missing files as added/removed before committing')),
1414 _('mark new/missing files as added/removed before committing')),
1415 ('', 'close-branch', None,
1415 ('', 'close-branch', None,
1416 _('mark a branch as closed, hiding it from the branch list')),
1416 _('mark a branch as closed, hiding it from the branch list')),
1417 ('', 'amend', None, _('amend the parent of the working directory')),
1417 ('', 'amend', None, _('amend the parent of the working directory')),
1418 ('s', 'secret', None, _('use the secret phase for committing')),
1418 ('s', 'secret', None, _('use the secret phase for committing')),
1419 ('e', 'edit', None, _('invoke editor on commit messages')),
1419 ('e', 'edit', None, _('invoke editor on commit messages')),
1420 ('i', 'interactive', None, _('use interactive mode')),
1420 ('i', 'interactive', None, _('use interactive mode')),
1421 ] + walkopts + commitopts + commitopts2 + subrepoopts,
1421 ] + walkopts + commitopts + commitopts2 + subrepoopts,
1422 _('[OPTION]... [FILE]...'),
1422 _('[OPTION]... [FILE]...'),
1423 inferrepo=True)
1423 inferrepo=True)
1424 def commit(ui, repo, *pats, **opts):
1424 def commit(ui, repo, *pats, **opts):
1425 """commit the specified files or all outstanding changes
1425 """commit the specified files or all outstanding changes
1426
1426
1427 Commit changes to the given files into the repository. Unlike a
1427 Commit changes to the given files into the repository. Unlike a
1428 centralized SCM, this operation is a local operation. See
1428 centralized SCM, this operation is a local operation. See
1429 :hg:`push` for a way to actively distribute your changes.
1429 :hg:`push` for a way to actively distribute your changes.
1430
1430
1431 If a list of files is omitted, all changes reported by :hg:`status`
1431 If a list of files is omitted, all changes reported by :hg:`status`
1432 will be committed.
1432 will be committed.
1433
1433
1434 If you are committing the result of a merge, do not provide any
1434 If you are committing the result of a merge, do not provide any
1435 filenames or -I/-X filters.
1435 filenames or -I/-X filters.
1436
1436
1437 If no commit message is specified, Mercurial starts your
1437 If no commit message is specified, Mercurial starts your
1438 configured editor where you can enter a message. In case your
1438 configured editor where you can enter a message. In case your
1439 commit fails, you will find a backup of your message in
1439 commit fails, you will find a backup of your message in
1440 ``.hg/last-message.txt``.
1440 ``.hg/last-message.txt``.
1441
1441
1442 The --amend flag can be used to amend the parent of the
1442 The --amend flag can be used to amend the parent of the
1443 working directory with a new commit that contains the changes
1443 working directory with a new commit that contains the changes
1444 in the parent in addition to those currently reported by :hg:`status`,
1444 in the parent in addition to those currently reported by :hg:`status`,
1445 if there are any. The old commit is stored in a backup bundle in
1445 if there are any. The old commit is stored in a backup bundle in
1446 ``.hg/strip-backup`` (see :hg:`help bundle` and :hg:`help unbundle`
1446 ``.hg/strip-backup`` (see :hg:`help bundle` and :hg:`help unbundle`
1447 on how to restore it).
1447 on how to restore it).
1448
1448
1449 Message, user and date are taken from the amended commit unless
1449 Message, user and date are taken from the amended commit unless
1450 specified. When a message isn't specified on the command line,
1450 specified. When a message isn't specified on the command line,
1451 the editor will open with the message of the amended commit.
1451 the editor will open with the message of the amended commit.
1452
1452
1453 It is not possible to amend public changesets (see :hg:`help phases`)
1453 It is not possible to amend public changesets (see :hg:`help phases`)
1454 or changesets that have children.
1454 or changesets that have children.
1455
1455
1456 See :hg:`help dates` for a list of formats valid for -d/--date.
1456 See :hg:`help dates` for a list of formats valid for -d/--date.
1457
1457
1458 Returns 0 on success, 1 if nothing changed.
1458 Returns 0 on success, 1 if nothing changed.
1459 """
1459 """
1460 if opts.get('interactive'):
1460 if opts.get('interactive'):
1461 opts.pop('interactive')
1461 opts.pop('interactive')
1462 cmdutil.dorecord(ui, repo, commit, 'commit', False,
1462 cmdutil.dorecord(ui, repo, commit, 'commit', False,
1463 cmdutil.recordfilter, *pats, **opts)
1463 cmdutil.recordfilter, *pats, **opts)
1464 return
1464 return
1465
1465
1466 if opts.get('subrepos'):
1466 if opts.get('subrepos'):
1467 if opts.get('amend'):
1467 if opts.get('amend'):
1468 raise util.Abort(_('cannot amend with --subrepos'))
1468 raise util.Abort(_('cannot amend with --subrepos'))
1469 # Let --subrepos on the command line override config setting.
1469 # Let --subrepos on the command line override config setting.
1470 ui.setconfig('ui', 'commitsubrepos', True, 'commit')
1470 ui.setconfig('ui', 'commitsubrepos', True, 'commit')
1471
1471
1472 cmdutil.checkunfinished(repo, commit=True)
1472 cmdutil.checkunfinished(repo, commit=True)
1473
1473
1474 branch = repo[None].branch()
1474 branch = repo[None].branch()
1475 bheads = repo.branchheads(branch)
1475 bheads = repo.branchheads(branch)
1476
1476
1477 extra = {}
1477 extra = {}
1478 if opts.get('close_branch'):
1478 if opts.get('close_branch'):
1479 extra['close'] = 1
1479 extra['close'] = 1
1480
1480
1481 if not bheads:
1481 if not bheads:
1482 raise util.Abort(_('can only close branch heads'))
1482 raise util.Abort(_('can only close branch heads'))
1483 elif opts.get('amend'):
1483 elif opts.get('amend'):
1484 if repo.parents()[0].p1().branch() != branch and \
1484 if repo.parents()[0].p1().branch() != branch and \
1485 repo.parents()[0].p2().branch() != branch:
1485 repo.parents()[0].p2().branch() != branch:
1486 raise util.Abort(_('can only close branch heads'))
1486 raise util.Abort(_('can only close branch heads'))
1487
1487
1488 if opts.get('amend'):
1488 if opts.get('amend'):
1489 if ui.configbool('ui', 'commitsubrepos'):
1489 if ui.configbool('ui', 'commitsubrepos'):
1490 raise util.Abort(_('cannot amend with ui.commitsubrepos enabled'))
1490 raise util.Abort(_('cannot amend with ui.commitsubrepos enabled'))
1491
1491
1492 old = repo['.']
1492 old = repo['.']
1493 if not old.mutable():
1493 if not old.mutable():
1494 raise util.Abort(_('cannot amend public changesets'))
1494 raise util.Abort(_('cannot amend public changesets'))
1495 if len(repo[None].parents()) > 1:
1495 if len(repo[None].parents()) > 1:
1496 raise util.Abort(_('cannot amend while merging'))
1496 raise util.Abort(_('cannot amend while merging'))
1497 allowunstable = obsolete.isenabled(repo, obsolete.allowunstableopt)
1497 allowunstable = obsolete.isenabled(repo, obsolete.allowunstableopt)
1498 if not allowunstable and old.children():
1498 if not allowunstable and old.children():
1499 raise util.Abort(_('cannot amend changeset with children'))
1499 raise util.Abort(_('cannot amend changeset with children'))
1500
1500
1501 # commitfunc is used only for temporary amend commit by cmdutil.amend
1501 # commitfunc is used only for temporary amend commit by cmdutil.amend
1502 def commitfunc(ui, repo, message, match, opts):
1502 def commitfunc(ui, repo, message, match, opts):
1503 return repo.commit(message,
1503 return repo.commit(message,
1504 opts.get('user') or old.user(),
1504 opts.get('user') or old.user(),
1505 opts.get('date') or old.date(),
1505 opts.get('date') or old.date(),
1506 match,
1506 match,
1507 extra=extra)
1507 extra=extra)
1508
1508
1509 current = repo._bookmarkcurrent
1509 current = repo._activebookmark
1510 marks = old.bookmarks()
1510 marks = old.bookmarks()
1511 node = cmdutil.amend(ui, repo, commitfunc, old, extra, pats, opts)
1511 node = cmdutil.amend(ui, repo, commitfunc, old, extra, pats, opts)
1512 if node == old.node():
1512 if node == old.node():
1513 ui.status(_("nothing changed\n"))
1513 ui.status(_("nothing changed\n"))
1514 return 1
1514 return 1
1515 elif marks:
1515 elif marks:
1516 ui.debug('moving bookmarks %r from %s to %s\n' %
1516 ui.debug('moving bookmarks %r from %s to %s\n' %
1517 (marks, old.hex(), hex(node)))
1517 (marks, old.hex(), hex(node)))
1518 newmarks = repo._bookmarks
1518 newmarks = repo._bookmarks
1519 for bm in marks:
1519 for bm in marks:
1520 newmarks[bm] = node
1520 newmarks[bm] = node
1521 if bm == current:
1521 if bm == current:
1522 bookmarks.activate(repo, bm)
1522 bookmarks.activate(repo, bm)
1523 newmarks.write()
1523 newmarks.write()
1524 else:
1524 else:
1525 def commitfunc(ui, repo, message, match, opts):
1525 def commitfunc(ui, repo, message, match, opts):
1526 backup = ui.backupconfig('phases', 'new-commit')
1526 backup = ui.backupconfig('phases', 'new-commit')
1527 baseui = repo.baseui
1527 baseui = repo.baseui
1528 basebackup = baseui.backupconfig('phases', 'new-commit')
1528 basebackup = baseui.backupconfig('phases', 'new-commit')
1529 try:
1529 try:
1530 if opts.get('secret'):
1530 if opts.get('secret'):
1531 ui.setconfig('phases', 'new-commit', 'secret', 'commit')
1531 ui.setconfig('phases', 'new-commit', 'secret', 'commit')
1532 # Propagate to subrepos
1532 # Propagate to subrepos
1533 baseui.setconfig('phases', 'new-commit', 'secret', 'commit')
1533 baseui.setconfig('phases', 'new-commit', 'secret', 'commit')
1534
1534
1535 editform = cmdutil.mergeeditform(repo[None], 'commit.normal')
1535 editform = cmdutil.mergeeditform(repo[None], 'commit.normal')
1536 editor = cmdutil.getcommiteditor(editform=editform, **opts)
1536 editor = cmdutil.getcommiteditor(editform=editform, **opts)
1537 return repo.commit(message, opts.get('user'), opts.get('date'),
1537 return repo.commit(message, opts.get('user'), opts.get('date'),
1538 match,
1538 match,
1539 editor=editor,
1539 editor=editor,
1540 extra=extra)
1540 extra=extra)
1541 finally:
1541 finally:
1542 ui.restoreconfig(backup)
1542 ui.restoreconfig(backup)
1543 repo.baseui.restoreconfig(basebackup)
1543 repo.baseui.restoreconfig(basebackup)
1544
1544
1545
1545
1546 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
1546 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
1547
1547
1548 if not node:
1548 if not node:
1549 stat = repo.status(match=scmutil.match(repo[None], pats, opts))
1549 stat = repo.status(match=scmutil.match(repo[None], pats, opts))
1550 if stat[3]:
1550 if stat[3]:
1551 ui.status(_("nothing changed (%d missing files, see "
1551 ui.status(_("nothing changed (%d missing files, see "
1552 "'hg status')\n") % len(stat[3]))
1552 "'hg status')\n") % len(stat[3]))
1553 else:
1553 else:
1554 ui.status(_("nothing changed\n"))
1554 ui.status(_("nothing changed\n"))
1555 return 1
1555 return 1
1556
1556
1557 cmdutil.commitstatus(repo, node, branch, bheads, opts)
1557 cmdutil.commitstatus(repo, node, branch, bheads, opts)
1558
1558
1559 @command('config|showconfig|debugconfig',
1559 @command('config|showconfig|debugconfig',
1560 [('u', 'untrusted', None, _('show untrusted configuration options')),
1560 [('u', 'untrusted', None, _('show untrusted configuration options')),
1561 ('e', 'edit', None, _('edit user config')),
1561 ('e', 'edit', None, _('edit user config')),
1562 ('l', 'local', None, _('edit repository config')),
1562 ('l', 'local', None, _('edit repository config')),
1563 ('g', 'global', None, _('edit global config'))],
1563 ('g', 'global', None, _('edit global config'))],
1564 _('[-u] [NAME]...'),
1564 _('[-u] [NAME]...'),
1565 optionalrepo=True)
1565 optionalrepo=True)
1566 def config(ui, repo, *values, **opts):
1566 def config(ui, repo, *values, **opts):
1567 """show combined config settings from all hgrc files
1567 """show combined config settings from all hgrc files
1568
1568
1569 With no arguments, print names and values of all config items.
1569 With no arguments, print names and values of all config items.
1570
1570
1571 With one argument of the form section.name, print just the value
1571 With one argument of the form section.name, print just the value
1572 of that config item.
1572 of that config item.
1573
1573
1574 With multiple arguments, print names and values of all config
1574 With multiple arguments, print names and values of all config
1575 items with matching section names.
1575 items with matching section names.
1576
1576
1577 With --edit, start an editor on the user-level config file. With
1577 With --edit, start an editor on the user-level config file. With
1578 --global, edit the system-wide config file. With --local, edit the
1578 --global, edit the system-wide config file. With --local, edit the
1579 repository-level config file.
1579 repository-level config file.
1580
1580
1581 With --debug, the source (filename and line number) is printed
1581 With --debug, the source (filename and line number) is printed
1582 for each config item.
1582 for each config item.
1583
1583
1584 See :hg:`help config` for more information about config files.
1584 See :hg:`help config` for more information about config files.
1585
1585
1586 Returns 0 on success, 1 if NAME does not exist.
1586 Returns 0 on success, 1 if NAME does not exist.
1587
1587
1588 """
1588 """
1589
1589
1590 if opts.get('edit') or opts.get('local') or opts.get('global'):
1590 if opts.get('edit') or opts.get('local') or opts.get('global'):
1591 if opts.get('local') and opts.get('global'):
1591 if opts.get('local') and opts.get('global'):
1592 raise util.Abort(_("can't use --local and --global together"))
1592 raise util.Abort(_("can't use --local and --global together"))
1593
1593
1594 if opts.get('local'):
1594 if opts.get('local'):
1595 if not repo:
1595 if not repo:
1596 raise util.Abort(_("can't use --local outside a repository"))
1596 raise util.Abort(_("can't use --local outside a repository"))
1597 paths = [repo.join('hgrc')]
1597 paths = [repo.join('hgrc')]
1598 elif opts.get('global'):
1598 elif opts.get('global'):
1599 paths = scmutil.systemrcpath()
1599 paths = scmutil.systemrcpath()
1600 else:
1600 else:
1601 paths = scmutil.userrcpath()
1601 paths = scmutil.userrcpath()
1602
1602
1603 for f in paths:
1603 for f in paths:
1604 if os.path.exists(f):
1604 if os.path.exists(f):
1605 break
1605 break
1606 else:
1606 else:
1607 if opts.get('global'):
1607 if opts.get('global'):
1608 samplehgrc = uimod.samplehgrcs['global']
1608 samplehgrc = uimod.samplehgrcs['global']
1609 elif opts.get('local'):
1609 elif opts.get('local'):
1610 samplehgrc = uimod.samplehgrcs['local']
1610 samplehgrc = uimod.samplehgrcs['local']
1611 else:
1611 else:
1612 samplehgrc = uimod.samplehgrcs['user']
1612 samplehgrc = uimod.samplehgrcs['user']
1613
1613
1614 f = paths[0]
1614 f = paths[0]
1615 fp = open(f, "w")
1615 fp = open(f, "w")
1616 fp.write(samplehgrc)
1616 fp.write(samplehgrc)
1617 fp.close()
1617 fp.close()
1618
1618
1619 editor = ui.geteditor()
1619 editor = ui.geteditor()
1620 ui.system("%s \"%s\"" % (editor, f),
1620 ui.system("%s \"%s\"" % (editor, f),
1621 onerr=util.Abort, errprefix=_("edit failed"))
1621 onerr=util.Abort, errprefix=_("edit failed"))
1622 return
1622 return
1623
1623
1624 for f in scmutil.rcpath():
1624 for f in scmutil.rcpath():
1625 ui.debug('read config from: %s\n' % f)
1625 ui.debug('read config from: %s\n' % f)
1626 untrusted = bool(opts.get('untrusted'))
1626 untrusted = bool(opts.get('untrusted'))
1627 if values:
1627 if values:
1628 sections = [v for v in values if '.' not in v]
1628 sections = [v for v in values if '.' not in v]
1629 items = [v for v in values if '.' in v]
1629 items = [v for v in values if '.' in v]
1630 if len(items) > 1 or items and sections:
1630 if len(items) > 1 or items and sections:
1631 raise util.Abort(_('only one config item permitted'))
1631 raise util.Abort(_('only one config item permitted'))
1632 matched = False
1632 matched = False
1633 for section, name, value in ui.walkconfig(untrusted=untrusted):
1633 for section, name, value in ui.walkconfig(untrusted=untrusted):
1634 value = str(value).replace('\n', '\\n')
1634 value = str(value).replace('\n', '\\n')
1635 sectname = section + '.' + name
1635 sectname = section + '.' + name
1636 if values:
1636 if values:
1637 for v in values:
1637 for v in values:
1638 if v == section:
1638 if v == section:
1639 ui.debug('%s: ' %
1639 ui.debug('%s: ' %
1640 ui.configsource(section, name, untrusted))
1640 ui.configsource(section, name, untrusted))
1641 ui.write('%s=%s\n' % (sectname, value))
1641 ui.write('%s=%s\n' % (sectname, value))
1642 matched = True
1642 matched = True
1643 elif v == sectname:
1643 elif v == sectname:
1644 ui.debug('%s: ' %
1644 ui.debug('%s: ' %
1645 ui.configsource(section, name, untrusted))
1645 ui.configsource(section, name, untrusted))
1646 ui.write(value, '\n')
1646 ui.write(value, '\n')
1647 matched = True
1647 matched = True
1648 else:
1648 else:
1649 ui.debug('%s: ' %
1649 ui.debug('%s: ' %
1650 ui.configsource(section, name, untrusted))
1650 ui.configsource(section, name, untrusted))
1651 ui.write('%s=%s\n' % (sectname, value))
1651 ui.write('%s=%s\n' % (sectname, value))
1652 matched = True
1652 matched = True
1653 if matched:
1653 if matched:
1654 return 0
1654 return 0
1655 return 1
1655 return 1
1656
1656
1657 @command('copy|cp',
1657 @command('copy|cp',
1658 [('A', 'after', None, _('record a copy that has already occurred')),
1658 [('A', 'after', None, _('record a copy that has already occurred')),
1659 ('f', 'force', None, _('forcibly copy over an existing managed file')),
1659 ('f', 'force', None, _('forcibly copy over an existing managed file')),
1660 ] + walkopts + dryrunopts,
1660 ] + walkopts + dryrunopts,
1661 _('[OPTION]... [SOURCE]... DEST'))
1661 _('[OPTION]... [SOURCE]... DEST'))
1662 def copy(ui, repo, *pats, **opts):
1662 def copy(ui, repo, *pats, **opts):
1663 """mark files as copied for the next commit
1663 """mark files as copied for the next commit
1664
1664
1665 Mark dest as having copies of source files. If dest is a
1665 Mark dest as having copies of source files. If dest is a
1666 directory, copies are put in that directory. If dest is a file,
1666 directory, copies are put in that directory. If dest is a file,
1667 the source must be a single file.
1667 the source must be a single file.
1668
1668
1669 By default, this command copies the contents of files as they
1669 By default, this command copies the contents of files as they
1670 exist in the working directory. If invoked with -A/--after, the
1670 exist in the working directory. If invoked with -A/--after, the
1671 operation is recorded, but no copying is performed.
1671 operation is recorded, but no copying is performed.
1672
1672
1673 This command takes effect with the next commit. To undo a copy
1673 This command takes effect with the next commit. To undo a copy
1674 before that, see :hg:`revert`.
1674 before that, see :hg:`revert`.
1675
1675
1676 Returns 0 on success, 1 if errors are encountered.
1676 Returns 0 on success, 1 if errors are encountered.
1677 """
1677 """
1678 wlock = repo.wlock(False)
1678 wlock = repo.wlock(False)
1679 try:
1679 try:
1680 return cmdutil.copy(ui, repo, pats, opts)
1680 return cmdutil.copy(ui, repo, pats, opts)
1681 finally:
1681 finally:
1682 wlock.release()
1682 wlock.release()
1683
1683
1684 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
1684 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
1685 def debugancestor(ui, repo, *args):
1685 def debugancestor(ui, repo, *args):
1686 """find the ancestor revision of two revisions in a given index"""
1686 """find the ancestor revision of two revisions in a given index"""
1687 if len(args) == 3:
1687 if len(args) == 3:
1688 index, rev1, rev2 = args
1688 index, rev1, rev2 = args
1689 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), index)
1689 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), index)
1690 lookup = r.lookup
1690 lookup = r.lookup
1691 elif len(args) == 2:
1691 elif len(args) == 2:
1692 if not repo:
1692 if not repo:
1693 raise util.Abort(_("there is no Mercurial repository here "
1693 raise util.Abort(_("there is no Mercurial repository here "
1694 "(.hg not found)"))
1694 "(.hg not found)"))
1695 rev1, rev2 = args
1695 rev1, rev2 = args
1696 r = repo.changelog
1696 r = repo.changelog
1697 lookup = repo.lookup
1697 lookup = repo.lookup
1698 else:
1698 else:
1699 raise util.Abort(_('either two or three arguments required'))
1699 raise util.Abort(_('either two or three arguments required'))
1700 a = r.ancestor(lookup(rev1), lookup(rev2))
1700 a = r.ancestor(lookup(rev1), lookup(rev2))
1701 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1701 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1702
1702
1703 @command('debugbuilddag',
1703 @command('debugbuilddag',
1704 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
1704 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
1705 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
1705 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
1706 ('n', 'new-file', None, _('add new file at each rev'))],
1706 ('n', 'new-file', None, _('add new file at each rev'))],
1707 _('[OPTION]... [TEXT]'))
1707 _('[OPTION]... [TEXT]'))
1708 def debugbuilddag(ui, repo, text=None,
1708 def debugbuilddag(ui, repo, text=None,
1709 mergeable_file=False,
1709 mergeable_file=False,
1710 overwritten_file=False,
1710 overwritten_file=False,
1711 new_file=False):
1711 new_file=False):
1712 """builds a repo with a given DAG from scratch in the current empty repo
1712 """builds a repo with a given DAG from scratch in the current empty repo
1713
1713
1714 The description of the DAG is read from stdin if not given on the
1714 The description of the DAG is read from stdin if not given on the
1715 command line.
1715 command line.
1716
1716
1717 Elements:
1717 Elements:
1718
1718
1719 - "+n" is a linear run of n nodes based on the current default parent
1719 - "+n" is a linear run of n nodes based on the current default parent
1720 - "." is a single node based on the current default parent
1720 - "." is a single node based on the current default parent
1721 - "$" resets the default parent to null (implied at the start);
1721 - "$" resets the default parent to null (implied at the start);
1722 otherwise the default parent is always the last node created
1722 otherwise the default parent is always the last node created
1723 - "<p" sets the default parent to the backref p
1723 - "<p" sets the default parent to the backref p
1724 - "*p" is a fork at parent p, which is a backref
1724 - "*p" is a fork at parent p, which is a backref
1725 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
1725 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
1726 - "/p2" is a merge of the preceding node and p2
1726 - "/p2" is a merge of the preceding node and p2
1727 - ":tag" defines a local tag for the preceding node
1727 - ":tag" defines a local tag for the preceding node
1728 - "@branch" sets the named branch for subsequent nodes
1728 - "@branch" sets the named branch for subsequent nodes
1729 - "#...\\n" is a comment up to the end of the line
1729 - "#...\\n" is a comment up to the end of the line
1730
1730
1731 Whitespace between the above elements is ignored.
1731 Whitespace between the above elements is ignored.
1732
1732
1733 A backref is either
1733 A backref is either
1734
1734
1735 - a number n, which references the node curr-n, where curr is the current
1735 - a number n, which references the node curr-n, where curr is the current
1736 node, or
1736 node, or
1737 - the name of a local tag you placed earlier using ":tag", or
1737 - the name of a local tag you placed earlier using ":tag", or
1738 - empty to denote the default parent.
1738 - empty to denote the default parent.
1739
1739
1740 All string-valued elements are either strictly alphanumeric, or must
1740 All string-valued elements are either strictly alphanumeric, or must
1741 be enclosed in double quotes ("..."), with "\\" as escape character.
1741 be enclosed in double quotes ("..."), with "\\" as escape character.
1742 """
1742 """
1743
1743
1744 if text is None:
1744 if text is None:
1745 ui.status(_("reading DAG from stdin\n"))
1745 ui.status(_("reading DAG from stdin\n"))
1746 text = ui.fin.read()
1746 text = ui.fin.read()
1747
1747
1748 cl = repo.changelog
1748 cl = repo.changelog
1749 if len(cl) > 0:
1749 if len(cl) > 0:
1750 raise util.Abort(_('repository is not empty'))
1750 raise util.Abort(_('repository is not empty'))
1751
1751
1752 # determine number of revs in DAG
1752 # determine number of revs in DAG
1753 total = 0
1753 total = 0
1754 for type, data in dagparser.parsedag(text):
1754 for type, data in dagparser.parsedag(text):
1755 if type == 'n':
1755 if type == 'n':
1756 total += 1
1756 total += 1
1757
1757
1758 if mergeable_file:
1758 if mergeable_file:
1759 linesperrev = 2
1759 linesperrev = 2
1760 # make a file with k lines per rev
1760 # make a file with k lines per rev
1761 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
1761 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
1762 initialmergedlines.append("")
1762 initialmergedlines.append("")
1763
1763
1764 tags = []
1764 tags = []
1765
1765
1766 lock = tr = None
1766 lock = tr = None
1767 try:
1767 try:
1768 lock = repo.lock()
1768 lock = repo.lock()
1769 tr = repo.transaction("builddag")
1769 tr = repo.transaction("builddag")
1770
1770
1771 at = -1
1771 at = -1
1772 atbranch = 'default'
1772 atbranch = 'default'
1773 nodeids = []
1773 nodeids = []
1774 id = 0
1774 id = 0
1775 ui.progress(_('building'), id, unit=_('revisions'), total=total)
1775 ui.progress(_('building'), id, unit=_('revisions'), total=total)
1776 for type, data in dagparser.parsedag(text):
1776 for type, data in dagparser.parsedag(text):
1777 if type == 'n':
1777 if type == 'n':
1778 ui.note(('node %s\n' % str(data)))
1778 ui.note(('node %s\n' % str(data)))
1779 id, ps = data
1779 id, ps = data
1780
1780
1781 files = []
1781 files = []
1782 fctxs = {}
1782 fctxs = {}
1783
1783
1784 p2 = None
1784 p2 = None
1785 if mergeable_file:
1785 if mergeable_file:
1786 fn = "mf"
1786 fn = "mf"
1787 p1 = repo[ps[0]]
1787 p1 = repo[ps[0]]
1788 if len(ps) > 1:
1788 if len(ps) > 1:
1789 p2 = repo[ps[1]]
1789 p2 = repo[ps[1]]
1790 pa = p1.ancestor(p2)
1790 pa = p1.ancestor(p2)
1791 base, local, other = [x[fn].data() for x in (pa, p1,
1791 base, local, other = [x[fn].data() for x in (pa, p1,
1792 p2)]
1792 p2)]
1793 m3 = simplemerge.Merge3Text(base, local, other)
1793 m3 = simplemerge.Merge3Text(base, local, other)
1794 ml = [l.strip() for l in m3.merge_lines()]
1794 ml = [l.strip() for l in m3.merge_lines()]
1795 ml.append("")
1795 ml.append("")
1796 elif at > 0:
1796 elif at > 0:
1797 ml = p1[fn].data().split("\n")
1797 ml = p1[fn].data().split("\n")
1798 else:
1798 else:
1799 ml = initialmergedlines
1799 ml = initialmergedlines
1800 ml[id * linesperrev] += " r%i" % id
1800 ml[id * linesperrev] += " r%i" % id
1801 mergedtext = "\n".join(ml)
1801 mergedtext = "\n".join(ml)
1802 files.append(fn)
1802 files.append(fn)
1803 fctxs[fn] = context.memfilectx(repo, fn, mergedtext)
1803 fctxs[fn] = context.memfilectx(repo, fn, mergedtext)
1804
1804
1805 if overwritten_file:
1805 if overwritten_file:
1806 fn = "of"
1806 fn = "of"
1807 files.append(fn)
1807 files.append(fn)
1808 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
1808 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
1809
1809
1810 if new_file:
1810 if new_file:
1811 fn = "nf%i" % id
1811 fn = "nf%i" % id
1812 files.append(fn)
1812 files.append(fn)
1813 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
1813 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
1814 if len(ps) > 1:
1814 if len(ps) > 1:
1815 if not p2:
1815 if not p2:
1816 p2 = repo[ps[1]]
1816 p2 = repo[ps[1]]
1817 for fn in p2:
1817 for fn in p2:
1818 if fn.startswith("nf"):
1818 if fn.startswith("nf"):
1819 files.append(fn)
1819 files.append(fn)
1820 fctxs[fn] = p2[fn]
1820 fctxs[fn] = p2[fn]
1821
1821
1822 def fctxfn(repo, cx, path):
1822 def fctxfn(repo, cx, path):
1823 return fctxs.get(path)
1823 return fctxs.get(path)
1824
1824
1825 if len(ps) == 0 or ps[0] < 0:
1825 if len(ps) == 0 or ps[0] < 0:
1826 pars = [None, None]
1826 pars = [None, None]
1827 elif len(ps) == 1:
1827 elif len(ps) == 1:
1828 pars = [nodeids[ps[0]], None]
1828 pars = [nodeids[ps[0]], None]
1829 else:
1829 else:
1830 pars = [nodeids[p] for p in ps]
1830 pars = [nodeids[p] for p in ps]
1831 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
1831 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
1832 date=(id, 0),
1832 date=(id, 0),
1833 user="debugbuilddag",
1833 user="debugbuilddag",
1834 extra={'branch': atbranch})
1834 extra={'branch': atbranch})
1835 nodeid = repo.commitctx(cx)
1835 nodeid = repo.commitctx(cx)
1836 nodeids.append(nodeid)
1836 nodeids.append(nodeid)
1837 at = id
1837 at = id
1838 elif type == 'l':
1838 elif type == 'l':
1839 id, name = data
1839 id, name = data
1840 ui.note(('tag %s\n' % name))
1840 ui.note(('tag %s\n' % name))
1841 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
1841 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
1842 elif type == 'a':
1842 elif type == 'a':
1843 ui.note(('branch %s\n' % data))
1843 ui.note(('branch %s\n' % data))
1844 atbranch = data
1844 atbranch = data
1845 ui.progress(_('building'), id, unit=_('revisions'), total=total)
1845 ui.progress(_('building'), id, unit=_('revisions'), total=total)
1846 tr.close()
1846 tr.close()
1847
1847
1848 if tags:
1848 if tags:
1849 repo.vfs.write("localtags", "".join(tags))
1849 repo.vfs.write("localtags", "".join(tags))
1850 finally:
1850 finally:
1851 ui.progress(_('building'), None)
1851 ui.progress(_('building'), None)
1852 release(tr, lock)
1852 release(tr, lock)
1853
1853
1854 @command('debugbundle',
1854 @command('debugbundle',
1855 [('a', 'all', None, _('show all details'))],
1855 [('a', 'all', None, _('show all details'))],
1856 _('FILE'),
1856 _('FILE'),
1857 norepo=True)
1857 norepo=True)
1858 def debugbundle(ui, bundlepath, all=None, **opts):
1858 def debugbundle(ui, bundlepath, all=None, **opts):
1859 """lists the contents of a bundle"""
1859 """lists the contents of a bundle"""
1860 f = hg.openpath(ui, bundlepath)
1860 f = hg.openpath(ui, bundlepath)
1861 try:
1861 try:
1862 gen = exchange.readbundle(ui, f, bundlepath)
1862 gen = exchange.readbundle(ui, f, bundlepath)
1863 if isinstance(gen, bundle2.unbundle20):
1863 if isinstance(gen, bundle2.unbundle20):
1864 return _debugbundle2(ui, gen, all=all, **opts)
1864 return _debugbundle2(ui, gen, all=all, **opts)
1865 if all:
1865 if all:
1866 ui.write(("format: id, p1, p2, cset, delta base, len(delta)\n"))
1866 ui.write(("format: id, p1, p2, cset, delta base, len(delta)\n"))
1867
1867
1868 def showchunks(named):
1868 def showchunks(named):
1869 ui.write("\n%s\n" % named)
1869 ui.write("\n%s\n" % named)
1870 chain = None
1870 chain = None
1871 while True:
1871 while True:
1872 chunkdata = gen.deltachunk(chain)
1872 chunkdata = gen.deltachunk(chain)
1873 if not chunkdata:
1873 if not chunkdata:
1874 break
1874 break
1875 node = chunkdata['node']
1875 node = chunkdata['node']
1876 p1 = chunkdata['p1']
1876 p1 = chunkdata['p1']
1877 p2 = chunkdata['p2']
1877 p2 = chunkdata['p2']
1878 cs = chunkdata['cs']
1878 cs = chunkdata['cs']
1879 deltabase = chunkdata['deltabase']
1879 deltabase = chunkdata['deltabase']
1880 delta = chunkdata['delta']
1880 delta = chunkdata['delta']
1881 ui.write("%s %s %s %s %s %s\n" %
1881 ui.write("%s %s %s %s %s %s\n" %
1882 (hex(node), hex(p1), hex(p2),
1882 (hex(node), hex(p1), hex(p2),
1883 hex(cs), hex(deltabase), len(delta)))
1883 hex(cs), hex(deltabase), len(delta)))
1884 chain = node
1884 chain = node
1885
1885
1886 chunkdata = gen.changelogheader()
1886 chunkdata = gen.changelogheader()
1887 showchunks("changelog")
1887 showchunks("changelog")
1888 chunkdata = gen.manifestheader()
1888 chunkdata = gen.manifestheader()
1889 showchunks("manifest")
1889 showchunks("manifest")
1890 while True:
1890 while True:
1891 chunkdata = gen.filelogheader()
1891 chunkdata = gen.filelogheader()
1892 if not chunkdata:
1892 if not chunkdata:
1893 break
1893 break
1894 fname = chunkdata['filename']
1894 fname = chunkdata['filename']
1895 showchunks(fname)
1895 showchunks(fname)
1896 else:
1896 else:
1897 if isinstance(gen, bundle2.unbundle20):
1897 if isinstance(gen, bundle2.unbundle20):
1898 raise util.Abort(_('use debugbundle2 for this file'))
1898 raise util.Abort(_('use debugbundle2 for this file'))
1899 chunkdata = gen.changelogheader()
1899 chunkdata = gen.changelogheader()
1900 chain = None
1900 chain = None
1901 while True:
1901 while True:
1902 chunkdata = gen.deltachunk(chain)
1902 chunkdata = gen.deltachunk(chain)
1903 if not chunkdata:
1903 if not chunkdata:
1904 break
1904 break
1905 node = chunkdata['node']
1905 node = chunkdata['node']
1906 ui.write("%s\n" % hex(node))
1906 ui.write("%s\n" % hex(node))
1907 chain = node
1907 chain = node
1908 finally:
1908 finally:
1909 f.close()
1909 f.close()
1910
1910
1911 def _debugbundle2(ui, gen, **opts):
1911 def _debugbundle2(ui, gen, **opts):
1912 """lists the contents of a bundle2"""
1912 """lists the contents of a bundle2"""
1913 if not isinstance(gen, bundle2.unbundle20):
1913 if not isinstance(gen, bundle2.unbundle20):
1914 raise util.Abort(_('not a bundle2 file'))
1914 raise util.Abort(_('not a bundle2 file'))
1915 ui.write(('Stream params: %s\n' % repr(gen.params)))
1915 ui.write(('Stream params: %s\n' % repr(gen.params)))
1916 for part in gen.iterparts():
1916 for part in gen.iterparts():
1917 ui.write('%s -- %r\n' % (part.type, repr(part.params)))
1917 ui.write('%s -- %r\n' % (part.type, repr(part.params)))
1918 if part.type == 'changegroup':
1918 if part.type == 'changegroup':
1919 version = part.params.get('version', '01')
1919 version = part.params.get('version', '01')
1920 cg = changegroup.packermap[version][1](part, 'UN')
1920 cg = changegroup.packermap[version][1](part, 'UN')
1921 chunkdata = cg.changelogheader()
1921 chunkdata = cg.changelogheader()
1922 chain = None
1922 chain = None
1923 while True:
1923 while True:
1924 chunkdata = cg.deltachunk(chain)
1924 chunkdata = cg.deltachunk(chain)
1925 if not chunkdata:
1925 if not chunkdata:
1926 break
1926 break
1927 node = chunkdata['node']
1927 node = chunkdata['node']
1928 ui.write(" %s\n" % hex(node))
1928 ui.write(" %s\n" % hex(node))
1929 chain = node
1929 chain = node
1930
1930
1931 @command('debugcheckstate', [], '')
1931 @command('debugcheckstate', [], '')
1932 def debugcheckstate(ui, repo):
1932 def debugcheckstate(ui, repo):
1933 """validate the correctness of the current dirstate"""
1933 """validate the correctness of the current dirstate"""
1934 parent1, parent2 = repo.dirstate.parents()
1934 parent1, parent2 = repo.dirstate.parents()
1935 m1 = repo[parent1].manifest()
1935 m1 = repo[parent1].manifest()
1936 m2 = repo[parent2].manifest()
1936 m2 = repo[parent2].manifest()
1937 errors = 0
1937 errors = 0
1938 for f in repo.dirstate:
1938 for f in repo.dirstate:
1939 state = repo.dirstate[f]
1939 state = repo.dirstate[f]
1940 if state in "nr" and f not in m1:
1940 if state in "nr" and f not in m1:
1941 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1941 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1942 errors += 1
1942 errors += 1
1943 if state in "a" and f in m1:
1943 if state in "a" and f in m1:
1944 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1944 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1945 errors += 1
1945 errors += 1
1946 if state in "m" and f not in m1 and f not in m2:
1946 if state in "m" and f not in m1 and f not in m2:
1947 ui.warn(_("%s in state %s, but not in either manifest\n") %
1947 ui.warn(_("%s in state %s, but not in either manifest\n") %
1948 (f, state))
1948 (f, state))
1949 errors += 1
1949 errors += 1
1950 for f in m1:
1950 for f in m1:
1951 state = repo.dirstate[f]
1951 state = repo.dirstate[f]
1952 if state not in "nrm":
1952 if state not in "nrm":
1953 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
1953 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
1954 errors += 1
1954 errors += 1
1955 if errors:
1955 if errors:
1956 error = _(".hg/dirstate inconsistent with current parent's manifest")
1956 error = _(".hg/dirstate inconsistent with current parent's manifest")
1957 raise util.Abort(error)
1957 raise util.Abort(error)
1958
1958
1959 @command('debugcommands', [], _('[COMMAND]'), norepo=True)
1959 @command('debugcommands', [], _('[COMMAND]'), norepo=True)
1960 def debugcommands(ui, cmd='', *args):
1960 def debugcommands(ui, cmd='', *args):
1961 """list all available commands and options"""
1961 """list all available commands and options"""
1962 for cmd, vals in sorted(table.iteritems()):
1962 for cmd, vals in sorted(table.iteritems()):
1963 cmd = cmd.split('|')[0].strip('^')
1963 cmd = cmd.split('|')[0].strip('^')
1964 opts = ', '.join([i[1] for i in vals[1]])
1964 opts = ', '.join([i[1] for i in vals[1]])
1965 ui.write('%s: %s\n' % (cmd, opts))
1965 ui.write('%s: %s\n' % (cmd, opts))
1966
1966
1967 @command('debugcomplete',
1967 @command('debugcomplete',
1968 [('o', 'options', None, _('show the command options'))],
1968 [('o', 'options', None, _('show the command options'))],
1969 _('[-o] CMD'),
1969 _('[-o] CMD'),
1970 norepo=True)
1970 norepo=True)
1971 def debugcomplete(ui, cmd='', **opts):
1971 def debugcomplete(ui, cmd='', **opts):
1972 """returns the completion list associated with the given command"""
1972 """returns the completion list associated with the given command"""
1973
1973
1974 if opts.get('options'):
1974 if opts.get('options'):
1975 options = []
1975 options = []
1976 otables = [globalopts]
1976 otables = [globalopts]
1977 if cmd:
1977 if cmd:
1978 aliases, entry = cmdutil.findcmd(cmd, table, False)
1978 aliases, entry = cmdutil.findcmd(cmd, table, False)
1979 otables.append(entry[1])
1979 otables.append(entry[1])
1980 for t in otables:
1980 for t in otables:
1981 for o in t:
1981 for o in t:
1982 if "(DEPRECATED)" in o[3]:
1982 if "(DEPRECATED)" in o[3]:
1983 continue
1983 continue
1984 if o[0]:
1984 if o[0]:
1985 options.append('-%s' % o[0])
1985 options.append('-%s' % o[0])
1986 options.append('--%s' % o[1])
1986 options.append('--%s' % o[1])
1987 ui.write("%s\n" % "\n".join(options))
1987 ui.write("%s\n" % "\n".join(options))
1988 return
1988 return
1989
1989
1990 cmdlist, unused_allcmds = cmdutil.findpossible(cmd, table)
1990 cmdlist, unused_allcmds = cmdutil.findpossible(cmd, table)
1991 if ui.verbose:
1991 if ui.verbose:
1992 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
1992 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
1993 ui.write("%s\n" % "\n".join(sorted(cmdlist)))
1993 ui.write("%s\n" % "\n".join(sorted(cmdlist)))
1994
1994
1995 @command('debugdag',
1995 @command('debugdag',
1996 [('t', 'tags', None, _('use tags as labels')),
1996 [('t', 'tags', None, _('use tags as labels')),
1997 ('b', 'branches', None, _('annotate with branch names')),
1997 ('b', 'branches', None, _('annotate with branch names')),
1998 ('', 'dots', None, _('use dots for runs')),
1998 ('', 'dots', None, _('use dots for runs')),
1999 ('s', 'spaces', None, _('separate elements by spaces'))],
1999 ('s', 'spaces', None, _('separate elements by spaces'))],
2000 _('[OPTION]... [FILE [REV]...]'),
2000 _('[OPTION]... [FILE [REV]...]'),
2001 optionalrepo=True)
2001 optionalrepo=True)
2002 def debugdag(ui, repo, file_=None, *revs, **opts):
2002 def debugdag(ui, repo, file_=None, *revs, **opts):
2003 """format the changelog or an index DAG as a concise textual description
2003 """format the changelog or an index DAG as a concise textual description
2004
2004
2005 If you pass a revlog index, the revlog's DAG is emitted. If you list
2005 If you pass a revlog index, the revlog's DAG is emitted. If you list
2006 revision numbers, they get labeled in the output as rN.
2006 revision numbers, they get labeled in the output as rN.
2007
2007
2008 Otherwise, the changelog DAG of the current repo is emitted.
2008 Otherwise, the changelog DAG of the current repo is emitted.
2009 """
2009 """
2010 spaces = opts.get('spaces')
2010 spaces = opts.get('spaces')
2011 dots = opts.get('dots')
2011 dots = opts.get('dots')
2012 if file_:
2012 if file_:
2013 rlog = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_)
2013 rlog = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_)
2014 revs = set((int(r) for r in revs))
2014 revs = set((int(r) for r in revs))
2015 def events():
2015 def events():
2016 for r in rlog:
2016 for r in rlog:
2017 yield 'n', (r, list(p for p in rlog.parentrevs(r)
2017 yield 'n', (r, list(p for p in rlog.parentrevs(r)
2018 if p != -1))
2018 if p != -1))
2019 if r in revs:
2019 if r in revs:
2020 yield 'l', (r, "r%i" % r)
2020 yield 'l', (r, "r%i" % r)
2021 elif repo:
2021 elif repo:
2022 cl = repo.changelog
2022 cl = repo.changelog
2023 tags = opts.get('tags')
2023 tags = opts.get('tags')
2024 branches = opts.get('branches')
2024 branches = opts.get('branches')
2025 if tags:
2025 if tags:
2026 labels = {}
2026 labels = {}
2027 for l, n in repo.tags().items():
2027 for l, n in repo.tags().items():
2028 labels.setdefault(cl.rev(n), []).append(l)
2028 labels.setdefault(cl.rev(n), []).append(l)
2029 def events():
2029 def events():
2030 b = "default"
2030 b = "default"
2031 for r in cl:
2031 for r in cl:
2032 if branches:
2032 if branches:
2033 newb = cl.read(cl.node(r))[5]['branch']
2033 newb = cl.read(cl.node(r))[5]['branch']
2034 if newb != b:
2034 if newb != b:
2035 yield 'a', newb
2035 yield 'a', newb
2036 b = newb
2036 b = newb
2037 yield 'n', (r, list(p for p in cl.parentrevs(r)
2037 yield 'n', (r, list(p for p in cl.parentrevs(r)
2038 if p != -1))
2038 if p != -1))
2039 if tags:
2039 if tags:
2040 ls = labels.get(r)
2040 ls = labels.get(r)
2041 if ls:
2041 if ls:
2042 for l in ls:
2042 for l in ls:
2043 yield 'l', (r, l)
2043 yield 'l', (r, l)
2044 else:
2044 else:
2045 raise util.Abort(_('need repo for changelog dag'))
2045 raise util.Abort(_('need repo for changelog dag'))
2046
2046
2047 for line in dagparser.dagtextlines(events(),
2047 for line in dagparser.dagtextlines(events(),
2048 addspaces=spaces,
2048 addspaces=spaces,
2049 wraplabels=True,
2049 wraplabels=True,
2050 wrapannotations=True,
2050 wrapannotations=True,
2051 wrapnonlinear=dots,
2051 wrapnonlinear=dots,
2052 usedots=dots,
2052 usedots=dots,
2053 maxlinewidth=70):
2053 maxlinewidth=70):
2054 ui.write(line)
2054 ui.write(line)
2055 ui.write("\n")
2055 ui.write("\n")
2056
2056
2057 @command('debugdata',
2057 @command('debugdata',
2058 [('c', 'changelog', False, _('open changelog')),
2058 [('c', 'changelog', False, _('open changelog')),
2059 ('m', 'manifest', False, _('open manifest'))],
2059 ('m', 'manifest', False, _('open manifest'))],
2060 _('-c|-m|FILE REV'))
2060 _('-c|-m|FILE REV'))
2061 def debugdata(ui, repo, file_, rev=None, **opts):
2061 def debugdata(ui, repo, file_, rev=None, **opts):
2062 """dump the contents of a data file revision"""
2062 """dump the contents of a data file revision"""
2063 if opts.get('changelog') or opts.get('manifest'):
2063 if opts.get('changelog') or opts.get('manifest'):
2064 file_, rev = None, file_
2064 file_, rev = None, file_
2065 elif rev is None:
2065 elif rev is None:
2066 raise error.CommandError('debugdata', _('invalid arguments'))
2066 raise error.CommandError('debugdata', _('invalid arguments'))
2067 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
2067 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
2068 try:
2068 try:
2069 ui.write(r.revision(r.lookup(rev)))
2069 ui.write(r.revision(r.lookup(rev)))
2070 except KeyError:
2070 except KeyError:
2071 raise util.Abort(_('invalid revision identifier %s') % rev)
2071 raise util.Abort(_('invalid revision identifier %s') % rev)
2072
2072
2073 @command('debugdate',
2073 @command('debugdate',
2074 [('e', 'extended', None, _('try extended date formats'))],
2074 [('e', 'extended', None, _('try extended date formats'))],
2075 _('[-e] DATE [RANGE]'),
2075 _('[-e] DATE [RANGE]'),
2076 norepo=True, optionalrepo=True)
2076 norepo=True, optionalrepo=True)
2077 def debugdate(ui, date, range=None, **opts):
2077 def debugdate(ui, date, range=None, **opts):
2078 """parse and display a date"""
2078 """parse and display a date"""
2079 if opts["extended"]:
2079 if opts["extended"]:
2080 d = util.parsedate(date, util.extendeddateformats)
2080 d = util.parsedate(date, util.extendeddateformats)
2081 else:
2081 else:
2082 d = util.parsedate(date)
2082 d = util.parsedate(date)
2083 ui.write(("internal: %s %s\n") % d)
2083 ui.write(("internal: %s %s\n") % d)
2084 ui.write(("standard: %s\n") % util.datestr(d))
2084 ui.write(("standard: %s\n") % util.datestr(d))
2085 if range:
2085 if range:
2086 m = util.matchdate(range)
2086 m = util.matchdate(range)
2087 ui.write(("match: %s\n") % m(d[0]))
2087 ui.write(("match: %s\n") % m(d[0]))
2088
2088
2089 @command('debugdiscovery',
2089 @command('debugdiscovery',
2090 [('', 'old', None, _('use old-style discovery')),
2090 [('', 'old', None, _('use old-style discovery')),
2091 ('', 'nonheads', None,
2091 ('', 'nonheads', None,
2092 _('use old-style discovery with non-heads included')),
2092 _('use old-style discovery with non-heads included')),
2093 ] + remoteopts,
2093 ] + remoteopts,
2094 _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
2094 _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
2095 def debugdiscovery(ui, repo, remoteurl="default", **opts):
2095 def debugdiscovery(ui, repo, remoteurl="default", **opts):
2096 """runs the changeset discovery protocol in isolation"""
2096 """runs the changeset discovery protocol in isolation"""
2097 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
2097 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
2098 opts.get('branch'))
2098 opts.get('branch'))
2099 remote = hg.peer(repo, opts, remoteurl)
2099 remote = hg.peer(repo, opts, remoteurl)
2100 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
2100 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
2101
2101
2102 # make sure tests are repeatable
2102 # make sure tests are repeatable
2103 random.seed(12323)
2103 random.seed(12323)
2104
2104
2105 def doit(localheads, remoteheads, remote=remote):
2105 def doit(localheads, remoteheads, remote=remote):
2106 if opts.get('old'):
2106 if opts.get('old'):
2107 if localheads:
2107 if localheads:
2108 raise util.Abort('cannot use localheads with old style '
2108 raise util.Abort('cannot use localheads with old style '
2109 'discovery')
2109 'discovery')
2110 if not util.safehasattr(remote, 'branches'):
2110 if not util.safehasattr(remote, 'branches'):
2111 # enable in-client legacy support
2111 # enable in-client legacy support
2112 remote = localrepo.locallegacypeer(remote.local())
2112 remote = localrepo.locallegacypeer(remote.local())
2113 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
2113 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
2114 force=True)
2114 force=True)
2115 common = set(common)
2115 common = set(common)
2116 if not opts.get('nonheads'):
2116 if not opts.get('nonheads'):
2117 ui.write(("unpruned common: %s\n") %
2117 ui.write(("unpruned common: %s\n") %
2118 " ".join(sorted(short(n) for n in common)))
2118 " ".join(sorted(short(n) for n in common)))
2119 dag = dagutil.revlogdag(repo.changelog)
2119 dag = dagutil.revlogdag(repo.changelog)
2120 all = dag.ancestorset(dag.internalizeall(common))
2120 all = dag.ancestorset(dag.internalizeall(common))
2121 common = dag.externalizeall(dag.headsetofconnecteds(all))
2121 common = dag.externalizeall(dag.headsetofconnecteds(all))
2122 else:
2122 else:
2123 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
2123 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
2124 common = set(common)
2124 common = set(common)
2125 rheads = set(hds)
2125 rheads = set(hds)
2126 lheads = set(repo.heads())
2126 lheads = set(repo.heads())
2127 ui.write(("common heads: %s\n") %
2127 ui.write(("common heads: %s\n") %
2128 " ".join(sorted(short(n) for n in common)))
2128 " ".join(sorted(short(n) for n in common)))
2129 if lheads <= common:
2129 if lheads <= common:
2130 ui.write(("local is subset\n"))
2130 ui.write(("local is subset\n"))
2131 elif rheads <= common:
2131 elif rheads <= common:
2132 ui.write(("remote is subset\n"))
2132 ui.write(("remote is subset\n"))
2133
2133
2134 serverlogs = opts.get('serverlog')
2134 serverlogs = opts.get('serverlog')
2135 if serverlogs:
2135 if serverlogs:
2136 for filename in serverlogs:
2136 for filename in serverlogs:
2137 logfile = open(filename, 'r')
2137 logfile = open(filename, 'r')
2138 try:
2138 try:
2139 line = logfile.readline()
2139 line = logfile.readline()
2140 while line:
2140 while line:
2141 parts = line.strip().split(';')
2141 parts = line.strip().split(';')
2142 op = parts[1]
2142 op = parts[1]
2143 if op == 'cg':
2143 if op == 'cg':
2144 pass
2144 pass
2145 elif op == 'cgss':
2145 elif op == 'cgss':
2146 doit(parts[2].split(' '), parts[3].split(' '))
2146 doit(parts[2].split(' '), parts[3].split(' '))
2147 elif op == 'unb':
2147 elif op == 'unb':
2148 doit(parts[3].split(' '), parts[2].split(' '))
2148 doit(parts[3].split(' '), parts[2].split(' '))
2149 line = logfile.readline()
2149 line = logfile.readline()
2150 finally:
2150 finally:
2151 logfile.close()
2151 logfile.close()
2152
2152
2153 else:
2153 else:
2154 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
2154 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
2155 opts.get('remote_head'))
2155 opts.get('remote_head'))
2156 localrevs = opts.get('local_head')
2156 localrevs = opts.get('local_head')
2157 doit(localrevs, remoterevs)
2157 doit(localrevs, remoterevs)
2158
2158
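# Illustrative sketch (not part of commands.py): the core of the new-style
# discovery path above, reduced to a helper that only reports the common
# heads against one peer. It relies on this module's existing imports
# (hg, setdiscovery, short); the helper name is hypothetical.
def _discoverysummary(ui, repo, remoteurl="default"):
    remote = hg.peer(repo, {}, ui.expandpath(remoteurl))
    common, anyinc, hds = setdiscovery.findcommonheads(ui, repo, remote)
    ui.write("common heads: %s\n"
             % " ".join(sorted(short(n) for n in common)))
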
2159 @command('debugfileset',
2159 @command('debugfileset',
2160 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
2160 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
2161 _('[-r REV] FILESPEC'))
2161 _('[-r REV] FILESPEC'))
2162 def debugfileset(ui, repo, expr, **opts):
2162 def debugfileset(ui, repo, expr, **opts):
2163 '''parse and apply a fileset specification'''
2163 '''parse and apply a fileset specification'''
2164 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
2164 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
2165 if ui.verbose:
2165 if ui.verbose:
2166 tree = fileset.parse(expr)[0]
2166 tree = fileset.parse(expr)[0]
2167 ui.note(tree, "\n")
2167 ui.note(tree, "\n")
2168
2168
2169 for f in ctx.getfileset(expr):
2169 for f in ctx.getfileset(expr):
2170 ui.write("%s\n" % f)
2170 ui.write("%s\n" % f)
2171
2171
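# Illustrative sketch (not part of commands.py): evaluates a fileset
# expression against the working copy the same way debugfileset does.
# The expression and the helper name are arbitrary assumptions.
def _filesetexample(ui, repo):
    ctx = scmutil.revsingle(repo, None, None)
    for f in ctx.getfileset('added() or modified()'):
        ui.write("%s\n" % f)
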
2172 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
2172 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
2173 def debugfsinfo(ui, path="."):
2173 def debugfsinfo(ui, path="."):
2174 """show information detected about current filesystem"""
2174 """show information detected about current filesystem"""
2175 util.writefile('.debugfsinfo', '')
2175 util.writefile('.debugfsinfo', '')
2176 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
2176 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
2177 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
2177 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
2178 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
2178 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
2179 ui.write(('case-sensitive: %s\n') % (util.checkcase('.debugfsinfo')
2179 ui.write(('case-sensitive: %s\n') % (util.checkcase('.debugfsinfo')
2180 and 'yes' or 'no'))
2180 and 'yes' or 'no'))
2181 os.unlink('.debugfsinfo')
2181 os.unlink('.debugfsinfo')
2182
2182
2183 @command('debuggetbundle',
2183 @command('debuggetbundle',
2184 [('H', 'head', [], _('id of head node'), _('ID')),
2184 [('H', 'head', [], _('id of head node'), _('ID')),
2185 ('C', 'common', [], _('id of common node'), _('ID')),
2185 ('C', 'common', [], _('id of common node'), _('ID')),
2186 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
2186 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
2187 _('REPO FILE [-H|-C ID]...'),
2187 _('REPO FILE [-H|-C ID]...'),
2188 norepo=True)
2188 norepo=True)
2189 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
2189 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
2190 """retrieves a bundle from a repo
2190 """retrieves a bundle from a repo
2191
2191
2192 Every ID must be a full-length hex node id string. Saves the bundle to the
2192 Every ID must be a full-length hex node id string. Saves the bundle to the
2193 given file.
2193 given file.
2194 """
2194 """
2195 repo = hg.peer(ui, opts, repopath)
2195 repo = hg.peer(ui, opts, repopath)
2196 if not repo.capable('getbundle'):
2196 if not repo.capable('getbundle'):
2197 raise util.Abort("getbundle() not supported by target repository")
2197 raise util.Abort("getbundle() not supported by target repository")
2198 args = {}
2198 args = {}
2199 if common:
2199 if common:
2200 args['common'] = [bin(s) for s in common]
2200 args['common'] = [bin(s) for s in common]
2201 if head:
2201 if head:
2202 args['heads'] = [bin(s) for s in head]
2202 args['heads'] = [bin(s) for s in head]
2203 # TODO: get desired bundlecaps from command line.
2203 # TODO: get desired bundlecaps from command line.
2204 args['bundlecaps'] = None
2204 args['bundlecaps'] = None
2205 bundle = repo.getbundle('debug', **args)
2205 bundle = repo.getbundle('debug', **args)
2206
2206
2207 bundletype = opts.get('type', 'bzip2').lower()
2207 bundletype = opts.get('type', 'bzip2').lower()
2208 btypes = {'none': 'HG10UN',
2208 btypes = {'none': 'HG10UN',
2209 'bzip2': 'HG10BZ',
2209 'bzip2': 'HG10BZ',
2210 'gzip': 'HG10GZ',
2210 'gzip': 'HG10GZ',
2211 'bundle2': 'HG20'}
2211 'bundle2': 'HG20'}
2212 bundletype = btypes.get(bundletype)
2212 bundletype = btypes.get(bundletype)
2213 if bundletype not in changegroup.bundletypes:
2213 if bundletype not in changegroup.bundletypes:
2214 raise util.Abort(_('unknown bundle type specified with --type'))
2214 raise util.Abort(_('unknown bundle type specified with --type'))
2215 changegroup.writebundle(ui, bundle, bundlepath, bundletype)
2215 changegroup.writebundle(ui, bundle, bundlepath, bundletype)
2216
2216
2217 @command('debugignore', [], '')
2217 @command('debugignore', [], '')
2218 def debugignore(ui, repo, *values, **opts):
2218 def debugignore(ui, repo, *values, **opts):
2219 """display the combined ignore pattern"""
2219 """display the combined ignore pattern"""
2220 ignore = repo.dirstate._ignore
2220 ignore = repo.dirstate._ignore
2221 includepat = getattr(ignore, 'includepat', None)
2221 includepat = getattr(ignore, 'includepat', None)
2222 if includepat is not None:
2222 if includepat is not None:
2223 ui.write("%s\n" % includepat)
2223 ui.write("%s\n" % includepat)
2224 else:
2224 else:
2225 raise util.Abort(_("no ignore patterns found"))
2225 raise util.Abort(_("no ignore patterns found"))
2226
2226
2227 @command('debugindex',
2227 @command('debugindex',
2228 [('c', 'changelog', False, _('open changelog')),
2228 [('c', 'changelog', False, _('open changelog')),
2229 ('m', 'manifest', False, _('open manifest')),
2229 ('m', 'manifest', False, _('open manifest')),
2230 ('f', 'format', 0, _('revlog format'), _('FORMAT'))],
2230 ('f', 'format', 0, _('revlog format'), _('FORMAT'))],
2231 _('[-f FORMAT] -c|-m|FILE'),
2231 _('[-f FORMAT] -c|-m|FILE'),
2232 optionalrepo=True)
2232 optionalrepo=True)
2233 def debugindex(ui, repo, file_=None, **opts):
2233 def debugindex(ui, repo, file_=None, **opts):
2234 """dump the contents of an index file"""
2234 """dump the contents of an index file"""
2235 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
2235 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
2236 format = opts.get('format', 0)
2236 format = opts.get('format', 0)
2237 if format not in (0, 1):
2237 if format not in (0, 1):
2238 raise util.Abort(_("unknown format %d") % format)
2238 raise util.Abort(_("unknown format %d") % format)
2239
2239
2240 generaldelta = r.version & revlog.REVLOGGENERALDELTA
2240 generaldelta = r.version & revlog.REVLOGGENERALDELTA
2241 if generaldelta:
2241 if generaldelta:
2242 basehdr = ' delta'
2242 basehdr = ' delta'
2243 else:
2243 else:
2244 basehdr = ' base'
2244 basehdr = ' base'
2245
2245
2246 if ui.debugflag:
2246 if ui.debugflag:
2247 shortfn = hex
2247 shortfn = hex
2248 else:
2248 else:
2249 shortfn = short
2249 shortfn = short
2250
2250
2251 # There might not be anything in r, so have a sane default
2251 # There might not be anything in r, so have a sane default
2252 idlen = 12
2252 idlen = 12
2253 for i in r:
2253 for i in r:
2254 idlen = len(shortfn(r.node(i)))
2254 idlen = len(shortfn(r.node(i)))
2255 break
2255 break
2256
2256
2257 if format == 0:
2257 if format == 0:
2258 ui.write(" rev offset length " + basehdr + " linkrev"
2258 ui.write(" rev offset length " + basehdr + " linkrev"
2259 " %s %s p2\n" % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
2259 " %s %s p2\n" % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
2260 elif format == 1:
2260 elif format == 1:
2261 ui.write(" rev flag offset length"
2261 ui.write(" rev flag offset length"
2262 " size " + basehdr + " link p1 p2"
2262 " size " + basehdr + " link p1 p2"
2263 " %s\n" % "nodeid".rjust(idlen))
2263 " %s\n" % "nodeid".rjust(idlen))
2264
2264
2265 for i in r:
2265 for i in r:
2266 node = r.node(i)
2266 node = r.node(i)
2267 if generaldelta:
2267 if generaldelta:
2268 base = r.deltaparent(i)
2268 base = r.deltaparent(i)
2269 else:
2269 else:
2270 base = r.chainbase(i)
2270 base = r.chainbase(i)
2271 if format == 0:
2271 if format == 0:
2272 try:
2272 try:
2273 pp = r.parents(node)
2273 pp = r.parents(node)
2274 except Exception:
2274 except Exception:
2275 pp = [nullid, nullid]
2275 pp = [nullid, nullid]
2276 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
2276 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
2277 i, r.start(i), r.length(i), base, r.linkrev(i),
2277 i, r.start(i), r.length(i), base, r.linkrev(i),
2278 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
2278 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
2279 elif format == 1:
2279 elif format == 1:
2280 pr = r.parentrevs(i)
2280 pr = r.parentrevs(i)
2281 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
2281 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
2282 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
2282 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
2283 base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
2283 base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
2284
2284
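# Illustrative sketch (not part of commands.py): the minimal loop behind the
# format 0 output above -- walk a revlog and print rev, linkrev and the
# short node for each entry. The helper name is hypothetical.
def _dumprevlognodes(ui, repo, file_):
    r = cmdutil.openrevlog(repo, 'debugindex', file_, {})
    for i in r:
        ui.write("%6d %7d %s\n" % (i, r.linkrev(i), short(r.node(i))))
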
2285 @command('debugindexdot', [], _('FILE'), optionalrepo=True)
2285 @command('debugindexdot', [], _('FILE'), optionalrepo=True)
2286 def debugindexdot(ui, repo, file_):
2286 def debugindexdot(ui, repo, file_):
2287 """dump an index DAG as a graphviz dot file"""
2287 """dump an index DAG as a graphviz dot file"""
2288 r = None
2288 r = None
2289 if repo:
2289 if repo:
2290 filelog = repo.file(file_)
2290 filelog = repo.file(file_)
2291 if len(filelog):
2291 if len(filelog):
2292 r = filelog
2292 r = filelog
2293 if not r:
2293 if not r:
2294 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_)
2294 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_)
2295 ui.write(("digraph G {\n"))
2295 ui.write(("digraph G {\n"))
2296 for i in r:
2296 for i in r:
2297 node = r.node(i)
2297 node = r.node(i)
2298 pp = r.parents(node)
2298 pp = r.parents(node)
2299 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
2299 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
2300 if pp[1] != nullid:
2300 if pp[1] != nullid:
2301 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
2301 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
2302 ui.write("}\n")
2302 ui.write("}\n")
2303
2303
2304 @command('debuginstall', [], '', norepo=True)
2304 @command('debuginstall', [], '', norepo=True)
2305 def debuginstall(ui):
2305 def debuginstall(ui):
2306 '''test Mercurial installation
2306 '''test Mercurial installation
2307
2307
2308 Returns 0 on success.
2308 Returns 0 on success.
2309 '''
2309 '''
2310
2310
2311 def writetemp(contents):
2311 def writetemp(contents):
2312 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
2312 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
2313 f = os.fdopen(fd, "wb")
2313 f = os.fdopen(fd, "wb")
2314 f.write(contents)
2314 f.write(contents)
2315 f.close()
2315 f.close()
2316 return name
2316 return name
2317
2317
2318 problems = 0
2318 problems = 0
2319
2319
2320 # encoding
2320 # encoding
2321 ui.status(_("checking encoding (%s)...\n") % encoding.encoding)
2321 ui.status(_("checking encoding (%s)...\n") % encoding.encoding)
2322 try:
2322 try:
2323 encoding.fromlocal("test")
2323 encoding.fromlocal("test")
2324 except util.Abort, inst:
2324 except util.Abort, inst:
2325 ui.write(" %s\n" % inst)
2325 ui.write(" %s\n" % inst)
2326 ui.write(_(" (check that your locale is properly set)\n"))
2326 ui.write(_(" (check that your locale is properly set)\n"))
2327 problems += 1
2327 problems += 1
2328
2328
2329 # Python
2329 # Python
2330 ui.status(_("checking Python executable (%s)\n") % sys.executable)
2330 ui.status(_("checking Python executable (%s)\n") % sys.executable)
2331 ui.status(_("checking Python version (%s)\n")
2331 ui.status(_("checking Python version (%s)\n")
2332 % ("%s.%s.%s" % sys.version_info[:3]))
2332 % ("%s.%s.%s" % sys.version_info[:3]))
2333 ui.status(_("checking Python lib (%s)...\n")
2333 ui.status(_("checking Python lib (%s)...\n")
2334 % os.path.dirname(os.__file__))
2334 % os.path.dirname(os.__file__))
2335
2335
2336 # compiled modules
2336 # compiled modules
2337 ui.status(_("checking installed modules (%s)...\n")
2337 ui.status(_("checking installed modules (%s)...\n")
2338 % os.path.dirname(__file__))
2338 % os.path.dirname(__file__))
2339 try:
2339 try:
2340 import bdiff, mpatch, base85, osutil
2340 import bdiff, mpatch, base85, osutil
2341 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
2341 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
2342 except Exception, inst:
2342 except Exception, inst:
2343 ui.write(" %s\n" % inst)
2343 ui.write(" %s\n" % inst)
2344 ui.write(_(" One or more extensions could not be found"))
2344 ui.write(_(" One or more extensions could not be found"))
2345 ui.write(_(" (check that you compiled the extensions)\n"))
2345 ui.write(_(" (check that you compiled the extensions)\n"))
2346 problems += 1
2346 problems += 1
2347
2347
2348 # templates
2348 # templates
2349 import templater
2349 import templater
2350 p = templater.templatepaths()
2350 p = templater.templatepaths()
2351 ui.status(_("checking templates (%s)...\n") % ' '.join(p))
2351 ui.status(_("checking templates (%s)...\n") % ' '.join(p))
2352 if p:
2352 if p:
2353 m = templater.templatepath("map-cmdline.default")
2353 m = templater.templatepath("map-cmdline.default")
2354 if m:
2354 if m:
2355 # template found, check if it is working
2355 # template found, check if it is working
2356 try:
2356 try:
2357 templater.templater(m)
2357 templater.templater(m)
2358 except Exception, inst:
2358 except Exception, inst:
2359 ui.write(" %s\n" % inst)
2359 ui.write(" %s\n" % inst)
2360 p = None
2360 p = None
2361 else:
2361 else:
2362 ui.write(_(" template 'default' not found\n"))
2362 ui.write(_(" template 'default' not found\n"))
2363 p = None
2363 p = None
2364 else:
2364 else:
2365 ui.write(_(" no template directories found\n"))
2365 ui.write(_(" no template directories found\n"))
2366 if not p:
2366 if not p:
2367 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
2367 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
2368 problems += 1
2368 problems += 1
2369
2369
2370 # editor
2370 # editor
2371 ui.status(_("checking commit editor...\n"))
2371 ui.status(_("checking commit editor...\n"))
2372 editor = ui.geteditor()
2372 editor = ui.geteditor()
2373 editor = util.expandpath(editor)
2373 editor = util.expandpath(editor)
2374 cmdpath = util.findexe(shlex.split(editor)[0])
2374 cmdpath = util.findexe(shlex.split(editor)[0])
2375 if not cmdpath:
2375 if not cmdpath:
2376 if editor == 'vi':
2376 if editor == 'vi':
2377 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
2377 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
2378 ui.write(_(" (specify a commit editor in your configuration"
2378 ui.write(_(" (specify a commit editor in your configuration"
2379 " file)\n"))
2379 " file)\n"))
2380 else:
2380 else:
2381 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
2381 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
2382 ui.write(_(" (specify a commit editor in your configuration"
2382 ui.write(_(" (specify a commit editor in your configuration"
2383 " file)\n"))
2383 " file)\n"))
2384 problems += 1
2384 problems += 1
2385
2385
2386 # check username
2386 # check username
2387 ui.status(_("checking username...\n"))
2387 ui.status(_("checking username...\n"))
2388 try:
2388 try:
2389 ui.username()
2389 ui.username()
2390 except util.Abort, e:
2390 except util.Abort, e:
2391 ui.write(" %s\n" % e)
2391 ui.write(" %s\n" % e)
2392 ui.write(_(" (specify a username in your configuration file)\n"))
2392 ui.write(_(" (specify a username in your configuration file)\n"))
2393 problems += 1
2393 problems += 1
2394
2394
2395 if not problems:
2395 if not problems:
2396 ui.status(_("no problems detected\n"))
2396 ui.status(_("no problems detected\n"))
2397 else:
2397 else:
2398 ui.write(_("%s problems detected,"
2398 ui.write(_("%s problems detected,"
2399 " please check your install!\n") % problems)
2399 " please check your install!\n") % problems)
2400
2400
2401 return problems
2401 return problems
2402
2402
2403 @command('debugknown', [], _('REPO ID...'), norepo=True)
2403 @command('debugknown', [], _('REPO ID...'), norepo=True)
2404 def debugknown(ui, repopath, *ids, **opts):
2404 def debugknown(ui, repopath, *ids, **opts):
2405 """test whether node ids are known to a repo
2405 """test whether node ids are known to a repo
2406
2406
2407 Every ID must be a full-length hex node id string. Returns a list of 0s
2407 Every ID must be a full-length hex node id string. Returns a list of 0s
2408 and 1s indicating unknown/known.
2408 and 1s indicating unknown/known.
2409 """
2409 """
2410 repo = hg.peer(ui, opts, repopath)
2410 repo = hg.peer(ui, opts, repopath)
2411 if not repo.capable('known'):
2411 if not repo.capable('known'):
2412 raise util.Abort("known() not supported by target repository")
2412 raise util.Abort("known() not supported by target repository")
2413 flags = repo.known([bin(s) for s in ids])
2413 flags = repo.known([bin(s) for s in ids])
2414 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
2414 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
2415
2415
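# Illustrative sketch (not part of commands.py): asks a peer about a single
# node id and returns True/False, using the same capability check and
# known() call as debugknown above. The helper name is hypothetical.
def _isknown(ui, repopath, hexnode):
    peer = hg.peer(ui, {}, repopath)
    if not peer.capable('known'):
        raise util.Abort("known() not supported by target repository")
    return peer.known([bin(hexnode)])[0]
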
2416 @command('debuglabelcomplete', [], _('LABEL...'))
2416 @command('debuglabelcomplete', [], _('LABEL...'))
2417 def debuglabelcomplete(ui, repo, *args):
2417 def debuglabelcomplete(ui, repo, *args):
2418 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
2418 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
2419 debugnamecomplete(ui, repo, *args)
2419 debugnamecomplete(ui, repo, *args)
2420
2420
2421 @command('debugnamecomplete', [], _('NAME...'))
2421 @command('debugnamecomplete', [], _('NAME...'))
2422 def debugnamecomplete(ui, repo, *args):
2422 def debugnamecomplete(ui, repo, *args):
2423 '''complete "names" - tags, open branch names, bookmark names'''
2423 '''complete "names" - tags, open branch names, bookmark names'''
2424
2424
2425 names = set()
2425 names = set()
2426 # since we previously only listed open branches, we will handle that
2426 # since we previously only listed open branches, we will handle that
2427 # specially (after this for loop)
2427 # specially (after this for loop)
2428 for name, ns in repo.names.iteritems():
2428 for name, ns in repo.names.iteritems():
2429 if name != 'branches':
2429 if name != 'branches':
2430 names.update(ns.listnames(repo))
2430 names.update(ns.listnames(repo))
2431 names.update(tag for (tag, heads, tip, closed)
2431 names.update(tag for (tag, heads, tip, closed)
2432 in repo.branchmap().iterbranches() if not closed)
2432 in repo.branchmap().iterbranches() if not closed)
2433 completions = set()
2433 completions = set()
2434 if not args:
2434 if not args:
2435 args = ['']
2435 args = ['']
2436 for a in args:
2436 for a in args:
2437 completions.update(n for n in names if n.startswith(a))
2437 completions.update(n for n in names if n.startswith(a))
2438 ui.write('\n'.join(sorted(completions)))
2438 ui.write('\n'.join(sorted(completions)))
2439 ui.write('\n')
2439 ui.write('\n')
2440
2440
2441 @command('debuglocks',
2441 @command('debuglocks',
2442 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
2442 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
2443 ('W', 'force-wlock', None,
2443 ('W', 'force-wlock', None,
2444 _('free the working state lock (DANGEROUS)'))],
2444 _('free the working state lock (DANGEROUS)'))],
2445 _('[OPTION]...'))
2445 _('[OPTION]...'))
2446 def debuglocks(ui, repo, **opts):
2446 def debuglocks(ui, repo, **opts):
2447 """show or modify state of locks
2447 """show or modify state of locks
2448
2448
2449 By default, this command will show which locks are held. This
2449 By default, this command will show which locks are held. This
2450 includes the user and process holding the lock, the amount of time
2450 includes the user and process holding the lock, the amount of time
2451 the lock has been held, and the machine name where the process is
2451 the lock has been held, and the machine name where the process is
2452 running if it's not local.
2452 running if it's not local.
2453
2453
2454 Locks protect the integrity of Mercurial's data, so should be
2454 Locks protect the integrity of Mercurial's data, so should be
2455 treated with care. System crashes or other interruptions may cause
2455 treated with care. System crashes or other interruptions may cause
2456 locks to not be properly released, though Mercurial will usually
2456 locks to not be properly released, though Mercurial will usually
2457 detect and remove such stale locks automatically.
2457 detect and remove such stale locks automatically.
2458
2458
2459 However, detecting stale locks may not always be possible (for
2459 However, detecting stale locks may not always be possible (for
2460 instance, on a shared filesystem). Removing locks may also be
2460 instance, on a shared filesystem). Removing locks may also be
2461 blocked by filesystem permissions.
2461 blocked by filesystem permissions.
2462
2462
2463 Returns 0 if no locks are held.
2463 Returns 0 if no locks are held.
2464
2464
2465 """
2465 """
2466
2466
2467 if opts.get('force_lock'):
2467 if opts.get('force_lock'):
2468 repo.svfs.unlink('lock')
2468 repo.svfs.unlink('lock')
2469 if opts.get('force_wlock'):
2469 if opts.get('force_wlock'):
2470 repo.vfs.unlink('wlock')
2470 repo.vfs.unlink('wlock')
2471 if opts.get('force_lock') or opts.get('force_wlock'):
2471 if opts.get('force_lock') or opts.get('force_wlock'):
2472 return 0
2472 return 0
2473
2473
2474 now = time.time()
2474 now = time.time()
2475 held = 0
2475 held = 0
2476
2476
2477 def report(vfs, name, method):
2477 def report(vfs, name, method):
2478 # this causes stale locks to get reaped for more accurate reporting
2478 # this causes stale locks to get reaped for more accurate reporting
2479 try:
2479 try:
2480 l = method(False)
2480 l = method(False)
2481 except error.LockHeld:
2481 except error.LockHeld:
2482 l = None
2482 l = None
2483
2483
2484 if l:
2484 if l:
2485 l.release()
2485 l.release()
2486 else:
2486 else:
2487 try:
2487 try:
2488 stat = vfs.lstat(name)
2488 stat = vfs.lstat(name)
2489 age = now - stat.st_mtime
2489 age = now - stat.st_mtime
2490 user = util.username(stat.st_uid)
2490 user = util.username(stat.st_uid)
2491 locker = vfs.readlock(name)
2491 locker = vfs.readlock(name)
2492 if ":" in locker:
2492 if ":" in locker:
2493 host, pid = locker.split(':')
2493 host, pid = locker.split(':')
2494 if host == socket.gethostname():
2494 if host == socket.gethostname():
2495 locker = 'user %s, process %s' % (user, pid)
2495 locker = 'user %s, process %s' % (user, pid)
2496 else:
2496 else:
2497 locker = 'user %s, process %s, host %s' \
2497 locker = 'user %s, process %s, host %s' \
2498 % (user, pid, host)
2498 % (user, pid, host)
2499 ui.write("%-6s %s (%ds)\n" % (name + ":", locker, age))
2499 ui.write("%-6s %s (%ds)\n" % (name + ":", locker, age))
2500 return 1
2500 return 1
2501 except OSError, e:
2501 except OSError, e:
2502 if e.errno != errno.ENOENT:
2502 if e.errno != errno.ENOENT:
2503 raise
2503 raise
2504
2504
2505 ui.write("%-6s free\n" % (name + ":"))
2505 ui.write("%-6s free\n" % (name + ":"))
2506 return 0
2506 return 0
2507
2507
2508 held += report(repo.svfs, "lock", repo.lock)
2508 held += report(repo.svfs, "lock", repo.lock)
2509 held += report(repo.vfs, "wlock", repo.wlock)
2509 held += report(repo.vfs, "wlock", repo.wlock)
2510
2510
2511 return held
2511 return held
2512
2512
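# Illustrative sketch (not part of commands.py): reads the raw lock files
# without trying to acquire them, using the same vfs accessors as report()
# above. The helper name is hypothetical, and the assumption is that a
# missing lock file surfaces as OSError(ENOENT).
def _rawlockinfo(ui, repo):
    for vfs, name in [(repo.svfs, "lock"), (repo.vfs, "wlock")]:
        try:
            ui.write("%-6s %s\n" % (name + ":", vfs.readlock(name)))
        except OSError, e:
            if e.errno != errno.ENOENT:
                raise
            ui.write("%-6s free\n" % (name + ":"))
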
2513 @command('debugobsolete',
2513 @command('debugobsolete',
2514 [('', 'flags', 0, _('markers flag')),
2514 [('', 'flags', 0, _('markers flag')),
2515 ('', 'record-parents', False,
2515 ('', 'record-parents', False,
2516 _('record parent information for the precursor')),
2516 _('record parent information for the precursor')),
2517 ('r', 'rev', [], _('display markers relevant to REV')),
2517 ('r', 'rev', [], _('display markers relevant to REV')),
2518 ] + commitopts2,
2518 ] + commitopts2,
2519 _('[OBSOLETED [REPLACEMENT] [REPL... ]'))
2519 _('[OBSOLETED [REPLACEMENT] [REPL... ]'))
2520 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2520 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2521 """create arbitrary obsolete marker
2521 """create arbitrary obsolete marker
2522
2522
2523 With no arguments, displays the list of obsolescence markers."""
2523 With no arguments, displays the list of obsolescence markers."""
2524
2524
2525 def parsenodeid(s):
2525 def parsenodeid(s):
2526 try:
2526 try:
2527 # We do not use revsingle/revrange functions here to accept
2527 # We do not use revsingle/revrange functions here to accept
2528 # arbitrary node identifiers, possibly not present in the
2528 # arbitrary node identifiers, possibly not present in the
2529 # local repository.
2529 # local repository.
2530 n = bin(s)
2530 n = bin(s)
2531 if len(n) != len(nullid):
2531 if len(n) != len(nullid):
2532 raise TypeError()
2532 raise TypeError()
2533 return n
2533 return n
2534 except TypeError:
2534 except TypeError:
2535 raise util.Abort('changeset references must be full hexadecimal '
2535 raise util.Abort('changeset references must be full hexadecimal '
2536 'node identifiers')
2536 'node identifiers')
2537
2537
2538 if precursor is not None:
2538 if precursor is not None:
2539 if opts['rev']:
2539 if opts['rev']:
2540 raise util.Abort('cannot select revision when creating marker')
2540 raise util.Abort('cannot select revision when creating marker')
2541 metadata = {}
2541 metadata = {}
2542 metadata['user'] = opts['user'] or ui.username()
2542 metadata['user'] = opts['user'] or ui.username()
2543 succs = tuple(parsenodeid(succ) for succ in successors)
2543 succs = tuple(parsenodeid(succ) for succ in successors)
2544 l = repo.lock()
2544 l = repo.lock()
2545 try:
2545 try:
2546 tr = repo.transaction('debugobsolete')
2546 tr = repo.transaction('debugobsolete')
2547 try:
2547 try:
2548 try:
2548 try:
2549 date = opts.get('date')
2549 date = opts.get('date')
2550 if date:
2550 if date:
2551 date = util.parsedate(date)
2551 date = util.parsedate(date)
2552 else:
2552 else:
2553 date = None
2553 date = None
2554 prec = parsenodeid(precursor)
2554 prec = parsenodeid(precursor)
2555 parents = None
2555 parents = None
2556 if opts['record_parents']:
2556 if opts['record_parents']:
2557 if prec not in repo.unfiltered():
2557 if prec not in repo.unfiltered():
2558 raise util.Abort('cannot use --record-parents on '
2558 raise util.Abort('cannot use --record-parents on '
2559 'unknown changesets')
2559 'unknown changesets')
2560 parents = repo.unfiltered()[prec].parents()
2560 parents = repo.unfiltered()[prec].parents()
2561 parents = tuple(p.node() for p in parents)
2561 parents = tuple(p.node() for p in parents)
2562 repo.obsstore.create(tr, prec, succs, opts['flags'],
2562 repo.obsstore.create(tr, prec, succs, opts['flags'],
2563 parents=parents, date=date,
2563 parents=parents, date=date,
2564 metadata=metadata)
2564 metadata=metadata)
2565 tr.close()
2565 tr.close()
2566 except ValueError, exc:
2566 except ValueError, exc:
2567 raise util.Abort(_('bad obsmarker input: %s') % exc)
2567 raise util.Abort(_('bad obsmarker input: %s') % exc)
2568 finally:
2568 finally:
2569 tr.release()
2569 tr.release()
2570 finally:
2570 finally:
2571 l.release()
2571 l.release()
2572 else:
2572 else:
2573 if opts['rev']:
2573 if opts['rev']:
2574 revs = scmutil.revrange(repo, opts['rev'])
2574 revs = scmutil.revrange(repo, opts['rev'])
2575 nodes = [repo[r].node() for r in revs]
2575 nodes = [repo[r].node() for r in revs]
2576 markers = list(obsolete.getmarkers(repo, nodes=nodes))
2576 markers = list(obsolete.getmarkers(repo, nodes=nodes))
2577 markers.sort(key=lambda x: x._data)
2577 markers.sort(key=lambda x: x._data)
2578 else:
2578 else:
2579 markers = obsolete.getmarkers(repo)
2579 markers = obsolete.getmarkers(repo)
2580
2580
2581 for m in markers:
2581 for m in markers:
2582 cmdutil.showmarker(ui, m)
2582 cmdutil.showmarker(ui, m)
2583
2583
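# Illustrative sketch (not part of commands.py): prints only the markers
# relevant to the working copy parent, reusing getmarkers(nodes=...) and
# showmarker() exactly as above. The helper name is hypothetical.
def _showwdirmarkers(ui, repo):
    nodes = [repo['.'].node()]
    for m in obsolete.getmarkers(repo, nodes=nodes):
        cmdutil.showmarker(ui, m)
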
2584 @command('debugpathcomplete',
2584 @command('debugpathcomplete',
2585 [('f', 'full', None, _('complete an entire path')),
2585 [('f', 'full', None, _('complete an entire path')),
2586 ('n', 'normal', None, _('show only normal files')),
2586 ('n', 'normal', None, _('show only normal files')),
2587 ('a', 'added', None, _('show only added files')),
2587 ('a', 'added', None, _('show only added files')),
2588 ('r', 'removed', None, _('show only removed files'))],
2588 ('r', 'removed', None, _('show only removed files'))],
2589 _('FILESPEC...'))
2589 _('FILESPEC...'))
2590 def debugpathcomplete(ui, repo, *specs, **opts):
2590 def debugpathcomplete(ui, repo, *specs, **opts):
2591 '''complete part or all of a tracked path
2591 '''complete part or all of a tracked path
2592
2592
2593 This command supports shells that offer path name completion. It
2593 This command supports shells that offer path name completion. It
2594 currently completes only files already known to the dirstate.
2594 currently completes only files already known to the dirstate.
2595
2595
2596 Completion extends only to the next path segment unless
2596 Completion extends only to the next path segment unless
2597 --full is specified, in which case entire paths are used.'''
2597 --full is specified, in which case entire paths are used.'''
2598
2598
2599 def complete(path, acceptable):
2599 def complete(path, acceptable):
2600 dirstate = repo.dirstate
2600 dirstate = repo.dirstate
2601 spec = os.path.normpath(os.path.join(os.getcwd(), path))
2601 spec = os.path.normpath(os.path.join(os.getcwd(), path))
2602 rootdir = repo.root + os.sep
2602 rootdir = repo.root + os.sep
2603 if spec != repo.root and not spec.startswith(rootdir):
2603 if spec != repo.root and not spec.startswith(rootdir):
2604 return [], []
2604 return [], []
2605 if os.path.isdir(spec):
2605 if os.path.isdir(spec):
2606 spec += '/'
2606 spec += '/'
2607 spec = spec[len(rootdir):]
2607 spec = spec[len(rootdir):]
2608 fixpaths = os.sep != '/'
2608 fixpaths = os.sep != '/'
2609 if fixpaths:
2609 if fixpaths:
2610 spec = spec.replace(os.sep, '/')
2610 spec = spec.replace(os.sep, '/')
2611 speclen = len(spec)
2611 speclen = len(spec)
2612 fullpaths = opts['full']
2612 fullpaths = opts['full']
2613 files, dirs = set(), set()
2613 files, dirs = set(), set()
2614 adddir, addfile = dirs.add, files.add
2614 adddir, addfile = dirs.add, files.add
2615 for f, st in dirstate.iteritems():
2615 for f, st in dirstate.iteritems():
2616 if f.startswith(spec) and st[0] in acceptable:
2616 if f.startswith(spec) and st[0] in acceptable:
2617 if fixpaths:
2617 if fixpaths:
2618 f = f.replace('/', os.sep)
2618 f = f.replace('/', os.sep)
2619 if fullpaths:
2619 if fullpaths:
2620 addfile(f)
2620 addfile(f)
2621 continue
2621 continue
2622 s = f.find(os.sep, speclen)
2622 s = f.find(os.sep, speclen)
2623 if s >= 0:
2623 if s >= 0:
2624 adddir(f[:s])
2624 adddir(f[:s])
2625 else:
2625 else:
2626 addfile(f)
2626 addfile(f)
2627 return files, dirs
2627 return files, dirs
2628
2628
2629 acceptable = ''
2629 acceptable = ''
2630 if opts['normal']:
2630 if opts['normal']:
2631 acceptable += 'nm'
2631 acceptable += 'nm'
2632 if opts['added']:
2632 if opts['added']:
2633 acceptable += 'a'
2633 acceptable += 'a'
2634 if opts['removed']:
2634 if opts['removed']:
2635 acceptable += 'r'
2635 acceptable += 'r'
2636 cwd = repo.getcwd()
2636 cwd = repo.getcwd()
2637 if not specs:
2637 if not specs:
2638 specs = ['.']
2638 specs = ['.']
2639
2639
2640 files, dirs = set(), set()
2640 files, dirs = set(), set()
2641 for spec in specs:
2641 for spec in specs:
2642 f, d = complete(spec, acceptable or 'nmar')
2642 f, d = complete(spec, acceptable or 'nmar')
2643 files.update(f)
2643 files.update(f)
2644 dirs.update(d)
2644 dirs.update(d)
2645 files.update(dirs)
2645 files.update(dirs)
2646 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2646 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2647 ui.write('\n')
2647 ui.write('\n')
2648
2648
2649 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2649 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2650 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2650 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2651 '''access the pushkey key/value protocol
2651 '''access the pushkey key/value protocol
2652
2652
2653 With two args, list the keys in the given namespace.
2653 With two args, list the keys in the given namespace.
2654
2654
2655 With five args, set a key to new if it currently is set to old.
2655 With five args, set a key to new if it currently is set to old.
2656 Reports success or failure.
2656 Reports success or failure.
2657 '''
2657 '''
2658
2658
2659 target = hg.peer(ui, {}, repopath)
2659 target = hg.peer(ui, {}, repopath)
2660 if keyinfo:
2660 if keyinfo:
2661 key, old, new = keyinfo
2661 key, old, new = keyinfo
2662 r = target.pushkey(namespace, key, old, new)
2662 r = target.pushkey(namespace, key, old, new)
2663 ui.status(str(r) + '\n')
2663 ui.status(str(r) + '\n')
2664 return not r
2664 return not r
2665 else:
2665 else:
2666 for k, v in sorted(target.listkeys(namespace).iteritems()):
2666 for k, v in sorted(target.listkeys(namespace).iteritems()):
2667 ui.write("%s\t%s\n" % (k.encode('string-escape'),
2667 ui.write("%s\t%s\n" % (k.encode('string-escape'),
2668 v.encode('string-escape')))
2668 v.encode('string-escape')))
2669
2669
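# Illustrative sketch (not part of commands.py): lists the 'bookmarks'
# pushkey namespace of a peer via the same listkeys() call used above;
# 'bookmarks' is a standard namespace, the helper name is hypothetical.
def _listpeerbookmarks(ui, repopath):
    target = hg.peer(ui, {}, repopath)
    for k, v in sorted(target.listkeys('bookmarks').iteritems()):
        ui.write("%s\t%s\n" % (k, v))
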
2670 @command('debugpvec', [], _('A B'))
2670 @command('debugpvec', [], _('A B'))
2671 def debugpvec(ui, repo, a, b=None):
2671 def debugpvec(ui, repo, a, b=None):
2672 ca = scmutil.revsingle(repo, a)
2672 ca = scmutil.revsingle(repo, a)
2673 cb = scmutil.revsingle(repo, b)
2673 cb = scmutil.revsingle(repo, b)
2674 pa = pvec.ctxpvec(ca)
2674 pa = pvec.ctxpvec(ca)
2675 pb = pvec.ctxpvec(cb)
2675 pb = pvec.ctxpvec(cb)
2676 if pa == pb:
2676 if pa == pb:
2677 rel = "="
2677 rel = "="
2678 elif pa > pb:
2678 elif pa > pb:
2679 rel = ">"
2679 rel = ">"
2680 elif pa < pb:
2680 elif pa < pb:
2681 rel = "<"
2681 rel = "<"
2682 elif pa | pb:
2682 elif pa | pb:
2683 rel = "|"
2683 rel = "|"
2684 ui.write(_("a: %s\n") % pa)
2684 ui.write(_("a: %s\n") % pa)
2685 ui.write(_("b: %s\n") % pb)
2685 ui.write(_("b: %s\n") % pb)
2686 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2686 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2687 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
2687 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
2688 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
2688 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
2689 pa.distance(pb), rel))
2689 pa.distance(pb), rel))
2690
2690
2691 @command('debugrebuilddirstate|debugrebuildstate',
2691 @command('debugrebuilddirstate|debugrebuildstate',
2692 [('r', 'rev', '', _('revision to rebuild to'), _('REV'))],
2692 [('r', 'rev', '', _('revision to rebuild to'), _('REV'))],
2693 _('[-r REV]'))
2693 _('[-r REV]'))
2694 def debugrebuilddirstate(ui, repo, rev):
2694 def debugrebuilddirstate(ui, repo, rev):
2695 """rebuild the dirstate as it would look like for the given revision
2695 """rebuild the dirstate as it would look like for the given revision
2696
2696
2697 If no revision is specified the first current parent will be used.
2697 If no revision is specified the first current parent will be used.
2698
2698
2699 The dirstate will be set to the files of the given revision.
2699 The dirstate will be set to the files of the given revision.
2700 The actual working directory content or existing dirstate
2700 The actual working directory content or existing dirstate
2701 information such as adds or removes is not considered.
2701 information such as adds or removes is not considered.
2702
2702
2703 One use of this command is to make the next :hg:`status` invocation
2703 One use of this command is to make the next :hg:`status` invocation
2704 check the actual file content.
2704 check the actual file content.
2705 """
2705 """
2706 ctx = scmutil.revsingle(repo, rev)
2706 ctx = scmutil.revsingle(repo, rev)
2707 wlock = repo.wlock()
2707 wlock = repo.wlock()
2708 try:
2708 try:
2709 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
2709 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
2710 finally:
2710 finally:
2711 wlock.release()
2711 wlock.release()

@command('debugrename',
    [('r', 'rev', '', _('revision to debug'), _('REV'))],
    _('[-r REV] FILE'))
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    ctx = scmutil.revsingle(repo, opts.get('rev'))
    m = scmutil.match(ctx, (file1,) + pats, opts)
    for abs in ctx.walk(m):
        fctx = ctx[abs]
        o = fctx.filelog().renamed(fctx.filenode())
        rel = m.rel(abs)
        if o:
            ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
        else:
            ui.write(_("%s not renamed\n") % rel)
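    # Illustrative usage (not part of the original source; the file name is
    # hypothetical):
    #
    #   $ hg debugrename -r . src/new_name.py
    #
    # prints either "<file> renamed from <oldpath>:<filenode hex>" or
    # "<file> not renamed", matching the two ui.write() branches above.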

@command('debugrevlog',
    [('c', 'changelog', False, _('open changelog')),
     ('m', 'manifest', False, _('open manifest')),
     ('d', 'dump', False, _('dump index data'))],
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)

    if opts.get("dump"):
        numrevs = len(r)
        ui.write("# rev p1rev p2rev start end deltastart base p1 p2"
                 " rawsize totalsize compression heads chainlen\n")
        ts = 0
        heads = set()

        for rev in xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                     "%11d %5d %8d\n" %
                     (rev, p1, p2, r.start(rev), r.end(rev),
                      r.start(dbase), r.start(cbase),
                      r.start(p1), r.start(p2),
                      rs, ts, ts / r.end(rev), len(heads), clen))
        return 0

    v = r.version
    format = v & 0xFFFF
    flags = []
    gdelta = False
    if v & revlog.REVLOGNGINLINEDATA:
        flags.append('inline')
    if v & revlog.REVLOGGENERALDELTA:
        gdelta = True
        flags.append('generaldelta')
    if not flags:
        flags = ['(none)']

    nummerges = 0
    numfull = 0
    numprev = 0
    nump1 = 0
    nump2 = 0
    numother = 0
    nump1prev = 0
    nump2prev = 0
    chainlengths = []

    datasize = [None, 0, 0L]
    fullsize = [None, 0, 0L]
    deltasize = [None, 0, 0L]

    def addsize(size, l):
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            chainlengths.append(0)
            numfull += 1
            addsize(size, fullsize)
        else:
            chainlengths.append(chainlengths[delta] + 1)
            addsize(size, deltasize)
            if delta == rev - 1:
                numprev += 1
                if delta == p1:
                    nump1prev += 1
                elif delta == p2:
                    nump2prev += 1
            elif delta == p1:
                nump1 += 1
            elif delta == p2:
                nump2 += 1
            elif delta != nullrev:
                numother += 1

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    fullsize[2] /= numfull
    deltatotal = deltasize[2]
    if numrevs - numfull > 0:
        deltasize[2] /= numrevs - numfull
    totalsize = fulltotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    compratio = totalrawsize / totalsize

    basedfmtstr = '%%%dd\n'
    basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        return basedfmtstr % len(str(max))
    def pcfmtstr(max, padding=0):
        return basepcfmtstr % (len(str(max)), ' ' * padding)

    def pcfmt(value, total):
        return (value, 100 * float(value) / total)
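    # Worked example of the helpers above (illustrative, not in the original
    # source): with totalsize == 1000, pcfmtstr(1000) yields the format
    # '%4d (%5.2f%%)\n', and pcfmt(250, 1000) returns (250, 25.0), so the
    # rendered line reads " 250 (25.00%)".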

    ui.write(('format : %d\n') % format)
    ui.write(('flags : %s\n') % ', '.join(flags))

    ui.write('\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
    ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
    ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
    ui.write(('revision size : ') + fmt2 % totalsize)
    ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
    ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))

    ui.write('\n')
    fmt = dfmtstr(max(avgchainlen, compratio))
    ui.write(('avg chain length : ') + fmt % avgchainlen)
    ui.write(('max chain length : ') + fmt % maxchainlen)
    ui.write(('compression ratio : ') + fmt % compratio)

    if format > 0:
        ui.write('\n')
        ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
                 % tuple(datasize))
        ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
                 % tuple(fullsize))
        ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
                 % tuple(deltasize))

    if numdeltas > 0:
        ui.write('\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
        if numprev > 0:
            ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
                                                            numprev))
            ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
                                                            numprev))
            ui.write((' other : ') + fmt2 % pcfmt(numoprev,
                                                  numprev))
        if gdelta:
            ui.write(('deltas against p1 : ')
                     + fmt % pcfmt(nump1, numdeltas))
            ui.write(('deltas against p2 : ')
                     + fmt % pcfmt(nump2, numdeltas))
            ui.write(('deltas against other : ') + fmt % pcfmt(numother,
                                                               numdeltas))
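    # Illustrative invocations (not part of the original source):
    #
    #   $ hg debugrevlog -c        # statistics for the changelog
    #   $ hg debugrevlog -m        # statistics for the manifest
    #   $ hg debugrevlog FILE      # statistics for one file's revlog
    #   $ hg debugrevlog -c -d     # raw index dump, one row per revision
    #
    # The -c/-m/FILE forms follow the '-c|-m|FILE' synopsis declared above.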

@command('debugrevspec',
    [('', 'optimize', None, _('print parsed tree after optimizing'))],
    ('REVSPEC'))
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use --verbose to print the parsed tree before and after alias
    expansion.
    """
    if ui.verbose:
        tree = revset.parse(expr)[0]
        ui.note(revset.prettyformat(tree), "\n")
        newtree = revset.findaliases(ui, tree)
        if newtree != tree:
            ui.note(revset.prettyformat(newtree), "\n")
        tree = newtree
        newtree = revset.foldconcat(tree)
        if newtree != tree:
            ui.note(revset.prettyformat(newtree), "\n")
        if opts["optimize"]:
            weight, optimizedtree = revset.optimize(newtree, True)
            ui.note("* optimized:\n", revset.prettyformat(optimizedtree), "\n")
    func = revset.match(ui, expr)
    revs = func(repo)
    if ui.verbose:
        ui.note("* set:\n", revset.prettyformatset(revs), "\n")
    for c in revs:
        ui.write("%s\n" % c)
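    # Illustrative usage (not part of the original source; the revset is
    # arbitrary):
    #
    #   $ hg debugrevspec --verbose --optimize "heads(default) and not merge()"
    #
    # With --verbose the parsed tree is shown before and after alias
    # expansion, with --optimize the optimized tree is shown as well, and the
    # matching revision numbers are then printed one per line.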

@command('debugsetparents', [], _('REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care. For example, neither the working directory nor the
    dirstate is updated, so file status may be incorrect after running this
    command.

    Returns 0 on success.
    """

    r1 = scmutil.revsingle(repo, rev1).node()
    r2 = scmutil.revsingle(repo, rev2, 'null').node()

    wlock = repo.wlock()
    try:
        repo.dirstate.beginparentchange()
        repo.setparents(r1, r2)
        repo.dirstate.endparentchange()
    finally:
        wlock.release()
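    # Illustrative usage (not part of the original source; revision numbers
    # are made up):
    #
    #   $ hg debugsetparents 10 12   # pretend the working copy merges 10 and 12
    #   $ hg debugsetparents null    # detach the working copy from history
    #
    # As the docstring warns, the working directory contents and the rest of
    # the dirstate are left untouched.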

@command('debugdirstate|debugstate',
    [('', 'nodates', None, _('do not display the saved mtime')),
     ('', 'datesort', None, _('sort by saved mtime'))],
    _('[OPTION]...'))
def debugstate(ui, repo, nodates=None, datesort=None):
    """show the contents of the current dirstate"""
    timestr = ""
    if datesort:
        keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
    else:
        keyfunc = None # sort by filename
    for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
        if ent[3] == -1:
            timestr = 'unset '
        elif nodates:
            timestr = 'set '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
                                    time.localtime(ent[3]))
        if ent[1] & 020000:
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0777 & ~util.umask)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
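    # Output shape (inferred from the ui.write call above; the example row is
    # hypothetical):
    #
    #   <state> <mode> <size> <mtime> <filename>
    #   n 644        1234 2015-04-01 12:00:00 hgext/rebase.py
    #
    # <state> is the one-character dirstate status (for example 'n' for
    # normal, 'a' for added), <mode> is the octal permission bits or 'lnk'
    # for a symlink, and copies are listed afterwards as
    # "copy: <source> -> <destination>".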

@command('debugsub',
    [('r', 'rev', '',
      _('revision to check'), _('REV'))],
    _('[-r REV] [REV]'))
def debugsub(ui, repo, rev=None):
    ctx = scmutil.revsingle(repo, rev, None)
    for k, v in sorted(ctx.substate.items()):
        ui.write(('path %s\n') % k)
        ui.write((' source %s\n') % v[0])
        ui.write((' revision %s\n') % v[1])

@command('debugsuccessorssets',
    [],
    _('[REV]'))
def debugsuccessorssets(ui, repo, *revs):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors is called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    ctx2str = str
    node2str = short
    if ui.debug():
        def ctx2str(ctx):
            return ctx.hex()
        node2str = hex
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write('%s\n' % ctx2str(ctx))
        for succsset in obsolete.successorssets(repo, ctx.node(), cache):
            if succsset:
                ui.write(' ')
                ui.write(node2str(succsset[0]))
                for node in succsset[1:]:
                    ui.write(' ')
                    ui.write(node2str(node))
            ui.write('\n')
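    # Minimal sketch of driving the same API from Python (assumes an existing
    # `repo` object; mirrors the call made in the loop above):
    #
    #   from mercurial import obsolete
    #   cache = {}
    #   for succsset in obsolete.successorssets(repo, repo['.'].node(), cache):
    #       print [short(node) for node in succsset]
    #
    # Each successors set is a list of successor node ids; getting no sets at
    # all means the changeset was pruned, as described in the docstring.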

@command('debugwalk', walkopts, _('[OPTION]... [FILE]...'), inferrepo=True)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    m = scmutil.match(repo[None], pats, opts)
    items = list(repo.walk(m))
    if not items:
        return
    f = lambda fn: fn
    if ui.configbool('ui', 'slash') and os.sep != '/':
        f = lambda fn: util.normpath(fn)
    fmt = 'f %%-%ds %%-%ds %%s' % (
        max([len(abs) for abs in items]),
        max([len(m.rel(abs)) for abs in items]))
    for abs in items:
        line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
        ui.write("%s\n" % line.rstrip())
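    # Worked example of the format string above (illustrative): with a longest
    # repository path of 11 characters and a longest relative path of 7, fmt
    # becomes 'f %-11s %-7s %s', so each line shows the literal 'f', the
    # repo-relative path, the cwd-relative path, and 'exact' when the file was
    # named explicitly rather than matched by a pattern.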

@command('debugwireargs',
    [('', 'three', '', 'three'),
     ('', 'four', '', 'four'),
     ('', 'five', '', 'five'),
     ] + remoteopts,
    _('REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True)
def debugwireargs(ui, repopath, *vals, **opts):
    repo = hg.peer(ui, opts, repopath)
    for opt in remoteopts:
        del opts[opt[1]]
    args = {}
    for k, v in opts.iteritems():
        if v:
            args[k] = v
    # run twice to check that we don't mess up the stream for the next command
    res1 = repo.debugwireargs(*vals, **args)
    res2 = repo.debugwireargs(*vals, **args)
    ui.write("%s\n" % res1)
    if res1 != res2:
        ui.warn("%s\n" % res2)

@command('^diff',
    [('r', 'rev', [], _('revision'), _('REV')),
     ('c', 'change', '', _('change made by revision'), _('REV'))
     ] + diffopts + diffopts2 + walkopts + subrepoopts,
    _('[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...'),
    inferrepo=True)
def diff(ui, repo, *pats, **opts):
    """diff repository (or selected files)

    Show differences between revisions for the specified files.

    Differences between files are shown using the unified diff format.

    .. note::

       diff may generate unexpected results for merges, as it will
       default to comparing against the working directory's first
       parent changeset if no revisions are specified.

    When two revision arguments are given, then changes are shown
    between those revisions. If only one revision is specified then
    that revision is compared to the working directory, and, when no
    revisions are specified, the working directory files are compared
    to its parent.

    Alternatively you can specify -c/--change with a revision to see
    the changes in that changeset relative to its first parent.

    Without the -a/--text option, diff will avoid generating diffs of
    files it detects as binary. With -a, diff will generate a diff
    anyway, probably with undesirable results.

    Use the -g/--git option to generate diffs in the git extended diff
    format. For more information, read :hg:`help diffs`.

    .. container:: verbose

      Examples:

      - compare a file in the current working directory to its parent::

          hg diff foo.c

      - compare two historical versions of a directory, with rename info::

          hg diff --git -r 1.0:1.2 lib/

      - get change stats relative to the last change on some date::

          hg diff --stat -r "date('may 2')"

      - diff all newly-added files that contain a keyword::

          hg diff "set:added() and grep(GNU)"

      - compare a revision and its parents::

          hg diff -c 9353         # compare against first parent
          hg diff -r 9353^:9353   # same using revset syntax
          hg diff -r 9353^2:9353  # compare against the second parent

    Returns 0 on success.
    """

    revs = opts.get('rev')
    change = opts.get('change')
    stat = opts.get('stat')
    reverse = opts.get('reverse')

    if revs and change:
        msg = _('cannot specify --rev and --change at the same time')
        raise util.Abort(msg)
    elif change:
        node2 = scmutil.revsingle(repo, change, None).node()
        node1 = repo[node2].p1().node()
    else:
        node1, node2 = scmutil.revpair(repo, revs)

    if reverse:
        node1, node2 = node2, node1

    diffopts = patch.diffallopts(ui, opts)
    m = scmutil.match(repo[node2], pats, opts)
    cmdutil.diffordiffstat(ui, repo, diffopts, node1, node2, m, stat=stat,
                           listsubrepos=opts.get('subrepos'),
                           root=opts.get('root'))
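    # Note on --reverse (grounded in the node1/node2 swap above; the
    # invocation is illustrative):
    #
    #   $ hg diff --reverse -r REV1 -r REV2
    #
    # produces the same hunks as `hg diff -r REV2 -r REV1`, which is handy
    # for previewing what backing a change out would look like.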

@command('^export',
    [('o', 'output', '',
      _('print output to file with formatted name'), _('FORMAT')),
     ('', 'switch-parent', None, _('diff against the second parent')),
     ('r', 'rev', [], _('revisions to export'), _('REV')),
     ] + diffopts,
    _('[OPTION]... [-o OUTFILESPEC] [-r] [REV]...'))
def export(ui, repo, *changesets, **opts):
    """dump the header and diffs for one or more changesets

    Print the changeset header and diffs for one or more revisions.
    If no revision is given, the parent of the working directory is used.

    The information shown in the changeset header is: author, date,
    branch name (if non-default), changeset hash, parent(s) and commit
    comment.

    .. note::

       export may generate unexpected diff output for merge
       changesets, as it will compare the merge changeset against its
       first parent only.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are as follows:

    :``%%``: literal "%" character
    :``%H``: changeset hash (40 hexadecimal digits)
    :``%N``: number of patches being generated
    :``%R``: changeset revision number
    :``%b``: basename of the exporting repository
    :``%h``: short-form changeset hash (12 hexadecimal digits)
    :``%m``: first line of the commit message (only alphanumeric characters)
    :``%n``: zero-padded sequence number, starting at 1
    :``%r``: zero-padded changeset revision number

    Without the -a/--text option, export will avoid generating diffs
    of files it detects as binary. With -a, export will generate a
    diff anyway, probably with undesirable results.

    Use the -g/--git option to generate diffs in the git extended diff
    format. See :hg:`help diffs` for more information.

    With the --switch-parent option, the diff will be against the
    second parent. This can be useful for reviewing a merge.

    .. container:: verbose

      Examples:

      - use export and import to transplant a bugfix to the current
        branch::

          hg export -r 9353 | hg import -

      - export all the changesets between two revisions to a file with
        rename information::

          hg export --git -r 123:150 > changes.txt

      - split outgoing changes into a series of patches with
        descriptive names::

          hg export -r "outgoing()" -o "%n-%m.patch"

    Returns 0 on success.
    """
    changesets += tuple(opts.get('rev', []))
    if not changesets:
        changesets = ['.']
    revs = scmutil.revrange(repo, changesets)
    if not revs:
        raise util.Abort(_("export requires at least one changeset"))
    if len(revs) > 1:
        ui.note(_('exporting patches:\n'))
    else:
        ui.note(_('exporting patch:\n'))
    cmdutil.export(repo, revs, template=opts.get('output'),
                   switch_parent=opts.get('switch_parent'),
                   opts=patch.diffallopts(ui, opts))
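    # Worked example of the -o format string (illustrative; the series length
    # and commit message are made up). Exporting a series of 12 patches whose
    # third entry has the summary "Fix frob handling",
    #
    #   hg export -r "outgoing()" -o "%n-%m.patch"
    #
    # names that entry something like "03-Fix_frob_handling.patch": %n is the
    # zero-padded position in the series, and %m is the first line of the
    # message with non-alphanumeric characters normalized (the exact mapping
    # is an assumption; the docstring above only promises alphanumerics).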

@command('files',
    [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
     ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
     ] + walkopts + formatteropts + subrepoopts,
    _('[OPTION]... [PATTERN]...'))
def files(ui, repo, *pats, **opts):
    """list tracked files

    Print files under Mercurial control in the working directory or
    specified revision whose names match the given patterns (excluding
    removed files).

    If no patterns are given to match, this command prints the names
    of all files under Mercurial control in the working directory.

    .. container:: verbose

      Examples:

      - list all files under the current directory::

          hg files .

      - show sizes and flags for the current revision::

          hg files -vr .

      - list all files named README::

          hg files -I "**/README"

      - list all binary files::

          hg files "set:binary()"

      - find files containing a regular expression::

          hg files "set:grep('bob')"

      - search tracked file contents with xargs and grep::

          hg files -0 | xargs -0 grep foo

    See :hg:`help patterns` and :hg:`help filesets` for more information
    on specifying file patterns.

    Returns 0 if a match is found, 1 otherwise.

    """
    ctx = scmutil.revsingle(repo, opts.get('rev'), None)

    end = '\n'
    if opts.get('print0'):
        end = '\0'
    fm = ui.formatter('files', opts)
    fmt = '%s' + end

    m = scmutil.match(ctx, pats, opts)
    ret = cmdutil.files(ui, ctx, m, fm, fmt, opts.get('subrepos'))

    fm.end()

    return ret

@command('^forget', walkopts, _('[OPTION]... FILE...'), inferrepo=True)
def forget(ui, repo, *pats, **opts):
    """forget the specified files on the next commit

    Mark the specified files so they will no longer be tracked
    after the next commit.

    This only removes files from the current branch, not from the
    entire project history, and it does not delete them from the
    working directory.

    To undo a forget before the next commit, see :hg:`add`.

    .. container:: verbose

      Examples:

      - forget newly-added binary files::

          hg forget "set:added() and binary()"

      - forget files that would be excluded by .hgignore::

          hg forget "set:hgignore()"

    Returns 0 on success.
    """

    if not pats:
        raise util.Abort(_('no files specified'))

    m = scmutil.match(repo[None], pats, opts)
    rejected = cmdutil.forget(ui, repo, m, prefix="", explicitonly=False)[0]
    return rejected and 1 or 0

@command(
    'graft',
    [('r', 'rev', [], _('revisions to graft'), _('REV')),
     ('c', 'continue', False, _('resume interrupted graft')),
     ('e', 'edit', False, _('invoke editor on commit messages')),
     ('', 'log', None, _('append graft info to log message')),
     ('f', 'force', False, _('force graft')),
     ('D', 'currentdate', False,
      _('record the current date as commit date')),
     ('U', 'currentuser', False,
      _('record the current user as committer'), _('DATE'))]
    + commitopts2 + mergetoolopts + dryrunopts,
    _('[OPTION]... [-r] REV...'))
def graft(ui, repo, *revs, **opts):
    '''copy changes from other branches onto the current branch

    This command uses Mercurial's merge logic to copy individual
    changes from other branches without merging branches in the
    history graph. This is sometimes known as 'backporting' or
    'cherry-picking'. By default, graft will copy user, date, and
    description from the source changesets.

    Changesets that are ancestors of the current revision, that have
    already been grafted, or that are merges will be skipped.

    If --log is specified, log messages will have a comment appended
    of the form::

      (grafted from CHANGESETHASH)

    If --force is specified, revisions will be grafted even if they
    are already ancestors of or have been grafted to the destination.
    This is useful when the revisions have since been backed out.

    If a graft merge results in conflicts, the graft process is
    interrupted so that the current merge can be manually resolved.
    Once all conflicts are addressed, the graft process can be
    continued with the -c/--continue option.

    .. note::

       The -c/--continue option does not reapply earlier options, except
       for --force.

    .. container:: verbose

      Examples:

      - copy a single change to the stable branch and edit its description::

          hg update stable
          hg graft --edit 9393

      - graft a range of changesets with one exception, updating dates::

          hg graft -D "2085::2093 and not 2091"

      - continue a graft after resolving conflicts::

          hg graft -c

      - show the source of a grafted changeset::

          hg log --debug -r .

    See :hg:`help revisions` and :hg:`help revsets` for more about
    specifying revisions.

    Returns 0 on successful completion.
    '''

    revs = list(revs)
    revs.extend(opts['rev'])

    if not opts.get('user') and opts.get('currentuser'):
        opts['user'] = ui.username()
    if not opts.get('date') and opts.get('currentdate'):
        opts['date'] = "%d %d" % util.makedate()

    editor = cmdutil.getcommiteditor(editform='graft', **opts)

    cont = False
    if opts['continue']:
        cont = True
        if revs:
            raise util.Abort(_("can't specify --continue and revisions"))
        # read in unfinished revisions
        try:
            nodes = repo.vfs.read('graftstate').splitlines()
            revs = [repo[node].rev() for node in nodes]
        except IOError, inst:
            if inst.errno != errno.ENOENT:
                raise
            raise util.Abort(_("no graft state found, can't continue"))
    else:
        cmdutil.checkunfinished(repo)
        cmdutil.bailifchanged(repo)
        if not revs:
            raise util.Abort(_('no revisions specified'))
        revs = scmutil.revrange(repo, revs)

    skipped = set()
    # check for merges
    for rev in repo.revs('%ld and merge()', revs):
        ui.warn(_('skipping ungraftable merge revision %s\n') % rev)
        skipped.add(rev)
    revs = [r for r in revs if r not in skipped]
    if not revs:
        return -1

    # Don't check in the --continue case, in effect retaining --force across
    # --continues. That's because without --force, any revisions we decided to
    # skip would have been filtered out here, so they wouldn't have made their
    # way to the graftstate. With --force, any revisions we would have otherwise
    # skipped would not have been filtered out, and if they hadn't been applied
    # already, they'd have been in the graftstate.
    if not (cont or opts.get('force')):
        # check for ancestors of dest branch
        crev = repo['.'].rev()
        ancestors = repo.changelog.ancestors([crev], inclusive=True)
        # Cannot use x.remove(y) on smart set, this has to be a list.
        # XXX make this lazy in the future
        revs = list(revs)
        # don't mutate while iterating, create a copy
        for rev in list(revs):
            if rev in ancestors:
                ui.warn(_('skipping ancestor revision %d:%s\n') %
                        (rev, repo[rev]))
                # XXX remove on list is slow
                revs.remove(rev)
        if not revs:
            return -1

        # analyze revs for earlier grafts
        ids = {}
        for ctx in repo.set("%ld", revs):
            ids[ctx.hex()] = ctx.rev()
            n = ctx.extra().get('source')
            if n:
                ids[n] = ctx.rev()

        # check ancestors for earlier grafts
        ui.debug('scanning for duplicate grafts\n')

        for rev in repo.changelog.findmissingrevs(revs, [crev]):
            ctx = repo[rev]
            n = ctx.extra().get('source')
            if n in ids:
                try:
                    r = repo[n].rev()
                except error.RepoLookupError:
                    r = None
                if r in revs:
                    ui.warn(_('skipping revision %d:%s '
                              '(already grafted to %d:%s)\n')
                            % (r, repo[r], rev, ctx))
                    revs.remove(r)
                elif ids[n] in revs:
                    if r is None:
                        ui.warn(_('skipping already grafted revision %d:%s '
                                  '(%d:%s also has unknown origin %s)\n')
                                % (ids[n], repo[ids[n]], rev, ctx, n[:12]))
                    else:
                        ui.warn(_('skipping already grafted revision %d:%s '
                                  '(%d:%s also has origin %d:%s)\n')
                                % (ids[n], repo[ids[n]], rev, ctx, r, n[:12]))
                    revs.remove(ids[n])
            elif ctx.hex() in ids:
                r = ids[ctx.hex()]
                ui.warn(_('skipping already grafted revision %d:%s '
                          '(was grafted from %d:%s)\n') %
                        (r, repo[r], rev, ctx))
                revs.remove(r)
        if not revs:
            return -1

    wlock = repo.wlock()
    try:
        for pos, ctx in enumerate(repo.set("%ld", revs)):
            desc = '%d:%s "%s"' % (ctx.rev(), ctx,
                                   ctx.description().split('\n', 1)[0])
            names = repo.nodetags(ctx.node()) + repo.nodebookmarks(ctx.node())
            if names:
                desc += ' (%s)' % ' '.join(names)
            ui.status(_('grafting %s\n') % desc)
            if opts.get('dry_run'):
                continue

            source = ctx.extra().get('source')
            extra = {}
            if source:
                extra['source'] = source
                extra['intermediate-source'] = ctx.hex()
            else:
                extra['source'] = ctx.hex()
            user = ctx.user()
            if opts.get('user'):
                user = opts['user']
            date = ctx.date()
            if opts.get('date'):
                date = opts['date']
            message = ctx.description()
            if opts.get('log'):
                message += '\n(grafted from %s)' % ctx.hex()

            # we don't merge the first commit when continuing
            if not cont:
                # perform the graft merge with p1(rev) as 'ancestor'
                try:
                    # ui.forcemerge is an internal variable, do not document
                    repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                                      'graft')
                    stats = mergemod.graft(repo, ctx, ctx.p1(),
                                           ['local', 'graft'])
                finally:
                    repo.ui.setconfig('ui', 'forcemerge', '', 'graft')
                # report any conflicts
                if stats and stats[3] > 0:
                    # write out state for --continue
                    nodelines = [repo[rev].hex() + "\n" for rev in revs[pos:]]
                    repo.vfs.write('graftstate', ''.join(nodelines))
                    raise util.Abort(
                        _("unresolved conflicts, can't continue"),
                        hint=_('use hg resolve and hg graft --continue'))
            else:
                cont = False

            # commit
            node = repo.commit(text=message, user=user,
                               date=date, extra=extra, editor=editor)
            if node is None:
                ui.warn(
                    _('note: graft of %d:%s created no changes to commit\n') %
                    (ctx.rev(), ctx))
    finally:
        wlock.release()

    # remove state when we complete successfully
    if not opts.get('dry_run'):
        util.unlinkpath(repo.join('graftstate'), ignoremissing=True)

    return 0
3601
3601
3602 @command('grep',
3602 @command('grep',
3603 [('0', 'print0', None, _('end fields with NUL')),
3603 [('0', 'print0', None, _('end fields with NUL')),
3604 ('', 'all', None, _('print all revisions that match')),
3604 ('', 'all', None, _('print all revisions that match')),
3605 ('a', 'text', None, _('treat all files as text')),
3605 ('a', 'text', None, _('treat all files as text')),
3606 ('f', 'follow', None,
3606 ('f', 'follow', None,
3607 _('follow changeset history,'
3607 _('follow changeset history,'
3608 ' or file history across copies and renames')),
3608 ' or file history across copies and renames')),
3609 ('i', 'ignore-case', None, _('ignore case when matching')),
3609 ('i', 'ignore-case', None, _('ignore case when matching')),
3610 ('l', 'files-with-matches', None,
3610 ('l', 'files-with-matches', None,
3611 _('print only filenames and revisions that match')),
3611 _('print only filenames and revisions that match')),
3612 ('n', 'line-number', None, _('print matching line numbers')),
3612 ('n', 'line-number', None, _('print matching line numbers')),
3613 ('r', 'rev', [],
3613 ('r', 'rev', [],
3614 _('only search files changed within revision range'), _('REV')),
3614 _('only search files changed within revision range'), _('REV')),
3615 ('u', 'user', None, _('list the author (long with -v)')),
3615 ('u', 'user', None, _('list the author (long with -v)')),
3616 ('d', 'date', None, _('list the date (short with -q)')),
3616 ('d', 'date', None, _('list the date (short with -q)')),
3617 ] + walkopts,
3617 ] + walkopts,
3618 _('[OPTION]... PATTERN [FILE]...'),
3618 _('[OPTION]... PATTERN [FILE]...'),
3619 inferrepo=True)
3619 inferrepo=True)
3620 def grep(ui, repo, pattern, *pats, **opts):
3620 def grep(ui, repo, pattern, *pats, **opts):
3621 """search for a pattern in specified files and revisions
3621 """search for a pattern in specified files and revisions
3622
3622
3623 Search revisions of files for a regular expression.
3623 Search revisions of files for a regular expression.
3624
3624
3625 This command behaves differently than Unix grep. It only accepts
3625 This command behaves differently than Unix grep. It only accepts
3626 Python/Perl regexps. It searches repository history, not the
3626 Python/Perl regexps. It searches repository history, not the
3627 working directory. It always prints the revision number in which a
3627 working directory. It always prints the revision number in which a
3628 match appears.
3628 match appears.
3629
3629
3630 By default, grep only prints output for the first revision of a
3630 By default, grep only prints output for the first revision of a
3631 file in which it finds a match. To get it to print every revision
3631 file in which it finds a match. To get it to print every revision
3632 that contains a change in match status ("-" for a match that
3632 that contains a change in match status ("-" for a match that
3633 becomes a non-match, or "+" for a non-match that becomes a match),
3633 becomes a non-match, or "+" for a non-match that becomes a match),
3634 use the --all flag.
3634 use the --all flag.
3635
3635
3636 Returns 0 if a match is found, 1 otherwise.
3636 Returns 0 if a match is found, 1 otherwise.
3637 """
3637 """
3638 reflags = re.M
3638 reflags = re.M
3639 if opts.get('ignore_case'):
3639 if opts.get('ignore_case'):
3640 reflags |= re.I
3640 reflags |= re.I
3641 try:
3641 try:
3642 regexp = util.re.compile(pattern, reflags)
3642 regexp = util.re.compile(pattern, reflags)
3643 except re.error, inst:
3643 except re.error, inst:
3644 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
3644 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
3645 return 1
3645 return 1
3646 sep, eol = ':', '\n'
3646 sep, eol = ':', '\n'
3647 if opts.get('print0'):
3647 if opts.get('print0'):
3648 sep = eol = '\0'
3648 sep = eol = '\0'
3649
3649
3650 getfile = util.lrucachefunc(repo.file)
3650 getfile = util.lrucachefunc(repo.file)
3651
3651
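    # matchlines() scans a file body and yields, for each regexp match, the
    # 1-based line number, the match's column range within that line, and
    # the text of the matching line.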
    def matchlines(body):
        begin = 0
        linenum = 0
        while begin < len(body):
            match = regexp.search(body, begin)
            if not match:
                break
            mstart, mend = match.span()
            linenum += body.count('\n', begin, mstart) + 1
            lstart = body.rfind('\n', begin, mstart) + 1 or begin
            begin = body.find('\n', mend) + 1 or len(body) + 1
            lend = begin - 1
            yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]

    class linestate(object):
        def __init__(self, line, linenum, colstart, colend):
            self.line = line
            self.linenum = linenum
            self.colstart = colstart
            self.colend = colend

        def __hash__(self):
            return hash((self.linenum, self.line))

        def __eq__(self, other):
            return self.line == other.line

        def __iter__(self):
            yield (self.line[:self.colstart], '')
            yield (self.line[self.colstart:self.colend], 'grep.match')
            rest = self.line[self.colend:]
            while rest != '':
                match = regexp.search(rest)
                if not match:
                    yield (rest, '')
                    break
                mstart, mend = match.span()
                yield (rest[:mstart], '')
                yield (rest[mstart:mend], 'grep.match')
                rest = rest[mend:]

    matches = {}
    copies = {}
    def grepbody(fn, rev, body):
        matches[rev].setdefault(fn, [])
        m = matches[rev][fn]
        for lnum, cstart, cend, line in matchlines(body):
            s = linestate(line, lnum, cstart, cend)
            m.append(s)

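    # difflinestates() compares the matching lines of a file in two
    # revisions and yields ('+', line) or ('-', line) pairs; --all uses
    # this to show where matches appear and disappear.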
    def difflinestates(a, b):
        sm = difflib.SequenceMatcher(None, a, b)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag == 'insert':
                for i in xrange(blo, bhi):
                    yield ('+', b[i])
            elif tag == 'delete':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
            elif tag == 'replace':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
                for i in xrange(blo, bhi):
                    yield ('+', b[i])

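    # display() prints the matches found in one file at one revision,
    # assembling the requested columns (filename, revision, line number,
    # user, date, ...) and labelling them for color output.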
    def display(fn, ctx, pstates, states):
        rev = ctx.rev()
        if ui.quiet:
            datefunc = util.shortdate
        else:
            datefunc = util.datestr
        found = False
        @util.cachefunc
        def binary():
            flog = getfile(fn)
            return util.binary(flog.read(ctx.filenode(fn)))

        if opts.get('all'):
            iter = difflinestates(pstates, states)
        else:
            iter = [('', l) for l in states]
        for change, l in iter:
            cols = [(fn, 'grep.filename'), (str(rev), 'grep.rev')]

            if opts.get('line_number'):
                cols.append((str(l.linenum), 'grep.linenumber'))
            if opts.get('all'):
                cols.append((change, 'grep.change'))
            if opts.get('user'):
                cols.append((ui.shortuser(ctx.user()), 'grep.user'))
            if opts.get('date'):
                cols.append((datefunc(ctx.date()), 'grep.date'))
            for col, label in cols[:-1]:
                ui.write(col, label=label)
                ui.write(sep, label='grep.sep')
            ui.write(cols[-1][0], label=cols[-1][1])
            if not opts.get('files_with_matches'):
                ui.write(sep, label='grep.sep')
                if not opts.get('text') and binary():
                    ui.write(" Binary file matches")
                else:
                    for s, label in l:
                        ui.write(s, label=label)
            ui.write(eol)
            found = True
            if opts.get('files_with_matches'):
                break
        return found

    skip = {}
    revfiles = {}
    matchfn = scmutil.match(repo[None], pats, opts)
    found = False
    follow = opts.get('follow')

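    # prep() is invoked by walkchangerevs() for every revision visited: it
    # records the matches for each candidate file in both the revision and
    # its first parent, so the main loop below can compare the two states.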
    def prep(ctx, fns):
        rev = ctx.rev()
        pctx = ctx.p1()
        parent = pctx.rev()
        matches.setdefault(rev, {})
        matches.setdefault(parent, {})
        files = revfiles.setdefault(rev, [])
        for fn in fns:
            flog = getfile(fn)
            try:
                fnode = ctx.filenode(fn)
            except error.LookupError:
                continue

            copied = flog.renamed(fnode)
            copy = follow and copied and copied[0]
            if copy:
                copies.setdefault(rev, {})[fn] = copy
            if fn in skip:
                if copy:
                    skip[copy] = True
                continue
            files.append(fn)

            if fn not in matches[rev]:
                grepbody(fn, rev, flog.read(fnode))

            pfn = copy or fn
            if pfn not in matches[parent]:
                try:
                    fnode = pctx.filenode(pfn)
                    grepbody(pfn, parent, flog.read(fnode))
                except error.LookupError:
                    pass

    for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
        rev = ctx.rev()
        parent = ctx.p1().rev()
        for fn in sorted(revfiles.get(rev, [])):
            states = matches[rev][fn]
            copy = copies.get(rev, {}).get(fn)
            if fn in skip:
                if copy:
                    skip[copy] = True
                continue
            pstates = matches.get(parent, {}).get(copy or fn, [])
            if pstates or states:
                r = display(fn, ctx, pstates, states)
                found = found or r
                if r and not opts.get('all'):
                    skip[fn] = True
                    if copy:
                        skip[copy] = True
        del matches[rev]
        del revfiles[rev]

    return not found

@command('heads',
    [('r', 'rev', '',
     _('show only heads which are descendants of STARTREV'), _('STARTREV')),
    ('t', 'topo', False, _('show topological heads only')),
    ('a', 'active', False, _('show active branchheads only (DEPRECATED)')),
    ('c', 'closed', False, _('show normal and closed branch heads')),
    ] + templateopts,
    _('[-ct] [-r STARTREV] [REV]...'))
def heads(ui, repo, *branchrevs, **opts):
    """show branch heads

    With no arguments, show all open branch heads in the repository.
    Branch heads are changesets that have no descendants on the
    same branch. They are where development generally takes place and
    are the usual targets for update and merge operations.

    If one or more REVs are given, only open branch heads on the
    branches associated with the specified changesets are shown. This
    means that you can use :hg:`heads .` to see the heads on the
    currently checked-out branch.

    If -c/--closed is specified, also show branch heads marked closed
    (see :hg:`commit --close-branch`).

    If STARTREV is specified, only those heads that are descendants of
    STARTREV will be displayed.

    If -t/--topo is specified, named branch mechanics will be ignored and only
    topological heads (changesets with no children) will be shown.

    Returns 0 if matching heads are found, 1 if not.
    """

    start = None
    if 'rev' in opts:
        start = scmutil.revsingle(repo, opts['rev'], None).node()

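    # with --topo, consider only DAG heads; otherwise collect the heads of
    # every named branch (optionally including closed ones)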
    if opts.get('topo'):
        heads = [repo[h] for h in repo.heads(start)]
    else:
        heads = []
        for branch in repo.branchmap():
            heads += repo.branchheads(branch, start, opts.get('closed'))
        heads = [repo[h] for h in heads]

    if branchrevs:
        branches = set(repo[br].branch() for br in branchrevs)
        heads = [h for h in heads if h.branch() in branches]

    if opts.get('active') and branchrevs:
        dagheads = repo.heads(start)
        heads = [h for h in heads if h.node() in dagheads]

    if branchrevs:
        haveheads = set(h.branch() for h in heads)
        if branches - haveheads:
            headless = ', '.join(b for b in branches - haveheads)
            msg = _('no open branch heads found on branches %s')
            if opts.get('rev'):
                msg += _(' (started at %s)') % opts['rev']
            ui.warn((msg + '\n') % headless)

    if not heads:
        return 1

    heads = sorted(heads, key=lambda x: -x.rev())
    displayer = cmdutil.show_changeset(ui, repo, opts)
    for ctx in heads:
        displayer.show(ctx)
    displayer.close()

@command('help',
    [('e', 'extension', None, _('show only help for extensions')),
    ('c', 'command', None, _('show only help for commands')),
    ('k', 'keyword', '', _('show topics matching keyword')),
    ],
    _('[-ec] [TOPIC]'),
    norepo=True)
def help_(ui, name=None, **opts):
    """show help for a given topic or a help overview

    With no arguments, print a list of commands with short help messages.

    Given a topic, extension, or command name, print help for that
    topic.

    Returns 0 if successful.
    """

    textwidth = min(ui.termwidth(), 80) - 2

    keep = []
    if ui.verbose:
        keep.append('verbose')
    if sys.platform.startswith('win'):
        keep.append('windows')
    elif sys.platform == 'OpenVMS':
        keep.append('vms')
    elif sys.platform == 'plan9':
        keep.append('plan9')
    else:
        keep.append('unix')
        keep.append(sys.platform.lower())

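    # 'keep' lists the conditional help sections (platform, verbosity) that
    # minirst.format() should retain when rendering the help text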
    section = None
    if name and '.' in name:
        name, section = name.split('.', 1)

    text = help.help_(ui, name, **opts)

    formatted, pruned = minirst.format(text, textwidth, keep=keep,
                                       section=section)
    if section and not formatted:
        raise util.Abort(_("help section not found"))

    if 'verbose' in pruned:
        keep.append('omitted')
    else:
        keep.append('notomitted')
    formatted, pruned = minirst.format(text, textwidth, keep=keep,
                                       section=section)
    ui.write(formatted)


@command('identify|id',
    [('r', 'rev', '',
     _('identify the specified revision'), _('REV')),
    ('n', 'num', None, _('show local revision number')),
    ('i', 'id', None, _('show global revision id')),
    ('b', 'branch', None, _('show branch')),
    ('t', 'tags', None, _('show tags')),
    ('B', 'bookmarks', None, _('show bookmarks')),
    ] + remoteopts,
    _('[-nibtB] [-r REV] [SOURCE]'),
    optionalrepo=True)
def identify(ui, repo, source=None, rev=None,
             num=None, id=None, branch=None, tags=None, bookmarks=None, **opts):
    """identify the working directory or specified revision

    Print a summary identifying the repository state at REV using one or
    two parent hash identifiers, followed by a "+" if the working
    directory has uncommitted changes, the branch name (if not default),
    a list of tags, and a list of bookmarks.

    When REV is not given, print a summary of the current state of the
    repository.

    Specifying a path to a repository root or Mercurial bundle will
    cause lookup to operate on that repository/bundle.

    .. container:: verbose

      Examples:

      - generate a build identifier for the working directory::

          hg id --id > build-id.dat

      - find the revision corresponding to a tag::

          hg id -n -r 1.3

      - check the most recent revision of a remote repository::

          hg id -r tip http://selenic.com/hg/

    Returns 0 if successful.
    """

    if not repo and not source:
        raise util.Abort(_("there is no Mercurial repository here "
                           "(.hg not found)"))

    if ui.debugflag:
        hexfunc = hex
    else:
        hexfunc = short
    default = not (num or id or branch or tags or bookmarks)
    output = []
    revs = []

    if source:
        source, branches = hg.parseurl(ui.expandpath(source))
        peer = hg.peer(repo or ui, opts, source) # only pass ui when no repo
        repo = peer.local()
        revs, checkout = hg.addbranchrevs(repo, peer, branches, None)

    if not repo:
        if num or branch or tags:
            raise util.Abort(
                _("can't query remote revision number, branch, or tags"))
        if not rev and revs:
            rev = revs[0]
        if not rev:
            rev = "tip"

        remoterev = peer.lookup(rev)
        if default or id:
            output = [hexfunc(remoterev)]

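        # look up bookmarks over the wire (when the remote advertises the
        # bookmarks namespace) and return the ones pointing at remoterev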
        def getbms():
            bms = []

            if 'bookmarks' in peer.listkeys('namespaces'):
                hexremoterev = hex(remoterev)
                bms = [bm for bm, bmr in peer.listkeys('bookmarks').iteritems()
                       if bmr == hexremoterev]

            return sorted(bms)

        if bookmarks:
            output.extend(getbms())
        elif default and not ui.quiet:
            # multiple bookmarks for a single parent separated by '/'
            bm = '/'.join(getbms())
            if bm:
                output.append(bm)
    else:
        if not rev:
            ctx = repo[None]
            parents = ctx.parents()
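            # a trailing '+' marks uncommitted changes in the working
            # directory or in any subrepository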
            changed = ""
            if default or id or num:
                if (util.any(repo.status())
                    or util.any(ctx.sub(s).dirty() for s in ctx.substate)):
                    changed = '+'
            if default or id:
                output = ["%s%s" %
                          ('+'.join([hexfunc(p.node()) for p in parents]),
                           changed)]
            if num:
                output.append("%s%s" %
                              ('+'.join([str(p.rev()) for p in parents]),
                               changed))
        else:
            ctx = scmutil.revsingle(repo, rev)
            if default or id:
                output = [hexfunc(ctx.node())]
            if num:
                output.append(str(ctx.rev()))

        if default and not ui.quiet:
            b = ctx.branch()
            if b != 'default':
                output.append("(%s)" % b)

            # multiple tags for a single parent separated by '/'
            t = '/'.join(ctx.tags())
            if t:
                output.append(t)

            # multiple bookmarks for a single parent separated by '/'
            bm = '/'.join(ctx.bookmarks())
            if bm:
                output.append(bm)
        else:
            if branch:
                output.append(ctx.branch())

            if tags:
                output.extend(ctx.tags())

            if bookmarks:
                output.extend(ctx.bookmarks())

    ui.write("%s\n" % ' '.join(output))

@command('import|patch',
    [('p', 'strip', 1,
     _('directory strip option for patch. This has the same '
       'meaning as the corresponding patch option'), _('NUM')),
    ('b', 'base', '', _('base path (DEPRECATED)'), _('PATH')),
    ('e', 'edit', False, _('invoke editor on commit messages')),
    ('f', 'force', None,
     _('skip check for outstanding uncommitted changes (DEPRECATED)')),
    ('', 'no-commit', None,
     _("don't commit, just update the working directory")),
    ('', 'bypass', None,
     _("apply patch without touching the working directory")),
    ('', 'partial', None,
     _('commit even if some hunks fail')),
    ('', 'exact', None,
     _('apply patch to the nodes from which it was generated')),
    ('', 'prefix', '',
     _('apply patch to subdirectory'), _('DIR')),
    ('', 'import-branch', None,
     _('use any branch information in patch (implied by --exact)'))] +
    commitopts + commitopts2 + similarityopts,
    _('[OPTION]... PATCH...'))
def import_(ui, repo, patch1=None, *patches, **opts):
    """import an ordered set of patches

    Import a list of patches and commit them individually (unless
    --no-commit is specified).

    Because import first applies changes to the working directory,
    import will abort if there are outstanding changes.

    You can import a patch straight from a mail message. Even patches
    as attachments work (to use the body part, it must have type
    text/plain or text/x-patch). From and Subject headers of the email
    message are used as the default committer and commit message. All
    text/plain body parts before the first diff are added to the commit
    message.

    If the imported patch was generated by :hg:`export`, user and
    description from the patch override values from message headers and
    body. Values given on the command line with -m/--message and -u/--user
    override these.

    If --exact is specified, import will set the working directory to
    the parent of each patch before applying it, and will abort if the
    resulting changeset has a different ID than the one recorded in
    the patch. This may happen due to character set problems or other
    deficiencies in the text patch format.

    Use --bypass to apply and commit patches directly to the
    repository, not touching the working directory. Without --exact,
    patches will be applied on top of the working directory parent
    revision.

    With -s/--similarity, hg will attempt to discover renames and
    copies in the patch in the same way as :hg:`addremove`.

    Use --partial to ensure a changeset will be created from the patch
    even if some hunks fail to apply. Hunks that fail to apply will be
    written to a <target-file>.rej file. Conflicts can then be resolved
    by hand before :hg:`commit --amend` is run to update the created
    changeset. This flag exists to let people import patches that
    partially apply without losing the associated metadata (author,
    date, description, ...). Note that when none of the hunks apply
    cleanly, :hg:`import --partial` will create an empty changeset,
    importing only the patch metadata.

    To read a patch from standard input, use "-" as the patch name. If
    a URL is specified, the patch will be downloaded from it.
    See :hg:`help dates` for a list of formats valid for -d/--date.

    .. container:: verbose

      Examples:

      - import a traditional patch from a website and detect renames::

          hg import -s 80 http://example.com/bugfix.patch

      - import a changeset from an hgweb server::

          hg import http://www.selenic.com/hg/rev/5ca8c111e9aa

      - import all the patches in a Unix-style mbox::

          hg import incoming-patches.mbox

      - attempt to exactly restore an exported changeset (not always
        possible)::

          hg import --exact proposed-fix.patch

    Returns 0 on success, 1 on partial success (see --partial).
    """

    if not patch1:
        raise util.Abort(_('need at least one patch to import'))

    patches = (patch1,) + patches

    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)

    update = not opts.get('bypass')
    if not update and opts.get('no_commit'):
        raise util.Abort(_('cannot use --no-commit with --bypass'))
    try:
        sim = float(opts.get('similarity') or 0)
    except ValueError:
        raise util.Abort(_('similarity must be a number'))
    if sim < 0 or sim > 100:
        raise util.Abort(_('similarity must be between 0 and 100'))
    if sim and not update:
        raise util.Abort(_('cannot use --similarity with --bypass'))
    if opts.get('exact') and opts.get('edit'):
        raise util.Abort(_('cannot use --exact with --edit'))
    if opts.get('exact') and opts.get('prefix'):
        raise util.Abort(_('cannot use --exact with --prefix'))

    if update:
        cmdutil.checkunfinished(repo)
    if (opts.get('exact') or not opts.get('force')) and update:
        cmdutil.bailifchanged(repo)

    base = opts["base"]
    wlock = lock = tr = None
    msgs = []
    ret = 0


    try:
        try:
            wlock = repo.wlock()
            repo.dirstate.beginparentchange()
            if not opts.get('no_commit'):
                lock = repo.lock()
                tr = repo.transaction('import')
            parents = repo.parents()
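            # each argument may be a local file, a URL, or '-' for stdin; a
            # single patch stream can contain several patches, which
            # patch.split() below hands over one at a time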
            for patchurl in patches:
                if patchurl == '-':
                    ui.status(_('applying patch from stdin\n'))
                    patchfile = ui.fin
                    patchurl = 'stdin'      # for error message
                else:
                    patchurl = os.path.join(base, patchurl)
                    ui.status(_('applying %s\n') % patchurl)
                    patchfile = hg.openpath(ui, patchurl)

                haspatch = False
                for hunk in patch.split(patchfile):
                    (msg, node, rej) = cmdutil.tryimportone(ui, repo, hunk,
                                                            parents, opts,
                                                            msgs, hg.clean)
                    if msg:
                        haspatch = True
                        ui.note(msg + '\n')
                    if update or opts.get('exact'):
                        parents = repo.parents()
                    else:
                        parents = [repo[node]]
                    if rej:
                        ui.write_err(_("patch applied partially\n"))
                        ui.write_err(_("(fix the .rej files and run "
                                       "`hg commit --amend`)\n"))
                        ret = 1
                        break

                if not haspatch:
                    raise util.Abort(_('%s: no diffs found') % patchurl)

            if tr:
                tr.close()
            if msgs:
                repo.savecommitmessage('\n* * *\n'.join(msgs))
            repo.dirstate.endparentchange()
            return ret
        except: # re-raises
            # wlock.release() indirectly calls dirstate.write(): since
            # we're crashing, we do not want to change the working dir
            # parent after all, so make sure it writes nothing
            repo.dirstate.invalidate()
            raise
    finally:
        if tr:
            tr.release()
        release(lock, wlock)

@command('incoming|in',
    [('f', 'force', None,
     _('run even if remote repository is unrelated')),
    ('n', 'newest-first', None, _('show newest record first')),
    ('', 'bundle', '',
     _('file to store the bundles into'), _('FILE')),
    ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
    ('B', 'bookmarks', False, _("compare bookmarks")),
    ('b', 'branch', [],
     _('a specific branch you would like to pull'), _('BRANCH')),
    ] + logopts + remoteopts + subrepoopts,
    _('[-p] [-n] [-M] [-f] [-r REV]... [--bundle FILENAME] [SOURCE]'))
def incoming(ui, repo, source="default", **opts):
    """show new changesets found in source

    Show new changesets found in the specified path/URL or the default
    pull location. These are the changesets that would have been pulled
    if a pull had been run at the time you issued this command.

    See pull for valid source format details.

    .. container:: verbose

      With -B/--bookmarks, the result of bookmark comparison between
      local and remote repositories is displayed. With -v/--verbose,
      status is also displayed for each bookmark like below::

        BM1               01234567890a added
        BM2               1234567890ab advanced
        BM3               234567890abc diverged
        BM4               34567890abcd changed

      The action taken locally when pulling depends on the
      status of each bookmark:

      :``added``: pull will create it
      :``advanced``: pull will update it
      :``diverged``: pull will create a divergent bookmark
      :``changed``: result depends on remote changesets

      From the point of view of pulling behavior, bookmarks
      existing only in the remote repository are treated as ``added``,
      even if they are in fact locally deleted.

    .. container:: verbose

      For a remote repository, using --bundle avoids downloading the
      changesets twice if the incoming is followed by a pull.

      Examples:

      - show incoming changes with patches and full description::

          hg incoming -vp

      - show incoming changes excluding merges, store a bundle::

          hg in -vpM --bundle incoming.hg
          hg pull incoming.hg

      - briefly list changes inside a bundle::

          hg in changes.hg -T "{desc|firstline}\\n"

    Returns 0 if there are incoming changes, 1 otherwise.
    """
    if opts.get('graph'):
        cmdutil.checkunsupportedgraphflags([], opts)
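        # with --graph, render the incoming changesets as an ASCII DAG by
        # passing a display callback to hg._incoming()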
        def display(other, chlist, displayer):
            revdag = cmdutil.graphrevs(other, chlist, opts)
            showparents = [ctx.node() for ctx in repo[None].parents()]
            cmdutil.displaygraph(ui, revdag, displayer, showparents,
                                 graphmod.asciiedges)

        hg._incoming(display, lambda: 1, ui, repo, source, opts, buffered=True)
        return 0

    if opts.get('bundle') and opts.get('subrepos'):
        raise util.Abort(_('cannot combine --bundle and --subrepos'))

    if opts.get('bookmarks'):
        source, branches = hg.parseurl(ui.expandpath(source),
                                       opts.get('branch'))
        other = hg.peer(repo, opts, source)
        if 'bookmarks' not in other.listkeys('namespaces'):
            ui.warn(_("remote doesn't support bookmarks\n"))
            return 0
        ui.status(_('comparing with %s\n') % util.hidepassword(source))
        return bookmarks.incoming(ui, repo, other)

    repo._subtoppath = ui.expandpath(source)
    try:
        return hg.incoming(ui, repo, source, opts)
    finally:
        del repo._subtoppath


@command('^init', remoteopts, _('[-e CMD] [--remotecmd CMD] [DEST]'),
    norepo=True)
def init(ui, dest=".", **opts):
    """create a new repository in the given directory

    Initialize a new repository in the given directory. If the given
    directory does not exist, it will be created.

    If no directory is given, the current directory is used.

    It is possible to specify an ``ssh://`` URL as the destination.
    See :hg:`help urls` for more information.

    Returns 0 on success.
    """
    hg.peer(ui, opts, ui.expandpath(dest), create=True)

@command('locate',
    [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
    ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
    ('f', 'fullpath', None, _('print complete paths from the filesystem root')),
    ] + walkopts,
    _('[OPTION]... [PATTERN]...'))
def locate(ui, repo, *pats, **opts):
    """locate files matching specific patterns (DEPRECATED)

    Print files under Mercurial control in the working directory whose
    names match the given patterns.

    By default, this command searches all directories in the working
    directory. To search just the current directory and its
    subdirectories, use "--include .".

    If no patterns are given to match, this command prints the names
    of all files under Mercurial control in the working directory.

    If you want to feed the output of this command into the "xargs"
    command, use the -0 option to both this command and "xargs". This
    will avoid the problem of "xargs" treating single filenames that
    contain whitespace as multiple filenames.

    See :hg:`help files` for a more versatile command.

    Returns 0 if a match is found, 1 otherwise.
    """
    if opts.get('print0'):
        end = '\0'
    else:
        end = '\n'
    rev = scmutil.revsingle(repo, opts.get('rev'), None).node()

    ret = 1
    ctx = repo[rev]
    m = scmutil.match(ctx, pats, opts, default='relglob')
    m.bad = lambda x, y: False

    for abs in ctx.matches(m):
        if opts.get('fullpath'):
            ui.write(repo.wjoin(abs), end)
        else:
            ui.write(((pats and m.rel(abs)) or abs), end)
        ret = 0

    return ret

@command('^log|history',
    [('f', 'follow', None,
     _('follow changeset history, or file history across copies and renames')),
    ('', 'follow-first', None,
     _('only follow the first parent of merge changesets (DEPRECATED)')),
    ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
    ('C', 'copies', None, _('show copied files')),
    ('k', 'keyword', [],
     _('do case-insensitive search for a given text'), _('TEXT')),
    ('r', 'rev', [], _('show the specified revision or revset'), _('REV')),
    ('', 'removed', None, _('include revisions where files were removed')),
    ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
    ('u', 'user', [], _('revisions committed by user'), _('USER')),
    ('', 'only-branch', [],
     _('show only changesets within the given named branch (DEPRECATED)'),
     _('BRANCH')),
    ('b', 'branch', [],
     _('show changesets within the given named branch'), _('BRANCH')),
    ('P', 'prune', [],
     _('do not display revision or any of its ancestors'), _('REV')),
    ] + logopts + walkopts,
    _('[OPTION]... [FILE]'),
    inferrepo=True)
def log(ui, repo, *pats, **opts):
    """show revision history of entire repository or files

    Print the revision history of the specified files or the entire
    project.

    If no revision range is specified, the default is ``tip:0`` unless
    --follow is set, in which case the working directory parent is
    used as the starting revision.

    File history is shown without following rename or copy history of
    files. Use -f/--follow with a filename to follow history across
    renames and copies. --follow without a filename will only show
    ancestors or descendants of the starting revision.

    By default this command prints revision number and changeset id,
    tags, non-trivial parents, user, date and time, and a summary for
    each commit. When the -v/--verbose switch is used, the list of
    changed files and full commit message are shown.

    With --graph the revisions are shown as an ASCII art DAG with the most
    recent changeset at the top.
    'o' is a changeset, '@' is a working directory parent, 'x' is obsolete,
    and '+' represents a fork where the changeset from the lines below is a
4486 and '+' represents a fork where the changeset from the lines below is a
4487 parent of the 'o' merge on the same line.
4487 parent of the 'o' merge on the same line.
4488
4488
4489 .. note::
4489 .. note::
4490
4490
4491 log -p/--patch may generate unexpected diff output for merge
4491 log -p/--patch may generate unexpected diff output for merge
4492 changesets, as it will only compare the merge changeset against
4492 changesets, as it will only compare the merge changeset against
4493 its first parent. Also, only files different from BOTH parents
4493 its first parent. Also, only files different from BOTH parents
4494 will appear in files:.
4494 will appear in files:.
4495
4495
4496 .. note::
4496 .. note::
4497
4497
4498 For performance reasons, log FILE may omit duplicate changes
4498 For performance reasons, log FILE may omit duplicate changes
4499 made on branches and will not show removals or mode changes. To
4499 made on branches and will not show removals or mode changes. To
4500 see all such changes, use the --removed switch.
4500 see all such changes, use the --removed switch.
4501
4501
4502 .. container:: verbose
4502 .. container:: verbose
4503
4503
4504 Some examples:
4504 Some examples:
4505
4505
4506 - changesets with full descriptions and file lists::
4506 - changesets with full descriptions and file lists::
4507
4507
4508 hg log -v
4508 hg log -v
4509
4509
4510 - changesets ancestral to the working directory::
4510 - changesets ancestral to the working directory::
4511
4511
4512 hg log -f
4512 hg log -f
4513
4513
4514 - last 10 commits on the current branch::
4514 - last 10 commits on the current branch::
4515
4515
4516 hg log -l 10 -b .
4516 hg log -l 10 -b .
4517
4517
4518 - changesets showing all modifications of a file, including removals::
4518 - changesets showing all modifications of a file, including removals::
4519
4519
4520 hg log --removed file.c
4520 hg log --removed file.c
4521
4521
4522 - all changesets that touch a directory, with diffs, excluding merges::
4522 - all changesets that touch a directory, with diffs, excluding merges::
4523
4523
4524 hg log -Mp lib/
4524 hg log -Mp lib/
4525
4525
4526 - all revision numbers that match a keyword::
4526 - all revision numbers that match a keyword::
4527
4527
4528 hg log -k bug --template "{rev}\\n"
4528 hg log -k bug --template "{rev}\\n"
4529
4529
4530 - list available log templates::
4530 - list available log templates::
4531
4531
4532 hg log -T list
4532 hg log -T list
4533
4533
4534 - check if a given changeset is included in a tagged release::
4534 - check if a given changeset is included in a tagged release::
4535
4535
4536 hg log -r "a21ccf and ancestor(1.9)"
4536 hg log -r "a21ccf and ancestor(1.9)"
4537
4537
4538 - find all changesets by some user in a date range::
4538 - find all changesets by some user in a date range::
4539
4539
4540 hg log -k alice -d "may 2008 to jul 2008"
4540 hg log -k alice -d "may 2008 to jul 2008"
4541
4541
4542 - summary of all changesets after the last tag::
4542 - summary of all changesets after the last tag::
4543
4543
4544 hg log -r "last(tagged())::" --template "{desc|firstline}\\n"
4544 hg log -r "last(tagged())::" --template "{desc|firstline}\\n"
4545
4545
4546 See :hg:`help dates` for a list of formats valid for -d/--date.
4546 See :hg:`help dates` for a list of formats valid for -d/--date.
4547
4547
4548 See :hg:`help revisions` and :hg:`help revsets` for more about
4548 See :hg:`help revisions` and :hg:`help revsets` for more about
4549 specifying revisions.
4549 specifying revisions.
4550
4550
4551 See :hg:`help templates` for more about pre-packaged styles and
4551 See :hg:`help templates` for more about pre-packaged styles and
4552 specifying custom templates.
4552 specifying custom templates.
4553
4553
4554 Returns 0 on success.
4554 Returns 0 on success.
4555
4555
4556 """
4556 """
4557 if opts.get('follow') and opts.get('rev'):
4557 if opts.get('follow') and opts.get('rev'):
4558 opts['rev'] = [revset.formatspec('reverse(::%lr)', opts.get('rev'))]
4558 opts['rev'] = [revset.formatspec('reverse(::%lr)', opts.get('rev'))]
4559 del opts['follow']
4559 del opts['follow']
4560
4560
4561 if opts.get('graph'):
4561 if opts.get('graph'):
4562 return cmdutil.graphlog(ui, repo, *pats, **opts)
4562 return cmdutil.graphlog(ui, repo, *pats, **opts)
4563
4563
4564 revs, expr, filematcher = cmdutil.getlogrevs(repo, pats, opts)
4564 revs, expr, filematcher = cmdutil.getlogrevs(repo, pats, opts)
4565 limit = cmdutil.loglimit(opts)
4565 limit = cmdutil.loglimit(opts)
4566 count = 0
4566 count = 0
4567
4567
4568 getrenamed = None
4568 getrenamed = None
4569 if opts.get('copies'):
4569 if opts.get('copies'):
4570 endrev = None
4570 endrev = None
4571 if opts.get('rev'):
4571 if opts.get('rev'):
4572 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
4572 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
4573 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
4573 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
4574
4574
4575 displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
4575 displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
4576 for rev in revs:
4576 for rev in revs:
4577 if count == limit:
4577 if count == limit:
4578 break
4578 break
4579 ctx = repo[rev]
4579 ctx = repo[rev]
4580 copies = None
4580 copies = None
4581 if getrenamed is not None and rev:
4581 if getrenamed is not None and rev:
4582 copies = []
4582 copies = []
4583 for fn in ctx.files():
4583 for fn in ctx.files():
4584 rename = getrenamed(fn, rev)
4584 rename = getrenamed(fn, rev)
4585 if rename:
4585 if rename:
4586 copies.append((fn, rename[0]))
4586 copies.append((fn, rename[0]))
4587 if filematcher:
4587 if filematcher:
4588 revmatchfn = filematcher(ctx.rev())
4588 revmatchfn = filematcher(ctx.rev())
4589 else:
4589 else:
4590 revmatchfn = None
4590 revmatchfn = None
4591 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
4591 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
4592 if displayer.flush(rev):
4592 if displayer.flush(rev):
4593 count += 1
4593 count += 1
4594
4594
4595 displayer.close()
4595 displayer.close()
4596
4596
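A note on the --limit handling in the loop above: a revision only counts against the limit when the displayer actually emitted it (``displayer.flush(rev)`` returning a truthy value). A small self-contained sketch of that pattern, with a hypothetical ``should_show`` predicate standing in for the file matcher and displayer::

    def limited(revs, should_show, limit):
        """Yield at most `limit` revisions for which should_show() is true."""
        count = 0
        for rev in revs:
            if count == limit:
                break
            if should_show(rev):        # analogous to displayer.flush(rev)
                yield rev
                count += 1

    # toy usage: the first three even numbers below 20
    print(list(limited(range(20), lambda r: r % 2 == 0, 3)))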
4597 @command('manifest',
4597 @command('manifest',
4598 [('r', 'rev', '', _('revision to display'), _('REV')),
4598 [('r', 'rev', '', _('revision to display'), _('REV')),
4599 ('', 'all', False, _("list files from all revisions"))]
4599 ('', 'all', False, _("list files from all revisions"))]
4600 + formatteropts,
4600 + formatteropts,
4601 _('[-r REV]'))
4601 _('[-r REV]'))
4602 def manifest(ui, repo, node=None, rev=None, **opts):
4602 def manifest(ui, repo, node=None, rev=None, **opts):
4603 """output the current or given revision of the project manifest
4603 """output the current or given revision of the project manifest
4604
4604
4605 Print a list of version controlled files for the given revision.
4605 Print a list of version controlled files for the given revision.
4606 If no revision is given, the first parent of the working directory
4606 If no revision is given, the first parent of the working directory
4607 is used, or the null revision if no revision is checked out.
4607 is used, or the null revision if no revision is checked out.
4608
4608
4609 With -v, print file permissions, symlink and executable bits.
4609 With -v, print file permissions, symlink and executable bits.
4610 With --debug, print file revision hashes.
4610 With --debug, print file revision hashes.
4611
4611
4612 If option --all is specified, the list of all files from all revisions
4612 If option --all is specified, the list of all files from all revisions
4613 is printed. This includes deleted and renamed files.
4613 is printed. This includes deleted and renamed files.
4614
4614
4615 Returns 0 on success.
4615 Returns 0 on success.
4616 """
4616 """
4617
4617
4618 fm = ui.formatter('manifest', opts)
4618 fm = ui.formatter('manifest', opts)
4619
4619
4620 if opts.get('all'):
4620 if opts.get('all'):
4621 if rev or node:
4621 if rev or node:
4622 raise util.Abort(_("can't specify a revision with --all"))
4622 raise util.Abort(_("can't specify a revision with --all"))
4623
4623
4624 res = []
4624 res = []
4625 prefix = "data/"
4625 prefix = "data/"
4626 suffix = ".i"
4626 suffix = ".i"
4627 plen = len(prefix)
4627 plen = len(prefix)
4628 slen = len(suffix)
4628 slen = len(suffix)
4629 lock = repo.lock()
4629 lock = repo.lock()
4630 try:
4630 try:
4631 for fn, b, size in repo.store.datafiles():
4631 for fn, b, size in repo.store.datafiles():
4632 if size != 0 and fn[-slen:] == suffix and fn[:plen] == prefix:
4632 if size != 0 and fn[-slen:] == suffix and fn[:plen] == prefix:
4633 res.append(fn[plen:-slen])
4633 res.append(fn[plen:-slen])
4634 finally:
4634 finally:
4635 lock.release()
4635 lock.release()
4636 for f in res:
4636 for f in res:
4637 fm.startitem()
4637 fm.startitem()
4638 fm.write("path", '%s\n', f)
4638 fm.write("path", '%s\n', f)
4639 fm.end()
4639 fm.end()
4640 return
4640 return
4641
4641
4642 if rev and node:
4642 if rev and node:
4643 raise util.Abort(_("please specify just one revision"))
4643 raise util.Abort(_("please specify just one revision"))
4644
4644
4645 if not node:
4645 if not node:
4646 node = rev
4646 node = rev
4647
4647
4648 char = {'l': '@', 'x': '*', '': ''}
4648 char = {'l': '@', 'x': '*', '': ''}
4649 mode = {'l': '644', 'x': '755', '': '644'}
4649 mode = {'l': '644', 'x': '755', '': '644'}
4650 ctx = scmutil.revsingle(repo, node)
4650 ctx = scmutil.revsingle(repo, node)
4651 mf = ctx.manifest()
4651 mf = ctx.manifest()
4652 for f in ctx:
4652 for f in ctx:
4653 fm.startitem()
4653 fm.startitem()
4654 fl = ctx[f].flags()
4654 fl = ctx[f].flags()
4655 fm.condwrite(ui.debugflag, 'hash', '%s ', hex(mf[f]))
4655 fm.condwrite(ui.debugflag, 'hash', '%s ', hex(mf[f]))
4656 fm.condwrite(ui.verbose, 'mode type', '%s %1s ', mode[fl], char[fl])
4656 fm.condwrite(ui.verbose, 'mode type', '%s %1s ', mode[fl], char[fl])
4657 fm.write('path', '%s\n', f)
4657 fm.write('path', '%s\n', f)
4658 fm.end()
4658 fm.end()
4659
4659
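The --all branch above lists every file ever tracked by scanning the store for revlogs named ``data/<path>.i`` and stripping the prefix and suffix. A simplified sketch of that filtering, using plain (name, size) pairs in place of what ``repo.store.datafiles()`` yields (the sample names are made up)::

    def storepaths(datafiles, prefix="data/", suffix=".i"):
        plen, slen = len(prefix), len(suffix)
        return [fn[plen:-slen]
                for fn, size in datafiles
                if size != 0 and fn.startswith(prefix) and fn.endswith(suffix)]

    print(storepaths([("data/README.i", 12),
                      ("data/src/main.py.i", 40),
                      ("00changelog.i", 99)]))
    # -> ['README', 'src/main.py']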
4660 @command('^merge',
4660 @command('^merge',
4661 [('f', 'force', None,
4661 [('f', 'force', None,
4662 _('force a merge including outstanding changes (DEPRECATED)')),
4662 _('force a merge including outstanding changes (DEPRECATED)')),
4663 ('r', 'rev', '', _('revision to merge'), _('REV')),
4663 ('r', 'rev', '', _('revision to merge'), _('REV')),
4664 ('P', 'preview', None,
4664 ('P', 'preview', None,
4665 _('review revisions to merge (no merge is performed)'))
4665 _('review revisions to merge (no merge is performed)'))
4666 ] + mergetoolopts,
4666 ] + mergetoolopts,
4667 _('[-P] [-f] [[-r] REV]'))
4667 _('[-P] [-f] [[-r] REV]'))
4668 def merge(ui, repo, node=None, **opts):
4668 def merge(ui, repo, node=None, **opts):
4669 """merge another revision into working directory
4669 """merge another revision into working directory
4670
4670
4671 The current working directory is updated with all changes made in
4671 The current working directory is updated with all changes made in
4672 the requested revision since the last common predecessor revision.
4672 the requested revision since the last common predecessor revision.
4673
4673
4674 Files that changed between either parent are marked as changed for
4674 Files that changed between either parent are marked as changed for
4675 the next commit and a commit must be performed before any further
4675 the next commit and a commit must be performed before any further
4676 updates to the repository are allowed. The next commit will have
4676 updates to the repository are allowed. The next commit will have
4677 two parents.
4677 two parents.
4678
4678
4679 ``--tool`` can be used to specify the merge tool used for file
4679 ``--tool`` can be used to specify the merge tool used for file
4680 merges. It overrides the HGMERGE environment variable and your
4680 merges. It overrides the HGMERGE environment variable and your
4681 configuration files. See :hg:`help merge-tools` for options.
4681 configuration files. See :hg:`help merge-tools` for options.
4682
4682
4683 If no revision is specified, the working directory's parent is a
4683 If no revision is specified, the working directory's parent is a
4684 head revision, and the current branch contains exactly one other
4684 head revision, and the current branch contains exactly one other
4685 head, the other head is merged with by default. Otherwise, an
4685 head, the other head is merged with by default. Otherwise, an
4686 explicit revision with which to merge must be provided.
4686 explicit revision with which to merge must be provided.
4687
4687
4688 :hg:`resolve` must be used to resolve unresolved files.
4688 :hg:`resolve` must be used to resolve unresolved files.
4689
4689
4690 To undo an uncommitted merge, use :hg:`update --clean .` which
4690 To undo an uncommitted merge, use :hg:`update --clean .` which
4691 will check out a clean copy of the original merge parent, losing
4691 will check out a clean copy of the original merge parent, losing
4692 all changes.
4692 all changes.
4693
4693
4694 Returns 0 on success, 1 if there are unresolved files.
4694 Returns 0 on success, 1 if there are unresolved files.
4695 """
4695 """
4696
4696
4697 if opts.get('rev') and node:
4697 if opts.get('rev') and node:
4698 raise util.Abort(_("please specify just one revision"))
4698 raise util.Abort(_("please specify just one revision"))
4699 if not node:
4699 if not node:
4700 node = opts.get('rev')
4700 node = opts.get('rev')
4701
4701
4702 if node:
4702 if node:
4703 node = scmutil.revsingle(repo, node).node()
4703 node = scmutil.revsingle(repo, node).node()
4704
4704
4705 if not node and repo._bookmarkcurrent:
4705 if not node and repo._activebookmark:
4706 bmheads = repo.bookmarkheads(repo._bookmarkcurrent)
4706 bmheads = repo.bookmarkheads(repo._activebookmark)
4707 curhead = repo[repo._bookmarkcurrent].node()
4707 curhead = repo[repo._activebookmark].node()
4708 if len(bmheads) == 2:
4708 if len(bmheads) == 2:
4709 if curhead == bmheads[0]:
4709 if curhead == bmheads[0]:
4710 node = bmheads[1]
4710 node = bmheads[1]
4711 else:
4711 else:
4712 node = bmheads[0]
4712 node = bmheads[0]
4713 elif len(bmheads) > 2:
4713 elif len(bmheads) > 2:
4714 raise util.Abort(_("multiple matching bookmarks to merge - "
4714 raise util.Abort(_("multiple matching bookmarks to merge - "
4715 "please merge with an explicit rev or bookmark"),
4715 "please merge with an explicit rev or bookmark"),
4716 hint=_("run 'hg heads' to see all heads"))
4716 hint=_("run 'hg heads' to see all heads"))
4717 elif len(bmheads) <= 1:
4717 elif len(bmheads) <= 1:
4718 raise util.Abort(_("no matching bookmark to merge - "
4718 raise util.Abort(_("no matching bookmark to merge - "
4719 "please merge with an explicit rev or bookmark"),
4719 "please merge with an explicit rev or bookmark"),
4720 hint=_("run 'hg heads' to see all heads"))
4720 hint=_("run 'hg heads' to see all heads"))
4721
4721
4722 if not node and not repo._bookmarkcurrent:
4722 if not node and not repo._activebookmark:
4723 branch = repo[None].branch()
4723 branch = repo[None].branch()
4724 bheads = repo.branchheads(branch)
4724 bheads = repo.branchheads(branch)
4725 nbhs = [bh for bh in bheads if not repo[bh].bookmarks()]
4725 nbhs = [bh for bh in bheads if not repo[bh].bookmarks()]
4726
4726
4727 if len(nbhs) > 2:
4727 if len(nbhs) > 2:
4728 raise util.Abort(_("branch '%s' has %d heads - "
4728 raise util.Abort(_("branch '%s' has %d heads - "
4729 "please merge with an explicit rev")
4729 "please merge with an explicit rev")
4730 % (branch, len(bheads)),
4730 % (branch, len(bheads)),
4731 hint=_("run 'hg heads .' to see heads"))
4731 hint=_("run 'hg heads .' to see heads"))
4732
4732
4733 parent = repo.dirstate.p1()
4733 parent = repo.dirstate.p1()
4734 if len(nbhs) <= 1:
4734 if len(nbhs) <= 1:
4735 if len(bheads) > 1:
4735 if len(bheads) > 1:
4736 raise util.Abort(_("heads are bookmarked - "
4736 raise util.Abort(_("heads are bookmarked - "
4737 "please merge with an explicit rev"),
4737 "please merge with an explicit rev"),
4738 hint=_("run 'hg heads' to see all heads"))
4738 hint=_("run 'hg heads' to see all heads"))
4739 if len(repo.heads()) > 1:
4739 if len(repo.heads()) > 1:
4740 raise util.Abort(_("branch '%s' has one head - "
4740 raise util.Abort(_("branch '%s' has one head - "
4741 "please merge with an explicit rev")
4741 "please merge with an explicit rev")
4742 % branch,
4742 % branch,
4743 hint=_("run 'hg heads' to see all heads"))
4743 hint=_("run 'hg heads' to see all heads"))
4744 msg, hint = _('nothing to merge'), None
4744 msg, hint = _('nothing to merge'), None
4745 if parent != repo.lookup(branch):
4745 if parent != repo.lookup(branch):
4746 hint = _("use 'hg update' instead")
4746 hint = _("use 'hg update' instead")
4747 raise util.Abort(msg, hint=hint)
4747 raise util.Abort(msg, hint=hint)
4748
4748
4749 if parent not in bheads:
4749 if parent not in bheads:
4750 raise util.Abort(_('working directory not at a head revision'),
4750 raise util.Abort(_('working directory not at a head revision'),
4751 hint=_("use 'hg update' or merge with an "
4751 hint=_("use 'hg update' or merge with an "
4752 "explicit revision"))
4752 "explicit revision"))
4753 if parent == nbhs[0]:
4753 if parent == nbhs[0]:
4754 node = nbhs[-1]
4754 node = nbhs[-1]
4755 else:
4755 else:
4756 node = nbhs[0]
4756 node = nbhs[0]
4757
4757
4758 if opts.get('preview'):
4758 if opts.get('preview'):
4759 # find nodes that are ancestors of p2 but not of p1
4759 # find nodes that are ancestors of p2 but not of p1
4760 p1 = repo.lookup('.')
4760 p1 = repo.lookup('.')
4761 p2 = repo.lookup(node)
4761 p2 = repo.lookup(node)
4762 nodes = repo.changelog.findmissing(common=[p1], heads=[p2])
4762 nodes = repo.changelog.findmissing(common=[p1], heads=[p2])
4763
4763
4764 displayer = cmdutil.show_changeset(ui, repo, opts)
4764 displayer = cmdutil.show_changeset(ui, repo, opts)
4765 for node in nodes:
4765 for node in nodes:
4766 displayer.show(repo[node])
4766 displayer.show(repo[node])
4767 displayer.close()
4767 displayer.close()
4768 return 0
4768 return 0
4769
4769
4770 try:
4770 try:
4771 # ui.forcemerge is an internal variable, do not document
4771 # ui.forcemerge is an internal variable, do not document
4772 repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''), 'merge')
4772 repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''), 'merge')
4773 return hg.merge(repo, node, force=opts.get('force'))
4773 return hg.merge(repo, node, force=opts.get('force'))
4774 finally:
4774 finally:
4775 ui.setconfig('ui', 'forcemerge', '', 'merge')
4775 ui.setconfig('ui', 'forcemerge', '', 'merge')
4776
4776
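When no revision, bookmark or branch rule picks a target, ``merge`` falls back to the "exactly one other head" rule shown above. A stand-alone sketch of that selection, with ValueError standing in for util.Abort and made-up short hashes::

    def pick_other_head(heads, current):
        """Return the head to merge with when exactly two heads exist."""
        if len(heads) == 2:
            return heads[1] if current == heads[0] else heads[0]
        if len(heads) > 2:
            raise ValueError("multiple candidate heads - "
                             "merge with an explicit rev or bookmark")
        raise ValueError("nothing to merge")

    print(pick_other_head(["a1b2c3", "d4e5f6"], "a1b2c3"))   # -> 'd4e5f6'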
4777 @command('outgoing|out',
4777 @command('outgoing|out',
4778 [('f', 'force', None, _('run even when the destination is unrelated')),
4778 [('f', 'force', None, _('run even when the destination is unrelated')),
4779 ('r', 'rev', [],
4779 ('r', 'rev', [],
4780 _('a changeset intended to be included in the destination'), _('REV')),
4780 _('a changeset intended to be included in the destination'), _('REV')),
4781 ('n', 'newest-first', None, _('show newest record first')),
4781 ('n', 'newest-first', None, _('show newest record first')),
4782 ('B', 'bookmarks', False, _('compare bookmarks')),
4782 ('B', 'bookmarks', False, _('compare bookmarks')),
4783 ('b', 'branch', [], _('a specific branch you would like to push'),
4783 ('b', 'branch', [], _('a specific branch you would like to push'),
4784 _('BRANCH')),
4784 _('BRANCH')),
4785 ] + logopts + remoteopts + subrepoopts,
4785 ] + logopts + remoteopts + subrepoopts,
4786 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]'))
4786 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]'))
4787 def outgoing(ui, repo, dest=None, **opts):
4787 def outgoing(ui, repo, dest=None, **opts):
4788 """show changesets not found in the destination
4788 """show changesets not found in the destination
4789
4789
4790 Show changesets not found in the specified destination repository
4790 Show changesets not found in the specified destination repository
4791 or the default push location. These are the changesets that would
4791 or the default push location. These are the changesets that would
4792 be pushed if a push was requested.
4792 be pushed if a push was requested.
4793
4793
4794 See pull for details of valid destination formats.
4794 See pull for details of valid destination formats.
4795
4795
4796 .. container:: verbose
4796 .. container:: verbose
4797
4797
4798 With -B/--bookmarks, the result of bookmark comparison between
4798 With -B/--bookmarks, the result of bookmark comparison between
4799 local and remote repositories is displayed. With -v/--verbose,
4799 local and remote repositories is displayed. With -v/--verbose,
4800 status is also displayed for each bookmark like below::
4800 status is also displayed for each bookmark like below::
4801
4801
4802 BM1 01234567890a added
4802 BM1 01234567890a added
4803 BM2 deleted
4803 BM2 deleted
4804 BM3 234567890abc advanced
4804 BM3 234567890abc advanced
4805 BM4 34567890abcd diverged
4805 BM4 34567890abcd diverged
4806 BM5 4567890abcde changed
4806 BM5 4567890abcde changed
4807
4807
4808 The action taken when pushing depends on the
4808 The action taken when pushing depends on the
4809 status of each bookmark:
4809 status of each bookmark:
4810
4810
4811 :``added``: push with ``-B`` will create it
4811 :``added``: push with ``-B`` will create it
4812 :``deleted``: push with ``-B`` will delete it
4812 :``deleted``: push with ``-B`` will delete it
4813 :``advanced``: push will update it
4813 :``advanced``: push will update it
4814 :``diverged``: push with ``-B`` will update it
4814 :``diverged``: push with ``-B`` will update it
4815 :``changed``: push with ``-B`` will update it
4815 :``changed``: push with ``-B`` will update it
4816
4816
4817 From the point of view of pushing behavior, bookmarks
4817 From the point of view of pushing behavior, bookmarks
4818 existing only in the remote repository are treated as
4818 existing only in the remote repository are treated as
4819 ``deleted``, even if they are in fact added remotely.
4819 ``deleted``, even if they are in fact added remotely.
4820
4820
4821 Returns 0 if there are outgoing changes, 1 otherwise.
4821 Returns 0 if there are outgoing changes, 1 otherwise.
4822 """
4822 """
4823 if opts.get('graph'):
4823 if opts.get('graph'):
4824 cmdutil.checkunsupportedgraphflags([], opts)
4824 cmdutil.checkunsupportedgraphflags([], opts)
4825 o, other = hg._outgoing(ui, repo, dest, opts)
4825 o, other = hg._outgoing(ui, repo, dest, opts)
4826 if not o:
4826 if not o:
4827 cmdutil.outgoinghooks(ui, repo, other, opts, o)
4827 cmdutil.outgoinghooks(ui, repo, other, opts, o)
4828 return
4828 return
4829
4829
4830 revdag = cmdutil.graphrevs(repo, o, opts)
4830 revdag = cmdutil.graphrevs(repo, o, opts)
4831 displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
4831 displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
4832 showparents = [ctx.node() for ctx in repo[None].parents()]
4832 showparents = [ctx.node() for ctx in repo[None].parents()]
4833 cmdutil.displaygraph(ui, revdag, displayer, showparents,
4833 cmdutil.displaygraph(ui, revdag, displayer, showparents,
4834 graphmod.asciiedges)
4834 graphmod.asciiedges)
4835 cmdutil.outgoinghooks(ui, repo, other, opts, o)
4835 cmdutil.outgoinghooks(ui, repo, other, opts, o)
4836 return 0
4836 return 0
4837
4837
4838 if opts.get('bookmarks'):
4838 if opts.get('bookmarks'):
4839 dest = ui.expandpath(dest or 'default-push', dest or 'default')
4839 dest = ui.expandpath(dest or 'default-push', dest or 'default')
4840 dest, branches = hg.parseurl(dest, opts.get('branch'))
4840 dest, branches = hg.parseurl(dest, opts.get('branch'))
4841 other = hg.peer(repo, opts, dest)
4841 other = hg.peer(repo, opts, dest)
4842 if 'bookmarks' not in other.listkeys('namespaces'):
4842 if 'bookmarks' not in other.listkeys('namespaces'):
4843 ui.warn(_("remote doesn't support bookmarks\n"))
4843 ui.warn(_("remote doesn't support bookmarks\n"))
4844 return 0
4844 return 0
4845 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
4845 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
4846 return bookmarks.outgoing(ui, repo, other)
4846 return bookmarks.outgoing(ui, repo, other)
4847
4847
4848 repo._subtoppath = ui.expandpath(dest or 'default-push', dest or 'default')
4848 repo._subtoppath = ui.expandpath(dest or 'default-push', dest or 'default')
4849 try:
4849 try:
4850 return hg.outgoing(ui, repo, dest, opts)
4850 return hg.outgoing(ui, repo, dest, opts)
4851 finally:
4851 finally:
4852 del repo._subtoppath
4852 del repo._subtoppath
4853
4853
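The bookmark statuses listed in the ``outgoing`` help text map one-to-one onto push actions. Condensed into a lookup table for illustration (the strings are quoted from the docstring above; the dict itself is not Mercurial code)::

    PUSH_ACTION = {
        "added":    "push with -B will create it",
        "deleted":  "push with -B will delete it",
        "advanced": "push will update it",
        "diverged": "push with -B will update it",
        "changed":  "push with -B will update it",
    }

    for status in ("added", "diverged"):
        print(status, "->", PUSH_ACTION[status])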
4854 @command('parents',
4854 @command('parents',
4855 [('r', 'rev', '', _('show parents of the specified revision'), _('REV')),
4855 [('r', 'rev', '', _('show parents of the specified revision'), _('REV')),
4856 ] + templateopts,
4856 ] + templateopts,
4857 _('[-r REV] [FILE]'),
4857 _('[-r REV] [FILE]'),
4858 inferrepo=True)
4858 inferrepo=True)
4859 def parents(ui, repo, file_=None, **opts):
4859 def parents(ui, repo, file_=None, **opts):
4860 """show the parents of the working directory or revision (DEPRECATED)
4860 """show the parents of the working directory or revision (DEPRECATED)
4861
4861
4862 Print the working directory's parent revisions. If a revision is
4862 Print the working directory's parent revisions. If a revision is
4863 given via -r/--rev, the parent of that revision will be printed.
4863 given via -r/--rev, the parent of that revision will be printed.
4864 If a file argument is given, the revision in which the file was
4864 If a file argument is given, the revision in which the file was
4865 last changed (before the working directory revision or the
4865 last changed (before the working directory revision or the
4866 argument to --rev if given) is printed.
4866 argument to --rev if given) is printed.
4867
4867
4868 See :hg:`summary` and :hg:`help revsets` for related information.
4868 See :hg:`summary` and :hg:`help revsets` for related information.
4869
4869
4870 Returns 0 on success.
4870 Returns 0 on success.
4871 """
4871 """
4872
4872
4873 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
4873 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
4874
4874
4875 if file_:
4875 if file_:
4876 m = scmutil.match(ctx, (file_,), opts)
4876 m = scmutil.match(ctx, (file_,), opts)
4877 if m.anypats() or len(m.files()) != 1:
4877 if m.anypats() or len(m.files()) != 1:
4878 raise util.Abort(_('can only specify an explicit filename'))
4878 raise util.Abort(_('can only specify an explicit filename'))
4879 file_ = m.files()[0]
4879 file_ = m.files()[0]
4880 filenodes = []
4880 filenodes = []
4881 for cp in ctx.parents():
4881 for cp in ctx.parents():
4882 if not cp:
4882 if not cp:
4883 continue
4883 continue
4884 try:
4884 try:
4885 filenodes.append(cp.filenode(file_))
4885 filenodes.append(cp.filenode(file_))
4886 except error.LookupError:
4886 except error.LookupError:
4887 pass
4887 pass
4888 if not filenodes:
4888 if not filenodes:
4889 raise util.Abort(_("'%s' not found in manifest!") % file_)
4889 raise util.Abort(_("'%s' not found in manifest!") % file_)
4890 p = []
4890 p = []
4891 for fn in filenodes:
4891 for fn in filenodes:
4892 fctx = repo.filectx(file_, fileid=fn)
4892 fctx = repo.filectx(file_, fileid=fn)
4893 p.append(fctx.node())
4893 p.append(fctx.node())
4894 else:
4894 else:
4895 p = [cp.node() for cp in ctx.parents()]
4895 p = [cp.node() for cp in ctx.parents()]
4896
4896
4897 displayer = cmdutil.show_changeset(ui, repo, opts)
4897 displayer = cmdutil.show_changeset(ui, repo, opts)
4898 for n in p:
4898 for n in p:
4899 if n != nullid:
4899 if n != nullid:
4900 displayer.show(repo[n])
4900 displayer.show(repo[n])
4901 displayer.close()
4901 displayer.close()
4902
4902
4903 @command('paths', [], _('[NAME]'), optionalrepo=True)
4903 @command('paths', [], _('[NAME]'), optionalrepo=True)
4904 def paths(ui, repo, search=None):
4904 def paths(ui, repo, search=None):
4905 """show aliases for remote repositories
4905 """show aliases for remote repositories
4906
4906
4907 Show definition of symbolic path name NAME. If no name is given,
4907 Show definition of symbolic path name NAME. If no name is given,
4908 show definition of all available names.
4908 show definition of all available names.
4909
4909
4910 Option -q/--quiet suppresses all output when searching for NAME
4910 Option -q/--quiet suppresses all output when searching for NAME
4911 and shows only the path names when listing all definitions.
4911 and shows only the path names when listing all definitions.
4912
4912
4913 Path names are defined in the [paths] section of your
4913 Path names are defined in the [paths] section of your
4914 configuration file and in ``/etc/mercurial/hgrc``. If run inside a
4914 configuration file and in ``/etc/mercurial/hgrc``. If run inside a
4915 repository, ``.hg/hgrc`` is used, too.
4915 repository, ``.hg/hgrc`` is used, too.
4916
4916
4917 The path names ``default`` and ``default-push`` have a special
4917 The path names ``default`` and ``default-push`` have a special
4918 meaning. When performing a push or pull operation, they are used
4918 meaning. When performing a push or pull operation, they are used
4919 as fallbacks if no location is specified on the command-line.
4919 as fallbacks if no location is specified on the command-line.
4920 When ``default-push`` is set, it will be used for push and
4920 When ``default-push`` is set, it will be used for push and
4921 ``default`` will be used for pull; otherwise ``default`` is used
4921 ``default`` will be used for pull; otherwise ``default`` is used
4922 as the fallback for both. When cloning a repository, the clone
4922 as the fallback for both. When cloning a repository, the clone
4923 source is written as ``default`` in ``.hg/hgrc``. Note that
4923 source is written as ``default`` in ``.hg/hgrc``. Note that
4924 ``default`` and ``default-push`` apply to all inbound (e.g.
4924 ``default`` and ``default-push`` apply to all inbound (e.g.
4925 :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email` and
4925 :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email` and
4926 :hg:`bundle`) operations.
4926 :hg:`bundle`) operations.
4927
4927
4928 See :hg:`help urls` for more information.
4928 See :hg:`help urls` for more information.
4929
4929
4930 Returns 0 on success.
4930 Returns 0 on success.
4931 """
4931 """
4932 if search:
4932 if search:
4933 for name, path in sorted(ui.paths.iteritems()):
4933 for name, path in sorted(ui.paths.iteritems()):
4934 if name == search:
4934 if name == search:
4935 ui.status("%s\n" % util.hidepassword(path.loc))
4935 ui.status("%s\n" % util.hidepassword(path.loc))
4936 return
4936 return
4937 if not ui.quiet:
4937 if not ui.quiet:
4938 ui.warn(_("not found!\n"))
4938 ui.warn(_("not found!\n"))
4939 return 1
4939 return 1
4940 else:
4940 else:
4941 for name, path in sorted(ui.paths.iteritems()):
4941 for name, path in sorted(ui.paths.iteritems()):
4942 if ui.quiet:
4942 if ui.quiet:
4943 ui.write("%s\n" % name)
4943 ui.write("%s\n" % name)
4944 else:
4944 else:
4945 ui.write("%s = %s\n" % (name,
4945 ui.write("%s = %s\n" % (name,
4946 util.hidepassword(path.loc)))
4946 util.hidepassword(path.loc)))
4947
4947
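The special handling of ``default`` and ``default-push`` described above amounts to a simple fallback rule. A sketch with a hypothetical config dict and made-up URLs::

    def pick_path(paths, pushing):
        """default-push wins for push when present; otherwise use default."""
        if pushing and "default-push" in paths:
            return paths["default-push"]
        return paths.get("default")

    cfg = {"default": "https://example.org/repo",
           "default-push": "ssh://example.org//srv/repo"}
    print(pick_path(cfg, pushing=True))    # ssh://example.org//srv/repo
    print(pick_path(cfg, pushing=False))   # https://example.org/repo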
4948 @command('phase',
4948 @command('phase',
4949 [('p', 'public', False, _('set changeset phase to public')),
4949 [('p', 'public', False, _('set changeset phase to public')),
4950 ('d', 'draft', False, _('set changeset phase to draft')),
4950 ('d', 'draft', False, _('set changeset phase to draft')),
4951 ('s', 'secret', False, _('set changeset phase to secret')),
4951 ('s', 'secret', False, _('set changeset phase to secret')),
4952 ('f', 'force', False, _('allow moving the boundary backward')),
4952 ('f', 'force', False, _('allow moving the boundary backward')),
4953 ('r', 'rev', [], _('target revision'), _('REV')),
4953 ('r', 'rev', [], _('target revision'), _('REV')),
4954 ],
4954 ],
4955 _('[-p|-d|-s] [-f] [-r] REV...'))
4955 _('[-p|-d|-s] [-f] [-r] REV...'))
4956 def phase(ui, repo, *revs, **opts):
4956 def phase(ui, repo, *revs, **opts):
4957 """set or show the current phase name
4957 """set or show the current phase name
4958
4958
4959 With no argument, show the phase name of specified revisions.
4959 With no argument, show the phase name of specified revisions.
4960
4960
4961 With one of -p/--public, -d/--draft or -s/--secret, change the
4961 With one of -p/--public, -d/--draft or -s/--secret, change the
4962 phase value of the specified revisions.
4962 phase value of the specified revisions.
4963
4963
4964 Unless -f/--force is specified, :hg:`phase` won't move changesets from a
4964 Unless -f/--force is specified, :hg:`phase` won't move changesets from a
4965 lower phase to a higher phase. Phases are ordered as follows::
4965 lower phase to a higher phase. Phases are ordered as follows::
4966
4966
4967 public < draft < secret
4967 public < draft < secret
4968
4968
4969 Returns 0 on success, 1 if no phases were changed or some could not
4969 Returns 0 on success, 1 if no phases were changed or some could not
4970 be changed.
4970 be changed.
4971 """
4971 """
4972 # search for a unique phase argument
4972 # search for a unique phase argument
4973 targetphase = None
4973 targetphase = None
4974 for idx, name in enumerate(phases.phasenames):
4974 for idx, name in enumerate(phases.phasenames):
4975 if opts[name]:
4975 if opts[name]:
4976 if targetphase is not None:
4976 if targetphase is not None:
4977 raise util.Abort(_('only one phase can be specified'))
4977 raise util.Abort(_('only one phase can be specified'))
4978 targetphase = idx
4978 targetphase = idx
4979
4979
4980 # look for specified revision
4980 # look for specified revision
4981 revs = list(revs)
4981 revs = list(revs)
4982 revs.extend(opts['rev'])
4982 revs.extend(opts['rev'])
4983 if not revs:
4983 if not revs:
4984 raise util.Abort(_('no revisions specified'))
4984 raise util.Abort(_('no revisions specified'))
4985
4985
4986 revs = scmutil.revrange(repo, revs)
4986 revs = scmutil.revrange(repo, revs)
4987
4987
4988 lock = None
4988 lock = None
4989 ret = 0
4989 ret = 0
4990 if targetphase is None:
4990 if targetphase is None:
4991 # display
4991 # display
4992 for r in revs:
4992 for r in revs:
4993 ctx = repo[r]
4993 ctx = repo[r]
4994 ui.write('%i: %s\n' % (ctx.rev(), ctx.phasestr()))
4994 ui.write('%i: %s\n' % (ctx.rev(), ctx.phasestr()))
4995 else:
4995 else:
4996 tr = None
4996 tr = None
4997 lock = repo.lock()
4997 lock = repo.lock()
4998 try:
4998 try:
4999 tr = repo.transaction("phase")
4999 tr = repo.transaction("phase")
5000 # set phase
5000 # set phase
5001 if not revs:
5001 if not revs:
5002 raise util.Abort(_('empty revision set'))
5002 raise util.Abort(_('empty revision set'))
5003 nodes = [repo[r].node() for r in revs]
5003 nodes = [repo[r].node() for r in revs]
5004 # moving revision from public to draft may hide them
5004 # moving revision from public to draft may hide them
5005 # We have to check result on an unfiltered repository
5005 # We have to check result on an unfiltered repository
5006 unfi = repo.unfiltered()
5006 unfi = repo.unfiltered()
5007 getphase = unfi._phasecache.phase
5007 getphase = unfi._phasecache.phase
5008 olddata = [getphase(unfi, r) for r in unfi]
5008 olddata = [getphase(unfi, r) for r in unfi]
5009 phases.advanceboundary(repo, tr, targetphase, nodes)
5009 phases.advanceboundary(repo, tr, targetphase, nodes)
5010 if opts['force']:
5010 if opts['force']:
5011 phases.retractboundary(repo, tr, targetphase, nodes)
5011 phases.retractboundary(repo, tr, targetphase, nodes)
5012 tr.close()
5012 tr.close()
5013 finally:
5013 finally:
5014 if tr is not None:
5014 if tr is not None:
5015 tr.release()
5015 tr.release()
5016 lock.release()
5016 lock.release()
5017 getphase = unfi._phasecache.phase
5017 getphase = unfi._phasecache.phase
5018 newdata = [getphase(unfi, r) for r in unfi]
5018 newdata = [getphase(unfi, r) for r in unfi]
5019 changes = sum(newdata[r] != olddata[r] for r in unfi)
5019 changes = sum(newdata[r] != olddata[r] for r in unfi)
5020 cl = unfi.changelog
5020 cl = unfi.changelog
5021 rejected = [n for n in nodes
5021 rejected = [n for n in nodes
5022 if newdata[cl.rev(n)] < targetphase]
5022 if newdata[cl.rev(n)] < targetphase]
5023 if rejected:
5023 if rejected:
5024 ui.warn(_('cannot move %i changesets to a higher '
5024 ui.warn(_('cannot move %i changesets to a higher '
5025 'phase, use --force\n') % len(rejected))
5025 'phase, use --force\n') % len(rejected))
5026 ret = 1
5026 ret = 1
5027 if changes:
5027 if changes:
5028 msg = _('phase changed for %i changesets\n') % changes
5028 msg = _('phase changed for %i changesets\n') % changes
5029 if ret:
5029 if ret:
5030 ui.status(msg)
5030 ui.status(msg)
5031 else:
5031 else:
5032 ui.note(msg)
5032 ui.note(msg)
5033 else:
5033 else:
5034 ui.warn(_('no phases changed\n'))
5034 ui.warn(_('no phases changed\n'))
5035 ret = 1
5035 ret = 1
5036 return ret
5036 return ret
5037
5037
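Since phases are ordered ``public < draft < secret``, only a change that raises a changeset's phase needs --force; lowering it toward public is the ordinary advance. A minimal sketch of that check, independent of the phasecache machinery above::

    PHASES = ["public", "draft", "secret"]          # order from the help text

    def needs_force(current, target):
        """True when the move would raise the phase, which requires -f."""
        return PHASES.index(target) > PHASES.index(current)

    print(needs_force("secret", "public"))   # False: publishing is allowed
    print(needs_force("public", "draft"))    # True: needs --force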
5038 def postincoming(ui, repo, modheads, optupdate, checkout):
5038 def postincoming(ui, repo, modheads, optupdate, checkout):
5039 if modheads == 0:
5039 if modheads == 0:
5040 return
5040 return
5041 if optupdate:
5041 if optupdate:
5042 checkout, movemarkfrom = bookmarks.calculateupdate(ui, repo, checkout)
5042 checkout, movemarkfrom = bookmarks.calculateupdate(ui, repo, checkout)
5043 try:
5043 try:
5044 ret = hg.update(repo, checkout)
5044 ret = hg.update(repo, checkout)
5045 except util.Abort, inst:
5045 except util.Abort, inst:
5046 ui.warn(_("not updating: %s\n") % str(inst))
5046 ui.warn(_("not updating: %s\n") % str(inst))
5047 if inst.hint:
5047 if inst.hint:
5048 ui.warn(_("(%s)\n") % inst.hint)
5048 ui.warn(_("(%s)\n") % inst.hint)
5049 return 0
5049 return 0
5050 if not ret and not checkout:
5050 if not ret and not checkout:
5051 if bookmarks.update(repo, [movemarkfrom], repo['.'].node()):
5051 if bookmarks.update(repo, [movemarkfrom], repo['.'].node()):
5052 ui.status(_("updating bookmark %s\n") % repo._bookmarkcurrent)
5052 ui.status(_("updating bookmark %s\n") % repo._activebookmark)
5053 return ret
5053 return ret
5054 if modheads > 1:
5054 if modheads > 1:
5055 currentbranchheads = len(repo.branchheads())
5055 currentbranchheads = len(repo.branchheads())
5056 if currentbranchheads == modheads:
5056 if currentbranchheads == modheads:
5057 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
5057 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
5058 elif currentbranchheads > 1:
5058 elif currentbranchheads > 1:
5059 ui.status(_("(run 'hg heads .' to see heads, 'hg merge' to "
5059 ui.status(_("(run 'hg heads .' to see heads, 'hg merge' to "
5060 "merge)\n"))
5060 "merge)\n"))
5061 else:
5061 else:
5062 ui.status(_("(run 'hg heads' to see heads)\n"))
5062 ui.status(_("(run 'hg heads' to see heads)\n"))
5063 else:
5063 else:
5064 ui.status(_("(run 'hg update' to get a working copy)\n"))
5064 ui.status(_("(run 'hg update' to get a working copy)\n"))
5065
5065
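For reference, the hint ``postincoming`` prints when it does not update can be summarised as a small decision function (the message strings are taken from the code above; the function itself is only an illustration)::

    def post_pull_hint(modheads, branchheads):
        if modheads == 0:
            return None
        if modheads > 1:
            if branchheads == modheads:
                return "run 'hg heads' to see heads, 'hg merge' to merge"
            if branchheads > 1:
                return "run 'hg heads .' to see heads, 'hg merge' to merge"
            return "run 'hg heads' to see heads"
        return "run 'hg update' to get a working copy"

    print(post_pull_hint(2, 2))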
5066 @command('^pull',
5066 @command('^pull',
5067 [('u', 'update', None,
5067 [('u', 'update', None,
5068 _('update to new branch head if changesets were pulled')),
5068 _('update to new branch head if changesets were pulled')),
5069 ('f', 'force', None, _('run even when remote repository is unrelated')),
5069 ('f', 'force', None, _('run even when remote repository is unrelated')),
5070 ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
5070 ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
5071 ('B', 'bookmark', [], _("bookmark to pull"), _('BOOKMARK')),
5071 ('B', 'bookmark', [], _("bookmark to pull"), _('BOOKMARK')),
5072 ('b', 'branch', [], _('a specific branch you would like to pull'),
5072 ('b', 'branch', [], _('a specific branch you would like to pull'),
5073 _('BRANCH')),
5073 _('BRANCH')),
5074 ] + remoteopts,
5074 ] + remoteopts,
5075 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]'))
5075 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]'))
5076 def pull(ui, repo, source="default", **opts):
5076 def pull(ui, repo, source="default", **opts):
5077 """pull changes from the specified source
5077 """pull changes from the specified source
5078
5078
5079 Pull changes from a remote repository to a local one.
5079 Pull changes from a remote repository to a local one.
5080
5080
5081 This finds all changes from the repository at the specified path
5081 This finds all changes from the repository at the specified path
5082 or URL and adds them to a local repository (the current one unless
5082 or URL and adds them to a local repository (the current one unless
5083 -R is specified). By default, this does not update the copy of the
5083 -R is specified). By default, this does not update the copy of the
5084 project in the working directory.
5084 project in the working directory.
5085
5085
5086 Use :hg:`incoming` if you want to see what would have been added
5086 Use :hg:`incoming` if you want to see what would have been added
5087 by a pull at the time you issued this command. If you then decide
5087 by a pull at the time you issued this command. If you then decide
5088 to add those changes to the repository, you should use :hg:`pull
5088 to add those changes to the repository, you should use :hg:`pull
5089 -r X` where ``X`` is the last changeset listed by :hg:`incoming`.
5089 -r X` where ``X`` is the last changeset listed by :hg:`incoming`.
5090
5090
5091 If SOURCE is omitted, the 'default' path will be used.
5091 If SOURCE is omitted, the 'default' path will be used.
5092 See :hg:`help urls` for more information.
5092 See :hg:`help urls` for more information.
5093
5093
5094 Returns 0 on success, 1 if an update had unresolved files.
5094 Returns 0 on success, 1 if an update had unresolved files.
5095 """
5095 """
5096 source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
5096 source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
5097 ui.status(_('pulling from %s\n') % util.hidepassword(source))
5097 ui.status(_('pulling from %s\n') % util.hidepassword(source))
5098 other = hg.peer(repo, opts, source)
5098 other = hg.peer(repo, opts, source)
5099 try:
5099 try:
5100 revs, checkout = hg.addbranchrevs(repo, other, branches,
5100 revs, checkout = hg.addbranchrevs(repo, other, branches,
5101 opts.get('rev'))
5101 opts.get('rev'))
5102
5102
5103 remotebookmarks = other.listkeys('bookmarks')
5103 remotebookmarks = other.listkeys('bookmarks')
5104
5104
5105 if opts.get('bookmark'):
5105 if opts.get('bookmark'):
5106 if not revs:
5106 if not revs:
5107 revs = []
5107 revs = []
5108 for b in opts['bookmark']:
5108 for b in opts['bookmark']:
5109 if b not in remotebookmarks:
5109 if b not in remotebookmarks:
5110 raise util.Abort(_('remote bookmark %s not found!') % b)
5110 raise util.Abort(_('remote bookmark %s not found!') % b)
5111 revs.append(remotebookmarks[b])
5111 revs.append(remotebookmarks[b])
5112
5112
5113 if revs:
5113 if revs:
5114 try:
5114 try:
5115 revs = [other.lookup(rev) for rev in revs]
5115 revs = [other.lookup(rev) for rev in revs]
5116 except error.CapabilityError:
5116 except error.CapabilityError:
5117 err = _("other repository doesn't support revision lookup, "
5117 err = _("other repository doesn't support revision lookup, "
5118 "so a rev cannot be specified.")
5118 "so a rev cannot be specified.")
5119 raise util.Abort(err)
5119 raise util.Abort(err)
5120
5120
5121 modheads = exchange.pull(repo, other, heads=revs,
5121 modheads = exchange.pull(repo, other, heads=revs,
5122 force=opts.get('force'),
5122 force=opts.get('force'),
5123 bookmarks=opts.get('bookmark', ())).cgresult
5123 bookmarks=opts.get('bookmark', ())).cgresult
5124 if checkout:
5124 if checkout:
5125 checkout = str(repo.changelog.rev(other.lookup(checkout)))
5125 checkout = str(repo.changelog.rev(other.lookup(checkout)))
5126 repo._subtoppath = source
5126 repo._subtoppath = source
5127 try:
5127 try:
5128 ret = postincoming(ui, repo, modheads, opts.get('update'), checkout)
5128 ret = postincoming(ui, repo, modheads, opts.get('update'), checkout)
5129
5129
5130 finally:
5130 finally:
5131 del repo._subtoppath
5131 del repo._subtoppath
5132
5132
5133 finally:
5133 finally:
5134 other.close()
5134 other.close()
5135 return ret
5135 return ret
5136
5136
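The -B/--bookmark handling above resolves each requested name against the remote's ``bookmarks`` listkeys namespace and aborts on unknown names. A stand-alone sketch with a plain dict and made-up hashes, using ValueError in place of util.Abort::

    def resolve_bookmarks(requested, remotebookmarks):
        revs = []
        for b in requested:
            if b not in remotebookmarks:
                raise ValueError("remote bookmark %s not found!" % b)
            revs.append(remotebookmarks[b])
        return revs

    print(resolve_bookmarks(["stable"],
                            {"stable": "0123abcd", "default": "4567ef01"}))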
5137 @command('^push',
5137 @command('^push',
5138 [('f', 'force', None, _('force push')),
5138 [('f', 'force', None, _('force push')),
5139 ('r', 'rev', [],
5139 ('r', 'rev', [],
5140 _('a changeset intended to be included in the destination'),
5140 _('a changeset intended to be included in the destination'),
5141 _('REV')),
5141 _('REV')),
5142 ('B', 'bookmark', [], _("bookmark to push"), _('BOOKMARK')),
5142 ('B', 'bookmark', [], _("bookmark to push"), _('BOOKMARK')),
5143 ('b', 'branch', [],
5143 ('b', 'branch', [],
5144 _('a specific branch you would like to push'), _('BRANCH')),
5144 _('a specific branch you would like to push'), _('BRANCH')),
5145 ('', 'new-branch', False, _('allow pushing a new branch')),
5145 ('', 'new-branch', False, _('allow pushing a new branch')),
5146 ] + remoteopts,
5146 ] + remoteopts,
5147 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]'))
5147 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]'))
5148 def push(ui, repo, dest=None, **opts):
5148 def push(ui, repo, dest=None, **opts):
5149 """push changes to the specified destination
5149 """push changes to the specified destination
5150
5150
5151 Push changesets from the local repository to the specified
5151 Push changesets from the local repository to the specified
5152 destination.
5152 destination.
5153
5153
5154 This operation is symmetrical to pull: it is identical to a pull
5154 This operation is symmetrical to pull: it is identical to a pull
5155 in the destination repository from the current one.
5155 in the destination repository from the current one.
5156
5156
5157 By default, push will not allow creation of new heads at the
5157 By default, push will not allow creation of new heads at the
5158 destination, since multiple heads would make it unclear which head
5158 destination, since multiple heads would make it unclear which head
5159 to use. In this situation, it is recommended to pull and merge
5159 to use. In this situation, it is recommended to pull and merge
5160 before pushing.
5160 before pushing.
5161
5161
5162 Use --new-branch if you want to allow push to create a new named
5162 Use --new-branch if you want to allow push to create a new named
5163 branch that is not present at the destination. This allows you to
5163 branch that is not present at the destination. This allows you to
5164 only create a new branch without forcing other changes.
5164 only create a new branch without forcing other changes.
5165
5165
5166 .. note::
5166 .. note::
5167
5167
5168 Extra care should be taken with the -f/--force option,
5168 Extra care should be taken with the -f/--force option,
5169 which will push all new heads on all branches, an action which will
5169 which will push all new heads on all branches, an action which will
5170 almost always cause confusion for collaborators.
5170 almost always cause confusion for collaborators.
5171
5171
5172 If -r/--rev is used, the specified revision and all its ancestors
5172 If -r/--rev is used, the specified revision and all its ancestors
5173 will be pushed to the remote repository.
5173 will be pushed to the remote repository.
5174
5174
5175 If -B/--bookmark is used, the specified bookmarked revision, its
5175 If -B/--bookmark is used, the specified bookmarked revision, its
5176 ancestors, and the bookmark will be pushed to the remote
5176 ancestors, and the bookmark will be pushed to the remote
5177 repository.
5177 repository.
5178
5178
5179 Please see :hg:`help urls` for important details about ``ssh://``
5179 Please see :hg:`help urls` for important details about ``ssh://``
5180 URLs. If DESTINATION is omitted, a default path will be used.
5180 URLs. If DESTINATION is omitted, a default path will be used.
5181
5181
5182 Returns 0 if push was successful, 1 if nothing to push.
5182 Returns 0 if push was successful, 1 if nothing to push.
5183 """
5183 """
5184
5184
5185 if opts.get('bookmark'):
5185 if opts.get('bookmark'):
5186 ui.setconfig('bookmarks', 'pushing', opts['bookmark'], 'push')
5186 ui.setconfig('bookmarks', 'pushing', opts['bookmark'], 'push')
5187 for b in opts['bookmark']:
5187 for b in opts['bookmark']:
5188 # translate -B options to -r so changesets get pushed
5188 # translate -B options to -r so changesets get pushed
5189 if b in repo._bookmarks:
5189 if b in repo._bookmarks:
5190 opts.setdefault('rev', []).append(b)
5190 opts.setdefault('rev', []).append(b)
5191 else:
5191 else:
5192 # if we try to push a deleted bookmark, translate it to null
5192 # if we try to push a deleted bookmark, translate it to null
5193 # this lets simultaneous -r, -b options continue working
5193 # this lets simultaneous -r, -b options continue working
5194 opts.setdefault('rev', []).append("null")
5194 opts.setdefault('rev', []).append("null")
5195
5195
5196 dest = ui.expandpath(dest or 'default-push', dest or 'default')
5196 dest = ui.expandpath(dest or 'default-push', dest or 'default')
5197 dest, branches = hg.parseurl(dest, opts.get('branch'))
5197 dest, branches = hg.parseurl(dest, opts.get('branch'))
5198 ui.status(_('pushing to %s\n') % util.hidepassword(dest))
5198 ui.status(_('pushing to %s\n') % util.hidepassword(dest))
5199 revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
5199 revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
5200 try:
5200 try:
5201 other = hg.peer(repo, opts, dest)
5201 other = hg.peer(repo, opts, dest)
5202 except error.RepoError:
5202 except error.RepoError:
5203 if dest == "default-push":
5203 if dest == "default-push":
5204 raise util.Abort(_("default repository not configured!"),
5204 raise util.Abort(_("default repository not configured!"),
5205 hint=_('see the "path" section in "hg help config"'))
5205 hint=_('see the "path" section in "hg help config"'))
5206 else:
5206 else:
5207 raise
5207 raise
5208
5208
5209 if revs:
5209 if revs:
5210 revs = [repo.lookup(r) for r in scmutil.revrange(repo, revs)]
5210 revs = [repo.lookup(r) for r in scmutil.revrange(repo, revs)]
5211 if not revs:
5211 if not revs:
5212 raise util.Abort(_("specified revisions evaluate to an empty set"),
5212 raise util.Abort(_("specified revisions evaluate to an empty set"),
5213 hint=_("use different revision arguments"))
5213 hint=_("use different revision arguments"))
5214
5214
5215 repo._subtoppath = dest
5215 repo._subtoppath = dest
5216 try:
5216 try:
5217 # push subrepos depth-first for coherent ordering
5217 # push subrepos depth-first for coherent ordering
5218 c = repo['']
5218 c = repo['']
5219 subs = c.substate # only repos that are committed
5219 subs = c.substate # only repos that are committed
5220 for s in sorted(subs):
5220 for s in sorted(subs):
5221 result = c.sub(s).push(opts)
5221 result = c.sub(s).push(opts)
5222 if result == 0:
5222 if result == 0:
5223 return not result
5223 return not result
5224 finally:
5224 finally:
5225 del repo._subtoppath
5225 del repo._subtoppath
5226 pushop = exchange.push(repo, other, opts.get('force'), revs=revs,
5226 pushop = exchange.push(repo, other, opts.get('force'), revs=revs,
5227 newbranch=opts.get('new_branch'),
5227 newbranch=opts.get('new_branch'),
5228 bookmarks=opts.get('bookmark', ()))
5228 bookmarks=opts.get('bookmark', ()))
5229
5229
5230 result = not pushop.cgresult
5230 result = not pushop.cgresult
5231
5231
5232 if pushop.bkresult is not None:
5232 if pushop.bkresult is not None:
5233 if pushop.bkresult == 2:
5233 if pushop.bkresult == 2:
5234 result = 2
5234 result = 2
5235 elif not result and pushop.bkresult:
5235 elif not result and pushop.bkresult:
5236 result = 2
5236 result = 2
5237
5237
5238 return result
5238 return result
5239
5239
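Push translates -B options into -r arguments as shown above: a bookmark known locally contributes its own name as a revision, while a locally deleted one is pushed as "null" so that -r and -B keep working together. A compact sketch of that translation (names are made up)::

    def bookmarks_to_revs(bookmarks, localbookmarks):
        return [b if b in localbookmarks else "null" for b in bookmarks]

    print(bookmarks_to_revs(["feature-x", "gone"], {"feature-x"}))
    # -> ['feature-x', 'null']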
5240 @command('recover', [])
5240 @command('recover', [])
5241 def recover(ui, repo):
5241 def recover(ui, repo):
5242 """roll back an interrupted transaction
5242 """roll back an interrupted transaction
5243
5243
5244 Recover from an interrupted commit or pull.
5244 Recover from an interrupted commit or pull.
5245
5245
5246 This command tries to fix the repository status after an
5246 This command tries to fix the repository status after an
5247 interrupted operation. It should only be necessary when Mercurial
5247 interrupted operation. It should only be necessary when Mercurial
5248 suggests it.
5248 suggests it.
5249
5249
5250 Returns 0 if successful, 1 if nothing to recover or verify fails.
5250 Returns 0 if successful, 1 if nothing to recover or verify fails.
5251 """
5251 """
5252 if repo.recover():
5252 if repo.recover():
5253 return hg.verify(repo)
5253 return hg.verify(repo)
5254 return 1
5254 return 1
5255
5255
5256 @command('^remove|rm',
5256 @command('^remove|rm',
5257 [('A', 'after', None, _('record delete for missing files')),
5257 [('A', 'after', None, _('record delete for missing files')),
5258 ('f', 'force', None,
5258 ('f', 'force', None,
5259 _('remove (and delete) file even if added or modified')),
5259 _('remove (and delete) file even if added or modified')),
5260 ] + subrepoopts + walkopts,
5260 ] + subrepoopts + walkopts,
5261 _('[OPTION]... FILE...'),
5261 _('[OPTION]... FILE...'),
5262 inferrepo=True)
5262 inferrepo=True)
5263 def remove(ui, repo, *pats, **opts):
5263 def remove(ui, repo, *pats, **opts):
5264 """remove the specified files on the next commit
5264 """remove the specified files on the next commit
5265
5265
5266 Schedule the indicated files for removal from the current branch.
5266 Schedule the indicated files for removal from the current branch.
5267
5267
5268 This command schedules the files to be removed at the next commit.
5268 This command schedules the files to be removed at the next commit.
5269 To undo a remove before that, see :hg:`revert`. To undo added
5269 To undo a remove before that, see :hg:`revert`. To undo added
5270 files, see :hg:`forget`.
5270 files, see :hg:`forget`.
5271
5271
5272 .. container:: verbose
5272 .. container:: verbose
5273
5273
5274 -A/--after can be used to remove only files that have already
5274 -A/--after can be used to remove only files that have already
5275 been deleted, -f/--force can be used to force deletion, and -Af
5275 been deleted, -f/--force can be used to force deletion, and -Af
5276 can be used to remove files from the next revision without
5276 can be used to remove files from the next revision without
5277 deleting them from the working directory.
5277 deleting them from the working directory.
5278
5278
5279 The following table details the behavior of remove for different
5279 The following table details the behavior of remove for different
5280 file states (columns) and option combinations (rows). The file
5280 file states (columns) and option combinations (rows). The file
5281 states are Added [A], Clean [C], Modified [M] and Missing [!]
5281 states are Added [A], Clean [C], Modified [M] and Missing [!]
5282 (as reported by :hg:`status`). The actions are Warn, Remove
5282 (as reported by :hg:`status`). The actions are Warn, Remove
5283 (from branch) and Delete (from disk):
5283 (from branch) and Delete (from disk):
5284
5284
5285 ========= == == == ==
5285 ========= == == == ==
5286 opt/state A C M !
5286 opt/state A C M !
5287 ========= == == == ==
5287 ========= == == == ==
5288 none W RD W R
5288 none W RD W R
5289 -f R RD RD R
5289 -f R RD RD R
5290 -A W W W R
5290 -A W W W R
5291 -Af R R R R
5291 -Af R R R R
5292 ========= == == == ==
5292 ========= == == == ==
5293
5293
5294 Note that remove never deletes files in Added [A] state from the
5294 Note that remove never deletes files in Added [A] state from the
5295 working directory, not even if option --force is specified.
5295 working directory, not even if option --force is specified.
5296
5296
5297 Returns 0 on success, 1 if any warnings encountered.
5297 Returns 0 on success, 1 if any warnings encountered.
5298 """
5298 """
5299
5299
5300 after, force = opts.get('after'), opts.get('force')
5300 after, force = opts.get('after'), opts.get('force')
5301 if not pats and not after:
5301 if not pats and not after:
5302 raise util.Abort(_('no files specified'))
5302 raise util.Abort(_('no files specified'))
5303
5303
5304 m = scmutil.match(repo[None], pats, opts)
5304 m = scmutil.match(repo[None], pats, opts)
5305 subrepos = opts.get('subrepos')
5305 subrepos = opts.get('subrepos')
5306 return cmdutil.remove(ui, repo, m, "", after, force, subrepos)
5306 return cmdutil.remove(ui, repo, m, "", after, force, subrepos)
5307
5307
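# A sketch, not part of commands.py, that encodes the remove behaviour table
# from the docstring above as data. 'W' = warn, 'R' = remove from the branch,
# 'D' = delete from disk. Purely illustrative.

REMOVE_TABLE = {
    #        state:  A          C           M           !
    'none': {'A': 'W', 'C': 'RD', 'M': 'W',  '!': 'R'},
    '-f':   {'A': 'R', 'C': 'RD', 'M': 'RD', '!': 'R'},
    '-A':   {'A': 'W', 'C': 'W',  'M': 'W',  '!': 'R'},
    '-Af':  {'A': 'R', 'C': 'R',  'M': 'R',  '!': 'R'},
}

def remove_actions(opts, state):
    """Return the set of actions taken for a file state under given options."""
    return set(REMOVE_TABLE[opts][state])

# remove_actions('none', 'C') -> {'R', 'D'}  (removed from branch and deleted)
# remove_actions('-Af', 'A')  -> {'R'}       (never deleted from disk)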
5308 @command('rename|move|mv',
5308 @command('rename|move|mv',
5309 [('A', 'after', None, _('record a rename that has already occurred')),
5309 [('A', 'after', None, _('record a rename that has already occurred')),
5310 ('f', 'force', None, _('forcibly copy over an existing managed file')),
5310 ('f', 'force', None, _('forcibly copy over an existing managed file')),
5311 ] + walkopts + dryrunopts,
5311 ] + walkopts + dryrunopts,
5312 _('[OPTION]... SOURCE... DEST'))
5312 _('[OPTION]... SOURCE... DEST'))
5313 def rename(ui, repo, *pats, **opts):
5313 def rename(ui, repo, *pats, **opts):
5314 """rename files; equivalent of copy + remove
5314 """rename files; equivalent of copy + remove
5315
5315
5316 Mark dest as copies of sources; mark sources for deletion. If dest
5316 Mark dest as copies of sources; mark sources for deletion. If dest
5317 is a directory, copies are put in that directory. If dest is a
5317 is a directory, copies are put in that directory. If dest is a
5318 file, there can only be one source.
5318 file, there can only be one source.
5319
5319
5320 By default, this command copies the contents of files as they
5320 By default, this command copies the contents of files as they
5321 exist in the working directory. If invoked with -A/--after, the
5321 exist in the working directory. If invoked with -A/--after, the
5322 operation is recorded, but no copying is performed.
5322 operation is recorded, but no copying is performed.
5323
5323
5324 This command takes effect at the next commit. To undo a rename
5324 This command takes effect at the next commit. To undo a rename
5325 before that, see :hg:`revert`.
5325 before that, see :hg:`revert`.
5326
5326
5327 Returns 0 on success, 1 if errors are encountered.
5327 Returns 0 on success, 1 if errors are encountered.
5328 """
5328 """
5329 wlock = repo.wlock(False)
5329 wlock = repo.wlock(False)
5330 try:
5330 try:
5331 return cmdutil.copy(ui, repo, pats, opts, rename=True)
5331 return cmdutil.copy(ui, repo, pats, opts, rename=True)
5332 finally:
5332 finally:
5333 wlock.release()
5333 wlock.release()
5334
5334
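# A sketch, not part of commands.py, of recording a rename that has already
# happened on disk, per the -A/--after option described above. It assumes
# 'hg' is on PATH and both paths live inside the repository at 'cwd'.

import os
import subprocess

def record_rename(src, dst, cwd='.'):
    if not os.path.exists(os.path.join(cwd, dst)):
        raise ValueError('destination does not exist; drop --after instead')
    # equivalent of copy + remove: dst is marked as a copy of src,
    # and src is marked for removal at the next commit
    return subprocess.call(['hg', 'rename', '--after', src, dst], cwd=cwd)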
5335 @command('resolve',
5335 @command('resolve',
5336 [('a', 'all', None, _('select all unresolved files')),
5336 [('a', 'all', None, _('select all unresolved files')),
5337 ('l', 'list', None, _('list state of files needing merge')),
5337 ('l', 'list', None, _('list state of files needing merge')),
5338 ('m', 'mark', None, _('mark files as resolved')),
5338 ('m', 'mark', None, _('mark files as resolved')),
5339 ('u', 'unmark', None, _('mark files as unresolved')),
5339 ('u', 'unmark', None, _('mark files as unresolved')),
5340 ('n', 'no-status', None, _('hide status prefix'))]
5340 ('n', 'no-status', None, _('hide status prefix'))]
5341 + mergetoolopts + walkopts + formatteropts,
5341 + mergetoolopts + walkopts + formatteropts,
5342 _('[OPTION]... [FILE]...'),
5342 _('[OPTION]... [FILE]...'),
5343 inferrepo=True)
5343 inferrepo=True)
5344 def resolve(ui, repo, *pats, **opts):
5344 def resolve(ui, repo, *pats, **opts):
5345 """redo merges or set/view the merge status of files
5345 """redo merges or set/view the merge status of files
5346
5346
5347 Merges with unresolved conflicts are often the result of
5347 Merges with unresolved conflicts are often the result of
5348 non-interactive merging using the ``internal:merge`` configuration
5348 non-interactive merging using the ``internal:merge`` configuration
5349 setting, or a command-line merge tool like ``diff3``. The resolve
5349 setting, or a command-line merge tool like ``diff3``. The resolve
5350 command is used to manage the files involved in a merge, after
5350 command is used to manage the files involved in a merge, after
5351 :hg:`merge` has been run, and before :hg:`commit` is run (i.e. the
5351 :hg:`merge` has been run, and before :hg:`commit` is run (i.e. the
5352 working directory must have two parents). See :hg:`help
5352 working directory must have two parents). See :hg:`help
5353 merge-tools` for information on configuring merge tools.
5353 merge-tools` for information on configuring merge tools.
5354
5354
5355 The resolve command can be used in the following ways:
5355 The resolve command can be used in the following ways:
5356
5356
5357 - :hg:`resolve [--tool TOOL] FILE...`: attempt to re-merge the specified
5357 - :hg:`resolve [--tool TOOL] FILE...`: attempt to re-merge the specified
5358 files, discarding any previous merge attempts. Re-merging is not
5358 files, discarding any previous merge attempts. Re-merging is not
5359 performed for files already marked as resolved. Use ``--all/-a``
5359 performed for files already marked as resolved. Use ``--all/-a``
5360 to select all unresolved files. ``--tool`` can be used to specify
5360 to select all unresolved files. ``--tool`` can be used to specify
5361 the merge tool used for the given files. It overrides the HGMERGE
5361 the merge tool used for the given files. It overrides the HGMERGE
5362 environment variable and your configuration files. Previous file
5362 environment variable and your configuration files. Previous file
5363 contents are saved with a ``.orig`` suffix.
5363 contents are saved with a ``.orig`` suffix.
5364
5364
5365 - :hg:`resolve -m [FILE]`: mark a file as having been resolved
5365 - :hg:`resolve -m [FILE]`: mark a file as having been resolved
5366 (e.g. after having manually fixed-up the files). The default is
5366 (e.g. after having manually fixed-up the files). The default is
5367 to mark all unresolved files.
5367 to mark all unresolved files.
5368
5368
5369 - :hg:`resolve -u [FILE]...`: mark a file as unresolved. The
5369 - :hg:`resolve -u [FILE]...`: mark a file as unresolved. The
5370 default is to mark all resolved files.
5370 default is to mark all resolved files.
5371
5371
5372 - :hg:`resolve -l`: list files which had or still have conflicts.
5372 - :hg:`resolve -l`: list files which had or still have conflicts.
5373 In the printed list, ``U`` = unresolved and ``R`` = resolved.
5373 In the printed list, ``U`` = unresolved and ``R`` = resolved.
5374
5374
5375 Note that Mercurial will not let you commit files with unresolved
5375 Note that Mercurial will not let you commit files with unresolved
5376 merge conflicts. You must use :hg:`resolve -m ...` before you can
5376 merge conflicts. You must use :hg:`resolve -m ...` before you can
5377 commit after a conflicting merge.
5377 commit after a conflicting merge.
5378
5378
5379 Returns 0 on success, 1 if any files fail a resolve attempt.
5379 Returns 0 on success, 1 if any files fail a resolve attempt.
5380 """
5380 """
5381
5381
5382 all, mark, unmark, show, nostatus = \
5382 all, mark, unmark, show, nostatus = \
5383 [opts.get(o) for o in 'all mark unmark list no_status'.split()]
5383 [opts.get(o) for o in 'all mark unmark list no_status'.split()]
5384
5384
5385 if (show and (mark or unmark)) or (mark and unmark):
5385 if (show and (mark or unmark)) or (mark and unmark):
5386 raise util.Abort(_("too many options specified"))
5386 raise util.Abort(_("too many options specified"))
5387 if pats and all:
5387 if pats and all:
5388 raise util.Abort(_("can't specify --all and patterns"))
5388 raise util.Abort(_("can't specify --all and patterns"))
5389 if not (all or pats or show or mark or unmark):
5389 if not (all or pats or show or mark or unmark):
5390 raise util.Abort(_('no files or directories specified'),
5390 raise util.Abort(_('no files or directories specified'),
5391 hint=('use --all to re-merge all files'))
5391 hint=('use --all to re-merge all files'))
5392
5392
5393 if show:
5393 if show:
5394 fm = ui.formatter('resolve', opts)
5394 fm = ui.formatter('resolve', opts)
5395 ms = mergemod.mergestate(repo)
5395 ms = mergemod.mergestate(repo)
5396 m = scmutil.match(repo[None], pats, opts)
5396 m = scmutil.match(repo[None], pats, opts)
5397 for f in ms:
5397 for f in ms:
5398 if not m(f):
5398 if not m(f):
5399 continue
5399 continue
5400 l = 'resolve.' + {'u': 'unresolved', 'r': 'resolved'}[ms[f]]
5400 l = 'resolve.' + {'u': 'unresolved', 'r': 'resolved'}[ms[f]]
5401 fm.startitem()
5401 fm.startitem()
5402 fm.condwrite(not nostatus, 'status', '%s ', ms[f].upper(), label=l)
5402 fm.condwrite(not nostatus, 'status', '%s ', ms[f].upper(), label=l)
5403 fm.write('path', '%s\n', f, label=l)
5403 fm.write('path', '%s\n', f, label=l)
5404 fm.end()
5404 fm.end()
5405 return 0
5405 return 0
5406
5406
5407 wlock = repo.wlock()
5407 wlock = repo.wlock()
5408 try:
5408 try:
5409 ms = mergemod.mergestate(repo)
5409 ms = mergemod.mergestate(repo)
5410
5410
5411 if not (ms.active() or repo.dirstate.p2() != nullid):
5411 if not (ms.active() or repo.dirstate.p2() != nullid):
5412 raise util.Abort(
5412 raise util.Abort(
5413 _('resolve command not applicable when not merging'))
5413 _('resolve command not applicable when not merging'))
5414
5414
5415 m = scmutil.match(repo[None], pats, opts)
5415 m = scmutil.match(repo[None], pats, opts)
5416 ret = 0
5416 ret = 0
5417 didwork = False
5417 didwork = False
5418
5418
5419 for f in ms:
5419 for f in ms:
5420 if not m(f):
5420 if not m(f):
5421 continue
5421 continue
5422
5422
5423 didwork = True
5423 didwork = True
5424
5424
5425 if mark:
5425 if mark:
5426 ms.mark(f, "r")
5426 ms.mark(f, "r")
5427 elif unmark:
5427 elif unmark:
5428 ms.mark(f, "u")
5428 ms.mark(f, "u")
5429 else:
5429 else:
5430 wctx = repo[None]
5430 wctx = repo[None]
5431
5431
5432 # backup pre-resolve (merge uses .orig for its own purposes)
5432 # backup pre-resolve (merge uses .orig for its own purposes)
5433 a = repo.wjoin(f)
5433 a = repo.wjoin(f)
5434 util.copyfile(a, a + ".resolve")
5434 util.copyfile(a, a + ".resolve")
5435
5435
5436 try:
5436 try:
5437 # resolve file
5437 # resolve file
5438 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
5438 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
5439 'resolve')
5439 'resolve')
5440 if ms.resolve(f, wctx):
5440 if ms.resolve(f, wctx):
5441 ret = 1
5441 ret = 1
5442 finally:
5442 finally:
5443 ui.setconfig('ui', 'forcemerge', '', 'resolve')
5443 ui.setconfig('ui', 'forcemerge', '', 'resolve')
5444 ms.commit()
5444 ms.commit()
5445
5445
5446 # replace filemerge's .orig file with our resolve file
5446 # replace filemerge's .orig file with our resolve file
5447 util.rename(a + ".resolve", a + ".orig")
5447 util.rename(a + ".resolve", a + ".orig")
5448
5448
5449 ms.commit()
5449 ms.commit()
5450
5450
5451 if not didwork and pats:
5451 if not didwork and pats:
5452 ui.warn(_("arguments do not match paths that need resolving\n"))
5452 ui.warn(_("arguments do not match paths that need resolving\n"))
5453
5453
5454 finally:
5454 finally:
5455 wlock.release()
5455 wlock.release()
5456
5456
5457 # Nudge users into finishing an unfinished operation
5457 # Nudge users into finishing an unfinished operation
5458 if not list(ms.unresolved()):
5458 if not list(ms.unresolved()):
5459 ui.status(_('(no more unresolved files)\n'))
5459 ui.status(_('(no more unresolved files)\n'))
5460
5460
5461 return ret
5461 return ret
5462
5462
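# A sketch, not part of commands.py, that splits the output of 'hg resolve -l'
# into resolved and unresolved files using the U/R status prefixes documented
# above. The sample text is made up for illustration.

def split_resolve_list(text):
    resolved, unresolved = [], []
    for line in text.splitlines():
        if not line.strip():
            continue
        status, path = line.split(None, 1)
        (resolved if status == 'R' else unresolved).append(path)
    return resolved, unresolved

sample = "U src/conflicted.c\nR docs/readme.txt\n"
# split_resolve_list(sample) -> (['docs/readme.txt'], ['src/conflicted.c'])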
5463 @command('revert',
5463 @command('revert',
5464 [('a', 'all', None, _('revert all changes when no arguments given')),
5464 [('a', 'all', None, _('revert all changes when no arguments given')),
5465 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
5465 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
5466 ('r', 'rev', '', _('revert to the specified revision'), _('REV')),
5466 ('r', 'rev', '', _('revert to the specified revision'), _('REV')),
5467 ('C', 'no-backup', None, _('do not save backup copies of files')),
5467 ('C', 'no-backup', None, _('do not save backup copies of files')),
5468 ('i', 'interactive', None,
5468 ('i', 'interactive', None,
5469 _('interactively select the changes (EXPERIMENTAL)')),
5469 _('interactively select the changes (EXPERIMENTAL)')),
5470 ] + walkopts + dryrunopts,
5470 ] + walkopts + dryrunopts,
5471 _('[OPTION]... [-r REV] [NAME]...'))
5471 _('[OPTION]... [-r REV] [NAME]...'))
5472 def revert(ui, repo, *pats, **opts):
5472 def revert(ui, repo, *pats, **opts):
5473 """restore files to their checkout state
5473 """restore files to their checkout state
5474
5474
5475 .. note::
5475 .. note::
5476
5476
5477 To check out earlier revisions, you should use :hg:`update REV`.
5477 To check out earlier revisions, you should use :hg:`update REV`.
5478 To cancel an uncommitted merge (and lose your changes),
5478 To cancel an uncommitted merge (and lose your changes),
5479 use :hg:`update --clean .`.
5479 use :hg:`update --clean .`.
5480
5480
5481 With no revision specified, revert the specified files or directories
5481 With no revision specified, revert the specified files or directories
5482 to the contents they had in the parent of the working directory.
5482 to the contents they had in the parent of the working directory.
5483 This restores the contents of files to an unmodified
5483 This restores the contents of files to an unmodified
5484 state and unschedules adds, removes, copies, and renames. If the
5484 state and unschedules adds, removes, copies, and renames. If the
5485 working directory has two parents, you must explicitly specify a
5485 working directory has two parents, you must explicitly specify a
5486 revision.
5486 revision.
5487
5487
5488 Using the -r/--rev or -d/--date options, revert the given files or
5488 Using the -r/--rev or -d/--date options, revert the given files or
5489 directories to their states as of a specific revision. Because
5489 directories to their states as of a specific revision. Because
5490 revert does not change the working directory parents, this will
5490 revert does not change the working directory parents, this will
5491 cause these files to appear modified. This can be helpful to "back
5491 cause these files to appear modified. This can be helpful to "back
5492 out" some or all of an earlier change. See :hg:`backout` for a
5492 out" some or all of an earlier change. See :hg:`backout` for a
5493 related method.
5493 related method.
5494
5494
5495 Modified files are saved with a .orig suffix before reverting.
5495 Modified files are saved with a .orig suffix before reverting.
5496 To disable these backups, use --no-backup.
5496 To disable these backups, use --no-backup.
5497
5497
5498 See :hg:`help dates` for a list of formats valid for -d/--date.
5498 See :hg:`help dates` for a list of formats valid for -d/--date.
5499
5499
5500 Returns 0 on success.
5500 Returns 0 on success.
5501 """
5501 """
5502
5502
5503 if opts.get("date"):
5503 if opts.get("date"):
5504 if opts.get("rev"):
5504 if opts.get("rev"):
5505 raise util.Abort(_("you can't specify a revision and a date"))
5505 raise util.Abort(_("you can't specify a revision and a date"))
5506 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
5506 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
5507
5507
5508 parent, p2 = repo.dirstate.parents()
5508 parent, p2 = repo.dirstate.parents()
5509 if not opts.get('rev') and p2 != nullid:
5509 if not opts.get('rev') and p2 != nullid:
5510 # revert after merge is a trap for new users (issue2915)
5510 # revert after merge is a trap for new users (issue2915)
5511 raise util.Abort(_('uncommitted merge with no revision specified'),
5511 raise util.Abort(_('uncommitted merge with no revision specified'),
5512 hint=_('use "hg update" or see "hg help revert"'))
5512 hint=_('use "hg update" or see "hg help revert"'))
5513
5513
5514 ctx = scmutil.revsingle(repo, opts.get('rev'))
5514 ctx = scmutil.revsingle(repo, opts.get('rev'))
5515
5515
5516 if (not (pats or opts.get('include') or opts.get('exclude') or
5516 if (not (pats or opts.get('include') or opts.get('exclude') or
5517 opts.get('all') or opts.get('interactive'))):
5517 opts.get('all') or opts.get('interactive'))):
5518 msg = _("no files or directories specified")
5518 msg = _("no files or directories specified")
5519 if p2 != nullid:
5519 if p2 != nullid:
5520 hint = _("uncommitted merge, use --all to discard all changes,"
5520 hint = _("uncommitted merge, use --all to discard all changes,"
5521 " or 'hg update -C .' to abort the merge")
5521 " or 'hg update -C .' to abort the merge")
5522 raise util.Abort(msg, hint=hint)
5522 raise util.Abort(msg, hint=hint)
5523 dirty = util.any(repo.status())
5523 dirty = util.any(repo.status())
5524 node = ctx.node()
5524 node = ctx.node()
5525 if node != parent:
5525 if node != parent:
5526 if dirty:
5526 if dirty:
5527 hint = _("uncommitted changes, use --all to discard all"
5527 hint = _("uncommitted changes, use --all to discard all"
5528 " changes, or 'hg update %s' to update") % ctx.rev()
5528 " changes, or 'hg update %s' to update") % ctx.rev()
5529 else:
5529 else:
5530 hint = _("use --all to revert all files,"
5530 hint = _("use --all to revert all files,"
5531 " or 'hg update %s' to update") % ctx.rev()
5531 " or 'hg update %s' to update") % ctx.rev()
5532 elif dirty:
5532 elif dirty:
5533 hint = _("uncommitted changes, use --all to discard all changes")
5533 hint = _("uncommitted changes, use --all to discard all changes")
5534 else:
5534 else:
5535 hint = _("use --all to revert all files")
5535 hint = _("use --all to revert all files")
5536 raise util.Abort(msg, hint=hint)
5536 raise util.Abort(msg, hint=hint)
5537
5537
5538 return cmdutil.revert(ui, repo, ctx, (parent, p2), *pats, **opts)
5538 return cmdutil.revert(ui, repo, ctx, (parent, p2), *pats, **opts)
5539
5539
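# A sketch, not part of commands.py, of reverting one file to its state in a
# given revision, as described above. A '.orig' backup is left next to the
# file unless no_backup is set. Assumes 'hg' is on PATH.

import subprocess

def revert_file(path, rev='.', no_backup=False, cwd='.'):
    cmd = ['hg', 'revert', '-r', rev, path]
    if no_backup:
        cmd.append('--no-backup')
    return subprocess.call(cmd, cwd=cwd)

# revert_file('setup.py', rev='42')  # file now matches revision 42;
#                                    # the working directory parent is unchanged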
5540 @command('rollback', dryrunopts +
5540 @command('rollback', dryrunopts +
5541 [('f', 'force', False, _('ignore safety measures'))])
5541 [('f', 'force', False, _('ignore safety measures'))])
5542 def rollback(ui, repo, **opts):
5542 def rollback(ui, repo, **opts):
5543 """roll back the last transaction (DANGEROUS) (DEPRECATED)
5543 """roll back the last transaction (DANGEROUS) (DEPRECATED)
5544
5544
5545 Please use :hg:`commit --amend` instead of rollback to correct
5545 Please use :hg:`commit --amend` instead of rollback to correct
5546 mistakes in the last commit.
5546 mistakes in the last commit.
5547
5547
5548 This command should be used with care. There is only one level of
5548 This command should be used with care. There is only one level of
5549 rollback, and there is no way to undo a rollback. It will also
5549 rollback, and there is no way to undo a rollback. It will also
5550 restore the dirstate at the time of the last transaction, losing
5550 restore the dirstate at the time of the last transaction, losing
5551 any dirstate changes since that time. This command does not alter
5551 any dirstate changes since that time. This command does not alter
5552 the working directory.
5552 the working directory.
5553
5553
5554 Transactions are used to encapsulate the effects of all commands
5554 Transactions are used to encapsulate the effects of all commands
5555 that create new changesets or propagate existing changesets into a
5555 that create new changesets or propagate existing changesets into a
5556 repository.
5556 repository.
5557
5557
5558 .. container:: verbose
5558 .. container:: verbose
5559
5559
5560 For example, the following commands are transactional, and their
5560 For example, the following commands are transactional, and their
5561 effects can be rolled back:
5561 effects can be rolled back:
5562
5562
5563 - commit
5563 - commit
5564 - import
5564 - import
5565 - pull
5565 - pull
5566 - push (with this repository as the destination)
5566 - push (with this repository as the destination)
5567 - unbundle
5567 - unbundle
5568
5568
5569 To avoid permanent data loss, rollback will refuse to roll back a
5569 To avoid permanent data loss, rollback will refuse to roll back a
5570 commit transaction if it isn't checked out. Use --force to
5570 commit transaction if it isn't checked out. Use --force to
5571 override this protection.
5571 override this protection.
5572
5572
5573 This command is not intended for use on public repositories. Once
5573 This command is not intended for use on public repositories. Once
5574 changes are visible for pull by other users, rolling a transaction
5574 changes are visible for pull by other users, rolling a transaction
5575 back locally is ineffective (someone else may already have pulled
5575 back locally is ineffective (someone else may already have pulled
5576 the changes). Furthermore, a race is possible with readers of the
5576 the changes). Furthermore, a race is possible with readers of the
5577 repository; for example an in-progress pull from the repository
5577 repository; for example an in-progress pull from the repository
5578 may fail if a rollback is performed.
5578 may fail if a rollback is performed.
5579
5579
5580 Returns 0 on success, 1 if no rollback data is available.
5580 Returns 0 on success, 1 if no rollback data is available.
5581 """
5581 """
5582 return repo.rollback(dryrun=opts.get('dry_run'),
5582 return repo.rollback(dryrun=opts.get('dry_run'),
5583 force=opts.get('force'))
5583 force=opts.get('force'))
5584
5584
5585 @command('root', [])
5585 @command('root', [])
5586 def root(ui, repo):
5586 def root(ui, repo):
5587 """print the root (top) of the current working directory
5587 """print the root (top) of the current working directory
5588
5588
5589 Print the root directory of the current repository.
5589 Print the root directory of the current repository.
5590
5590
5591 Returns 0 on success.
5591 Returns 0 on success.
5592 """
5592 """
5593 ui.write(repo.root + "\n")
5593 ui.write(repo.root + "\n")
5594
5594
5595 @command('^serve',
5595 @command('^serve',
5596 [('A', 'accesslog', '', _('name of access log file to write to'),
5596 [('A', 'accesslog', '', _('name of access log file to write to'),
5597 _('FILE')),
5597 _('FILE')),
5598 ('d', 'daemon', None, _('run server in background')),
5598 ('d', 'daemon', None, _('run server in background')),
5599 ('', 'daemon-pipefds', '', _('used internally by daemon mode'), _('FILE')),
5599 ('', 'daemon-pipefds', '', _('used internally by daemon mode'), _('FILE')),
5600 ('E', 'errorlog', '', _('name of error log file to write to'), _('FILE')),
5600 ('E', 'errorlog', '', _('name of error log file to write to'), _('FILE')),
5601 # use string type, then we can check if something was passed
5601 # use string type, then we can check if something was passed
5602 ('p', 'port', '', _('port to listen on (default: 8000)'), _('PORT')),
5602 ('p', 'port', '', _('port to listen on (default: 8000)'), _('PORT')),
5603 ('a', 'address', '', _('address to listen on (default: all interfaces)'),
5603 ('a', 'address', '', _('address to listen on (default: all interfaces)'),
5604 _('ADDR')),
5604 _('ADDR')),
5605 ('', 'prefix', '', _('prefix path to serve from (default: server root)'),
5605 ('', 'prefix', '', _('prefix path to serve from (default: server root)'),
5606 _('PREFIX')),
5606 _('PREFIX')),
5607 ('n', 'name', '',
5607 ('n', 'name', '',
5608 _('name to show in web pages (default: working directory)'), _('NAME')),
5608 _('name to show in web pages (default: working directory)'), _('NAME')),
5609 ('', 'web-conf', '',
5609 ('', 'web-conf', '',
5610 _('name of the hgweb config file (see "hg help hgweb")'), _('FILE')),
5610 _('name of the hgweb config file (see "hg help hgweb")'), _('FILE')),
5611 ('', 'webdir-conf', '', _('name of the hgweb config file (DEPRECATED)'),
5611 ('', 'webdir-conf', '', _('name of the hgweb config file (DEPRECATED)'),
5612 _('FILE')),
5612 _('FILE')),
5613 ('', 'pid-file', '', _('name of file to write process ID to'), _('FILE')),
5613 ('', 'pid-file', '', _('name of file to write process ID to'), _('FILE')),
5614 ('', 'stdio', None, _('for remote clients')),
5614 ('', 'stdio', None, _('for remote clients')),
5615 ('', 'cmdserver', '', _('for remote clients'), _('MODE')),
5615 ('', 'cmdserver', '', _('for remote clients'), _('MODE')),
5616 ('t', 'templates', '', _('web templates to use'), _('TEMPLATE')),
5616 ('t', 'templates', '', _('web templates to use'), _('TEMPLATE')),
5617 ('', 'style', '', _('template style to use'), _('STYLE')),
5617 ('', 'style', '', _('template style to use'), _('STYLE')),
5618 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
5618 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
5619 ('', 'certificate', '', _('SSL certificate file'), _('FILE'))],
5619 ('', 'certificate', '', _('SSL certificate file'), _('FILE'))],
5620 _('[OPTION]...'),
5620 _('[OPTION]...'),
5621 optionalrepo=True)
5621 optionalrepo=True)
5622 def serve(ui, repo, **opts):
5622 def serve(ui, repo, **opts):
5623 """start stand-alone webserver
5623 """start stand-alone webserver
5624
5624
5625 Start a local HTTP repository browser and pull server. You can use
5625 Start a local HTTP repository browser and pull server. You can use
5626 this for ad-hoc sharing and browsing of repositories. It is
5626 this for ad-hoc sharing and browsing of repositories. It is
5627 recommended to use a real web server to serve a repository for
5627 recommended to use a real web server to serve a repository for
5628 longer periods of time.
5628 longer periods of time.
5629
5629
5630 Please note that the server does not implement access control.
5630 Please note that the server does not implement access control.
5631 This means that, by default, anybody can read from the server and
5631 This means that, by default, anybody can read from the server and
5632 nobody can write to it. Set the ``web.allow_push``
5632 nobody can write to it. Set the ``web.allow_push``
5633 option to ``*`` to allow everybody to push to the server. You
5633 option to ``*`` to allow everybody to push to the server. You
5634 should use a real web server if you need to authenticate users.
5634 should use a real web server if you need to authenticate users.
5635
5635
5636 By default, the server logs accesses to stdout and errors to
5636 By default, the server logs accesses to stdout and errors to
5637 stderr. Use the -A/--accesslog and -E/--errorlog options to log to
5637 stderr. Use the -A/--accesslog and -E/--errorlog options to log to
5638 files.
5638 files.
5639
5639
5640 To have the server choose a free port number to listen on, specify
5640 To have the server choose a free port number to listen on, specify
5641 a port number of 0; in this case, the server will print the port
5641 a port number of 0; in this case, the server will print the port
5642 number it uses.
5642 number it uses.
5643
5643
5644 Returns 0 on success.
5644 Returns 0 on success.
5645 """
5645 """
5646
5646
5647 if opts["stdio"] and opts["cmdserver"]:
5647 if opts["stdio"] and opts["cmdserver"]:
5648 raise util.Abort(_("cannot use --stdio with --cmdserver"))
5648 raise util.Abort(_("cannot use --stdio with --cmdserver"))
5649
5649
5650 if opts["stdio"]:
5650 if opts["stdio"]:
5651 if repo is None:
5651 if repo is None:
5652 raise error.RepoError(_("there is no Mercurial repository here"
5652 raise error.RepoError(_("there is no Mercurial repository here"
5653 " (.hg not found)"))
5653 " (.hg not found)"))
5654 s = sshserver.sshserver(ui, repo)
5654 s = sshserver.sshserver(ui, repo)
5655 s.serve_forever()
5655 s.serve_forever()
5656
5656
5657 if opts["cmdserver"]:
5657 if opts["cmdserver"]:
5658 service = commandserver.createservice(ui, repo, opts)
5658 service = commandserver.createservice(ui, repo, opts)
5659 return cmdutil.service(opts, initfn=service.init, runfn=service.run)
5659 return cmdutil.service(opts, initfn=service.init, runfn=service.run)
5660
5660
5661 # this way we can check if something was given in the command-line
5661 # this way we can check if something was given in the command-line
5662 if opts.get('port'):
5662 if opts.get('port'):
5663 opts['port'] = util.getport(opts.get('port'))
5663 opts['port'] = util.getport(opts.get('port'))
5664
5664
5665 if repo:
5665 if repo:
5666 baseui = repo.baseui
5666 baseui = repo.baseui
5667 else:
5667 else:
5668 baseui = ui
5668 baseui = ui
5669 optlist = ("name templates style address port prefix ipv6"
5669 optlist = ("name templates style address port prefix ipv6"
5670 " accesslog errorlog certificate encoding")
5670 " accesslog errorlog certificate encoding")
5671 for o in optlist.split():
5671 for o in optlist.split():
5672 val = opts.get(o, '')
5672 val = opts.get(o, '')
5673 if val in (None, ''): # should check against default options instead
5673 if val in (None, ''): # should check against default options instead
5674 continue
5674 continue
5675 baseui.setconfig("web", o, val, 'serve')
5675 baseui.setconfig("web", o, val, 'serve')
5676 if repo and repo.ui != baseui:
5676 if repo and repo.ui != baseui:
5677 repo.ui.setconfig("web", o, val, 'serve')
5677 repo.ui.setconfig("web", o, val, 'serve')
5678
5678
5679 o = opts.get('web_conf') or opts.get('webdir_conf')
5679 o = opts.get('web_conf') or opts.get('webdir_conf')
5680 if not o:
5680 if not o:
5681 if not repo:
5681 if not repo:
5682 raise error.RepoError(_("there is no Mercurial repository"
5682 raise error.RepoError(_("there is no Mercurial repository"
5683 " here (.hg not found)"))
5683 " here (.hg not found)"))
5684 o = repo
5684 o = repo
5685
5685
5686 app = hgweb.hgweb(o, baseui=baseui)
5686 app = hgweb.hgweb(o, baseui=baseui)
5687 service = httpservice(ui, app, opts)
5687 service = httpservice(ui, app, opts)
5688 cmdutil.service(opts, initfn=service.init, runfn=service.run)
5688 cmdutil.service(opts, initfn=service.init, runfn=service.run)
5689
5689
5690 class httpservice(object):
5690 class httpservice(object):
5691 def __init__(self, ui, app, opts):
5691 def __init__(self, ui, app, opts):
5692 self.ui = ui
5692 self.ui = ui
5693 self.app = app
5693 self.app = app
5694 self.opts = opts
5694 self.opts = opts
5695
5695
5696 def init(self):
5696 def init(self):
5697 util.setsignalhandler()
5697 util.setsignalhandler()
5698 self.httpd = hgweb_server.create_server(self.ui, self.app)
5698 self.httpd = hgweb_server.create_server(self.ui, self.app)
5699
5699
5700 if self.opts['port'] and not self.ui.verbose:
5700 if self.opts['port'] and not self.ui.verbose:
5701 return
5701 return
5702
5702
5703 if self.httpd.prefix:
5703 if self.httpd.prefix:
5704 prefix = self.httpd.prefix.strip('/') + '/'
5704 prefix = self.httpd.prefix.strip('/') + '/'
5705 else:
5705 else:
5706 prefix = ''
5706 prefix = ''
5707
5707
5708 port = ':%d' % self.httpd.port
5708 port = ':%d' % self.httpd.port
5709 if port == ':80':
5709 if port == ':80':
5710 port = ''
5710 port = ''
5711
5711
5712 bindaddr = self.httpd.addr
5712 bindaddr = self.httpd.addr
5713 if bindaddr == '0.0.0.0':
5713 if bindaddr == '0.0.0.0':
5714 bindaddr = '*'
5714 bindaddr = '*'
5715 elif ':' in bindaddr: # IPv6
5715 elif ':' in bindaddr: # IPv6
5716 bindaddr = '[%s]' % bindaddr
5716 bindaddr = '[%s]' % bindaddr
5717
5717
5718 fqaddr = self.httpd.fqaddr
5718 fqaddr = self.httpd.fqaddr
5719 if ':' in fqaddr:
5719 if ':' in fqaddr:
5720 fqaddr = '[%s]' % fqaddr
5720 fqaddr = '[%s]' % fqaddr
5721 if self.opts['port']:
5721 if self.opts['port']:
5722 write = self.ui.status
5722 write = self.ui.status
5723 else:
5723 else:
5724 write = self.ui.write
5724 write = self.ui.write
5725 write(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
5725 write(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
5726 (fqaddr, port, prefix, bindaddr, self.httpd.port))
5726 (fqaddr, port, prefix, bindaddr, self.httpd.port))
5727 self.ui.flush() # avoid buffering of status message
5727 self.ui.flush() # avoid buffering of status message
5728
5728
5729 def run(self):
5729 def run(self):
5730 self.httpd.serve_forever()
5730 self.httpd.serve_forever()
5731
5731
5732
5732
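# A standalone sketch, not part of commands.py, of the address formatting done
# by httpservice.init() above when it prints the 'listening at ...' banner.
# The function name is an illustrative assumption.

def listen_banner(fqaddr, port, prefix, bindaddr):
    prefix = prefix.strip('/') + '/' if prefix else ''
    portstr = '' if port == 80 else ':%d' % port
    if bindaddr == '0.0.0.0':
        bindaddr = '*'
    elif ':' in bindaddr:            # IPv6
        bindaddr = '[%s]' % bindaddr
    if ':' in fqaddr:
        fqaddr = '[%s]' % fqaddr
    return ('listening at http://%s%s/%s (bound to %s:%d)'
            % (fqaddr, portstr, prefix, bindaddr, port))

# listen_banner('example.com', 8000, '', '0.0.0.0')
#   -> 'listening at http://example.com:8000/ (bound to *:8000)'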
5733 @command('^status|st',
5733 @command('^status|st',
5734 [('A', 'all', None, _('show status of all files')),
5734 [('A', 'all', None, _('show status of all files')),
5735 ('m', 'modified', None, _('show only modified files')),
5735 ('m', 'modified', None, _('show only modified files')),
5736 ('a', 'added', None, _('show only added files')),
5736 ('a', 'added', None, _('show only added files')),
5737 ('r', 'removed', None, _('show only removed files')),
5737 ('r', 'removed', None, _('show only removed files')),
5738 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
5738 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
5739 ('c', 'clean', None, _('show only files without changes')),
5739 ('c', 'clean', None, _('show only files without changes')),
5740 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
5740 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
5741 ('i', 'ignored', None, _('show only ignored files')),
5741 ('i', 'ignored', None, _('show only ignored files')),
5742 ('n', 'no-status', None, _('hide status prefix')),
5742 ('n', 'no-status', None, _('hide status prefix')),
5743 ('C', 'copies', None, _('show source of copied files')),
5743 ('C', 'copies', None, _('show source of copied files')),
5744 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
5744 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
5745 ('', 'rev', [], _('show difference from revision'), _('REV')),
5745 ('', 'rev', [], _('show difference from revision'), _('REV')),
5746 ('', 'change', '', _('list the changed files of a revision'), _('REV')),
5746 ('', 'change', '', _('list the changed files of a revision'), _('REV')),
5747 ] + walkopts + subrepoopts + formatteropts,
5747 ] + walkopts + subrepoopts + formatteropts,
5748 _('[OPTION]... [FILE]...'),
5748 _('[OPTION]... [FILE]...'),
5749 inferrepo=True)
5749 inferrepo=True)
5750 def status(ui, repo, *pats, **opts):
5750 def status(ui, repo, *pats, **opts):
5751 """show changed files in the working directory
5751 """show changed files in the working directory
5752
5752
5753 Show status of files in the repository. If names are given, only
5753 Show status of files in the repository. If names are given, only
5754 files that match are shown. Files that are clean or ignored or
5754 files that match are shown. Files that are clean or ignored or
5755 the source of a copy/move operation are not listed unless
5755 the source of a copy/move operation are not listed unless
5756 -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
5756 -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
5757 Unless options described with "show only ..." are given, the
5757 Unless options described with "show only ..." are given, the
5758 options -mardu are used.
5758 options -mardu are used.
5759
5759
5760 Option -q/--quiet hides untracked (unknown and ignored) files
5760 Option -q/--quiet hides untracked (unknown and ignored) files
5761 unless explicitly requested with -u/--unknown or -i/--ignored.
5761 unless explicitly requested with -u/--unknown or -i/--ignored.
5762
5762
5763 .. note::
5763 .. note::
5764
5764
5765 status may appear to disagree with diff if permissions have
5765 status may appear to disagree with diff if permissions have
5766 changed or a merge has occurred. The standard diff format does
5766 changed or a merge has occurred. The standard diff format does
5767 not report permission changes and diff only reports changes
5767 not report permission changes and diff only reports changes
5768 relative to one merge parent.
5768 relative to one merge parent.
5769
5769
5770 If one revision is given, it is used as the base revision.
5770 If one revision is given, it is used as the base revision.
5771 If two revisions are given, the differences between them are
5771 If two revisions are given, the differences between them are
5772 shown. The --change option can also be used as a shortcut to list
5772 shown. The --change option can also be used as a shortcut to list
5773 the changed files of a revision from its first parent.
5773 the changed files of a revision from its first parent.
5774
5774
5775 The codes used to show the status of files are::
5775 The codes used to show the status of files are::
5776
5776
5777 M = modified
5777 M = modified
5778 A = added
5778 A = added
5779 R = removed
5779 R = removed
5780 C = clean
5780 C = clean
5781 ! = missing (deleted by non-hg command, but still tracked)
5781 ! = missing (deleted by non-hg command, but still tracked)
5782 ? = not tracked
5782 ? = not tracked
5783 I = ignored
5783 I = ignored
5784 = origin of the previous file (with --copies)
5784 = origin of the previous file (with --copies)
5785
5785
5786 .. container:: verbose
5786 .. container:: verbose
5787
5787
5788 Examples:
5788 Examples:
5789
5789
5790 - show changes in the working directory relative to a
5790 - show changes in the working directory relative to a
5791 changeset::
5791 changeset::
5792
5792
5793 hg status --rev 9353
5793 hg status --rev 9353
5794
5794
5795 - show changes in the working directory relative to the
5795 - show changes in the working directory relative to the
5796 current directory (see :hg:`help patterns` for more information)::
5796 current directory (see :hg:`help patterns` for more information)::
5797
5797
5798 hg status re:
5798 hg status re:
5799
5799
5800 - show all changes including copies in an existing changeset::
5800 - show all changes including copies in an existing changeset::
5801
5801
5802 hg status --copies --change 9353
5802 hg status --copies --change 9353
5803
5803
5804 - get a NUL separated list of added files, suitable for xargs::
5804 - get a NUL separated list of added files, suitable for xargs::
5805
5805
5806 hg status -an0
5806 hg status -an0
5807
5807
5808 Returns 0 on success.
5808 Returns 0 on success.
5809 """
5809 """
5810
5810
5811 revs = opts.get('rev')
5811 revs = opts.get('rev')
5812 change = opts.get('change')
5812 change = opts.get('change')
5813
5813
5814 if revs and change:
5814 if revs and change:
5815 msg = _('cannot specify --rev and --change at the same time')
5815 msg = _('cannot specify --rev and --change at the same time')
5816 raise util.Abort(msg)
5816 raise util.Abort(msg)
5817 elif change:
5817 elif change:
5818 node2 = scmutil.revsingle(repo, change, None).node()
5818 node2 = scmutil.revsingle(repo, change, None).node()
5819 node1 = repo[node2].p1().node()
5819 node1 = repo[node2].p1().node()
5820 else:
5820 else:
5821 node1, node2 = scmutil.revpair(repo, revs)
5821 node1, node2 = scmutil.revpair(repo, revs)
5822
5822
5823 if pats:
5823 if pats:
5824 cwd = repo.getcwd()
5824 cwd = repo.getcwd()
5825 else:
5825 else:
5826 cwd = ''
5826 cwd = ''
5827
5827
5828 if opts.get('print0'):
5828 if opts.get('print0'):
5829 end = '\0'
5829 end = '\0'
5830 else:
5830 else:
5831 end = '\n'
5831 end = '\n'
5832 copy = {}
5832 copy = {}
5833 states = 'modified added removed deleted unknown ignored clean'.split()
5833 states = 'modified added removed deleted unknown ignored clean'.split()
5834 show = [k for k in states if opts.get(k)]
5834 show = [k for k in states if opts.get(k)]
5835 if opts.get('all'):
5835 if opts.get('all'):
5836 show += ui.quiet and (states[:4] + ['clean']) or states
5836 show += ui.quiet and (states[:4] + ['clean']) or states
5837 if not show:
5837 if not show:
5838 if ui.quiet:
5838 if ui.quiet:
5839 show = states[:4]
5839 show = states[:4]
5840 else:
5840 else:
5841 show = states[:5]
5841 show = states[:5]
5842
5842
5843 m = scmutil.match(repo[node2], pats, opts)
5843 m = scmutil.match(repo[node2], pats, opts)
5844 stat = repo.status(node1, node2, m,
5844 stat = repo.status(node1, node2, m,
5845 'ignored' in show, 'clean' in show, 'unknown' in show,
5845 'ignored' in show, 'clean' in show, 'unknown' in show,
5846 opts.get('subrepos'))
5846 opts.get('subrepos'))
5847 changestates = zip(states, 'MAR!?IC', stat)
5847 changestates = zip(states, 'MAR!?IC', stat)
5848
5848
5849 if (opts.get('all') or opts.get('copies')
5849 if (opts.get('all') or opts.get('copies')
5850 or ui.configbool('ui', 'statuscopies')) and not opts.get('no_status'):
5850 or ui.configbool('ui', 'statuscopies')) and not opts.get('no_status'):
5851 copy = copies.pathcopies(repo[node1], repo[node2], m)
5851 copy = copies.pathcopies(repo[node1], repo[node2], m)
5852
5852
5853 fm = ui.formatter('status', opts)
5853 fm = ui.formatter('status', opts)
5854 fmt = '%s' + end
5854 fmt = '%s' + end
5855 showchar = not opts.get('no_status')
5855 showchar = not opts.get('no_status')
5856
5856
5857 for state, char, files in changestates:
5857 for state, char, files in changestates:
5858 if state in show:
5858 if state in show:
5859 label = 'status.' + state
5859 label = 'status.' + state
5860 for f in files:
5860 for f in files:
5861 fm.startitem()
5861 fm.startitem()
5862 fm.condwrite(showchar, 'status', '%s ', char, label=label)
5862 fm.condwrite(showchar, 'status', '%s ', char, label=label)
5863 fm.write('path', fmt, repo.pathto(f, cwd), label=label)
5863 fm.write('path', fmt, repo.pathto(f, cwd), label=label)
5864 if f in copy:
5864 if f in copy:
5865 fm.write("copy", ' %s' + end, repo.pathto(copy[f], cwd),
5865 fm.write("copy", ' %s' + end, repo.pathto(copy[f], cwd),
5866 label='status.copied')
5866 label='status.copied')
5867 fm.end()
5867 fm.end()
5868
5868
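# A sketch, not part of commands.py, that reads 'hg status -C' output using
# the status codes listed above. With -C/--copies the origin of a copied file
# appears on the following, indented line. The sample text is made up.

def parse_status(text):
    entries = []                      # (code, path, copy source or None)
    for line in text.splitlines():
        if not line:
            continue
        if line.startswith(' '):      # origin of the previous file (--copies)
            code, path, _ = entries[-1]
            entries[-1] = (code, path, line.strip())
        else:
            entries.append((line[0], line[2:], None))
    return entries

sample = "M hacked.c\nA copied.c\n  original.c\n? scratch.txt\n"
# parse_status(sample) ->
#   [('M', 'hacked.c', None), ('A', 'copied.c', 'original.c'),
#    ('?', 'scratch.txt', None)]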
5869 @command('^summary|sum',
5869 @command('^summary|sum',
5870 [('', 'remote', None, _('check for push and pull'))], '[--remote]')
5870 [('', 'remote', None, _('check for push and pull'))], '[--remote]')
5871 def summary(ui, repo, **opts):
5871 def summary(ui, repo, **opts):
5872 """summarize working directory state
5872 """summarize working directory state
5873
5873
5874 This generates a brief summary of the working directory state,
5874 This generates a brief summary of the working directory state,
5875 including parents, branch, commit status, and available updates.
5875 including parents, branch, commit status, and available updates.
5876
5876
5877 With the --remote option, this will check the default paths for
5877 With the --remote option, this will check the default paths for
5878 incoming and outgoing changes. This can be time-consuming.
5878 incoming and outgoing changes. This can be time-consuming.
5879
5879
5880 Returns 0 on success.
5880 Returns 0 on success.
5881 """
5881 """
5882
5882
5883 ctx = repo[None]
5883 ctx = repo[None]
5884 parents = ctx.parents()
5884 parents = ctx.parents()
5885 pnode = parents[0].node()
5885 pnode = parents[0].node()
5886 marks = []
5886 marks = []
5887
5887
5888 for p in parents:
5888 for p in parents:
5889 # label with log.changeset (instead of log.parent) since this
5889 # label with log.changeset (instead of log.parent) since this
5890 # shows a working directory parent *changeset*:
5890 # shows a working directory parent *changeset*:
5891 # i18n: column positioning for "hg summary"
5891 # i18n: column positioning for "hg summary"
5892 ui.write(_('parent: %d:%s ') % (p.rev(), str(p)),
5892 ui.write(_('parent: %d:%s ') % (p.rev(), str(p)),
5893 label='log.changeset changeset.%s' % p.phasestr())
5893 label='log.changeset changeset.%s' % p.phasestr())
5894 ui.write(' '.join(p.tags()), label='log.tag')
5894 ui.write(' '.join(p.tags()), label='log.tag')
5895 if p.bookmarks():
5895 if p.bookmarks():
5896 marks.extend(p.bookmarks())
5896 marks.extend(p.bookmarks())
5897 if p.rev() == -1:
5897 if p.rev() == -1:
5898 if not len(repo):
5898 if not len(repo):
5899 ui.write(_(' (empty repository)'))
5899 ui.write(_(' (empty repository)'))
5900 else:
5900 else:
5901 ui.write(_(' (no revision checked out)'))
5901 ui.write(_(' (no revision checked out)'))
5902 ui.write('\n')
5902 ui.write('\n')
5903 if p.description():
5903 if p.description():
5904 ui.status(' ' + p.description().splitlines()[0].strip() + '\n',
5904 ui.status(' ' + p.description().splitlines()[0].strip() + '\n',
5905 label='log.summary')
5905 label='log.summary')
5906
5906
5907 branch = ctx.branch()
5907 branch = ctx.branch()
5908 bheads = repo.branchheads(branch)
5908 bheads = repo.branchheads(branch)
5909 # i18n: column positioning for "hg summary"
5909 # i18n: column positioning for "hg summary"
5910 m = _('branch: %s\n') % branch
5910 m = _('branch: %s\n') % branch
5911 if branch != 'default':
5911 if branch != 'default':
5912 ui.write(m, label='log.branch')
5912 ui.write(m, label='log.branch')
5913 else:
5913 else:
5914 ui.status(m, label='log.branch')
5914 ui.status(m, label='log.branch')
5915
5915
5916 if marks:
5916 if marks:
5917 current = repo._bookmarkcurrent
5917 current = repo._activebookmark
5918 # i18n: column positioning for "hg summary"
5918 # i18n: column positioning for "hg summary"
5919 ui.write(_('bookmarks:'), label='log.bookmark')
5919 ui.write(_('bookmarks:'), label='log.bookmark')
5920 if current is not None:
5920 if current is not None:
5921 if current in marks:
5921 if current in marks:
5922 ui.write(' *' + current, label='bookmarks.current')
5922 ui.write(' *' + current, label='bookmarks.current')
5923 marks.remove(current)
5923 marks.remove(current)
5924 else:
5924 else:
5925 ui.write(' [%s]' % current, label='bookmarks.current')
5925 ui.write(' [%s]' % current, label='bookmarks.current')
5926 for m in marks:
5926 for m in marks:
5927 ui.write(' ' + m, label='log.bookmark')
5927 ui.write(' ' + m, label='log.bookmark')
5928 ui.write('\n', label='log.bookmark')
5928 ui.write('\n', label='log.bookmark')
5929
5929
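# The paired lines above show the rename this changeset makes
# (repo._bookmarkcurrent -> repo._activebookmark); the display logic itself is
# unchanged. A standalone sketch of that display logic, assuming 'marks' holds
# the bookmarks of the working-directory parents, kept in comments so the
# surrounding function body is untouched:
#
# def bookmark_line(active, marks):
#     parts = ['bookmarks:']
#     if active is not None:
#         if active in marks:
#             parts.append('*' + active)        # active and on a parent
#             marks = [m for m in marks if m != active]
#         else:
#             parts.append('[%s]' % active)     # active but not on a parent
#     parts.extend(marks)
#     return ' '.join(parts)
#
# bookmark_line('feature', ['feature', 'stable']) -> 'bookmarks: *feature stable'
# bookmark_line('feature', ['stable'])            -> 'bookmarks: [feature] stable'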
5930 status = repo.status(unknown=True)
5930 status = repo.status(unknown=True)
5931
5931
5932 c = repo.dirstate.copies()
5932 c = repo.dirstate.copies()
5933 copied, renamed = [], []
5933 copied, renamed = [], []
5934 for d, s in c.iteritems():
5934 for d, s in c.iteritems():
5935 if s in status.removed:
5935 if s in status.removed:
5936 status.removed.remove(s)
5936 status.removed.remove(s)
5937 renamed.append(d)
5937 renamed.append(d)
5938 else:
5938 else:
5939 copied.append(d)
5939 copied.append(d)
5940 if d in status.added:
5940 if d in status.added:
5941 status.added.remove(d)
5941 status.added.remove(d)
5942
5942
5943 ms = mergemod.mergestate(repo)
5943 ms = mergemod.mergestate(repo)
5944 unresolved = [f for f in ms if ms[f] == 'u']
5944 unresolved = [f for f in ms if ms[f] == 'u']
5945
5945
5946 subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
5946 subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
5947
5947
5948 labels = [(ui.label(_('%d modified'), 'status.modified'), status.modified),
5948 labels = [(ui.label(_('%d modified'), 'status.modified'), status.modified),
5949 (ui.label(_('%d added'), 'status.added'), status.added),
5949 (ui.label(_('%d added'), 'status.added'), status.added),
5950 (ui.label(_('%d removed'), 'status.removed'), status.removed),
5950 (ui.label(_('%d removed'), 'status.removed'), status.removed),
5951 (ui.label(_('%d renamed'), 'status.copied'), renamed),
5951 (ui.label(_('%d renamed'), 'status.copied'), renamed),
5952 (ui.label(_('%d copied'), 'status.copied'), copied),
5952 (ui.label(_('%d copied'), 'status.copied'), copied),
5953 (ui.label(_('%d deleted'), 'status.deleted'), status.deleted),
5953 (ui.label(_('%d deleted'), 'status.deleted'), status.deleted),
5954 (ui.label(_('%d unknown'), 'status.unknown'), status.unknown),
5954 (ui.label(_('%d unknown'), 'status.unknown'), status.unknown),
5955 (ui.label(_('%d unresolved'), 'resolve.unresolved'), unresolved),
5955 (ui.label(_('%d unresolved'), 'resolve.unresolved'), unresolved),
5956 (ui.label(_('%d subrepos'), 'status.modified'), subs)]
5956 (ui.label(_('%d subrepos'), 'status.modified'), subs)]
5957 t = []
5957 t = []
5958 for l, s in labels:
5958 for l, s in labels:
5959 if s:
5959 if s:
5960 t.append(l % len(s))
5960 t.append(l % len(s))
5961
5961
5962 t = ', '.join(t)
5962 t = ', '.join(t)
5963 cleanworkdir = False
5963 cleanworkdir = False
5964
5964
5965 if repo.vfs.exists('updatestate'):
5965 if repo.vfs.exists('updatestate'):
5966 t += _(' (interrupted update)')
5966 t += _(' (interrupted update)')
5967 elif len(parents) > 1:
5967 elif len(parents) > 1:
5968 t += _(' (merge)')
5968 t += _(' (merge)')
5969 elif branch != parents[0].branch():
5969 elif branch != parents[0].branch():
5970 t += _(' (new branch)')
5970 t += _(' (new branch)')
5971 elif (parents[0].closesbranch() and
5971 elif (parents[0].closesbranch() and
5972 pnode in repo.branchheads(branch, closed=True)):
5972 pnode in repo.branchheads(branch, closed=True)):
5973 t += _(' (head closed)')
5973 t += _(' (head closed)')
5974 elif not (status.modified or status.added or status.removed or renamed or
5974 elif not (status.modified or status.added or status.removed or renamed or
5975 copied or subs):
5975 copied or subs):
5976 t += _(' (clean)')
5976 t += _(' (clean)')
5977 cleanworkdir = True
5977 cleanworkdir = True
5978 elif pnode not in bheads:
5978 elif pnode not in bheads:
5979 t += _(' (new branch head)')
5979 t += _(' (new branch head)')
5980
5980
5981 if cleanworkdir:
5981 if cleanworkdir:
5982 # i18n: column positioning for "hg summary"
5982 # i18n: column positioning for "hg summary"
5983 ui.status(_('commit: %s\n') % t.strip())
5983 ui.status(_('commit: %s\n') % t.strip())
5984 else:
5984 else:
5985 # i18n: column positioning for "hg summary"
5985 # i18n: column positioning for "hg summary"
5986 ui.write(_('commit: %s\n') % t.strip())
5986 ui.write(_('commit: %s\n') % t.strip())
5987
5987
5988 # all ancestors of branch heads - all ancestors of parent = new csets
5988 # all ancestors of branch heads - all ancestors of parent = new csets
5989 new = len(repo.changelog.findmissing([pctx.node() for pctx in parents],
5989 new = len(repo.changelog.findmissing([pctx.node() for pctx in parents],
5990 bheads))
5990 bheads))
5991
5991
5992 if new == 0:
5992 if new == 0:
5993 # i18n: column positioning for "hg summary"
5993 # i18n: column positioning for "hg summary"
5994 ui.status(_('update: (current)\n'))
5994 ui.status(_('update: (current)\n'))
5995 elif pnode not in bheads:
5995 elif pnode not in bheads:
5996 # i18n: column positioning for "hg summary"
5996 # i18n: column positioning for "hg summary"
5997 ui.write(_('update: %d new changesets (update)\n') % new)
5997 ui.write(_('update: %d new changesets (update)\n') % new)
5998 else:
5998 else:
5999 # i18n: column positioning for "hg summary"
5999 # i18n: column positioning for "hg summary"
6000 ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
6000 ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
6001 (new, len(bheads)))
6001 (new, len(bheads)))
6002
6002
6003 cmdutil.summaryhooks(ui, repo)
6003 cmdutil.summaryhooks(ui, repo)
6004
6004
6005 if opts.get('remote'):
6005 if opts.get('remote'):
6006 needsincoming, needsoutgoing = True, True
6006 needsincoming, needsoutgoing = True, True
6007 else:
6007 else:
6008 needsincoming, needsoutgoing = False, False
6008 needsincoming, needsoutgoing = False, False
6009 for i, o in cmdutil.summaryremotehooks(ui, repo, opts, None):
6009 for i, o in cmdutil.summaryremotehooks(ui, repo, opts, None):
6010 if i:
6010 if i:
6011 needsincoming = True
6011 needsincoming = True
6012 if o:
6012 if o:
6013 needsoutgoing = True
6013 needsoutgoing = True
6014 if not needsincoming and not needsoutgoing:
6014 if not needsincoming and not needsoutgoing:
6015 return
6015 return
6016
6016
6017 def getincoming():
6017 def getincoming():
6018 source, branches = hg.parseurl(ui.expandpath('default'))
6018 source, branches = hg.parseurl(ui.expandpath('default'))
6019 sbranch = branches[0]
6019 sbranch = branches[0]
6020 try:
6020 try:
6021 other = hg.peer(repo, {}, source)
6021 other = hg.peer(repo, {}, source)
6022 except error.RepoError:
6022 except error.RepoError:
6023 if opts.get('remote'):
6023 if opts.get('remote'):
6024 raise
6024 raise
6025 return source, sbranch, None, None, None
6025 return source, sbranch, None, None, None
6026 revs, checkout = hg.addbranchrevs(repo, other, branches, None)
6026 revs, checkout = hg.addbranchrevs(repo, other, branches, None)
6027 if revs:
6027 if revs:
6028 revs = [other.lookup(rev) for rev in revs]
6028 revs = [other.lookup(rev) for rev in revs]
6029 ui.debug('comparing with %s\n' % util.hidepassword(source))
6029 ui.debug('comparing with %s\n' % util.hidepassword(source))
6030 repo.ui.pushbuffer()
6030 repo.ui.pushbuffer()
6031 commoninc = discovery.findcommonincoming(repo, other, heads=revs)
6031 commoninc = discovery.findcommonincoming(repo, other, heads=revs)
6032 repo.ui.popbuffer()
6032 repo.ui.popbuffer()
6033 return source, sbranch, other, commoninc, commoninc[1]
6033 return source, sbranch, other, commoninc, commoninc[1]
6034
6034
6035 if needsincoming:
6035 if needsincoming:
6036 source, sbranch, sother, commoninc, incoming = getincoming()
6036 source, sbranch, sother, commoninc, incoming = getincoming()
6037 else:
6037 else:
6038 source = sbranch = sother = commoninc = incoming = None
6038 source = sbranch = sother = commoninc = incoming = None
6039
6039
6040 def getoutgoing():
6040 def getoutgoing():
6041 dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
6041 dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
6042 dbranch = branches[0]
6042 dbranch = branches[0]
6043 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
6043 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
6044 if source != dest:
6044 if source != dest:
6045 try:
6045 try:
6046 dother = hg.peer(repo, {}, dest)
6046 dother = hg.peer(repo, {}, dest)
6047 except error.RepoError:
6047 except error.RepoError:
6048 if opts.get('remote'):
6048 if opts.get('remote'):
6049 raise
6049 raise
6050 return dest, dbranch, None, None
6050 return dest, dbranch, None, None
6051 ui.debug('comparing with %s\n' % util.hidepassword(dest))
6051 ui.debug('comparing with %s\n' % util.hidepassword(dest))
6052 elif sother is None:
6052 elif sother is None:
6053 # there is no explicit destination peer, but source one is invalid
6053 # there is no explicit destination peer, but source one is invalid
6054 return dest, dbranch, None, None
6054 return dest, dbranch, None, None
6055 else:
6055 else:
6056 dother = sother
6056 dother = sother
6057 if (source != dest or (sbranch is not None and sbranch != dbranch)):
6057 if (source != dest or (sbranch is not None and sbranch != dbranch)):
6058 common = None
6058 common = None
6059 else:
6059 else:
6060 common = commoninc
6060 common = commoninc
6061 if revs:
6061 if revs:
6062 revs = [repo.lookup(rev) for rev in revs]
6062 revs = [repo.lookup(rev) for rev in revs]
6063 repo.ui.pushbuffer()
6063 repo.ui.pushbuffer()
6064 outgoing = discovery.findcommonoutgoing(repo, dother, onlyheads=revs,
6064 outgoing = discovery.findcommonoutgoing(repo, dother, onlyheads=revs,
6065 commoninc=common)
6065 commoninc=common)
6066 repo.ui.popbuffer()
6066 repo.ui.popbuffer()
6067 return dest, dbranch, dother, outgoing
6067 return dest, dbranch, dother, outgoing
6068
6068
6069 if needsoutgoing:
6069 if needsoutgoing:
6070 dest, dbranch, dother, outgoing = getoutgoing()
6070 dest, dbranch, dother, outgoing = getoutgoing()
6071 else:
6071 else:
6072 dest = dbranch = dother = outgoing = None
6072 dest = dbranch = dother = outgoing = None
6073
6073
6074 if opts.get('remote'):
6074 if opts.get('remote'):
6075 t = []
6075 t = []
6076 if incoming:
6076 if incoming:
6077 t.append(_('1 or more incoming'))
6077 t.append(_('1 or more incoming'))
6078 o = outgoing.missing
6078 o = outgoing.missing
6079 if o:
6079 if o:
6080 t.append(_('%d outgoing') % len(o))
6080 t.append(_('%d outgoing') % len(o))
6081 other = dother or sother
6081 other = dother or sother
6082 if 'bookmarks' in other.listkeys('namespaces'):
6082 if 'bookmarks' in other.listkeys('namespaces'):
6083 counts = bookmarks.summary(repo, other)
6083 counts = bookmarks.summary(repo, other)
6084 if counts[0] > 0:
6084 if counts[0] > 0:
6085 t.append(_('%d incoming bookmarks') % counts[0])
6085 t.append(_('%d incoming bookmarks') % counts[0])
6086 if counts[1] > 0:
6086 if counts[1] > 0:
6087 t.append(_('%d outgoing bookmarks') % counts[1])
6087 t.append(_('%d outgoing bookmarks') % counts[1])
6088
6088
6089 if t:
6089 if t:
6090 # i18n: column positioning for "hg summary"
6090 # i18n: column positioning for "hg summary"
6091 ui.write(_('remote: %s\n') % (', '.join(t)))
6091 ui.write(_('remote: %s\n') % (', '.join(t)))
6092 else:
6092 else:
6093 # i18n: column positioning for "hg summary"
6093 # i18n: column positioning for "hg summary"
6094 ui.status(_('remote: (synced)\n'))
6094 ui.status(_('remote: (synced)\n'))
6095
6095
6096 cmdutil.summaryremotehooks(ui, repo, opts,
6096 cmdutil.summaryremotehooks(ui, repo, opts,
6097 ((source, sbranch, sother, commoninc),
6097 ((source, sbranch, sother, commoninc),
6098 (dest, dbranch, dother, outgoing)))
6098 (dest, dbranch, dother, outgoing)))
6099
6099
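The branch above assembles the "remote:" line of `hg summary` from the incoming flag, the outgoing changeset count and the bookmark comparison. As a standalone illustration of that string assembly only (a Python 3 sketch; the function name and arguments are invented, not Mercurial API):

    def remote_summary(has_incoming, outgoing, in_marks=0, out_marks=0):
        """Collect the parts that apply and join them, or report a
        synced state when nothing is pending -- as in the code above."""
        t = []
        if has_incoming:
            t.append('1 or more incoming')
        if outgoing:
            t.append('%d outgoing' % outgoing)
        if in_marks:
            t.append('%d incoming bookmarks' % in_marks)
        if out_marks:
            t.append('%d outgoing bookmarks' % out_marks)
        if t:
            return 'remote: %s' % ', '.join(t)
        return 'remote: (synced)'

    print(remote_summary(True, 2, out_marks=1))
    # remote: 1 or more incoming, 2 outgoing, 1 outgoing bookmarks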
6100 @command('tag',
6100 @command('tag',
6101 [('f', 'force', None, _('force tag')),
6101 [('f', 'force', None, _('force tag')),
6102 ('l', 'local', None, _('make the tag local')),
6102 ('l', 'local', None, _('make the tag local')),
6103 ('r', 'rev', '', _('revision to tag'), _('REV')),
6103 ('r', 'rev', '', _('revision to tag'), _('REV')),
6104 ('', 'remove', None, _('remove a tag')),
6104 ('', 'remove', None, _('remove a tag')),
6105 # -l/--local is already there, commitopts cannot be used
6105 # -l/--local is already there, commitopts cannot be used
6106 ('e', 'edit', None, _('invoke editor on commit messages')),
6106 ('e', 'edit', None, _('invoke editor on commit messages')),
6107 ('m', 'message', '', _('use text as commit message'), _('TEXT')),
6107 ('m', 'message', '', _('use text as commit message'), _('TEXT')),
6108 ] + commitopts2,
6108 ] + commitopts2,
6109 _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...'))
6109 _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...'))
6110 def tag(ui, repo, name1, *names, **opts):
6110 def tag(ui, repo, name1, *names, **opts):
6111 """add one or more tags for the current or given revision
6111 """add one or more tags for the current or given revision
6112
6112
6113 Name a particular revision using <name>.
6113 Name a particular revision using <name>.
6114
6114
6115 Tags are used to name particular revisions of the repository and are
6115 Tags are used to name particular revisions of the repository and are
6116 very useful to compare different revisions, to go back to significant
6116 very useful to compare different revisions, to go back to significant
6117 earlier versions or to mark branch points as releases, etc. Changing
6117 earlier versions or to mark branch points as releases, etc. Changing
6118 an existing tag is normally disallowed; use -f/--force to override.
6118 an existing tag is normally disallowed; use -f/--force to override.
6119
6119
6120 If no revision is given, the parent of the working directory is
6120 If no revision is given, the parent of the working directory is
6121 used.
6121 used.
6122
6122
6123 To facilitate version control, distribution, and merging of tags,
6123 To facilitate version control, distribution, and merging of tags,
6124 they are stored as a file named ".hgtags" which is managed similarly
6124 they are stored as a file named ".hgtags" which is managed similarly
6125 to other project files and can be hand-edited if necessary. This
6125 to other project files and can be hand-edited if necessary. This
6126 also means that tagging creates a new commit. The file
6126 also means that tagging creates a new commit. The file
6127 ".hg/localtags" is used for local tags (not shared among
6127 ".hg/localtags" is used for local tags (not shared among
6128 repositories).
6128 repositories).
6129
6129
6130 Tag commits are usually made at the head of a branch. If the parent
6130 Tag commits are usually made at the head of a branch. If the parent
6131 of the working directory is not a branch head, :hg:`tag` aborts; use
6131 of the working directory is not a branch head, :hg:`tag` aborts; use
6132 -f/--force to force the tag commit to be based on a non-head
6132 -f/--force to force the tag commit to be based on a non-head
6133 changeset.
6133 changeset.
6134
6134
6135 See :hg:`help dates` for a list of formats valid for -d/--date.
6135 See :hg:`help dates` for a list of formats valid for -d/--date.
6136
6136
6137 Since tag names have priority over branch names during revision
6137 Since tag names have priority over branch names during revision
6138 lookup, using an existing branch name as a tag name is discouraged.
6138 lookup, using an existing branch name as a tag name is discouraged.
6139
6139
6140 Returns 0 on success.
6140 Returns 0 on success.
6141 """
6141 """
6142 wlock = lock = None
6142 wlock = lock = None
6143 try:
6143 try:
6144 wlock = repo.wlock()
6144 wlock = repo.wlock()
6145 lock = repo.lock()
6145 lock = repo.lock()
6146 rev_ = "."
6146 rev_ = "."
6147 names = [t.strip() for t in (name1,) + names]
6147 names = [t.strip() for t in (name1,) + names]
6148 if len(names) != len(set(names)):
6148 if len(names) != len(set(names)):
6149 raise util.Abort(_('tag names must be unique'))
6149 raise util.Abort(_('tag names must be unique'))
6150 for n in names:
6150 for n in names:
6151 scmutil.checknewlabel(repo, n, 'tag')
6151 scmutil.checknewlabel(repo, n, 'tag')
6152 if not n:
6152 if not n:
6153 raise util.Abort(_('tag names cannot consist entirely of '
6153 raise util.Abort(_('tag names cannot consist entirely of '
6154 'whitespace'))
6154 'whitespace'))
6155 if opts.get('rev') and opts.get('remove'):
6155 if opts.get('rev') and opts.get('remove'):
6156 raise util.Abort(_("--rev and --remove are incompatible"))
6156 raise util.Abort(_("--rev and --remove are incompatible"))
6157 if opts.get('rev'):
6157 if opts.get('rev'):
6158 rev_ = opts['rev']
6158 rev_ = opts['rev']
6159 message = opts.get('message')
6159 message = opts.get('message')
6160 if opts.get('remove'):
6160 if opts.get('remove'):
6161 if opts.get('local'):
6161 if opts.get('local'):
6162 expectedtype = 'local'
6162 expectedtype = 'local'
6163 else:
6163 else:
6164 expectedtype = 'global'
6164 expectedtype = 'global'
6165
6165
6166 for n in names:
6166 for n in names:
6167 if not repo.tagtype(n):
6167 if not repo.tagtype(n):
6168 raise util.Abort(_("tag '%s' does not exist") % n)
6168 raise util.Abort(_("tag '%s' does not exist") % n)
6169 if repo.tagtype(n) != expectedtype:
6169 if repo.tagtype(n) != expectedtype:
6170 if expectedtype == 'global':
6170 if expectedtype == 'global':
6171 raise util.Abort(_("tag '%s' is not a global tag") % n)
6171 raise util.Abort(_("tag '%s' is not a global tag") % n)
6172 else:
6172 else:
6173 raise util.Abort(_("tag '%s' is not a local tag") % n)
6173 raise util.Abort(_("tag '%s' is not a local tag") % n)
6174 rev_ = nullid
6174 rev_ = nullid
6175 if not message:
6175 if not message:
6176 # we don't translate commit messages
6176 # we don't translate commit messages
6177 message = 'Removed tag %s' % ', '.join(names)
6177 message = 'Removed tag %s' % ', '.join(names)
6178 elif not opts.get('force'):
6178 elif not opts.get('force'):
6179 for n in names:
6179 for n in names:
6180 if n in repo.tags():
6180 if n in repo.tags():
6181 raise util.Abort(_("tag '%s' already exists "
6181 raise util.Abort(_("tag '%s' already exists "
6182 "(use -f to force)") % n)
6182 "(use -f to force)") % n)
6183 if not opts.get('local'):
6183 if not opts.get('local'):
6184 p1, p2 = repo.dirstate.parents()
6184 p1, p2 = repo.dirstate.parents()
6185 if p2 != nullid:
6185 if p2 != nullid:
6186 raise util.Abort(_('uncommitted merge'))
6186 raise util.Abort(_('uncommitted merge'))
6187 bheads = repo.branchheads()
6187 bheads = repo.branchheads()
6188 if not opts.get('force') and bheads and p1 not in bheads:
6188 if not opts.get('force') and bheads and p1 not in bheads:
6189 raise util.Abort(_('not at a branch head (use -f to force)'))
6189 raise util.Abort(_('not at a branch head (use -f to force)'))
6190 r = scmutil.revsingle(repo, rev_).node()
6190 r = scmutil.revsingle(repo, rev_).node()
6191
6191
6192 if not message:
6192 if not message:
6193 # we don't translate commit messages
6193 # we don't translate commit messages
6194 message = ('Added tag %s for changeset %s' %
6194 message = ('Added tag %s for changeset %s' %
6195 (', '.join(names), short(r)))
6195 (', '.join(names), short(r)))
6196
6196
6197 date = opts.get('date')
6197 date = opts.get('date')
6198 if date:
6198 if date:
6199 date = util.parsedate(date)
6199 date = util.parsedate(date)
6200
6200
6201 if opts.get('remove'):
6201 if opts.get('remove'):
6202 editform = 'tag.remove'
6202 editform = 'tag.remove'
6203 else:
6203 else:
6204 editform = 'tag.add'
6204 editform = 'tag.add'
6205 editor = cmdutil.getcommiteditor(editform=editform, **opts)
6205 editor = cmdutil.getcommiteditor(editform=editform, **opts)
6206
6206
6207 # don't allow tagging the null rev
6207 # don't allow tagging the null rev
6208 if (not opts.get('remove') and
6208 if (not opts.get('remove') and
6209 scmutil.revsingle(repo, rev_).rev() == nullrev):
6209 scmutil.revsingle(repo, rev_).rev() == nullrev):
6210 raise util.Abort(_("cannot tag null revision"))
6210 raise util.Abort(_("cannot tag null revision"))
6211
6211
6212 repo.tag(names, r, message, opts.get('local'), opts.get('user'), date,
6212 repo.tag(names, r, message, opts.get('local'), opts.get('user'), date,
6213 editor=editor)
6213 editor=editor)
6214 finally:
6214 finally:
6215 release(lock, wlock)
6215 release(lock, wlock)
6216
6216
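As a usage sketch for the options declared above, the command can be driven from a script; the repository path and tag names here are hypothetical, and 'hg' is assumed to be on PATH (Python 3):

    import subprocess

    repo = '/path/to/repo'  # hypothetical working copy

    # tag the working directory parent with an explicit message
    subprocess.run(['hg', '-R', repo, 'tag', '-m', 'Tagging release 1.0', 'v1.0'],
                   check=True)

    # tag an older changeset; -f is required if the name already exists
    subprocess.run(['hg', '-R', repo, 'tag', '-f', '-r', '42', 'v0.9'], check=True)

    # record the removal of a tag in .hgtags
    subprocess.run(['hg', '-R', repo, 'tag', '--remove', 'v0.9'], check=True)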
6217 @command('tags', formatteropts, '')
6217 @command('tags', formatteropts, '')
6218 def tags(ui, repo, **opts):
6218 def tags(ui, repo, **opts):
6219 """list repository tags
6219 """list repository tags
6220
6220
6221 This lists both regular and local tags. When the -v/--verbose
6221 This lists both regular and local tags. When the -v/--verbose
6222 switch is used, a third column "local" is printed for local tags.
6222 switch is used, a third column "local" is printed for local tags.
6223
6223
6224 Returns 0 on success.
6224 Returns 0 on success.
6225 """
6225 """
6226
6226
6227 fm = ui.formatter('tags', opts)
6227 fm = ui.formatter('tags', opts)
6228 hexfunc = fm.hexfunc
6228 hexfunc = fm.hexfunc
6229 tagtype = ""
6229 tagtype = ""
6230
6230
6231 for t, n in reversed(repo.tagslist()):
6231 for t, n in reversed(repo.tagslist()):
6232 hn = hexfunc(n)
6232 hn = hexfunc(n)
6233 label = 'tags.normal'
6233 label = 'tags.normal'
6234 tagtype = ''
6234 tagtype = ''
6235 if repo.tagtype(t) == 'local':
6235 if repo.tagtype(t) == 'local':
6236 label = 'tags.local'
6236 label = 'tags.local'
6237 tagtype = 'local'
6237 tagtype = 'local'
6238
6238
6239 fm.startitem()
6239 fm.startitem()
6240 fm.write('tag', '%s', t, label=label)
6240 fm.write('tag', '%s', t, label=label)
6241 fmt = " " * (30 - encoding.colwidth(t)) + ' %5d:%s'
6241 fmt = " " * (30 - encoding.colwidth(t)) + ' %5d:%s'
6242 fm.condwrite(not ui.quiet, 'rev node', fmt,
6242 fm.condwrite(not ui.quiet, 'rev node', fmt,
6243 repo.changelog.rev(n), hn, label=label)
6243 repo.changelog.rev(n), hn, label=label)
6244 fm.condwrite(ui.verbose and tagtype, 'type', ' %s',
6244 fm.condwrite(ui.verbose and tagtype, 'type', ' %s',
6245 tagtype, label=label)
6245 tagtype, label=label)
6246 fm.plain('\n')
6246 fm.plain('\n')
6247 fm.end()
6247 fm.end()
6248
6248
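Given the column layout written above (tag name padded to 30 columns, then 'rev:node'), the default output can be consumed from a script. A sketch that assumes that layout and an installed 'hg' (Python 3):

    import subprocess

    out = subprocess.run(['hg', 'tags'], capture_output=True, text=True,
                         check=True).stdout
    for line in out.splitlines():
        # each line looks like '<name><padding>   <rev>:<node>'
        name, revnode = line.rsplit(None, 1)
        rev, node = revnode.split(':', 1)
        print(name, int(rev), node)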
6249 @command('tip',
6249 @command('tip',
6250 [('p', 'patch', None, _('show patch')),
6250 [('p', 'patch', None, _('show patch')),
6251 ('g', 'git', None, _('use git extended diff format')),
6251 ('g', 'git', None, _('use git extended diff format')),
6252 ] + templateopts,
6252 ] + templateopts,
6253 _('[-p] [-g]'))
6253 _('[-p] [-g]'))
6254 def tip(ui, repo, **opts):
6254 def tip(ui, repo, **opts):
6255 """show the tip revision (DEPRECATED)
6255 """show the tip revision (DEPRECATED)
6256
6256
6257 The tip revision (usually just called the tip) is the changeset
6257 The tip revision (usually just called the tip) is the changeset
6258 most recently added to the repository (and therefore the most
6258 most recently added to the repository (and therefore the most
6259 recently changed head).
6259 recently changed head).
6260
6260
6261 If you have just made a commit, that commit will be the tip. If
6261 If you have just made a commit, that commit will be the tip. If
6262 you have just pulled changes from another repository, the tip of
6262 you have just pulled changes from another repository, the tip of
6263 that repository becomes the current tip. The "tip" tag is special
6263 that repository becomes the current tip. The "tip" tag is special
6264 and cannot be renamed or assigned to a different changeset.
6264 and cannot be renamed or assigned to a different changeset.
6265
6265
6266 This command is deprecated, please use :hg:`heads` instead.
6266 This command is deprecated, please use :hg:`heads` instead.
6267
6267
6268 Returns 0 on success.
6268 Returns 0 on success.
6269 """
6269 """
6270 displayer = cmdutil.show_changeset(ui, repo, opts)
6270 displayer = cmdutil.show_changeset(ui, repo, opts)
6271 displayer.show(repo['tip'])
6271 displayer.show(repo['tip'])
6272 displayer.close()
6272 displayer.close()
6273
6273
6274 @command('unbundle',
6274 @command('unbundle',
6275 [('u', 'update', None,
6275 [('u', 'update', None,
6276 _('update to new branch head if changesets were unbundled'))],
6276 _('update to new branch head if changesets were unbundled'))],
6277 _('[-u] FILE...'))
6277 _('[-u] FILE...'))
6278 def unbundle(ui, repo, fname1, *fnames, **opts):
6278 def unbundle(ui, repo, fname1, *fnames, **opts):
6279 """apply one or more changegroup files
6279 """apply one or more changegroup files
6280
6280
6281 Apply one or more compressed changegroup files generated by the
6281 Apply one or more compressed changegroup files generated by the
6282 bundle command.
6282 bundle command.
6283
6283
6284 Returns 0 on success, 1 if an update has unresolved files.
6284 Returns 0 on success, 1 if an update has unresolved files.
6285 """
6285 """
6286 fnames = (fname1,) + fnames
6286 fnames = (fname1,) + fnames
6287
6287
6288 lock = repo.lock()
6288 lock = repo.lock()
6289 try:
6289 try:
6290 for fname in fnames:
6290 for fname in fnames:
6291 f = hg.openpath(ui, fname)
6291 f = hg.openpath(ui, fname)
6292 gen = exchange.readbundle(ui, f, fname)
6292 gen = exchange.readbundle(ui, f, fname)
6293 if isinstance(gen, bundle2.unbundle20):
6293 if isinstance(gen, bundle2.unbundle20):
6294 tr = repo.transaction('unbundle')
6294 tr = repo.transaction('unbundle')
6295 try:
6295 try:
6296 op = bundle2.processbundle(repo, gen, lambda: tr)
6296 op = bundle2.processbundle(repo, gen, lambda: tr)
6297 tr.close()
6297 tr.close()
6298 finally:
6298 finally:
6299 if tr:
6299 if tr:
6300 tr.release()
6300 tr.release()
6301 changes = [r.get('result', 0)
6301 changes = [r.get('result', 0)
6302 for r in op.records['changegroup']]
6302 for r in op.records['changegroup']]
6303 modheads = changegroup.combineresults(changes)
6303 modheads = changegroup.combineresults(changes)
6304 else:
6304 else:
6305 modheads = changegroup.addchangegroup(repo, gen, 'unbundle',
6305 modheads = changegroup.addchangegroup(repo, gen, 'unbundle',
6306 'bundle:' + fname)
6306 'bundle:' + fname)
6307 finally:
6307 finally:
6308 lock.release()
6308 lock.release()
6309
6309
6310 return postincoming(ui, repo, modheads, opts.get('update'), None)
6310 return postincoming(ui, repo, modheads, opts.get('update'), None)
6311
6311
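A full round trip with `hg bundle` and `hg unbundle` might look like this from a script (both repository paths are hypothetical; `hg bundle --all` packs every changeset of the source repository):

    import subprocess

    src = '/path/to/source-repo'  # hypothetical
    dst = '/path/to/clone'        # hypothetical clone made with 'hg clone -U'

    # write all changesets of src into a single changegroup file
    subprocess.run(['hg', '-R', src, 'bundle', '--all', '/tmp/all.hg'], check=True)

    # apply the bundle in dst and update to the new branch head (-u)
    subprocess.run(['hg', '-R', dst, 'unbundle', '-u', '/tmp/all.hg'], check=True)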
6312 @command('^update|up|checkout|co',
6312 @command('^update|up|checkout|co',
6313 [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
6313 [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
6314 ('c', 'check', None,
6314 ('c', 'check', None,
6315 _('update across branches if no uncommitted changes')),
6315 _('update across branches if no uncommitted changes')),
6316 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
6316 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
6317 ('r', 'rev', '', _('revision'), _('REV'))
6317 ('r', 'rev', '', _('revision'), _('REV'))
6318 ] + mergetoolopts,
6318 ] + mergetoolopts,
6319 _('[-c] [-C] [-d DATE] [[-r] REV]'))
6319 _('[-c] [-C] [-d DATE] [[-r] REV]'))
6320 def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False,
6320 def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False,
6321 tool=None):
6321 tool=None):
6322 """update working directory (or switch revisions)
6322 """update working directory (or switch revisions)
6323
6323
6324 Update the repository's working directory to the specified
6324 Update the repository's working directory to the specified
6325 changeset. If no changeset is specified, update to the tip of the
6325 changeset. If no changeset is specified, update to the tip of the
6326 current named branch and move the current bookmark (see :hg:`help
6326 current named branch and move the current bookmark (see :hg:`help
6327 bookmarks`).
6327 bookmarks`).
6328
6328
6329 Update sets the working directory's parent revision to the specified
6329 Update sets the working directory's parent revision to the specified
6330 changeset (see :hg:`help parents`).
6330 changeset (see :hg:`help parents`).
6331
6331
6332 If the changeset is not a descendant or ancestor of the working
6332 If the changeset is not a descendant or ancestor of the working
6333 directory's parent, the update is aborted. With the -c/--check
6333 directory's parent, the update is aborted. With the -c/--check
6334 option, the working directory is checked for uncommitted changes; if
6334 option, the working directory is checked for uncommitted changes; if
6335 none are found, the working directory is updated to the specified
6335 none are found, the working directory is updated to the specified
6336 changeset.
6336 changeset.
6337
6337
6338 .. container:: verbose
6338 .. container:: verbose
6339
6339
6340 The following rules apply when the working directory contains
6340 The following rules apply when the working directory contains
6341 uncommitted changes:
6341 uncommitted changes:
6342
6342
6343 1. If neither -c/--check nor -C/--clean is specified, and if
6343 1. If neither -c/--check nor -C/--clean is specified, and if
6344 the requested changeset is an ancestor or descendant of
6344 the requested changeset is an ancestor or descendant of
6345 the working directory's parent, the uncommitted changes
6345 the working directory's parent, the uncommitted changes
6346 are merged into the requested changeset and the merged
6346 are merged into the requested changeset and the merged
6347 result is left uncommitted. If the requested changeset is
6347 result is left uncommitted. If the requested changeset is
6348 not an ancestor or descendant (that is, it is on another
6348 not an ancestor or descendant (that is, it is on another
6349 branch), the update is aborted and the uncommitted changes
6349 branch), the update is aborted and the uncommitted changes
6350 are preserved.
6350 are preserved.
6351
6351
6352 2. With the -c/--check option, the update is aborted and the
6352 2. With the -c/--check option, the update is aborted and the
6353 uncommitted changes are preserved.
6353 uncommitted changes are preserved.
6354
6354
6355 3. With the -C/--clean option, uncommitted changes are discarded and
6355 3. With the -C/--clean option, uncommitted changes are discarded and
6356 the working directory is updated to the requested changeset.
6356 the working directory is updated to the requested changeset.
6357
6357
6358 To cancel an uncommitted merge (and lose your changes), use
6358 To cancel an uncommitted merge (and lose your changes), use
6359 :hg:`update --clean .`.
6359 :hg:`update --clean .`.
6360
6360
6361 Use null as the changeset to remove the working directory (like
6361 Use null as the changeset to remove the working directory (like
6362 :hg:`clone -U`).
6362 :hg:`clone -U`).
6363
6363
6364 If you want to revert just one file to an older revision, use
6364 If you want to revert just one file to an older revision, use
6365 :hg:`revert [-r REV] NAME`.
6365 :hg:`revert [-r REV] NAME`.
6366
6366
6367 See :hg:`help dates` for a list of formats valid for -d/--date.
6367 See :hg:`help dates` for a list of formats valid for -d/--date.
6368
6368
6369 Returns 0 on success, 1 if there are unresolved files.
6369 Returns 0 on success, 1 if there are unresolved files.
6370 """
6370 """
6371 if rev and node:
6371 if rev and node:
6372 raise util.Abort(_("please specify just one revision"))
6372 raise util.Abort(_("please specify just one revision"))
6373
6373
6374 if rev is None or rev == '':
6374 if rev is None or rev == '':
6375 rev = node
6375 rev = node
6376
6376
6377 cmdutil.clearunfinished(repo)
6377 cmdutil.clearunfinished(repo)
6378
6378
6379 # with no argument, we also move the current bookmark, if any
6379 # with no argument, we also move the current bookmark, if any
6380 rev, movemarkfrom = bookmarks.calculateupdate(ui, repo, rev)
6380 rev, movemarkfrom = bookmarks.calculateupdate(ui, repo, rev)
6381
6381
6382 # if we defined a bookmark, we have to remember the original bookmark name
6382 # if we defined a bookmark, we have to remember the original bookmark name
6383 brev = rev
6383 brev = rev
6384 rev = scmutil.revsingle(repo, rev, rev).rev()
6384 rev = scmutil.revsingle(repo, rev, rev).rev()
6385
6385
6386 if check and clean:
6386 if check and clean:
6387 raise util.Abort(_("cannot specify both -c/--check and -C/--clean"))
6387 raise util.Abort(_("cannot specify both -c/--check and -C/--clean"))
6388
6388
6389 if date:
6389 if date:
6390 if rev is not None:
6390 if rev is not None:
6391 raise util.Abort(_("you can't specify a revision and a date"))
6391 raise util.Abort(_("you can't specify a revision and a date"))
6392 rev = cmdutil.finddate(ui, repo, date)
6392 rev = cmdutil.finddate(ui, repo, date)
6393
6393
6394 if check:
6394 if check:
6395 cmdutil.bailifchanged(repo, merge=False)
6395 cmdutil.bailifchanged(repo, merge=False)
6396 if rev is None:
6396 if rev is None:
6397 rev = repo[repo[None].branch()].rev()
6397 rev = repo[repo[None].branch()].rev()
6398
6398
6399 repo.ui.setconfig('ui', 'forcemerge', tool, 'update')
6399 repo.ui.setconfig('ui', 'forcemerge', tool, 'update')
6400
6400
6401 if clean:
6401 if clean:
6402 ret = hg.clean(repo, rev)
6402 ret = hg.clean(repo, rev)
6403 else:
6403 else:
6404 ret = hg.update(repo, rev)
6404 ret = hg.update(repo, rev)
6405
6405
6406 if not ret and movemarkfrom:
6406 if not ret and movemarkfrom:
6407 if bookmarks.update(repo, [movemarkfrom], repo['.'].node()):
6407 if bookmarks.update(repo, [movemarkfrom], repo['.'].node()):
6408 ui.status(_("updating bookmark %s\n") % repo._bookmarkcurrent)
6408 ui.status(_("updating bookmark %s\n") % repo._activebookmark)
6409 elif brev in repo._bookmarks:
6409 elif brev in repo._bookmarks:
6410 bookmarks.activate(repo, brev)
6410 bookmarks.activate(repo, brev)
6411 ui.status(_("(activating bookmark %s)\n") % brev)
6411 ui.status(_("(activating bookmark %s)\n") % brev)
6412 elif brev:
6412 elif brev:
6413 if repo._bookmarkcurrent:
6413 if repo._activebookmark:
6414 ui.status(_("(leaving bookmark %s)\n") %
6414 ui.status(_("(leaving bookmark %s)\n") %
6415 repo._bookmarkcurrent)
6415 repo._activebookmark)
6416 bookmarks.deactivate(repo)
6416 bookmarks.deactivate(repo)
6417
6417
6418 return ret
6418 return ret
6419
6419
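The -c/--check and -C/--clean behaviours documented in the help text above map onto return codes that a script can branch on. A sketch (the repository path and target revision are hypothetical):

    import subprocess

    repo = '/path/to/repo'  # hypothetical

    # with -c/--check the update aborts if there are uncommitted changes
    res = subprocess.run(['hg', '-R', repo, 'update', '-c', '-r', 'default'])
    if res.returncode != 0:
        # with -C/--clean local changes are discarded and the update proceeds
        subprocess.run(['hg', '-R', repo, 'update', '-C', '-r', 'default'],
                       check=True)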
6420 @command('verify', [])
6420 @command('verify', [])
6421 def verify(ui, repo):
6421 def verify(ui, repo):
6422 """verify the integrity of the repository
6422 """verify the integrity of the repository
6423
6423
6424 Verify the integrity of the current repository.
6424 Verify the integrity of the current repository.
6425
6425
6426 This will perform an extensive check of the repository's
6426 This will perform an extensive check of the repository's
6427 integrity, validating the hashes and checksums of each entry in
6427 integrity, validating the hashes and checksums of each entry in
6428 the changelog, manifest, and tracked files, as well as the
6428 the changelog, manifest, and tracked files, as well as the
6429 integrity of their crosslinks and indices.
6429 integrity of their crosslinks and indices.
6430
6430
6431 Please see http://mercurial.selenic.com/wiki/RepositoryCorruption
6431 Please see http://mercurial.selenic.com/wiki/RepositoryCorruption
6432 for more information about recovery from corruption of the
6432 for more information about recovery from corruption of the
6433 repository.
6433 repository.
6434
6434
6435 Returns 0 on success, 1 if errors are encountered.
6435 Returns 0 on success, 1 if errors are encountered.
6436 """
6436 """
6437 return hg.verify(repo)
6437 return hg.verify(repo)
6438
6438
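Because the command returns 0 on success and 1 when problems are found, as documented above, other tooling can gate on it directly; for example:

    import subprocess

    # the path is hypothetical; the exit status follows the contract above
    res = subprocess.run(['hg', '-R', '/path/to/repo', 'verify'])
    if res.returncode != 0:
        raise SystemExit('repository integrity check failed')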
6439 @command('version', [], norepo=True)
6439 @command('version', [], norepo=True)
6440 def version_(ui):
6440 def version_(ui):
6441 """output version and copyright information"""
6441 """output version and copyright information"""
6442 ui.write(_("Mercurial Distributed SCM (version %s)\n")
6442 ui.write(_("Mercurial Distributed SCM (version %s)\n")
6443 % util.version())
6443 % util.version())
6444 ui.status(_(
6444 ui.status(_(
6445 "(see http://mercurial.selenic.com for more information)\n"
6445 "(see http://mercurial.selenic.com for more information)\n"
6446 "\nCopyright (C) 2005-2015 Matt Mackall and others\n"
6446 "\nCopyright (C) 2005-2015 Matt Mackall and others\n"
6447 "This is free software; see the source for copying conditions. "
6447 "This is free software; see the source for copying conditions. "
6448 "There is NO\nwarranty; "
6448 "There is NO\nwarranty; "
6449 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
6449 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
6450 ))
6450 ))
6451
6451
6452 ui.note(_("\nEnabled extensions:\n\n"))
6452 ui.note(_("\nEnabled extensions:\n\n"))
6453 if ui.verbose:
6453 if ui.verbose:
6454 # format names and versions into columns
6454 # format names and versions into columns
6455 names = []
6455 names = []
6456 vers = []
6456 vers = []
6457 for name, module in extensions.extensions():
6457 for name, module in extensions.extensions():
6458 names.append(name)
6458 names.append(name)
6459 vers.append(extensions.moduleversion(module))
6459 vers.append(extensions.moduleversion(module))
6460 if names:
6460 if names:
6461 maxnamelen = max(len(n) for n in names)
6461 maxnamelen = max(len(n) for n in names)
6462 for i, name in enumerate(names):
6462 for i, name in enumerate(names):
6463 ui.write(" %-*s %s\n" % (maxnamelen, name, vers[i]))
6463 ui.write(" %-*s %s\n" % (maxnamelen, name, vers[i]))
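In verbose mode the code above appends an "Enabled extensions:" header followed by aligned name/version columns. A small consumer sketch that assumes this layout (Python 3):

    import subprocess

    out = subprocess.run(['hg', 'version', '-v'], capture_output=True, text=True,
                         check=True).stdout
    lines = iter(out.splitlines())
    for line in lines:
        if line.strip() == 'Enabled extensions:':
            break
    for line in lines:
        if line.strip():
            name, _, version = line.strip().partition(' ')
            print(name, version.strip())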
@@ -1,1969 +1,1969 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 from node import hex, nullid, short
7 from node import hex, nullid, short
8 from i18n import _
8 from i18n import _
9 import urllib
9 import urllib
10 import peer, changegroup, subrepo, pushkey, obsolete, repoview
10 import peer, changegroup, subrepo, pushkey, obsolete, repoview
11 import changelog, dirstate, filelog, manifest, context, bookmarks, phases
11 import changelog, dirstate, filelog, manifest, context, bookmarks, phases
12 import lock as lockmod
12 import lock as lockmod
13 import transaction, store, encoding, exchange, bundle2
13 import transaction, store, encoding, exchange, bundle2
14 import scmutil, util, extensions, hook, error, revset
14 import scmutil, util, extensions, hook, error, revset
15 import match as matchmod
15 import match as matchmod
16 import merge as mergemod
16 import merge as mergemod
17 import tags as tagsmod
17 import tags as tagsmod
18 from lock import release
18 from lock import release
19 import weakref, errno, os, time, inspect
19 import weakref, errno, os, time, inspect
20 import branchmap, pathutil
20 import branchmap, pathutil
21 import namespaces
21 import namespaces
22 propertycache = util.propertycache
22 propertycache = util.propertycache
23 filecache = scmutil.filecache
23 filecache = scmutil.filecache
24
24
25 class repofilecache(filecache):
25 class repofilecache(filecache):
26 """All filecache usage on repo are done for logic that should be unfiltered
26 """All filecache usage on repo are done for logic that should be unfiltered
27 """
27 """
28
28
29 def __get__(self, repo, type=None):
29 def __get__(self, repo, type=None):
30 return super(repofilecache, self).__get__(repo.unfiltered(), type)
30 return super(repofilecache, self).__get__(repo.unfiltered(), type)
31 def __set__(self, repo, value):
31 def __set__(self, repo, value):
32 return super(repofilecache, self).__set__(repo.unfiltered(), value)
32 return super(repofilecache, self).__set__(repo.unfiltered(), value)
33 def __delete__(self, repo):
33 def __delete__(self, repo):
34 return super(repofilecache, self).__delete__(repo.unfiltered())
34 return super(repofilecache, self).__delete__(repo.unfiltered())
35
35
36 class storecache(repofilecache):
36 class storecache(repofilecache):
37 """filecache for files in the store"""
37 """filecache for files in the store"""
38 def join(self, obj, fname):
38 def join(self, obj, fname):
39 return obj.sjoin(fname)
39 return obj.sjoin(fname)
40
40
41 class unfilteredpropertycache(propertycache):
41 class unfilteredpropertycache(propertycache):
42 """propertycache that apply to unfiltered repo only"""
42 """propertycache that apply to unfiltered repo only"""
43
43
44 def __get__(self, repo, type=None):
44 def __get__(self, repo, type=None):
45 unfi = repo.unfiltered()
45 unfi = repo.unfiltered()
46 if unfi is repo:
46 if unfi is repo:
47 return super(unfilteredpropertycache, self).__get__(unfi)
47 return super(unfilteredpropertycache, self).__get__(unfi)
48 return getattr(unfi, self.name)
48 return getattr(unfi, self.name)
49
49
50 class filteredpropertycache(propertycache):
50 class filteredpropertycache(propertycache):
51 """propertycache that must take filtering in account"""
51 """propertycache that must take filtering in account"""
52
52
53 def cachevalue(self, obj, value):
53 def cachevalue(self, obj, value):
54 object.__setattr__(obj, self.name, value)
54 object.__setattr__(obj, self.name, value)
55
55
56
56
57 def hasunfilteredcache(repo, name):
57 def hasunfilteredcache(repo, name):
58 """check if a repo has an unfilteredpropertycache value for <name>"""
58 """check if a repo has an unfilteredpropertycache value for <name>"""
59 return name in vars(repo.unfiltered())
59 return name in vars(repo.unfiltered())
60
60
61 def unfilteredmethod(orig):
61 def unfilteredmethod(orig):
62 """decorate method that always need to be run on unfiltered version"""
62 """decorate method that always need to be run on unfiltered version"""
63 def wrapper(repo, *args, **kwargs):
63 def wrapper(repo, *args, **kwargs):
64 return orig(repo.unfiltered(), *args, **kwargs)
64 return orig(repo.unfiltered(), *args, **kwargs)
65 return wrapper
65 return wrapper
66
66
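The descriptors and the decorator above route cached attributes and selected methods to the unfiltered repository. A self-contained Python 3 sketch of the same descriptor-plus-decorator pattern, with a toy class standing in for the repo (nothing below is Mercurial API):

    import functools

    class cachedprop(object):
        """Compute once, then store the result on the instance so the
        descriptor is bypassed on later lookups (the propertycache idea)."""
        def __init__(self, func):
            self.func = func
            self.name = func.__name__
        def __get__(self, obj, objtype=None):
            if obj is None:
                return self
            value = self.func(obj)
            obj.__dict__[self.name] = value
            return value

    def on_unfiltered(method):
        """Always run the wrapped method against obj.unfiltered()."""
        @functools.wraps(method)
        def wrapper(obj, *args, **kwargs):
            return method(obj.unfiltered(), *args, **kwargs)
        return wrapper

    class Repo(object):
        def unfiltered(self):
            return self
        @cachedprop
        def heads(self):
            print('computing heads')
            return ['tip']
        @on_unfiltered
        def headcount(self):
            return len(self.heads)

    r = Repo()
    r.headcount()   # prints 'computing heads' once
    r.headcount()   # second call hits the cached attribute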
67 moderncaps = set(('lookup', 'branchmap', 'pushkey', 'known', 'getbundle',
67 moderncaps = set(('lookup', 'branchmap', 'pushkey', 'known', 'getbundle',
68 'unbundle'))
68 'unbundle'))
69 legacycaps = moderncaps.union(set(['changegroupsubset']))
69 legacycaps = moderncaps.union(set(['changegroupsubset']))
70
70
71 class localpeer(peer.peerrepository):
71 class localpeer(peer.peerrepository):
72 '''peer for a local repo; reflects only the most recent API'''
72 '''peer for a local repo; reflects only the most recent API'''
73
73
74 def __init__(self, repo, caps=moderncaps):
74 def __init__(self, repo, caps=moderncaps):
75 peer.peerrepository.__init__(self)
75 peer.peerrepository.__init__(self)
76 self._repo = repo.filtered('served')
76 self._repo = repo.filtered('served')
77 self.ui = repo.ui
77 self.ui = repo.ui
78 self._caps = repo._restrictcapabilities(caps)
78 self._caps = repo._restrictcapabilities(caps)
79 self.requirements = repo.requirements
79 self.requirements = repo.requirements
80 self.supportedformats = repo.supportedformats
80 self.supportedformats = repo.supportedformats
81
81
82 def close(self):
82 def close(self):
83 self._repo.close()
83 self._repo.close()
84
84
85 def _capabilities(self):
85 def _capabilities(self):
86 return self._caps
86 return self._caps
87
87
88 def local(self):
88 def local(self):
89 return self._repo
89 return self._repo
90
90
91 def canpush(self):
91 def canpush(self):
92 return True
92 return True
93
93
94 def url(self):
94 def url(self):
95 return self._repo.url()
95 return self._repo.url()
96
96
97 def lookup(self, key):
97 def lookup(self, key):
98 return self._repo.lookup(key)
98 return self._repo.lookup(key)
99
99
100 def branchmap(self):
100 def branchmap(self):
101 return self._repo.branchmap()
101 return self._repo.branchmap()
102
102
103 def heads(self):
103 def heads(self):
104 return self._repo.heads()
104 return self._repo.heads()
105
105
106 def known(self, nodes):
106 def known(self, nodes):
107 return self._repo.known(nodes)
107 return self._repo.known(nodes)
108
108
109 def getbundle(self, source, heads=None, common=None, bundlecaps=None,
109 def getbundle(self, source, heads=None, common=None, bundlecaps=None,
110 **kwargs):
110 **kwargs):
111 cg = exchange.getbundle(self._repo, source, heads=heads,
111 cg = exchange.getbundle(self._repo, source, heads=heads,
112 common=common, bundlecaps=bundlecaps, **kwargs)
112 common=common, bundlecaps=bundlecaps, **kwargs)
113 if bundlecaps is not None and 'HG20' in bundlecaps:
113 if bundlecaps is not None and 'HG20' in bundlecaps:
114 # When requesting a bundle2, getbundle returns a stream to make the
114 # When requesting a bundle2, getbundle returns a stream to make the
115 # wire level function happier. We need to build a proper object
115 # wire level function happier. We need to build a proper object
116 # from it in local peer.
116 # from it in local peer.
117 cg = bundle2.getunbundler(self.ui, cg)
117 cg = bundle2.getunbundler(self.ui, cg)
118 return cg
118 return cg
119
119
120 # TODO We might want to move the next two calls into legacypeer and add
120 # TODO We might want to move the next two calls into legacypeer and add
121 # unbundle instead.
121 # unbundle instead.
122
122
123 def unbundle(self, cg, heads, url):
123 def unbundle(self, cg, heads, url):
124 """apply a bundle on a repo
124 """apply a bundle on a repo
125
125
126 This function handles the repo locking itself."""
126 This function handles the repo locking itself."""
127 try:
127 try:
128 try:
128 try:
129 cg = exchange.readbundle(self.ui, cg, None)
129 cg = exchange.readbundle(self.ui, cg, None)
130 ret = exchange.unbundle(self._repo, cg, heads, 'push', url)
130 ret = exchange.unbundle(self._repo, cg, heads, 'push', url)
131 if util.safehasattr(ret, 'getchunks'):
131 if util.safehasattr(ret, 'getchunks'):
132 # This is a bundle20 object, turn it into an unbundler.
132 # This is a bundle20 object, turn it into an unbundler.
133 # This little dance should be dropped eventually when the
133 # This little dance should be dropped eventually when the
134 # API is finally improved.
134 # API is finally improved.
135 stream = util.chunkbuffer(ret.getchunks())
135 stream = util.chunkbuffer(ret.getchunks())
136 ret = bundle2.getunbundler(self.ui, stream)
136 ret = bundle2.getunbundler(self.ui, stream)
137 return ret
137 return ret
138 except Exception, exc:
138 except Exception, exc:
139 # If the exception contains output salvaged from a bundle2
139 # If the exception contains output salvaged from a bundle2
140 # reply, we need to make sure it is printed before continuing
140 # reply, we need to make sure it is printed before continuing
141 # to fail. So we build a bundle2 with such output and consume
141 # to fail. So we build a bundle2 with such output and consume
142 # it directly.
142 # it directly.
143 #
143 #
144 # This is not very elegant but allows a "simple" solution for
144 # This is not very elegant but allows a "simple" solution for
145 # issue4594
145 # issue4594
146 output = getattr(exc, '_bundle2salvagedoutput', ())
146 output = getattr(exc, '_bundle2salvagedoutput', ())
147 if output:
147 if output:
148 bundler = bundle2.bundle20(self._repo.ui)
148 bundler = bundle2.bundle20(self._repo.ui)
149 for out in output:
149 for out in output:
150 bundler.addpart(out)
150 bundler.addpart(out)
151 stream = util.chunkbuffer(bundler.getchunks())
151 stream = util.chunkbuffer(bundler.getchunks())
152 b = bundle2.getunbundler(self.ui, stream)
152 b = bundle2.getunbundler(self.ui, stream)
153 bundle2.processbundle(self._repo, b)
153 bundle2.processbundle(self._repo, b)
154 raise
154 raise
155 except error.PushRaced, exc:
155 except error.PushRaced, exc:
156 raise error.ResponseError(_('push failed:'), str(exc))
156 raise error.ResponseError(_('push failed:'), str(exc))
157
157
158 def lock(self):
158 def lock(self):
159 return self._repo.lock()
159 return self._repo.lock()
160
160
161 def addchangegroup(self, cg, source, url):
161 def addchangegroup(self, cg, source, url):
162 return changegroup.addchangegroup(self._repo, cg, source, url)
162 return changegroup.addchangegroup(self._repo, cg, source, url)
163
163
164 def pushkey(self, namespace, key, old, new):
164 def pushkey(self, namespace, key, old, new):
165 return self._repo.pushkey(namespace, key, old, new)
165 return self._repo.pushkey(namespace, key, old, new)
166
166
167 def listkeys(self, namespace):
167 def listkeys(self, namespace):
168 return self._repo.listkeys(namespace)
168 return self._repo.listkeys(namespace)
169
169
170 def debugwireargs(self, one, two, three=None, four=None, five=None):
170 def debugwireargs(self, one, two, three=None, four=None, five=None):
171 '''used to test argument passing over the wire'''
171 '''used to test argument passing over the wire'''
172 return "%s %s %s %s %s" % (one, two, three, four, five)
172 return "%s %s %s %s %s" % (one, two, three, four, five)
173
173
174 class locallegacypeer(localpeer):
174 class locallegacypeer(localpeer):
175 '''peer extension which implements legacy methods too; used for tests with
175 '''peer extension which implements legacy methods too; used for tests with
176 restricted capabilities'''
176 restricted capabilities'''
177
177
178 def __init__(self, repo):
178 def __init__(self, repo):
179 localpeer.__init__(self, repo, caps=legacycaps)
179 localpeer.__init__(self, repo, caps=legacycaps)
180
180
181 def branches(self, nodes):
181 def branches(self, nodes):
182 return self._repo.branches(nodes)
182 return self._repo.branches(nodes)
183
183
184 def between(self, pairs):
184 def between(self, pairs):
185 return self._repo.between(pairs)
185 return self._repo.between(pairs)
186
186
187 def changegroup(self, basenodes, source):
187 def changegroup(self, basenodes, source):
188 return changegroup.changegroup(self._repo, basenodes, source)
188 return changegroup.changegroup(self._repo, basenodes, source)
189
189
190 def changegroupsubset(self, bases, heads, source):
190 def changegroupsubset(self, bases, heads, source):
191 return changegroup.changegroupsubset(self._repo, bases, heads, source)
191 return changegroup.changegroupsubset(self._repo, bases, heads, source)
192
192
193 class localrepository(object):
193 class localrepository(object):
194
194
195 supportedformats = set(('revlogv1', 'generaldelta', 'manifestv2'))
195 supportedformats = set(('revlogv1', 'generaldelta', 'manifestv2'))
196 _basesupported = supportedformats | set(('store', 'fncache', 'shared',
196 _basesupported = supportedformats | set(('store', 'fncache', 'shared',
197 'dotencode'))
197 'dotencode'))
198 openerreqs = set(('revlogv1', 'generaldelta', 'manifestv2'))
198 openerreqs = set(('revlogv1', 'generaldelta', 'manifestv2'))
199 filtername = None
199 filtername = None
200
200
201 # a list of (ui, featureset) functions.
201 # a list of (ui, featureset) functions.
202 # only functions defined in module of enabled extensions are invoked
202 # only functions defined in module of enabled extensions are invoked
203 featuresetupfuncs = set()
203 featuresetupfuncs = set()
204
204
205 def _baserequirements(self, create):
205 def _baserequirements(self, create):
206 return ['revlogv1']
206 return ['revlogv1']
207
207
208 def __init__(self, baseui, path=None, create=False):
208 def __init__(self, baseui, path=None, create=False):
209 self.requirements = set()
209 self.requirements = set()
210 self.wvfs = scmutil.vfs(path, expandpath=True, realpath=True)
210 self.wvfs = scmutil.vfs(path, expandpath=True, realpath=True)
211 self.wopener = self.wvfs
211 self.wopener = self.wvfs
212 self.root = self.wvfs.base
212 self.root = self.wvfs.base
213 self.path = self.wvfs.join(".hg")
213 self.path = self.wvfs.join(".hg")
214 self.origroot = path
214 self.origroot = path
215 self.auditor = pathutil.pathauditor(self.root, self._checknested)
215 self.auditor = pathutil.pathauditor(self.root, self._checknested)
216 self.vfs = scmutil.vfs(self.path)
216 self.vfs = scmutil.vfs(self.path)
217 self.opener = self.vfs
217 self.opener = self.vfs
218 self.baseui = baseui
218 self.baseui = baseui
219 self.ui = baseui.copy()
219 self.ui = baseui.copy()
220 self.ui.copy = baseui.copy # prevent copying repo configuration
220 self.ui.copy = baseui.copy # prevent copying repo configuration
221 # A list of callbacks to shape the phase if no data were found.
221 # A list of callbacks to shape the phase if no data were found.
222 # Callbacks are in the form: func(repo, roots) --> processed root.
222 # Callbacks are in the form: func(repo, roots) --> processed root.
223 # This list is to be filled by extensions during repo setup
223 # This list is to be filled by extensions during repo setup
224 self._phasedefaults = []
224 self._phasedefaults = []
225 try:
225 try:
226 self.ui.readconfig(self.join("hgrc"), self.root)
226 self.ui.readconfig(self.join("hgrc"), self.root)
227 extensions.loadall(self.ui)
227 extensions.loadall(self.ui)
228 except IOError:
228 except IOError:
229 pass
229 pass
230
230
231 if self.featuresetupfuncs:
231 if self.featuresetupfuncs:
232 self.supported = set(self._basesupported) # use private copy
232 self.supported = set(self._basesupported) # use private copy
233 extmods = set(m.__name__ for n, m
233 extmods = set(m.__name__ for n, m
234 in extensions.extensions(self.ui))
234 in extensions.extensions(self.ui))
235 for setupfunc in self.featuresetupfuncs:
235 for setupfunc in self.featuresetupfuncs:
236 if setupfunc.__module__ in extmods:
236 if setupfunc.__module__ in extmods:
237 setupfunc(self.ui, self.supported)
237 setupfunc(self.ui, self.supported)
238 else:
238 else:
239 self.supported = self._basesupported
239 self.supported = self._basesupported
240
240
241 if not self.vfs.isdir():
241 if not self.vfs.isdir():
242 if create:
242 if create:
243 if not self.wvfs.exists():
243 if not self.wvfs.exists():
244 self.wvfs.makedirs()
244 self.wvfs.makedirs()
245 self.vfs.makedir(notindexed=True)
245 self.vfs.makedir(notindexed=True)
246 self.requirements.update(self._baserequirements(create))
246 self.requirements.update(self._baserequirements(create))
247 if self.ui.configbool('format', 'usestore', True):
247 if self.ui.configbool('format', 'usestore', True):
248 self.vfs.mkdir("store")
248 self.vfs.mkdir("store")
249 self.requirements.add("store")
249 self.requirements.add("store")
250 if self.ui.configbool('format', 'usefncache', True):
250 if self.ui.configbool('format', 'usefncache', True):
251 self.requirements.add("fncache")
251 self.requirements.add("fncache")
252 if self.ui.configbool('format', 'dotencode', True):
252 if self.ui.configbool('format', 'dotencode', True):
253 self.requirements.add('dotencode')
253 self.requirements.add('dotencode')
254 # create an invalid changelog
254 # create an invalid changelog
255 self.vfs.append(
255 self.vfs.append(
256 "00changelog.i",
256 "00changelog.i",
257 '\0\0\0\2' # represents revlogv2
257 '\0\0\0\2' # represents revlogv2
258 ' dummy changelog to prevent using the old repo layout'
258 ' dummy changelog to prevent using the old repo layout'
259 )
259 )
260 if self.ui.configbool('format', 'generaldelta', False):
260 if self.ui.configbool('format', 'generaldelta', False):
261 self.requirements.add("generaldelta")
261 self.requirements.add("generaldelta")
262 if self.ui.configbool('experimental', 'manifestv2', False):
262 if self.ui.configbool('experimental', 'manifestv2', False):
263 self.requirements.add("manifestv2")
263 self.requirements.add("manifestv2")
264 else:
264 else:
265 raise error.RepoError(_("repository %s not found") % path)
265 raise error.RepoError(_("repository %s not found") % path)
266 elif create:
266 elif create:
267 raise error.RepoError(_("repository %s already exists") % path)
267 raise error.RepoError(_("repository %s already exists") % path)
268 else:
268 else:
269 try:
269 try:
270 self.requirements = scmutil.readrequires(
270 self.requirements = scmutil.readrequires(
271 self.vfs, self.supported)
271 self.vfs, self.supported)
272 except IOError, inst:
272 except IOError, inst:
273 if inst.errno != errno.ENOENT:
273 if inst.errno != errno.ENOENT:
274 raise
274 raise
275
275
276 self.sharedpath = self.path
276 self.sharedpath = self.path
277 try:
277 try:
278 vfs = scmutil.vfs(self.vfs.read("sharedpath").rstrip('\n'),
278 vfs = scmutil.vfs(self.vfs.read("sharedpath").rstrip('\n'),
279 realpath=True)
279 realpath=True)
280 s = vfs.base
280 s = vfs.base
281 if not vfs.exists():
281 if not vfs.exists():
282 raise error.RepoError(
282 raise error.RepoError(
283 _('.hg/sharedpath points to nonexistent directory %s') % s)
283 _('.hg/sharedpath points to nonexistent directory %s') % s)
284 self.sharedpath = s
284 self.sharedpath = s
285 except IOError, inst:
285 except IOError, inst:
286 if inst.errno != errno.ENOENT:
286 if inst.errno != errno.ENOENT:
287 raise
287 raise
288
288
289 self.store = store.store(
289 self.store = store.store(
290 self.requirements, self.sharedpath, scmutil.vfs)
290 self.requirements, self.sharedpath, scmutil.vfs)
291 self.spath = self.store.path
291 self.spath = self.store.path
292 self.svfs = self.store.vfs
292 self.svfs = self.store.vfs
293 self.sopener = self.svfs
293 self.sopener = self.svfs
294 self.sjoin = self.store.join
294 self.sjoin = self.store.join
295 self.vfs.createmode = self.store.createmode
295 self.vfs.createmode = self.store.createmode
296 self._applyopenerreqs()
296 self._applyopenerreqs()
297 if create:
297 if create:
298 self._writerequirements()
298 self._writerequirements()
299
299
300
300
301 self._branchcaches = {}
301 self._branchcaches = {}
302 self._revbranchcache = None
302 self._revbranchcache = None
303 self.filterpats = {}
303 self.filterpats = {}
304 self._datafilters = {}
304 self._datafilters = {}
305 self._transref = self._lockref = self._wlockref = None
305 self._transref = self._lockref = self._wlockref = None
306
306
307 # A cache for various files under .hg/ that tracks file changes,
307 # A cache for various files under .hg/ that tracks file changes,
308 # (used by the filecache decorator)
308 # (used by the filecache decorator)
309 #
309 #
310 # Maps a property name to its util.filecacheentry
310 # Maps a property name to its util.filecacheentry
311 self._filecache = {}
311 self._filecache = {}
312
312
313 # hold sets of revisions to be filtered
313 # hold sets of revisions to be filtered
314 # should be cleared when something might have changed the filter value:
314 # should be cleared when something might have changed the filter value:
315 # - new changesets,
315 # - new changesets,
316 # - phase change,
316 # - phase change,
317 # - new obsolescence marker,
317 # - new obsolescence marker,
318 # - working directory parent change,
318 # - working directory parent change,
319 # - bookmark changes
319 # - bookmark changes
320 self.filteredrevcache = {}
320 self.filteredrevcache = {}
321
321
322 # generic mapping between names and nodes
322 # generic mapping between names and nodes
323 self.names = namespaces.namespaces()
323 self.names = namespaces.namespaces()
324
324
325 def close(self):
325 def close(self):
326 self._writecaches()
326 self._writecaches()
327
327
328 def _writecaches(self):
328 def _writecaches(self):
329 if self._revbranchcache:
329 if self._revbranchcache:
330 self._revbranchcache.write()
330 self._revbranchcache.write()
331
331
332 def _restrictcapabilities(self, caps):
332 def _restrictcapabilities(self, caps):
333 if self.ui.configbool('experimental', 'bundle2-advertise', True):
333 if self.ui.configbool('experimental', 'bundle2-advertise', True):
334 caps = set(caps)
334 caps = set(caps)
335 capsblob = bundle2.encodecaps(bundle2.getrepocaps(self))
335 capsblob = bundle2.encodecaps(bundle2.getrepocaps(self))
336 caps.add('bundle2=' + urllib.quote(capsblob))
336 caps.add('bundle2=' + urllib.quote(capsblob))
337 return caps
337 return caps
338
338
339 def _applyopenerreqs(self):
339 def _applyopenerreqs(self):
340 self.svfs.options = dict((r, 1) for r in self.requirements
340 self.svfs.options = dict((r, 1) for r in self.requirements
341 if r in self.openerreqs)
341 if r in self.openerreqs)
342 chunkcachesize = self.ui.configint('format', 'chunkcachesize')
342 chunkcachesize = self.ui.configint('format', 'chunkcachesize')
343 if chunkcachesize is not None:
343 if chunkcachesize is not None:
344 self.svfs.options['chunkcachesize'] = chunkcachesize
344 self.svfs.options['chunkcachesize'] = chunkcachesize
345 maxchainlen = self.ui.configint('format', 'maxchainlen')
345 maxchainlen = self.ui.configint('format', 'maxchainlen')
346 if maxchainlen is not None:
346 if maxchainlen is not None:
347 self.svfs.options['maxchainlen'] = maxchainlen
347 self.svfs.options['maxchainlen'] = maxchainlen
348 manifestcachesize = self.ui.configint('format', 'manifestcachesize')
348 manifestcachesize = self.ui.configint('format', 'manifestcachesize')
349 if manifestcachesize is not None:
349 if manifestcachesize is not None:
350 self.svfs.options['manifestcachesize'] = manifestcachesize
350 self.svfs.options['manifestcachesize'] = manifestcachesize
351 usetreemanifest = self.ui.configbool('experimental', 'treemanifest')
351 usetreemanifest = self.ui.configbool('experimental', 'treemanifest')
352 if usetreemanifest is not None:
352 if usetreemanifest is not None:
353 self.svfs.options['usetreemanifest'] = usetreemanifest
353 self.svfs.options['usetreemanifest'] = usetreemanifest
354
354
355 def _writerequirements(self):
355 def _writerequirements(self):
356 scmutil.writerequires(self.vfs, self.requirements)
356 scmutil.writerequires(self.vfs, self.requirements)
357
357
358 def _checknested(self, path):
358 def _checknested(self, path):
359 """Determine if path is a legal nested repository."""
359 """Determine if path is a legal nested repository."""
360 if not path.startswith(self.root):
360 if not path.startswith(self.root):
361 return False
361 return False
362 subpath = path[len(self.root) + 1:]
362 subpath = path[len(self.root) + 1:]
363 normsubpath = util.pconvert(subpath)
363 normsubpath = util.pconvert(subpath)
364
364
365 # XXX: Checking against the current working copy is wrong in
365 # XXX: Checking against the current working copy is wrong in
366 # the sense that it can reject things like
366 # the sense that it can reject things like
367 #
367 #
368 # $ hg cat -r 10 sub/x.txt
368 # $ hg cat -r 10 sub/x.txt
369 #
369 #
370 # if sub/ is no longer a subrepository in the working copy
370 # if sub/ is no longer a subrepository in the working copy
371 # parent revision.
371 # parent revision.
372 #
372 #
373 # However, it can of course also allow things that would have
373 # However, it can of course also allow things that would have
374 # been rejected before, such as the above cat command if sub/
374 # been rejected before, such as the above cat command if sub/
375 # is a subrepository now, but was a normal directory before.
375 # is a subrepository now, but was a normal directory before.
376 # The old path auditor would have rejected by mistake since it
376 # The old path auditor would have rejected by mistake since it
377 # panics when it sees sub/.hg/.
377 # panics when it sees sub/.hg/.
378 #
378 #
379 # All in all, checking against the working copy seems sensible
379 # All in all, checking against the working copy seems sensible
380 # since we want to prevent access to nested repositories on
380 # since we want to prevent access to nested repositories on
381 # the filesystem *now*.
381 # the filesystem *now*.
382 ctx = self[None]
382 ctx = self[None]
383 parts = util.splitpath(subpath)
383 parts = util.splitpath(subpath)
384 while parts:
384 while parts:
385 prefix = '/'.join(parts)
385 prefix = '/'.join(parts)
386 if prefix in ctx.substate:
386 if prefix in ctx.substate:
387 if prefix == normsubpath:
387 if prefix == normsubpath:
388 return True
388 return True
389 else:
389 else:
390 sub = ctx.sub(prefix)
390 sub = ctx.sub(prefix)
391 return sub.checknested(subpath[len(prefix) + 1:])
391 return sub.checknested(subpath[len(prefix) + 1:])
392 else:
392 else:
393 parts.pop()
393 parts.pop()
394 return False
394 return False
395
395
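The loop above walks the candidate path's prefixes from longest to shortest and hands off to the matching subrepo, if any. The same prefix walk in isolation, over a plain mapping (the substate contents are invented for illustration; Python 3):

    def find_subrepo_prefix(subpath, substate):
        """Return the longest registered subrepo prefix containing subpath,
        or None -- mirroring the walk in _checknested above."""
        parts = subpath.split('/')
        while parts:
            prefix = '/'.join(parts)
            if prefix in substate:
                return prefix
            parts.pop()
        return None

    substate = {'vendor/lib': 'rev1', 'docs': 'rev2'}  # hypothetical
    print(find_subrepo_prefix('vendor/lib/src/x.txt', substate))  # vendor/lib
    print(find_subrepo_prefix('src/y.txt', substate))             # None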
396 def peer(self):
396 def peer(self):
397 return localpeer(self) # not cached to avoid reference cycle
397 return localpeer(self) # not cached to avoid reference cycle
398
398
399 def unfiltered(self):
399 def unfiltered(self):
400 """Return unfiltered version of the repository
400 """Return unfiltered version of the repository
401
401
402 Intended to be overwritten by filtered repo."""
402 Intended to be overwritten by filtered repo."""
403 return self
403 return self
404
404
405 def filtered(self, name):
405 def filtered(self, name):
406 """Return a filtered version of a repository"""
406 """Return a filtered version of a repository"""
407 # build a new class with the mixin and the current class
407 # build a new class with the mixin and the current class
408 # (possibly subclass of the repo)
408 # (possibly subclass of the repo)
409 class proxycls(repoview.repoview, self.unfiltered().__class__):
409 class proxycls(repoview.repoview, self.unfiltered().__class__):
410 pass
410 pass
411 return proxycls(self, name)
411 return proxycls(self, name)
412
412
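    # Illustrative sketch, not part of the original file: how unfiltered() and
    # filtered() are meant to be combined by callers. Assumes `repo` is a
    # localrepository instance; 'visible' is the filter name used elsewhere in
    # this class (see cancopy below).
    #
    #   visible = repo.filtered('visible')    # hide filtered revisions
    #   raw = visible.unfiltered()            # back to the unfiltered repo
    #   raw is repo.unfiltered()              # -> True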
    @repofilecache('bookmarks')
    def _bookmarks(self):
        return bookmarks.bmstore(self)

    @repofilecache('bookmarks.current')
    def _activebookmark(self):
        return bookmarks.readactive(self)

    def bookmarkheads(self, bookmark):
        name = bookmark.split('@', 1)[0]
        heads = []
        for mark, n in self._bookmarks.iteritems():
            if mark.split('@', 1)[0] == name:
                heads.append(n)
        return heads

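    # Illustrative sketch, not part of the original file: after this rename the
    # cached property is reached as repo._activebookmark (backed by
    # bookmarks.readactive()) instead of the old repo._bookmarkcurrent.
    # Assumes `repo` is a localrepository instance.
    #
    #   mark = repo._activebookmark        # active bookmark name, or None
    #   if mark is not None:
    #       node = repo._bookmarks[mark]   # node the active bookmark points to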
    @storecache('phaseroots')
    def _phasecache(self):
        return phases.phasecache(self, self._phasedefaults)

    @storecache('obsstore')
    def obsstore(self):
        # read default format for new obsstore.
        defaultformat = self.ui.configint('format', 'obsstore-version', None)
        # rely on obsstore class default when possible.
        kwargs = {}
        if defaultformat is not None:
            kwargs['defaultformat'] = defaultformat
        readonly = not obsolete.isenabled(self, obsolete.createmarkersopt)
        store = obsolete.obsstore(self.svfs, readonly=readonly,
                                  **kwargs)
        if store and readonly:
            self.ui.warn(
                _('obsolete feature not enabled but %i markers found!\n')
                % len(list(store)))
        return store

    @storecache('00changelog.i')
    def changelog(self):
        c = changelog.changelog(self.svfs)
        if 'HG_PENDING' in os.environ:
            p = os.environ['HG_PENDING']
            if p.startswith(self.root):
                c.readpending('00changelog.i.a')
        return c

    @storecache('00manifest.i')
    def manifest(self):
        return manifest.manifest(self.svfs)

    @repofilecache('dirstate')
    def dirstate(self):
        warned = [0]
        def validate(node):
            try:
                self.changelog.rev(node)
                return node
            except error.LookupError:
                if not warned[0]:
                    warned[0] = True
                    self.ui.warn(_("warning: ignoring unknown"
                                   " working parent %s!\n") % short(node))
                return nullid

        return dirstate.dirstate(self.vfs, self.ui, self.root, validate)

    def __getitem__(self, changeid):
        if changeid is None:
            return context.workingctx(self)
        if isinstance(changeid, slice):
            return [context.changectx(self, i)
                    for i in xrange(*changeid.indices(len(self)))
                    if i not in self.changelog.filteredrevs]
        return context.changectx(self, changeid)

    def __contains__(self, changeid):
        try:
            self[changeid]
            return True
        except error.RepoLookupError:
            return False

    def __nonzero__(self):
        return True

    def __len__(self):
        return len(self.changelog)

    def __iter__(self):
        return iter(self.changelog)

    def revs(self, expr, *args):
        '''Return a list of revisions matching the given revset'''
        expr = revset.formatspec(expr, *args)
        m = revset.match(None, expr)
        return m(self)

    def set(self, expr, *args):
        '''
        Yield a context for each matching revision, after doing arg
        replacement via revset.formatspec
        '''
        for r in self.revs(expr, *args):
            yield self[r]

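    # Illustrative sketch, not part of the original file: revs() and set() are
    # the usual entry points for revset queries. Assumes `repo` is a
    # localrepository instance; 'default' is only an example branch name.
    #
    #   revnums = repo.revs('branch(%s)', 'default')      # revision numbers
    #   for ctx in repo.set('heads(branch(%s))', 'default'):
    #       print ctx.rev(), ctx.description()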
    def url(self):
        return 'file:' + self.root

    def hook(self, name, throw=False, **args):
        """Call a hook, passing this repo instance.

        This is a convenience method to aid invoking hooks. Extensions likely
        won't call this unless they have registered a custom hook or are
        replacing code that is expected to call a hook.
        """
        return hook.hook(self.ui, self, name, throw, **args)

    @unfilteredmethod
    def _tag(self, names, node, message, local, user, date, extra={},
             editor=False):
        if isinstance(names, str):
            names = (names,)

        branches = self.branchmap()
        for name in names:
            self.hook('pretag', throw=True, node=hex(node), tag=name,
                      local=local)
            if name in branches:
                self.ui.warn(_("warning: tag %s conflicts with existing"
                               " branch name\n") % name)

        def writetags(fp, names, munge, prevtags):
            fp.seek(0, 2)
            if prevtags and prevtags[-1] != '\n':
                fp.write('\n')
            for name in names:
                if munge:
                    m = munge(name)
                else:
                    m = name

                if (self._tagscache.tagtypes and
                    name in self._tagscache.tagtypes):
                    old = self.tags().get(name, nullid)
                    fp.write('%s %s\n' % (hex(old), m))
                fp.write('%s %s\n' % (hex(node), m))
            fp.close()

        prevtags = ''
        if local:
            try:
                fp = self.vfs('localtags', 'r+')
            except IOError:
                fp = self.vfs('localtags', 'a')
            else:
                prevtags = fp.read()

            # local tags are stored in the current charset
            writetags(fp, names, None, prevtags)
            for name in names:
                self.hook('tag', node=hex(node), tag=name, local=local)
            return

        try:
            fp = self.wfile('.hgtags', 'rb+')
        except IOError, e:
            if e.errno != errno.ENOENT:
                raise
            fp = self.wfile('.hgtags', 'ab')
        else:
            prevtags = fp.read()

        # committed tags are stored in UTF-8
        writetags(fp, names, encoding.fromlocal, prevtags)

        fp.close()

        self.invalidatecaches()

        if '.hgtags' not in self.dirstate:
            self[None].add(['.hgtags'])

        m = matchmod.exact(self.root, '', ['.hgtags'])
        tagnode = self.commit(message, user, date, extra=extra, match=m,
                              editor=editor)

        for name in names:
            self.hook('tag', node=hex(node), tag=name, local=local)

        return tagnode

    def tag(self, names, node, message, local, user, date, editor=False):
        '''tag a revision with one or more symbolic names.

        names is a list of strings or, when adding a single tag, names may be a
        string.

        if local is True, the tags are stored in a per-repository file.
        otherwise, they are stored in the .hgtags file, and a new
        changeset is committed with the change.

        keyword arguments:

        local: whether to store tags in a non-version-controlled file
        (default False)

        message: commit message to use if committing

        user: name of user to use if committing

        date: date tuple to use if committing'''

        if not local:
            m = matchmod.exact(self.root, '', ['.hgtags'])
            if util.any(self.status(match=m, unknown=True, ignored=True)):
                raise util.Abort(_('working copy of .hgtags is changed'),
                                 hint=_('please commit .hgtags manually'))

        self.tags() # instantiate the cache
        self._tag(names, node, message, local, user, date, editor=editor)

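    # Illustrative sketch, not part of the original file: creating a local tag
    # through this API. Assumes `repo` is a localrepository instance; the tag
    # name, message and user below are example values only.
    #
    #   node = repo['tip'].node()
    #   repo.tag('nightly', node, 'tagging nightly build',
    #            local=True, user='builder', date=None)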
    @filteredpropertycache
    def _tagscache(self):
        '''Returns a tagscache object that contains various tags related
        caches.'''

        # This simplifies its cache management by having one decorated
        # function (this one) and the rest simply fetch things from it.
        class tagscache(object):
            def __init__(self):
                # These two define the set of tags for this repository. tags
                # maps tag name to node; tagtypes maps tag name to 'global' or
                # 'local'. (Global tags are defined by .hgtags across all
                # heads, and local tags are defined in .hg/localtags.)
                # They constitute the in-memory cache of tags.
                self.tags = self.tagtypes = None

                self.nodetagscache = self.tagslist = None

        cache = tagscache()
        cache.tags, cache.tagtypes = self._findtags()

        return cache

    def tags(self):
        '''return a mapping of tag to node'''
        t = {}
        if self.changelog.filteredrevs:
            tags, tt = self._findtags()
        else:
            tags = self._tagscache.tags
        for k, v in tags.iteritems():
            try:
                # ignore tags to unknown nodes
                self.changelog.rev(v)
                t[k] = v
            except (error.LookupError, ValueError):
                pass
        return t

    def _findtags(self):
        '''Do the hard work of finding tags. Return a pair of dicts
        (tags, tagtypes) where tags maps tag name to node, and tagtypes
        maps tag name to a string like \'global\' or \'local\'.
        Subclasses or extensions are free to add their own tags, but
        should be aware that the returned dicts will be retained for the
        duration of the localrepo object.'''

        # XXX what tagtype should subclasses/extensions use? Currently
        # mq and bookmarks add tags, but do not set the tagtype at all.
        # Should each extension invent its own tag type? Should there
        # be one tagtype for all such "virtual" tags? Or is the status
        # quo fine?

        alltags = {} # map tag name to (node, hist)
        tagtypes = {}

        tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
        tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)

        # Build the return dicts. Have to re-encode tag names because
        # the tags module always uses UTF-8 (in order not to lose info
        # writing to the cache), but the rest of Mercurial wants them in
        # local encoding.
        tags = {}
        for (name, (node, hist)) in alltags.iteritems():
            if node != nullid:
                tags[encoding.tolocal(name)] = node
        tags['tip'] = self.changelog.tip()
        tagtypes = dict([(encoding.tolocal(name), value)
                         for (name, value) in tagtypes.iteritems()])
        return (tags, tagtypes)

    def tagtype(self, tagname):
        '''
        return the type of the given tag. result can be:

        'local'  : a local tag
        'global' : a global tag
        None     : tag does not exist
        '''

        return self._tagscache.tagtypes.get(tagname)

    def tagslist(self):
        '''return a list of tags ordered by revision'''
        if not self._tagscache.tagslist:
            l = []
            for t, n in self.tags().iteritems():
                l.append((self.changelog.rev(n), t, n))
            self._tagscache.tagslist = [(t, n) for r, t, n in sorted(l)]

        return self._tagscache.tagslist

    def nodetags(self, node):
        '''return the tags associated with a node'''
        if not self._tagscache.nodetagscache:
            nodetagscache = {}
            for t, n in self._tagscache.tags.iteritems():
                nodetagscache.setdefault(n, []).append(t)
            for tags in nodetagscache.itervalues():
                tags.sort()
            self._tagscache.nodetagscache = nodetagscache
        return self._tagscache.nodetagscache.get(node, [])

    def nodebookmarks(self, node):
        marks = []
        for bookmark, n in self._bookmarks.iteritems():
            if n == node:
                marks.append(bookmark)
        return sorted(marks)

    def branchmap(self):
        '''returns a dictionary {branch: [branchheads]} with branchheads
        ordered by increasing revision number'''
        branchmap.updatecache(self)
        return self._branchcaches[self.filtername]

    @unfilteredmethod
    def revbranchcache(self):
        if not self._revbranchcache:
            self._revbranchcache = branchmap.revbranchcache(self.unfiltered())
        return self._revbranchcache

    def branchtip(self, branch, ignoremissing=False):
        '''return the tip node for a given branch

        If ignoremissing is True, then this method will not raise an error.
        This is helpful for callers that only expect None for a missing branch
        (e.g. namespace).

        '''
        try:
            return self.branchmap().branchtip(branch)
        except KeyError:
            if not ignoremissing:
                raise error.RepoLookupError(_("unknown branch '%s'") % branch)
            else:
                pass

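    # Illustrative sketch, not part of the original file: typical read-only use
    # of the branch APIs above. Assumes `repo` is a localrepository instance
    # with a 'default' branch.
    #
    #   heads = repo.branchmap()['default']   # branch heads, oldest first
    #   tip = repo.branchtip('default')       # node of the branch tip
    #   missing = repo.branchtip('no-such-branch', ignoremissing=True)  # None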
    def lookup(self, key):
        return self[key].node()

    def lookupbranch(self, key, remote=None):
        repo = remote or self
        if key in repo.branchmap():
            return key

        repo = (remote and remote.local()) and remote or self
        return repo[key].branch()

    def known(self, nodes):
        nm = self.changelog.nodemap
        pc = self._phasecache
        result = []
        for n in nodes:
            r = nm.get(n)
            resp = not (r is None or pc.phase(self, r) >= phases.secret)
            result.append(resp)
        return result

    def local(self):
        return self

    def cancopy(self):
        # so statichttprepo's override of local() works
        if not self.local():
            return False
        if not self.ui.configbool('phases', 'publish', True):
            return True
        # if publishing we can't copy if there is filtered content
        return not self.filtered('visible').changelog.filteredrevs

    def shared(self):
        '''the type of shared repository (None if not shared)'''
        if self.sharedpath != self.path:
            return 'store'
        return None

    def join(self, f, *insidef):
        return self.vfs.join(os.path.join(f, *insidef))

    def wjoin(self, f, *insidef):
        return self.vfs.reljoin(self.root, f, *insidef)

    def file(self, f):
        if f[0] == '/':
            f = f[1:]
        return filelog.filelog(self.svfs, f)

    def changectx(self, changeid):
        return self[changeid]

    def parents(self, changeid=None):
        '''get list of changectxs for parents of changeid'''
        return self[changeid].parents()

    def setparents(self, p1, p2=nullid):
        self.dirstate.beginparentchange()
        copies = self.dirstate.setparents(p1, p2)
        pctx = self[p1]
        if copies:
            # Adjust copy records: the dirstate cannot do it, since it
            # requires access to the parents' manifests. Preserve them
            # only for entries added to the first parent.
            for f in copies:
                if f not in pctx and copies[f] in pctx:
                    self.dirstate.copy(copies[f], f)
        if p2 == nullid:
            for f, s in sorted(self.dirstate.copies().items()):
                if f not in pctx and s not in pctx:
                    self.dirstate.copy(None, f)
        self.dirstate.endparentchange()

    def filectx(self, path, changeid=None, fileid=None):
        """changeid can be a changeset revision, node, or tag.
        fileid can be a file revision or node."""
        return context.filectx(self, path, changeid, fileid)

    def getcwd(self):
        return self.dirstate.getcwd()

    def pathto(self, f, cwd=None):
        return self.dirstate.pathto(f, cwd)

    def wfile(self, f, mode='r'):
        return self.wvfs(f, mode)

    def _link(self, f):
        return self.wvfs.islink(f)

    def _loadfilter(self, filter):
        if filter not in self.filterpats:
            l = []
            for pat, cmd in self.ui.configitems(filter):
                if cmd == '!':
                    continue
                mf = matchmod.match(self.root, '', [pat])
                fn = None
                params = cmd
                for name, filterfn in self._datafilters.iteritems():
                    if cmd.startswith(name):
                        fn = filterfn
                        params = cmd[len(name):].lstrip()
                        break
                if not fn:
                    fn = lambda s, c, **kwargs: util.filter(s, c)
                # Wrap old filters not supporting keyword arguments
                if not inspect.getargspec(fn)[2]:
                    oldfn = fn
                    fn = lambda s, c, **kwargs: oldfn(s, c)
                l.append((mf, fn, params))
            self.filterpats[filter] = l
        return self.filterpats[filter]

    def _filter(self, filterpats, filename, data):
        for mf, fn, cmd in filterpats:
            if mf(filename):
                self.ui.debug("filtering %s through %s\n" % (filename, cmd))
                data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
                break

        return data

    @unfilteredpropertycache
    def _encodefilterpats(self):
        return self._loadfilter('encode')

    @unfilteredpropertycache
    def _decodefilterpats(self):
        return self._loadfilter('decode')

    def adddatafilter(self, name, filter):
        self._datafilters[name] = filter

    def wread(self, filename):
        if self._link(filename):
            data = self.wvfs.readlink(filename)
        else:
            data = self.wvfs.read(filename)
        return self._filter(self._encodefilterpats, filename, data)

    def wwrite(self, filename, data, flags):
        """write ``data`` into ``filename`` in the working directory

        This returns the length of the written (maybe decoded) data.
        """
        data = self._filter(self._decodefilterpats, filename, data)
        if 'l' in flags:
            self.wvfs.symlink(data, filename)
        else:
            self.wvfs.write(filename, data)
            if 'x' in flags:
                self.wvfs.setflags(filename, False, True)
        return len(data)

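    # Illustrative sketch, not part of the original file: wread()/wwrite() are
    # the filter-aware counterparts of plain working-directory I/O; the flags
    # string uses 'l' for symlinks and 'x' for executable files. Assumes `repo`
    # is a localrepository instance and the paths are example values.
    #
    #   data = repo.wread('foo.txt')                  # runs 'encode' filters
    #   repo.wwrite('foo.txt', data, '')              # regular file
    #   repo.wwrite('bin/run', '#!/bin/sh\n', 'x')    # executable file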
    def wwritedata(self, filename, data):
        return self._filter(self._decodefilterpats, filename, data)

    def currenttransaction(self):
        """return the current transaction or None if none exists"""
        if self._transref:
            tr = self._transref()
        else:
            tr = None

        if tr and tr.running():
            return tr
        return None

    def transaction(self, desc, report=None):
        if (self.ui.configbool('devel', 'all')
            or self.ui.configbool('devel', 'check-locks')):
            l = self._lockref and self._lockref()
            if l is None or not l.held:
                scmutil.develwarn(self.ui, 'transaction with no lock')
        tr = self.currenttransaction()
        if tr is not None:
            return tr.nest()

        # abort here if the journal already exists
        if self.svfs.exists("journal"):
            raise error.RepoError(
                _("abandoned transaction found"),
                hint=_("run 'hg recover' to clean up transaction"))

        self.hook('pretxnopen', throw=True, txnname=desc)

        self._writejournal(desc)
        renames = [(vfs, x, undoname(x)) for vfs, x in self._journalfiles()]
        if report:
            rp = report
        else:
            rp = self.ui.warn
        vfsmap = {'plain': self.vfs} # root of .hg/
        # we must avoid cyclic reference between repo and transaction.
        reporef = weakref.ref(self)
        def validate(tr):
            """will run pre-closing hooks"""
            pending = lambda: tr.writepending() and self.root or ""
            reporef().hook('pretxnclose', throw=True, pending=pending,
                           txnname=desc, **tr.hookargs)

        tr = transaction.transaction(rp, self.sopener, vfsmap,
                                     "journal",
                                     "undo",
                                     aftertrans(renames),
                                     self.store.createmode,
                                     validator=validate)

        trid = 'TXN:' + util.sha1("%s#%f" % (id(tr), time.time())).hexdigest()
        tr.hookargs['TXNID'] = trid
        # note: writing the fncache only during finalize means that the file is
        # outdated when running hooks. As fncache is used for streaming clone,
        # this is not expected to break anything that happens during the hooks.
        tr.addfinalize('flush-fncache', self.store.write)
        def txnclosehook(tr2):
            """To be run if transaction is successful, will schedule a hook run
            """
            def hook():
                reporef().hook('txnclose', throw=False, txnname=desc,
                               **tr2.hookargs)
            reporef()._afterlock(hook)
        tr.addfinalize('txnclose-hook', txnclosehook)
        def txnaborthook(tr2):
            """To be run if transaction is aborted
            """
            reporef().hook('txnabort', throw=False, txnname=desc,
                           **tr2.hookargs)
        tr.addabort('txnabort-hook', txnaborthook)
        self._transref = weakref.ref(tr)
        return tr

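    # Illustrative sketch, not part of the original file: the usual calling
    # pattern around transaction(). Assumes `repo` is a localrepository
    # instance and that the caller already holds the store lock (see lock()
    # below).
    #
    #   tr = repo.transaction('my-operation')
    #   try:
    #       # ... write to the store through tr ...
    #       tr.close()      # commit the transaction
    #   finally:
    #       tr.release()    # aborts if close() was never reached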
    def _journalfiles(self):
        return ((self.svfs, 'journal'),
                (self.vfs, 'journal.dirstate'),
                (self.vfs, 'journal.branch'),
                (self.vfs, 'journal.desc'),
                (self.vfs, 'journal.bookmarks'),
                (self.svfs, 'journal.phaseroots'))

    def undofiles(self):
        return [(vfs, undoname(x)) for vfs, x in self._journalfiles()]

    def _writejournal(self, desc):
        self.vfs.write("journal.dirstate",
                       self.vfs.tryread("dirstate"))
        self.vfs.write("journal.branch",
                       encoding.fromlocal(self.dirstate.branch()))
        self.vfs.write("journal.desc",
                       "%d\n%s\n" % (len(self), desc))
        self.vfs.write("journal.bookmarks",
                       self.vfs.tryread("bookmarks"))
        self.svfs.write("journal.phaseroots",
                        self.svfs.tryread("phaseroots"))

    def recover(self):
        lock = self.lock()
        try:
            if self.svfs.exists("journal"):
                self.ui.status(_("rolling back interrupted transaction\n"))
                vfsmap = {'': self.svfs,
                          'plain': self.vfs,}
                transaction.rollback(self.svfs, vfsmap, "journal",
                                     self.ui.warn)
                self.invalidate()
                return True
            else:
                self.ui.warn(_("no interrupted transaction available\n"))
                return False
        finally:
            lock.release()

    def rollback(self, dryrun=False, force=False):
        wlock = lock = None
        try:
            wlock = self.wlock()
            lock = self.lock()
            if self.svfs.exists("undo"):
                return self._rollback(dryrun, force)
            else:
                self.ui.warn(_("no rollback information available\n"))
                return 1
        finally:
            release(lock, wlock)

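    # Illustrative sketch, not part of the original file: recover() replays an
    # interrupted journal, while rollback() undoes the last completed
    # transaction. Assumes `repo` is a localrepository instance.
    #
    #   if repo.rollback(dryrun=True) == 0:   # report what would be undone
    #       repo.rollback(force=False)        # actually roll back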
    @unfilteredmethod # Until we get smarter cache management
    def _rollback(self, dryrun, force):
        ui = self.ui
        try:
            args = self.vfs.read('undo.desc').splitlines()
            (oldlen, desc, detail) = (int(args[0]), args[1], None)
            if len(args) >= 3:
                detail = args[2]
            oldtip = oldlen - 1

            if detail and ui.verbose:
                msg = (_('repository tip rolled back to revision %s'
                         ' (undo %s: %s)\n')
                       % (oldtip, desc, detail))
            else:
                msg = (_('repository tip rolled back to revision %s'
                         ' (undo %s)\n')
                       % (oldtip, desc))
        except IOError:
            msg = _('rolling back unknown transaction\n')
            desc = None

        if not force and self['.'] != self['tip'] and desc == 'commit':
            raise util.Abort(
                _('rollback of last commit while not checked out '
                  'may lose data'), hint=_('use -f to force'))

        ui.status(msg)
        if dryrun:
            return 0

        parents = self.dirstate.parents()
        self.destroying()
        vfsmap = {'plain': self.vfs, '': self.svfs}
        transaction.rollback(self.svfs, vfsmap, 'undo', ui.warn)
        if self.vfs.exists('undo.bookmarks'):
            self.vfs.rename('undo.bookmarks', 'bookmarks')
        if self.svfs.exists('undo.phaseroots'):
            self.svfs.rename('undo.phaseroots', 'phaseroots')
        self.invalidate()

        parentgone = (parents[0] not in self.changelog.nodemap or
                      parents[1] not in self.changelog.nodemap)
        if parentgone:
            self.vfs.rename('undo.dirstate', 'dirstate')
            try:
                branch = self.vfs.read('undo.branch')
                self.dirstate.setbranch(encoding.tolocal(branch))
            except IOError:
                ui.warn(_('named branch could not be reset: '
                          'current branch is still \'%s\'\n')
                        % self.dirstate.branch())

            self.dirstate.invalidate()
            parents = tuple([p.rev() for p in self.parents()])
            if len(parents) > 1:
                ui.status(_('working directory now based on '
                            'revisions %d and %d\n') % parents)
            else:
                ui.status(_('working directory now based on '
                            'revision %d\n') % parents)
            ms = mergemod.mergestate(self)
            ms.reset(self['.'].node())

        # TODO: if we know which new heads may result from this rollback, pass
        # them to destroy(), which will prevent the branchhead cache from being
        # invalidated.
        self.destroyed()
        return 0

    def invalidatecaches(self):

        if '_tagscache' in vars(self):
            # can't use delattr on proxy
            del self.__dict__['_tagscache']

        self.unfiltered()._branchcaches.clear()
        self.invalidatevolatilesets()

    def invalidatevolatilesets(self):
        self.filteredrevcache.clear()
        obsolete.clearobscaches(self)

    def invalidatedirstate(self):
        '''Invalidates the dirstate, causing the next call to dirstate
        to check if it was modified since the last time it was read,
        rereading it if it has.

        This differs from dirstate.invalidate() in that it doesn't always
        reread the dirstate. Use dirstate.invalidate() if you want to
        explicitly read the dirstate again (i.e. restoring it to a previous
        known good state).'''
        if hasunfilteredcache(self, 'dirstate'):
            for k in self.dirstate._filecache:
                try:
                    delattr(self.dirstate, k)
                except AttributeError:
                    pass
            delattr(self.unfiltered(), 'dirstate')

    def invalidate(self):
        unfiltered = self.unfiltered() # all file caches are stored unfiltered
        for k in self._filecache:
            # dirstate is invalidated separately in invalidatedirstate()
            if k == 'dirstate':
                continue

            try:
                delattr(unfiltered, k)
            except AttributeError:
                pass
        self.invalidatecaches()
        self.store.invalidatecaches()

    def invalidateall(self):
        '''Fully invalidates both store and non-store parts, causing the
        subsequent operation to reread any outside changes.'''
        # extensions should hook this to invalidate their caches
        self.invalidate()
        self.invalidatedirstate()

    def _lock(self, vfs, lockname, wait, releasefn, acquirefn, desc):
        try:
            l = lockmod.lock(vfs, lockname, 0, releasefn, desc=desc)
        except error.LockHeld, inst:
            if not wait:
                raise
            self.ui.warn(_("waiting for lock on %s held by %r\n") %
                         (desc, inst.locker))
            # default to 600 seconds timeout
            l = lockmod.lock(vfs, lockname,
                             int(self.ui.config("ui", "timeout", "600")),
                             releasefn, desc=desc)
            self.ui.warn(_("got lock after %s seconds\n") % l.delay)
        if acquirefn:
            acquirefn()
        return l

    def _afterlock(self, callback):
        """add a callback to be run when the repository is fully unlocked

        The callback will be executed when the outermost lock is released
        (with wlock being higher level than 'lock')."""
        for ref in (self._wlockref, self._lockref):
            l = ref and ref()
            if l and l.held:
                l.postrelease.append(callback)
                break
        else: # no lock has been found.
            callback()

    def lock(self, wait=True):
        '''Lock the repository store (.hg/store) and return a weak reference
        to the lock. Use this before modifying the store (e.g. committing or
        stripping). If you are opening a transaction, get a lock as well.

        If both 'lock' and 'wlock' must be acquired, ensure you always acquire
        'wlock' first to avoid a dead-lock hazard.'''
        l = self._lockref and self._lockref()
        if l is not None and l.held:
            l.lock()
            return l

        def unlock():
            for k, ce in self._filecache.items():
                if k == 'dirstate' or k not in self.__dict__:
                    continue
                ce.refresh()

        l = self._lock(self.svfs, "lock", wait, unlock,
                       self.invalidate, _('repository %s') % self.origroot)
        self._lockref = weakref.ref(l)
        return l

    def wlock(self, wait=True):
        '''Lock the non-store parts of the repository (everything under
        .hg except .hg/store) and return a weak reference to the lock.

        Use this before modifying files in .hg.

        If both 'lock' and 'wlock' must be acquired, ensure you always acquire
        'wlock' first to avoid a dead-lock hazard.'''
        l = self._wlockref and self._wlockref()
        if l is not None and l.held:
            l.lock()
            return l

        # We do not need to check for non-waiting lock acquisition. Such
        # acquisition would not cause a dead-lock as it would just fail.
        if wait and (self.ui.configbool('devel', 'all')
                     or self.ui.configbool('devel', 'check-locks')):
            l = self._lockref and self._lockref()
            if l is not None and l.held:
                scmutil.develwarn(self.ui, '"wlock" acquired after "lock"')

        def unlock():
            if self.dirstate.pendingparentchange():
                self.dirstate.invalidate()
            else:
                self.dirstate.write()

            self._filecache['dirstate'].refresh()

        l = self._lock(self.vfs, "wlock", wait, unlock,
                       self.invalidatedirstate, _('working directory of %s') %
                       self.origroot)
        self._wlockref = weakref.ref(l)
        return l

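    # Illustrative sketch, not part of the original file: the lock ordering
    # documented above (wlock before lock), wrapped in the usual try/finally.
    # Assumes `repo` is a localrepository instance; release() is the same
    # helper used by rollback() above.
    #
    #   wlock = lock = None
    #   try:
    #       wlock = repo.wlock()
    #       lock = repo.lock()
    #       # ... modify the working copy and the store ...
    #   finally:
    #       release(lock, wlock)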
    def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
        """
        commit an individual file as part of a larger transaction
        """

        fname = fctx.path()
        fparent1 = manifest1.get(fname, nullid)
        fparent2 = manifest2.get(fname, nullid)
        if isinstance(fctx, context.filectx):
            node = fctx.filenode()
            if node in [fparent1, fparent2]:
                self.ui.debug('reusing %s filelog entry\n' % fname)
                return node

        flog = self.file(fname)
        meta = {}
        copy = fctx.renamed()
        if copy and copy[0] != fname:
            # Mark the new revision of this file as a copy of another
            # file. This copy data will effectively act as a parent
            # of this new revision. If this is a merge, the first
            # parent will be the nullid (meaning "look up the copy data")
            # and the second one will be the other parent. For example:
            #
            #    0 --- 1 --- 3   rev1 changes file foo
            #      \       /     rev2 renames foo to bar and changes it
            #       \- 2 -/      rev3 should have bar with all changes and
            #                         should record that bar descends from
            #                         bar in rev2 and foo in rev1
            #
            # this allows this merge to succeed:
            #
            #    0 --- 1 --- 3   rev4 reverts the content change from rev2
            #      \       /     merging rev3 and rev4 should use bar@rev2
            #       \- 2 --- 4        as the merge base
            #

            cfname = copy[0]
            crev = manifest1.get(cfname)
            newfparent = fparent2

            if manifest2: # branch merge
                if fparent2 == nullid or crev is None: # copied on remote side
                    if cfname in manifest2:
                        crev = manifest2[cfname]
                        newfparent = fparent1

            # Here, we used to search backwards through history to try to find
            # where the file copy came from if the source of a copy was not in
            # the parent directory. However, this doesn't actually make sense to
            # do (what does a copy from something not in your working copy even
            # mean?) and it causes bugs (eg, issue4476). Instead, we will warn
            # the user that copy information was dropped, so if they didn't
            # expect this outcome it can be fixed, but this is the correct
            # behavior in this circumstance.

            if crev:
                self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
                meta["copy"] = cfname
                meta["copyrev"] = hex(crev)
                fparent1, fparent2 = nullid, newfparent
            else:
                self.ui.warn(_("warning: can't find ancestor for '%s' "
                               "copied from '%s'!\n") % (fname, cfname))

        elif fparent1 == nullid:
            fparent1, fparent2 = fparent2, nullid
        elif fparent2 != nullid:
            # is one parent an ancestor of the other?
            fparentancestors = flog.commonancestorsheads(fparent1, fparent2)
            if fparent1 in fparentancestors:
                fparent1, fparent2 = fparent2, nullid
            elif fparent2 in fparentancestors:
                fparent2 = nullid

        # is the file changed?
        text = fctx.data()
        if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
            changelist.append(fname)
            return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
        # are just the flags changed during merge?
        elif fname in manifest1 and manifest1.flags(fname) != fctx.flags():
            changelist.append(fname)

        return fparent1

1354 @unfilteredmethod
1354 @unfilteredmethod
1355 def commit(self, text="", user=None, date=None, match=None, force=False,
1355 def commit(self, text="", user=None, date=None, match=None, force=False,
1356 editor=False, extra={}):
1356 editor=False, extra={}):
1357 """Add a new revision to current repository.
1357 """Add a new revision to current repository.
1358
1358
1359 Revision information is gathered from the working directory,
1359 Revision information is gathered from the working directory,
1360 match can be used to filter the committed files. If editor is
1360 match can be used to filter the committed files. If editor is
1361 supplied, it is called to get a commit message.
1361 supplied, it is called to get a commit message.
1362 """
1362 """
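        # Illustrative usage (an assumed typical caller, not part of this
        # change):
        #
        #     node = repo.commit(text='fix issue', user='Alice <a@example.org>')
        #
        # returns the new changeset node, or None when there is nothing to
        # commit.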
1363
1363
1364 def fail(f, msg):
1364 def fail(f, msg):
1365 raise util.Abort('%s: %s' % (f, msg))
1365 raise util.Abort('%s: %s' % (f, msg))
1366
1366
1367 if not match:
1367 if not match:
1368 match = matchmod.always(self.root, '')
1368 match = matchmod.always(self.root, '')
1369
1369
1370 if not force:
1370 if not force:
1371 vdirs = []
1371 vdirs = []
1372 match.explicitdir = vdirs.append
1372 match.explicitdir = vdirs.append
1373 match.bad = fail
1373 match.bad = fail
1374
1374
1375 wlock = self.wlock()
1375 wlock = self.wlock()
1376 try:
1376 try:
1377 wctx = self[None]
1377 wctx = self[None]
1378 merge = len(wctx.parents()) > 1
1378 merge = len(wctx.parents()) > 1
1379
1379
1380 if not force and merge and not match.always():
1380 if not force and merge and not match.always():
1381 raise util.Abort(_('cannot partially commit a merge '
1381 raise util.Abort(_('cannot partially commit a merge '
1382 '(do not specify files or patterns)'))
1382 '(do not specify files or patterns)'))
1383
1383
1384 status = self.status(match=match, clean=force)
1384 status = self.status(match=match, clean=force)
1385 if force:
1385 if force:
1386 status.modified.extend(status.clean) # mq may commit clean files
1386 status.modified.extend(status.clean) # mq may commit clean files
1387
1387
1388 # check subrepos
1388 # check subrepos
1389 subs = []
1389 subs = []
1390 commitsubs = set()
1390 commitsubs = set()
1391 newstate = wctx.substate.copy()
1391 newstate = wctx.substate.copy()
1392 # only manage subrepos and .hgsubstate if .hgsub is present
1392 # only manage subrepos and .hgsubstate if .hgsub is present
1393 if '.hgsub' in wctx:
1393 if '.hgsub' in wctx:
1394 # we'll decide whether to track this ourselves, thanks
1394 # we'll decide whether to track this ourselves, thanks
1395 for c in status.modified, status.added, status.removed:
1395 for c in status.modified, status.added, status.removed:
1396 if '.hgsubstate' in c:
1396 if '.hgsubstate' in c:
1397 c.remove('.hgsubstate')
1397 c.remove('.hgsubstate')
1398
1398
1399 # compare current state to last committed state
1399 # compare current state to last committed state
1400 # build new substate based on last committed state
1400 # build new substate based on last committed state
1401 oldstate = wctx.p1().substate
1401 oldstate = wctx.p1().substate
1402 for s in sorted(newstate.keys()):
1402 for s in sorted(newstate.keys()):
1403 if not match(s):
1403 if not match(s):
1404 # ignore working copy, use old state if present
1404 # ignore working copy, use old state if present
1405 if s in oldstate:
1405 if s in oldstate:
1406 newstate[s] = oldstate[s]
1406 newstate[s] = oldstate[s]
1407 continue
1407 continue
1408 if not force:
1408 if not force:
1409 raise util.Abort(
1409 raise util.Abort(
1410 _("commit with new subrepo %s excluded") % s)
1410 _("commit with new subrepo %s excluded") % s)
1411 dirtyreason = wctx.sub(s).dirtyreason(True)
1411 dirtyreason = wctx.sub(s).dirtyreason(True)
1412 if dirtyreason:
1412 if dirtyreason:
1413 if not self.ui.configbool('ui', 'commitsubrepos'):
1413 if not self.ui.configbool('ui', 'commitsubrepos'):
1414 raise util.Abort(dirtyreason,
1414 raise util.Abort(dirtyreason,
1415 hint=_("use --subrepos for recursive commit"))
1415 hint=_("use --subrepos for recursive commit"))
1416 subs.append(s)
1416 subs.append(s)
1417 commitsubs.add(s)
1417 commitsubs.add(s)
1418 else:
1418 else:
1419 bs = wctx.sub(s).basestate()
1419 bs = wctx.sub(s).basestate()
1420 newstate[s] = (newstate[s][0], bs, newstate[s][2])
1420 newstate[s] = (newstate[s][0], bs, newstate[s][2])
1421 if oldstate.get(s, (None, None, None))[1] != bs:
1421 if oldstate.get(s, (None, None, None))[1] != bs:
1422 subs.append(s)
1422 subs.append(s)
1423
1423
1424 # check for removed subrepos
1424 # check for removed subrepos
1425 for p in wctx.parents():
1425 for p in wctx.parents():
1426 r = [s for s in p.substate if s not in newstate]
1426 r = [s for s in p.substate if s not in newstate]
1427 subs += [s for s in r if match(s)]
1427 subs += [s for s in r if match(s)]
1428 if subs:
1428 if subs:
1429 if (not match('.hgsub') and
1429 if (not match('.hgsub') and
1430 '.hgsub' in (wctx.modified() + wctx.added())):
1430 '.hgsub' in (wctx.modified() + wctx.added())):
1431 raise util.Abort(
1431 raise util.Abort(
1432 _("can't commit subrepos without .hgsub"))
1432 _("can't commit subrepos without .hgsub"))
1433 status.modified.insert(0, '.hgsubstate')
1433 status.modified.insert(0, '.hgsubstate')
1434
1434
1435 elif '.hgsub' in status.removed:
1435 elif '.hgsub' in status.removed:
1436 # clean up .hgsubstate when .hgsub is removed
1436 # clean up .hgsubstate when .hgsub is removed
1437 if ('.hgsubstate' in wctx and
1437 if ('.hgsubstate' in wctx and
1438 '.hgsubstate' not in (status.modified + status.added +
1438 '.hgsubstate' not in (status.modified + status.added +
1439 status.removed)):
1439 status.removed)):
1440 status.removed.insert(0, '.hgsubstate')
1440 status.removed.insert(0, '.hgsubstate')
1441
1441
1442 # make sure all explicit patterns are matched
1442 # make sure all explicit patterns are matched
1443 if not force and match.files():
1443 if not force and match.files():
1444 matched = set(status.modified + status.added + status.removed)
1444 matched = set(status.modified + status.added + status.removed)
1445
1445
1446 for f in match.files():
1446 for f in match.files():
1447 f = self.dirstate.normalize(f)
1447 f = self.dirstate.normalize(f)
1448 if f == '.' or f in matched or f in wctx.substate:
1448 if f == '.' or f in matched or f in wctx.substate:
1449 continue
1449 continue
1450 if f in status.deleted:
1450 if f in status.deleted:
1451 fail(f, _('file not found!'))
1451 fail(f, _('file not found!'))
1452 if f in vdirs: # visited directory
1452 if f in vdirs: # visited directory
1453 d = f + '/'
1453 d = f + '/'
1454 for mf in matched:
1454 for mf in matched:
1455 if mf.startswith(d):
1455 if mf.startswith(d):
1456 break
1456 break
1457 else:
1457 else:
1458 fail(f, _("no match under directory!"))
1458 fail(f, _("no match under directory!"))
1459 elif f not in self.dirstate:
1459 elif f not in self.dirstate:
1460 fail(f, _("file not tracked!"))
1460 fail(f, _("file not tracked!"))
1461
1461
1462 cctx = context.workingcommitctx(self, status,
1462 cctx = context.workingcommitctx(self, status,
1463 text, user, date, extra)
1463 text, user, date, extra)
1464
1464
1465 if (not force and not extra.get("close") and not merge
1465 if (not force and not extra.get("close") and not merge
1466 and not cctx.files()
1466 and not cctx.files()
1467 and wctx.branch() == wctx.p1().branch()):
1467 and wctx.branch() == wctx.p1().branch()):
1468 return None
1468 return None
1469
1469
1470 if merge and cctx.deleted():
1470 if merge and cctx.deleted():
1471 raise util.Abort(_("cannot commit merge with missing files"))
1471 raise util.Abort(_("cannot commit merge with missing files"))
1472
1472
1473 ms = mergemod.mergestate(self)
1473 ms = mergemod.mergestate(self)
1474 for f in status.modified:
1474 for f in status.modified:
1475 if f in ms and ms[f] == 'u':
1475 if f in ms and ms[f] == 'u':
1476 raise util.Abort(_('unresolved merge conflicts '
1476 raise util.Abort(_('unresolved merge conflicts '
1477 '(see "hg help resolve")'))
1477 '(see "hg help resolve")'))
1478
1478
1479 if editor:
1479 if editor:
1480 cctx._text = editor(self, cctx, subs)
1480 cctx._text = editor(self, cctx, subs)
1481 edited = (text != cctx._text)
1481 edited = (text != cctx._text)
1482
1482
1483 # Save commit message in case this transaction gets rolled back
1483 # Save commit message in case this transaction gets rolled back
1484 # (e.g. by a pretxncommit hook). Leave the content alone on
1484 # (e.g. by a pretxncommit hook). Leave the content alone on
1485 # the assumption that the user will use the same editor again.
1485 # the assumption that the user will use the same editor again.
1486 msgfn = self.savecommitmessage(cctx._text)
1486 msgfn = self.savecommitmessage(cctx._text)
1487
1487
1488 # commit subs and write new state
1488 # commit subs and write new state
1489 if subs:
1489 if subs:
1490 for s in sorted(commitsubs):
1490 for s in sorted(commitsubs):
1491 sub = wctx.sub(s)
1491 sub = wctx.sub(s)
1492 self.ui.status(_('committing subrepository %s\n') %
1492 self.ui.status(_('committing subrepository %s\n') %
1493 subrepo.subrelpath(sub))
1493 subrepo.subrelpath(sub))
1494 sr = sub.commit(cctx._text, user, date)
1494 sr = sub.commit(cctx._text, user, date)
1495 newstate[s] = (newstate[s][0], sr)
1495 newstate[s] = (newstate[s][0], sr)
1496 subrepo.writestate(self, newstate)
1496 subrepo.writestate(self, newstate)
1497
1497
1498 p1, p2 = self.dirstate.parents()
1498 p1, p2 = self.dirstate.parents()
1499 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
1499 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
1500 try:
1500 try:
1501 self.hook("precommit", throw=True, parent1=hookp1,
1501 self.hook("precommit", throw=True, parent1=hookp1,
1502 parent2=hookp2)
1502 parent2=hookp2)
1503 ret = self.commitctx(cctx, True)
1503 ret = self.commitctx(cctx, True)
1504 except: # re-raises
1504 except: # re-raises
1505 if edited:
1505 if edited:
1506 self.ui.write(
1506 self.ui.write(
1507 _('note: commit message saved in %s\n') % msgfn)
1507 _('note: commit message saved in %s\n') % msgfn)
1508 raise
1508 raise
1509
1509
1510 # update bookmarks, dirstate and mergestate
1510 # update bookmarks, dirstate and mergestate
1511 bookmarks.update(self, [p1, p2], ret)
1511 bookmarks.update(self, [p1, p2], ret)
1512 cctx.markcommitted(ret)
1512 cctx.markcommitted(ret)
1513 ms.reset()
1513 ms.reset()
1514 finally:
1514 finally:
1515 wlock.release()
1515 wlock.release()
1516
1516
1517 def commithook(node=hex(ret), parent1=hookp1, parent2=hookp2):
1517 def commithook(node=hex(ret), parent1=hookp1, parent2=hookp2):
1518             # hack for commands that use a temporary commit (eg: histedit):
1518             # hack for commands that use a temporary commit (eg: histedit):
1519             # the temporary commit may have been stripped before the hook runs
1519             # the temporary commit may have been stripped before the hook runs
1520 if node in self:
1520 if node in self:
1521 self.hook("commit", node=node, parent1=parent1,
1521 self.hook("commit", node=node, parent1=parent1,
1522 parent2=parent2)
1522 parent2=parent2)
1523 self._afterlock(commithook)
1523 self._afterlock(commithook)
1524 return ret
1524 return ret
1525
1525
1526 @unfilteredmethod
1526 @unfilteredmethod
1527 def commitctx(self, ctx, error=False):
1527 def commitctx(self, ctx, error=False):
1528 """Add a new revision to current repository.
1528 """Add a new revision to current repository.
1529 Revision information is passed via the context argument.
1529 Revision information is passed via the context argument.
1530 """
1530 """
1531
1531
1532 tr = None
1532 tr = None
1533 p1, p2 = ctx.p1(), ctx.p2()
1533 p1, p2 = ctx.p1(), ctx.p2()
1534 user = ctx.user()
1534 user = ctx.user()
1535
1535
1536 lock = self.lock()
1536 lock = self.lock()
1537 try:
1537 try:
1538 tr = self.transaction("commit")
1538 tr = self.transaction("commit")
1539 trp = weakref.proxy(tr)
1539 trp = weakref.proxy(tr)
1540
1540
1541 if ctx.files():
1541 if ctx.files():
1542 m1 = p1.manifest()
1542 m1 = p1.manifest()
1543 m2 = p2.manifest()
1543 m2 = p2.manifest()
1544 m = m1.copy()
1544 m = m1.copy()
1545
1545
1546 # check in files
1546 # check in files
1547 added = []
1547 added = []
1548 changed = []
1548 changed = []
1549 removed = list(ctx.removed())
1549 removed = list(ctx.removed())
1550 linkrev = len(self)
1550 linkrev = len(self)
1551 self.ui.note(_("committing files:\n"))
1551 self.ui.note(_("committing files:\n"))
1552 for f in sorted(ctx.modified() + ctx.added()):
1552 for f in sorted(ctx.modified() + ctx.added()):
1553 self.ui.note(f + "\n")
1553 self.ui.note(f + "\n")
1554 try:
1554 try:
1555 fctx = ctx[f]
1555 fctx = ctx[f]
1556 if fctx is None:
1556 if fctx is None:
1557 removed.append(f)
1557 removed.append(f)
1558 else:
1558 else:
1559 added.append(f)
1559 added.append(f)
1560 m[f] = self._filecommit(fctx, m1, m2, linkrev,
1560 m[f] = self._filecommit(fctx, m1, m2, linkrev,
1561 trp, changed)
1561 trp, changed)
1562 m.setflag(f, fctx.flags())
1562 m.setflag(f, fctx.flags())
1563 except OSError, inst:
1563 except OSError, inst:
1564 self.ui.warn(_("trouble committing %s!\n") % f)
1564 self.ui.warn(_("trouble committing %s!\n") % f)
1565 raise
1565 raise
1566 except IOError, inst:
1566 except IOError, inst:
1567 errcode = getattr(inst, 'errno', errno.ENOENT)
1567 errcode = getattr(inst, 'errno', errno.ENOENT)
1568 if error or errcode and errcode != errno.ENOENT:
1568 if error or errcode and errcode != errno.ENOENT:
1569 self.ui.warn(_("trouble committing %s!\n") % f)
1569 self.ui.warn(_("trouble committing %s!\n") % f)
1570 raise
1570 raise
1571
1571
1572 # update manifest
1572 # update manifest
1573 self.ui.note(_("committing manifest\n"))
1573 self.ui.note(_("committing manifest\n"))
1574 removed = [f for f in sorted(removed) if f in m1 or f in m2]
1574 removed = [f for f in sorted(removed) if f in m1 or f in m2]
1575 drop = [f for f in removed if f in m]
1575 drop = [f for f in removed if f in m]
1576 for f in drop:
1576 for f in drop:
1577 del m[f]
1577 del m[f]
1578 mn = self.manifest.add(m, trp, linkrev,
1578 mn = self.manifest.add(m, trp, linkrev,
1579 p1.manifestnode(), p2.manifestnode(),
1579 p1.manifestnode(), p2.manifestnode(),
1580 added, drop)
1580 added, drop)
1581 files = changed + removed
1581 files = changed + removed
1582 else:
1582 else:
1583 mn = p1.manifestnode()
1583 mn = p1.manifestnode()
1584 files = []
1584 files = []
1585
1585
1586 # update changelog
1586 # update changelog
1587 self.ui.note(_("committing changelog\n"))
1587 self.ui.note(_("committing changelog\n"))
1588 self.changelog.delayupdate(tr)
1588 self.changelog.delayupdate(tr)
1589 n = self.changelog.add(mn, files, ctx.description(),
1589 n = self.changelog.add(mn, files, ctx.description(),
1590 trp, p1.node(), p2.node(),
1590 trp, p1.node(), p2.node(),
1591 user, ctx.date(), ctx.extra().copy())
1591 user, ctx.date(), ctx.extra().copy())
1592 p = lambda: tr.writepending() and self.root or ""
1592 p = lambda: tr.writepending() and self.root or ""
1593 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
1593 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
1594 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
1594 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
1595 parent2=xp2, pending=p)
1595 parent2=xp2, pending=p)
1596             # set the new commit in its proper phase
1596             # set the new commit in its proper phase
1597 targetphase = subrepo.newcommitphase(self.ui, ctx)
1597 targetphase = subrepo.newcommitphase(self.ui, ctx)
1598 if targetphase:
1598 if targetphase:
1599                 # retracting the boundary does not alter parent changesets.
1599                 # retracting the boundary does not alter parent changesets.
1600                 # if a parent has a higher phase, the resulting phase will
1600                 # if a parent has a higher phase, the resulting phase will
1601                 # be compliant anyway
1601                 # be compliant anyway
1602 #
1602 #
1603 # if minimal phase was 0 we don't need to retract anything
1603 # if minimal phase was 0 we don't need to retract anything
1604 phases.retractboundary(self, tr, targetphase, [n])
1604 phases.retractboundary(self, tr, targetphase, [n])
1605 tr.close()
1605 tr.close()
1606 branchmap.updatecache(self.filtered('served'))
1606 branchmap.updatecache(self.filtered('served'))
1607 return n
1607 return n
1608 finally:
1608 finally:
1609 if tr:
1609 if tr:
1610 tr.release()
1610 tr.release()
1611 lock.release()
1611 lock.release()
1612
1612
1613 @unfilteredmethod
1613 @unfilteredmethod
1614 def destroying(self):
1614 def destroying(self):
1615 '''Inform the repository that nodes are about to be destroyed.
1615 '''Inform the repository that nodes are about to be destroyed.
1616 Intended for use by strip and rollback, so there's a common
1616 Intended for use by strip and rollback, so there's a common
1617 place for anything that has to be done before destroying history.
1617 place for anything that has to be done before destroying history.
1618
1618
1619 This is mostly useful for saving state that is in memory and waiting
1619 This is mostly useful for saving state that is in memory and waiting
1620 to be flushed when the current lock is released. Because a call to
1620 to be flushed when the current lock is released. Because a call to
1621 destroyed is imminent, the repo will be invalidated causing those
1621 destroyed is imminent, the repo will be invalidated causing those
1622 changes to stay in memory (waiting for the next unlock), or vanish
1622 changes to stay in memory (waiting for the next unlock), or vanish
1623 completely.
1623 completely.
1624 '''
1624 '''
1625 # When using the same lock to commit and strip, the phasecache is left
1625 # When using the same lock to commit and strip, the phasecache is left
1626 # dirty after committing. Then when we strip, the repo is invalidated,
1626 # dirty after committing. Then when we strip, the repo is invalidated,
1627 # causing those changes to disappear.
1627 # causing those changes to disappear.
1628 if '_phasecache' in vars(self):
1628 if '_phasecache' in vars(self):
1629 self._phasecache.write()
1629 self._phasecache.write()
1630
1630
1631 @unfilteredmethod
1631 @unfilteredmethod
1632 def destroyed(self):
1632 def destroyed(self):
1633 '''Inform the repository that nodes have been destroyed.
1633 '''Inform the repository that nodes have been destroyed.
1634 Intended for use by strip and rollback, so there's a common
1634 Intended for use by strip and rollback, so there's a common
1635 place for anything that has to be done after destroying history.
1635 place for anything that has to be done after destroying history.
1636 '''
1636 '''
1637 # When one tries to:
1637 # When one tries to:
1638 # 1) destroy nodes thus calling this method (e.g. strip)
1638 # 1) destroy nodes thus calling this method (e.g. strip)
1639 # 2) use phasecache somewhere (e.g. commit)
1639 # 2) use phasecache somewhere (e.g. commit)
1640 #
1640 #
1641 # then 2) will fail because the phasecache contains nodes that were
1641 # then 2) will fail because the phasecache contains nodes that were
1642 # removed. We can either remove phasecache from the filecache,
1642 # removed. We can either remove phasecache from the filecache,
1643 # causing it to reload next time it is accessed, or simply filter
1643 # causing it to reload next time it is accessed, or simply filter
1644 # the removed nodes now and write the updated cache.
1644 # the removed nodes now and write the updated cache.
1645 self._phasecache.filterunknown(self)
1645 self._phasecache.filterunknown(self)
1646 self._phasecache.write()
1646 self._phasecache.write()
1647
1647
1648         # update the 'served' branch cache to help read-only server processes
1648         # update the 'served' branch cache to help read-only server processes
1649 # Thanks to branchcache collaboration this is done from the nearest
1649 # Thanks to branchcache collaboration this is done from the nearest
1650 # filtered subset and it is expected to be fast.
1650 # filtered subset and it is expected to be fast.
1651 branchmap.updatecache(self.filtered('served'))
1651 branchmap.updatecache(self.filtered('served'))
1652
1652
1653 # Ensure the persistent tag cache is updated. Doing it now
1653 # Ensure the persistent tag cache is updated. Doing it now
1654 # means that the tag cache only has to worry about destroyed
1654 # means that the tag cache only has to worry about destroyed
1655 # heads immediately after a strip/rollback. That in turn
1655 # heads immediately after a strip/rollback. That in turn
1656 # guarantees that "cachetip == currenttip" (comparing both rev
1656 # guarantees that "cachetip == currenttip" (comparing both rev
1657 # and node) always means no nodes have been added or destroyed.
1657 # and node) always means no nodes have been added or destroyed.
1658
1658
1659 # XXX this is suboptimal when qrefresh'ing: we strip the current
1659 # XXX this is suboptimal when qrefresh'ing: we strip the current
1660 # head, refresh the tag cache, then immediately add a new head.
1660 # head, refresh the tag cache, then immediately add a new head.
1661 # But I think doing it this way is necessary for the "instant
1661 # But I think doing it this way is necessary for the "instant
1662 # tag cache retrieval" case to work.
1662 # tag cache retrieval" case to work.
1663 self.invalidate()
1663 self.invalidate()
1664
1664
1665 def walk(self, match, node=None):
1665 def walk(self, match, node=None):
1666 '''
1666 '''
1667 walk recursively through the directory tree or a given
1667 walk recursively through the directory tree or a given
1668 changeset, finding all files matched by the match
1668 changeset, finding all files matched by the match
1669 function
1669 function
1670 '''
1670 '''
1671 return self[node].walk(match)
1671 return self[node].walk(match)
1672
1672
1673 def status(self, node1='.', node2=None, match=None,
1673 def status(self, node1='.', node2=None, match=None,
1674 ignored=False, clean=False, unknown=False,
1674 ignored=False, clean=False, unknown=False,
1675 listsubrepos=False):
1675 listsubrepos=False):
1676 '''a convenience method that calls node1.status(node2)'''
1676 '''a convenience method that calls node1.status(node2)'''
1677 return self[node1].status(node2, match, ignored, clean, unknown,
1677 return self[node1].status(node2, match, ignored, clean, unknown,
1678 listsubrepos)
1678 listsubrepos)
1679
1679
1680 def heads(self, start=None):
1680 def heads(self, start=None):
1681 heads = self.changelog.heads(start)
1681 heads = self.changelog.heads(start)
1682 # sort the output in rev descending order
1682 # sort the output in rev descending order
1683 return sorted(heads, key=self.changelog.rev, reverse=True)
1683 return sorted(heads, key=self.changelog.rev, reverse=True)
1684
1684
1685 def branchheads(self, branch=None, start=None, closed=False):
1685 def branchheads(self, branch=None, start=None, closed=False):
1686 '''return a (possibly filtered) list of heads for the given branch
1686 '''return a (possibly filtered) list of heads for the given branch
1687
1687
1688 Heads are returned in topological order, from newest to oldest.
1688 Heads are returned in topological order, from newest to oldest.
1689 If branch is None, use the dirstate branch.
1689 If branch is None, use the dirstate branch.
1690 If start is not None, return only heads reachable from start.
1690 If start is not None, return only heads reachable from start.
1691 If closed is True, return heads that are marked as closed as well.
1691 If closed is True, return heads that are marked as closed as well.
1692 '''
1692 '''
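        # Example (illustrative only): repo.branchheads('default') returns
        # the nodes of the open heads of the 'default' branch, newest first;
        # closed=True also includes heads that have been closed.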
1693 if branch is None:
1693 if branch is None:
1694 branch = self[None].branch()
1694 branch = self[None].branch()
1695 branches = self.branchmap()
1695 branches = self.branchmap()
1696 if branch not in branches:
1696 if branch not in branches:
1697 return []
1697 return []
1698 # the cache returns heads ordered lowest to highest
1698 # the cache returns heads ordered lowest to highest
1699 bheads = list(reversed(branches.branchheads(branch, closed=closed)))
1699 bheads = list(reversed(branches.branchheads(branch, closed=closed)))
1700 if start is not None:
1700 if start is not None:
1701 # filter out the heads that cannot be reached from startrev
1701 # filter out the heads that cannot be reached from startrev
1702 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1702 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1703 bheads = [h for h in bheads if h in fbheads]
1703 bheads = [h for h in bheads if h in fbheads]
1704 return bheads
1704 return bheads
1705
1705
1706 def branches(self, nodes):
1706 def branches(self, nodes):
1707 if not nodes:
1707 if not nodes:
1708 nodes = [self.changelog.tip()]
1708 nodes = [self.changelog.tip()]
1709 b = []
1709 b = []
1710 for n in nodes:
1710 for n in nodes:
1711 t = n
1711 t = n
1712 while True:
1712 while True:
1713 p = self.changelog.parents(n)
1713 p = self.changelog.parents(n)
1714 if p[1] != nullid or p[0] == nullid:
1714 if p[1] != nullid or p[0] == nullid:
1715 b.append((t, n, p[0], p[1]))
1715 b.append((t, n, p[0], p[1]))
1716 break
1716 break
1717 n = p[0]
1717 n = p[0]
1718 return b
1718 return b
1719
1719
1720 def between(self, pairs):
1720 def between(self, pairs):
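        # Behaviour sketch (illustrative, not part of this change): for each
        # (top, bottom) pair this walks first parents from top towards bottom
        # and keeps the nodes lying 1, 2, 4, 8, ... steps below top, i.e. an
        # exponentially spaced sample of the path between the two nodes. On a
        # linear history 0..16 with top=16 and bottom=0 that sample is
        # revisions 15, 14, 12 and 8.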
1721 r = []
1721 r = []
1722
1722
1723 for top, bottom in pairs:
1723 for top, bottom in pairs:
1724 n, l, i = top, [], 0
1724 n, l, i = top, [], 0
1725 f = 1
1725 f = 1
1726
1726
1727 while n != bottom and n != nullid:
1727 while n != bottom and n != nullid:
1728 p = self.changelog.parents(n)[0]
1728 p = self.changelog.parents(n)[0]
1729 if i == f:
1729 if i == f:
1730 l.append(n)
1730 l.append(n)
1731 f = f * 2
1731 f = f * 2
1732 n = p
1732 n = p
1733 i += 1
1733 i += 1
1734
1734
1735 r.append(l)
1735 r.append(l)
1736
1736
1737 return r
1737 return r
1738
1738
1739 def checkpush(self, pushop):
1739 def checkpush(self, pushop):
1740 """Extensions can override this function if additional checks have
1740 """Extensions can override this function if additional checks have
1741 to be performed before pushing, or call it if they override push
1741 to be performed before pushing, or call it if they override push
1742 command.
1742 command.
1743 """
1743 """
1744 pass
1744 pass
1745
1745
1746 @unfilteredpropertycache
1746 @unfilteredpropertycache
1747 def prepushoutgoinghooks(self):
1747 def prepushoutgoinghooks(self):
1748         """Return a util.hooks object consisting of "(repo, remote, outgoing)"
1748         """Return a util.hooks object consisting of "(repo, remote, outgoing)"
1749 functions, which are called before pushing changesets.
1749 functions, which are called before pushing changesets.
1750 """
1750 """
1751 return util.hooks()
1751 return util.hooks()
1752
1752
1753 def stream_in(self, remote, remotereqs):
1753 def stream_in(self, remote, remotereqs):
1754 lock = self.lock()
1754 lock = self.lock()
1755 try:
1755 try:
1756 # Save remote branchmap. We will use it later
1756 # Save remote branchmap. We will use it later
1757 # to speed up branchcache creation
1757 # to speed up branchcache creation
1758 rbranchmap = None
1758 rbranchmap = None
1759 if remote.capable("branchmap"):
1759 if remote.capable("branchmap"):
1760 rbranchmap = remote.branchmap()
1760 rbranchmap = remote.branchmap()
1761
1761
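            # Wire format consumed below (a summary of the parsing code, no
            # new behaviour): the server sends a status line ('0' = ok,
            # '1' = operation forbidden, '2' = remote lock failed), then a
            # line '<total_files> <total_bytes>', then for each file a line
            # '<store path>\0<size>' followed by exactly <size> raw bytes.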
1762 fp = remote.stream_out()
1762 fp = remote.stream_out()
1763 l = fp.readline()
1763 l = fp.readline()
1764 try:
1764 try:
1765 resp = int(l)
1765 resp = int(l)
1766 except ValueError:
1766 except ValueError:
1767 raise error.ResponseError(
1767 raise error.ResponseError(
1768 _('unexpected response from remote server:'), l)
1768 _('unexpected response from remote server:'), l)
1769 if resp == 1:
1769 if resp == 1:
1770 raise util.Abort(_('operation forbidden by server'))
1770 raise util.Abort(_('operation forbidden by server'))
1771 elif resp == 2:
1771 elif resp == 2:
1772 raise util.Abort(_('locking the remote repository failed'))
1772 raise util.Abort(_('locking the remote repository failed'))
1773 elif resp != 0:
1773 elif resp != 0:
1774 raise util.Abort(_('the server sent an unknown error code'))
1774 raise util.Abort(_('the server sent an unknown error code'))
1775 self.ui.status(_('streaming all changes\n'))
1775 self.ui.status(_('streaming all changes\n'))
1776 l = fp.readline()
1776 l = fp.readline()
1777 try:
1777 try:
1778 total_files, total_bytes = map(int, l.split(' ', 1))
1778 total_files, total_bytes = map(int, l.split(' ', 1))
1779 except (ValueError, TypeError):
1779 except (ValueError, TypeError):
1780 raise error.ResponseError(
1780 raise error.ResponseError(
1781 _('unexpected response from remote server:'), l)
1781 _('unexpected response from remote server:'), l)
1782 self.ui.status(_('%d files to transfer, %s of data\n') %
1782 self.ui.status(_('%d files to transfer, %s of data\n') %
1783 (total_files, util.bytecount(total_bytes)))
1783 (total_files, util.bytecount(total_bytes)))
1784 handled_bytes = 0
1784 handled_bytes = 0
1785 self.ui.progress(_('clone'), 0, total=total_bytes)
1785 self.ui.progress(_('clone'), 0, total=total_bytes)
1786 start = time.time()
1786 start = time.time()
1787
1787
1788 tr = self.transaction(_('clone'))
1788 tr = self.transaction(_('clone'))
1789 try:
1789 try:
1790 for i in xrange(total_files):
1790 for i in xrange(total_files):
1791 # XXX doesn't support '\n' or '\r' in filenames
1791 # XXX doesn't support '\n' or '\r' in filenames
1792 l = fp.readline()
1792 l = fp.readline()
1793 try:
1793 try:
1794 name, size = l.split('\0', 1)
1794 name, size = l.split('\0', 1)
1795 size = int(size)
1795 size = int(size)
1796 except (ValueError, TypeError):
1796 except (ValueError, TypeError):
1797 raise error.ResponseError(
1797 raise error.ResponseError(
1798 _('unexpected response from remote server:'), l)
1798 _('unexpected response from remote server:'), l)
1799 if self.ui.debugflag:
1799 if self.ui.debugflag:
1800 self.ui.debug('adding %s (%s)\n' %
1800 self.ui.debug('adding %s (%s)\n' %
1801 (name, util.bytecount(size)))
1801 (name, util.bytecount(size)))
1802 # for backwards compat, name was partially encoded
1802 # for backwards compat, name was partially encoded
1803 ofp = self.svfs(store.decodedir(name), 'w')
1803 ofp = self.svfs(store.decodedir(name), 'w')
1804 for chunk in util.filechunkiter(fp, limit=size):
1804 for chunk in util.filechunkiter(fp, limit=size):
1805 handled_bytes += len(chunk)
1805 handled_bytes += len(chunk)
1806 self.ui.progress(_('clone'), handled_bytes,
1806 self.ui.progress(_('clone'), handled_bytes,
1807 total=total_bytes)
1807 total=total_bytes)
1808 ofp.write(chunk)
1808 ofp.write(chunk)
1809 ofp.close()
1809 ofp.close()
1810 tr.close()
1810 tr.close()
1811 finally:
1811 finally:
1812 tr.release()
1812 tr.release()
1813
1813
1814             # Writing straight to files circumvented the in-memory caches
1814             # Writing straight to files circumvented the in-memory caches
1815 self.invalidate()
1815 self.invalidate()
1816
1816
1817 elapsed = time.time() - start
1817 elapsed = time.time() - start
1818 if elapsed <= 0:
1818 if elapsed <= 0:
1819 elapsed = 0.001
1819 elapsed = 0.001
1820 self.ui.progress(_('clone'), None)
1820 self.ui.progress(_('clone'), None)
1821 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1821 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1822 (util.bytecount(total_bytes), elapsed,
1822 (util.bytecount(total_bytes), elapsed,
1823 util.bytecount(total_bytes / elapsed)))
1823 util.bytecount(total_bytes / elapsed)))
1824
1824
1825             # new requirements = old non-format requirements +
1825             # new requirements = old non-format requirements +
1826             #                    format-related requirements from the
1826             #                    format-related requirements from the
1827             #                    streamed-in repository
1827             #                    streamed-in repository
1828 self.requirements = remotereqs | (
1828 self.requirements = remotereqs | (
1829 self.requirements - self.supportedformats)
1829 self.requirements - self.supportedformats)
1830 self._applyopenerreqs()
1830 self._applyopenerreqs()
1831 self._writerequirements()
1831 self._writerequirements()
1832
1832
1833 if rbranchmap:
1833 if rbranchmap:
1834 rbheads = []
1834 rbheads = []
1835 closed = []
1835 closed = []
1836 for bheads in rbranchmap.itervalues():
1836 for bheads in rbranchmap.itervalues():
1837 rbheads.extend(bheads)
1837 rbheads.extend(bheads)
1838 for h in bheads:
1838 for h in bheads:
1839 r = self.changelog.rev(h)
1839 r = self.changelog.rev(h)
1840 b, c = self.changelog.branchinfo(r)
1840 b, c = self.changelog.branchinfo(r)
1841 if c:
1841 if c:
1842 closed.append(h)
1842 closed.append(h)
1843
1843
1844 if rbheads:
1844 if rbheads:
1845 rtiprev = max((int(self.changelog.rev(node))
1845 rtiprev = max((int(self.changelog.rev(node))
1846 for node in rbheads))
1846 for node in rbheads))
1847 cache = branchmap.branchcache(rbranchmap,
1847 cache = branchmap.branchcache(rbranchmap,
1848 self[rtiprev].node(),
1848 self[rtiprev].node(),
1849 rtiprev,
1849 rtiprev,
1850 closednodes=closed)
1850 closednodes=closed)
1851 # Try to stick it as low as possible
1851 # Try to stick it as low as possible
1852                 # filters above served are unlikely to be fetched from a clone
1852                 # filters above served are unlikely to be fetched from a clone
1853 for candidate in ('base', 'immutable', 'served'):
1853 for candidate in ('base', 'immutable', 'served'):
1854 rview = self.filtered(candidate)
1854 rview = self.filtered(candidate)
1855 if cache.validfor(rview):
1855 if cache.validfor(rview):
1856 self._branchcaches[candidate] = cache
1856 self._branchcaches[candidate] = cache
1857 cache.write(rview)
1857 cache.write(rview)
1858 break
1858 break
1859 self.invalidate()
1859 self.invalidate()
1860 return len(self.heads()) + 1
1860 return len(self.heads()) + 1
1861 finally:
1861 finally:
1862 lock.release()
1862 lock.release()
1863
1863
1864 def clone(self, remote, heads=[], stream=None):
1864 def clone(self, remote, heads=[], stream=None):
1865 '''clone remote repository.
1865 '''clone remote repository.
1866
1866
1867 keyword arguments:
1867 keyword arguments:
1868 heads: list of revs to clone (forces use of pull)
1868 heads: list of revs to clone (forces use of pull)
1869 stream: use streaming clone if possible'''
1869 stream: use streaming clone if possible'''
1870
1870
1871 # now, all clients that can request uncompressed clones can
1871 # now, all clients that can request uncompressed clones can
1872 # read repo formats supported by all servers that can serve
1872 # read repo formats supported by all servers that can serve
1873 # them.
1873 # them.
1874
1874
1875 # if revlog format changes, client will have to check version
1875 # if revlog format changes, client will have to check version
1876 # and format flags on "stream" capability, and use
1876 # and format flags on "stream" capability, and use
1877 # uncompressed only if compatible.
1877 # uncompressed only if compatible.
1878
1878
1879 if stream is None:
1879 if stream is None:
1880 # if the server explicitly prefers to stream (for fast LANs)
1880 # if the server explicitly prefers to stream (for fast LANs)
1881 stream = remote.capable('stream-preferred')
1881 stream = remote.capable('stream-preferred')
1882
1882
1883 if stream and not heads:
1883 if stream and not heads:
1884 # 'stream' means remote revlog format is revlogv1 only
1884 # 'stream' means remote revlog format is revlogv1 only
1885 if remote.capable('stream'):
1885 if remote.capable('stream'):
1886 self.stream_in(remote, set(('revlogv1',)))
1886 self.stream_in(remote, set(('revlogv1',)))
1887 else:
1887 else:
1888 # otherwise, 'streamreqs' contains the remote revlog format
1888 # otherwise, 'streamreqs' contains the remote revlog format
1889 streamreqs = remote.capable('streamreqs')
1889 streamreqs = remote.capable('streamreqs')
1890 if streamreqs:
1890 if streamreqs:
1891 streamreqs = set(streamreqs.split(','))
1891 streamreqs = set(streamreqs.split(','))
1892 # if we support it, stream in and adjust our requirements
1892 # if we support it, stream in and adjust our requirements
1893 if not streamreqs - self.supportedformats:
1893 if not streamreqs - self.supportedformats:
1894 self.stream_in(remote, streamreqs)
1894 self.stream_in(remote, streamreqs)
1895
1895
1896 quiet = self.ui.backupconfig('ui', 'quietbookmarkmove')
1896 quiet = self.ui.backupconfig('ui', 'quietbookmarkmove')
1897 try:
1897 try:
1898 self.ui.setconfig('ui', 'quietbookmarkmove', True, 'clone')
1898 self.ui.setconfig('ui', 'quietbookmarkmove', True, 'clone')
1899 ret = exchange.pull(self, remote, heads).cgresult
1899 ret = exchange.pull(self, remote, heads).cgresult
1900 finally:
1900 finally:
1901 self.ui.restoreconfig(quiet)
1901 self.ui.restoreconfig(quiet)
1902 return ret
1902 return ret
1903
1903
1904 def pushkey(self, namespace, key, old, new):
1904 def pushkey(self, namespace, key, old, new):
1905 try:
1905 try:
1906 tr = self.currenttransaction()
1906 tr = self.currenttransaction()
1907 hookargs = {}
1907 hookargs = {}
1908 if tr is not None:
1908 if tr is not None:
1909 hookargs.update(tr.hookargs)
1909 hookargs.update(tr.hookargs)
1910 pending = lambda: tr.writepending() and self.root or ""
1910 pending = lambda: tr.writepending() and self.root or ""
1911 hookargs['pending'] = pending
1911 hookargs['pending'] = pending
1912 hookargs['namespace'] = namespace
1912 hookargs['namespace'] = namespace
1913 hookargs['key'] = key
1913 hookargs['key'] = key
1914 hookargs['old'] = old
1914 hookargs['old'] = old
1915 hookargs['new'] = new
1915 hookargs['new'] = new
1916 self.hook('prepushkey', throw=True, **hookargs)
1916 self.hook('prepushkey', throw=True, **hookargs)
1917 except error.HookAbort, exc:
1917 except error.HookAbort, exc:
1918 self.ui.write_err(_("pushkey-abort: %s\n") % exc)
1918 self.ui.write_err(_("pushkey-abort: %s\n") % exc)
1919 if exc.hint:
1919 if exc.hint:
1920 self.ui.write_err(_("(%s)\n") % exc.hint)
1920 self.ui.write_err(_("(%s)\n") % exc.hint)
1921 return False
1921 return False
1922 self.ui.debug('pushing key for "%s:%s"\n' % (namespace, key))
1922 self.ui.debug('pushing key for "%s:%s"\n' % (namespace, key))
1923 ret = pushkey.push(self, namespace, key, old, new)
1923 ret = pushkey.push(self, namespace, key, old, new)
1924 def runhook():
1924 def runhook():
1925 self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
1925 self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
1926 ret=ret)
1926 ret=ret)
1927 self._afterlock(runhook)
1927 self._afterlock(runhook)
1928 return ret
1928 return ret
1929
1929
1930 def listkeys(self, namespace):
1930 def listkeys(self, namespace):
1931 self.hook('prelistkeys', throw=True, namespace=namespace)
1931 self.hook('prelistkeys', throw=True, namespace=namespace)
1932 self.ui.debug('listing keys for "%s"\n' % namespace)
1932 self.ui.debug('listing keys for "%s"\n' % namespace)
1933 values = pushkey.list(self, namespace)
1933 values = pushkey.list(self, namespace)
1934 self.hook('listkeys', namespace=namespace, values=values)
1934 self.hook('listkeys', namespace=namespace, values=values)
1935 return values
1935 return values
1936
1936
1937 def debugwireargs(self, one, two, three=None, four=None, five=None):
1937 def debugwireargs(self, one, two, three=None, four=None, five=None):
1938 '''used to test argument passing over the wire'''
1938 '''used to test argument passing over the wire'''
1939 return "%s %s %s %s %s" % (one, two, three, four, five)
1939 return "%s %s %s %s %s" % (one, two, three, four, five)
1940
1940
1941 def savecommitmessage(self, text):
1941 def savecommitmessage(self, text):
1942 fp = self.vfs('last-message.txt', 'wb')
1942 fp = self.vfs('last-message.txt', 'wb')
1943 try:
1943 try:
1944 fp.write(text)
1944 fp.write(text)
1945 finally:
1945 finally:
1946 fp.close()
1946 fp.close()
1947 return self.pathto(fp.name[len(self.root) + 1:])
1947 return self.pathto(fp.name[len(self.root) + 1:])
1948
1948
1949 # used to avoid circular references so destructors work
1949 # used to avoid circular references so destructors work
1950 def aftertrans(files):
1950 def aftertrans(files):
1951 renamefiles = [tuple(t) for t in files]
1951 renamefiles = [tuple(t) for t in files]
1952 def a():
1952 def a():
1953 for vfs, src, dest in renamefiles:
1953 for vfs, src, dest in renamefiles:
1954 try:
1954 try:
1955 vfs.rename(src, dest)
1955 vfs.rename(src, dest)
1956 except OSError: # journal file does not yet exist
1956 except OSError: # journal file does not yet exist
1957 pass
1957 pass
1958 return a
1958 return a
1959
1959
1960 def undoname(fn):
1960 def undoname(fn):
1961 base, name = os.path.split(fn)
1961 base, name = os.path.split(fn)
1962 assert name.startswith('journal')
1962 assert name.startswith('journal')
1963 return os.path.join(base, name.replace('journal', 'undo', 1))
1963 return os.path.join(base, name.replace('journal', 'undo', 1))
1964
1964
1965 def instance(ui, path, create):
1965 def instance(ui, path, create):
1966 return localrepository(ui, util.urllocalpath(path), create)
1966 return localrepository(ui, util.urllocalpath(path), create)
1967
1967
1968 def islocal(path):
1968 def islocal(path):
1969 return True
1969 return True
@@ -1,466 +1,466 b''
1 # templatekw.py - common changeset template keywords
1 # templatekw.py - common changeset template keywords
2 #
2 #
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import hex
8 from node import hex
9 import patch, util, error
9 import patch, util, error
10 import hbisect
10 import hbisect
11
11
12 # This helper class allows us to handle both:
12 # This helper class allows us to handle both:
13 # "{files}" (legacy command-line-specific list hack) and
13 # "{files}" (legacy command-line-specific list hack) and
14 # "{files % '{file}\n'}" (hgweb-style with inlining and function support)
14 # "{files % '{file}\n'}" (hgweb-style with inlining and function support)
15 # and to access raw values:
15 # and to access raw values:
16 # "{ifcontains(file, files, ...)}", "{ifcontains(key, extras, ...)}"
16 # "{ifcontains(file, files, ...)}", "{ifcontains(key, extras, ...)}"
17 # "{get(extras, key)}"
17 # "{get(extras, key)}"
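#
# A few concrete command lines that exercise these forms (illustrative
# examples, not taken from this change):
#
#     hg log -r . --template "{files}\n"
#     hg log -r . --template "{files % '{file}\n'}"
#     hg log -r . --template "{ifcontains('COPYING', files, 'yes', 'no')}\n"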
18
18
19 class _hybrid(object):
19 class _hybrid(object):
20 def __init__(self, gen, values, makemap, joinfmt=None):
20 def __init__(self, gen, values, makemap, joinfmt=None):
21 self.gen = gen
21 self.gen = gen
22 self.values = values
22 self.values = values
23 self._makemap = makemap
23 self._makemap = makemap
24 if joinfmt:
24 if joinfmt:
25 self.joinfmt = joinfmt
25 self.joinfmt = joinfmt
26 else:
26 else:
27 self.joinfmt = lambda x: x.values()[0]
27 self.joinfmt = lambda x: x.values()[0]
28 def __iter__(self):
28 def __iter__(self):
29 return self.gen
29 return self.gen
30 def __call__(self):
30 def __call__(self):
31 makemap = self._makemap
31 makemap = self._makemap
32 for x in self.values:
32 for x in self.values:
33 yield makemap(x)
33 yield makemap(x)
34 def __contains__(self, x):
34 def __contains__(self, x):
35 return x in self.values
35 return x in self.values
36 def __len__(self):
36 def __len__(self):
37 return len(self.values)
37 return len(self.values)
38 def __getattr__(self, name):
38 def __getattr__(self, name):
39 if name != 'get':
39 if name != 'get':
40 raise AttributeError(name)
40 raise AttributeError(name)
41 return getattr(self.values, name)
41 return getattr(self.values, name)
42
42
43 def showlist(name, values, plural=None, element=None, **args):
43 def showlist(name, values, plural=None, element=None, **args):
44 if not element:
44 if not element:
45 element = name
45 element = name
46 f = _showlist(name, values, plural, **args)
46 f = _showlist(name, values, plural, **args)
47 return _hybrid(f, values, lambda x: {element: x})
47 return _hybrid(f, values, lambda x: {element: x})
48
48
49 def _showlist(name, values, plural=None, **args):
49 def _showlist(name, values, plural=None, **args):
50 '''expand set of values.
50 '''expand set of values.
51 name is name of key in template map.
51 name is name of key in template map.
52 values is list of strings or dicts.
52 values is list of strings or dicts.
53 plural is plural of name, if not simply name + 's'.
53 plural is plural of name, if not simply name + 's'.
54
54
55 expansion works like this, given name 'foo'.
55 expansion works like this, given name 'foo'.
56
56
57 if values is empty, expand 'no_foos'.
57 if values is empty, expand 'no_foos'.
58
58
59 if 'foo' not in template map, return values as a string,
59 if 'foo' not in template map, return values as a string,
60 joined by space.
60 joined by space.
61
61
62 expand 'start_foos'.
62 expand 'start_foos'.
63
63
64 for each value, expand 'foo'. if 'last_foo' in template
64 for each value, expand 'foo'. if 'last_foo' in template
65 map, expand it instead of 'foo' for last key.
65 map, expand it instead of 'foo' for last key.
66
66
67 expand 'end_foos'.
67 expand 'end_foos'.
68 '''
68 '''
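    # Illustration only (a hypothetical style map, not part of this change):
    # with name 'foo' and a map defining
    #
    #     start_foos = 'files: '
    #     foo = '{foo} '
    #     last_foo = '{foo}\n'
    #
    # the values ['a', 'b', 'c'] expand to 'files: a b c\n'.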
69 templ = args['templ']
69 templ = args['templ']
70 if plural:
70 if plural:
71 names = plural
71 names = plural
72 else: names = name + 's'
72 else: names = name + 's'
73 if not values:
73 if not values:
74 noname = 'no_' + names
74 noname = 'no_' + names
75 if noname in templ:
75 if noname in templ:
76 yield templ(noname, **args)
76 yield templ(noname, **args)
77 return
77 return
78 if name not in templ:
78 if name not in templ:
79 if isinstance(values[0], str):
79 if isinstance(values[0], str):
80 yield ' '.join(values)
80 yield ' '.join(values)
81 else:
81 else:
82 for v in values:
82 for v in values:
83 yield dict(v, **args)
83 yield dict(v, **args)
84 return
84 return
85 startname = 'start_' + names
85 startname = 'start_' + names
86 if startname in templ:
86 if startname in templ:
87 yield templ(startname, **args)
87 yield templ(startname, **args)
88 vargs = args.copy()
88 vargs = args.copy()
89 def one(v, tag=name):
89 def one(v, tag=name):
90 try:
90 try:
91 vargs.update(v)
91 vargs.update(v)
92 except (AttributeError, ValueError):
92 except (AttributeError, ValueError):
93 try:
93 try:
94 for a, b in v:
94 for a, b in v:
95 vargs[a] = b
95 vargs[a] = b
96 except ValueError:
96 except ValueError:
97 vargs[name] = v
97 vargs[name] = v
98 return templ(tag, **vargs)
98 return templ(tag, **vargs)
99 lastname = 'last_' + name
99 lastname = 'last_' + name
100 if lastname in templ:
100 if lastname in templ:
101 last = values.pop()
101 last = values.pop()
102 else:
102 else:
103 last = None
103 last = None
104 for v in values:
104 for v in values:
105 yield one(v)
105 yield one(v)
106 if last is not None:
106 if last is not None:
107 yield one(last, tag=lastname)
107 yield one(last, tag=lastname)
108 endname = 'end_' + names
108 endname = 'end_' + names
109 if endname in templ:
109 if endname in templ:
110 yield templ(endname, **args)
110 yield templ(endname, **args)
111
111
112 def getfiles(repo, ctx, revcache):
112 def getfiles(repo, ctx, revcache):
113 if 'files' not in revcache:
113 if 'files' not in revcache:
114 revcache['files'] = repo.status(ctx.p1().node(), ctx.node())[:3]
114 revcache['files'] = repo.status(ctx.p1().node(), ctx.node())[:3]
115 return revcache['files']
115 return revcache['files']
116
116
117 def getlatesttags(repo, ctx, cache):
117 def getlatesttags(repo, ctx, cache):
118 '''return date, distance and name for the latest tag of rev'''
118 '''return date, distance and name for the latest tag of rev'''
119
119
120 if 'latesttags' not in cache:
120 if 'latesttags' not in cache:
121 # Cache mapping from rev to a tuple with tag date, tag
121 # Cache mapping from rev to a tuple with tag date, tag
122 # distance and tag name
122 # distance and tag name
123 cache['latesttags'] = {-1: (0, 0, 'null')}
123 cache['latesttags'] = {-1: (0, 0, 'null')}
124 latesttags = cache['latesttags']
124 latesttags = cache['latesttags']
125
125
126 rev = ctx.rev()
126 rev = ctx.rev()
127 todo = [rev]
127 todo = [rev]
128 while todo:
128 while todo:
129 rev = todo.pop()
129 rev = todo.pop()
130 if rev in latesttags:
130 if rev in latesttags:
131 continue
131 continue
132 ctx = repo[rev]
132 ctx = repo[rev]
133 tags = [t for t in ctx.tags()
133 tags = [t for t in ctx.tags()
134 if (repo.tagtype(t) and repo.tagtype(t) != 'local')]
134 if (repo.tagtype(t) and repo.tagtype(t) != 'local')]
135 if tags:
135 if tags:
136 latesttags[rev] = ctx.date()[0], 0, ':'.join(sorted(tags))
136 latesttags[rev] = ctx.date()[0], 0, ':'.join(sorted(tags))
137 continue
137 continue
138 try:
138 try:
139 # The tuples are laid out so the right one can be found by
139 # The tuples are laid out so the right one can be found by
140 # comparison.
140 # comparison.
141 pdate, pdist, ptag = max(
141 pdate, pdist, ptag = max(
142 latesttags[p.rev()] for p in ctx.parents())
142 latesttags[p.rev()] for p in ctx.parents())
143 except KeyError:
143 except KeyError:
144 # Cache miss - recurse
144 # Cache miss - recurse
145 todo.append(rev)
145 todo.append(rev)
146 todo.extend(p.rev() for p in ctx.parents())
146 todo.extend(p.rev() for p in ctx.parents())
147 continue
147 continue
148 latesttags[rev] = pdate, pdist + 1, ptag
148 latesttags[rev] = pdate, pdist + 1, ptag
149 return latesttags[rev]
149 return latesttags[rev]
150
150
151 def getrenamedfn(repo, endrev=None):
151 def getrenamedfn(repo, endrev=None):
152 rcache = {}
152 rcache = {}
153 if endrev is None:
153 if endrev is None:
154 endrev = len(repo)
154 endrev = len(repo)
155
155
156 def getrenamed(fn, rev):
156 def getrenamed(fn, rev):
157 '''looks up all renames for a file (up to endrev) the first
157 '''looks up all renames for a file (up to endrev) the first
158 time the file is given. It indexes on the changerev and only
158 time the file is given. It indexes on the changerev and only
159 parses the manifest if linkrev != changerev.
159 parses the manifest if linkrev != changerev.
160 Returns rename info for fn at changerev rev.'''
160 Returns rename info for fn at changerev rev.'''
161 if fn not in rcache:
161 if fn not in rcache:
162 rcache[fn] = {}
162 rcache[fn] = {}
163 fl = repo.file(fn)
163 fl = repo.file(fn)
164 for i in fl:
164 for i in fl:
165 lr = fl.linkrev(i)
165 lr = fl.linkrev(i)
166 renamed = fl.renamed(fl.node(i))
166 renamed = fl.renamed(fl.node(i))
167 rcache[fn][lr] = renamed
167 rcache[fn][lr] = renamed
168 if lr >= endrev:
168 if lr >= endrev:
169 break
169 break
170 if rev in rcache[fn]:
170 if rev in rcache[fn]:
171 return rcache[fn][rev]
171 return rcache[fn][rev]
172
172
173         # If linkrev != rev (i.e. rev not found in rcache) fall back to
173         # If linkrev != rev (i.e. rev not found in rcache) fall back to
174 # filectx logic.
174 # filectx logic.
175 try:
175 try:
176 return repo[rev][fn].renamed()
176 return repo[rev][fn].renamed()
177 except error.LookupError:
177 except error.LookupError:
178 return None
178 return None
179
179
180 return getrenamed
180 return getrenamed
181
181
182
182
def showauthor(repo, ctx, templ, **args):
    """:author: String. The unmodified author of the changeset."""
    return ctx.user()

def showbisect(repo, ctx, templ, **args):
    """:bisect: String. The changeset bisection status."""
    return hbisect.label(repo, ctx.node())

def showbranch(**args):
    """:branch: String. The name of the branch on which the changeset was
    committed.
    """
    return args['ctx'].branch()

def showbranches(**args):
    """:branches: List of strings. The name of the branch on which the
    changeset was committed. Will be empty if the branch name was
    default.
    """
    branch = args['ctx'].branch()
    if branch != 'default':
        return showlist('branch', [branch], plural='branches', **args)
    return showlist('branch', [], plural='branches', **args)

def showbookmarks(**args):
    """:bookmarks: List of strings. Any bookmarks associated with the
    changeset.
    """
    repo = args['ctx']._repo
    bookmarks = args['ctx'].bookmarks()
    current = repo._activebookmark
    makemap = lambda v: {'bookmark': v, 'current': current}
    f = _showlist('bookmark', bookmarks, **args)
    return _hybrid(f, bookmarks, makemap, lambda x: x['bookmark'])

def showchildren(**args):
    """:children: List of strings. The children of the changeset."""
    ctx = args['ctx']
    childrevs = ['%d:%s' % (cctx, cctx) for cctx in ctx.children()]
    return showlist('children', childrevs, element='child', **args)

def showcurrentbookmark(**args):
    """:currentbookmark: String. The active bookmark, if it is
    associated with the changeset"""
    import bookmarks as bookmarks # to avoid circular import issues
    repo = args['repo']
    if bookmarks.iscurrent(repo):
        current = repo._activebookmark
        if current in args['ctx'].bookmarks():
            return current
    return ''

def showdate(repo, ctx, templ, **args):
    """:date: Date information. The date when the changeset was committed."""
    return ctx.date()

def showdescription(repo, ctx, templ, **args):
    """:desc: String. The text of the changeset description."""
    return ctx.description().strip()

def showdiffstat(repo, ctx, templ, **args):
    """:diffstat: String. Statistics of changes with the following format:
    "modified files: +added/-removed lines"
    """
    stats = patch.diffstatdata(util.iterlines(ctx.diff()))
    maxname, maxtotal, adds, removes, binary = patch.diffstatsum(stats)
    return '%s: +%s/-%s' % (len(stats), adds, removes)

def showextras(**args):
    """:extras: List of dicts with key, value entries of the 'extras'
    field of this changeset."""
    extras = args['ctx'].extra()
    extras = util.sortdict((k, extras[k]) for k in sorted(extras))
    makemap = lambda k: {'key': k, 'value': extras[k]}
    c = [makemap(k) for k in extras]
    f = _showlist('extra', c, plural='extras', **args)
    return _hybrid(f, extras, makemap,
                   lambda x: '%s=%s' % (x['key'], x['value']))

def showfileadds(**args):
    """:file_adds: List of strings. Files added by this changeset."""
    repo, ctx, revcache = args['repo'], args['ctx'], args['revcache']
    return showlist('file_add', getfiles(repo, ctx, revcache)[1],
                    element='file', **args)

def showfilecopies(**args):
    """:file_copies: List of strings. Files copied in this changeset with
    their sources.
    """
    cache, ctx = args['cache'], args['ctx']
    copies = args['revcache'].get('copies')
    if copies is None:
        if 'getrenamed' not in cache:
            cache['getrenamed'] = getrenamedfn(args['repo'])
        copies = []
        getrenamed = cache['getrenamed']
        for fn in ctx.files():
            rename = getrenamed(fn, ctx.rev())
            if rename:
                copies.append((fn, rename[0]))

    copies = util.sortdict(copies)
    makemap = lambda k: {'name': k, 'source': copies[k]}
    c = [makemap(k) for k in copies]
    f = _showlist('file_copy', c, plural='file_copies', **args)
    return _hybrid(f, copies, makemap,
                   lambda x: '%s (%s)' % (x['name'], x['source']))

# showfilecopiesswitch() displays file copies only if copy records are
# provided before calling the templater, usually with a --copies
# command line switch.
def showfilecopiesswitch(**args):
    """:file_copies_switch: List of strings. Like "file_copies" but displayed
    only if the --copied switch is set.
    """
    copies = args['revcache'].get('copies') or []
    copies = util.sortdict(copies)
    makemap = lambda k: {'name': k, 'source': copies[k]}
    c = [makemap(k) for k in copies]
    f = _showlist('file_copy', c, plural='file_copies', **args)
    return _hybrid(f, copies, makemap,
                   lambda x: '%s (%s)' % (x['name'], x['source']))

def showfiledels(**args):
    """:file_dels: List of strings. Files removed by this changeset."""
    repo, ctx, revcache = args['repo'], args['ctx'], args['revcache']
    return showlist('file_del', getfiles(repo, ctx, revcache)[2],
                    element='file', **args)

def showfilemods(**args):
    """:file_mods: List of strings. Files modified by this changeset."""
    repo, ctx, revcache = args['repo'], args['ctx'], args['revcache']
    return showlist('file_mod', getfiles(repo, ctx, revcache)[0],
                    element='file', **args)

def showfiles(**args):
    """:files: List of strings. All files modified, added, or removed by this
    changeset.
    """
    return showlist('file', args['ctx'].files(), **args)

def showlatesttag(repo, ctx, templ, cache, **args):
    """:latesttag: String. Most recent global tag in the ancestors of this
    changeset.
    """
    return getlatesttags(repo, ctx, cache)[2]

def showlatesttagdistance(repo, ctx, templ, cache, **args):
    """:latesttagdistance: Integer. Longest path to the latest tag."""
    return getlatesttags(repo, ctx, cache)[1]

def showmanifest(**args):
    repo, ctx, templ = args['repo'], args['ctx'], args['templ']
    mnode = ctx.manifestnode()
    args = args.copy()
    args.update({'rev': repo.manifest.rev(mnode), 'node': hex(mnode)})
    return templ('manifest', **args)

def shownode(repo, ctx, templ, **args):
    """:node: String. The changeset identification hash, as a 40 hexadecimal
    digit string.
    """
    return ctx.hex()

def showp1rev(repo, ctx, templ, **args):
    """:p1rev: Integer. The repository-local revision number of the changeset's
    first parent, or -1 if the changeset has no parents."""
    return ctx.p1().rev()

def showp2rev(repo, ctx, templ, **args):
    """:p2rev: Integer. The repository-local revision number of the changeset's
    second parent, or -1 if the changeset has no second parent."""
    return ctx.p2().rev()

def showp1node(repo, ctx, templ, **args):
    """:p1node: String. The identification hash of the changeset's first parent,
    as a 40 digit hexadecimal string. If the changeset has no parents, all
    digits are 0."""
    return ctx.p1().hex()

def showp2node(repo, ctx, templ, **args):
    """:p2node: String. The identification hash of the changeset's second
    parent, as a 40 digit hexadecimal string. If the changeset has no second
    parent, all digits are 0."""
    return ctx.p2().hex()

def showphase(repo, ctx, templ, **args):
    """:phase: String. The changeset phase name."""
    return ctx.phasestr()

def showphaseidx(repo, ctx, templ, **args):
    """:phaseidx: Integer. The changeset phase index."""
    return ctx.phase()

def showrev(repo, ctx, templ, **args):
    """:rev: Integer. The repository-local changeset revision number."""
    return ctx.rev()

def showsubrepos(**args):
    """:subrepos: List of strings. Updated subrepositories in the changeset."""
    ctx = args['ctx']
    substate = ctx.substate
    if not substate:
        return showlist('subrepo', [], **args)
    psubstate = ctx.parents()[0].substate or {}
    subrepos = []
    for sub in substate:
        if sub not in psubstate or substate[sub] != psubstate[sub]:
            subrepos.append(sub) # modified or newly added in ctx
    for sub in psubstate:
        if sub not in substate:
            subrepos.append(sub) # removed in ctx
    return showlist('subrepo', sorted(subrepos), **args)

def shownames(namespace, **args):
    """helper method to generate a template keyword for a namespace"""
    ctx = args['ctx']
    repo = ctx.repo()
    ns = repo.names[namespace]
    names = ns.names(repo, ctx.node())
    return showlist(ns.templatename, names, plural=namespace, **args)

# don't remove "showtags" definition, even though namespaces will put
# a helper function for "tags" keyword into "keywords" map automatically,
# because online help text is built without namespaces initialization
def showtags(**args):
    """:tags: List of strings. Any tags associated with the changeset."""
    return shownames('tags', **args)

# keywords are callables like:
# fn(repo, ctx, templ, cache, revcache, **args)
# with:
# repo - current repository instance
# ctx - the changectx being displayed
# templ - the templater instance
# cache - a cache dictionary for the whole templater run
# revcache - a cache dictionary for the current revision
keywords = {
    'author': showauthor,
    'bisect': showbisect,
    'branch': showbranch,
    'branches': showbranches,
    'bookmarks': showbookmarks,
    'children': showchildren,
    'currentbookmark': showcurrentbookmark,
    'date': showdate,
    'desc': showdescription,
    'diffstat': showdiffstat,
    'extras': showextras,
    'file_adds': showfileadds,
    'file_copies': showfilecopies,
    'file_copies_switch': showfilecopiesswitch,
    'file_dels': showfiledels,
    'file_mods': showfilemods,
    'files': showfiles,
    'latesttag': showlatesttag,
    'latesttagdistance': showlatesttagdistance,
    'manifest': showmanifest,
    'node': shownode,
    'p1rev': showp1rev,
    'p1node': showp1node,
    'p2rev': showp2rev,
    'p2node': showp2node,
    'phase': showphase,
    'phaseidx': showphaseidx,
    'rev': showrev,
    'subrepos': showsubrepos,
    'tags': showtags,
}
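
# --- Editor's illustrative sketch (not part of templatekw.py) ---
# A minimal keyword that follows the calling convention documented above.
# The keyword name 'firstline' and the function are hypothetical; a
# third-party extension could register something like it by adding an
# entry to this `keywords` map (for example from its extsetup()):
#
#     def showfirstline(repo, ctx, templ, **args):
#         """:firstline: String. First line of the changeset description."""
#         return ctx.description().split('\n', 1)[0]
#
#     keywords['firstline'] = showfirstline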

def _showparents(**args):
    """:parents: List of strings. The parents of the changeset in "rev:node"
    format. If the changeset has only one "natural" parent (the predecessor
    revision) nothing is shown."""
    pass

dockeywords = {
    'parents': _showparents,
}
dockeywords.update(keywords)
del dockeywords['branches']

# tell hggettext to extract docstrings from these functions:
i18nfunctions = dockeywords.values()
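
# --- Editor's usage note (not part of templatekw.py) ---
# Each key in `keywords` is the name users write between braces in a log
# template, so a command such as
#
#     hg log -r . --template "{rev}:{node|short} {author} {bookmarks}\n"
#
# in effect calls showrev, shownode, showauthor and showbookmarks for the
# revision being printed (with the usual template filters applied on top).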