evolution: rename divergent to content-divergent...
Boris Feld
r33651:2194a872 default
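
For context, a minimal illustration of the renamed terminology in use. This is an assumption on my part rather than part of this changeset: it presumes Mercurial 4.4 or later with the evolution feature enabled, where the revset predicate was likewise renamed to ``contentdivergent()`` (the old ``divergent()`` name remaining as a deprecated alias)::

    $ hg log -r "contentdivergent()"    # list changesets whose content has diverged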
@@ -1,5444 +1,5444 @@
# commands.py - command processing for mercurial
#
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import difflib
import errno
import os
import re
import sys

from .i18n import _
from .node import (
    hex,
    nullid,
    nullrev,
    short,
)
from . import (
    archival,
    bookmarks,
    bundle2,
    changegroup,
    cmdutil,
    copies,
    debugcommands as debugcommandsmod,
    destutil,
    dirstateguard,
    discovery,
    encoding,
    error,
    exchange,
    extensions,
    formatter,
    graphmod,
    hbisect,
    help,
    hg,
    lock as lockmod,
    merge as mergemod,
    obsolete,
    patch,
    phases,
    pycompat,
    rcutil,
    registrar,
    revsetlang,
    scmutil,
    server,
    sshserver,
    streamclone,
    tags as tagsmod,
    templatekw,
    ui as uimod,
    util,
)

release = lockmod.release

table = {}
table.update(debugcommandsmod.command._table)

command = registrar.command(table)

# common command options

globalopts = [
    ('R', 'repository', '',
     _('repository root directory or name of overlay bundle file'),
     _('REPO')),
    ('', 'cwd', '',
     _('change working directory'), _('DIR')),
    ('y', 'noninteractive', None,
     _('do not prompt, automatically pick the first choice for all prompts')),
    ('q', 'quiet', None, _('suppress output')),
    ('v', 'verbose', None, _('enable additional output')),
    ('', 'color', '',
     # i18n: 'always', 'auto', 'never', and 'debug' are keywords
     # and should not be translated
     _("when to colorize (boolean, always, auto, never, or debug)"),
     _('TYPE')),
    ('', 'config', [],
     _('set/override config option (use \'section.name=value\')'),
     _('CONFIG')),
    ('', 'debug', None, _('enable debugging output')),
    ('', 'debugger', None, _('start debugger')),
    ('', 'encoding', encoding.encoding, _('set the charset encoding'),
     _('ENCODE')),
    ('', 'encodingmode', encoding.encodingmode,
     _('set the charset encoding mode'), _('MODE')),
    ('', 'traceback', None, _('always print a traceback on exception')),
    ('', 'time', None, _('time how long the command takes')),
    ('', 'profile', None, _('print command execution profile')),
    ('', 'version', None, _('output version information and exit')),
    ('h', 'help', None, _('display help and exit')),
    ('', 'hidden', False, _('consider hidden changesets')),
    ('', 'pager', 'auto',
     _("when to paginate (boolean, always, auto, or never)"), _('TYPE')),
]

dryrunopts = cmdutil.dryrunopts
remoteopts = cmdutil.remoteopts
walkopts = cmdutil.walkopts
commitopts = cmdutil.commitopts
commitopts2 = cmdutil.commitopts2
formatteropts = cmdutil.formatteropts
templateopts = cmdutil.templateopts
logopts = cmdutil.logopts
diffopts = cmdutil.diffopts
diffwsopts = cmdutil.diffwsopts
diffopts2 = cmdutil.diffopts2
mergetoolopts = cmdutil.mergetoolopts
similarityopts = cmdutil.similarityopts
subrepoopts = cmdutil.subrepoopts
debugrevlogopts = cmdutil.debugrevlogopts

# Commands start here, listed alphabetically

@command('^add',
    walkopts + subrepoopts + dryrunopts,
    _('[OPTION]... [FILE]...'),
    inferrepo=True)
def add(ui, repo, *pats, **opts):
    """add the specified files on the next commit

    Schedule files to be version controlled and added to the
    repository.

    The files will be added to the repository at the next commit. To
    undo an add before that, see :hg:`forget`.

    If no names are given, add all files to the repository (except
    files matching ``.hgignore``).

    .. container:: verbose

       Examples:

         - New (unknown) files are added
           automatically by :hg:`add`::

             $ ls
             foo.c
             $ hg status
             ? foo.c
             $ hg add
             adding foo.c
             $ hg status
             A foo.c

         - Specific files to be added can be specified::

             $ ls
             bar.c foo.c
             $ hg status
             ? bar.c
             ? foo.c
             $ hg add bar.c
             $ hg status
             A bar.c
             ? foo.c

    Returns 0 if all files are successfully added.
    """

    m = scmutil.match(repo[None], pats, pycompat.byteskwargs(opts))
    rejected = cmdutil.add(ui, repo, m, "", False, **opts)
    return rejected and 1 or 0

@command('addremove',
    similarityopts + subrepoopts + walkopts + dryrunopts,
    _('[OPTION]... [FILE]...'),
    inferrepo=True)
def addremove(ui, repo, *pats, **opts):
    """add all new files, delete all missing files

    Add all new files and remove all missing files from the
    repository.

    Unless names are given, new files are ignored if they match any of
    the patterns in ``.hgignore``. As with add, these changes take
    effect at the next commit.

    Use the -s/--similarity option to detect renamed files. This
    option takes a percentage between 0 (disabled) and 100 (files must
    be identical) as its parameter. With a parameter greater than 0,
    this compares every removed file with every added file and records
    those similar enough as renames. Detecting renamed files this way
    can be expensive. After using this option, :hg:`status -C` can be
    used to check which files were identified as moved or renamed. If
    not specified, -s/--similarity defaults to 100 and only renames of
    identical files are detected.

    .. container:: verbose

       Examples:

         - A number of files (bar.c and foo.c) are new,
           while foobar.c has been removed (without using :hg:`remove`)
           from the repository::

             $ ls
             bar.c foo.c
             $ hg status
             ! foobar.c
             ? bar.c
             ? foo.c
             $ hg addremove
             adding bar.c
             adding foo.c
             removing foobar.c
             $ hg status
             A bar.c
             A foo.c
             R foobar.c

         - A file foobar.c was moved to foo.c without using :hg:`rename`.
           Afterwards, it was edited slightly::

             $ ls
             foo.c
             $ hg status
             ! foobar.c
             ? foo.c
             $ hg addremove --similarity 90
             removing foobar.c
             adding foo.c
             recording removal of foobar.c as rename to foo.c (94% similar)
             $ hg status -C
             A foo.c
               foobar.c
             R foobar.c

    Returns 0 if all files are successfully added.
    """
    opts = pycompat.byteskwargs(opts)
    try:
        sim = float(opts.get('similarity') or 100)
    except ValueError:
        raise error.Abort(_('similarity must be a number'))
    if sim < 0 or sim > 100:
        raise error.Abort(_('similarity must be between 0 and 100'))
    matcher = scmutil.match(repo[None], pats, opts)
    return scmutil.addremove(repo, matcher, "", opts, similarity=sim / 100.0)

@command('^annotate|blame',
    [('r', 'rev', '', _('annotate the specified revision'), _('REV')),
    ('', 'follow', None,
     _('follow copies/renames and list the filename (DEPRECATED)')),
    ('', 'no-follow', None, _("don't follow copies and renames")),
    ('a', 'text', None, _('treat all files as text')),
    ('u', 'user', None, _('list the author (long with -v)')),
    ('f', 'file', None, _('list the filename')),
    ('d', 'date', None, _('list the date (short with -q)')),
    ('n', 'number', None, _('list the revision number (default)')),
    ('c', 'changeset', None, _('list the changeset')),
    ('l', 'line-number', None, _('show line number at the first appearance')),
    ('', 'skip', [], _('revision to not display (EXPERIMENTAL)'), _('REV')),
    ] + diffwsopts + walkopts + formatteropts,
    _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'),
    inferrepo=True)
def annotate(ui, repo, *pats, **opts):
    """show changeset information by line for each file

    List changes in files, showing the revision id responsible for
    each line.

    This command is useful for discovering when a change was made and
    by whom.

    If you include --file, --user, or --date, the revision number is
    suppressed unless you also include --number.

    Without the -a/--text option, annotate will avoid processing files
    it detects as binary. With -a, annotate will annotate the file
    anyway, although the results will probably be neither useful
    nor desirable.

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)
    if not pats:
        raise error.Abort(_('at least one filename or pattern is required'))

    if opts.get('follow'):
        # --follow is deprecated and now just an alias for -f/--file
        # to mimic the behavior of Mercurial before version 1.5
        opts['file'] = True

    ctx = scmutil.revsingle(repo, opts.get('rev'))

    rootfm = ui.formatter('annotate', opts)
    if ui.quiet:
        datefunc = util.shortdate
    else:
        datefunc = util.datestr
    if ctx.rev() is None:
        def hexfn(node):
            if node is None:
                return None
            else:
                return rootfm.hexfunc(node)
        if opts.get('changeset'):
            # omit "+" suffix which is appended to node hex
            def formatrev(rev):
                if rev is None:
                    return '%d' % ctx.p1().rev()
                else:
                    return '%d' % rev
        else:
            def formatrev(rev):
                if rev is None:
                    return '%d+' % ctx.p1().rev()
                else:
                    return '%d ' % rev
        def formathex(hex):
            if hex is None:
                return '%s+' % rootfm.hexfunc(ctx.p1().node())
            else:
                return '%s ' % hex
    else:
        hexfn = rootfm.hexfunc
        formatrev = formathex = pycompat.bytestr

    opmap = [('user', ' ', lambda x: x[0].user(), ui.shortuser),
             ('number', ' ', lambda x: x[0].rev(), formatrev),
             ('changeset', ' ', lambda x: hexfn(x[0].node()), formathex),
             ('date', ' ', lambda x: x[0].date(), util.cachefunc(datefunc)),
             ('file', ' ', lambda x: x[0].path(), str),
             ('line_number', ':', lambda x: x[1], str),
            ]
    fieldnamemap = {'number': 'rev', 'changeset': 'node'}

    if (not opts.get('user') and not opts.get('changeset')
        and not opts.get('date') and not opts.get('file')):
        opts['number'] = True

    linenumber = opts.get('line_number') is not None
    if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
        raise error.Abort(_('at least one of -n/-c is required for -l'))

    ui.pager('annotate')

    if rootfm.isplain():
        def makefunc(get, fmt):
            return lambda x: fmt(get(x))
    else:
        def makefunc(get, fmt):
            return get
    funcmap = [(makefunc(get, fmt), sep) for op, sep, get, fmt in opmap
               if opts.get(op)]
    funcmap[0] = (funcmap[0][0], '') # no separator in front of first column
    fields = ' '.join(fieldnamemap.get(op, op) for op, sep, get, fmt in opmap
                      if opts.get(op))

    def bad(x, y):
        raise error.Abort("%s: %s" % (x, y))

    m = scmutil.match(ctx, pats, opts, badfn=bad)

    follow = not opts.get('no_follow')
    diffopts = patch.difffeatureopts(ui, opts, section='annotate',
                                     whitespace=True)
    skiprevs = opts.get('skip')
    if skiprevs:
        skiprevs = scmutil.revrange(repo, skiprevs)

    for abs in ctx.walk(m):
        fctx = ctx[abs]
        rootfm.startitem()
        rootfm.data(abspath=abs, path=m.rel(abs))
        if not opts.get('text') and fctx.isbinary():
            rootfm.plain(_("%s: binary file\n")
                         % ((pats and m.rel(abs)) or abs))
            continue

        fm = rootfm.nested('lines')
        lines = fctx.annotate(follow=follow, linenumber=linenumber,
                              skiprevs=skiprevs, diffopts=diffopts)
        if not lines:
            fm.end()
            continue
        formats = []
        pieces = []

        for f, sep in funcmap:
            l = [f(n) for n, dummy in lines]
            if fm.isplain():
                sizes = [encoding.colwidth(x) for x in l]
                ml = max(sizes)
                formats.append([sep + ' ' * (ml - w) + '%s' for w in sizes])
            else:
                formats.append(['%s' for x in l])
            pieces.append(l)

        for f, p, l in zip(zip(*formats), zip(*pieces), lines):
            fm.startitem()
            fm.write(fields, "".join(f), *p)
            fm.write('line', ": %s", l[1])

        if not lines[-1][1].endswith('\n'):
            fm.plain('\n')
        fm.end()

    rootfm.end()

@command('archive',
    [('', 'no-decode', None, _('do not pass files through decoders')),
    ('p', 'prefix', '', _('directory prefix for files in archive'),
     _('PREFIX')),
    ('r', 'rev', '', _('revision to distribute'), _('REV')),
    ('t', 'type', '', _('type of distribution to create'), _('TYPE')),
    ] + subrepoopts + walkopts,
    _('[OPTION]... DEST'))
def archive(ui, repo, dest, **opts):
    '''create an unversioned archive of a repository revision

    By default, the revision used is the parent of the working
    directory; use -r/--rev to specify a different revision.

    The archive type is automatically detected based on file
    extension (to override, use -t/--type).

    .. container:: verbose

      Examples:

      - create a zip file containing the 1.0 release::

          hg archive -r 1.0 project-1.0.zip

      - create a tarball excluding .hg files::

          hg archive project.tar.gz -X ".hg*"

    Valid types are:

    :``files``: a directory full of files (default)
    :``tar``:   tar archive, uncompressed
    :``tbz2``:  tar archive, compressed using bzip2
    :``tgz``:   tar archive, compressed using gzip
    :``uzip``:  zip archive, uncompressed
    :``zip``:   zip archive, compressed using deflate

    The exact name of the destination archive or directory is given
    using a format string; see :hg:`help export` for details.

    Each member added to an archive file has a directory prefix
    prepended. Use -p/--prefix to specify a format string for the
    prefix. The default is the basename of the archive, with suffixes
    removed.

    Returns 0 on success.
    '''

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'))
    if not ctx:
        raise error.Abort(_('no working directory: please specify a revision'))
    node = ctx.node()
    dest = cmdutil.makefilename(repo, dest, node)
    if os.path.realpath(dest) == repo.root:
        raise error.Abort(_('repository root cannot be destination'))

    kind = opts.get('type') or archival.guesskind(dest) or 'files'
    prefix = opts.get('prefix')

    if dest == '-':
        if kind == 'files':
            raise error.Abort(_('cannot archive plain files to stdout'))
        dest = cmdutil.makefileobj(repo, dest)
        if not prefix:
            prefix = os.path.basename(repo.root) + '-%h'

    prefix = cmdutil.makefilename(repo, prefix, node)
    matchfn = scmutil.match(ctx, [], opts)
    archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
                     matchfn, prefix, subrepos=opts.get('subrepos'))

@command('backout',
    [('', 'merge', None, _('merge with old dirstate parent after backout')),
    ('', 'commit', None,
     _('commit if no conflicts were encountered (DEPRECATED)')),
    ('', 'no-commit', None, _('do not commit')),
    ('', 'parent', '',
     _('parent to choose when backing out merge (DEPRECATED)'), _('REV')),
    ('r', 'rev', '', _('revision to backout'), _('REV')),
    ('e', 'edit', False, _('invoke editor on commit messages')),
    ] + mergetoolopts + walkopts + commitopts + commitopts2,
    _('[OPTION]... [-r] REV'))
def backout(ui, repo, node=None, rev=None, **opts):
    '''reverse effect of earlier changeset

    Prepare a new changeset with the effect of REV undone in the
    current working directory. If no conflicts were encountered,
    it will be committed immediately.

    If REV is the parent of the working directory, then this new changeset
    is committed automatically (unless --no-commit is specified).

    .. note::

       :hg:`backout` cannot be used to fix either an unwanted or
       incorrect merge.

    .. container:: verbose

      Examples:

      - Reverse the effect of the parent of the working directory.
        This backout will be committed immediately::

          hg backout -r .

      - Reverse the effect of previous bad revision 23::

          hg backout -r 23

      - Reverse the effect of previous bad revision 23 and
        leave changes uncommitted::

          hg backout -r 23 --no-commit
          hg commit -m "Backout revision 23"

      By default, the pending changeset will have one parent,
      maintaining a linear history. With --merge, the pending
      changeset will instead have two parents: the old parent of the
      working directory and a new child of REV that simply undoes REV.

      Before version 1.7, the behavior without --merge was equivalent
      to specifying --merge followed by :hg:`update --clean .` to
      cancel the merge and leave the child of REV as a head to be
      merged separately.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    See :hg:`help revert` for a way to restore files to the state
    of another revision.

    Returns 0 on success, 1 if nothing to backout or there are unresolved
    files.
    '''
    wlock = lock = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        return _dobackout(ui, repo, node, rev, **opts)
    finally:
        release(lock, wlock)

def _dobackout(ui, repo, node=None, rev=None, **opts):
    opts = pycompat.byteskwargs(opts)
    if opts.get('commit') and opts.get('no_commit'):
        raise error.Abort(_("cannot use --commit with --no-commit"))
    if opts.get('merge') and opts.get('no_commit'):
        raise error.Abort(_("cannot use --merge with --no-commit"))

    if rev and node:
        raise error.Abort(_("please specify just one revision"))

    if not rev:
        rev = node

    if not rev:
        raise error.Abort(_("please specify a revision to backout"))

    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)

    cmdutil.checkunfinished(repo)
    cmdutil.bailifchanged(repo)
    node = scmutil.revsingle(repo, rev).node()

    op1, op2 = repo.dirstate.parents()
    if not repo.changelog.isancestor(node, op1):
        raise error.Abort(_('cannot backout change that is not an ancestor'))

    p1, p2 = repo.changelog.parents(node)
    if p1 == nullid:
        raise error.Abort(_('cannot backout a change with no parents'))
    if p2 != nullid:
        if not opts.get('parent'):
            raise error.Abort(_('cannot backout a merge changeset'))
        p = repo.lookup(opts['parent'])
        if p not in (p1, p2):
            raise error.Abort(_('%s is not a parent of %s') %
                              (short(p), short(node)))
        parent = p
    else:
        if opts.get('parent'):
            raise error.Abort(_('cannot use --parent on non-merge changeset'))
        parent = p1

    # the backout should appear on the same branch
    branch = repo.dirstate.branch()
    bheads = repo.branchheads(branch)
    rctx = scmutil.revsingle(repo, hex(parent))
    if not opts.get('merge') and op1 != node:
        dsguard = dirstateguard.dirstateguard(repo, 'backout')
        try:
            ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                         'backout')
            stats = mergemod.update(repo, parent, True, True, node, False)
            repo.setparents(op1, op2)
            dsguard.close()
            hg._showstats(repo, stats)
            if stats[3]:
                repo.ui.status(_("use 'hg resolve' to retry unresolved "
                                 "file merges\n"))
                return 1
        finally:
            ui.setconfig('ui', 'forcemerge', '', '')
            lockmod.release(dsguard)
    else:
        hg.clean(repo, node, show_stats=False)
        repo.dirstate.setbranch(branch)
        cmdutil.revert(ui, repo, rctx, repo.dirstate.parents())

    if opts.get('no_commit'):
        msg = _("changeset %s backed out, "
                "don't forget to commit.\n")
        ui.status(msg % short(node))
        return 0

    def commitfunc(ui, repo, message, match, opts):
        editform = 'backout'
        e = cmdutil.getcommiteditor(editform=editform,
                                    **pycompat.strkwargs(opts))
        if not message:
            # we don't translate commit messages
            message = "Backed out changeset %s" % short(node)
            e = cmdutil.getcommiteditor(edit=True, editform=editform)
        return repo.commit(message, opts.get('user'), opts.get('date'),
                           match, editor=e)
    newnode = cmdutil.commit(ui, repo, commitfunc, [], opts)
    if not newnode:
        ui.status(_("nothing changed\n"))
        return 1
    cmdutil.commitstatus(repo, newnode, branch, bheads)

    def nice(node):
        return '%d:%s' % (repo.changelog.rev(node), short(node))
    ui.status(_('changeset %s backs out changeset %s\n') %
              (nice(repo.changelog.tip()), nice(node)))
    if opts.get('merge') and op1 != node:
        hg.clean(repo, op1, show_stats=False)
        ui.status(_('merging with changeset %s\n')
                  % nice(repo.changelog.tip()))
        try:
            ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                         'backout')
            return hg.merge(repo, hex(repo.changelog.tip()))
        finally:
            ui.setconfig('ui', 'forcemerge', '', '')
    return 0

@command('bisect',
    [('r', 'reset', False, _('reset bisect state')),
    ('g', 'good', False, _('mark changeset good')),
    ('b', 'bad', False, _('mark changeset bad')),
    ('s', 'skip', False, _('skip testing changeset')),
    ('e', 'extend', False, _('extend the bisect range')),
    ('c', 'command', '', _('use command to check changeset state'), _('CMD')),
    ('U', 'noupdate', False, _('do not update to target'))],
    _("[-gbsr] [-U] [-c CMD] [REV]"))
def bisect(ui, repo, rev=None, extra=None, command=None,
           reset=None, good=None, bad=None, skip=None, extend=None,
           noupdate=None):
    """subdivision search of changesets

    This command helps to find changesets which introduce problems. To
    use, mark the earliest changeset you know exhibits the problem as
    bad, then mark the latest changeset which is free from the problem
    as good. Bisect will update your working directory to a revision
    for testing (unless the -U/--noupdate option is specified). Once
    you have performed tests, mark the working directory as good or
    bad, and bisect will either update to another candidate changeset
    or announce that it has found the bad revision.

    As a shortcut, you can also use the revision argument to mark a
    revision as good or bad without checking it out first.

    If you supply a command, it will be used for automatic bisection.
    The environment variable HG_NODE will contain the ID of the
    changeset being tested. The exit status of the command will be
    used to mark revisions as good or bad: status 0 means good, 125
    means to skip the revision, 127 (command not found) will abort the
    bisection, and any other non-zero exit status means the revision
    is bad.

    .. container:: verbose

      Some examples:

      - start a bisection with known bad revision 34, and good revision 12::

          hg bisect --bad 34
          hg bisect --good 12

      - advance the current bisection by marking current revision as good or
        bad::

          hg bisect --good
          hg bisect --bad

      - mark the current revision, or a known revision, to be skipped (e.g. if
        that revision is not usable because of another issue)::

          hg bisect --skip
          hg bisect --skip 23

      - skip all revisions that do not touch directories ``foo`` or ``bar``::

          hg bisect --skip "!( file('path:foo') & file('path:bar') )"

      - forget the current bisection::

          hg bisect --reset

      - use 'make && make tests' to automatically find the first broken
        revision::

          hg bisect --reset
          hg bisect --bad 34
          hg bisect --good 12
          hg bisect --command "make && make tests"

      - see all changesets whose states are already known in the current
        bisection::

          hg log -r "bisect(pruned)"

      - see the changeset currently being bisected (especially useful
        if running with -U/--noupdate)::

          hg log -r "bisect(current)"

      - see all changesets that took part in the current bisection::

          hg log -r "bisect(range)"

      - you can even get a nice graph::

          hg log --graph -r "bisect(range)"

      See :hg:`help revisions.bisect` for more about the `bisect()` predicate.

    Returns 0 on success.
    """
    # backward compatibility
    if rev in "good bad reset init".split():
        ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
        cmd, rev, extra = rev, extra, None
        if cmd == "good":
            good = True
        elif cmd == "bad":
            bad = True
        else:
            reset = True
    elif extra:
        raise error.Abort(_('incompatible arguments'))

    incompatibles = {
        '--bad': bad,
        '--command': bool(command),
        '--extend': extend,
        '--good': good,
        '--reset': reset,
        '--skip': skip,
    }

    enabled = [x for x in incompatibles if incompatibles[x]]

    if len(enabled) > 1:
        raise error.Abort(_('%s and %s are incompatible') %
                          tuple(sorted(enabled)[0:2]))

    if reset:
        hbisect.resetstate(repo)
        return

    state = hbisect.load_state(repo)

    # update state
    if good or bad or skip:
        if rev:
            nodes = [repo.lookup(i) for i in scmutil.revrange(repo, [rev])]
        else:
            nodes = [repo.lookup('.')]
        if good:
            state['good'] += nodes
        elif bad:
            state['bad'] += nodes
        elif skip:
            state['skip'] += nodes
        hbisect.save_state(repo, state)
        if not (state['good'] and state['bad']):
            return

    def mayupdate(repo, node, show_stats=True):
        """common used update sequence"""
        if noupdate:
            return
        cmdutil.checkunfinished(repo)
        cmdutil.bailifchanged(repo)
        return hg.clean(repo, node, show_stats=show_stats)

    displayer = cmdutil.show_changeset(ui, repo, {})

    if command:
        changesets = 1
        if noupdate:
            try:
                node = state['current'][0]
            except LookupError:
                raise error.Abort(_('current bisect revision is unknown - '
                                    'start a new bisect to fix'))
        else:
            node, p2 = repo.dirstate.parents()
            if p2 != nullid:
                raise error.Abort(_('current bisect revision is a merge'))
        if rev:
            node = repo[scmutil.revsingle(repo, rev, node)].node()
        try:
            while changesets:
                # update state
                state['current'] = [node]
                hbisect.save_state(repo, state)
                status = ui.system(command, environ={'HG_NODE': hex(node)},
                                   blockedtag='bisect_check')
                if status == 125:
                    transition = "skip"
                elif status == 0:
                    transition = "good"
                # status < 0 means process was killed
                elif status == 127:
                    raise error.Abort(_("failed to execute %s") % command)
                elif status < 0:
                    raise error.Abort(_("%s killed") % command)
                else:
                    transition = "bad"
                state[transition].append(node)
                ctx = repo[node]
                ui.status(_('changeset %d:%s: %s\n') % (ctx, ctx, transition))
                hbisect.checkstate(state)
                # bisect
                nodes, changesets, bgood = hbisect.bisect(repo.changelog, state)
                # update to next check
                node = nodes[0]
                mayupdate(repo, node, show_stats=False)
        finally:
            state['current'] = [node]
            hbisect.save_state(repo, state)
        hbisect.printresult(ui, repo, state, displayer, nodes, bgood)
        return

    hbisect.checkstate(state)

    # actually bisect
    nodes, changesets, good = hbisect.bisect(repo.changelog, state)
    if extend:
        if not changesets:
            extendnode = hbisect.extendrange(repo, state, nodes, good)
            if extendnode is not None:
                ui.write(_("Extending search to changeset %d:%s\n")
                         % (extendnode.rev(), extendnode))
                state['current'] = [extendnode.node()]
                hbisect.save_state(repo, state)
                return mayupdate(repo, extendnode.node())
        raise error.Abort(_("nothing to extend"))

    if changesets == 0:
        hbisect.printresult(ui, repo, state, displayer, nodes, good)
    else:
        assert len(nodes) == 1 # only a single node can be tested next
        node = nodes[0]
        # compute the approximate number of remaining tests
        tests, size = 0, 2
        while size <= changesets:
            tests, size = tests + 1, size * 2
        rev = repo.changelog.rev(node)
        ui.write(_("Testing changeset %d:%s "
                   "(%d changesets remaining, ~%d tests)\n")
                 % (rev, short(node), changesets, tests))
        state['current'] = [node]
        hbisect.save_state(repo, state)
        return mayupdate(repo, node)

@command('bookmarks|bookmark',
    [('f', 'force', False, _('force')),
    ('r', 'rev', '', _('revision for bookmark action'), _('REV')),
    ('d', 'delete', False, _('delete a given bookmark')),
    ('m', 'rename', '', _('rename a given bookmark'), _('OLD')),
    ('i', 'inactive', False, _('mark a bookmark inactive')),
    ] + formatteropts,
    _('hg bookmarks [OPTIONS]... [NAME]...'))
def bookmark(ui, repo, *names, **opts):
    '''create a new bookmark or list existing bookmarks

    Bookmarks are labels on changesets to help track lines of development.
    Bookmarks are unversioned and can be moved, renamed and deleted.
    Deleting or moving a bookmark has no effect on the associated changesets.

    Creating or updating to a bookmark causes it to be marked as 'active'.
    The active bookmark is indicated with a '*'.
    When a commit is made, the active bookmark will advance to the new commit.
    A plain :hg:`update` will also advance an active bookmark, if possible.
    Updating away from a bookmark will cause it to be deactivated.

    Bookmarks can be pushed and pulled between repositories (see
    :hg:`help push` and :hg:`help pull`). If a shared bookmark has
    diverged, a new 'divergent bookmark' of the form 'name@path' will
    be created. Using :hg:`merge` will resolve the divergence.

    A bookmark named '@' has the special property that :hg:`clone` will
    check it out by default if it exists.

    .. container:: verbose

      Examples:

      - create an active bookmark for a new line of development::

          hg book new-feature

      - create an inactive bookmark as a place marker::

          hg book -i reviewed

      - create an inactive bookmark on another changeset::

          hg book -r .^ tested

      - rename bookmark turkey to dinner::

          hg book -m turkey dinner

      - move the '@' bookmark from another branch::

          hg book -f @
    '''
    force = opts.get(r'force')
    rev = opts.get(r'rev')
    delete = opts.get(r'delete')
    rename = opts.get(r'rename')
    inactive = opts.get(r'inactive')

    if delete and rename:
        raise error.Abort(_("--delete and --rename are incompatible"))
954 raise error.Abort(_("--delete and --rename are incompatible"))
955 if delete and rev:
955 if delete and rev:
956 raise error.Abort(_("--rev is incompatible with --delete"))
956 raise error.Abort(_("--rev is incompatible with --delete"))
957 if rename and rev:
957 if rename and rev:
958 raise error.Abort(_("--rev is incompatible with --rename"))
958 raise error.Abort(_("--rev is incompatible with --rename"))
959 if not names and (delete or rev):
959 if not names and (delete or rev):
960 raise error.Abort(_("bookmark name required"))
960 raise error.Abort(_("bookmark name required"))
961
961
962 if delete or rename or names or inactive:
962 if delete or rename or names or inactive:
963 with repo.wlock(), repo.lock(), repo.transaction('bookmark') as tr:
963 with repo.wlock(), repo.lock(), repo.transaction('bookmark') as tr:
964 if delete:
964 if delete:
965 bookmarks.delete(repo, tr, names)
965 bookmarks.delete(repo, tr, names)
966 elif rename:
966 elif rename:
967 if not names:
967 if not names:
968 raise error.Abort(_("new bookmark name required"))
968 raise error.Abort(_("new bookmark name required"))
969 elif len(names) > 1:
969 elif len(names) > 1:
970 raise error.Abort(_("only one new bookmark name allowed"))
970 raise error.Abort(_("only one new bookmark name allowed"))
971 bookmarks.rename(repo, tr, rename, names[0], force, inactive)
971 bookmarks.rename(repo, tr, rename, names[0], force, inactive)
972 elif names:
972 elif names:
973 bookmarks.addbookmarks(repo, tr, names, rev, force, inactive)
973 bookmarks.addbookmarks(repo, tr, names, rev, force, inactive)
974 elif inactive:
974 elif inactive:
975 if len(repo._bookmarks) == 0:
975 if len(repo._bookmarks) == 0:
976 ui.status(_("no bookmarks set\n"))
976 ui.status(_("no bookmarks set\n"))
977 elif not repo._activebookmark:
977 elif not repo._activebookmark:
978 ui.status(_("no active bookmark\n"))
978 ui.status(_("no active bookmark\n"))
979 else:
979 else:
980 bookmarks.deactivate(repo)
980 bookmarks.deactivate(repo)
981 else: # show bookmarks
981 else: # show bookmarks
982 bookmarks.printbookmarks(ui, repo, **opts)
982 bookmarks.printbookmarks(ui, repo, **opts)
983
983
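# Minimal sketch (not part of the original module): creating a bookmark
# programmatically with the same wlock/lock/transaction pattern and the same
# positional bookmarks.addbookmarks() call used by the command above.  The
# helper name is hypothetical and the snippet is illustrative only.
def _addbookmarksketch(repo, name, rev=''):
    with repo.wlock(), repo.lock(), repo.transaction('bookmark') as tr:
        # rev='' matches the --rev default above (bookmark the current
        # changeset); force and inactive are passed positionally as above
        bookmarks.addbookmarks(repo, tr, [name], rev, False, False)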
984 @command('branch',
984 @command('branch',
985 [('f', 'force', None,
985 [('f', 'force', None,
986 _('set branch name even if it shadows an existing branch')),
986 _('set branch name even if it shadows an existing branch')),
987 ('C', 'clean', None, _('reset branch name to parent branch name'))],
987 ('C', 'clean', None, _('reset branch name to parent branch name'))],
988 _('[-fC] [NAME]'))
988 _('[-fC] [NAME]'))
989 def branch(ui, repo, label=None, **opts):
989 def branch(ui, repo, label=None, **opts):
990 """set or show the current branch name
990 """set or show the current branch name
991
991
992 .. note::
992 .. note::
993
993
994 Branch names are permanent and global. Use :hg:`bookmark` to create a
994 Branch names are permanent and global. Use :hg:`bookmark` to create a
995 light-weight bookmark instead. See :hg:`help glossary` for more
995 light-weight bookmark instead. See :hg:`help glossary` for more
996 information about named branches and bookmarks.
996 information about named branches and bookmarks.
997
997
998 With no argument, show the current branch name. With one argument,
998 With no argument, show the current branch name. With one argument,
999 set the working directory branch name (the branch will not exist
999 set the working directory branch name (the branch will not exist
1000 in the repository until the next commit). Standard practice
1000 in the repository until the next commit). Standard practice
1001 recommends that primary development take place on the 'default'
1001 recommends that primary development take place on the 'default'
1002 branch.
1002 branch.
1003
1003
1004 Unless -f/--force is specified, branch will not let you set a
1004 Unless -f/--force is specified, branch will not let you set a
1005 branch name that already exists.
1005 branch name that already exists.
1006
1006
1007 Use -C/--clean to reset the working directory branch to that of
1007 Use -C/--clean to reset the working directory branch to that of
1008 the parent of the working directory, negating a previous branch
1008 the parent of the working directory, negating a previous branch
1009 change.
1009 change.
1010
1010
1011 Use the command :hg:`update` to switch to an existing branch. Use
1011 Use the command :hg:`update` to switch to an existing branch. Use
1012 :hg:`commit --close-branch` to mark this branch head as closed.
1012 :hg:`commit --close-branch` to mark this branch head as closed.
1013 When all heads of a branch are closed, the branch will be
1013 When all heads of a branch are closed, the branch will be
1014 considered closed.
1014 considered closed.
1015
1015
1016 Returns 0 on success.
1016 Returns 0 on success.
1017 """
1017 """
1018 opts = pycompat.byteskwargs(opts)
1018 opts = pycompat.byteskwargs(opts)
1019 if label:
1019 if label:
1020 label = label.strip()
1020 label = label.strip()
1021
1021
1022 if not opts.get('clean') and not label:
1022 if not opts.get('clean') and not label:
1023 ui.write("%s\n" % repo.dirstate.branch())
1023 ui.write("%s\n" % repo.dirstate.branch())
1024 return
1024 return
1025
1025
1026 with repo.wlock():
1026 with repo.wlock():
1027 if opts.get('clean'):
1027 if opts.get('clean'):
1028 label = repo[None].p1().branch()
1028 label = repo[None].p1().branch()
1029 repo.dirstate.setbranch(label)
1029 repo.dirstate.setbranch(label)
1030 ui.status(_('reset working directory to branch %s\n') % label)
1030 ui.status(_('reset working directory to branch %s\n') % label)
1031 elif label:
1031 elif label:
1032 if not opts.get('force') and label in repo.branchmap():
1032 if not opts.get('force') and label in repo.branchmap():
1033 if label not in [p.branch() for p in repo[None].parents()]:
1033 if label not in [p.branch() for p in repo[None].parents()]:
1034 raise error.Abort(_('a branch of the same name already'
1034 raise error.Abort(_('a branch of the same name already'
1035 ' exists'),
1035 ' exists'),
1036 # i18n: "it" refers to an existing branch
1036 # i18n: "it" refers to an existing branch
1037 hint=_("use 'hg update' to switch to it"))
1037 hint=_("use 'hg update' to switch to it"))
1038 scmutil.checknewlabel(repo, label, 'branch')
1038 scmutil.checknewlabel(repo, label, 'branch')
1039 repo.dirstate.setbranch(label)
1039 repo.dirstate.setbranch(label)
1040 ui.status(_('marked working directory as branch %s\n') % label)
1040 ui.status(_('marked working directory as branch %s\n') % label)
1041
1041
1042 # find any open named branches aside from default
1042 # find any open named branches aside from default
1043 others = [n for n, h, t, c in repo.branchmap().iterbranches()
1043 others = [n for n, h, t, c in repo.branchmap().iterbranches()
1044 if n != "default" and not c]
1044 if n != "default" and not c]
1045 if not others:
1045 if not others:
1046 ui.status(_('(branches are permanent and global, '
1046 ui.status(_('(branches are permanent and global, '
1047 'did you want a bookmark?)\n'))
1047 'did you want a bookmark?)\n'))
1048
1048
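# Illustrative sketch (not part of the original module): the guard above only
# rejects a label that names an existing branch other than the branch of a
# working directory parent.  The hypothetical predicate below restates that
# test using the same repo APIs.
def _wouldshadowbranch(repo, label):
    """Return True if setting `label` without --force would be rejected."""
    if label not in repo.branchmap():
        return False
    return label not in [p.branch() for p in repo[None].parents()]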
1049 @command('branches',
1049 @command('branches',
1050 [('a', 'active', False,
1050 [('a', 'active', False,
1051 _('show only branches that have unmerged heads (DEPRECATED)')),
1051 _('show only branches that have unmerged heads (DEPRECATED)')),
1052 ('c', 'closed', False, _('show normal and closed branches')),
1052 ('c', 'closed', False, _('show normal and closed branches')),
1053 ] + formatteropts,
1053 ] + formatteropts,
1054 _('[-c]'))
1054 _('[-c]'))
1055 def branches(ui, repo, active=False, closed=False, **opts):
1055 def branches(ui, repo, active=False, closed=False, **opts):
1056 """list repository named branches
1056 """list repository named branches
1057
1057
1058 List the repository's named branches, indicating which ones are
1058 List the repository's named branches, indicating which ones are
1059 inactive. If -c/--closed is specified, also list branches which have
1059 inactive. If -c/--closed is specified, also list branches which have
1060 been marked closed (see :hg:`commit --close-branch`).
1060 been marked closed (see :hg:`commit --close-branch`).
1061
1061
1062 Use the command :hg:`update` to switch to an existing branch.
1062 Use the command :hg:`update` to switch to an existing branch.
1063
1063
1064 Returns 0.
1064 Returns 0.
1065 """
1065 """
1066
1066
1067 opts = pycompat.byteskwargs(opts)
1067 opts = pycompat.byteskwargs(opts)
1068 ui.pager('branches')
1068 ui.pager('branches')
1069 fm = ui.formatter('branches', opts)
1069 fm = ui.formatter('branches', opts)
1070 hexfunc = fm.hexfunc
1070 hexfunc = fm.hexfunc
1071
1071
1072 allheads = set(repo.heads())
1072 allheads = set(repo.heads())
1073 branches = []
1073 branches = []
1074 for tag, heads, tip, isclosed in repo.branchmap().iterbranches():
1074 for tag, heads, tip, isclosed in repo.branchmap().iterbranches():
1075 isactive = not isclosed and bool(set(heads) & allheads)
1075 isactive = not isclosed and bool(set(heads) & allheads)
1076 branches.append((tag, repo[tip], isactive, not isclosed))
1076 branches.append((tag, repo[tip], isactive, not isclosed))
1077 branches.sort(key=lambda i: (i[2], i[1].rev(), i[0], i[3]),
1077 branches.sort(key=lambda i: (i[2], i[1].rev(), i[0], i[3]),
1078 reverse=True)
1078 reverse=True)
1079
1079
1080 for tag, ctx, isactive, isopen in branches:
1080 for tag, ctx, isactive, isopen in branches:
1081 if active and not isactive:
1081 if active and not isactive:
1082 continue
1082 continue
1083 if isactive:
1083 if isactive:
1084 label = 'branches.active'
1084 label = 'branches.active'
1085 notice = ''
1085 notice = ''
1086 elif not isopen:
1086 elif not isopen:
1087 if not closed:
1087 if not closed:
1088 continue
1088 continue
1089 label = 'branches.closed'
1089 label = 'branches.closed'
1090 notice = _(' (closed)')
1090 notice = _(' (closed)')
1091 else:
1091 else:
1092 label = 'branches.inactive'
1092 label = 'branches.inactive'
1093 notice = _(' (inactive)')
1093 notice = _(' (inactive)')
1094 current = (tag == repo.dirstate.branch())
1094 current = (tag == repo.dirstate.branch())
1095 if current:
1095 if current:
1096 label = 'branches.current'
1096 label = 'branches.current'
1097
1097
1098 fm.startitem()
1098 fm.startitem()
1099 fm.write('branch', '%s', tag, label=label)
1099 fm.write('branch', '%s', tag, label=label)
1100 rev = ctx.rev()
1100 rev = ctx.rev()
1101 padsize = max(31 - len(str(rev)) - encoding.colwidth(tag), 0)
1101 padsize = max(31 - len(str(rev)) - encoding.colwidth(tag), 0)
1102 fmt = ' ' * padsize + ' %d:%s'
1102 fmt = ' ' * padsize + ' %d:%s'
1103 fm.condwrite(not ui.quiet, 'rev node', fmt, rev, hexfunc(ctx.node()),
1103 fm.condwrite(not ui.quiet, 'rev node', fmt, rev, hexfunc(ctx.node()),
1104 label='log.changeset changeset.%s' % ctx.phasestr())
1104 label='log.changeset changeset.%s' % ctx.phasestr())
1105 fm.context(ctx=ctx)
1105 fm.context(ctx=ctx)
1106 fm.data(active=isactive, closed=not isopen, current=current)
1106 fm.data(active=isactive, closed=not isopen, current=current)
1107 if not ui.quiet:
1107 if not ui.quiet:
1108 fm.plain(notice)
1108 fm.plain(notice)
1109 fm.plain('\n')
1109 fm.plain('\n')
1110 fm.end()
1110 fm.end()
1111
1111
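# Illustrative sketch (not part of the original module): how the listing above
# classifies a branch.  A branch is "active" when it is open and at least one
# of its heads is also a repository-wide head; otherwise it is shown as closed
# or inactive.  The helper name is hypothetical.
def _branchstate(repo, heads, isclosed):
    """Classify one branchmap().iterbranches() entry like the loop above."""
    if not isclosed and set(heads) & set(repo.heads()):
        return 'active'
    return 'closed' if isclosed else 'inactive'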
1112 @command('bundle',
1112 @command('bundle',
1113 [('f', 'force', None, _('run even when the destination is unrelated')),
1113 [('f', 'force', None, _('run even when the destination is unrelated')),
1114 ('r', 'rev', [], _('a changeset intended to be added to the destination'),
1114 ('r', 'rev', [], _('a changeset intended to be added to the destination'),
1115 _('REV')),
1115 _('REV')),
1116 ('b', 'branch', [], _('a specific branch you would like to bundle'),
1116 ('b', 'branch', [], _('a specific branch you would like to bundle'),
1117 _('BRANCH')),
1117 _('BRANCH')),
1118 ('', 'base', [],
1118 ('', 'base', [],
1119 _('a base changeset assumed to be available at the destination'),
1119 _('a base changeset assumed to be available at the destination'),
1120 _('REV')),
1120 _('REV')),
1121 ('a', 'all', None, _('bundle all changesets in the repository')),
1121 ('a', 'all', None, _('bundle all changesets in the repository')),
1122 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE')),
1122 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE')),
1123 ] + remoteopts,
1123 ] + remoteopts,
1124 _('[-f] [-t BUNDLESPEC] [-a] [-r REV]... [--base REV]... FILE [DEST]'))
1124 _('[-f] [-t BUNDLESPEC] [-a] [-r REV]... [--base REV]... FILE [DEST]'))
1125 def bundle(ui, repo, fname, dest=None, **opts):
1125 def bundle(ui, repo, fname, dest=None, **opts):
1126 """create a bundle file
1126 """create a bundle file
1127
1127
1128 Generate a bundle file containing data to be added to a repository.
1128 Generate a bundle file containing data to be added to a repository.
1129
1129
1130 To create a bundle containing all changesets, use -a/--all
1130 To create a bundle containing all changesets, use -a/--all
1131 (or --base null). Otherwise, hg assumes the destination already
1131 (or --base null). Otherwise, hg assumes the destination already
1132 has all the nodes you specify with --base parameters. Without
1132 has all the nodes you specify with --base parameters. Without
1133 --base, hg compares against the destination repository (DEST, or
1133 --base, hg compares against the destination repository (DEST, or
1134 default-push/default if none is given) and bundles its missing changesets.
1134 default-push/default if none is given) and bundles its missing changesets.
1135
1135
1136 You can change the bundle format with the -t/--type option. See
1136 You can change the bundle format with the -t/--type option. See
1137 :hg:`help bundlespec` for documentation on this format. By default,
1137 :hg:`help bundlespec` for documentation on this format. By default,
1138 the most appropriate format is used and compression defaults to
1138 the most appropriate format is used and compression defaults to
1139 bzip2.
1139 bzip2.
1140
1140
1141 The bundle file can then be transferred using conventional means
1141 The bundle file can then be transferred using conventional means
1142 and applied to another repository with the unbundle or pull
1142 and applied to another repository with the unbundle or pull
1143 command. This is useful when direct push and pull are not
1143 command. This is useful when direct push and pull are not
1144 available or when exporting an entire repository is undesirable.
1144 available or when exporting an entire repository is undesirable.
1145
1145
1146 Applying bundles preserves all changeset contents including
1146 Applying bundles preserves all changeset contents including
1147 permissions, copy/rename information, and revision history.
1147 permissions, copy/rename information, and revision history.
1148
1148
1149 Returns 0 on success, 1 if no changes found.
1149 Returns 0 on success, 1 if no changes found.
1150 """
1150 """
1151 opts = pycompat.byteskwargs(opts)
1151 opts = pycompat.byteskwargs(opts)
1152 revs = None
1152 revs = None
1153 if 'rev' in opts:
1153 if 'rev' in opts:
1154 revstrings = opts['rev']
1154 revstrings = opts['rev']
1155 revs = scmutil.revrange(repo, revstrings)
1155 revs = scmutil.revrange(repo, revstrings)
1156 if revstrings and not revs:
1156 if revstrings and not revs:
1157 raise error.Abort(_('no commits to bundle'))
1157 raise error.Abort(_('no commits to bundle'))
1158
1158
1159 bundletype = opts.get('type', 'bzip2').lower()
1159 bundletype = opts.get('type', 'bzip2').lower()
1160 try:
1160 try:
1161 bcompression, cgversion, params = exchange.parsebundlespec(
1161 bcompression, cgversion, params = exchange.parsebundlespec(
1162 repo, bundletype, strict=False)
1162 repo, bundletype, strict=False)
1163 except error.UnsupportedBundleSpecification as e:
1163 except error.UnsupportedBundleSpecification as e:
1164 raise error.Abort(str(e),
1164 raise error.Abort(str(e),
1165 hint=_("see 'hg help bundlespec' for supported "
1165 hint=_("see 'hg help bundlespec' for supported "
1166 "values for --type"))
1166 "values for --type"))
1167
1167
1168 # Packed bundles are a pseudo bundle format for now.
1168 # Packed bundles are a pseudo bundle format for now.
1169 if cgversion == 's1':
1169 if cgversion == 's1':
1170 raise error.Abort(_('packed bundles cannot be produced by "hg bundle"'),
1170 raise error.Abort(_('packed bundles cannot be produced by "hg bundle"'),
1171 hint=_("use 'hg debugcreatestreamclonebundle'"))
1171 hint=_("use 'hg debugcreatestreamclonebundle'"))
1172
1172
1173 if opts.get('all'):
1173 if opts.get('all'):
1174 if dest:
1174 if dest:
1175 raise error.Abort(_("--all is incompatible with specifying "
1175 raise error.Abort(_("--all is incompatible with specifying "
1176 "a destination"))
1176 "a destination"))
1177 if opts.get('base'):
1177 if opts.get('base'):
1178 ui.warn(_("ignoring --base because --all was specified\n"))
1178 ui.warn(_("ignoring --base because --all was specified\n"))
1179 base = ['null']
1179 base = ['null']
1180 else:
1180 else:
1181 base = scmutil.revrange(repo, opts.get('base'))
1181 base = scmutil.revrange(repo, opts.get('base'))
1182 if cgversion not in changegroup.supportedoutgoingversions(repo):
1182 if cgversion not in changegroup.supportedoutgoingversions(repo):
1183 raise error.Abort(_("repository does not support bundle version %s") %
1183 raise error.Abort(_("repository does not support bundle version %s") %
1184 cgversion)
1184 cgversion)
1185
1185
1186 if base:
1186 if base:
1187 if dest:
1187 if dest:
1188 raise error.Abort(_("--base is incompatible with specifying "
1188 raise error.Abort(_("--base is incompatible with specifying "
1189 "a destination"))
1189 "a destination"))
1190 common = [repo.lookup(rev) for rev in base]
1190 common = [repo.lookup(rev) for rev in base]
1191 heads = revs and map(repo.lookup, revs) or None
1191 heads = revs and map(repo.lookup, revs) or None
1192 outgoing = discovery.outgoing(repo, common, heads)
1192 outgoing = discovery.outgoing(repo, common, heads)
1193 else:
1193 else:
1194 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1194 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1195 dest, branches = hg.parseurl(dest, opts.get('branch'))
1195 dest, branches = hg.parseurl(dest, opts.get('branch'))
1196 other = hg.peer(repo, opts, dest)
1196 other = hg.peer(repo, opts, dest)
1197 revs, checkout = hg.addbranchrevs(repo, repo, branches, revs)
1197 revs, checkout = hg.addbranchrevs(repo, repo, branches, revs)
1198 heads = revs and map(repo.lookup, revs) or revs
1198 heads = revs and map(repo.lookup, revs) or revs
1199 outgoing = discovery.findcommonoutgoing(repo, other,
1199 outgoing = discovery.findcommonoutgoing(repo, other,
1200 onlyheads=heads,
1200 onlyheads=heads,
1201 force=opts.get('force'),
1201 force=opts.get('force'),
1202 portable=True)
1202 portable=True)
1203
1203
1204 if not outgoing.missing:
1204 if not outgoing.missing:
1205 scmutil.nochangesfound(ui, repo, not base and outgoing.excluded)
1205 scmutil.nochangesfound(ui, repo, not base and outgoing.excluded)
1206 return 1
1206 return 1
1207
1207
1208 if cgversion == '01': #bundle1
1208 if cgversion == '01': #bundle1
1209 if bcompression is None:
1209 if bcompression is None:
1210 bcompression = 'UN'
1210 bcompression = 'UN'
1211 bversion = 'HG10' + bcompression
1211 bversion = 'HG10' + bcompression
1212 bcompression = None
1212 bcompression = None
1213 elif cgversion in ('02', '03'):
1213 elif cgversion in ('02', '03'):
1214 bversion = 'HG20'
1214 bversion = 'HG20'
1215 else:
1215 else:
1216 raise error.ProgrammingError(
1216 raise error.ProgrammingError(
1217 'bundle: unexpected changegroup version %s' % cgversion)
1217 'bundle: unexpected changegroup version %s' % cgversion)
1218
1218
1219 # TODO compression options should be derived from bundlespec parsing.
1219 # TODO compression options should be derived from bundlespec parsing.
1220 # This is a temporary hack to allow adjusting bundle compression
1220 # This is a temporary hack to allow adjusting bundle compression
1221 # level without a) formalizing the bundlespec changes to declare it
1221 # level without a) formalizing the bundlespec changes to declare it
1222 # b) introducing a command flag.
1222 # b) introducing a command flag.
1223 compopts = {}
1223 compopts = {}
1224 complevel = ui.configint('experimental', 'bundlecomplevel')
1224 complevel = ui.configint('experimental', 'bundlecomplevel')
1225 if complevel is not None:
1225 if complevel is not None:
1226 compopts['level'] = complevel
1226 compopts['level'] = complevel
1227
1227
1228
1228
1229 contentopts = {'cg.version': cgversion}
1229 contentopts = {'cg.version': cgversion}
1230 if repo.ui.configbool('experimental', 'evolution.bundle-obsmarker'):
1230 if repo.ui.configbool('experimental', 'evolution.bundle-obsmarker'):
1231 contentopts['obsolescence'] = True
1231 contentopts['obsolescence'] = True
1232 if repo.ui.configbool('experimental', 'bundle-phases'):
1232 if repo.ui.configbool('experimental', 'bundle-phases'):
1233 contentopts['phases'] = True
1233 contentopts['phases'] = True
1234 bundle2.writenewbundle(ui, repo, 'bundle', fname, bversion, outgoing,
1234 bundle2.writenewbundle(ui, repo, 'bundle', fname, bversion, outgoing,
1235 contentopts, compression=bcompression,
1235 contentopts, compression=bcompression,
1236 compopts=compopts)
1236 compopts=compopts)
1237
1237
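# Minimal sketch (not part of the original module): how a --type value is
# resolved before writing a bundle, reusing the exchange.parsebundlespec()
# call from the command above.  'gzip-v2' is only an example spec; see
# 'hg help bundlespec' for the syntax.
def _resolvebundlespec(repo, spec='gzip-v2'):
    """Return (compression, changegroup version) for a bundlespec string."""
    try:
        bcompression, cgversion, params = exchange.parsebundlespec(
            repo, spec, strict=False)
    except error.UnsupportedBundleSpecification as e:
        raise error.Abort(str(e))
    return bcompression, cgversion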
1238 @command('cat',
1238 @command('cat',
1239 [('o', 'output', '',
1239 [('o', 'output', '',
1240 _('print output to file with formatted name'), _('FORMAT')),
1240 _('print output to file with formatted name'), _('FORMAT')),
1241 ('r', 'rev', '', _('print the given revision'), _('REV')),
1241 ('r', 'rev', '', _('print the given revision'), _('REV')),
1242 ('', 'decode', None, _('apply any matching decode filter')),
1242 ('', 'decode', None, _('apply any matching decode filter')),
1243 ] + walkopts + formatteropts,
1243 ] + walkopts + formatteropts,
1244 _('[OPTION]... FILE...'),
1244 _('[OPTION]... FILE...'),
1245 inferrepo=True)
1245 inferrepo=True)
1246 def cat(ui, repo, file1, *pats, **opts):
1246 def cat(ui, repo, file1, *pats, **opts):
1247 """output the current or given revision of files
1247 """output the current or given revision of files
1248
1248
1249 Print the specified files as they were at the given revision. If
1249 Print the specified files as they were at the given revision. If
1250 no revision is given, the parent of the working directory is used.
1250 no revision is given, the parent of the working directory is used.
1251
1251
1252 Output may be to a file, in which case the name of the file is
1252 Output may be to a file, in which case the name of the file is
1253 given using a format string. The formatting rules are as follows:
1253 given using a format string. The formatting rules are as follows:
1254
1254
1255 :``%%``: literal "%" character
1255 :``%%``: literal "%" character
1256 :``%s``: basename of file being printed
1256 :``%s``: basename of file being printed
1257 :``%d``: dirname of file being printed, or '.' if in repository root
1257 :``%d``: dirname of file being printed, or '.' if in repository root
1258 :``%p``: root-relative path name of file being printed
1258 :``%p``: root-relative path name of file being printed
1259 :``%H``: changeset hash (40 hexadecimal digits)
1259 :``%H``: changeset hash (40 hexadecimal digits)
1260 :``%R``: changeset revision number
1260 :``%R``: changeset revision number
1261 :``%h``: short-form changeset hash (12 hexadecimal digits)
1261 :``%h``: short-form changeset hash (12 hexadecimal digits)
1262 :``%r``: zero-padded changeset revision number
1262 :``%r``: zero-padded changeset revision number
1263 :``%b``: basename of the exporting repository
1263 :``%b``: basename of the exporting repository
1264
1264
1265 Returns 0 on success.
1265 Returns 0 on success.
1266 """
1266 """
1267 ctx = scmutil.revsingle(repo, opts.get('rev'))
1267 ctx = scmutil.revsingle(repo, opts.get('rev'))
1268 m = scmutil.match(ctx, (file1,) + pats, opts)
1268 m = scmutil.match(ctx, (file1,) + pats, opts)
1269 fntemplate = opts.pop('output', '')
1269 fntemplate = opts.pop('output', '')
1270 if cmdutil.isstdiofilename(fntemplate):
1270 if cmdutil.isstdiofilename(fntemplate):
1271 fntemplate = ''
1271 fntemplate = ''
1272
1272
1273 if fntemplate:
1273 if fntemplate:
1274 fm = formatter.nullformatter(ui, 'cat')
1274 fm = formatter.nullformatter(ui, 'cat')
1275 else:
1275 else:
1276 ui.pager('cat')
1276 ui.pager('cat')
1277 fm = ui.formatter('cat', opts)
1277 fm = ui.formatter('cat', opts)
1278 with fm:
1278 with fm:
1279 return cmdutil.cat(ui, repo, ctx, m, fm, fntemplate, '', **opts)
1279 return cmdutil.cat(ui, repo, ctx, m, fm, fntemplate, '', **opts)
1280
1280
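# Illustrative sketch (not part of the original module): what the -o/--output
# format string of 'cat' expands to for one file.  The values below are
# hypothetical and only restate the table in the docstring above; the actual
# expansion happens inside cmdutil.cat(), to which fntemplate is passed.
def _catoutputexample():
    expansions = {
        '%%': '%',                # literal "%" character
        '%s': 'readme.txt',       # basename of the file being printed
        '%d': 'docs',             # dirname, or '.' in the repository root
        '%p': 'docs/readme.txt',  # root-relative path of the file
        '%h': 'a1b2c3d4e5f6',     # short-form changeset hash (hypothetical)
        '%R': '42',               # changeset revision number (hypothetical)
    }
    # e.g. an --output template of '%s-%R' would write to 'readme.txt-42'
    return expansions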
1281 @command('^clone',
1281 @command('^clone',
1282 [('U', 'noupdate', None, _('the clone will include an empty working '
1282 [('U', 'noupdate', None, _('the clone will include an empty working '
1283 'directory (only a repository)')),
1283 'directory (only a repository)')),
1284 ('u', 'updaterev', '', _('revision, tag, or branch to check out'),
1284 ('u', 'updaterev', '', _('revision, tag, or branch to check out'),
1285 _('REV')),
1285 _('REV')),
1286 ('r', 'rev', [], _('include the specified changeset'), _('REV')),
1286 ('r', 'rev', [], _('include the specified changeset'), _('REV')),
1287 ('b', 'branch', [], _('clone only the specified branch'), _('BRANCH')),
1287 ('b', 'branch', [], _('clone only the specified branch'), _('BRANCH')),
1288 ('', 'pull', None, _('use pull protocol to copy metadata')),
1288 ('', 'pull', None, _('use pull protocol to copy metadata')),
1289 ('', 'uncompressed', None, _('use uncompressed transfer (fast over LAN)')),
1289 ('', 'uncompressed', None, _('use uncompressed transfer (fast over LAN)')),
1290 ] + remoteopts,
1290 ] + remoteopts,
1291 _('[OPTION]... SOURCE [DEST]'),
1291 _('[OPTION]... SOURCE [DEST]'),
1292 norepo=True)
1292 norepo=True)
1293 def clone(ui, source, dest=None, **opts):
1293 def clone(ui, source, dest=None, **opts):
1294 """make a copy of an existing repository
1294 """make a copy of an existing repository
1295
1295
1296 Create a copy of an existing repository in a new directory.
1296 Create a copy of an existing repository in a new directory.
1297
1297
1298 If no destination directory name is specified, it defaults to the
1298 If no destination directory name is specified, it defaults to the
1299 basename of the source.
1299 basename of the source.
1300
1300
1301 The location of the source is added to the new repository's
1301 The location of the source is added to the new repository's
1302 ``.hg/hgrc`` file, as the default to be used for future pulls.
1302 ``.hg/hgrc`` file, as the default to be used for future pulls.
1303
1303
1304 Only local paths and ``ssh://`` URLs are supported as
1304 Only local paths and ``ssh://`` URLs are supported as
1305 destinations. For ``ssh://`` destinations, no working directory or
1305 destinations. For ``ssh://`` destinations, no working directory or
1306 ``.hg/hgrc`` will be created on the remote side.
1306 ``.hg/hgrc`` will be created on the remote side.
1307
1307
1308 If the source repository has a bookmark called '@' set, that
1308 If the source repository has a bookmark called '@' set, that
1309 revision will be checked out in the new repository by default.
1309 revision will be checked out in the new repository by default.
1310
1310
1311 To check out a particular version, use -u/--update, or
1311 To check out a particular version, use -u/--update, or
1312 -U/--noupdate to create a clone with no working directory.
1312 -U/--noupdate to create a clone with no working directory.
1313
1313
1314 To pull only a subset of changesets, specify one or more revision
1314 To pull only a subset of changesets, specify one or more revision
1315 identifiers with -r/--rev or branches with -b/--branch. The
1315 identifiers with -r/--rev or branches with -b/--branch. The
1316 resulting clone will contain only the specified changesets and
1316 resulting clone will contain only the specified changesets and
1317 their ancestors. These options (or 'clone src#rev dest') imply
1317 their ancestors. These options (or 'clone src#rev dest') imply
1318 --pull, even for local source repositories.
1318 --pull, even for local source repositories.
1319
1319
1320 .. note::
1320 .. note::
1321
1321
1322 Specifying a tag will include the tagged changeset but not the
1322 Specifying a tag will include the tagged changeset but not the
1323 changeset containing the tag.
1323 changeset containing the tag.
1324
1324
1325 .. container:: verbose
1325 .. container:: verbose
1326
1326
1327 For efficiency, hardlinks are used for cloning whenever the
1327 For efficiency, hardlinks are used for cloning whenever the
1328 source and destination are on the same filesystem (note this
1328 source and destination are on the same filesystem (note this
1329 applies only to the repository data, not to the working
1329 applies only to the repository data, not to the working
1330 directory). Some filesystems, such as AFS, implement hardlinking
1330 directory). Some filesystems, such as AFS, implement hardlinking
1331 incorrectly, but do not report errors. In these cases, use the
1331 incorrectly, but do not report errors. In these cases, use the
1332 --pull option to avoid hardlinking.
1332 --pull option to avoid hardlinking.
1333
1333
1334 In some cases, you can clone repositories and the working
1334 In some cases, you can clone repositories and the working
1335 directory using full hardlinks with ::
1335 directory using full hardlinks with ::
1336
1336
1337 $ cp -al REPO REPOCLONE
1337 $ cp -al REPO REPOCLONE
1338
1338
1339 This is the fastest way to clone, but it is not always safe. The
1339 This is the fastest way to clone, but it is not always safe. The
1340 operation is not atomic (making sure REPO is not modified during
1340 operation is not atomic (making sure REPO is not modified during
1341 the operation is up to you) and you have to make sure your
1341 the operation is up to you) and you have to make sure your
1342 editor breaks hardlinks (Emacs and most Linux Kernel tools do
1342 editor breaks hardlinks (Emacs and most Linux Kernel tools do
1343 so). Also, this is not compatible with certain extensions that
1343 so). Also, this is not compatible with certain extensions that
1344 place their metadata under the .hg directory, such as mq.
1344 place their metadata under the .hg directory, such as mq.
1345
1345
1346 Mercurial will update the working directory to the first applicable
1346 Mercurial will update the working directory to the first applicable
1347 revision from this list:
1347 revision from this list:
1348
1348
1349 a) null if -U or the source repository has no changesets
1349 a) null if -U or the source repository has no changesets
1350 b) if -u . and the source repository is local, the first parent of
1350 b) if -u . and the source repository is local, the first parent of
1351 the source repository's working directory
1351 the source repository's working directory
1352 c) the changeset specified with -u (if a branch name, this means the
1352 c) the changeset specified with -u (if a branch name, this means the
1353 latest head of that branch)
1353 latest head of that branch)
1354 d) the changeset specified with -r
1354 d) the changeset specified with -r
1355 e) the tipmost head specified with -b
1355 e) the tipmost head specified with -b
1356 f) the tipmost head specified with the url#branch source syntax
1356 f) the tipmost head specified with the url#branch source syntax
1357 g) the revision marked with the '@' bookmark, if present
1357 g) the revision marked with the '@' bookmark, if present
1358 h) the tipmost head of the default branch
1358 h) the tipmost head of the default branch
1359 i) tip
1359 i) tip
1360
1360
1361 When cloning from servers that support it, Mercurial may fetch
1361 When cloning from servers that support it, Mercurial may fetch
1362 pre-generated data from a server-advertised URL. When this is done,
1362 pre-generated data from a server-advertised URL. When this is done,
1363 hooks operating on incoming changesets and changegroups may fire twice,
1363 hooks operating on incoming changesets and changegroups may fire twice,
1364 once for the bundle fetched from the URL and another for any additional
1364 once for the bundle fetched from the URL and another for any additional
1365 data not fetched from this URL. In addition, if an error occurs, the
1365 data not fetched from this URL. In addition, if an error occurs, the
1366 repository may be rolled back to a partial clone. This behavior may
1366 repository may be rolled back to a partial clone. This behavior may
1367 change in future releases. See :hg:`help -e clonebundles` for more.
1367 change in future releases. See :hg:`help -e clonebundles` for more.
1368
1368
1369 Examples:
1369 Examples:
1370
1370
1371 - clone a remote repository to a new directory named hg/::
1371 - clone a remote repository to a new directory named hg/::
1372
1372
1373 hg clone https://www.mercurial-scm.org/repo/hg/
1373 hg clone https://www.mercurial-scm.org/repo/hg/
1374
1374
1375 - create a lightweight local clone::
1375 - create a lightweight local clone::
1376
1376
1377 hg clone project/ project-feature/
1377 hg clone project/ project-feature/
1378
1378
1379 - clone from an absolute path on an ssh server (note double-slash)::
1379 - clone from an absolute path on an ssh server (note double-slash)::
1380
1380
1381 hg clone ssh://user@server//home/projects/alpha/
1381 hg clone ssh://user@server//home/projects/alpha/
1382
1382
1383 - do a high-speed clone over a LAN while checking out a
1383 - do a high-speed clone over a LAN while checking out a
1384 specified version::
1384 specified version::
1385
1385
1386 hg clone --uncompressed http://server/repo -u 1.5
1386 hg clone --uncompressed http://server/repo -u 1.5
1387
1387
1388 - create a repository without changesets after a particular revision::
1388 - create a repository without changesets after a particular revision::
1389
1389
1390 hg clone -r 04e544 experimental/ good/
1390 hg clone -r 04e544 experimental/ good/
1391
1391
1392 - clone (and track) a particular named branch::
1392 - clone (and track) a particular named branch::
1393
1393
1394 hg clone https://www.mercurial-scm.org/repo/hg/#stable
1394 hg clone https://www.mercurial-scm.org/repo/hg/#stable
1395
1395
1396 See :hg:`help urls` for details on specifying URLs.
1396 See :hg:`help urls` for details on specifying URLs.
1397
1397
1398 Returns 0 on success.
1398 Returns 0 on success.
1399 """
1399 """
1400 opts = pycompat.byteskwargs(opts)
1400 opts = pycompat.byteskwargs(opts)
1401 if opts.get('noupdate') and opts.get('updaterev'):
1401 if opts.get('noupdate') and opts.get('updaterev'):
1402 raise error.Abort(_("cannot specify both --noupdate and --updaterev"))
1402 raise error.Abort(_("cannot specify both --noupdate and --updaterev"))
1403
1403
1404 r = hg.clone(ui, opts, source, dest,
1404 r = hg.clone(ui, opts, source, dest,
1405 pull=opts.get('pull'),
1405 pull=opts.get('pull'),
1406 stream=opts.get('uncompressed'),
1406 stream=opts.get('uncompressed'),
1407 rev=opts.get('rev'),
1407 rev=opts.get('rev'),
1408 update=opts.get('updaterev') or not opts.get('noupdate'),
1408 update=opts.get('updaterev') or not opts.get('noupdate'),
1409 branch=opts.get('branch'),
1409 branch=opts.get('branch'),
1410 shareopts=opts.get('shareopts'))
1410 shareopts=opts.get('shareopts'))
1411
1411
1412 return r is None
1412 return r is None
1413
1413
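# Minimal sketch (not part of the original module): a programmatic equivalent
# of 'hg clone --pull SOURCE DEST', reusing the hg.clone() call and keyword
# arguments visible in the command above.  The fixed argument values are
# hypothetical choices for illustration only.
def _clonesketch(ui, source, dest):
    r = hg.clone(ui, {}, source, dest,
                 pull=True,
                 stream=False,
                 rev=None,
                 update=True,
                 branch=None,
                 shareopts=None)
    return r is None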
1414 @command('^commit|ci',
1414 @command('^commit|ci',
1415 [('A', 'addremove', None,
1415 [('A', 'addremove', None,
1416 _('mark new/missing files as added/removed before committing')),
1416 _('mark new/missing files as added/removed before committing')),
1417 ('', 'close-branch', None,
1417 ('', 'close-branch', None,
1418 _('mark a branch head as closed')),
1418 _('mark a branch head as closed')),
1419 ('', 'amend', None, _('amend the parent of the working directory')),
1419 ('', 'amend', None, _('amend the parent of the working directory')),
1420 ('s', 'secret', None, _('use the secret phase for committing')),
1420 ('s', 'secret', None, _('use the secret phase for committing')),
1421 ('e', 'edit', None, _('invoke editor on commit messages')),
1421 ('e', 'edit', None, _('invoke editor on commit messages')),
1422 ('i', 'interactive', None, _('use interactive mode')),
1422 ('i', 'interactive', None, _('use interactive mode')),
1423 ] + walkopts + commitopts + commitopts2 + subrepoopts,
1423 ] + walkopts + commitopts + commitopts2 + subrepoopts,
1424 _('[OPTION]... [FILE]...'),
1424 _('[OPTION]... [FILE]...'),
1425 inferrepo=True)
1425 inferrepo=True)
1426 def commit(ui, repo, *pats, **opts):
1426 def commit(ui, repo, *pats, **opts):
1427 """commit the specified files or all outstanding changes
1427 """commit the specified files or all outstanding changes
1428
1428
1429 Commit changes to the given files into the repository. Unlike a
1429 Commit changes to the given files into the repository. Unlike a
1430 centralized SCM, this is a local operation. See
1430 centralized SCM, this is a local operation. See
1431 :hg:`push` for a way to actively distribute your changes.
1431 :hg:`push` for a way to actively distribute your changes.
1432
1432
1433 If a list of files is omitted, all changes reported by :hg:`status`
1433 If a list of files is omitted, all changes reported by :hg:`status`
1434 will be committed.
1434 will be committed.
1435
1435
1436 If you are committing the result of a merge, do not provide any
1436 If you are committing the result of a merge, do not provide any
1437 filenames or -I/-X filters.
1437 filenames or -I/-X filters.
1438
1438
1439 If no commit message is specified, Mercurial starts your
1439 If no commit message is specified, Mercurial starts your
1440 configured editor where you can enter a message. In case your
1440 configured editor where you can enter a message. In case your
1441 commit fails, you will find a backup of your message in
1441 commit fails, you will find a backup of your message in
1442 ``.hg/last-message.txt``.
1442 ``.hg/last-message.txt``.
1443
1443
1444 The --close-branch flag can be used to mark the current branch
1444 The --close-branch flag can be used to mark the current branch
1445 head closed. When all heads of a branch are closed, the branch
1445 head closed. When all heads of a branch are closed, the branch
1446 will be considered closed and no longer listed.
1446 will be considered closed and no longer listed.
1447
1447
1448 The --amend flag can be used to amend the parent of the
1448 The --amend flag can be used to amend the parent of the
1449 working directory with a new commit that contains the changes
1449 working directory with a new commit that contains the changes
1450 in the parent in addition to those currently reported by :hg:`status`,
1450 in the parent in addition to those currently reported by :hg:`status`,
1451 if there are any. The old commit is stored in a backup bundle in
1451 if there are any. The old commit is stored in a backup bundle in
1452 ``.hg/strip-backup`` (see :hg:`help bundle` and :hg:`help unbundle`
1452 ``.hg/strip-backup`` (see :hg:`help bundle` and :hg:`help unbundle`
1453 on how to restore it).
1453 on how to restore it).
1454
1454
1455 Message, user and date are taken from the amended commit unless
1455 Message, user and date are taken from the amended commit unless
1456 specified. When a message isn't specified on the command line,
1456 specified. When a message isn't specified on the command line,
1457 the editor will open with the message of the amended commit.
1457 the editor will open with the message of the amended commit.
1458
1458
1459 It is not possible to amend public changesets (see :hg:`help phases`)
1459 It is not possible to amend public changesets (see :hg:`help phases`)
1460 or changesets that have children.
1460 or changesets that have children.
1461
1461
1462 See :hg:`help dates` for a list of formats valid for -d/--date.
1462 See :hg:`help dates` for a list of formats valid for -d/--date.
1463
1463
1464 Returns 0 on success, 1 if nothing changed.
1464 Returns 0 on success, 1 if nothing changed.
1465
1465
1466 .. container:: verbose
1466 .. container:: verbose
1467
1467
1468 Examples:
1468 Examples:
1469
1469
1470 - commit all files ending in .py::
1470 - commit all files ending in .py::
1471
1471
1472 hg commit --include "set:**.py"
1472 hg commit --include "set:**.py"
1473
1473
1474 - commit all non-binary files::
1474 - commit all non-binary files::
1475
1475
1476 hg commit --exclude "set:binary()"
1476 hg commit --exclude "set:binary()"
1477
1477
1478 - amend the current commit and set the date to now::
1478 - amend the current commit and set the date to now::
1479
1479
1480 hg commit --amend --date now
1480 hg commit --amend --date now
1481 """
1481 """
1482 wlock = lock = None
1482 wlock = lock = None
1483 try:
1483 try:
1484 wlock = repo.wlock()
1484 wlock = repo.wlock()
1485 lock = repo.lock()
1485 lock = repo.lock()
1486 return _docommit(ui, repo, *pats, **opts)
1486 return _docommit(ui, repo, *pats, **opts)
1487 finally:
1487 finally:
1488 release(lock, wlock)
1488 release(lock, wlock)
1489
1489
1490 def _docommit(ui, repo, *pats, **opts):
1490 def _docommit(ui, repo, *pats, **opts):
1491 if opts.get(r'interactive'):
1491 if opts.get(r'interactive'):
1492 opts.pop(r'interactive')
1492 opts.pop(r'interactive')
1493 ret = cmdutil.dorecord(ui, repo, commit, None, False,
1493 ret = cmdutil.dorecord(ui, repo, commit, None, False,
1494 cmdutil.recordfilter, *pats,
1494 cmdutil.recordfilter, *pats,
1495 **opts)
1495 **opts)
1496 # ret can be 0 (no changes to record) or the value returned by
1496 # ret can be 0 (no changes to record) or the value returned by
1497 # commit(), 1 if nothing changed or None on success.
1497 # commit(), 1 if nothing changed or None on success.
1498 return 1 if ret == 0 else ret
1498 return 1 if ret == 0 else ret
1499
1499
1500 opts = pycompat.byteskwargs(opts)
1500 opts = pycompat.byteskwargs(opts)
1501 if opts.get('subrepos'):
1501 if opts.get('subrepos'):
1502 if opts.get('amend'):
1502 if opts.get('amend'):
1503 raise error.Abort(_('cannot amend with --subrepos'))
1503 raise error.Abort(_('cannot amend with --subrepos'))
1504 # Let --subrepos on the command line override config setting.
1504 # Let --subrepos on the command line override config setting.
1505 ui.setconfig('ui', 'commitsubrepos', True, 'commit')
1505 ui.setconfig('ui', 'commitsubrepos', True, 'commit')
1506
1506
1507 cmdutil.checkunfinished(repo, commit=True)
1507 cmdutil.checkunfinished(repo, commit=True)
1508
1508
1509 branch = repo[None].branch()
1509 branch = repo[None].branch()
1510 bheads = repo.branchheads(branch)
1510 bheads = repo.branchheads(branch)
1511
1511
1512 extra = {}
1512 extra = {}
1513 if opts.get('close_branch'):
1513 if opts.get('close_branch'):
1514 extra['close'] = 1
1514 extra['close'] = 1
1515
1515
1516 if not bheads:
1516 if not bheads:
1517 raise error.Abort(_('can only close branch heads'))
1517 raise error.Abort(_('can only close branch heads'))
1518 elif opts.get('amend'):
1518 elif opts.get('amend'):
1519 if repo[None].parents()[0].p1().branch() != branch and \
1519 if repo[None].parents()[0].p1().branch() != branch and \
1520 repo[None].parents()[0].p2().branch() != branch:
1520 repo[None].parents()[0].p2().branch() != branch:
1521 raise error.Abort(_('can only close branch heads'))
1521 raise error.Abort(_('can only close branch heads'))
1522
1522
1523 if opts.get('amend'):
1523 if opts.get('amend'):
1524 if ui.configbool('ui', 'commitsubrepos'):
1524 if ui.configbool('ui', 'commitsubrepos'):
1525 raise error.Abort(_('cannot amend with ui.commitsubrepos enabled'))
1525 raise error.Abort(_('cannot amend with ui.commitsubrepos enabled'))
1526
1526
1527 old = repo['.']
1527 old = repo['.']
1528 if not old.mutable():
1528 if not old.mutable():
1529 raise error.Abort(_('cannot amend public changesets'))
1529 raise error.Abort(_('cannot amend public changesets'))
1530 if len(repo[None].parents()) > 1:
1530 if len(repo[None].parents()) > 1:
1531 raise error.Abort(_('cannot amend while merging'))
1531 raise error.Abort(_('cannot amend while merging'))
1532 allowunstable = obsolete.isenabled(repo, obsolete.allowunstableopt)
1532 allowunstable = obsolete.isenabled(repo, obsolete.allowunstableopt)
1533 if not allowunstable and old.children():
1533 if not allowunstable and old.children():
1534 raise error.Abort(_('cannot amend changeset with children'))
1534 raise error.Abort(_('cannot amend changeset with children'))
1535
1535
1536 # Currently histedit gets confused if an amend happens while histedit
1536 # Currently histedit gets confused if an amend happens while histedit
1537 # is in progress. Since we have a checkunfinished command, we are
1537 # is in progress. Since we have a checkunfinished command, we are
1538 # temporarily honoring it.
1538 # temporarily honoring it.
1539 #
1539 #
1540 # Note: eventually this guard will be removed. Please do not expect
1540 # Note: eventually this guard will be removed. Please do not expect
1541 # this behavior to remain.
1541 # this behavior to remain.
1542 if not obsolete.isenabled(repo, obsolete.createmarkersopt):
1542 if not obsolete.isenabled(repo, obsolete.createmarkersopt):
1543 cmdutil.checkunfinished(repo)
1543 cmdutil.checkunfinished(repo)
1544
1544
1545 # commitfunc is used only for temporary amend commit by cmdutil.amend
1545 # commitfunc is used only for temporary amend commit by cmdutil.amend
1546 def commitfunc(ui, repo, message, match, opts):
1546 def commitfunc(ui, repo, message, match, opts):
1547 return repo.commit(message,
1547 return repo.commit(message,
1548 opts.get('user') or old.user(),
1548 opts.get('user') or old.user(),
1549 opts.get('date') or old.date(),
1549 opts.get('date') or old.date(),
1550 match,
1550 match,
1551 extra=extra)
1551 extra=extra)
1552
1552
1553 node = cmdutil.amend(ui, repo, commitfunc, old, extra, pats, opts)
1553 node = cmdutil.amend(ui, repo, commitfunc, old, extra, pats, opts)
1554 if node == old.node():
1554 if node == old.node():
1555 ui.status(_("nothing changed\n"))
1555 ui.status(_("nothing changed\n"))
1556 return 1
1556 return 1
1557 else:
1557 else:
1558 def commitfunc(ui, repo, message, match, opts):
1558 def commitfunc(ui, repo, message, match, opts):
1559 overrides = {}
1559 overrides = {}
1560 if opts.get('secret'):
1560 if opts.get('secret'):
1561 overrides[('phases', 'new-commit')] = 'secret'
1561 overrides[('phases', 'new-commit')] = 'secret'
1562
1562
1563 baseui = repo.baseui
1563 baseui = repo.baseui
1564 with baseui.configoverride(overrides, 'commit'):
1564 with baseui.configoverride(overrides, 'commit'):
1565 with ui.configoverride(overrides, 'commit'):
1565 with ui.configoverride(overrides, 'commit'):
1566 editform = cmdutil.mergeeditform(repo[None],
1566 editform = cmdutil.mergeeditform(repo[None],
1567 'commit.normal')
1567 'commit.normal')
1568 editor = cmdutil.getcommiteditor(
1568 editor = cmdutil.getcommiteditor(
1569 editform=editform, **pycompat.strkwargs(opts))
1569 editform=editform, **pycompat.strkwargs(opts))
1570 return repo.commit(message,
1570 return repo.commit(message,
1571 opts.get('user'),
1571 opts.get('user'),
1572 opts.get('date'),
1572 opts.get('date'),
1573 match,
1573 match,
1574 editor=editor,
1574 editor=editor,
1575 extra=extra)
1575 extra=extra)
1576
1576
1577 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
1577 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
1578
1578
1579 if not node:
1579 if not node:
1580 stat = cmdutil.postcommitstatus(repo, pats, opts)
1580 stat = cmdutil.postcommitstatus(repo, pats, opts)
1581 if stat[3]:
1581 if stat[3]:
1582 ui.status(_("nothing changed (%d missing files, see "
1582 ui.status(_("nothing changed (%d missing files, see "
1583 "'hg status')\n") % len(stat[3]))
1583 "'hg status')\n") % len(stat[3]))
1584 else:
1584 else:
1585 ui.status(_("nothing changed\n"))
1585 ui.status(_("nothing changed\n"))
1586 return 1
1586 return 1
1587
1587
1588 cmdutil.commitstatus(repo, node, branch, bheads, opts)
1588 cmdutil.commitstatus(repo, node, branch, bheads, opts)
1589
1589
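# Illustrative sketch (not part of the original module): how --secret takes
# effect above: the commit runs under a temporary config override that sets
# phases.new-commit to 'secret' on both repo.baseui and ui via
# configoverride().  The helper name and the bare repo.commit() call are
# simplifications for illustration.
def _commitsecretsketch(ui, repo, message):
    overrides = {('phases', 'new-commit'): 'secret'}
    with repo.baseui.configoverride(overrides, 'commit'):
        with ui.configoverride(overrides, 'commit'):
            return repo.commit(message)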
1590 @command('config|showconfig|debugconfig',
1590 @command('config|showconfig|debugconfig',
1591 [('u', 'untrusted', None, _('show untrusted configuration options')),
1591 [('u', 'untrusted', None, _('show untrusted configuration options')),
1592 ('e', 'edit', None, _('edit user config')),
1592 ('e', 'edit', None, _('edit user config')),
1593 ('l', 'local', None, _('edit repository config')),
1593 ('l', 'local', None, _('edit repository config')),
1594 ('g', 'global', None, _('edit global config'))] + formatteropts,
1594 ('g', 'global', None, _('edit global config'))] + formatteropts,
1595 _('[-u] [NAME]...'),
1595 _('[-u] [NAME]...'),
1596 optionalrepo=True)
1596 optionalrepo=True)
1597 def config(ui, repo, *values, **opts):
1597 def config(ui, repo, *values, **opts):
1598 """show combined config settings from all hgrc files
1598 """show combined config settings from all hgrc files
1599
1599
1600 With no arguments, print names and values of all config items.
1600 With no arguments, print names and values of all config items.
1601
1601
1602 With one argument of the form section.name, print just the value
1602 With one argument of the form section.name, print just the value
1603 of that config item.
1603 of that config item.
1604
1604
1605 With multiple arguments, print names and values of all config
    With multiple arguments, print names and values of all config
    items with matching section names.

    With --edit, start an editor on the user-level config file. With
    --global, edit the system-wide config file. With --local, edit the
    repository-level config file.

    With --debug, the source (filename and line number) is printed
    for each config item.

    See :hg:`help config` for more information about config files.

    Returns 0 on success, 1 if NAME does not exist.

    """

    opts = pycompat.byteskwargs(opts)
    if opts.get('edit') or opts.get('local') or opts.get('global'):
        if opts.get('local') and opts.get('global'):
            raise error.Abort(_("can't use --local and --global together"))

        if opts.get('local'):
            if not repo:
                raise error.Abort(_("can't use --local outside a repository"))
            paths = [repo.vfs.join('hgrc')]
        elif opts.get('global'):
            paths = rcutil.systemrcpath()
        else:
            paths = rcutil.userrcpath()

        for f in paths:
            if os.path.exists(f):
                break
        else:
            if opts.get('global'):
                samplehgrc = uimod.samplehgrcs['global']
            elif opts.get('local'):
                samplehgrc = uimod.samplehgrcs['local']
            else:
                samplehgrc = uimod.samplehgrcs['user']

            f = paths[0]
            fp = open(f, "wb")
            fp.write(util.tonativeeol(samplehgrc))
            fp.close()

        editor = ui.geteditor()
        ui.system("%s \"%s\"" % (editor, f),
                  onerr=error.Abort, errprefix=_("edit failed"),
                  blockedtag='config_edit')
        return
    ui.pager('config')
    fm = ui.formatter('config', opts)
    for t, f in rcutil.rccomponents():
        if t == 'path':
            ui.debug('read config from: %s\n' % f)
        elif t == 'items':
            for section, name, value, source in f:
                ui.debug('set config by: %s\n' % source)
        else:
            raise error.ProgrammingError('unknown rctype: %s' % t)
    untrusted = bool(opts.get('untrusted'))
    if values:
        sections = [v for v in values if '.' not in v]
        items = [v for v in values if '.' in v]
        if len(items) > 1 or items and sections:
            raise error.Abort(_('only one config item permitted'))
    matched = False
    for section, name, value in ui.walkconfig(untrusted=untrusted):
        source = ui.configsource(section, name, untrusted)
        value = pycompat.bytestr(value)
        if fm.isplain():
            source = source or 'none'
            value = value.replace('\n', '\\n')
        entryname = section + '.' + name
        if values:
            for v in values:
                if v == section:
                    fm.startitem()
                    fm.condwrite(ui.debugflag, 'source', '%s: ', source)
                    fm.write('name value', '%s=%s\n', entryname, value)
                    matched = True
                elif v == entryname:
                    fm.startitem()
                    fm.condwrite(ui.debugflag, 'source', '%s: ', source)
                    fm.write('value', '%s\n', value)
                    fm.data(name=entryname)
                    matched = True
        else:
            fm.startitem()
            fm.condwrite(ui.debugflag, 'source', '%s: ', source)
            fm.write('name value', '%s=%s\n', entryname, value)
            matched = True
    fm.end()
    if matched:
        return 0
    return 1

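# Illustrative sketch, not part of the original source: the walkconfig()
# loop above prints "name=value" lines when a whole section is requested
# and a bare value when an exact item is named. With a hypothetical user
# config that sets ui.username, the plain formatter output would look like:
#
#   $ hg config ui
#   ui.username=Jane Doe <jane@example.com>
#
#   $ hg config ui.username
#   Jane Doe <jane@example.com>
#
# With --debug, fm.condwrite() additionally prefixes every line with its
# source, e.g. "/home/jane/.hgrc:2: ".
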
@command('copy|cp',
    [('A', 'after', None, _('record a copy that has already occurred')),
    ('f', 'force', None, _('forcibly copy over an existing managed file')),
    ] + walkopts + dryrunopts,
    _('[OPTION]... [SOURCE]... DEST'))
def copy(ui, repo, *pats, **opts):
    """mark files as copied for the next commit

    Mark dest as having copies of source files. If dest is a
    directory, copies are put in that directory. If dest is a file,
    the source must be a single file.

    By default, this command copies the contents of files as they
    exist in the working directory. If invoked with -A/--after, the
    operation is recorded, but no copying is performed.

    This command takes effect with the next commit. To undo a copy
    before that, see :hg:`revert`.

    Returns 0 on success, 1 if errors are encountered.
    """
    opts = pycompat.byteskwargs(opts)
    with repo.wlock(False):
        return cmdutil.copy(ui, repo, pats, opts)

@command('debugcommands', [], _('[COMMAND]'), norepo=True)
def debugcommands(ui, cmd='', *args):
    """list all available commands and options"""
    for cmd, vals in sorted(table.iteritems()):
        cmd = cmd.split('|')[0].strip('^')
        opts = ', '.join([i[1] for i in vals[1]])
        ui.write('%s: %s\n' % (cmd, opts))

@command('debugcomplete',
    [('o', 'options', None, _('show the command options'))],
    _('[-o] CMD'),
    norepo=True)
def debugcomplete(ui, cmd='', **opts):
    """returns the completion list associated with the given command"""

    if opts.get('options'):
        options = []
        otables = [globalopts]
        if cmd:
            aliases, entry = cmdutil.findcmd(cmd, table, False)
            otables.append(entry[1])
        for t in otables:
            for o in t:
                if "(DEPRECATED)" in o[3]:
                    continue
                if o[0]:
                    options.append('-%s' % o[0])
                options.append('--%s' % o[1])
        ui.write("%s\n" % "\n".join(options))
        return

    cmdlist, unused_allcmds = cmdutil.findpossible(cmd, table)
    if ui.verbose:
        cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
    ui.write("%s\n" % "\n".join(sorted(cmdlist)))

@command('^diff',
    [('r', 'rev', [], _('revision'), _('REV')),
    ('c', 'change', '', _('change made by revision'), _('REV'))
    ] + diffopts + diffopts2 + walkopts + subrepoopts,
    _('[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...'),
    inferrepo=True)
def diff(ui, repo, *pats, **opts):
    """diff repository (or selected files)

    Show differences between revisions for the specified files.

    Differences between files are shown using the unified diff format.

    .. note::

       :hg:`diff` may generate unexpected results for merges, as it will
       default to comparing against the working directory's first
       parent changeset if no revisions are specified.

    When two revision arguments are given, then changes are shown
    between those revisions. If only one revision is specified then
    that revision is compared to the working directory, and, when no
    revisions are specified, the working directory files are compared
    to its first parent.

    Alternatively you can specify -c/--change with a revision to see
    the changes in that changeset relative to its first parent.

    Without the -a/--text option, diff will avoid generating diffs of
    files it detects as binary. With -a, diff will generate a diff
    anyway, probably with undesirable results.

    Use the -g/--git option to generate diffs in the git extended diff
    format. For more information, read :hg:`help diffs`.

    .. container:: verbose

      Examples:

      - compare a file in the current working directory to its parent::

          hg diff foo.c

      - compare two historical versions of a directory, with rename info::

          hg diff --git -r 1.0:1.2 lib/

      - get change stats relative to the last change on some date::

          hg diff --stat -r "date('may 2')"

      - diff all newly-added files that contain a keyword::

          hg diff "set:added() and grep(GNU)"

      - compare a revision and its parents::

          hg diff -c 9353         # compare against first parent
          hg diff -r 9353^:9353   # same using revset syntax
          hg diff -r 9353^2:9353  # compare against the second parent

    Returns 0 on success.
    """

    opts = pycompat.byteskwargs(opts)
    revs = opts.get('rev')
    change = opts.get('change')
    stat = opts.get('stat')
    reverse = opts.get('reverse')

    if revs and change:
        msg = _('cannot specify --rev and --change at the same time')
        raise error.Abort(msg)
    elif change:
        node2 = scmutil.revsingle(repo, change, None).node()
        node1 = repo[node2].p1().node()
    else:
        node1, node2 = scmutil.revpair(repo, revs)

    if reverse:
        node1, node2 = node2, node1

    diffopts = patch.diffallopts(ui, opts)
    m = scmutil.match(repo[node2], pats, opts)
    ui.pager('diff')
    cmdutil.diffordiffstat(ui, repo, diffopts, node1, node2, m, stat=stat,
                           listsubrepos=opts.get('subrepos'),
                           root=opts.get('root'))

@command('^export',
    [('o', 'output', '',
     _('print output to file with formatted name'), _('FORMAT')),
    ('', 'switch-parent', None, _('diff against the second parent')),
    ('r', 'rev', [], _('revisions to export'), _('REV')),
    ] + diffopts,
    _('[OPTION]... [-o OUTFILESPEC] [-r] [REV]...'))
def export(ui, repo, *changesets, **opts):
    """dump the header and diffs for one or more changesets

    Print the changeset header and diffs for one or more revisions.
    If no revision is given, the parent of the working directory is used.

    The information shown in the changeset header is: author, date,
    branch name (if non-default), changeset hash, parent(s) and commit
    comment.

    .. note::

       :hg:`export` may generate unexpected diff output for merge
       changesets, as it will compare the merge changeset against its
       first parent only.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are as follows:

    :``%%``: literal "%" character
    :``%H``: changeset hash (40 hexadecimal digits)
    :``%N``: number of patches being generated
    :``%R``: changeset revision number
    :``%b``: basename of the exporting repository
    :``%h``: short-form changeset hash (12 hexadecimal digits)
    :``%m``: first line of the commit message (only alphanumeric characters)
    :``%n``: zero-padded sequence number, starting at 1
    :``%r``: zero-padded changeset revision number

    Without the -a/--text option, export will avoid generating diffs
    of files it detects as binary. With -a, export will generate a
    diff anyway, probably with undesirable results.

    Use the -g/--git option to generate diffs in the git extended diff
    format. See :hg:`help diffs` for more information.

    With the --switch-parent option, the diff will be against the
    second parent. It can be useful to review a merge.

    .. container:: verbose

      Examples:

      - use export and import to transplant a bugfix to the current
        branch::

          hg export -r 9353 | hg import -

      - export all the changesets between two revisions to a file with
        rename information::

          hg export --git -r 123:150 > changes.txt

      - split outgoing changes into a series of patches with
        descriptive names::

          hg export -r "outgoing()" -o "%n-%m.patch"

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)
    changesets += tuple(opts.get('rev', []))
    if not changesets:
        changesets = ['.']
    revs = scmutil.revrange(repo, changesets)
    if not revs:
        raise error.Abort(_("export requires at least one changeset"))
    if len(revs) > 1:
        ui.note(_('exporting patches:\n'))
    else:
        ui.note(_('exporting patch:\n'))
    ui.pager('export')
    cmdutil.export(repo, revs, fntemplate=opts.get('output'),
                   switch_parent=opts.get('switch_parent'),
                   opts=patch.diffallopts(ui, opts))

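# Illustrative sketch, not part of the original source: one way the -o FORMAT
# placeholders documented above might expand (file names are hypothetical and
# the zero-padding width depends on the repository):
#
#   $ hg export -r 123 -o "%r-%h.patch"
#
# could write something like 0123-1234567890ab.patch, combining the
# zero-padded revision number (%r) with the short changeset hash (%h).
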
@command('files',
    [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
    ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
    ] + walkopts + formatteropts + subrepoopts,
    _('[OPTION]... [FILE]...'))
def files(ui, repo, *pats, **opts):
    """list tracked files

    Print files under Mercurial control in the working directory or
    specified revision for given files (excluding removed files).
    Files can be specified as filenames or filesets.

    If no files are given to match, this command prints the names
    of all files under Mercurial control.

    .. container:: verbose

      Examples:

      - list all files under the current directory::

          hg files .

      - show sizes and flags for the current revision::

          hg files -vr .

      - list all files named README::

          hg files -I "**/README"

      - list all binary files::

          hg files "set:binary()"

      - find files containing a regular expression::

          hg files "set:grep('bob')"

      - search tracked file contents with xargs and grep::

          hg files -0 | xargs -0 grep foo

    See :hg:`help patterns` and :hg:`help filesets` for more information
    on specifying file patterns.

    Returns 0 if a match is found, 1 otherwise.

    """

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'), None)

    end = '\n'
    if opts.get('print0'):
        end = '\0'
    fmt = '%s' + end

    m = scmutil.match(ctx, pats, opts)
    ui.pager('files')
    with ui.formatter('files', opts) as fm:
        return cmdutil.files(ui, ctx, m, fm, fmt, opts.get('subrepos'))

@command('^forget', walkopts, _('[OPTION]... FILE...'), inferrepo=True)
def forget(ui, repo, *pats, **opts):
    """forget the specified files on the next commit

    Mark the specified files so they will no longer be tracked
    after the next commit.

    This only removes files from the current branch, not from the
    entire project history, and it does not delete them from the
    working directory.

    To delete the file from the working directory, see :hg:`remove`.

    To undo a forget before the next commit, see :hg:`add`.

    .. container:: verbose

      Examples:

      - forget newly-added binary files::

          hg forget "set:added() and binary()"

      - forget files that would be excluded by .hgignore::

          hg forget "set:hgignore()"

    Returns 0 on success.
    """

    opts = pycompat.byteskwargs(opts)
    if not pats:
        raise error.Abort(_('no files specified'))

    m = scmutil.match(repo[None], pats, opts)
    rejected = cmdutil.forget(ui, repo, m, prefix="", explicitonly=False)[0]
    return rejected and 1 or 0

@command(
    'graft',
    [('r', 'rev', [], _('revisions to graft'), _('REV')),
     ('c', 'continue', False, _('resume interrupted graft')),
     ('e', 'edit', False, _('invoke editor on commit messages')),
     ('', 'log', None, _('append graft info to log message')),
     ('f', 'force', False, _('force graft')),
     ('D', 'currentdate', False,
      _('record the current date as commit date')),
     ('U', 'currentuser', False,
      _('record the current user as committer'))]
    + commitopts2 + mergetoolopts + dryrunopts,
    _('[OPTION]... [-r REV]... REV...'))
def graft(ui, repo, *revs, **opts):
    '''copy changes from other branches onto the current branch

    This command uses Mercurial's merge logic to copy individual
    changes from other branches without merging branches in the
    history graph. This is sometimes known as 'backporting' or
    'cherry-picking'. By default, graft will copy user, date, and
    description from the source changesets.

    Changesets that are ancestors of the current revision, that have
    already been grafted, or that are merges will be skipped.

    If --log is specified, log messages will have a comment appended
    of the form::

      (grafted from CHANGESETHASH)

    If --force is specified, revisions will be grafted even if they
    are already ancestors of or have been grafted to the destination.
    This is useful when the revisions have since been backed out.

    If a graft merge results in conflicts, the graft process is
    interrupted so that the current merge can be manually resolved.
    Once all conflicts are addressed, the graft process can be
    continued with the -c/--continue option.

    .. note::

       The -c/--continue option does not reapply earlier options, except
       for --force.

    .. container:: verbose

      Examples:

      - copy a single change to the stable branch and edit its description::

          hg update stable
          hg graft --edit 9393

      - graft a range of changesets with one exception, updating dates::

          hg graft -D "2085::2093 and not 2091"

      - continue a graft after resolving conflicts::

          hg graft -c

      - show the source of a grafted changeset::

          hg log --debug -r .

      - show revisions sorted by date::

          hg log -r "sort(all(), date)"

    See :hg:`help revisions` for more about specifying revisions.

    Returns 0 on successful completion.
    '''
    with repo.wlock():
        return _dograft(ui, repo, *revs, **opts)

def _dograft(ui, repo, *revs, **opts):
    opts = pycompat.byteskwargs(opts)
    if revs and opts.get('rev'):
        ui.warn(_('warning: inconsistent use of --rev might give unexpected '
                  'revision ordering!\n'))

    revs = list(revs)
    revs.extend(opts.get('rev'))

    if not opts.get('user') and opts.get('currentuser'):
        opts['user'] = ui.username()
    if not opts.get('date') and opts.get('currentdate'):
        opts['date'] = "%d %d" % util.makedate()

    editor = cmdutil.getcommiteditor(editform='graft',
                                     **pycompat.strkwargs(opts))

    cont = False
    if opts.get('continue'):
        cont = True
        if revs:
            raise error.Abort(_("can't specify --continue and revisions"))
        # read in unfinished revisions
        try:
            nodes = repo.vfs.read('graftstate').splitlines()
            revs = [repo[node].rev() for node in nodes]
        except IOError as inst:
            if inst.errno != errno.ENOENT:
                raise
            cmdutil.wrongtooltocontinue(repo, _('graft'))
    else:
        cmdutil.checkunfinished(repo)
        cmdutil.bailifchanged(repo)
        if not revs:
            raise error.Abort(_('no revisions specified'))
        revs = scmutil.revrange(repo, revs)

    skipped = set()
    # check for merges
    for rev in repo.revs('%ld and merge()', revs):
        ui.warn(_('skipping ungraftable merge revision %s\n') % rev)
        skipped.add(rev)
    revs = [r for r in revs if r not in skipped]
    if not revs:
        return -1

    # Don't check in the --continue case, in effect retaining --force across
    # --continues. That's because without --force, any revisions we decided to
    # skip would have been filtered out here, so they wouldn't have made their
    # way to the graftstate. With --force, any revisions we would have otherwise
    # skipped would not have been filtered out, and if they hadn't been applied
    # already, they'd have been in the graftstate.
    if not (cont or opts.get('force')):
        # check for ancestors of dest branch
        crev = repo['.'].rev()
        ancestors = repo.changelog.ancestors([crev], inclusive=True)
        # XXX make this lazy in the future
        # don't mutate while iterating, create a copy
        for rev in list(revs):
            if rev in ancestors:
                ui.warn(_('skipping ancestor revision %d:%s\n') %
                        (rev, repo[rev]))
                # XXX remove on list is slow
                revs.remove(rev)
        if not revs:
            return -1

    # analyze revs for earlier grafts
    ids = {}
    for ctx in repo.set("%ld", revs):
        ids[ctx.hex()] = ctx.rev()
        n = ctx.extra().get('source')
        if n:
            ids[n] = ctx.rev()

    # check ancestors for earlier grafts
    ui.debug('scanning for duplicate grafts\n')

    # The only changesets we can be sure don't contain grafts of any
    # revs are the ones that are common ancestors of *all* revs:
    for rev in repo.revs('only(%d,ancestor(%ld))', crev, revs):
        ctx = repo[rev]
        n = ctx.extra().get('source')
        if n in ids:
            try:
                r = repo[n].rev()
            except error.RepoLookupError:
                r = None
            if r in revs:
                ui.warn(_('skipping revision %d:%s '
                          '(already grafted to %d:%s)\n')
                        % (r, repo[r], rev, ctx))
                revs.remove(r)
            elif ids[n] in revs:
                if r is None:
                    ui.warn(_('skipping already grafted revision %d:%s '
                              '(%d:%s also has unknown origin %s)\n')
                            % (ids[n], repo[ids[n]], rev, ctx, n[:12]))
                else:
                    ui.warn(_('skipping already grafted revision %d:%s '
                              '(%d:%s also has origin %d:%s)\n')
                            % (ids[n], repo[ids[n]], rev, ctx, r, n[:12]))
                revs.remove(ids[n])
        elif ctx.hex() in ids:
            r = ids[ctx.hex()]
            ui.warn(_('skipping already grafted revision %d:%s '
                      '(was grafted from %d:%s)\n') %
                    (r, repo[r], rev, ctx))
            revs.remove(r)
    if not revs:
        return -1

    for pos, ctx in enumerate(repo.set("%ld", revs)):
        desc = '%d:%s "%s"' % (ctx.rev(), ctx,
                               ctx.description().split('\n', 1)[0])
        names = repo.nodetags(ctx.node()) + repo.nodebookmarks(ctx.node())
        if names:
            desc += ' (%s)' % ' '.join(names)
        ui.status(_('grafting %s\n') % desc)
        if opts.get('dry_run'):
            continue

        source = ctx.extra().get('source')
        extra = {}
        if source:
            extra['source'] = source
            extra['intermediate-source'] = ctx.hex()
        else:
            extra['source'] = ctx.hex()
        user = ctx.user()
        if opts.get('user'):
            user = opts['user']
        date = ctx.date()
        if opts.get('date'):
            date = opts['date']
        message = ctx.description()
        if opts.get('log'):
            message += '\n(grafted from %s)' % ctx.hex()

        # we don't merge the first commit when continuing
        if not cont:
            # perform the graft merge with p1(rev) as 'ancestor'
            try:
                # ui.forcemerge is an internal variable, do not document
                repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                                  'graft')
                stats = mergemod.graft(repo, ctx, ctx.p1(),
                                       ['local', 'graft'])
            finally:
                repo.ui.setconfig('ui', 'forcemerge', '', 'graft')
            # report any conflicts
            if stats and stats[3] > 0:
                # write out state for --continue
                nodelines = [repo[rev].hex() + "\n" for rev in revs[pos:]]
                repo.vfs.write('graftstate', ''.join(nodelines))
                extra = ''
                if opts.get('user'):
                    extra += ' --user %s' % util.shellquote(opts['user'])
                if opts.get('date'):
                    extra += ' --date %s' % util.shellquote(opts['date'])
                if opts.get('log'):
                    extra += ' --log'
                hint = _("use 'hg resolve' and 'hg graft --continue%s'") % extra
                raise error.Abort(
                    _("unresolved conflicts, can't continue"),
                    hint=hint)
        else:
            cont = False

        # commit
        node = repo.commit(text=message, user=user,
                           date=date, extra=extra, editor=editor)
        if node is None:
            ui.warn(
                _('note: graft of %d:%s created no changes to commit\n') %
                (ctx.rev(), ctx))

    # remove state when we complete successfully
    if not opts.get('dry_run'):
        repo.vfs.unlinkpath('graftstate', ignoremissing=True)

    return 0

@command('grep',
    [('0', 'print0', None, _('end fields with NUL')),
    ('', 'all', None, _('print all revisions that match')),
    ('a', 'text', None, _('treat all files as text')),
    ('f', 'follow', None,
     _('follow changeset history,'
       ' or file history across copies and renames')),
    ('i', 'ignore-case', None, _('ignore case when matching')),
    ('l', 'files-with-matches', None,
     _('print only filenames and revisions that match')),
    ('n', 'line-number', None, _('print matching line numbers')),
    ('r', 'rev', [],
     _('only search files changed within revision range'), _('REV')),
    ('u', 'user', None, _('list the author (long with -v)')),
    ('d', 'date', None, _('list the date (short with -q)')),
    ] + formatteropts + walkopts,
    _('[OPTION]... PATTERN [FILE]...'),
    inferrepo=True)
def grep(ui, repo, pattern, *pats, **opts):
    """search revision history for a pattern in specified files

    Search revision history for a regular expression in the specified
    files or the entire project.

    By default, grep prints the most recent revision number for each
    file in which it finds a match. To get it to print every revision
    that contains a change in match status ("-" for a match that becomes
    a non-match, or "+" for a non-match that becomes a match), use the
    --all flag.

    PATTERN can be any Python (roughly Perl-compatible) regular
    expression.

    If no FILEs are specified (and -f/--follow isn't set), all files in
    the repository are searched, including those that don't exist in the
    current branch or have been deleted in a prior changeset.

    Returns 0 if a match is found, 1 otherwise.
    """
    opts = pycompat.byteskwargs(opts)
    reflags = re.M
    if opts.get('ignore_case'):
        reflags |= re.I
    try:
        regexp = util.re.compile(pattern, reflags)
    except re.error as inst:
        ui.warn(_("grep: invalid match pattern: %s\n") % inst)
        return 1
    sep, eol = ':', '\n'
    if opts.get('print0'):
        sep = eol = '\0'

    getfile = util.lrucachefunc(repo.file)

    def matchlines(body):
        begin = 0
        linenum = 0
        while begin < len(body):
            match = regexp.search(body, begin)
            if not match:
                break
            mstart, mend = match.span()
            linenum += body.count('\n', begin, mstart) + 1
            lstart = body.rfind('\n', begin, mstart) + 1 or begin
            begin = body.find('\n', mend) + 1 or len(body) + 1
            lend = begin - 1
            yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]

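    # Illustrative sketch, not part of the original source: for a body of
    # "alpha\nbeta\n" and the pattern "beta", matchlines() yields a single
    # tuple (2, 0, 4, 'beta') -- the 1-based line number, the start and end
    # columns of the match within that line, and the text of the line.
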
    class linestate(object):
        def __init__(self, line, linenum, colstart, colend):
            self.line = line
            self.linenum = linenum
            self.colstart = colstart
            self.colend = colend

        def __hash__(self):
            return hash((self.linenum, self.line))

        def __eq__(self, other):
            return self.line == other.line

        def findpos(self):
            """Iterate all (start, end) indices of matches"""
            yield self.colstart, self.colend
            p = self.colend
            while p < len(self.line):
                m = regexp.search(self.line, p)
                if not m:
                    break
                yield m.span()
                p = m.end()

    matches = {}
    copies = {}
    def grepbody(fn, rev, body):
        matches[rev].setdefault(fn, [])
        m = matches[rev][fn]
        for lnum, cstart, cend, line in matchlines(body):
            s = linestate(line, lnum, cstart, cend)
            m.append(s)

    def difflinestates(a, b):
        sm = difflib.SequenceMatcher(None, a, b)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag == 'insert':
                for i in xrange(blo, bhi):
                    yield ('+', b[i])
            elif tag == 'delete':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
            elif tag == 'replace':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
                for i in xrange(blo, bhi):
                    yield ('+', b[i])

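    # Illustrative sketch, not part of the original source: difflinestates()
    # drives the "+"/"-" change column used by --all. For linestate lists
    # a = [foo] and b = [foo, bar] (compared by line text), it yields a single
    # ('+', bar) tuple; a 'replace' opcode yields the '-' entries for a
    # followed by the '+' entries for b.
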
    def display(fm, fn, ctx, pstates, states):
        rev = ctx.rev()
        if fm.isplain():
            formatuser = ui.shortuser
        else:
            formatuser = str
        if ui.quiet:
            datefmt = '%Y-%m-%d'
        else:
            datefmt = '%a %b %d %H:%M:%S %Y %1%2'
        found = False
        @util.cachefunc
        def binary():
            flog = getfile(fn)
            return util.binary(flog.read(ctx.filenode(fn)))

        fieldnamemap = {'filename': 'file', 'linenumber': 'line_number'}
        if opts.get('all'):
            iter = difflinestates(pstates, states)
        else:
            iter = [('', l) for l in states]
        for change, l in iter:
            fm.startitem()
            fm.data(node=fm.hexfunc(ctx.node()))
            cols = [
                ('filename', fn, True),
                ('rev', rev, True),
                ('linenumber', l.linenum, opts.get('line_number')),
            ]
            if opts.get('all'):
                cols.append(('change', change, True))
            cols.extend([
                ('user', formatuser(ctx.user()), opts.get('user')),
                ('date', fm.formatdate(ctx.date(), datefmt), opts.get('date')),
            ])
            lastcol = next(name for name, data, cond in reversed(cols) if cond)
            for name, data, cond in cols:
                field = fieldnamemap.get(name, name)
                fm.condwrite(cond, field, '%s', data, label='grep.%s' % name)
                if cond and name != lastcol:
                    fm.plain(sep, label='grep.sep')
            if not opts.get('files_with_matches'):
                fm.plain(sep, label='grep.sep')
                if not opts.get('text') and binary():
                    fm.plain(_(" Binary file matches"))
                else:
                    displaymatches(fm.nested('texts'), l)
            fm.plain(eol)
            found = True
            if opts.get('files_with_matches'):
                break
        return found

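    # Illustrative sketch, not part of the original source: with the plain
    # formatter, the cols list above is joined by 'sep' (':' unless --print0),
    # so a hypothetical match renders roughly as
    #
    #   commands.py:2314:def grep(ui, repo, pattern, *pats, **opts):
    #
    # i.e. filename, revision, then the matched line emitted through
    # displaymatches() below; --line-number, --user, --date and --all splice
    # their extra fields in before the text.
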
2465 def displaymatches(fm, l):
2465 def displaymatches(fm, l):
2466 p = 0
2466 p = 0
2467 for s, e in l.findpos():
2467 for s, e in l.findpos():
2468 if p < s:
2468 if p < s:
2469 fm.startitem()
2469 fm.startitem()
2470 fm.write('text', '%s', l.line[p:s])
2470 fm.write('text', '%s', l.line[p:s])
2471 fm.data(matched=False)
2471 fm.data(matched=False)
2472 fm.startitem()
2472 fm.startitem()
2473 fm.write('text', '%s', l.line[s:e], label='grep.match')
2473 fm.write('text', '%s', l.line[s:e], label='grep.match')
2474 fm.data(matched=True)
2474 fm.data(matched=True)
2475 p = e
2475 p = e
2476 if p < len(l.line):
2476 if p < len(l.line):
2477 fm.startitem()
2477 fm.startitem()
2478 fm.write('text', '%s', l.line[p:])
2478 fm.write('text', '%s', l.line[p:])
2479 fm.data(matched=False)
2479 fm.data(matched=False)
2480 fm.end()
2480 fm.end()
2481
2481
2482 skip = {}
2482 skip = {}
2483 revfiles = {}
2483 revfiles = {}
2484 matchfn = scmutil.match(repo[None], pats, opts)
2484 matchfn = scmutil.match(repo[None], pats, opts)
2485 found = False
2485 found = False
2486 follow = opts.get('follow')
2486 follow = opts.get('follow')
2487
2487
2488 def prep(ctx, fns):
2488 def prep(ctx, fns):
2489 rev = ctx.rev()
2489 rev = ctx.rev()
2490 pctx = ctx.p1()
2490 pctx = ctx.p1()
2491 parent = pctx.rev()
2491 parent = pctx.rev()
2492 matches.setdefault(rev, {})
2492 matches.setdefault(rev, {})
2493 matches.setdefault(parent, {})
2493 matches.setdefault(parent, {})
2494 files = revfiles.setdefault(rev, [])
2494 files = revfiles.setdefault(rev, [])
2495 for fn in fns:
2495 for fn in fns:
2496 flog = getfile(fn)
2496 flog = getfile(fn)
2497 try:
2497 try:
2498 fnode = ctx.filenode(fn)
2498 fnode = ctx.filenode(fn)
2499 except error.LookupError:
2499 except error.LookupError:
2500 continue
2500 continue
2501
2501
2502 copied = flog.renamed(fnode)
2502 copied = flog.renamed(fnode)
2503 copy = follow and copied and copied[0]
2503 copy = follow and copied and copied[0]
2504 if copy:
2504 if copy:
2505 copies.setdefault(rev, {})[fn] = copy
2505 copies.setdefault(rev, {})[fn] = copy
2506 if fn in skip:
2506 if fn in skip:
2507 if copy:
2507 if copy:
2508 skip[copy] = True
2508 skip[copy] = True
2509 continue
2509 continue
2510 files.append(fn)
2510 files.append(fn)
2511
2511
2512 if fn not in matches[rev]:
2512 if fn not in matches[rev]:
2513 grepbody(fn, rev, flog.read(fnode))
2513 grepbody(fn, rev, flog.read(fnode))
2514
2514
2515 pfn = copy or fn
2515 pfn = copy or fn
2516 if pfn not in matches[parent]:
2516 if pfn not in matches[parent]:
2517 try:
2517 try:
2518 fnode = pctx.filenode(pfn)
2518 fnode = pctx.filenode(pfn)
2519 grepbody(pfn, parent, flog.read(fnode))
2519 grepbody(pfn, parent, flog.read(fnode))
2520 except error.LookupError:
2520 except error.LookupError:
2521 pass
2521 pass
2522
2522
2523 ui.pager('grep')
2523 ui.pager('grep')
2524 fm = ui.formatter('grep', opts)
2524 fm = ui.formatter('grep', opts)
2525 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
2525 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
2526 rev = ctx.rev()
2526 rev = ctx.rev()
2527 parent = ctx.p1().rev()
2527 parent = ctx.p1().rev()
2528 for fn in sorted(revfiles.get(rev, [])):
2528 for fn in sorted(revfiles.get(rev, [])):
2529 states = matches[rev][fn]
2529 states = matches[rev][fn]
2530 copy = copies.get(rev, {}).get(fn)
2530 copy = copies.get(rev, {}).get(fn)
2531 if fn in skip:
2531 if fn in skip:
2532 if copy:
2532 if copy:
2533 skip[copy] = True
2533 skip[copy] = True
2534 continue
2534 continue
2535 pstates = matches.get(parent, {}).get(copy or fn, [])
2535 pstates = matches.get(parent, {}).get(copy or fn, [])
2536 if pstates or states:
2536 if pstates or states:
2537 r = display(fm, fn, ctx, pstates, states)
2537 r = display(fm, fn, ctx, pstates, states)
2538 found = found or r
2538 found = found or r
2539 if r and not opts.get('all'):
2539 if r and not opts.get('all'):
2540 skip[fn] = True
2540 skip[fn] = True
2541 if copy:
2541 if copy:
2542 skip[copy] = True
2542 skip[copy] = True
2543 del matches[rev]
2543 del matches[rev]
2544 del revfiles[rev]
2544 del revfiles[rev]
2545 fm.end()
2545 fm.end()
2546
2546
2547 return not found
2547 return not found
2548
2548
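The displaymatches helper above emits alternating unmatched and matched segments of each hit line to the formatter, which is what lets templated grep output mark only the matched text. A minimal standalone sketch of that segmentation, using re.finditer in place of the l.findpos() call above and plain tuples in place of the formatter (the names here are illustrative, not Mercurial API)::

    import re

    def segments(line, pattern):
        # Split `line` into (text, matched) pairs, mirroring the
        # alternating fm.write()/fm.data() calls in displaymatches().
        out = []
        p = 0
        for m in re.finditer(pattern, line):
            s, e = m.start(), m.end()
            if p < s:
                out.append((line[p:s], False))   # text before the match
            out.append((line[s:e], True))        # the matched text
            p = e
        if p < len(line):
            out.append((line[p:], False))        # trailing unmatched text
        return out

    print(segments("def grepbody(fn, rev, body):", "grep"))
    # -> [('def ', False), ('grep', True), ('body(fn, rev, body):', False)]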
2549 @command('heads',
2549 @command('heads',
2550 [('r', 'rev', '',
2550 [('r', 'rev', '',
2551 _('show only heads which are descendants of STARTREV'), _('STARTREV')),
2551 _('show only heads which are descendants of STARTREV'), _('STARTREV')),
2552 ('t', 'topo', False, _('show topological heads only')),
2552 ('t', 'topo', False, _('show topological heads only')),
2553 ('a', 'active', False, _('show active branchheads only (DEPRECATED)')),
2553 ('a', 'active', False, _('show active branchheads only (DEPRECATED)')),
2554 ('c', 'closed', False, _('show normal and closed branch heads')),
2554 ('c', 'closed', False, _('show normal and closed branch heads')),
2555 ] + templateopts,
2555 ] + templateopts,
2556 _('[-ct] [-r STARTREV] [REV]...'))
2556 _('[-ct] [-r STARTREV] [REV]...'))
2557 def heads(ui, repo, *branchrevs, **opts):
2557 def heads(ui, repo, *branchrevs, **opts):
2558 """show branch heads
2558 """show branch heads
2559
2559
2560 With no arguments, show all open branch heads in the repository.
2560 With no arguments, show all open branch heads in the repository.
2561 Branch heads are changesets that have no descendants on the
2561 Branch heads are changesets that have no descendants on the
2562 same branch. They are where development generally takes place and
2562 same branch. They are where development generally takes place and
2563 are the usual targets for update and merge operations.
2563 are the usual targets for update and merge operations.
2564
2564
2565 If one or more REVs are given, only open branch heads on the
2565 If one or more REVs are given, only open branch heads on the
2566 branches associated with the specified changesets are shown. This
2566 branches associated with the specified changesets are shown. This
2567 means that you can use :hg:`heads .` to see the heads on the
2567 means that you can use :hg:`heads .` to see the heads on the
2568 currently checked-out branch.
2568 currently checked-out branch.
2569
2569
2570 If -c/--closed is specified, also show branch heads marked closed
2570 If -c/--closed is specified, also show branch heads marked closed
2571 (see :hg:`commit --close-branch`).
2571 (see :hg:`commit --close-branch`).
2572
2572
2573 If STARTREV is specified, only those heads that are descendants of
2573 If STARTREV is specified, only those heads that are descendants of
2574 STARTREV will be displayed.
2574 STARTREV will be displayed.
2575
2575
2576 If -t/--topo is specified, named branch mechanics will be ignored and only
2576 If -t/--topo is specified, named branch mechanics will be ignored and only
2577 topological heads (changesets with no children) will be shown.
2577 topological heads (changesets with no children) will be shown.
2578
2578
2579 Returns 0 if matching heads are found, 1 if not.
2579 Returns 0 if matching heads are found, 1 if not.
2580 """
2580 """
2581
2581
2582 opts = pycompat.byteskwargs(opts)
2582 opts = pycompat.byteskwargs(opts)
2583 start = None
2583 start = None
2584 if 'rev' in opts:
2584 if 'rev' in opts:
2585 start = scmutil.revsingle(repo, opts['rev'], None).node()
2585 start = scmutil.revsingle(repo, opts['rev'], None).node()
2586
2586
2587 if opts.get('topo'):
2587 if opts.get('topo'):
2588 heads = [repo[h] for h in repo.heads(start)]
2588 heads = [repo[h] for h in repo.heads(start)]
2589 else:
2589 else:
2590 heads = []
2590 heads = []
2591 for branch in repo.branchmap():
2591 for branch in repo.branchmap():
2592 heads += repo.branchheads(branch, start, opts.get('closed'))
2592 heads += repo.branchheads(branch, start, opts.get('closed'))
2593 heads = [repo[h] for h in heads]
2593 heads = [repo[h] for h in heads]
2594
2594
2595 if branchrevs:
2595 if branchrevs:
2596 branches = set(repo[br].branch() for br in branchrevs)
2596 branches = set(repo[br].branch() for br in branchrevs)
2597 heads = [h for h in heads if h.branch() in branches]
2597 heads = [h for h in heads if h.branch() in branches]
2598
2598
2599 if opts.get('active') and branchrevs:
2599 if opts.get('active') and branchrevs:
2600 dagheads = repo.heads(start)
2600 dagheads = repo.heads(start)
2601 heads = [h for h in heads if h.node() in dagheads]
2601 heads = [h for h in heads if h.node() in dagheads]
2602
2602
2603 if branchrevs:
2603 if branchrevs:
2604 haveheads = set(h.branch() for h in heads)
2604 haveheads = set(h.branch() for h in heads)
2605 if branches - haveheads:
2605 if branches - haveheads:
2606 headless = ', '.join(b for b in branches - haveheads)
2606 headless = ', '.join(b for b in branches - haveheads)
2607 msg = _('no open branch heads found on branches %s')
2607 msg = _('no open branch heads found on branches %s')
2608 if opts.get('rev'):
2608 if opts.get('rev'):
2609 msg += _(' (started at %s)') % opts['rev']
2609 msg += _(' (started at %s)') % opts['rev']
2610 ui.warn((msg + '\n') % headless)
2610 ui.warn((msg + '\n') % headless)
2611
2611
2612 if not heads:
2612 if not heads:
2613 return 1
2613 return 1
2614
2614
2615 ui.pager('heads')
2615 ui.pager('heads')
2616 heads = sorted(heads, key=lambda x: -x.rev())
2616 heads = sorted(heads, key=lambda x: -x.rev())
2617 displayer = cmdutil.show_changeset(ui, repo, opts)
2617 displayer = cmdutil.show_changeset(ui, repo, opts)
2618 for ctx in heads:
2618 for ctx in heads:
2619 displayer.show(ctx)
2619 displayer.show(ctx)
2620 displayer.close()
2620 displayer.close()
2621
2621
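The --topo branch above asks the repository for changesets with no children, while the default path collects per-branch heads from the branch map. A toy sketch of the topological notion on a plain {rev: parents} map (an illustrative data structure, not Mercurial's revlog or branchmap API)::

    def topoheads(parents):
        # Revisions that never appear as somebody's parent have no
        # children, i.e. they are topological heads.
        haschild = set()
        for ps in parents.values():
            haschild.update(ps)
        return sorted(set(parents) - haschild)

    # 0 <- 1 <- 2 and 1 <- 3: revisions 2 and 3 are the heads
    print(topoheads({0: [], 1: [0], 2: [1], 3: [1]}))   # [2, 3]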
2622 @command('help',
2622 @command('help',
2623 [('e', 'extension', None, _('show only help for extensions')),
2623 [('e', 'extension', None, _('show only help for extensions')),
2624 ('c', 'command', None, _('show only help for commands')),
2624 ('c', 'command', None, _('show only help for commands')),
2625 ('k', 'keyword', None, _('show topics matching keyword')),
2625 ('k', 'keyword', None, _('show topics matching keyword')),
2626 ('s', 'system', [], _('show help for specific platform(s)')),
2626 ('s', 'system', [], _('show help for specific platform(s)')),
2627 ],
2627 ],
2628 _('[-ecks] [TOPIC]'),
2628 _('[-ecks] [TOPIC]'),
2629 norepo=True)
2629 norepo=True)
2630 def help_(ui, name=None, **opts):
2630 def help_(ui, name=None, **opts):
2631 """show help for a given topic or a help overview
2631 """show help for a given topic or a help overview
2632
2632
2633 With no arguments, print a list of commands with short help messages.
2633 With no arguments, print a list of commands with short help messages.
2634
2634
2635 Given a topic, extension, or command name, print help for that
2635 Given a topic, extension, or command name, print help for that
2636 topic.
2636 topic.
2637
2637
2638 Returns 0 if successful.
2638 Returns 0 if successful.
2639 """
2639 """
2640
2640
2641 keep = opts.get(r'system') or []
2641 keep = opts.get(r'system') or []
2642 if len(keep) == 0:
2642 if len(keep) == 0:
2643 if pycompat.sysplatform.startswith('win'):
2643 if pycompat.sysplatform.startswith('win'):
2644 keep.append('windows')
2644 keep.append('windows')
2645 elif pycompat.sysplatform == 'OpenVMS':
2645 elif pycompat.sysplatform == 'OpenVMS':
2646 keep.append('vms')
2646 keep.append('vms')
2647 elif pycompat.sysplatform == 'plan9':
2647 elif pycompat.sysplatform == 'plan9':
2648 keep.append('plan9')
2648 keep.append('plan9')
2649 else:
2649 else:
2650 keep.append('unix')
2650 keep.append('unix')
2651 keep.append(pycompat.sysplatform.lower())
2651 keep.append(pycompat.sysplatform.lower())
2652 if ui.verbose:
2652 if ui.verbose:
2653 keep.append('verbose')
2653 keep.append('verbose')
2654
2654
2655 commands = sys.modules[__name__]
2655 commands = sys.modules[__name__]
2656 formatted = help.formattedhelp(ui, commands, name, keep=keep, **opts)
2656 formatted = help.formattedhelp(ui, commands, name, keep=keep, **opts)
2657 ui.pager('help')
2657 ui.pager('help')
2658 ui.write(formatted)
2658 ui.write(formatted)
2659
2659
2660
2660
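The keyword selection above controls which platform-specific help sections are kept when a topic is rendered. A small sketch of the same branching, with sys.platform standing in for pycompat.sysplatform (a sketch of the selection logic only, not a call into Mercurial)::

    import sys

    def platformkeywords(verbose=False):
        # Mirror the `keep` list built by help_(): a generic platform
        # keyword, plus the lowercased platform name on Unix-like systems.
        keep = []
        if sys.platform.startswith('win'):
            keep.append('windows')
        elif sys.platform == 'OpenVMS':
            keep.append('vms')
        elif sys.platform == 'plan9':
            keep.append('plan9')
        else:
            keep.append('unix')
            keep.append(sys.platform.lower())
        if verbose:
            keep.append('verbose')
        return keep

    print(platformkeywords())   # e.g. ['unix', 'linux'] on Linux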
2661 @command('identify|id',
2661 @command('identify|id',
2662 [('r', 'rev', '',
2662 [('r', 'rev', '',
2663 _('identify the specified revision'), _('REV')),
2663 _('identify the specified revision'), _('REV')),
2664 ('n', 'num', None, _('show local revision number')),
2664 ('n', 'num', None, _('show local revision number')),
2665 ('i', 'id', None, _('show global revision id')),
2665 ('i', 'id', None, _('show global revision id')),
2666 ('b', 'branch', None, _('show branch')),
2666 ('b', 'branch', None, _('show branch')),
2667 ('t', 'tags', None, _('show tags')),
2667 ('t', 'tags', None, _('show tags')),
2668 ('B', 'bookmarks', None, _('show bookmarks')),
2668 ('B', 'bookmarks', None, _('show bookmarks')),
2669 ] + remoteopts + formatteropts,
2669 ] + remoteopts + formatteropts,
2670 _('[-nibtB] [-r REV] [SOURCE]'),
2670 _('[-nibtB] [-r REV] [SOURCE]'),
2671 optionalrepo=True)
2671 optionalrepo=True)
2672 def identify(ui, repo, source=None, rev=None,
2672 def identify(ui, repo, source=None, rev=None,
2673 num=None, id=None, branch=None, tags=None, bookmarks=None, **opts):
2673 num=None, id=None, branch=None, tags=None, bookmarks=None, **opts):
2674 """identify the working directory or specified revision
2674 """identify the working directory or specified revision
2675
2675
2676 Print a summary identifying the repository state at REV using one or
2676 Print a summary identifying the repository state at REV using one or
2677 two parent hash identifiers, followed by a "+" if the working
2677 two parent hash identifiers, followed by a "+" if the working
2678 directory has uncommitted changes, the branch name (if not default),
2678 directory has uncommitted changes, the branch name (if not default),
2679 a list of tags, and a list of bookmarks.
2679 a list of tags, and a list of bookmarks.
2680
2680
2681 When REV is not given, print a summary of the current state of the
2681 When REV is not given, print a summary of the current state of the
2682 repository.
2682 repository.
2683
2683
2684 Specifying a path to a repository root or Mercurial bundle will
2684 Specifying a path to a repository root or Mercurial bundle will
2685 cause lookup to operate on that repository/bundle.
2685 cause lookup to operate on that repository/bundle.
2686
2686
2687 .. container:: verbose
2687 .. container:: verbose
2688
2688
2689 Examples:
2689 Examples:
2690
2690
2691 - generate a build identifier for the working directory::
2691 - generate a build identifier for the working directory::
2692
2692
2693 hg id --id > build-id.dat
2693 hg id --id > build-id.dat
2694
2694
2695 - find the revision corresponding to a tag::
2695 - find the revision corresponding to a tag::
2696
2696
2697 hg id -n -r 1.3
2697 hg id -n -r 1.3
2698
2698
2699 - check the most recent revision of a remote repository::
2699 - check the most recent revision of a remote repository::
2700
2700
2701 hg id -r tip https://www.mercurial-scm.org/repo/hg/
2701 hg id -r tip https://www.mercurial-scm.org/repo/hg/
2702
2702
2703 See :hg:`log` for generating more information about specific revisions,
2703 See :hg:`log` for generating more information about specific revisions,
2704 including full hash identifiers.
2704 including full hash identifiers.
2705
2705
2706 Returns 0 if successful.
2706 Returns 0 if successful.
2707 """
2707 """
2708
2708
2709 opts = pycompat.byteskwargs(opts)
2709 opts = pycompat.byteskwargs(opts)
2710 if not repo and not source:
2710 if not repo and not source:
2711 raise error.Abort(_("there is no Mercurial repository here "
2711 raise error.Abort(_("there is no Mercurial repository here "
2712 "(.hg not found)"))
2712 "(.hg not found)"))
2713
2713
2714 if ui.debugflag:
2714 if ui.debugflag:
2715 hexfunc = hex
2715 hexfunc = hex
2716 else:
2716 else:
2717 hexfunc = short
2717 hexfunc = short
2718 default = not (num or id or branch or tags or bookmarks)
2718 default = not (num or id or branch or tags or bookmarks)
2719 output = []
2719 output = []
2720 revs = []
2720 revs = []
2721
2721
2722 if source:
2722 if source:
2723 source, branches = hg.parseurl(ui.expandpath(source))
2723 source, branches = hg.parseurl(ui.expandpath(source))
2724 peer = hg.peer(repo or ui, opts, source) # only pass ui when no repo
2724 peer = hg.peer(repo or ui, opts, source) # only pass ui when no repo
2725 repo = peer.local()
2725 repo = peer.local()
2726 revs, checkout = hg.addbranchrevs(repo, peer, branches, None)
2726 revs, checkout = hg.addbranchrevs(repo, peer, branches, None)
2727
2727
2728 fm = ui.formatter('identify', opts)
2728 fm = ui.formatter('identify', opts)
2729 fm.startitem()
2729 fm.startitem()
2730
2730
2731 if not repo:
2731 if not repo:
2732 if num or branch or tags:
2732 if num or branch or tags:
2733 raise error.Abort(
2733 raise error.Abort(
2734 _("can't query remote revision number, branch, or tags"))
2734 _("can't query remote revision number, branch, or tags"))
2735 if not rev and revs:
2735 if not rev and revs:
2736 rev = revs[0]
2736 rev = revs[0]
2737 if not rev:
2737 if not rev:
2738 rev = "tip"
2738 rev = "tip"
2739
2739
2740 remoterev = peer.lookup(rev)
2740 remoterev = peer.lookup(rev)
2741 hexrev = hexfunc(remoterev)
2741 hexrev = hexfunc(remoterev)
2742 if default or id:
2742 if default or id:
2743 output = [hexrev]
2743 output = [hexrev]
2744 fm.data(id=hexrev)
2744 fm.data(id=hexrev)
2745
2745
2746 def getbms():
2746 def getbms():
2747 bms = []
2747 bms = []
2748
2748
2749 if 'bookmarks' in peer.listkeys('namespaces'):
2749 if 'bookmarks' in peer.listkeys('namespaces'):
2750 hexremoterev = hex(remoterev)
2750 hexremoterev = hex(remoterev)
2751 bms = [bm for bm, bmr in peer.listkeys('bookmarks').iteritems()
2751 bms = [bm for bm, bmr in peer.listkeys('bookmarks').iteritems()
2752 if bmr == hexremoterev]
2752 if bmr == hexremoterev]
2753
2753
2754 return sorted(bms)
2754 return sorted(bms)
2755
2755
2756 bms = getbms()
2756 bms = getbms()
2757 if bookmarks:
2757 if bookmarks:
2758 output.extend(bms)
2758 output.extend(bms)
2759 elif default and not ui.quiet:
2759 elif default and not ui.quiet:
2760 # multiple bookmarks for a single parent separated by '/'
2760 # multiple bookmarks for a single parent separated by '/'
2761 bm = '/'.join(bms)
2761 bm = '/'.join(bms)
2762 if bm:
2762 if bm:
2763 output.append(bm)
2763 output.append(bm)
2764
2764
2765 fm.data(node=hex(remoterev))
2765 fm.data(node=hex(remoterev))
2766 fm.data(bookmarks=fm.formatlist(bms, name='bookmark'))
2766 fm.data(bookmarks=fm.formatlist(bms, name='bookmark'))
2767 else:
2767 else:
2768 ctx = scmutil.revsingle(repo, rev, None)
2768 ctx = scmutil.revsingle(repo, rev, None)
2769
2769
2770 if ctx.rev() is None:
2770 if ctx.rev() is None:
2771 ctx = repo[None]
2771 ctx = repo[None]
2772 parents = ctx.parents()
2772 parents = ctx.parents()
2773 taglist = []
2773 taglist = []
2774 for p in parents:
2774 for p in parents:
2775 taglist.extend(p.tags())
2775 taglist.extend(p.tags())
2776
2776
2777 dirty = ""
2777 dirty = ""
2778 if ctx.dirty(missing=True, merge=False, branch=False):
2778 if ctx.dirty(missing=True, merge=False, branch=False):
2779 dirty = '+'
2779 dirty = '+'
2780 fm.data(dirty=dirty)
2780 fm.data(dirty=dirty)
2781
2781
2782 hexoutput = [hexfunc(p.node()) for p in parents]
2782 hexoutput = [hexfunc(p.node()) for p in parents]
2783 if default or id:
2783 if default or id:
2784 output = ["%s%s" % ('+'.join(hexoutput), dirty)]
2784 output = ["%s%s" % ('+'.join(hexoutput), dirty)]
2785 fm.data(id="%s%s" % ('+'.join(hexoutput), dirty))
2785 fm.data(id="%s%s" % ('+'.join(hexoutput), dirty))
2786
2786
2787 if num:
2787 if num:
2788 numoutput = ["%d" % p.rev() for p in parents]
2788 numoutput = ["%d" % p.rev() for p in parents]
2789 output.append("%s%s" % ('+'.join(numoutput), dirty))
2789 output.append("%s%s" % ('+'.join(numoutput), dirty))
2790
2790
2791 fn = fm.nested('parents')
2791 fn = fm.nested('parents')
2792 for p in parents:
2792 for p in parents:
2793 fn.startitem()
2793 fn.startitem()
2794 fn.data(rev=p.rev())
2794 fn.data(rev=p.rev())
2795 fn.data(node=p.hex())
2795 fn.data(node=p.hex())
2796 fn.context(ctx=p)
2796 fn.context(ctx=p)
2797 fn.end()
2797 fn.end()
2798 else:
2798 else:
2799 hexoutput = hexfunc(ctx.node())
2799 hexoutput = hexfunc(ctx.node())
2800 if default or id:
2800 if default or id:
2801 output = [hexoutput]
2801 output = [hexoutput]
2802 fm.data(id=hexoutput)
2802 fm.data(id=hexoutput)
2803
2803
2804 if num:
2804 if num:
2805 output.append(pycompat.bytestr(ctx.rev()))
2805 output.append(pycompat.bytestr(ctx.rev()))
2806 taglist = ctx.tags()
2806 taglist = ctx.tags()
2807
2807
2808 if default and not ui.quiet:
2808 if default and not ui.quiet:
2809 b = ctx.branch()
2809 b = ctx.branch()
2810 if b != 'default':
2810 if b != 'default':
2811 output.append("(%s)" % b)
2811 output.append("(%s)" % b)
2812
2812
2813 # multiple tags for a single parent separated by '/'
2813 # multiple tags for a single parent separated by '/'
2814 t = '/'.join(taglist)
2814 t = '/'.join(taglist)
2815 if t:
2815 if t:
2816 output.append(t)
2816 output.append(t)
2817
2817
2818 # multiple bookmarks for a single parent separated by '/'
2818 # multiple bookmarks for a single parent separated by '/'
2819 bm = '/'.join(ctx.bookmarks())
2819 bm = '/'.join(ctx.bookmarks())
2820 if bm:
2820 if bm:
2821 output.append(bm)
2821 output.append(bm)
2822 else:
2822 else:
2823 if branch:
2823 if branch:
2824 output.append(ctx.branch())
2824 output.append(ctx.branch())
2825
2825
2826 if tags:
2826 if tags:
2827 output.extend(taglist)
2827 output.extend(taglist)
2828
2828
2829 if bookmarks:
2829 if bookmarks:
2830 output.extend(ctx.bookmarks())
2830 output.extend(ctx.bookmarks())
2831
2831
2832 fm.data(node=ctx.hex())
2832 fm.data(node=ctx.hex())
2833 fm.data(branch=ctx.branch())
2833 fm.data(branch=ctx.branch())
2834 fm.data(tags=fm.formatlist(taglist, name='tag', sep=':'))
2834 fm.data(tags=fm.formatlist(taglist, name='tag', sep=':'))
2835 fm.data(bookmarks=fm.formatlist(ctx.bookmarks(), name='bookmark'))
2835 fm.data(bookmarks=fm.formatlist(ctx.bookmarks(), name='bookmark'))
2836 fm.context(ctx=ctx)
2836 fm.context(ctx=ctx)
2837
2837
2838 fm.plain("%s\n" % ' '.join(output))
2838 fm.plain("%s\n" % ' '.join(output))
2839 fm.end()
2839 fm.end()
2840
2840
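For the local-repository case, identify() builds its default output as the parent hashes joined by '+', a trailing '+' when the working directory is dirty, the branch in parentheses when it is not 'default', then tags and bookmarks joined by '/'. A toy sketch of that assembly on made-up values (not a call into Mercurial; the hash below is a placeholder)::

    def formatid(parenthashes, dirty, branch, tags, bookmarks):
        # Mirror the `output` list assembled by identify() for the
        # default (no -nibtB flags) local case.
        out = ['+'.join(parenthashes) + ('+' if dirty else '')]
        if branch != 'default':
            out.append('(%s)' % branch)
        if tags:
            out.append('/'.join(tags))
        if bookmarks:
            out.append('/'.join(bookmarks))
        return ' '.join(out)

    print(formatid(['d1b4f1a2c3e5'], True, 'stable', ['tip'], ['work']))
    # -> d1b4f1a2c3e5+ (stable) tip work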
2841 @command('import|patch',
2841 @command('import|patch',
2842 [('p', 'strip', 1,
2842 [('p', 'strip', 1,
2843 _('directory strip option for patch. This has the same '
2843 _('directory strip option for patch. This has the same '
2844 'meaning as the corresponding patch option'), _('NUM')),
2844 'meaning as the corresponding patch option'), _('NUM')),
2845 ('b', 'base', '', _('base path (DEPRECATED)'), _('PATH')),
2845 ('b', 'base', '', _('base path (DEPRECATED)'), _('PATH')),
2846 ('e', 'edit', False, _('invoke editor on commit messages')),
2846 ('e', 'edit', False, _('invoke editor on commit messages')),
2847 ('f', 'force', None,
2847 ('f', 'force', None,
2848 _('skip check for outstanding uncommitted changes (DEPRECATED)')),
2848 _('skip check for outstanding uncommitted changes (DEPRECATED)')),
2849 ('', 'no-commit', None,
2849 ('', 'no-commit', None,
2850 _("don't commit, just update the working directory")),
2850 _("don't commit, just update the working directory")),
2851 ('', 'bypass', None,
2851 ('', 'bypass', None,
2852 _("apply patch without touching the working directory")),
2852 _("apply patch without touching the working directory")),
2853 ('', 'partial', None,
2853 ('', 'partial', None,
2854 _('commit even if some hunks fail')),
2854 _('commit even if some hunks fail')),
2855 ('', 'exact', None,
2855 ('', 'exact', None,
2856 _('abort if patch would apply lossily')),
2856 _('abort if patch would apply lossily')),
2857 ('', 'prefix', '',
2857 ('', 'prefix', '',
2858 _('apply patch to subdirectory'), _('DIR')),
2858 _('apply patch to subdirectory'), _('DIR')),
2859 ('', 'import-branch', None,
2859 ('', 'import-branch', None,
2860 _('use any branch information in patch (implied by --exact)'))] +
2860 _('use any branch information in patch (implied by --exact)'))] +
2861 commitopts + commitopts2 + similarityopts,
2861 commitopts + commitopts2 + similarityopts,
2862 _('[OPTION]... PATCH...'))
2862 _('[OPTION]... PATCH...'))
2863 def import_(ui, repo, patch1=None, *patches, **opts):
2863 def import_(ui, repo, patch1=None, *patches, **opts):
2864 """import an ordered set of patches
2864 """import an ordered set of patches
2865
2865
2866 Import a list of patches and commit them individually (unless
2866 Import a list of patches and commit them individually (unless
2867 --no-commit is specified).
2867 --no-commit is specified).
2868
2868
2869 To read a patch from standard input (stdin), use "-" as the patch
2869 To read a patch from standard input (stdin), use "-" as the patch
2870 name. If a URL is specified, the patch will be downloaded from
2870 name. If a URL is specified, the patch will be downloaded from
2871 there.
2871 there.
2872
2872
2873 Import first applies changes to the working directory (unless
2873 Import first applies changes to the working directory (unless
2874 --bypass is specified); import will abort if there are outstanding
2874 --bypass is specified); import will abort if there are outstanding
2875 changes.
2875 changes.
2876
2876
2877 Use --bypass to apply and commit patches directly to the
2877 Use --bypass to apply and commit patches directly to the
2878 repository, without affecting the working directory. Without
2878 repository, without affecting the working directory. Without
2879 --exact, patches will be applied on top of the working directory
2879 --exact, patches will be applied on top of the working directory
2880 parent revision.
2880 parent revision.
2881
2881
2882 You can import a patch straight from a mail message. Even patches
2882 You can import a patch straight from a mail message. Even patches
2883 as attachments work (to use the body part, it must have type
2883 as attachments work (to use the body part, it must have type
2884 text/plain or text/x-patch). From and Subject headers of the email
2884 text/plain or text/x-patch). From and Subject headers of the email
2885 message are used as default committer and commit message. All
2885 message are used as default committer and commit message. All
2886 text/plain body parts before the first diff are added to the commit
2886 text/plain body parts before the first diff are added to the commit
2887 message.
2887 message.
2888
2888
2889 If the imported patch was generated by :hg:`export`, user and
2889 If the imported patch was generated by :hg:`export`, user and
2890 description from patch override values from message headers and
2890 description from patch override values from message headers and
2891 body. Values given on command line with -m/--message and -u/--user
2891 body. Values given on command line with -m/--message and -u/--user
2892 override these.
2892 override these.
2893
2893
2894 If --exact is specified, import will set the working directory to
2894 If --exact is specified, import will set the working directory to
2895 the parent of each patch before applying it, and will abort if the
2895 the parent of each patch before applying it, and will abort if the
2896 resulting changeset has a different ID than the one recorded in
2896 resulting changeset has a different ID than the one recorded in
2897 the patch. This will guard against various ways that portable
2897 the patch. This will guard against various ways that portable
2898 patch formats and mail systems might fail to transfer Mercurial
2898 patch formats and mail systems might fail to transfer Mercurial
2899 data or metadata. See :hg:`bundle` for lossless transmission.
2899 data or metadata. See :hg:`bundle` for lossless transmission.
2900
2900
2901 Use --partial to ensure a changeset will be created from the patch
2901 Use --partial to ensure a changeset will be created from the patch
2902 even if some hunks fail to apply. Hunks that fail to apply will be
2902 even if some hunks fail to apply. Hunks that fail to apply will be
2903 written to a <target-file>.rej file. Conflicts can then be resolved
2903 written to a <target-file>.rej file. Conflicts can then be resolved
2904 by hand before :hg:`commit --amend` is run to update the created
2904 by hand before :hg:`commit --amend` is run to update the created
2905 changeset. This flag exists to let people import patches that
2905 changeset. This flag exists to let people import patches that
2906 partially apply without losing the associated metadata (author,
2906 partially apply without losing the associated metadata (author,
2907 date, description, ...).
2907 date, description, ...).
2908
2908
2909 .. note::
2909 .. note::
2910
2910
2911 When no hunks apply cleanly, :hg:`import --partial` will create
2911 When no hunks apply cleanly, :hg:`import --partial` will create
2912 an empty changeset, importing only the patch metadata.
2912 an empty changeset, importing only the patch metadata.
2913
2913
2914 With -s/--similarity, hg will attempt to discover renames and
2914 With -s/--similarity, hg will attempt to discover renames and
2915 copies in the patch in the same way as :hg:`addremove`.
2915 copies in the patch in the same way as :hg:`addremove`.
2916
2916
2917 It is possible to use external patch programs to perform the patch
2917 It is possible to use external patch programs to perform the patch
2918 by setting the ``ui.patch`` configuration option. For the default
2918 by setting the ``ui.patch`` configuration option. For the default
2919 internal tool, the fuzz can also be configured via ``patch.fuzz``.
2919 internal tool, the fuzz can also be configured via ``patch.fuzz``.
2920 See :hg:`help config` for more information about configuration
2920 See :hg:`help config` for more information about configuration
2921 files and how to use these options.
2921 files and how to use these options.
2922
2922
2923 See :hg:`help dates` for a list of formats valid for -d/--date.
2923 See :hg:`help dates` for a list of formats valid for -d/--date.
2924
2924
2925 .. container:: verbose
2925 .. container:: verbose
2926
2926
2927 Examples:
2927 Examples:
2928
2928
2929 - import a traditional patch from a website and detect renames::
2929 - import a traditional patch from a website and detect renames::
2930
2930
2931 hg import -s 80 http://example.com/bugfix.patch
2931 hg import -s 80 http://example.com/bugfix.patch
2932
2932
2933 - import a changeset from an hgweb server::
2933 - import a changeset from an hgweb server::
2934
2934
2935 hg import https://www.mercurial-scm.org/repo/hg/rev/5ca8c111e9aa
2935 hg import https://www.mercurial-scm.org/repo/hg/rev/5ca8c111e9aa
2936
2936
2937 - import all the patches in a Unix-style mbox::
2937 - import all the patches in a Unix-style mbox::
2938
2938
2939 hg import incoming-patches.mbox
2939 hg import incoming-patches.mbox
2940
2940
2941 - import patches from stdin::
2941 - import patches from stdin::
2942
2942
2943 hg import -
2943 hg import -
2944
2944
2945 - attempt to exactly restore an exported changeset (not always
2945 - attempt to exactly restore an exported changeset (not always
2946 possible)::
2946 possible)::
2947
2947
2948 hg import --exact proposed-fix.patch
2948 hg import --exact proposed-fix.patch
2949
2949
2950 - use an external tool to apply a patch which is too fuzzy for
2950 - use an external tool to apply a patch which is too fuzzy for
2951 the default internal tool::
2951 the default internal tool::
2952
2952
2953 hg import --config ui.patch="patch --merge" fuzzy.patch
2953 hg import --config ui.patch="patch --merge" fuzzy.patch
2954
2954
2955 - change the default fuzzing from 2 to a less strict 7::
2955 - change the default fuzzing from 2 to a less strict 7::
2956
2956
2957 hg import --config ui.fuzz=7 fuzz.patch
2957 hg import --config ui.fuzz=7 fuzz.patch
2958
2958
2959 Returns 0 on success, 1 on partial success (see --partial).
2959 Returns 0 on success, 1 on partial success (see --partial).
2960 """
2960 """
2961
2961
2962 opts = pycompat.byteskwargs(opts)
2962 opts = pycompat.byteskwargs(opts)
2963 if not patch1:
2963 if not patch1:
2964 raise error.Abort(_('need at least one patch to import'))
2964 raise error.Abort(_('need at least one patch to import'))
2965
2965
2966 patches = (patch1,) + patches
2966 patches = (patch1,) + patches
2967
2967
2968 date = opts.get('date')
2968 date = opts.get('date')
2969 if date:
2969 if date:
2970 opts['date'] = util.parsedate(date)
2970 opts['date'] = util.parsedate(date)
2971
2971
2972 exact = opts.get('exact')
2972 exact = opts.get('exact')
2973 update = not opts.get('bypass')
2973 update = not opts.get('bypass')
2974 if not update and opts.get('no_commit'):
2974 if not update and opts.get('no_commit'):
2975 raise error.Abort(_('cannot use --no-commit with --bypass'))
2975 raise error.Abort(_('cannot use --no-commit with --bypass'))
2976 try:
2976 try:
2977 sim = float(opts.get('similarity') or 0)
2977 sim = float(opts.get('similarity') or 0)
2978 except ValueError:
2978 except ValueError:
2979 raise error.Abort(_('similarity must be a number'))
2979 raise error.Abort(_('similarity must be a number'))
2980 if sim < 0 or sim > 100:
2980 if sim < 0 or sim > 100:
2981 raise error.Abort(_('similarity must be between 0 and 100'))
2981 raise error.Abort(_('similarity must be between 0 and 100'))
2982 if sim and not update:
2982 if sim and not update:
2983 raise error.Abort(_('cannot use --similarity with --bypass'))
2983 raise error.Abort(_('cannot use --similarity with --bypass'))
2984 if exact:
2984 if exact:
2985 if opts.get('edit'):
2985 if opts.get('edit'):
2986 raise error.Abort(_('cannot use --exact with --edit'))
2986 raise error.Abort(_('cannot use --exact with --edit'))
2987 if opts.get('prefix'):
2987 if opts.get('prefix'):
2988 raise error.Abort(_('cannot use --exact with --prefix'))
2988 raise error.Abort(_('cannot use --exact with --prefix'))
2989
2989
2990 base = opts["base"]
2990 base = opts["base"]
2991 wlock = dsguard = lock = tr = None
2991 wlock = dsguard = lock = tr = None
2992 msgs = []
2992 msgs = []
2993 ret = 0
2993 ret = 0
2994
2994
2995
2995
2996 try:
2996 try:
2997 wlock = repo.wlock()
2997 wlock = repo.wlock()
2998
2998
2999 if update:
2999 if update:
3000 cmdutil.checkunfinished(repo)
3000 cmdutil.checkunfinished(repo)
3001 if (exact or not opts.get('force')):
3001 if (exact or not opts.get('force')):
3002 cmdutil.bailifchanged(repo)
3002 cmdutil.bailifchanged(repo)
3003
3003
3004 if not opts.get('no_commit'):
3004 if not opts.get('no_commit'):
3005 lock = repo.lock()
3005 lock = repo.lock()
3006 tr = repo.transaction('import')
3006 tr = repo.transaction('import')
3007 else:
3007 else:
3008 dsguard = dirstateguard.dirstateguard(repo, 'import')
3008 dsguard = dirstateguard.dirstateguard(repo, 'import')
3009 parents = repo[None].parents()
3009 parents = repo[None].parents()
3010 for patchurl in patches:
3010 for patchurl in patches:
3011 if patchurl == '-':
3011 if patchurl == '-':
3012 ui.status(_('applying patch from stdin\n'))
3012 ui.status(_('applying patch from stdin\n'))
3013 patchfile = ui.fin
3013 patchfile = ui.fin
3014 patchurl = 'stdin' # for error message
3014 patchurl = 'stdin' # for error message
3015 else:
3015 else:
3016 patchurl = os.path.join(base, patchurl)
3016 patchurl = os.path.join(base, patchurl)
3017 ui.status(_('applying %s\n') % patchurl)
3017 ui.status(_('applying %s\n') % patchurl)
3018 patchfile = hg.openpath(ui, patchurl)
3018 patchfile = hg.openpath(ui, patchurl)
3019
3019
3020 haspatch = False
3020 haspatch = False
3021 for hunk in patch.split(patchfile):
3021 for hunk in patch.split(patchfile):
3022 (msg, node, rej) = cmdutil.tryimportone(ui, repo, hunk,
3022 (msg, node, rej) = cmdutil.tryimportone(ui, repo, hunk,
3023 parents, opts,
3023 parents, opts,
3024 msgs, hg.clean)
3024 msgs, hg.clean)
3025 if msg:
3025 if msg:
3026 haspatch = True
3026 haspatch = True
3027 ui.note(msg + '\n')
3027 ui.note(msg + '\n')
3028 if update or exact:
3028 if update or exact:
3029 parents = repo[None].parents()
3029 parents = repo[None].parents()
3030 else:
3030 else:
3031 parents = [repo[node]]
3031 parents = [repo[node]]
3032 if rej:
3032 if rej:
3033 ui.write_err(_("patch applied partially\n"))
3033 ui.write_err(_("patch applied partially\n"))
3034 ui.write_err(_("(fix the .rej files and run "
3034 ui.write_err(_("(fix the .rej files and run "
3035 "`hg commit --amend`)\n"))
3035 "`hg commit --amend`)\n"))
3036 ret = 1
3036 ret = 1
3037 break
3037 break
3038
3038
3039 if not haspatch:
3039 if not haspatch:
3040 raise error.Abort(_('%s: no diffs found') % patchurl)
3040 raise error.Abort(_('%s: no diffs found') % patchurl)
3041
3041
3042 if tr:
3042 if tr:
3043 tr.close()
3043 tr.close()
3044 if msgs:
3044 if msgs:
3045 repo.savecommitmessage('\n* * *\n'.join(msgs))
3045 repo.savecommitmessage('\n* * *\n'.join(msgs))
3046 if dsguard:
3046 if dsguard:
3047 dsguard.close()
3047 dsguard.close()
3048 return ret
3048 return ret
3049 finally:
3049 finally:
3050 if tr:
3050 if tr:
3051 tr.release()
3051 tr.release()
3052 release(lock, dsguard, wlock)
3052 release(lock, dsguard, wlock)
3053
3053
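import_() validates its options up front: -s/--similarity must parse as a number between 0 and 100 and cannot be combined with --bypass. A standalone sketch of that check, raising ValueError where the real code raises error.Abort (the function name and exception type here are illustrative)::

    def checksimilarity(value, bypass=False):
        # Mirror the --similarity validation in import_().
        try:
            sim = float(value or 0)
        except ValueError:
            raise ValueError('similarity must be a number')
        if sim < 0 or sim > 100:
            raise ValueError('similarity must be between 0 and 100')
        if sim and bypass:
            raise ValueError('cannot use --similarity with --bypass')
        return sim

    print(checksimilarity('80'))   # 80.0
    print(checksimilarity(None))   # 0.0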
3054 @command('incoming|in',
3054 @command('incoming|in',
3055 [('f', 'force', None,
3055 [('f', 'force', None,
3056 _('run even if remote repository is unrelated')),
3056 _('run even if remote repository is unrelated')),
3057 ('n', 'newest-first', None, _('show newest record first')),
3057 ('n', 'newest-first', None, _('show newest record first')),
3058 ('', 'bundle', '',
3058 ('', 'bundle', '',
3059 _('file to store the bundles into'), _('FILE')),
3059 _('file to store the bundles into'), _('FILE')),
3060 ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
3060 ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
3061 ('B', 'bookmarks', False, _("compare bookmarks")),
3061 ('B', 'bookmarks', False, _("compare bookmarks")),
3062 ('b', 'branch', [],
3062 ('b', 'branch', [],
3063 _('a specific branch you would like to pull'), _('BRANCH')),
3063 _('a specific branch you would like to pull'), _('BRANCH')),
3064 ] + logopts + remoteopts + subrepoopts,
3064 ] + logopts + remoteopts + subrepoopts,
3065 _('[-p] [-n] [-M] [-f] [-r REV]... [--bundle FILENAME] [SOURCE]'))
3065 _('[-p] [-n] [-M] [-f] [-r REV]... [--bundle FILENAME] [SOURCE]'))
3066 def incoming(ui, repo, source="default", **opts):
3066 def incoming(ui, repo, source="default", **opts):
3067 """show new changesets found in source
3067 """show new changesets found in source
3068
3068
3069 Show new changesets found in the specified path/URL or the default
3069 Show new changesets found in the specified path/URL or the default
3070 pull location. These are the changesets that would have been pulled
3070 pull location. These are the changesets that would have been pulled
3071 if a pull had been requested at the time you issued this command.
3071 if a pull had been requested at the time you issued this command.
3072
3072
3073 See pull for valid source format details.
3073 See pull for valid source format details.
3074
3074
3075 .. container:: verbose
3075 .. container:: verbose
3076
3076
3077 With -B/--bookmarks, the result of bookmark comparison between
3077 With -B/--bookmarks, the result of bookmark comparison between
3078 local and remote repositories is displayed. With -v/--verbose,
3078 local and remote repositories is displayed. With -v/--verbose,
3079 status is also displayed for each bookmark like below::
3079 status is also displayed for each bookmark like below::
3080
3080
3081 BM1 01234567890a added
3081 BM1 01234567890a added
3082 BM2 1234567890ab advanced
3082 BM2 1234567890ab advanced
3083 BM3 234567890abc diverged
3083 BM3 234567890abc diverged
3084 BM4 34567890abcd changed
3084 BM4 34567890abcd changed
3085
3085
3086 The action taken locally when pulling depends on the
3086 The action taken locally when pulling depends on the
3087 status of each bookmark:
3087 status of each bookmark:
3088
3088
3089 :``added``: pull will create it
3089 :``added``: pull will create it
3090 :``advanced``: pull will update it
3090 :``advanced``: pull will update it
3091 :``diverged``: pull will create a divergent bookmark
3091 :``diverged``: pull will create a divergent bookmark
3092 :``changed``: result depends on remote changesets
3092 :``changed``: result depends on remote changesets
3093
3093
3094 From the point of view of pulling behavior, a bookmark
3094 From the point of view of pulling behavior, a bookmark
3095 existing only in the remote repository is treated as ``added``,
3095 existing only in the remote repository is treated as ``added``,
3096 even if it was in fact locally deleted.
3096 even if it was in fact locally deleted.
3097
3097
3098 .. container:: verbose
3098 .. container:: verbose
3099
3099
3100 For a remote repository, using --bundle avoids downloading the
3100 For a remote repository, using --bundle avoids downloading the
3101 changesets twice if the incoming command is followed by a pull.
3101 changesets twice if the incoming command is followed by a pull.
3102
3102
3103 Examples:
3103 Examples:
3104
3104
3105 - show incoming changes with patches and full description::
3105 - show incoming changes with patches and full description::
3106
3106
3107 hg incoming -vp
3107 hg incoming -vp
3108
3108
3109 - show incoming changes excluding merges, store a bundle::
3109 - show incoming changes excluding merges, store a bundle::
3110
3110
3111 hg in -vpM --bundle incoming.hg
3111 hg in -vpM --bundle incoming.hg
3112 hg pull incoming.hg
3112 hg pull incoming.hg
3113
3113
3114 - briefly list changes inside a bundle::
3114 - briefly list changes inside a bundle::
3115
3115
3116 hg in changes.hg -T "{desc|firstline}\\n"
3116 hg in changes.hg -T "{desc|firstline}\\n"
3117
3117
3118 Returns 0 if there are incoming changes, 1 otherwise.
3118 Returns 0 if there are incoming changes, 1 otherwise.
3119 """
3119 """
3120 opts = pycompat.byteskwargs(opts)
3120 opts = pycompat.byteskwargs(opts)
3121 if opts.get('graph'):
3121 if opts.get('graph'):
3122 cmdutil.checkunsupportedgraphflags([], opts)
3122 cmdutil.checkunsupportedgraphflags([], opts)
3123 def display(other, chlist, displayer):
3123 def display(other, chlist, displayer):
3124 revdag = cmdutil.graphrevs(other, chlist, opts)
3124 revdag = cmdutil.graphrevs(other, chlist, opts)
3125 cmdutil.displaygraph(ui, repo, revdag, displayer,
3125 cmdutil.displaygraph(ui, repo, revdag, displayer,
3126 graphmod.asciiedges)
3126 graphmod.asciiedges)
3127
3127
3128 hg._incoming(display, lambda: 1, ui, repo, source, opts, buffered=True)
3128 hg._incoming(display, lambda: 1, ui, repo, source, opts, buffered=True)
3129 return 0
3129 return 0
3130
3130
3131 if opts.get('bundle') and opts.get('subrepos'):
3131 if opts.get('bundle') and opts.get('subrepos'):
3132 raise error.Abort(_('cannot combine --bundle and --subrepos'))
3132 raise error.Abort(_('cannot combine --bundle and --subrepos'))
3133
3133
3134 if opts.get('bookmarks'):
3134 if opts.get('bookmarks'):
3135 source, branches = hg.parseurl(ui.expandpath(source),
3135 source, branches = hg.parseurl(ui.expandpath(source),
3136 opts.get('branch'))
3136 opts.get('branch'))
3137 other = hg.peer(repo, opts, source)
3137 other = hg.peer(repo, opts, source)
3138 if 'bookmarks' not in other.listkeys('namespaces'):
3138 if 'bookmarks' not in other.listkeys('namespaces'):
3139 ui.warn(_("remote doesn't support bookmarks\n"))
3139 ui.warn(_("remote doesn't support bookmarks\n"))
3140 return 0
3140 return 0
3141 ui.pager('incoming')
3141 ui.pager('incoming')
3142 ui.status(_('comparing with %s\n') % util.hidepassword(source))
3142 ui.status(_('comparing with %s\n') % util.hidepassword(source))
3143 return bookmarks.incoming(ui, repo, other)
3143 return bookmarks.incoming(ui, repo, other)
3144
3144
3145 repo._subtoppath = ui.expandpath(source)
3145 repo._subtoppath = ui.expandpath(source)
3146 try:
3146 try:
3147 return hg.incoming(ui, repo, source, opts)
3147 return hg.incoming(ui, repo, source, opts)
3148 finally:
3148 finally:
3149 del repo._subtoppath
3149 del repo._subtoppath
3150
3150
3151
3151
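The bookmark statuses listed in the -B/--bookmarks help above determine what a later pull will do with each bookmark. Restated as plain data for quick reference (the dict itself is illustrative, not a structure Mercurial uses)::

    BOOKMARK_PULL_ACTIONS = {
        'added':    'pull will create it',
        'advanced': 'pull will update it',
        'diverged': 'pull will create a divergent bookmark',
        'changed':  'result depends on remote changesets',
    }

    for status in ('added', 'advanced', 'diverged', 'changed'):
        print('%-9s %s' % (status, BOOKMARK_PULL_ACTIONS[status]))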
3152 @command('^init', remoteopts, _('[-e CMD] [--remotecmd CMD] [DEST]'),
3152 @command('^init', remoteopts, _('[-e CMD] [--remotecmd CMD] [DEST]'),
3153 norepo=True)
3153 norepo=True)
3154 def init(ui, dest=".", **opts):
3154 def init(ui, dest=".", **opts):
3155 """create a new repository in the given directory
3155 """create a new repository in the given directory
3156
3156
3157 Initialize a new repository in the given directory. If the given
3157 Initialize a new repository in the given directory. If the given
3158 directory does not exist, it will be created.
3158 directory does not exist, it will be created.
3159
3159
3160 If no directory is given, the current directory is used.
3160 If no directory is given, the current directory is used.
3161
3161
3162 It is possible to specify an ``ssh://`` URL as the destination.
3162 It is possible to specify an ``ssh://`` URL as the destination.
3163 See :hg:`help urls` for more information.
3163 See :hg:`help urls` for more information.
3164
3164
3165 Returns 0 on success.
3165 Returns 0 on success.
3166 """
3166 """
3167 opts = pycompat.byteskwargs(opts)
3167 opts = pycompat.byteskwargs(opts)
3168 hg.peer(ui, opts, ui.expandpath(dest), create=True)
3168 hg.peer(ui, opts, ui.expandpath(dest), create=True)
3169
3169
3170 @command('locate',
3170 @command('locate',
3171 [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
3171 [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
3172 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
3172 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
3173 ('f', 'fullpath', None, _('print complete paths from the filesystem root')),
3173 ('f', 'fullpath', None, _('print complete paths from the filesystem root')),
3174 ] + walkopts,
3174 ] + walkopts,
3175 _('[OPTION]... [PATTERN]...'))
3175 _('[OPTION]... [PATTERN]...'))
3176 def locate(ui, repo, *pats, **opts):
3176 def locate(ui, repo, *pats, **opts):
3177 """locate files matching specific patterns (DEPRECATED)
3177 """locate files matching specific patterns (DEPRECATED)
3178
3178
3179 Print files under Mercurial control in the working directory whose
3179 Print files under Mercurial control in the working directory whose
3180 names match the given patterns.
3180 names match the given patterns.
3181
3181
3182 By default, this command searches all directories in the working
3182 By default, this command searches all directories in the working
3183 directory. To search just the current directory and its
3183 directory. To search just the current directory and its
3184 subdirectories, use "--include .".
3184 subdirectories, use "--include .".
3185
3185
3186 If no patterns are given to match, this command prints the names
3186 If no patterns are given to match, this command prints the names
3187 of all files under Mercurial control in the working directory.
3187 of all files under Mercurial control in the working directory.
3188
3188
3189 If you want to feed the output of this command into the "xargs"
3189 If you want to feed the output of this command into the "xargs"
3190 command, use the -0 option to both this command and "xargs". This
3190 command, use the -0 option to both this command and "xargs". This
3191 will avoid the problem of "xargs" treating single filenames that
3191 will avoid the problem of "xargs" treating single filenames that
3192 contain whitespace as multiple filenames.
3192 contain whitespace as multiple filenames.
3193
3193
3194 See :hg:`help files` for a more versatile command.
3194 See :hg:`help files` for a more versatile command.
3195
3195
3196 Returns 0 if a match is found, 1 otherwise.
3196 Returns 0 if a match is found, 1 otherwise.
3197 """
3197 """
3198 opts = pycompat.byteskwargs(opts)
3198 opts = pycompat.byteskwargs(opts)
3199 if opts.get('print0'):
3199 if opts.get('print0'):
3200 end = '\0'
3200 end = '\0'
3201 else:
3201 else:
3202 end = '\n'
3202 end = '\n'
3203 rev = scmutil.revsingle(repo, opts.get('rev'), None).node()
3203 rev = scmutil.revsingle(repo, opts.get('rev'), None).node()
3204
3204
3205 ret = 1
3205 ret = 1
3206 ctx = repo[rev]
3206 ctx = repo[rev]
3207 m = scmutil.match(ctx, pats, opts, default='relglob',
3207 m = scmutil.match(ctx, pats, opts, default='relglob',
3208 badfn=lambda x, y: False)
3208 badfn=lambda x, y: False)
3209
3209
3210 ui.pager('locate')
3210 ui.pager('locate')
3211 for abs in ctx.matches(m):
3211 for abs in ctx.matches(m):
3212 if opts.get('fullpath'):
3212 if opts.get('fullpath'):
3213 ui.write(repo.wjoin(abs), end)
3213 ui.write(repo.wjoin(abs), end)
3214 else:
3214 else:
3215 ui.write(((pats and m.rel(abs)) or abs), end)
3215 ui.write(((pats and m.rel(abs)) or abs), end)
3216 ret = 0
3216 ret = 0
3217
3217
3218 return ret
3218 return ret
3219
3219
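locate() switches its record terminator from a newline to a NUL byte when --print0 is given, so that file names containing whitespace survive a pipe into `xargs -0`. A minimal sketch of that output convention, using sys.stdout in place of Mercurial's ui object::

    import sys

    def printnames(names, print0=False):
        # One name per line by default; NUL-terminated with --print0.
        end = '\0' if print0 else '\n'
        for name in names:
            sys.stdout.write(name + end)

    printnames(['with space.txt', 'plain.txt'], print0=True)
    # pipe this into `xargs -0` to handle the embedded space safely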
3220 @command('^log|history',
3220 @command('^log|history',
3221 [('f', 'follow', None,
3221 [('f', 'follow', None,
3222 _('follow changeset history, or file history across copies and renames')),
3222 _('follow changeset history, or file history across copies and renames')),
3223 ('', 'follow-first', None,
3223 ('', 'follow-first', None,
3224 _('only follow the first parent of merge changesets (DEPRECATED)')),
3224 _('only follow the first parent of merge changesets (DEPRECATED)')),
3225 ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
3225 ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
3226 ('C', 'copies', None, _('show copied files')),
3226 ('C', 'copies', None, _('show copied files')),
3227 ('k', 'keyword', [],
3227 ('k', 'keyword', [],
3228 _('do case-insensitive search for a given text'), _('TEXT')),
3228 _('do case-insensitive search for a given text'), _('TEXT')),
3229 ('r', 'rev', [], _('show the specified revision or revset'), _('REV')),
3229 ('r', 'rev', [], _('show the specified revision or revset'), _('REV')),
3230 ('', 'removed', None, _('include revisions where files were removed')),
3230 ('', 'removed', None, _('include revisions where files were removed')),
3231 ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
3231 ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
3232 ('u', 'user', [], _('revisions committed by user'), _('USER')),
3232 ('u', 'user', [], _('revisions committed by user'), _('USER')),
3233 ('', 'only-branch', [],
3233 ('', 'only-branch', [],
3234 _('show only changesets within the given named branch (DEPRECATED)'),
3234 _('show only changesets within the given named branch (DEPRECATED)'),
3235 _('BRANCH')),
3235 _('BRANCH')),
3236 ('b', 'branch', [],
3236 ('b', 'branch', [],
3237 _('show changesets within the given named branch'), _('BRANCH')),
3237 _('show changesets within the given named branch'), _('BRANCH')),
3238 ('P', 'prune', [],
3238 ('P', 'prune', [],
3239 _('do not display revision or any of its ancestors'), _('REV')),
3239 _('do not display revision or any of its ancestors'), _('REV')),
3240 ] + logopts + walkopts,
3240 ] + logopts + walkopts,
3241 _('[OPTION]... [FILE]'),
3241 _('[OPTION]... [FILE]'),
3242 inferrepo=True)
3242 inferrepo=True)
3243 def log(ui, repo, *pats, **opts):
3243 def log(ui, repo, *pats, **opts):
3244 """show revision history of entire repository or files
3244 """show revision history of entire repository or files
3245
3245
3246 Print the revision history of the specified files or the entire
3246 Print the revision history of the specified files or the entire
3247 project.
3247 project.
3248
3248
3249 If no revision range is specified, the default is ``tip:0`` unless
3249 If no revision range is specified, the default is ``tip:0`` unless
3250 --follow is set, in which case the working directory parent is
3250 --follow is set, in which case the working directory parent is
3251 used as the starting revision.
3251 used as the starting revision.
3252
3252
3253 File history is shown without following rename or copy history of
3253 File history is shown without following rename or copy history of
3254 files. Use -f/--follow with a filename to follow history across
3254 files. Use -f/--follow with a filename to follow history across
3255 renames and copies. --follow without a filename will only show
3255 renames and copies. --follow without a filename will only show
3256 ancestors or descendants of the starting revision.
3256 ancestors or descendants of the starting revision.
3257
3257
3258 By default this command prints revision number and changeset id,
3258 By default this command prints revision number and changeset id,
3259 tags, non-trivial parents, user, date and time, and a summary for
3259 tags, non-trivial parents, user, date and time, and a summary for
3260 each commit. When the -v/--verbose switch is used, the list of
3260 each commit. When the -v/--verbose switch is used, the list of
3261 changed files and full commit message are shown.
3261 changed files and full commit message are shown.
3262
3262
3263 With --graph the revisions are shown as an ASCII art DAG with the most
3263 With --graph the revisions are shown as an ASCII art DAG with the most
3264 recent changeset at the top.
3264 recent changeset at the top.
3265 'o' is a changeset, '@' is a working directory parent, 'x' is obsolete,
3265 'o' is a changeset, '@' is a working directory parent, 'x' is obsolete,
3266 and '+' represents a fork where the changeset from the lines below is a
3266 and '+' represents a fork where the changeset from the lines below is a
3267 parent of the 'o' merge on the same line.
3267 parent of the 'o' merge on the same line.
3268 Paths in the DAG are represented with '|', '/' and so forth. ':' in place
3268 Paths in the DAG are represented with '|', '/' and so forth. ':' in place
3269 of a '|' indicates one or more revisions in a path are omitted.
3269 of a '|' indicates one or more revisions in a path are omitted.
3270
3270
3271 .. note::
3271 .. note::
3272
3272
3273 :hg:`log --patch` may generate unexpected diff output for merge
3273 :hg:`log --patch` may generate unexpected diff output for merge
3274 changesets, as it will only compare the merge changeset against
3274 changesets, as it will only compare the merge changeset against
3275 its first parent. Also, only files different from BOTH parents
3275 its first parent. Also, only files different from BOTH parents
3276 will appear in files:.
3276 will appear in files:.
3277
3277
3278 .. note::
3278 .. note::
3279
3279
3280 For performance reasons, :hg:`log FILE` may omit duplicate changes
3280 For performance reasons, :hg:`log FILE` may omit duplicate changes
3281 made on branches and will not show removals or mode changes. To
3281 made on branches and will not show removals or mode changes. To
3282 see all such changes, use the --removed switch.
3282 see all such changes, use the --removed switch.
3283
3283
3284 .. container:: verbose
3284 .. container:: verbose
3285
3285
3286 Some examples:
3286 Some examples:
3287
3287
3288 - changesets with full descriptions and file lists::
3288 - changesets with full descriptions and file lists::
3289
3289
3290 hg log -v
3290 hg log -v
3291
3291
3292 - changesets ancestral to the working directory::
3292 - changesets ancestral to the working directory::
3293
3293
3294 hg log -f
3294 hg log -f
3295
3295
3296 - last 10 commits on the current branch::
3296 - last 10 commits on the current branch::
3297
3297
3298 hg log -l 10 -b .
3298 hg log -l 10 -b .
3299
3299
3300 - changesets showing all modifications of a file, including removals::
3300 - changesets showing all modifications of a file, including removals::
3301
3301
3302 hg log --removed file.c
3302 hg log --removed file.c
3303
3303
3304 - all changesets that touch a directory, with diffs, excluding merges::
3304 - all changesets that touch a directory, with diffs, excluding merges::
3305
3305
3306 hg log -Mp lib/
3306 hg log -Mp lib/
3307
3307
3308 - all revision numbers that match a keyword::
3308 - all revision numbers that match a keyword::
3309
3309
3310 hg log -k bug --template "{rev}\\n"
3310 hg log -k bug --template "{rev}\\n"
3311
3311
3312 - the full hash identifier of the working directory parent::
3312 - the full hash identifier of the working directory parent::
3313
3313
3314 hg log -r . --template "{node}\\n"
3314 hg log -r . --template "{node}\\n"
3315
3315
3316 - list available log templates::
3316 - list available log templates::
3317

          hg log -T list

      - check if a given changeset is included in a tagged release::

          hg log -r "a21ccf and ancestor(1.9)"

      - find all changesets by some user in a date range::

          hg log -k alice -d "may 2008 to jul 2008"

      - summary of all changesets after the last tag::

          hg log -r "last(tagged())::" --template "{desc|firstline}\\n"

    See :hg:`help dates` for a list of formats valid for -d/--date.

    See :hg:`help revisions` for more about specifying and ordering
    revisions.

    See :hg:`help templates` for more about pre-packaged styles and
    specifying custom templates.

    Returns 0 on success.

    """
    opts = pycompat.byteskwargs(opts)
    if opts.get('follow') and opts.get('rev'):
        opts['rev'] = [revsetlang.formatspec('reverse(::%lr)', opts.get('rev'))]
        del opts['follow']

    if opts.get('graph'):
        return cmdutil.graphlog(ui, repo, pats, opts)

    revs, expr, filematcher = cmdutil.getlogrevs(repo, pats, opts)
    limit = cmdutil.loglimit(opts)
    count = 0

    getrenamed = None
    if opts.get('copies'):
        endrev = None
        if opts.get('rev'):
            endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
        getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)

    ui.pager('log')
    displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
    for rev in revs:
        if count == limit:
            break
        ctx = repo[rev]
        copies = None
        if getrenamed is not None and rev:
            copies = []
            for fn in ctx.files():
                rename = getrenamed(fn, rev)
                if rename:
                    copies.append((fn, rename[0]))
        if filematcher:
            revmatchfn = filematcher(ctx.rev())
        else:
            revmatchfn = None
        displayer.show(ctx, copies=copies, matchfn=revmatchfn)
        if displayer.flush(ctx):
            count += 1

    displayer.close()

@command('manifest',
    [('r', 'rev', '', _('revision to display'), _('REV')),
     ('', 'all', False, _("list files from all revisions"))]
    + formatteropts,
    _('[-r REV]'))
def manifest(ui, repo, node=None, rev=None, **opts):
    """output the current or given revision of the project manifest

    Print a list of version controlled files for the given revision.
    If no revision is given, the first parent of the working directory
    is used, or the null revision if no revision is checked out.

    With -v, print file permissions, symlink and executable bits.
    With --debug, print file revision hashes.

    If option --all is specified, the list of all files from all revisions
    is printed. This includes deleted and renamed files.

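    For instance, assuming the repository carries a tag named ``1.0``, the
    files that were part of that release could be listed with::

        hg manifest -r 1.0
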
    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)
    fm = ui.formatter('manifest', opts)

    if opts.get('all'):
        if rev or node:
            raise error.Abort(_("can't specify a revision with --all"))

        res = []
        prefix = "data/"
        suffix = ".i"
        plen = len(prefix)
        slen = len(suffix)
        with repo.lock():
            for fn, b, size in repo.store.datafiles():
                if size != 0 and fn[-slen:] == suffix and fn[:plen] == prefix:
                    res.append(fn[plen:-slen])
        ui.pager('manifest')
        for f in res:
            fm.startitem()
            fm.write("path", '%s\n', f)
        fm.end()
        return

    if rev and node:
        raise error.Abort(_("please specify just one revision"))

    if not node:
        node = rev

    char = {'l': '@', 'x': '*', '': ''}
    mode = {'l': '644', 'x': '755', '': '644'}
    ctx = scmutil.revsingle(repo, node)
    mf = ctx.manifest()
    ui.pager('manifest')
    for f in ctx:
        fm.startitem()
        fl = ctx[f].flags()
        fm.condwrite(ui.debugflag, 'hash', '%s ', hex(mf[f]))
        fm.condwrite(ui.verbose, 'mode type', '%s %1s ', mode[fl], char[fl])
        fm.write('path', '%s\n', f)
    fm.end()

@command('^merge',
    [('f', 'force', None,
      _('force a merge including outstanding changes (DEPRECATED)')),
     ('r', 'rev', '', _('revision to merge'), _('REV')),
     ('P', 'preview', None,
      _('review revisions to merge (no merge is performed)'))
    ] + mergetoolopts,
    _('[-P] [[-r] REV]'))
def merge(ui, repo, node=None, **opts):
    """merge another revision into working directory

    The current working directory is updated with all changes made in
    the requested revision since the last common predecessor revision.

    Files that changed between either parent are marked as changed for
    the next commit and a commit must be performed before any further
    updates to the repository are allowed. The next commit will have
    two parents.

    ``--tool`` can be used to specify the merge tool used for file
    merges. It overrides the HGMERGE environment variable and your
    configuration files. See :hg:`help merge-tools` for options.

    If no revision is specified, the working directory's parent is a
    head revision, and the current branch contains exactly one other
    head, the other head is merged with by default. Otherwise, an
    explicit revision with which to merge must be provided.

    See :hg:`help resolve` for information on handling file conflicts.

    To undo an uncommitted merge, use :hg:`update --clean .` which
    will check out a clean copy of the original merge parent, losing
    all changes.

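    For example, to first preview and then perform a merge with another head
    (``stable`` below is only a placeholder for whatever revision you intend
    to merge)::

        hg merge -P stable
        hg merge stable
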
    Returns 0 on success, 1 if there are unresolved files.
    """

    opts = pycompat.byteskwargs(opts)
    if opts.get('rev') and node:
        raise error.Abort(_("please specify just one revision"))
    if not node:
        node = opts.get('rev')

    if node:
        node = scmutil.revsingle(repo, node).node()

    if not node:
        node = repo[destutil.destmerge(repo)].node()

    if opts.get('preview'):
        # find nodes that are ancestors of p2 but not of p1
        p1 = repo.lookup('.')
        p2 = repo.lookup(node)
        nodes = repo.changelog.findmissing(common=[p1], heads=[p2])

        displayer = cmdutil.show_changeset(ui, repo, opts)
        for node in nodes:
            displayer.show(repo[node])
        displayer.close()
        return 0

    try:
        # ui.forcemerge is an internal variable, do not document
        repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''), 'merge')
        force = opts.get('force')
        labels = ['working copy', 'merge rev']
        return hg.merge(repo, node, force=force, mergeforce=force,
                        labels=labels)
    finally:
        ui.setconfig('ui', 'forcemerge', '', 'merge')

@command('outgoing|out',
    [('f', 'force', None, _('run even when the destination is unrelated')),
     ('r', 'rev', [],
      _('a changeset intended to be included in the destination'), _('REV')),
     ('n', 'newest-first', None, _('show newest record first')),
     ('B', 'bookmarks', False, _('compare bookmarks')),
     ('b', 'branch', [], _('a specific branch you would like to push'),
      _('BRANCH')),
    ] + logopts + remoteopts + subrepoopts,
    _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]'))
def outgoing(ui, repo, dest=None, **opts):
    """show changesets not found in the destination

    Show changesets not found in the specified destination repository
    or the default push location. These are the changesets that would
    be pushed if a push was requested.

    See pull for details of valid destination formats.

    .. container:: verbose

      With -B/--bookmarks, the result of bookmark comparison between
      local and remote repositories is displayed. With -v/--verbose,
      status is also displayed for each bookmark like below::

        BM1               01234567890a   added
        BM2                              deleted
        BM3               234567890abc   advanced
        BM4               34567890abcd   diverged
        BM5               4567890abcde   changed

      The action taken when pushing depends on the
      status of each bookmark:

      :``added``: push with ``-B`` will create it
      :``deleted``: push with ``-B`` will delete it
      :``advanced``: push will update it
      :``diverged``: push with ``-B`` will update it
      :``changed``: push with ``-B`` will update it

      From the point of view of pushing behavior, bookmarks
      existing only in the remote repository are treated as
      ``deleted``, even if they are in fact added remotely.

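      For example, to list the changesets that would be sent to a
      hypothetical path alias ``my-fork``, and to compare bookmarks with
      it::

        hg outgoing my-fork
        hg outgoing -B my-fork
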
    Returns 0 if there are outgoing changes, 1 otherwise.
    """
    opts = pycompat.byteskwargs(opts)
    if opts.get('graph'):
        cmdutil.checkunsupportedgraphflags([], opts)
        o, other = hg._outgoing(ui, repo, dest, opts)
        if not o:
            cmdutil.outgoinghooks(ui, repo, other, opts, o)
            return

        revdag = cmdutil.graphrevs(repo, o, opts)
        ui.pager('outgoing')
        displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
        cmdutil.displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges)
        cmdutil.outgoinghooks(ui, repo, other, opts, o)
        return 0

    if opts.get('bookmarks'):
        dest = ui.expandpath(dest or 'default-push', dest or 'default')
        dest, branches = hg.parseurl(dest, opts.get('branch'))
        other = hg.peer(repo, opts, dest)
        if 'bookmarks' not in other.listkeys('namespaces'):
            ui.warn(_("remote doesn't support bookmarks\n"))
            return 0
        ui.status(_('comparing with %s\n') % util.hidepassword(dest))
        ui.pager('outgoing')
        return bookmarks.outgoing(ui, repo, other)

    repo._subtoppath = ui.expandpath(dest or 'default-push', dest or 'default')
    try:
        return hg.outgoing(ui, repo, dest, opts)
    finally:
        del repo._subtoppath

@command('parents',
    [('r', 'rev', '', _('show parents of the specified revision'), _('REV')),
    ] + templateopts,
    _('[-r REV] [FILE]'),
    inferrepo=True)
def parents(ui, repo, file_=None, **opts):
    """show the parents of the working directory or revision (DEPRECATED)

    Print the working directory's parent revisions. If a revision is
    given via -r/--rev, the parent of that revision will be printed.
    If a file argument is given, the revision in which the file was
    last changed (before the working directory revision or the
    argument to --rev if given) is printed.

    This command is equivalent to::

        hg log -r "p1()+p2()" or
        hg log -r "p1(REV)+p2(REV)" or
        hg log -r "max(::p1() and file(FILE))+max(::p2() and file(FILE))" or
        hg log -r "max(::p1(REV) and file(FILE))+max(::p2(REV) and file(FILE))"

    See :hg:`summary` and :hg:`help revsets` for related information.

    Returns 0 on success.
    """

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'), None)

    if file_:
        m = scmutil.match(ctx, (file_,), opts)
        if m.anypats() or len(m.files()) != 1:
            raise error.Abort(_('can only specify an explicit filename'))
        file_ = m.files()[0]
        filenodes = []
        for cp in ctx.parents():
            if not cp:
                continue
            try:
                filenodes.append(cp.filenode(file_))
            except error.LookupError:
                pass
        if not filenodes:
            raise error.Abort(_("'%s' not found in manifest!") % file_)
        p = []
        for fn in filenodes:
            fctx = repo.filectx(file_, fileid=fn)
            p.append(fctx.node())
    else:
        p = [cp.node() for cp in ctx.parents()]

    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in p:
        if n != nullid:
            displayer.show(repo[n])
    displayer.close()

@command('paths', formatteropts, _('[NAME]'), optionalrepo=True)
def paths(ui, repo, search=None, **opts):
    """show aliases for remote repositories

    Show definition of symbolic path name NAME. If no name is given,
    show definition of all available names.

    Option -q/--quiet suppresses all output when searching for NAME
    and shows only the path names when listing all definitions.

    Path names are defined in the [paths] section of your
    configuration file and in ``/etc/mercurial/hgrc``. If run inside a
    repository, ``.hg/hgrc`` is used, too.

    The path names ``default`` and ``default-push`` have a special
    meaning. When performing a push or pull operation, they are used
    as fallbacks if no location is specified on the command-line.
    When ``default-push`` is set, it will be used for push and
    ``default`` will be used for pull; otherwise ``default`` is used
    as the fallback for both. When cloning a repository, the clone
    source is written as ``default`` in ``.hg/hgrc``.

    .. note::

       ``default`` and ``default-push`` apply to all inbound (e.g.
       :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email`
       and :hg:`bundle`) operations.

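    As an illustration only, a ``[paths]`` section might look like this (the
    aliases and URLs below are placeholders)::

        [paths]
        default = https://www.mercurial-scm.org/repo/hg
        my-fork = ssh://user@example.com/hg

    With such a configuration, :hg:`pull` uses ``default`` while
    :hg:`push my-fork` pushes to the second alias.
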
    See :hg:`help urls` for more information.

    Returns 0 on success.
    """

    opts = pycompat.byteskwargs(opts)
    ui.pager('paths')
    if search:
        pathitems = [(name, path) for name, path in ui.paths.iteritems()
                     if name == search]
    else:
        pathitems = sorted(ui.paths.iteritems())

    fm = ui.formatter('paths', opts)
    if fm.isplain():
        hidepassword = util.hidepassword
    else:
        hidepassword = str
    if ui.quiet:
        namefmt = '%s\n'
    else:
        namefmt = '%s = '
    showsubopts = not search and not ui.quiet

    for name, path in pathitems:
        fm.startitem()
        fm.condwrite(not search, 'name', namefmt, name)
        fm.condwrite(not ui.quiet, 'url', '%s\n', hidepassword(path.rawloc))
        for subopt, value in sorted(path.suboptions.items()):
            assert subopt not in ('name', 'url')
            if showsubopts:
                fm.plain('%s:%s = ' % (name, subopt))
            fm.condwrite(showsubopts, subopt, '%s\n', value)

    fm.end()

    if search and not pathitems:
        if not ui.quiet:
            ui.warn(_("not found!\n"))
        return 1
    else:
        return 0

@command('phase',
    [('p', 'public', False, _('set changeset phase to public')),
     ('d', 'draft', False, _('set changeset phase to draft')),
     ('s', 'secret', False, _('set changeset phase to secret')),
     ('f', 'force', False, _('allow to move boundary backward')),
     ('r', 'rev', [], _('target revision'), _('REV')),
    ],
    _('[-p|-d|-s] [-f] [-r] [REV...]'))
def phase(ui, repo, *revs, **opts):
    """set or show the current phase name

    With no argument, show the phase name of the current revision(s).

    With one of -p/--public, -d/--draft or -s/--secret, change the
    phase value of the specified revisions.

    Unless -f/--force is specified, :hg:`phase` won't move changesets from a
    lower phase to a higher phase. Phases are ordered as follows::

        public < draft < secret

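    For example, assuming changeset 5 exists and was published by mistake,
    it can be forced back to the draft phase with::

        hg phase --draft --force -r 5
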
    Returns 0 on success, 1 if some phases could not be changed.

    (For more information about the phases concept, see :hg:`help phases`.)
    """
    opts = pycompat.byteskwargs(opts)
    # search for a unique phase argument
    targetphase = None
    for idx, name in enumerate(phases.phasenames):
        if opts[name]:
            if targetphase is not None:
                raise error.Abort(_('only one phase can be specified'))
            targetphase = idx

    # look for specified revision
    revs = list(revs)
    revs.extend(opts['rev'])
    if not revs:
        # display both parents as the second parent phase can influence
        # the phase of a merge commit
        revs = [c.rev() for c in repo[None].parents()]

    revs = scmutil.revrange(repo, revs)

    lock = None
    ret = 0
    if targetphase is None:
        # display
        for r in revs:
            ctx = repo[r]
            ui.write('%i: %s\n' % (ctx.rev(), ctx.phasestr()))
    else:
        tr = None
        lock = repo.lock()
        try:
            tr = repo.transaction("phase")
            # set phase
            if not revs:
                raise error.Abort(_('empty revision set'))
            nodes = [repo[r].node() for r in revs]
            # moving revision from public to draft may hide them
            # We have to check result on an unfiltered repository
            unfi = repo.unfiltered()
            getphase = unfi._phasecache.phase
            olddata = [getphase(unfi, r) for r in unfi]
            phases.advanceboundary(repo, tr, targetphase, nodes)
            if opts['force']:
                phases.retractboundary(repo, tr, targetphase, nodes)
            tr.close()
        finally:
            if tr is not None:
                tr.release()
            lock.release()
        getphase = unfi._phasecache.phase
        newdata = [getphase(unfi, r) for r in unfi]
        changes = sum(newdata[r] != olddata[r] for r in unfi)
        cl = unfi.changelog
        rejected = [n for n in nodes
                    if newdata[cl.rev(n)] < targetphase]
        if rejected:
            ui.warn(_('cannot move %i changesets to a higher '
                      'phase, use --force\n') % len(rejected))
            ret = 1
        if changes:
            msg = _('phase changed for %i changesets\n') % changes
            if ret:
                ui.status(msg)
            else:
                ui.note(msg)
        else:
            ui.warn(_('no phases changed\n'))
    return ret

def postincoming(ui, repo, modheads, optupdate, checkout, brev):
    """Run after a changegroup has been added via pull/unbundle

    This takes the arguments below:

    :modheads: change of heads by pull/unbundle
    :optupdate: updating working directory is needed or not
    :checkout: update destination revision (or None to default destination)
    :brev: a name, which might be a bookmark to be activated after updating
    """
    if modheads == 0:
        return
    if optupdate:
        try:
            return hg.updatetotally(ui, repo, checkout, brev)
        except error.UpdateAbort as inst:
            msg = _("not updating: %s") % str(inst)
            hint = inst.hint
            raise error.UpdateAbort(msg, hint=hint)
    if modheads > 1:
        currentbranchheads = len(repo.branchheads())
        if currentbranchheads == modheads:
            ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
        elif currentbranchheads > 1:
            ui.status(_("(run 'hg heads .' to see heads, 'hg merge' to "
                        "merge)\n"))
        else:
            ui.status(_("(run 'hg heads' to see heads)\n"))
    else:
        ui.status(_("(run 'hg update' to get a working copy)\n"))

@command('^pull',
    [('u', 'update', None,
      _('update to new branch head if changesets were pulled')),
     ('f', 'force', None, _('run even when remote repository is unrelated')),
     ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
     ('B', 'bookmark', [], _("bookmark to pull"), _('BOOKMARK')),
     ('b', 'branch', [], _('a specific branch you would like to pull'),
      _('BRANCH')),
    ] + remoteopts,
    _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]'))
def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source

    Pull changes from a remote repository to a local one.

    This finds all changes from the repository at the specified path
    or URL and adds them to a local repository (the current one unless
    -R is specified). By default, this does not update the copy of the
    project in the working directory.

    Use :hg:`incoming` if you want to see what would have been added
    by a pull at the time you issued this command. If you then decide
    to add those changes to the repository, you should use :hg:`pull
    -r X` where ``X`` is the last changeset listed by :hg:`incoming`.

    If SOURCE is omitted, the 'default' path will be used.
    See :hg:`help urls` for more information.

    Specifying bookmark as ``.`` is equivalent to specifying the active
    bookmark's name.

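    For example, to pull a single bookmark from the default source and update
    the working directory to it (``@feature`` is only a placeholder bookmark
    name)::

        hg pull -u -B @feature
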
    Returns 0 on success, 1 if an update had unresolved files.
    """

    opts = pycompat.byteskwargs(opts)
    if ui.configbool('commands', 'update.requiredest') and opts.get('update'):
        msg = _('update destination required by configuration')
        hint = _('use hg pull followed by hg update DEST')
        raise error.Abort(msg, hint=hint)

    source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
    ui.status(_('pulling from %s\n') % util.hidepassword(source))
    other = hg.peer(repo, opts, source)
    try:
        revs, checkout = hg.addbranchrevs(repo, other, branches,
                                          opts.get('rev'))


        pullopargs = {}
        if opts.get('bookmark'):
            if not revs:
                revs = []
            # The list of bookmarks used here is not the one used to actually
            # update the bookmark name. This can result in the revision pulled
            # not ending up with the name of the bookmark because of a race
            # condition on the server. (See issue 4689 for details)
            remotebookmarks = other.listkeys('bookmarks')
            pullopargs['remotebookmarks'] = remotebookmarks
            for b in opts['bookmark']:
                b = repo._bookmarks.expandname(b)
                if b not in remotebookmarks:
                    raise error.Abort(_('remote bookmark %s not found!') % b)
                revs.append(remotebookmarks[b])

        if revs:
            try:
                # When 'rev' is a bookmark name, we cannot guarantee that it
                # will be updated with that name because of a race condition
                # server side. (See issue 4689 for details)
                oldrevs = revs
                revs = [] # actually, nodes
                for r in oldrevs:
                    node = other.lookup(r)
                    revs.append(node)
                    if r == checkout:
                        checkout = node
            except error.CapabilityError:
                err = _("other repository doesn't support revision lookup, "
                        "so a rev cannot be specified.")
                raise error.Abort(err)

        pullopargs.update(opts.get('opargs', {}))
        modheads = exchange.pull(repo, other, heads=revs,
                                 force=opts.get('force'),
                                 bookmarks=opts.get('bookmark', ()),
                                 opargs=pullopargs).cgresult

        # brev is a name, which might be a bookmark to be activated at
        # the end of the update. In other words, it is an explicit
        # destination of the update
        brev = None

        if checkout:
            checkout = str(repo.changelog.rev(checkout))

        # order below depends on implementation of
        # hg.addbranchrevs(). opts['bookmark'] is ignored,
        # because 'checkout' is determined without it.
        if opts.get('rev'):
            brev = opts['rev'][0]
        elif opts.get('branch'):
            brev = opts['branch'][0]
        else:
            brev = branches[0]
        repo._subtoppath = source
        try:
            ret = postincoming(ui, repo, modheads, opts.get('update'),
                               checkout, brev)

        finally:
            del repo._subtoppath

    finally:
        other.close()
    return ret

@command('^push',
    [('f', 'force', None, _('force push')),
     ('r', 'rev', [],
      _('a changeset intended to be included in the destination'),
      _('REV')),
     ('B', 'bookmark', [], _("bookmark to push"), _('BOOKMARK')),
     ('b', 'branch', [],
      _('a specific branch you would like to push'), _('BRANCH')),
     ('', 'new-branch', False, _('allow pushing a new branch')),
    ] + remoteopts,
    _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]'))
def push(ui, repo, dest=None, **opts):
    """push changes to the specified destination

    Push changesets from the local repository to the specified
    destination.

    This operation is symmetrical to pull: it is identical to a pull
    in the destination repository from the current one.

    By default, push will not allow creation of new heads at the
    destination, since multiple heads would make it unclear which head
    to use. In this situation, it is recommended to pull and merge
    before pushing.

    Use --new-branch if you want to allow push to create a new named
    branch that is not present at the destination. This allows you to
    only create a new branch without forcing other changes.

    .. note::

       Extra care should be taken with the -f/--force option,
       which will push all new heads on all branches, an action which will
       almost always cause confusion for collaborators.

    If -r/--rev is used, the specified revision and all its ancestors
    will be pushed to the remote repository.

    If -B/--bookmark is used, the specified bookmarked revision, its
    ancestors, and the bookmark will be pushed to the remote
    repository. Specifying ``.`` is equivalent to specifying the active
    bookmark's name.

    Please see :hg:`help urls` for important details about ``ssh://``
    URLs. If DESTINATION is omitted, a default path will be used.

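    For example, to push a bookmarked line of development, including the
    bookmark itself (``@feature`` is only a placeholder bookmark name)::

        hg push -B @feature
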
    Returns 0 if push was successful, 1 if nothing to push.
    """

    opts = pycompat.byteskwargs(opts)
    if opts.get('bookmark'):
        ui.setconfig('bookmarks', 'pushing', opts['bookmark'], 'push')
        for b in opts['bookmark']:
            # translate -B options to -r so changesets get pushed
            b = repo._bookmarks.expandname(b)
            if b in repo._bookmarks:
                opts.setdefault('rev', []).append(b)
            else:
                # if we try to push a deleted bookmark, translate it to null
                # this lets simultaneous -r, -b options continue working
                opts.setdefault('rev', []).append("null")

    path = ui.paths.getpath(dest, default=('default-push', 'default'))
    if not path:
        raise error.Abort(_('default repository not configured!'),
                          hint=_("see 'hg help config.paths'"))
    dest = path.pushloc or path.loc
    branches = (path.branch, opts.get('branch') or [])
    ui.status(_('pushing to %s\n') % util.hidepassword(dest))
    revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
    other = hg.peer(repo, opts, dest)

    if revs:
        revs = [repo.lookup(r) for r in scmutil.revrange(repo, revs)]
        if not revs:
            raise error.Abort(_("specified revisions evaluate to an empty set"),
                              hint=_("use different revision arguments"))
    elif path.pushrev:
        # It doesn't make any sense to specify ancestor revisions. So limit
        # to DAG heads to make discovery simpler.
        expr = revsetlang.formatspec('heads(%r)', path.pushrev)
        revs = scmutil.revrange(repo, [expr])
        revs = [repo[rev].node() for rev in revs]
        if not revs:
            raise error.Abort(_('default push revset for path evaluates to an '
                                'empty set'))

    repo._subtoppath = dest
    try:
        # push subrepos depth-first for coherent ordering
        c = repo['']
        subs = c.substate # only repos that are committed
        for s in sorted(subs):
            result = c.sub(s).push(opts)
            if result == 0:
                return not result
    finally:
        del repo._subtoppath
    pushop = exchange.push(repo, other, opts.get('force'), revs=revs,
                           newbranch=opts.get('new_branch'),
                           bookmarks=opts.get('bookmark', ()),
                           opargs=opts.get('opargs'))

    result = not pushop.cgresult

    if pushop.bkresult is not None:
        if pushop.bkresult == 2:
            result = 2
        elif not result and pushop.bkresult:
            result = 2

    return result

@command('recover', [])
def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an
    interrupted operation. It should only be necessary when Mercurial
    suggests it.

    Returns 0 if successful, 1 if nothing to recover or verify fails.
    """
    if repo.recover():
        return hg.verify(repo)
    return 1

@command('^remove|rm',
    [('A', 'after', None, _('record delete for missing files')),
     ('f', 'force', None,
      _('forget added files, delete modified files')),
    ] + subrepoopts + walkopts,
    _('[OPTION]... FILE...'),
    inferrepo=True)
def remove(ui, repo, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the current branch.

    This command schedules the files to be removed at the next commit.
    To undo a remove before that, see :hg:`revert`. To undo added
    files, see :hg:`forget`.

    .. container:: verbose

      -A/--after can be used to remove only files that have already
      been deleted, -f/--force can be used to force deletion, and -Af
      can be used to remove files from the next revision without
      deleting them from the working directory.

      The following table details the behavior of remove for different
      file states (columns) and option combinations (rows). The file
      states are Added [A], Clean [C], Modified [M] and Missing [!]
      (as reported by :hg:`status`). The actions are Warn, Remove
      (from branch) and Delete (from disk):

      ========= == == == ==
      opt/state A  C  M  !
      ========= == == == ==
      none      W  RD W  R
      -f        R  RD RD R
      -A        W  W  W  R
      -Af       R  R  R  R
      ========= == == == ==

      .. note::

         :hg:`remove` never deletes files in Added [A] state from the
         working directory, not even if ``--force`` is specified.

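      For example, the ``-Af`` row above can be used to stop tracking a file
      at the next commit while keeping it on disk (``build.log`` is only an
      illustrative file name)::

        hg remove -Af build.log
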
    Returns 0 on success, 1 if any warnings encountered.
    """

    opts = pycompat.byteskwargs(opts)
    after, force = opts.get('after'), opts.get('force')
    if not pats and not after:
        raise error.Abort(_('no files specified'))

    m = scmutil.match(repo[None], pats, opts)
    subrepos = opts.get('subrepos')
    return cmdutil.remove(ui, repo, m, "", after, force, subrepos)

@command('rename|move|mv',
    [('A', 'after', None, _('record a rename that has already occurred')),
     ('f', 'force', None, _('forcibly copy over an existing managed file')),
    ] + walkopts + dryrunopts,
    _('[OPTION]... SOURCE... DEST'))
def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion. If dest
    is a directory, copies are put in that directory. If dest is a
    file, there can only be one source.

    By default, this command copies the contents of files as they
    exist in the working directory. If invoked with -A/--after, the
    operation is recorded, but no copying is performed.

    This command takes effect at the next commit. To undo a rename
    before that, see :hg:`revert`.

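    For example, to move every ``.c`` file from the current directory into a
    hypothetical ``src/`` subdirectory, recording the renames::

        hg rename "glob:*.c" src/
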
4167 Returns 0 on success, 1 if errors are encountered.
4167 Returns 0 on success, 1 if errors are encountered.
4168 """
4168 """
4169 opts = pycompat.byteskwargs(opts)
4169 opts = pycompat.byteskwargs(opts)
4170 with repo.wlock(False):
4170 with repo.wlock(False):
4171 return cmdutil.copy(ui, repo, pats, opts, rename=True)
4171 return cmdutil.copy(ui, repo, pats, opts, rename=True)
4172
4172
4173 @command('resolve',
4173 @command('resolve',
4174 [('a', 'all', None, _('select all unresolved files')),
4174 [('a', 'all', None, _('select all unresolved files')),
4175 ('l', 'list', None, _('list state of files needing merge')),
4175 ('l', 'list', None, _('list state of files needing merge')),
4176 ('m', 'mark', None, _('mark files as resolved')),
4176 ('m', 'mark', None, _('mark files as resolved')),
4177 ('u', 'unmark', None, _('mark files as unresolved')),
4177 ('u', 'unmark', None, _('mark files as unresolved')),
4178 ('n', 'no-status', None, _('hide status prefix'))]
4178 ('n', 'no-status', None, _('hide status prefix'))]
4179 + mergetoolopts + walkopts + formatteropts,
4179 + mergetoolopts + walkopts + formatteropts,
4180 _('[OPTION]... [FILE]...'),
4180 _('[OPTION]... [FILE]...'),
4181 inferrepo=True)
4181 inferrepo=True)
4182 def resolve(ui, repo, *pats, **opts):
4182 def resolve(ui, repo, *pats, **opts):
4183 """redo merges or set/view the merge status of files
4183 """redo merges or set/view the merge status of files
4184
4184
4185 Merges with unresolved conflicts are often the result of
4185 Merges with unresolved conflicts are often the result of
4186 non-interactive merging using the ``internal:merge`` configuration
4186 non-interactive merging using the ``internal:merge`` configuration
4187 setting, or a command-line merge tool like ``diff3``. The resolve
4187 setting, or a command-line merge tool like ``diff3``. The resolve
4188 command is used to manage the files involved in a merge, after
4188 command is used to manage the files involved in a merge, after
4189 :hg:`merge` has been run, and before :hg:`commit` is run (i.e. the
4189 :hg:`merge` has been run, and before :hg:`commit` is run (i.e. the
4190 working directory must have two parents). See :hg:`help
4190 working directory must have two parents). See :hg:`help
4191 merge-tools` for information on configuring merge tools.
4191 merge-tools` for information on configuring merge tools.
4192
4192
4193 The resolve command can be used in the following ways:
4193 The resolve command can be used in the following ways:
4194
4194
4195 - :hg:`resolve [--tool TOOL] FILE...`: attempt to re-merge the specified
4195 - :hg:`resolve [--tool TOOL] FILE...`: attempt to re-merge the specified
4196 files, discarding any previous merge attempts. Re-merging is not
4196 files, discarding any previous merge attempts. Re-merging is not
4197 performed for files already marked as resolved. Use ``--all/-a``
4197 performed for files already marked as resolved. Use ``--all/-a``
4198 to select all unresolved files. ``--tool`` can be used to specify
4198 to select all unresolved files. ``--tool`` can be used to specify
4199 the merge tool used for the given files. It overrides the HGMERGE
4199 the merge tool used for the given files. It overrides the HGMERGE
4200 environment variable and your configuration files. Previous file
4200 environment variable and your configuration files. Previous file
4201 contents are saved with a ``.orig`` suffix.
4201 contents are saved with a ``.orig`` suffix.
4202
4202
4203 - :hg:`resolve -m [FILE]`: mark a file as having been resolved
4203 - :hg:`resolve -m [FILE]`: mark a file as having been resolved
4204 (e.g. after having manually fixed up the files). The default is
4204 (e.g. after having manually fixed up the files). The default is
4205 to mark all unresolved files.
4205 to mark all unresolved files.
4206
4206
4207 - :hg:`resolve -u [FILE]...`: mark a file as unresolved. The
4207 - :hg:`resolve -u [FILE]...`: mark a file as unresolved. The
4208 default is to mark all resolved files.
4208 default is to mark all resolved files.
4209
4209
4210 - :hg:`resolve -l`: list files which had or still have conflicts.
4210 - :hg:`resolve -l`: list files which had or still have conflicts.
4211 In the printed list, ``U`` = unresolved and ``R`` = resolved.
4211 In the printed list, ``U`` = unresolved and ``R`` = resolved.
4212 You can use ``set:unresolved()`` or ``set:resolved()`` to filter
4212 You can use ``set:unresolved()`` or ``set:resolved()`` to filter
4213 the list. See :hg:`help filesets` for details.
4213 the list. See :hg:`help filesets` for details.
4214
4214
4215 .. note::
4215 .. note::
4216
4216
4217 Mercurial will not let you commit files with unresolved merge
4217 Mercurial will not let you commit files with unresolved merge
4218 conflicts. You must use :hg:`resolve -m ...` before you can
4218 conflicts. You must use :hg:`resolve -m ...` before you can
4219 commit after a conflicting merge.
4219 commit after a conflicting merge.
4220
4220
4221 Returns 0 on success, 1 if any files fail a resolve attempt.
4221 Returns 0 on success, 1 if any files fail a resolve attempt.
4222 """
4222 """
4223
4223
4224 opts = pycompat.byteskwargs(opts)
4224 opts = pycompat.byteskwargs(opts)
4225 flaglist = 'all mark unmark list no_status'.split()
4225 flaglist = 'all mark unmark list no_status'.split()
4226 all, mark, unmark, show, nostatus = \
4226 all, mark, unmark, show, nostatus = \
4227 [opts.get(o) for o in flaglist]
4227 [opts.get(o) for o in flaglist]
4228
4228
4229 if (show and (mark or unmark)) or (mark and unmark):
4229 if (show and (mark or unmark)) or (mark and unmark):
4230 raise error.Abort(_("too many options specified"))
4230 raise error.Abort(_("too many options specified"))
4231 if pats and all:
4231 if pats and all:
4232 raise error.Abort(_("can't specify --all and patterns"))
4232 raise error.Abort(_("can't specify --all and patterns"))
4233 if not (all or pats or show or mark or unmark):
4233 if not (all or pats or show or mark or unmark):
4234 raise error.Abort(_('no files or directories specified'),
4234 raise error.Abort(_('no files or directories specified'),
4235 hint=('use --all to re-merge all unresolved files'))
4235 hint=('use --all to re-merge all unresolved files'))
4236
4236
4237 if show:
4237 if show:
4238 ui.pager('resolve')
4238 ui.pager('resolve')
4239 fm = ui.formatter('resolve', opts)
4239 fm = ui.formatter('resolve', opts)
4240 ms = mergemod.mergestate.read(repo)
4240 ms = mergemod.mergestate.read(repo)
4241 m = scmutil.match(repo[None], pats, opts)
4241 m = scmutil.match(repo[None], pats, opts)
4242 for f in ms:
4242 for f in ms:
4243 if not m(f):
4243 if not m(f):
4244 continue
4244 continue
4245 l = 'resolve.' + {'u': 'unresolved', 'r': 'resolved',
4245 l = 'resolve.' + {'u': 'unresolved', 'r': 'resolved',
4246 'd': 'driverresolved'}[ms[f]]
4246 'd': 'driverresolved'}[ms[f]]
4247 fm.startitem()
4247 fm.startitem()
4248 fm.condwrite(not nostatus, 'status', '%s ', ms[f].upper(), label=l)
4248 fm.condwrite(not nostatus, 'status', '%s ', ms[f].upper(), label=l)
4249 fm.write('path', '%s\n', f, label=l)
4249 fm.write('path', '%s\n', f, label=l)
4250 fm.end()
4250 fm.end()
4251 return 0
4251 return 0
4252
4252
4253 with repo.wlock():
4253 with repo.wlock():
4254 ms = mergemod.mergestate.read(repo)
4254 ms = mergemod.mergestate.read(repo)
4255
4255
4256 if not (ms.active() or repo.dirstate.p2() != nullid):
4256 if not (ms.active() or repo.dirstate.p2() != nullid):
4257 raise error.Abort(
4257 raise error.Abort(
4258 _('resolve command not applicable when not merging'))
4258 _('resolve command not applicable when not merging'))
4259
4259
4260 wctx = repo[None]
4260 wctx = repo[None]
4261
4261
4262 if ms.mergedriver and ms.mdstate() == 'u':
4262 if ms.mergedriver and ms.mdstate() == 'u':
4263 proceed = mergemod.driverpreprocess(repo, ms, wctx)
4263 proceed = mergemod.driverpreprocess(repo, ms, wctx)
4264 ms.commit()
4264 ms.commit()
4265 # allow mark and unmark to go through
4265 # allow mark and unmark to go through
4266 if not mark and not unmark and not proceed:
4266 if not mark and not unmark and not proceed:
4267 return 1
4267 return 1
4268
4268
4269 m = scmutil.match(wctx, pats, opts)
4269 m = scmutil.match(wctx, pats, opts)
4270 ret = 0
4270 ret = 0
4271 didwork = False
4271 didwork = False
4272 runconclude = False
4272 runconclude = False
4273
4273
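# Descriptive note (editorial): the loop below is the first of two passes.
# Every matched file is preresolved; files whose merge cannot be finished in
# that step are queued in `tocomplete` and re-merged in the second pass
# further down, after which the pre-resolve `.resolve` backups replace
# filemerge's `.orig` copies.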
4274 tocomplete = []
4274 tocomplete = []
4275 for f in ms:
4275 for f in ms:
4276 if not m(f):
4276 if not m(f):
4277 continue
4277 continue
4278
4278
4279 didwork = True
4279 didwork = True
4280
4280
4281 # don't let driver-resolved files be marked, and run the conclude
4281 # don't let driver-resolved files be marked, and run the conclude
4282 # step if asked to resolve
4282 # step if asked to resolve
4283 if ms[f] == "d":
4283 if ms[f] == "d":
4284 exact = m.exact(f)
4284 exact = m.exact(f)
4285 if mark:
4285 if mark:
4286 if exact:
4286 if exact:
4287 ui.warn(_('not marking %s as it is driver-resolved\n')
4287 ui.warn(_('not marking %s as it is driver-resolved\n')
4288 % f)
4288 % f)
4289 elif unmark:
4289 elif unmark:
4290 if exact:
4290 if exact:
4291 ui.warn(_('not unmarking %s as it is driver-resolved\n')
4291 ui.warn(_('not unmarking %s as it is driver-resolved\n')
4292 % f)
4292 % f)
4293 else:
4293 else:
4294 runconclude = True
4294 runconclude = True
4295 continue
4295 continue
4296
4296
4297 if mark:
4297 if mark:
4298 ms.mark(f, "r")
4298 ms.mark(f, "r")
4299 elif unmark:
4299 elif unmark:
4300 ms.mark(f, "u")
4300 ms.mark(f, "u")
4301 else:
4301 else:
4302 # backup pre-resolve (merge uses .orig for its own purposes)
4302 # backup pre-resolve (merge uses .orig for its own purposes)
4303 a = repo.wjoin(f)
4303 a = repo.wjoin(f)
4304 try:
4304 try:
4305 util.copyfile(a, a + ".resolve")
4305 util.copyfile(a, a + ".resolve")
4306 except (IOError, OSError) as inst:
4306 except (IOError, OSError) as inst:
4307 if inst.errno != errno.ENOENT:
4307 if inst.errno != errno.ENOENT:
4308 raise
4308 raise
4309
4309
4310 try:
4310 try:
4311 # preresolve file
4311 # preresolve file
4312 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
4312 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
4313 'resolve')
4313 'resolve')
4314 complete, r = ms.preresolve(f, wctx)
4314 complete, r = ms.preresolve(f, wctx)
4315 if not complete:
4315 if not complete:
4316 tocomplete.append(f)
4316 tocomplete.append(f)
4317 elif r:
4317 elif r:
4318 ret = 1
4318 ret = 1
4319 finally:
4319 finally:
4320 ui.setconfig('ui', 'forcemerge', '', 'resolve')
4320 ui.setconfig('ui', 'forcemerge', '', 'resolve')
4321 ms.commit()
4321 ms.commit()
4322
4322
4323 # replace filemerge's .orig file with our resolve file, but only
4323 # replace filemerge's .orig file with our resolve file, but only
4324 # for merges that are complete
4324 # for merges that are complete
4325 if complete:
4325 if complete:
4326 try:
4326 try:
4327 util.rename(a + ".resolve",
4327 util.rename(a + ".resolve",
4328 scmutil.origpath(ui, repo, a))
4328 scmutil.origpath(ui, repo, a))
4329 except OSError as inst:
4329 except OSError as inst:
4330 if inst.errno != errno.ENOENT:
4330 if inst.errno != errno.ENOENT:
4331 raise
4331 raise
4332
4332
4333 for f in tocomplete:
4333 for f in tocomplete:
4334 try:
4334 try:
4335 # resolve file
4335 # resolve file
4336 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
4336 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
4337 'resolve')
4337 'resolve')
4338 r = ms.resolve(f, wctx)
4338 r = ms.resolve(f, wctx)
4339 if r:
4339 if r:
4340 ret = 1
4340 ret = 1
4341 finally:
4341 finally:
4342 ui.setconfig('ui', 'forcemerge', '', 'resolve')
4342 ui.setconfig('ui', 'forcemerge', '', 'resolve')
4343 ms.commit()
4343 ms.commit()
4344
4344
4345 # replace filemerge's .orig file with our resolve file
4345 # replace filemerge's .orig file with our resolve file
4346 a = repo.wjoin(f)
4346 a = repo.wjoin(f)
4347 try:
4347 try:
4348 util.rename(a + ".resolve", scmutil.origpath(ui, repo, a))
4348 util.rename(a + ".resolve", scmutil.origpath(ui, repo, a))
4349 except OSError as inst:
4349 except OSError as inst:
4350 if inst.errno != errno.ENOENT:
4350 if inst.errno != errno.ENOENT:
4351 raise
4351 raise
4352
4352
4353 ms.commit()
4353 ms.commit()
4354 ms.recordactions()
4354 ms.recordactions()
4355
4355
4356 if not didwork and pats:
4356 if not didwork and pats:
4357 hint = None
4357 hint = None
4358 if not any([p for p in pats if p.find(':') >= 0]):
4358 if not any([p for p in pats if p.find(':') >= 0]):
4359 pats = ['path:%s' % p for p in pats]
4359 pats = ['path:%s' % p for p in pats]
4360 m = scmutil.match(wctx, pats, opts)
4360 m = scmutil.match(wctx, pats, opts)
4361 for f in ms:
4361 for f in ms:
4362 if not m(f):
4362 if not m(f):
4363 continue
4363 continue
4364 flags = ''.join(['-%s ' % o[0] for o in flaglist
4364 flags = ''.join(['-%s ' % o[0] for o in flaglist
4365 if opts.get(o)])
4365 if opts.get(o)])
4366 hint = _("(try: hg resolve %s%s)\n") % (
4366 hint = _("(try: hg resolve %s%s)\n") % (
4367 flags,
4367 flags,
4368 ' '.join(pats))
4368 ' '.join(pats))
4369 break
4369 break
4370 ui.warn(_("arguments do not match paths that need resolving\n"))
4370 ui.warn(_("arguments do not match paths that need resolving\n"))
4371 if hint:
4371 if hint:
4372 ui.warn(hint)
4372 ui.warn(hint)
4373 elif ms.mergedriver and ms.mdstate() != 's':
4373 elif ms.mergedriver and ms.mdstate() != 's':
4374 # run conclude step when either a driver-resolved file is requested
4374 # run conclude step when either a driver-resolved file is requested
4375 # or there are no driver-resolved files
4375 # or there are no driver-resolved files
4376 # we can't use 'ret' to determine whether any files are unresolved
4376 # we can't use 'ret' to determine whether any files are unresolved
4377 # because we might not have tried to resolve some
4377 # because we might not have tried to resolve some
4378 if ((runconclude or not list(ms.driverresolved()))
4378 if ((runconclude or not list(ms.driverresolved()))
4379 and not list(ms.unresolved())):
4379 and not list(ms.unresolved())):
4380 proceed = mergemod.driverconclude(repo, ms, wctx)
4380 proceed = mergemod.driverconclude(repo, ms, wctx)
4381 ms.commit()
4381 ms.commit()
4382 if not proceed:
4382 if not proceed:
4383 return 1
4383 return 1
4384
4384
4385 # Nudge users into finishing an unfinished operation
4385 # Nudge users into finishing an unfinished operation
4386 unresolvedf = list(ms.unresolved())
4386 unresolvedf = list(ms.unresolved())
4387 driverresolvedf = list(ms.driverresolved())
4387 driverresolvedf = list(ms.driverresolved())
4388 if not unresolvedf and not driverresolvedf:
4388 if not unresolvedf and not driverresolvedf:
4389 ui.status(_('(no more unresolved files)\n'))
4389 ui.status(_('(no more unresolved files)\n'))
4390 cmdutil.checkafterresolved(repo)
4390 cmdutil.checkafterresolved(repo)
4391 elif not unresolvedf:
4391 elif not unresolvedf:
4392 ui.status(_('(no more unresolved files -- '
4392 ui.status(_('(no more unresolved files -- '
4393 'run "hg resolve --all" to conclude)\n'))
4393 'run "hg resolve --all" to conclude)\n'))
4394
4394
4395 return ret
4395 return ret
4396
4396
4397 @command('revert',
4397 @command('revert',
4398 [('a', 'all', None, _('revert all changes when no arguments given')),
4398 [('a', 'all', None, _('revert all changes when no arguments given')),
4399 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
4399 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
4400 ('r', 'rev', '', _('revert to the specified revision'), _('REV')),
4400 ('r', 'rev', '', _('revert to the specified revision'), _('REV')),
4401 ('C', 'no-backup', None, _('do not save backup copies of files')),
4401 ('C', 'no-backup', None, _('do not save backup copies of files')),
4402 ('i', 'interactive', None,
4402 ('i', 'interactive', None,
4403 _('interactively select the changes (EXPERIMENTAL)')),
4403 _('interactively select the changes (EXPERIMENTAL)')),
4404 ] + walkopts + dryrunopts,
4404 ] + walkopts + dryrunopts,
4405 _('[OPTION]... [-r REV] [NAME]...'))
4405 _('[OPTION]... [-r REV] [NAME]...'))
4406 def revert(ui, repo, *pats, **opts):
4406 def revert(ui, repo, *pats, **opts):
4407 """restore files to their checkout state
4407 """restore files to their checkout state
4408
4408
4409 .. note::
4409 .. note::
4410
4410
4411 To check out earlier revisions, you should use :hg:`update REV`.
4411 To check out earlier revisions, you should use :hg:`update REV`.
4412 To cancel an uncommitted merge (and lose your changes),
4412 To cancel an uncommitted merge (and lose your changes),
4413 use :hg:`update --clean .`.
4413 use :hg:`update --clean .`.
4414
4414
4415 With no revision specified, revert the specified files or directories
4415 With no revision specified, revert the specified files or directories
4416 to the contents they had in the parent of the working directory.
4416 to the contents they had in the parent of the working directory.
4417 This restores the contents of files to an unmodified
4417 This restores the contents of files to an unmodified
4418 state and unschedules adds, removes, copies, and renames. If the
4418 state and unschedules adds, removes, copies, and renames. If the
4419 working directory has two parents, you must explicitly specify a
4419 working directory has two parents, you must explicitly specify a
4420 revision.
4420 revision.
4421
4421
4422 Using the -r/--rev or -d/--date options, revert the given files or
4422 Using the -r/--rev or -d/--date options, revert the given files or
4423 directories to their states as of a specific revision. Because
4423 directories to their states as of a specific revision. Because
4424 revert does not change the working directory parents, this will
4424 revert does not change the working directory parents, this will
4425 cause these files to appear modified. This can be helpful to "back
4425 cause these files to appear modified. This can be helpful to "back
4426 out" some or all of an earlier change. See :hg:`backout` for a
4426 out" some or all of an earlier change. See :hg:`backout` for a
4427 related method.
4427 related method.
4428
4428
4429 Modified files are saved with a .orig suffix before reverting.
4429 Modified files are saved with a .orig suffix before reverting.
4430 To disable these backups, use --no-backup. It is possible to store
4430 To disable these backups, use --no-backup. It is possible to store
4431 the backup files in a custom directory relative to the root of the
4431 the backup files in a custom directory relative to the root of the
4432 repository by setting the ``ui.origbackuppath`` configuration
4432 repository by setting the ``ui.origbackuppath`` configuration
4433 option.
4433 option.
4434
4434
4435 See :hg:`help dates` for a list of formats valid for -d/--date.
4435 See :hg:`help dates` for a list of formats valid for -d/--date.
4436
4436
4437 See :hg:`help backout` for a way to reverse the effect of an
4437 See :hg:`help backout` for a way to reverse the effect of an
4438 earlier changeset.
4438 earlier changeset.
4439
4439
4440 Returns 0 on success.
4440 Returns 0 on success.
4441 """
4441 """
4442
4442
4443 if opts.get("date"):
4443 if opts.get("date"):
4444 if opts.get("rev"):
4444 if opts.get("rev"):
4445 raise error.Abort(_("you can't specify a revision and a date"))
4445 raise error.Abort(_("you can't specify a revision and a date"))
4446 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
4446 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
4447
4447
4448 parent, p2 = repo.dirstate.parents()
4448 parent, p2 = repo.dirstate.parents()
4449 if not opts.get('rev') and p2 != nullid:
4449 if not opts.get('rev') and p2 != nullid:
4450 # revert after merge is a trap for new users (issue2915)
4450 # revert after merge is a trap for new users (issue2915)
4451 raise error.Abort(_('uncommitted merge with no revision specified'),
4451 raise error.Abort(_('uncommitted merge with no revision specified'),
4452 hint=_("use 'hg update' or see 'hg help revert'"))
4452 hint=_("use 'hg update' or see 'hg help revert'"))
4453
4453
4454 ctx = scmutil.revsingle(repo, opts.get('rev'))
4454 ctx = scmutil.revsingle(repo, opts.get('rev'))
4455
4455
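# Descriptive note (editorial): with no patterns and no --all/--interactive,
# revert aborts below; the hint it prints depends on whether the working
# directory is an uncommitted merge, whether a revision other than the
# working directory parent was requested, and whether there are uncommitted
# changes.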
4456 if (not (pats or opts.get('include') or opts.get('exclude') or
4456 if (not (pats or opts.get('include') or opts.get('exclude') or
4457 opts.get('all') or opts.get('interactive'))):
4457 opts.get('all') or opts.get('interactive'))):
4458 msg = _("no files or directories specified")
4458 msg = _("no files or directories specified")
4459 if p2 != nullid:
4459 if p2 != nullid:
4460 hint = _("uncommitted merge, use --all to discard all changes,"
4460 hint = _("uncommitted merge, use --all to discard all changes,"
4461 " or 'hg update -C .' to abort the merge")
4461 " or 'hg update -C .' to abort the merge")
4462 raise error.Abort(msg, hint=hint)
4462 raise error.Abort(msg, hint=hint)
4463 dirty = any(repo.status())
4463 dirty = any(repo.status())
4464 node = ctx.node()
4464 node = ctx.node()
4465 if node != parent:
4465 if node != parent:
4466 if dirty:
4466 if dirty:
4467 hint = _("uncommitted changes, use --all to discard all"
4467 hint = _("uncommitted changes, use --all to discard all"
4468 " changes, or 'hg update %s' to update") % ctx.rev()
4468 " changes, or 'hg update %s' to update") % ctx.rev()
4469 else:
4469 else:
4470 hint = _("use --all to revert all files,"
4470 hint = _("use --all to revert all files,"
4471 " or 'hg update %s' to update") % ctx.rev()
4471 " or 'hg update %s' to update") % ctx.rev()
4472 elif dirty:
4472 elif dirty:
4473 hint = _("uncommitted changes, use --all to discard all changes")
4473 hint = _("uncommitted changes, use --all to discard all changes")
4474 else:
4474 else:
4475 hint = _("use --all to revert all files")
4475 hint = _("use --all to revert all files")
4476 raise error.Abort(msg, hint=hint)
4476 raise error.Abort(msg, hint=hint)
4477
4477
4478 return cmdutil.revert(ui, repo, ctx, (parent, p2), *pats, **opts)
4478 return cmdutil.revert(ui, repo, ctx, (parent, p2), *pats, **opts)
4479
4479
4480 @command('rollback', dryrunopts +
4480 @command('rollback', dryrunopts +
4481 [('f', 'force', False, _('ignore safety measures'))])
4481 [('f', 'force', False, _('ignore safety measures'))])
4482 def rollback(ui, repo, **opts):
4482 def rollback(ui, repo, **opts):
4483 """roll back the last transaction (DANGEROUS) (DEPRECATED)
4483 """roll back the last transaction (DANGEROUS) (DEPRECATED)
4484
4484
4485 Please use :hg:`commit --amend` instead of rollback to correct
4485 Please use :hg:`commit --amend` instead of rollback to correct
4486 mistakes in the last commit.
4486 mistakes in the last commit.
4487
4487
4488 This command should be used with care. There is only one level of
4488 This command should be used with care. There is only one level of
4489 rollback, and there is no way to undo a rollback. It will also
4489 rollback, and there is no way to undo a rollback. It will also
4490 restore the dirstate to what it was at the time of the last transaction, losing
4490 restore the dirstate to what it was at the time of the last transaction, losing
4491 any dirstate changes since that time. This command does not alter
4491 any dirstate changes since that time. This command does not alter
4492 the working directory.
4492 the working directory.
4493
4493
4494 Transactions are used to encapsulate the effects of all commands
4494 Transactions are used to encapsulate the effects of all commands
4495 that create new changesets or propagate existing changesets into a
4495 that create new changesets or propagate existing changesets into a
4496 repository.
4496 repository.
4497
4497
4498 .. container:: verbose
4498 .. container:: verbose
4499
4499
4500 For example, the following commands are transactional, and their
4500 For example, the following commands are transactional, and their
4501 effects can be rolled back:
4501 effects can be rolled back:
4502
4502
4503 - commit
4503 - commit
4504 - import
4504 - import
4505 - pull
4505 - pull
4506 - push (with this repository as the destination)
4506 - push (with this repository as the destination)
4507 - unbundle
4507 - unbundle
4508
4508
4509 To avoid permanent data loss, rollback will refuse to roll back a
4509 To avoid permanent data loss, rollback will refuse to roll back a
4510 commit transaction if the committed changeset isn't checked out. Use --force to
4510 commit transaction if the committed changeset isn't checked out. Use --force to
4511 override this protection.
4511 override this protection.
4512
4512
4513 The rollback command can be entirely disabled by setting the
4513 The rollback command can be entirely disabled by setting the
4514 ``ui.rollback`` configuration setting to false. If you're here
4514 ``ui.rollback`` configuration setting to false. If you're here
4515 because you want to use rollback and it's disabled, you can
4515 because you want to use rollback and it's disabled, you can
4516 re-enable the command by setting ``ui.rollback`` to true.
4516 re-enable the command by setting ``ui.rollback`` to true.
4517
4517
4518 This command is not intended for use on public repositories. Once
4518 This command is not intended for use on public repositories. Once
4519 changes are visible for pull by other users, rolling a transaction
4519 changes are visible for pull by other users, rolling a transaction
4520 back locally is ineffective (someone else may already have pulled
4520 back locally is ineffective (someone else may already have pulled
4521 the changes). Furthermore, a race is possible with readers of the
4521 the changes). Furthermore, a race is possible with readers of the
4522 repository; for example an in-progress pull from the repository
4522 repository; for example an in-progress pull from the repository
4523 may fail if a rollback is performed.
4523 may fail if a rollback is performed.
4524
4524
4525 Returns 0 on success, 1 if no rollback data is available.
4525 Returns 0 on success, 1 if no rollback data is available.
4526 """
4526 """
4527 if not ui.configbool('ui', 'rollback'):
4527 if not ui.configbool('ui', 'rollback'):
4528 raise error.Abort(_('rollback is disabled because it is unsafe'),
4528 raise error.Abort(_('rollback is disabled because it is unsafe'),
4529 hint=('see `hg help -v rollback` for information'))
4529 hint=('see `hg help -v rollback` for information'))
4530 return repo.rollback(dryrun=opts.get(r'dry_run'),
4530 return repo.rollback(dryrun=opts.get(r'dry_run'),
4531 force=opts.get(r'force'))
4531 force=opts.get(r'force'))
4532
4532
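# Editor's note (illustrative): per the help text above, a disabled rollback
# command can be re-enabled from an hgrc file, e.g.:
#
#   [ui]
#   rollback = true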
4533 @command('root', [])
4533 @command('root', [])
4534 def root(ui, repo):
4534 def root(ui, repo):
4535 """print the root (top) of the current working directory
4535 """print the root (top) of the current working directory
4536
4536
4537 Print the root directory of the current repository.
4537 Print the root directory of the current repository.
4538
4538
4539 Returns 0 on success.
4539 Returns 0 on success.
4540 """
4540 """
4541 ui.write(repo.root + "\n")
4541 ui.write(repo.root + "\n")
4542
4542
4543 @command('^serve',
4543 @command('^serve',
4544 [('A', 'accesslog', '', _('name of access log file to write to'),
4544 [('A', 'accesslog', '', _('name of access log file to write to'),
4545 _('FILE')),
4545 _('FILE')),
4546 ('d', 'daemon', None, _('run server in background')),
4546 ('d', 'daemon', None, _('run server in background')),
4547 ('', 'daemon-postexec', [], _('used internally by daemon mode')),
4547 ('', 'daemon-postexec', [], _('used internally by daemon mode')),
4548 ('E', 'errorlog', '', _('name of error log file to write to'), _('FILE')),
4548 ('E', 'errorlog', '', _('name of error log file to write to'), _('FILE')),
4549 # use string type, then we can check if something was passed
4549 # use string type, then we can check if something was passed
4550 ('p', 'port', '', _('port to listen on (default: 8000)'), _('PORT')),
4550 ('p', 'port', '', _('port to listen on (default: 8000)'), _('PORT')),
4551 ('a', 'address', '', _('address to listen on (default: all interfaces)'),
4551 ('a', 'address', '', _('address to listen on (default: all interfaces)'),
4552 _('ADDR')),
4552 _('ADDR')),
4553 ('', 'prefix', '', _('prefix path to serve from (default: server root)'),
4553 ('', 'prefix', '', _('prefix path to serve from (default: server root)'),
4554 _('PREFIX')),
4554 _('PREFIX')),
4555 ('n', 'name', '',
4555 ('n', 'name', '',
4556 _('name to show in web pages (default: working directory)'), _('NAME')),
4556 _('name to show in web pages (default: working directory)'), _('NAME')),
4557 ('', 'web-conf', '',
4557 ('', 'web-conf', '',
4558 _("name of the hgweb config file (see 'hg help hgweb')"), _('FILE')),
4558 _("name of the hgweb config file (see 'hg help hgweb')"), _('FILE')),
4559 ('', 'webdir-conf', '', _('name of the hgweb config file (DEPRECATED)'),
4559 ('', 'webdir-conf', '', _('name of the hgweb config file (DEPRECATED)'),
4560 _('FILE')),
4560 _('FILE')),
4561 ('', 'pid-file', '', _('name of file to write process ID to'), _('FILE')),
4561 ('', 'pid-file', '', _('name of file to write process ID to'), _('FILE')),
4562 ('', 'stdio', None, _('for remote clients (ADVANCED)')),
4562 ('', 'stdio', None, _('for remote clients (ADVANCED)')),
4563 ('', 'cmdserver', '', _('for remote clients (ADVANCED)'), _('MODE')),
4563 ('', 'cmdserver', '', _('for remote clients (ADVANCED)'), _('MODE')),
4564 ('t', 'templates', '', _('web templates to use'), _('TEMPLATE')),
4564 ('t', 'templates', '', _('web templates to use'), _('TEMPLATE')),
4565 ('', 'style', '', _('template style to use'), _('STYLE')),
4565 ('', 'style', '', _('template style to use'), _('STYLE')),
4566 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
4566 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
4567 ('', 'certificate', '', _('SSL certificate file'), _('FILE'))]
4567 ('', 'certificate', '', _('SSL certificate file'), _('FILE'))]
4568 + subrepoopts,
4568 + subrepoopts,
4569 _('[OPTION]...'),
4569 _('[OPTION]...'),
4570 optionalrepo=True)
4570 optionalrepo=True)
4571 def serve(ui, repo, **opts):
4571 def serve(ui, repo, **opts):
4572 """start stand-alone webserver
4572 """start stand-alone webserver
4573
4573
4574 Start a local HTTP repository browser and pull server. You can use
4574 Start a local HTTP repository browser and pull server. You can use
4575 this for ad-hoc sharing and browsing of repositories. It is
4575 this for ad-hoc sharing and browsing of repositories. It is
4576 recommended to use a real web server to serve a repository for
4576 recommended to use a real web server to serve a repository for
4577 longer periods of time.
4577 longer periods of time.
4578
4578
4579 Please note that the server does not implement access control.
4579 Please note that the server does not implement access control.
4580 This means that, by default, anybody can read from the server and
4580 This means that, by default, anybody can read from the server and
4581 nobody can write to it. Set the ``web.allow_push``
4581 nobody can write to it. Set the ``web.allow_push``
4582 option to ``*`` to allow everybody to push to the server. You
4582 option to ``*`` to allow everybody to push to the server. You
4583 should use a real web server if you need to authenticate users.
4583 should use a real web server if you need to authenticate users.
4584
4584
4585 By default, the server logs accesses to stdout and errors to
4585 By default, the server logs accesses to stdout and errors to
4586 stderr. Use the -A/--accesslog and -E/--errorlog options to log to
4586 stderr. Use the -A/--accesslog and -E/--errorlog options to log to
4587 files.
4587 files.
4588
4588
4589 To have the server choose a free port number to listen on, specify
4589 To have the server choose a free port number to listen on, specify
4590 a port number of 0; in this case, the server will print the port
4590 a port number of 0; in this case, the server will print the port
4591 number it uses.
4591 number it uses.
4592
4592
4593 Returns 0 on success.
4593 Returns 0 on success.
4594 """
4594 """
4595
4595
4596 opts = pycompat.byteskwargs(opts)
4596 opts = pycompat.byteskwargs(opts)
4597 if opts["stdio"] and opts["cmdserver"]:
4597 if opts["stdio"] and opts["cmdserver"]:
4598 raise error.Abort(_("cannot use --stdio with --cmdserver"))
4598 raise error.Abort(_("cannot use --stdio with --cmdserver"))
4599
4599
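# Descriptive note (editorial): with --stdio the command serves the wire
# protocol to a single remote client over this process's stdin/stdout via
# sshserver (the mode used for ssh:// clients) instead of the HTTP interface
# configured by the options above.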
4600 if opts["stdio"]:
4600 if opts["stdio"]:
4601 if repo is None:
4601 if repo is None:
4602 raise error.RepoError(_("there is no Mercurial repository here"
4602 raise error.RepoError(_("there is no Mercurial repository here"
4603 " (.hg not found)"))
4603 " (.hg not found)"))
4604 s = sshserver.sshserver(ui, repo)
4604 s = sshserver.sshserver(ui, repo)
4605 s.serve_forever()
4605 s.serve_forever()
4606
4606
4607 service = server.createservice(ui, repo, opts)
4607 service = server.createservice(ui, repo, opts)
4608 return server.runservice(opts, initfn=service.init, runfn=service.run)
4608 return server.runservice(opts, initfn=service.init, runfn=service.run)
4609
4609
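# Editor's note (illustrative): per the help text above, pushing to an ad-hoc
# server can be opened up for everyone with an hgrc entry such as:
#
#   [web]
#   allow_push = *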
4610 @command('^status|st',
4610 @command('^status|st',
4611 [('A', 'all', None, _('show status of all files')),
4611 [('A', 'all', None, _('show status of all files')),
4612 ('m', 'modified', None, _('show only modified files')),
4612 ('m', 'modified', None, _('show only modified files')),
4613 ('a', 'added', None, _('show only added files')),
4613 ('a', 'added', None, _('show only added files')),
4614 ('r', 'removed', None, _('show only removed files')),
4614 ('r', 'removed', None, _('show only removed files')),
4615 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
4615 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
4616 ('c', 'clean', None, _('show only files without changes')),
4616 ('c', 'clean', None, _('show only files without changes')),
4617 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
4617 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
4618 ('i', 'ignored', None, _('show only ignored files')),
4618 ('i', 'ignored', None, _('show only ignored files')),
4619 ('n', 'no-status', None, _('hide status prefix')),
4619 ('n', 'no-status', None, _('hide status prefix')),
4620 ('t', 'terse', '', _('show the terse output (EXPERIMENTAL)')),
4620 ('t', 'terse', '', _('show the terse output (EXPERIMENTAL)')),
4621 ('C', 'copies', None, _('show source of copied files')),
4621 ('C', 'copies', None, _('show source of copied files')),
4622 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
4622 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
4623 ('', 'rev', [], _('show difference from revision'), _('REV')),
4623 ('', 'rev', [], _('show difference from revision'), _('REV')),
4624 ('', 'change', '', _('list the changed files of a revision'), _('REV')),
4624 ('', 'change', '', _('list the changed files of a revision'), _('REV')),
4625 ] + walkopts + subrepoopts + formatteropts,
4625 ] + walkopts + subrepoopts + formatteropts,
4626 _('[OPTION]... [FILE]...'),
4626 _('[OPTION]... [FILE]...'),
4627 inferrepo=True)
4627 inferrepo=True)
4628 def status(ui, repo, *pats, **opts):
4628 def status(ui, repo, *pats, **opts):
4629 """show changed files in the working directory
4629 """show changed files in the working directory
4630
4630
4631 Show status of files in the repository. If names are given, only
4631 Show status of files in the repository. If names are given, only
4632 files that match are shown. Files that are clean or ignored or
4632 files that match are shown. Files that are clean, ignored, or
4632 files that match are shown. Files that are clean, ignored, or
4633 the source of a copy/move operation are not listed unless
4633 the source of a copy/move operation are not listed unless
4634 -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
4635 Unless options described with "show only ..." are given, the
4635 Unless options described with "show only ..." are given, the
4636 options -mardu are used.
4636 options -mardu are used.
4637
4637
4638 Option -q/--quiet hides untracked (unknown and ignored) files
4638 Option -q/--quiet hides untracked (unknown and ignored) files
4639 unless explicitly requested with -u/--unknown or -i/--ignored.
4639 unless explicitly requested with -u/--unknown or -i/--ignored.
4640
4640
4641 .. note::
4641 .. note::
4642
4642
4643 :hg:`status` may appear to disagree with diff if permissions have
4643 :hg:`status` may appear to disagree with diff if permissions have
4644 changed or a merge has occurred. The standard diff format does
4644 changed or a merge has occurred. The standard diff format does
4645 not report permission changes and diff only reports changes
4645 not report permission changes and diff only reports changes
4646 relative to one merge parent.
4646 relative to one merge parent.
4647
4647
4648 If one revision is given, it is used as the base revision.
4648 If one revision is given, it is used as the base revision.
4649 If two revisions are given, the differences between them are
4649 If two revisions are given, the differences between them are
4650 shown. The --change option can also be used as a shortcut to list
4650 shown. The --change option can also be used as a shortcut to list
4651 the changed files of a revision from its first parent.
4651 the changed files of a revision from its first parent.
4652
4652
4653 The codes used to show the status of files are::
4653 The codes used to show the status of files are::
4654
4654
4655 M = modified
4655 M = modified
4656 A = added
4656 A = added
4657 R = removed
4657 R = removed
4658 C = clean
4658 C = clean
4659 ! = missing (deleted by non-hg command, but still tracked)
4659 ! = missing (deleted by non-hg command, but still tracked)
4660 ? = not tracked
4660 ? = not tracked
4661 I = ignored
4661 I = ignored
4662 = origin of the previous file (with --copies)
4662 = origin of the previous file (with --copies)
4663
4663
4664 .. container:: verbose
4664 .. container:: verbose
4665
4665
4666 The -t/--terse option abbreviates the output by showing directory name
4666 The -t/--terse option abbreviates the output by showing directory name
4667 if all the files in it share the same status. The option expects a value
4667 if all the files in it share the same status. The option expects a value
4668 which can be a string formed by using 'm', 'a', 'r', 'd', 'u', 'i', 'c'
4668 which can be a string formed by using 'm', 'a', 'r', 'd', 'u', 'i', 'c'
4669 where 'm' stands for 'modified', 'a' for 'added', 'r' for 'removed',
4669 where 'm' stands for 'modified', 'a' for 'added', 'r' for 'removed',
4670 'd' for 'deleted', 'u' for 'unknown', 'i' for 'ignored' and 'c' for clean.
4670 'd' for 'deleted', 'u' for 'unknown', 'i' for 'ignored' and 'c' for clean.
4671
4671
4672 Only the statuses passed in the --terse value are tersed. Ignored files
4672 Only the statuses passed in the --terse value are tersed. Ignored files
4673 are not considered while tersing unless 'i' is included in the value or
4673 are not considered while tersing unless 'i' is included in the value or
4674 the --ignored option is used.
4674 the --ignored option is used.
4675
4675
4676 Examples:
4676 Examples:
4677
4677
4678 - show changes in the working directory relative to a
4678 - show changes in the working directory relative to a
4679 changeset::
4679 changeset::
4680
4680
4681 hg status --rev 9353
4681 hg status --rev 9353
4682
4682
4683 - show changes in the working directory relative to the
4683 - show changes in the working directory relative to the
4684 current directory (see :hg:`help patterns` for more information)::
4684 current directory (see :hg:`help patterns` for more information)::
4685
4685
4686 hg status re:
4686 hg status re:
4687
4687
4688 - show all changes including copies in an existing changeset::
4688 - show all changes including copies in an existing changeset::
4689
4689
4690 hg status --copies --change 9353
4690 hg status --copies --change 9353
4691
4691
4692 - get a NUL separated list of added files, suitable for xargs::
4692 - get a NUL separated list of added files, suitable for xargs::
4693
4693
4694 hg status -an0
4694 hg status -an0
4695
4695
4696 Returns 0 on success.
4696 Returns 0 on success.
4697 """
4697 """
4698
4698
4699 opts = pycompat.byteskwargs(opts)
4699 opts = pycompat.byteskwargs(opts)
4700 revs = opts.get('rev')
4700 revs = opts.get('rev')
4701 change = opts.get('change')
4701 change = opts.get('change')
4702 terse = opts.get('terse')
4702 terse = opts.get('terse')
4703
4703
4704 if revs and change:
4704 if revs and change:
4705 msg = _('cannot specify --rev and --change at the same time')
4705 msg = _('cannot specify --rev and --change at the same time')
4706 raise error.Abort(msg)
4706 raise error.Abort(msg)
4707 elif revs and terse:
4707 elif revs and terse:
4708 msg = _('cannot use --terse with --rev')
4708 msg = _('cannot use --terse with --rev')
4709 raise error.Abort(msg)
4709 raise error.Abort(msg)
4710 elif change:
4710 elif change:
4711 node2 = scmutil.revsingle(repo, change, None).node()
4711 node2 = scmutil.revsingle(repo, change, None).node()
4712 node1 = repo[node2].p1().node()
4712 node1 = repo[node2].p1().node()
4713 else:
4713 else:
4714 node1, node2 = scmutil.revpair(repo, revs)
4714 node1, node2 = scmutil.revpair(repo, revs)
4715
4715
4716 if pats or ui.configbool('commands', 'status.relative'):
4716 if pats or ui.configbool('commands', 'status.relative'):
4717 cwd = repo.getcwd()
4717 cwd = repo.getcwd()
4718 else:
4718 else:
4719 cwd = ''
4719 cwd = ''
4720
4720
4721 if opts.get('print0'):
4721 if opts.get('print0'):
4722 end = '\0'
4722 end = '\0'
4723 else:
4723 else:
4724 end = '\n'
4724 end = '\n'
4725 copy = {}
4725 copy = {}
4726 states = 'modified added removed deleted unknown ignored clean'.split()
4726 states = 'modified added removed deleted unknown ignored clean'.split()
4727 show = [k for k in states if opts.get(k)]
4727 show = [k for k in states if opts.get(k)]
4728 if opts.get('all'):
4728 if opts.get('all'):
4729 show += ui.quiet and (states[:4] + ['clean']) or states
4729 show += ui.quiet and (states[:4] + ['clean']) or states
4730
4730
4731 if not show:
4731 if not show:
4732 if ui.quiet:
4732 if ui.quiet:
4733 show = states[:4]
4733 show = states[:4]
4734 else:
4734 else:
4735 show = states[:5]
4735 show = states[:5]
4736
4736
4737 m = scmutil.match(repo[node2], pats, opts)
4737 m = scmutil.match(repo[node2], pats, opts)
4738 stat = repo.status(node1, node2, m,
4738 stat = repo.status(node1, node2, m,
4739 'ignored' in show, 'clean' in show, 'unknown' in show,
4739 'ignored' in show, 'clean' in show, 'unknown' in show,
4740 opts.get('subrepos'))
4740 opts.get('subrepos'))
4741 if terse:
4741 if terse:
4742 stat = cmdutil.tersestatus(repo.root, stat, terse,
4742 stat = cmdutil.tersestatus(repo.root, stat, terse,
4743 repo.dirstate._ignore, opts.get('ignored'))
4743 repo.dirstate._ignore, opts.get('ignored'))
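# Descriptive note (editorial): `states` and the status letters line up
# positionally -- modified/M, added/A, removed/R, deleted/!, unknown/?,
# ignored/I, clean/C -- and repo.status() returns its file lists in the same
# order, so zip() pairs each state name with its letter and its files.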
4744 changestates = zip(states, pycompat.iterbytestr('MAR!?IC'), stat)
4744 changestates = zip(states, pycompat.iterbytestr('MAR!?IC'), stat)
4745
4745
4746 if (opts.get('all') or opts.get('copies')
4746 if (opts.get('all') or opts.get('copies')
4747 or ui.configbool('ui', 'statuscopies')) and not opts.get('no_status'):
4747 or ui.configbool('ui', 'statuscopies')) and not opts.get('no_status'):
4748 copy = copies.pathcopies(repo[node1], repo[node2], m)
4748 copy = copies.pathcopies(repo[node1], repo[node2], m)
4749
4749
4750 ui.pager('status')
4750 ui.pager('status')
4751 fm = ui.formatter('status', opts)
4751 fm = ui.formatter('status', opts)
4752 fmt = '%s' + end
4752 fmt = '%s' + end
4753 showchar = not opts.get('no_status')
4753 showchar = not opts.get('no_status')
4754
4754
4755 for state, char, files in changestates:
4755 for state, char, files in changestates:
4756 if state in show:
4756 if state in show:
4757 label = 'status.' + state
4757 label = 'status.' + state
4758 for f in files:
4758 for f in files:
4759 fm.startitem()
4759 fm.startitem()
4760 fm.condwrite(showchar, 'status', '%s ', char, label=label)
4760 fm.condwrite(showchar, 'status', '%s ', char, label=label)
4761 fm.write('path', fmt, repo.pathto(f, cwd), label=label)
4761 fm.write('path', fmt, repo.pathto(f, cwd), label=label)
4762 if f in copy:
4762 if f in copy:
4763 fm.write("copy", ' %s' + end, repo.pathto(copy[f], cwd),
4763 fm.write("copy", ' %s' + end, repo.pathto(copy[f], cwd),
4764 label='status.copied')
4764 label='status.copied')
4765 fm.end()
4765 fm.end()
4766
4766
4767 @command('^summary|sum',
4767 @command('^summary|sum',
4768 [('', 'remote', None, _('check for push and pull'))], '[--remote]')
4768 [('', 'remote', None, _('check for push and pull'))], '[--remote]')
4769 def summary(ui, repo, **opts):
4769 def summary(ui, repo, **opts):
4770 """summarize working directory state
4770 """summarize working directory state
4771
4771
4772 This generates a brief summary of the working directory state,
4772 This generates a brief summary of the working directory state,
4773 including parents, branch, commit status, phase and available updates.
4773 including parents, branch, commit status, phase and available updates.
4774
4774
4775 With the --remote option, this will check the default paths for
4775 With the --remote option, this will check the default paths for
4776 incoming and outgoing changes. This can be time-consuming.
4776 incoming and outgoing changes. This can be time-consuming.
4777
4777
4778 Returns 0 on success.
4778 Returns 0 on success.
4779 """
4779 """
4780
4780
4781 opts = pycompat.byteskwargs(opts)
4781 opts = pycompat.byteskwargs(opts)
4782 ui.pager('summary')
4782 ui.pager('summary')
4783 ctx = repo[None]
4783 ctx = repo[None]
4784 parents = ctx.parents()
4784 parents = ctx.parents()
4785 pnode = parents[0].node()
4785 pnode = parents[0].node()
4786 marks = []
4786 marks = []
4787
4787
4788 ms = None
4788 ms = None
4789 try:
4789 try:
4790 ms = mergemod.mergestate.read(repo)
4790 ms = mergemod.mergestate.read(repo)
4791 except error.UnsupportedMergeRecords as e:
4791 except error.UnsupportedMergeRecords as e:
4792 s = ' '.join(e.recordtypes)
4792 s = ' '.join(e.recordtypes)
4793 ui.warn(
4793 ui.warn(
4794 _('warning: merge state has unsupported record types: %s\n') % s)
4794 _('warning: merge state has unsupported record types: %s\n') % s)
4795 unresolved = []
4795 unresolved = []
4796 else:
4796 else:
4797 unresolved = list(ms.unresolved())
4797 unresolved = list(ms.unresolved())
4798
4798
4799 for p in parents:
4799 for p in parents:
4800 # label with log.changeset (instead of log.parent) since this
4800 # label with log.changeset (instead of log.parent) since this
4801 # shows a working directory parent *changeset*:
4801 # shows a working directory parent *changeset*:
4802 # i18n: column positioning for "hg summary"
4802 # i18n: column positioning for "hg summary"
4803 ui.write(_('parent: %d:%s ') % (p.rev(), p),
4803 ui.write(_('parent: %d:%s ') % (p.rev(), p),
4804 label=cmdutil._changesetlabels(p))
4804 label=cmdutil._changesetlabels(p))
4805 ui.write(' '.join(p.tags()), label='log.tag')
4805 ui.write(' '.join(p.tags()), label='log.tag')
4806 if p.bookmarks():
4806 if p.bookmarks():
4807 marks.extend(p.bookmarks())
4807 marks.extend(p.bookmarks())
4808 if p.rev() == -1:
4808 if p.rev() == -1:
4809 if not len(repo):
4809 if not len(repo):
4810 ui.write(_(' (empty repository)'))
4810 ui.write(_(' (empty repository)'))
4811 else:
4811 else:
4812 ui.write(_(' (no revision checked out)'))
4812 ui.write(_(' (no revision checked out)'))
4813 if p.obsolete():
4813 if p.obsolete():
4814 ui.write(_(' (obsolete)'))
4814 ui.write(_(' (obsolete)'))
4815 if p.troubled():
4815 if p.troubled():
4816 ui.write(' ('
4816 ui.write(' ('
4817 + ', '.join(ui.label(trouble, 'trouble.%s' % trouble)
4817 + ', '.join(ui.label(trouble, 'trouble.%s' % trouble)
4818 for trouble in p.troubles())
4818 for trouble in p.troubles())
4819 + ')')
4819 + ')')
4820 ui.write('\n')
4820 ui.write('\n')
4821 if p.description():
4821 if p.description():
4822 ui.status(' ' + p.description().splitlines()[0].strip() + '\n',
4822 ui.status(' ' + p.description().splitlines()[0].strip() + '\n',
4823 label='log.summary')
4823 label='log.summary')
4824
4824
4825 branch = ctx.branch()
4825 branch = ctx.branch()
4826 bheads = repo.branchheads(branch)
4826 bheads = repo.branchheads(branch)
4827 # i18n: column positioning for "hg summary"
4827 # i18n: column positioning for "hg summary"
4828 m = _('branch: %s\n') % branch
4828 m = _('branch: %s\n') % branch
4829 if branch != 'default':
4829 if branch != 'default':
4830 ui.write(m, label='log.branch')
4830 ui.write(m, label='log.branch')
4831 else:
4831 else:
4832 ui.status(m, label='log.branch')
4832 ui.status(m, label='log.branch')
4833
4833
4834 if marks:
4834 if marks:
4835 active = repo._activebookmark
4835 active = repo._activebookmark
4836 # i18n: column positioning for "hg summary"
4836 # i18n: column positioning for "hg summary"
4837 ui.write(_('bookmarks:'), label='log.bookmark')
4837 ui.write(_('bookmarks:'), label='log.bookmark')
4838 if active is not None:
4838 if active is not None:
4839 if active in marks:
4839 if active in marks:
4840 ui.write(' *' + active, label=bookmarks.activebookmarklabel)
4840 ui.write(' *' + active, label=bookmarks.activebookmarklabel)
4841 marks.remove(active)
4841 marks.remove(active)
4842 else:
4842 else:
4843 ui.write(' [%s]' % active, label=bookmarks.activebookmarklabel)
4843 ui.write(' [%s]' % active, label=bookmarks.activebookmarklabel)
4844 for m in marks:
4844 for m in marks:
4845 ui.write(' ' + m, label='log.bookmark')
4845 ui.write(' ' + m, label='log.bookmark')
4846 ui.write('\n', label='log.bookmark')
4846 ui.write('\n', label='log.bookmark')
4847
4847
4848 status = repo.status(unknown=True)
4848 status = repo.status(unknown=True)
4849
4849
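# Descriptive note (editorial): the loop below splits dirstate copy records
# into renames and copies: if the copy source is also in the removed set the
# destination is reported as a rename, otherwise as a plain copy; either way
# the destination is not double-counted in the added list.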
4850 c = repo.dirstate.copies()
4850 c = repo.dirstate.copies()
4851 copied, renamed = [], []
4851 copied, renamed = [], []
4852 for d, s in c.iteritems():
4852 for d, s in c.iteritems():
4853 if s in status.removed:
4853 if s in status.removed:
4854 status.removed.remove(s)
4854 status.removed.remove(s)
4855 renamed.append(d)
4855 renamed.append(d)
4856 else:
4856 else:
4857 copied.append(d)
4857 copied.append(d)
4858 if d in status.added:
4858 if d in status.added:
4859 status.added.remove(d)
4859 status.added.remove(d)
4860
4860
4861 subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
4861 subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
4862
4862
4863 labels = [(ui.label(_('%d modified'), 'status.modified'), status.modified),
4863 labels = [(ui.label(_('%d modified'), 'status.modified'), status.modified),
4864 (ui.label(_('%d added'), 'status.added'), status.added),
4864 (ui.label(_('%d added'), 'status.added'), status.added),
4865 (ui.label(_('%d removed'), 'status.removed'), status.removed),
4865 (ui.label(_('%d removed'), 'status.removed'), status.removed),
4866 (ui.label(_('%d renamed'), 'status.copied'), renamed),
4866 (ui.label(_('%d renamed'), 'status.copied'), renamed),
4867 (ui.label(_('%d copied'), 'status.copied'), copied),
4867 (ui.label(_('%d copied'), 'status.copied'), copied),
4868 (ui.label(_('%d deleted'), 'status.deleted'), status.deleted),
4868 (ui.label(_('%d deleted'), 'status.deleted'), status.deleted),
4869 (ui.label(_('%d unknown'), 'status.unknown'), status.unknown),
4869 (ui.label(_('%d unknown'), 'status.unknown'), status.unknown),
4870 (ui.label(_('%d unresolved'), 'resolve.unresolved'), unresolved),
4870 (ui.label(_('%d unresolved'), 'resolve.unresolved'), unresolved),
4871 (ui.label(_('%d subrepos'), 'status.modified'), subs)]
4871 (ui.label(_('%d subrepos'), 'status.modified'), subs)]
4872 t = []
4872 t = []
4873 for l, s in labels:
4873 for l, s in labels:
4874 if s:
4874 if s:
4875 t.append(l % len(s))
4875 t.append(l % len(s))
4876
4876
4877 t = ', '.join(t)
4877 t = ', '.join(t)
4878 cleanworkdir = False
4878 cleanworkdir = False
4879
4879
4880 if repo.vfs.exists('graftstate'):
4880 if repo.vfs.exists('graftstate'):
4881 t += _(' (graft in progress)')
4881 t += _(' (graft in progress)')
4882 if repo.vfs.exists('updatestate'):
4882 if repo.vfs.exists('updatestate'):
4883 t += _(' (interrupted update)')
4883 t += _(' (interrupted update)')
4884 elif len(parents) > 1:
4884 elif len(parents) > 1:
4885 t += _(' (merge)')
4885 t += _(' (merge)')
4886 elif branch != parents[0].branch():
4886 elif branch != parents[0].branch():
4887 t += _(' (new branch)')
4887 t += _(' (new branch)')
4888 elif (parents[0].closesbranch() and
4888 elif (parents[0].closesbranch() and
4889 pnode in repo.branchheads(branch, closed=True)):
4889 pnode in repo.branchheads(branch, closed=True)):
4890 t += _(' (head closed)')
4890 t += _(' (head closed)')
4891 elif not (status.modified or status.added or status.removed or renamed or
4891 elif not (status.modified or status.added or status.removed or renamed or
4892 copied or subs):
4892 copied or subs):
4893 t += _(' (clean)')
4893 t += _(' (clean)')
4894 cleanworkdir = True
4894 cleanworkdir = True
4895 elif pnode not in bheads:
4895 elif pnode not in bheads:
4896 t += _(' (new branch head)')
4896 t += _(' (new branch head)')
4897
4897
4898 if parents:
4898 if parents:
4899 pendingphase = max(p.phase() for p in parents)
4899 pendingphase = max(p.phase() for p in parents)
4900 else:
4900 else:
4901 pendingphase = phases.public
4901 pendingphase = phases.public
4902
4902
4903 if pendingphase > phases.newcommitphase(ui):
4903 if pendingphase > phases.newcommitphase(ui):
4904 t += ' (%s)' % phases.phasenames[pendingphase]
4904 t += ' (%s)' % phases.phasenames[pendingphase]
4905
4905
4906 if cleanworkdir:
4906 if cleanworkdir:
4907 # i18n: column positioning for "hg summary"
4907 # i18n: column positioning for "hg summary"
4908 ui.status(_('commit: %s\n') % t.strip())
4908 ui.status(_('commit: %s\n') % t.strip())
4909 else:
4909 else:
4910 # i18n: column positioning for "hg summary"
4910 # i18n: column positioning for "hg summary"
4911 ui.write(_('commit: %s\n') % t.strip())
4911 ui.write(_('commit: %s\n') % t.strip())
4912
4912
4913 # all ancestors of branch heads - all ancestors of parent = new csets
4913 # all ancestors of branch heads - all ancestors of parent = new csets
4914 new = len(repo.changelog.findmissing([pctx.node() for pctx in parents],
4914 new = len(repo.changelog.findmissing([pctx.node() for pctx in parents],
4915 bheads))
4915 bheads))
4916
4916
4917 if new == 0:
4917 if new == 0:
4918 # i18n: column positioning for "hg summary"
4918 # i18n: column positioning for "hg summary"
4919 ui.status(_('update: (current)\n'))
4919 ui.status(_('update: (current)\n'))
4920 elif pnode not in bheads:
4920 elif pnode not in bheads:
4921 # i18n: column positioning for "hg summary"
4921 # i18n: column positioning for "hg summary"
4922 ui.write(_('update: %d new changesets (update)\n') % new)
4922 ui.write(_('update: %d new changesets (update)\n') % new)
4923 else:
4923 else:
4924 # i18n: column positioning for "hg summary"
4924 # i18n: column positioning for "hg summary"
4925 ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
4925 ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
4926 (new, len(bheads)))
4926 (new, len(bheads)))
4927
4927
4928 t = []
4928 t = []
4929 draft = len(repo.revs('draft()'))
4929 draft = len(repo.revs('draft()'))
4930 if draft:
4930 if draft:
4931 t.append(_('%d draft') % draft)
4931 t.append(_('%d draft') % draft)
4932 secret = len(repo.revs('secret()'))
4932 secret = len(repo.revs('secret()'))
4933 if secret:
4933 if secret:
4934 t.append(_('%d secret') % secret)
4934 t.append(_('%d secret') % secret)
4935
4935
4936 if draft or secret:
4936 if draft or secret:
4937 ui.status(_('phases: %s\n') % ', '.join(t))
4937 ui.status(_('phases: %s\n') % ', '.join(t))
4938
4938
4939 if obsolete.isenabled(repo, obsolete.createmarkersopt):
4939 if obsolete.isenabled(repo, obsolete.createmarkersopt):
4940 for trouble in ("unstable", "divergent", "bumped"):
4940 for trouble in ("unstable", "divergent", "bumped"):
4941 numtrouble = len(repo.revs(trouble + "()"))
4941 numtrouble = len(repo.revs(trouble + "()"))
4942 # We write all the possibilities to ease translation
4942 # We write all the possibilities to ease translation
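# Descriptive note (editorial): the keys are the internal trouble (revset)
# names queried above; the values are the user-facing labels, so "divergent"
# changesets are reported to the user as "content-divergent".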
4943 troublemsg = {
4943 troublemsg = {
4944 "unstable": _("orphan: %d changesets"),
4944 "unstable": _("orphan: %d changesets"),
4945 "divergent": _("divergent: %d changesets"),
4945 "divergent": _("content-divergent: %d changesets"),
4946 "bumped": _("bumped: %d changesets"),
4946 "bumped": _("bumped: %d changesets"),
4947 }
4947 }
4948 if numtrouble > 0:
4948 if numtrouble > 0:
4949 ui.status(troublemsg[trouble] % numtrouble + "\n")
4949 ui.status(troublemsg[trouble] % numtrouble + "\n")
4950
4950
4951 cmdutil.summaryhooks(ui, repo)
4951 cmdutil.summaryhooks(ui, repo)
4952
4952
4953 if opts.get('remote'):
4953 if opts.get('remote'):
4954 needsincoming, needsoutgoing = True, True
4954 needsincoming, needsoutgoing = True, True
4955 else:
4955 else:
4956 needsincoming, needsoutgoing = False, False
4956 needsincoming, needsoutgoing = False, False
4957 for i, o in cmdutil.summaryremotehooks(ui, repo, opts, None):
4957 for i, o in cmdutil.summaryremotehooks(ui, repo, opts, None):
4958 if i:
4958 if i:
4959 needsincoming = True
4959 needsincoming = True
4960 if o:
4960 if o:
4961 needsoutgoing = True
4961 needsoutgoing = True
4962 if not needsincoming and not needsoutgoing:
4962 if not needsincoming and not needsoutgoing:
4963 return
4963 return
4964
4964
4965 def getincoming():
4965 def getincoming():
4966 source, branches = hg.parseurl(ui.expandpath('default'))
4966 source, branches = hg.parseurl(ui.expandpath('default'))
4967 sbranch = branches[0]
4967 sbranch = branches[0]
4968 try:
4968 try:
4969 other = hg.peer(repo, {}, source)
4969 other = hg.peer(repo, {}, source)
4970 except error.RepoError:
4970 except error.RepoError:
4971 if opts.get('remote'):
4971 if opts.get('remote'):
4972 raise
4972 raise
4973 return source, sbranch, None, None, None
4973 return source, sbranch, None, None, None
4974 revs, checkout = hg.addbranchrevs(repo, other, branches, None)
4974 revs, checkout = hg.addbranchrevs(repo, other, branches, None)
4975 if revs:
4975 if revs:
4976 revs = [other.lookup(rev) for rev in revs]
4976 revs = [other.lookup(rev) for rev in revs]
4977 ui.debug('comparing with %s\n' % util.hidepassword(source))
4977 ui.debug('comparing with %s\n' % util.hidepassword(source))
4978 repo.ui.pushbuffer()
4978 repo.ui.pushbuffer()
4979 commoninc = discovery.findcommonincoming(repo, other, heads=revs)
4979 commoninc = discovery.findcommonincoming(repo, other, heads=revs)
4980 repo.ui.popbuffer()
4980 repo.ui.popbuffer()
4981 return source, sbranch, other, commoninc, commoninc[1]
4981 return source, sbranch, other, commoninc, commoninc[1]
4982
4982
4983 if needsincoming:
4983 if needsincoming:
4984 source, sbranch, sother, commoninc, incoming = getincoming()
4984 source, sbranch, sother, commoninc, incoming = getincoming()
4985 else:
4985 else:
4986 source = sbranch = sother = commoninc = incoming = None
4986 source = sbranch = sother = commoninc = incoming = None
4987
4987
4988 def getoutgoing():
4988 def getoutgoing():
4989 dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
4989 dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
4990 dbranch = branches[0]
4990 dbranch = branches[0]
4991 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
4991 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
4992 if source != dest:
4992 if source != dest:
4993 try:
4993 try:
4994 dother = hg.peer(repo, {}, dest)
4994 dother = hg.peer(repo, {}, dest)
4995 except error.RepoError:
4995 except error.RepoError:
4996 if opts.get('remote'):
4996 if opts.get('remote'):
4997 raise
4997 raise
4998 return dest, dbranch, None, None
4998 return dest, dbranch, None, None
4999 ui.debug('comparing with %s\n' % util.hidepassword(dest))
4999 ui.debug('comparing with %s\n' % util.hidepassword(dest))
5000 elif sother is None:
5000 elif sother is None:
5001 # there is no explicit destination peer, but the source one is invalid
5001 # there is no explicit destination peer, but the source one is invalid

5002 return dest, dbranch, None, None
5002 return dest, dbranch, None, None
5003 else:
5003 else:
5004 dother = sother
5004 dother = sother
5005 if (source != dest or (sbranch is not None and sbranch != dbranch)):
5005 if (source != dest or (sbranch is not None and sbranch != dbranch)):
5006 common = None
5006 common = None
5007 else:
5007 else:
5008 common = commoninc
5008 common = commoninc
5009 if revs:
5009 if revs:
5010 revs = [repo.lookup(rev) for rev in revs]
5010 revs = [repo.lookup(rev) for rev in revs]
5011 repo.ui.pushbuffer()
5011 repo.ui.pushbuffer()
5012 outgoing = discovery.findcommonoutgoing(repo, dother, onlyheads=revs,
5012 outgoing = discovery.findcommonoutgoing(repo, dother, onlyheads=revs,
5013 commoninc=common)
5013 commoninc=common)
5014 repo.ui.popbuffer()
5014 repo.ui.popbuffer()
5015 return dest, dbranch, dother, outgoing
5015 return dest, dbranch, dother, outgoing
5016
5016
5017 if needsoutgoing:
5017 if needsoutgoing:
5018 dest, dbranch, dother, outgoing = getoutgoing()
5018 dest, dbranch, dother, outgoing = getoutgoing()
5019 else:
5019 else:
5020 dest = dbranch = dother = outgoing = None
5020 dest = dbranch = dother = outgoing = None
5021
5021
5022 if opts.get('remote'):
5022 if opts.get('remote'):
5023 t = []
5023 t = []
5024 if incoming:
5024 if incoming:
5025 t.append(_('1 or more incoming'))
5025 t.append(_('1 or more incoming'))
5026 o = outgoing.missing
5026 o = outgoing.missing
5027 if o:
5027 if o:
5028 t.append(_('%d outgoing') % len(o))
5028 t.append(_('%d outgoing') % len(o))
5029 other = dother or sother
5029 other = dother or sother
5030 if 'bookmarks' in other.listkeys('namespaces'):
5030 if 'bookmarks' in other.listkeys('namespaces'):
5031 counts = bookmarks.summary(repo, other)
5031 counts = bookmarks.summary(repo, other)
5032 if counts[0] > 0:
5032 if counts[0] > 0:
5033 t.append(_('%d incoming bookmarks') % counts[0])
5033 t.append(_('%d incoming bookmarks') % counts[0])
5034 if counts[1] > 0:
5034 if counts[1] > 0:
5035 t.append(_('%d outgoing bookmarks') % counts[1])
5035 t.append(_('%d outgoing bookmarks') % counts[1])
5036
5036
5037 if t:
5037 if t:
5038 # i18n: column positioning for "hg summary"
5038 # i18n: column positioning for "hg summary"
5039 ui.write(_('remote: %s\n') % (', '.join(t)))
5039 ui.write(_('remote: %s\n') % (', '.join(t)))
5040 else:
5040 else:
5041 # i18n: column positioning for "hg summary"
5041 # i18n: column positioning for "hg summary"
5042 ui.status(_('remote: (synced)\n'))
5042 ui.status(_('remote: (synced)\n'))
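# Illustrative only (counts invented): with --remote this section prints
# either a combined line such as
#     remote: 1 or more incoming, 2 outgoing, 1 incoming bookmarks
# or, when nothing differs from the default paths,
#     remote: (synced)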
5043
5043
5044 cmdutil.summaryremotehooks(ui, repo, opts,
5044 cmdutil.summaryremotehooks(ui, repo, opts,
5045 ((source, sbranch, sother, commoninc),
5045 ((source, sbranch, sother, commoninc),
5046 (dest, dbranch, dother, outgoing)))
5046 (dest, dbranch, dother, outgoing)))
5047
5047
5048 @command('tag',
5048 @command('tag',
5049 [('f', 'force', None, _('force tag')),
5049 [('f', 'force', None, _('force tag')),
5050 ('l', 'local', None, _('make the tag local')),
5050 ('l', 'local', None, _('make the tag local')),
5051 ('r', 'rev', '', _('revision to tag'), _('REV')),
5051 ('r', 'rev', '', _('revision to tag'), _('REV')),
5052 ('', 'remove', None, _('remove a tag')),
5052 ('', 'remove', None, _('remove a tag')),
5053 # -l/--local is already there, commitopts cannot be used
5053 # -l/--local is already there, commitopts cannot be used
5054 ('e', 'edit', None, _('invoke editor on commit messages')),
5054 ('e', 'edit', None, _('invoke editor on commit messages')),
5055 ('m', 'message', '', _('use text as commit message'), _('TEXT')),
5055 ('m', 'message', '', _('use text as commit message'), _('TEXT')),
5056 ] + commitopts2,
5056 ] + commitopts2,
5057 _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...'))
5057 _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...'))
5058 def tag(ui, repo, name1, *names, **opts):
5058 def tag(ui, repo, name1, *names, **opts):
5059 """add one or more tags for the current or given revision
5059 """add one or more tags for the current or given revision
5060
5060
5061 Name a particular revision using <name>.
5061 Name a particular revision using <name>.
5062
5062
5063 Tags are used to name particular revisions of the repository and are
5063 Tags are used to name particular revisions of the repository and are
5064 very useful to compare different revisions, to go back to significant
5064 very useful to compare different revisions, to go back to significant
5065 earlier versions or to mark branch points as releases, etc. Changing
5065 earlier versions or to mark branch points as releases, etc. Changing
5066 an existing tag is normally disallowed; use -f/--force to override.
5066 an existing tag is normally disallowed; use -f/--force to override.
5067
5067
5068 If no revision is given, the parent of the working directory is
5068 If no revision is given, the parent of the working directory is
5069 used.
5069 used.
5070
5070
5071 To facilitate version control, distribution, and merging of tags,
5071 To facilitate version control, distribution, and merging of tags,
5072 they are stored as a file named ".hgtags" which is managed similarly
5072 they are stored as a file named ".hgtags" which is managed similarly
5073 to other project files and can be hand-edited if necessary. This
5073 to other project files and can be hand-edited if necessary. This
5074 also means that tagging creates a new commit. The file
5074 also means that tagging creates a new commit. The file
5075 ".hg/localtags" is used for local tags (not shared among
5075 ".hg/localtags" is used for local tags (not shared among
5076 repositories).
5076 repositories).
5077
5077
5078 Tag commits are usually made at the head of a branch. If the parent
5078 Tag commits are usually made at the head of a branch. If the parent
5079 of the working directory is not a branch head, :hg:`tag` aborts; use
5079 of the working directory is not a branch head, :hg:`tag` aborts; use
5080 -f/--force to force the tag commit to be based on a non-head
5080 -f/--force to force the tag commit to be based on a non-head
5081 changeset.
5081 changeset.
5082
5082
5083 See :hg:`help dates` for a list of formats valid for -d/--date.
5083 See :hg:`help dates` for a list of formats valid for -d/--date.
5084
5084
5085 Since tag names have priority over branch names during revision
5085 Since tag names have priority over branch names during revision
5086 lookup, using an existing branch name as a tag name is discouraged.
5086 lookup, using an existing branch name as a tag name is discouraged.
5087
5087
5088 Returns 0 on success.
5088 Returns 0 on success.
5089 """
5089 """
5090 opts = pycompat.byteskwargs(opts)
5090 opts = pycompat.byteskwargs(opts)
5091 wlock = lock = None
5091 wlock = lock = None
5092 try:
5092 try:
5093 wlock = repo.wlock()
5093 wlock = repo.wlock()
5094 lock = repo.lock()
5094 lock = repo.lock()
5095 rev_ = "."
5095 rev_ = "."
5096 names = [t.strip() for t in (name1,) + names]
5096 names = [t.strip() for t in (name1,) + names]
5097 if len(names) != len(set(names)):
5097 if len(names) != len(set(names)):
5098 raise error.Abort(_('tag names must be unique'))
5098 raise error.Abort(_('tag names must be unique'))
5099 for n in names:
5099 for n in names:
5100 scmutil.checknewlabel(repo, n, 'tag')
5100 scmutil.checknewlabel(repo, n, 'tag')
5101 if not n:
5101 if not n:
5102 raise error.Abort(_('tag names cannot consist entirely of '
5102 raise error.Abort(_('tag names cannot consist entirely of '
5103 'whitespace'))
5103 'whitespace'))
5104 if opts.get('rev') and opts.get('remove'):
5104 if opts.get('rev') and opts.get('remove'):
5105 raise error.Abort(_("--rev and --remove are incompatible"))
5105 raise error.Abort(_("--rev and --remove are incompatible"))
5106 if opts.get('rev'):
5106 if opts.get('rev'):
5107 rev_ = opts['rev']
5107 rev_ = opts['rev']
5108 message = opts.get('message')
5108 message = opts.get('message')
5109 if opts.get('remove'):
5109 if opts.get('remove'):
5110 if opts.get('local'):
5110 if opts.get('local'):
5111 expectedtype = 'local'
5111 expectedtype = 'local'
5112 else:
5112 else:
5113 expectedtype = 'global'
5113 expectedtype = 'global'
5114
5114
5115 for n in names:
5115 for n in names:
5116 if not repo.tagtype(n):
5116 if not repo.tagtype(n):
5117 raise error.Abort(_("tag '%s' does not exist") % n)
5117 raise error.Abort(_("tag '%s' does not exist") % n)
5118 if repo.tagtype(n) != expectedtype:
5118 if repo.tagtype(n) != expectedtype:
5119 if expectedtype == 'global':
5119 if expectedtype == 'global':
5120 raise error.Abort(_("tag '%s' is not a global tag") % n)
5120 raise error.Abort(_("tag '%s' is not a global tag") % n)
5121 else:
5121 else:
5122 raise error.Abort(_("tag '%s' is not a local tag") % n)
5122 raise error.Abort(_("tag '%s' is not a local tag") % n)
5123 rev_ = 'null'
5123 rev_ = 'null'
5124 if not message:
5124 if not message:
5125 # we don't translate commit messages
5125 # we don't translate commit messages
5126 message = 'Removed tag %s' % ', '.join(names)
5126 message = 'Removed tag %s' % ', '.join(names)
5127 elif not opts.get('force'):
5127 elif not opts.get('force'):
5128 for n in names:
5128 for n in names:
5129 if n in repo.tags():
5129 if n in repo.tags():
5130 raise error.Abort(_("tag '%s' already exists "
5130 raise error.Abort(_("tag '%s' already exists "
5131 "(use -f to force)") % n)
5131 "(use -f to force)") % n)
5132 if not opts.get('local'):
5132 if not opts.get('local'):
5133 p1, p2 = repo.dirstate.parents()
5133 p1, p2 = repo.dirstate.parents()
5134 if p2 != nullid:
5134 if p2 != nullid:
5135 raise error.Abort(_('uncommitted merge'))
5135 raise error.Abort(_('uncommitted merge'))
5136 bheads = repo.branchheads()
5136 bheads = repo.branchheads()
5137 if not opts.get('force') and bheads and p1 not in bheads:
5137 if not opts.get('force') and bheads and p1 not in bheads:
5138 raise error.Abort(_('working directory is not at a branch head '
5138 raise error.Abort(_('working directory is not at a branch head '
5139 '(use -f to force)'))
5139 '(use -f to force)'))
5140 r = scmutil.revsingle(repo, rev_).node()
5140 r = scmutil.revsingle(repo, rev_).node()
5141
5141
5142 if not message:
5142 if not message:
5143 # we don't translate commit messages
5143 # we don't translate commit messages
5144 message = ('Added tag %s for changeset %s' %
5144 message = ('Added tag %s for changeset %s' %
5145 (', '.join(names), short(r)))
5145 (', '.join(names), short(r)))
5146
5146
5147 date = opts.get('date')
5147 date = opts.get('date')
5148 if date:
5148 if date:
5149 date = util.parsedate(date)
5149 date = util.parsedate(date)
5150
5150
5151 if opts.get('remove'):
5151 if opts.get('remove'):
5152 editform = 'tag.remove'
5152 editform = 'tag.remove'
5153 else:
5153 else:
5154 editform = 'tag.add'
5154 editform = 'tag.add'
5155 editor = cmdutil.getcommiteditor(editform=editform,
5155 editor = cmdutil.getcommiteditor(editform=editform,
5156 **pycompat.strkwargs(opts))
5156 **pycompat.strkwargs(opts))
5157
5157
5158 # don't allow tagging the null rev
5158 # don't allow tagging the null rev
5159 if (not opts.get('remove') and
5159 if (not opts.get('remove') and
5160 scmutil.revsingle(repo, rev_).rev() == nullrev):
5160 scmutil.revsingle(repo, rev_).rev() == nullrev):
5161 raise error.Abort(_("cannot tag null revision"))
5161 raise error.Abort(_("cannot tag null revision"))
5162
5162
5163 tagsmod.tag(repo, names, r, message, opts.get('local'),
5163 tagsmod.tag(repo, names, r, message, opts.get('local'),
5164 opts.get('user'), date, editor=editor)
5164 opts.get('user'), date, editor=editor)
5165 finally:
5165 finally:
5166 release(lock, wlock)
5166 release(lock, wlock)
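# Illustrative command-line usage of the command above (the tag names and
# the revision hash are invented):
#     hg tag -r 3a2b1c0d v1.2      # record v1.2 in .hgtags and commit
#     hg tag --local wip           # record wip in .hg/localtags only
#     hg tag --remove v1.2         # commit a changeset removing the tag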
5167
5167
5168 @command('tags', formatteropts, '')
5168 @command('tags', formatteropts, '')
5169 def tags(ui, repo, **opts):
5169 def tags(ui, repo, **opts):
5170 """list repository tags
5170 """list repository tags
5171
5171
5172 This lists both regular and local tags. When the -v/--verbose
5172 This lists both regular and local tags. When the -v/--verbose
5173 switch is used, a third column "local" is printed for local tags.
5173 switch is used, a third column "local" is printed for local tags.
5174 When the -q/--quiet switch is used, only the tag name is printed.
5174 When the -q/--quiet switch is used, only the tag name is printed.
5175
5175
5176 Returns 0 on success.
5176 Returns 0 on success.
5177 """
5177 """
5178
5178
5179 opts = pycompat.byteskwargs(opts)
5179 opts = pycompat.byteskwargs(opts)
5180 ui.pager('tags')
5180 ui.pager('tags')
5181 fm = ui.formatter('tags', opts)
5181 fm = ui.formatter('tags', opts)
5182 hexfunc = fm.hexfunc
5182 hexfunc = fm.hexfunc
5183 tagtype = ""
5183 tagtype = ""
5184
5184
5185 for t, n in reversed(repo.tagslist()):
5185 for t, n in reversed(repo.tagslist()):
5186 hn = hexfunc(n)
5186 hn = hexfunc(n)
5187 label = 'tags.normal'
5187 label = 'tags.normal'
5188 tagtype = ''
5188 tagtype = ''
5189 if repo.tagtype(t) == 'local':
5189 if repo.tagtype(t) == 'local':
5190 label = 'tags.local'
5190 label = 'tags.local'
5191 tagtype = 'local'
5191 tagtype = 'local'
5192
5192
5193 fm.startitem()
5193 fm.startitem()
5194 fm.write('tag', '%s', t, label=label)
5194 fm.write('tag', '%s', t, label=label)
5195 fmt = " " * (30 - encoding.colwidth(t)) + ' %5d:%s'
5195 fmt = " " * (30 - encoding.colwidth(t)) + ' %5d:%s'
5196 fm.condwrite(not ui.quiet, 'rev node', fmt,
5196 fm.condwrite(not ui.quiet, 'rev node', fmt,
5197 repo.changelog.rev(n), hn, label=label)
5197 repo.changelog.rev(n), hn, label=label)
5198 fm.condwrite(ui.verbose and tagtype, 'type', ' %s',
5198 fm.condwrite(ui.verbose and tagtype, 'type', ' %s',
5199 tagtype, label=label)
5199 tagtype, label=label)
5200 fm.plain('\n')
5200 fm.plain('\n')
5201 fm.end()
5201 fm.end()
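# Sketch of the default output produced by the formatter above (tag names,
# revision numbers and hashes are invented; with -q only the first column
# is printed):
#     tip                             123:1f0dee641bb7
#     v1.2                            120:d2ae7f538514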
5202
5202
5203 @command('tip',
5203 @command('tip',
5204 [('p', 'patch', None, _('show patch')),
5204 [('p', 'patch', None, _('show patch')),
5205 ('g', 'git', None, _('use git extended diff format')),
5205 ('g', 'git', None, _('use git extended diff format')),
5206 ] + templateopts,
5206 ] + templateopts,
5207 _('[-p] [-g]'))
5207 _('[-p] [-g]'))
5208 def tip(ui, repo, **opts):
5208 def tip(ui, repo, **opts):
5209 """show the tip revision (DEPRECATED)
5209 """show the tip revision (DEPRECATED)
5210
5210
5211 The tip revision (usually just called the tip) is the changeset
5211 The tip revision (usually just called the tip) is the changeset
5212 most recently added to the repository (and therefore the most
5212 most recently added to the repository (and therefore the most
5213 recently changed head).
5213 recently changed head).
5214
5214
5215 If you have just made a commit, that commit will be the tip. If
5215 If you have just made a commit, that commit will be the tip. If
5216 you have just pulled changes from another repository, the tip of
5216 you have just pulled changes from another repository, the tip of
5217 that repository becomes the current tip. The "tip" tag is special
5217 that repository becomes the current tip. The "tip" tag is special
5218 and cannot be renamed or assigned to a different changeset.
5218 and cannot be renamed or assigned to a different changeset.
5219
5219
5220 This command is deprecated, please use :hg:`heads` instead.
5220 This command is deprecated, please use :hg:`heads` instead.
5221
5221
5222 Returns 0 on success.
5222 Returns 0 on success.
5223 """
5223 """
5224 opts = pycompat.byteskwargs(opts)
5224 opts = pycompat.byteskwargs(opts)
5225 displayer = cmdutil.show_changeset(ui, repo, opts)
5225 displayer = cmdutil.show_changeset(ui, repo, opts)
5226 displayer.show(repo['tip'])
5226 displayer.show(repo['tip'])
5227 displayer.close()
5227 displayer.close()
5228
5228
5229 @command('unbundle',
5229 @command('unbundle',
5230 [('u', 'update', None,
5230 [('u', 'update', None,
5231 _('update to new branch head if changesets were unbundled'))],
5231 _('update to new branch head if changesets were unbundled'))],
5232 _('[-u] FILE...'))
5232 _('[-u] FILE...'))
5233 def unbundle(ui, repo, fname1, *fnames, **opts):
5233 def unbundle(ui, repo, fname1, *fnames, **opts):
5234 """apply one or more bundle files
5234 """apply one or more bundle files
5235
5235
5236 Apply one or more bundle files generated by :hg:`bundle`.
5236 Apply one or more bundle files generated by :hg:`bundle`.
5237
5237
5238 Returns 0 on success, 1 if an update has unresolved files.
5238 Returns 0 on success, 1 if an update has unresolved files.
5239 """
5239 """
5240 fnames = (fname1,) + fnames
5240 fnames = (fname1,) + fnames
5241
5241
5242 with repo.lock():
5242 with repo.lock():
5243 for fname in fnames:
5243 for fname in fnames:
5244 f = hg.openpath(ui, fname)
5244 f = hg.openpath(ui, fname)
5245 gen = exchange.readbundle(ui, f, fname)
5245 gen = exchange.readbundle(ui, f, fname)
5246 if isinstance(gen, streamclone.streamcloneapplier):
5246 if isinstance(gen, streamclone.streamcloneapplier):
5247 raise error.Abort(
5247 raise error.Abort(
5248 _('packed bundles cannot be applied with '
5248 _('packed bundles cannot be applied with '
5249 '"hg unbundle"'),
5249 '"hg unbundle"'),
5250 hint=_('use "hg debugapplystreamclonebundle"'))
5250 hint=_('use "hg debugapplystreamclonebundle"'))
5251 url = 'bundle:' + fname
5251 url = 'bundle:' + fname
5252 try:
5252 try:
5253 txnname = 'unbundle'
5253 txnname = 'unbundle'
5254 if not isinstance(gen, bundle2.unbundle20):
5254 if not isinstance(gen, bundle2.unbundle20):
5255 txnname = 'unbundle\n%s' % util.hidepassword(url)
5255 txnname = 'unbundle\n%s' % util.hidepassword(url)
5256 with repo.transaction(txnname) as tr:
5256 with repo.transaction(txnname) as tr:
5257 op = bundle2.applybundle(repo, gen, tr, source='unbundle',
5257 op = bundle2.applybundle(repo, gen, tr, source='unbundle',
5258 url=url)
5258 url=url)
5259 except error.BundleUnknownFeatureError as exc:
5259 except error.BundleUnknownFeatureError as exc:
5260 raise error.Abort(
5260 raise error.Abort(
5261 _('%s: unknown bundle feature, %s') % (fname, exc),
5261 _('%s: unknown bundle feature, %s') % (fname, exc),
5262 hint=_("see https://mercurial-scm.org/"
5262 hint=_("see https://mercurial-scm.org/"
5263 "wiki/BundleFeature for more "
5263 "wiki/BundleFeature for more "
5264 "information"))
5264 "information"))
5265 modheads = bundle2.combinechangegroupresults(op)
5265 modheads = bundle2.combinechangegroupresults(op)
5266
5266
5267 return postincoming(ui, repo, modheads, opts.get(r'update'), None, None)
5267 return postincoming(ui, repo, modheads, opts.get(r'update'), None, None)
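# Illustrative usage (the bundle file name is invented):
#     hg bundle --base null changes.hg    # create a bundle in another repo
#     hg unbundle -u changes.hg           # apply it here, updating if needed
# Packed (stream clone) bundles are rejected by the check above and must go
# through "hg debugapplystreamclonebundle" instead.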
5268
5268
5269 @command('^update|up|checkout|co',
5269 @command('^update|up|checkout|co',
5270 [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
5270 [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
5271 ('c', 'check', None, _('require clean working directory')),
5271 ('c', 'check', None, _('require clean working directory')),
5272 ('m', 'merge', None, _('merge uncommitted changes')),
5272 ('m', 'merge', None, _('merge uncommitted changes')),
5273 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
5273 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
5274 ('r', 'rev', '', _('revision'), _('REV'))
5274 ('r', 'rev', '', _('revision'), _('REV'))
5275 ] + mergetoolopts,
5275 ] + mergetoolopts,
5276 _('[-C|-c|-m] [-d DATE] [[-r] REV]'))
5276 _('[-C|-c|-m] [-d DATE] [[-r] REV]'))
5277 def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False,
5277 def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False,
5278 merge=None, tool=None):
5278 merge=None, tool=None):
5279 """update working directory (or switch revisions)
5279 """update working directory (or switch revisions)
5280
5280
5281 Update the repository's working directory to the specified
5281 Update the repository's working directory to the specified
5282 changeset. If no changeset is specified, update to the tip of the
5282 changeset. If no changeset is specified, update to the tip of the
5283 current named branch and move the active bookmark (see :hg:`help
5283 current named branch and move the active bookmark (see :hg:`help
5284 bookmarks`).
5284 bookmarks`).
5285
5285
5286 Update sets the working directory's parent revision to the specified
5286 Update sets the working directory's parent revision to the specified
5287 changeset (see :hg:`help parents`).
5287 changeset (see :hg:`help parents`).
5288
5288
5289 If the changeset is not a descendant or ancestor of the working
5289 If the changeset is not a descendant or ancestor of the working
5290 directory's parent and there are uncommitted changes, the update is
5290 directory's parent and there are uncommitted changes, the update is
5291 aborted. With the -c/--check option, the working directory is checked
5291 aborted. With the -c/--check option, the working directory is checked
5292 for uncommitted changes; if none are found, the working directory is
5292 for uncommitted changes; if none are found, the working directory is
5293 updated to the specified changeset.
5293 updated to the specified changeset.
5294
5294
5295 .. container:: verbose
5295 .. container:: verbose
5296
5296
5297 The -C/--clean, -c/--check, and -m/--merge options control what
5297 The -C/--clean, -c/--check, and -m/--merge options control what
5298 happens if the working directory contains uncommitted changes.
5298 happens if the working directory contains uncommitted changes.
5299 At most one of them can be specified.
5299 At most one of them can be specified.
5300
5300
5301 1. If no option is specified, and if
5301 1. If no option is specified, and if
5302 the requested changeset is an ancestor or descendant of
5302 the requested changeset is an ancestor or descendant of
5303 the working directory's parent, the uncommitted changes
5303 the working directory's parent, the uncommitted changes
5304 are merged into the requested changeset and the merged
5304 are merged into the requested changeset and the merged
5305 result is left uncommitted. If the requested changeset is
5305 result is left uncommitted. If the requested changeset is
5306 not an ancestor or descendant (that is, it is on another
5306 not an ancestor or descendant (that is, it is on another
5307 branch), the update is aborted and the uncommitted changes
5307 branch), the update is aborted and the uncommitted changes
5308 are preserved.
5308 are preserved.
5309
5309
5310 2. With the -m/--merge option, the update is allowed even if the
5310 2. With the -m/--merge option, the update is allowed even if the
5311 requested changeset is not an ancestor or descendant of
5311 requested changeset is not an ancestor or descendant of
5312 the working directory's parent.
5312 the working directory's parent.
5313
5313
5314 3. With the -c/--check option, the update is aborted and the
5314 3. With the -c/--check option, the update is aborted and the
5315 uncommitted changes are preserved.
5315 uncommitted changes are preserved.
5316
5316
5317 4. With the -C/--clean option, uncommitted changes are discarded and
5317 4. With the -C/--clean option, uncommitted changes are discarded and
5318 the working directory is updated to the requested changeset.
5318 the working directory is updated to the requested changeset.
5319
5319
5320 To cancel an uncommitted merge (and lose your changes), use
5320 To cancel an uncommitted merge (and lose your changes), use
5321 :hg:`update --clean .`.
5321 :hg:`update --clean .`.
5322
5322
5323 Use null as the changeset to remove the working directory (like
5323 Use null as the changeset to remove the working directory (like
5324 :hg:`clone -U`).
5324 :hg:`clone -U`).
5325
5325
5326 If you want to revert just one file to an older revision, use
5326 If you want to revert just one file to an older revision, use
5327 :hg:`revert [-r REV] NAME`.
5327 :hg:`revert [-r REV] NAME`.
5328
5328
5329 See :hg:`help dates` for a list of formats valid for -d/--date.
5329 See :hg:`help dates` for a list of formats valid for -d/--date.
5330
5330
5331 Returns 0 on success, 1 if there are unresolved files.
5331 Returns 0 on success, 1 if there are unresolved files.
5332 """
5332 """
5333 if rev and node:
5333 if rev and node:
5334 raise error.Abort(_("please specify just one revision"))
5334 raise error.Abort(_("please specify just one revision"))
5335
5335
5336 if ui.configbool('commands', 'update.requiredest'):
5336 if ui.configbool('commands', 'update.requiredest'):
5337 if not node and not rev and not date:
5337 if not node and not rev and not date:
5338 raise error.Abort(_('you must specify a destination'),
5338 raise error.Abort(_('you must specify a destination'),
5339 hint=_('for example: hg update ".::"'))
5339 hint=_('for example: hg update ".::"'))
5340
5340
5341 if rev is None or rev == '':
5341 if rev is None or rev == '':
5342 rev = node
5342 rev = node
5343
5343
5344 if date and rev is not None:
5344 if date and rev is not None:
5345 raise error.Abort(_("you can't specify a revision and a date"))
5345 raise error.Abort(_("you can't specify a revision and a date"))
5346
5346
5347 if len([x for x in (clean, check, merge) if x]) > 1:
5347 if len([x for x in (clean, check, merge) if x]) > 1:
5348 raise error.Abort(_("can only specify one of -C/--clean, -c/--check, "
5348 raise error.Abort(_("can only specify one of -C/--clean, -c/--check, "
5349 "or -m/merge"))
5349 "or -m/merge"))
5350
5350
5351 updatecheck = None
5351 updatecheck = None
5352 if check:
5352 if check:
5353 updatecheck = 'abort'
5353 updatecheck = 'abort'
5354 elif merge:
5354 elif merge:
5355 updatecheck = 'none'
5355 updatecheck = 'none'
5356
5356
5357 with repo.wlock():
5357 with repo.wlock():
5358 cmdutil.clearunfinished(repo)
5358 cmdutil.clearunfinished(repo)
5359
5359
5360 if date:
5360 if date:
5361 rev = cmdutil.finddate(ui, repo, date)
5361 rev = cmdutil.finddate(ui, repo, date)
5362
5362
5363 # if we defined a bookmark, we have to remember the original name
5363 # if we defined a bookmark, we have to remember the original name
5364 brev = rev
5364 brev = rev
5365 rev = scmutil.revsingle(repo, rev, rev).rev()
5365 rev = scmutil.revsingle(repo, rev, rev).rev()
5366
5366
5367 repo.ui.setconfig('ui', 'forcemerge', tool, 'update')
5367 repo.ui.setconfig('ui', 'forcemerge', tool, 'update')
5368
5368
5369 return hg.updatetotally(ui, repo, rev, brev, clean=clean,
5369 return hg.updatetotally(ui, repo, rev, brev, clean=clean,
5370 updatecheck=updatecheck)
5370 updatecheck=updatecheck)
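# Illustrative only: how the mutually exclusive flags above translate into
# the arguments passed to hg.updatetotally():
#     hg update -c REV    -> updatecheck='abort'  (refuse if wd is dirty)
#     hg update -m REV    -> updatecheck='none'   (merge local changes)
#     hg update -C REV    -> clean=True           (discard local changes)
#     hg update REV       -> updatecheck=None     (default linear-update behaviour)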
5371
5371
5372 @command('verify', [])
5372 @command('verify', [])
5373 def verify(ui, repo):
5373 def verify(ui, repo):
5374 """verify the integrity of the repository
5374 """verify the integrity of the repository
5375
5375
5376 Verify the integrity of the current repository.
5376 Verify the integrity of the current repository.
5377
5377
5378 This will perform an extensive check of the repository's
5378 This will perform an extensive check of the repository's
5379 integrity, validating the hashes and checksums of each entry in
5379 integrity, validating the hashes and checksums of each entry in
5380 the changelog, manifest, and tracked files, as well as the
5380 the changelog, manifest, and tracked files, as well as the
5381 integrity of their crosslinks and indices.
5381 integrity of their crosslinks and indices.
5382
5382
5383 Please see https://mercurial-scm.org/wiki/RepositoryCorruption
5383 Please see https://mercurial-scm.org/wiki/RepositoryCorruption
5384 for more information about recovery from corruption of the
5384 for more information about recovery from corruption of the
5385 repository.
5385 repository.
5386
5386
5387 Returns 0 on success, 1 if errors are encountered.
5387 Returns 0 on success, 1 if errors are encountered.
5388 """
5388 """
5389 return hg.verify(repo)
5389 return hg.verify(repo)
5390
5390
5391 @command('version', [] + formatteropts, norepo=True)
5391 @command('version', [] + formatteropts, norepo=True)
5392 def version_(ui, **opts):
5392 def version_(ui, **opts):
5393 """output version and copyright information"""
5393 """output version and copyright information"""
5394 opts = pycompat.byteskwargs(opts)
5394 opts = pycompat.byteskwargs(opts)
5395 if ui.verbose:
5395 if ui.verbose:
5396 ui.pager('version')
5396 ui.pager('version')
5397 fm = ui.formatter("version", opts)
5397 fm = ui.formatter("version", opts)
5398 fm.startitem()
5398 fm.startitem()
5399 fm.write("ver", _("Mercurial Distributed SCM (version %s)\n"),
5399 fm.write("ver", _("Mercurial Distributed SCM (version %s)\n"),
5400 util.version())
5400 util.version())
5401 license = _(
5401 license = _(
5402 "(see https://mercurial-scm.org for more information)\n"
5402 "(see https://mercurial-scm.org for more information)\n"
5403 "\nCopyright (C) 2005-2017 Matt Mackall and others\n"
5403 "\nCopyright (C) 2005-2017 Matt Mackall and others\n"
5404 "This is free software; see the source for copying conditions. "
5404 "This is free software; see the source for copying conditions. "
5405 "There is NO\nwarranty; "
5405 "There is NO\nwarranty; "
5406 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
5406 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
5407 )
5407 )
5408 if not ui.quiet:
5408 if not ui.quiet:
5409 fm.plain(license)
5409 fm.plain(license)
5410
5410
5411 if ui.verbose:
5411 if ui.verbose:
5412 fm.plain(_("\nEnabled extensions:\n\n"))
5412 fm.plain(_("\nEnabled extensions:\n\n"))
5413 # format names and versions into columns
5413 # format names and versions into columns
5414 names = []
5414 names = []
5415 vers = []
5415 vers = []
5416 isinternals = []
5416 isinternals = []
5417 for name, module in extensions.extensions():
5417 for name, module in extensions.extensions():
5418 names.append(name)
5418 names.append(name)
5419 vers.append(extensions.moduleversion(module) or None)
5419 vers.append(extensions.moduleversion(module) or None)
5420 isinternals.append(extensions.ismoduleinternal(module))
5420 isinternals.append(extensions.ismoduleinternal(module))
5421 fn = fm.nested("extensions")
5421 fn = fm.nested("extensions")
5422 if names:
5422 if names:
5423 namefmt = " %%-%ds " % max(len(n) for n in names)
5423 namefmt = " %%-%ds " % max(len(n) for n in names)
5424 places = [_("external"), _("internal")]
5424 places = [_("external"), _("internal")]
5425 for n, v, p in zip(names, vers, isinternals):
5425 for n, v, p in zip(names, vers, isinternals):
5426 fn.startitem()
5426 fn.startitem()
5427 fn.condwrite(ui.verbose, "name", namefmt, n)
5427 fn.condwrite(ui.verbose, "name", namefmt, n)
5428 if ui.verbose:
5428 if ui.verbose:
5429 fn.plain("%s " % places[p])
5429 fn.plain("%s " % places[p])
5430 fn.data(bundled=p)
5430 fn.data(bundled=p)
5431 fn.condwrite(ui.verbose and v, "ver", "%s", v)
5431 fn.condwrite(ui.verbose and v, "ver", "%s", v)
5432 if ui.verbose:
5432 if ui.verbose:
5433 fn.plain("\n")
5433 fn.plain("\n")
5434 fn.end()
5434 fn.end()
5435 fm.end()
5435 fm.end()
5436
5436
5437 def loadcmdtable(ui, name, cmdtable):
5437 def loadcmdtable(ui, name, cmdtable):
5438 """Load command functions from specified cmdtable
5438 """Load command functions from specified cmdtable
5439 """
5439 """
5440 overrides = [cmd for cmd in cmdtable if cmd in table]
5440 overrides = [cmd for cmd in cmdtable if cmd in table]
5441 if overrides:
5441 if overrides:
5442 ui.warn(_("extension '%s' overrides commands: %s\n")
5442 ui.warn(_("extension '%s' overrides commands: %s\n")
5443 % (name, " ".join(overrides)))
5443 % (name, " ".join(overrides)))
5444 table.update(cmdtable)
5444 table.update(cmdtable)
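# A minimal sketch (hypothetical extension, not part of commands.py) of the
# command table that loadcmdtable() above would merge in; the command name
# 'hello' and the function below are invented for illustration:
#
#     from mercurial import registrar
#     cmdtable = {}
#     command = registrar.command(cmdtable)
#
#     @command('hello', [], 'hg hello')
#     def hello(ui, repo, **opts):
#         ui.write('hello from an example extension\n')
#
# Any name already present in the built-in table triggers the
# "extension '%s' overrides commands" warning emitted above.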
@@ -1,2330 +1,2330 b''
1 # context.py - changeset and file context objects for mercurial
1 # context.py - changeset and file context objects for mercurial
2 #
2 #
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import os
11 import os
12 import re
12 import re
13 import stat
13 import stat
14
14
15 from .i18n import _
15 from .i18n import _
16 from .node import (
16 from .node import (
17 addednodeid,
17 addednodeid,
18 bin,
18 bin,
19 hex,
19 hex,
20 modifiednodeid,
20 modifiednodeid,
21 nullid,
21 nullid,
22 nullrev,
22 nullrev,
23 short,
23 short,
24 wdirid,
24 wdirid,
25 wdirnodes,
25 wdirnodes,
26 wdirrev,
26 wdirrev,
27 )
27 )
28 from . import (
28 from . import (
29 encoding,
29 encoding,
30 error,
30 error,
31 fileset,
31 fileset,
32 match as matchmod,
32 match as matchmod,
33 mdiff,
33 mdiff,
34 obsolete as obsmod,
34 obsolete as obsmod,
35 patch,
35 patch,
36 pathutil,
36 pathutil,
37 phases,
37 phases,
38 pycompat,
38 pycompat,
39 repoview,
39 repoview,
40 revlog,
40 revlog,
41 scmutil,
41 scmutil,
42 sparse,
42 sparse,
43 subrepo,
43 subrepo,
44 util,
44 util,
45 )
45 )
46
46
47 propertycache = util.propertycache
47 propertycache = util.propertycache
48
48
49 nonascii = re.compile(r'[^\x21-\x7f]').search
49 nonascii = re.compile(r'[^\x21-\x7f]').search
50
50
51 class basectx(object):
51 class basectx(object):
52 """A basectx object represents the common logic for its children:
52 """A basectx object represents the common logic for its children:
53 changectx: read-only context that is already present in the repo,
53 changectx: read-only context that is already present in the repo,
54 workingctx: a context that represents the working directory and can
54 workingctx: a context that represents the working directory and can
55 be committed,
55 be committed,
56 memctx: a context that represents changes in-memory and can also
56 memctx: a context that represents changes in-memory and can also
57 be committed."""
57 be committed."""
58 def __new__(cls, repo, changeid='', *args, **kwargs):
58 def __new__(cls, repo, changeid='', *args, **kwargs):
59 if isinstance(changeid, basectx):
59 if isinstance(changeid, basectx):
60 return changeid
60 return changeid
61
61
62 o = super(basectx, cls).__new__(cls)
62 o = super(basectx, cls).__new__(cls)
63
63
64 o._repo = repo
64 o._repo = repo
65 o._rev = nullrev
65 o._rev = nullrev
66 o._node = nullid
66 o._node = nullid
67
67
68 return o
68 return o
69
69
70 def __bytes__(self):
70 def __bytes__(self):
71 return short(self.node())
71 return short(self.node())
72
72
73 __str__ = encoding.strmethod(__bytes__)
73 __str__ = encoding.strmethod(__bytes__)
74
74
75 def __int__(self):
75 def __int__(self):
76 return self.rev()
76 return self.rev()
77
77
78 def __repr__(self):
78 def __repr__(self):
79 return r"<%s %s>" % (type(self).__name__, str(self))
79 return r"<%s %s>" % (type(self).__name__, str(self))
80
80
81 def __eq__(self, other):
81 def __eq__(self, other):
82 try:
82 try:
83 return type(self) == type(other) and self._rev == other._rev
83 return type(self) == type(other) and self._rev == other._rev
84 except AttributeError:
84 except AttributeError:
85 return False
85 return False
86
86
87 def __ne__(self, other):
87 def __ne__(self, other):
88 return not (self == other)
88 return not (self == other)
89
89
90 def __contains__(self, key):
90 def __contains__(self, key):
91 return key in self._manifest
91 return key in self._manifest
92
92
93 def __getitem__(self, key):
93 def __getitem__(self, key):
94 return self.filectx(key)
94 return self.filectx(key)
95
95
96 def __iter__(self):
96 def __iter__(self):
97 return iter(self._manifest)
97 return iter(self._manifest)
98
98
99 def _buildstatusmanifest(self, status):
99 def _buildstatusmanifest(self, status):
100 """Builds a manifest that includes the given status results, if this is
100 """Builds a manifest that includes the given status results, if this is
101 a working copy context. For non-working copy contexts, it just returns
101 a working copy context. For non-working copy contexts, it just returns
102 the normal manifest."""
102 the normal manifest."""
103 return self.manifest()
103 return self.manifest()
104
104
105 def _matchstatus(self, other, match):
105 def _matchstatus(self, other, match):
106 """return match.always if match is none
106 """return match.always if match is none
107
107
108 This internal method provides a way for child objects to override the
108 This internal method provides a way for child objects to override the
109 match operator.
109 match operator.
110 """
110 """
111 return match or matchmod.always(self._repo.root, self._repo.getcwd())
111 return match or matchmod.always(self._repo.root, self._repo.getcwd())
112
112
113 def _buildstatus(self, other, s, match, listignored, listclean,
113 def _buildstatus(self, other, s, match, listignored, listclean,
114 listunknown):
114 listunknown):
115 """build a status with respect to another context"""
115 """build a status with respect to another context"""
116 # Load earliest manifest first for caching reasons. More specifically,
116 # Load earliest manifest first for caching reasons. More specifically,
117 # if you have revisions 1000 and 1001, 1001 is probably stored as a
117 # if you have revisions 1000 and 1001, 1001 is probably stored as a
118 # delta against 1000. Thus, if you read 1000 first, we'll reconstruct
118 # delta against 1000. Thus, if you read 1000 first, we'll reconstruct
119 # 1000 and cache it so that when you read 1001, we just need to apply a
119 # 1000 and cache it so that when you read 1001, we just need to apply a
120 # delta to what's in the cache. So that's one full reconstruction + one
120 # delta to what's in the cache. So that's one full reconstruction + one
121 # delta application.
121 # delta application.
122 mf2 = None
122 mf2 = None
123 if self.rev() is not None and self.rev() < other.rev():
123 if self.rev() is not None and self.rev() < other.rev():
124 mf2 = self._buildstatusmanifest(s)
124 mf2 = self._buildstatusmanifest(s)
125 mf1 = other._buildstatusmanifest(s)
125 mf1 = other._buildstatusmanifest(s)
126 if mf2 is None:
126 if mf2 is None:
127 mf2 = self._buildstatusmanifest(s)
127 mf2 = self._buildstatusmanifest(s)
128
128
129 modified, added = [], []
129 modified, added = [], []
130 removed = []
130 removed = []
131 clean = []
131 clean = []
132 deleted, unknown, ignored = s.deleted, s.unknown, s.ignored
132 deleted, unknown, ignored = s.deleted, s.unknown, s.ignored
133 deletedset = set(deleted)
133 deletedset = set(deleted)
134 d = mf1.diff(mf2, match=match, clean=listclean)
134 d = mf1.diff(mf2, match=match, clean=listclean)
135 for fn, value in d.iteritems():
135 for fn, value in d.iteritems():
136 if fn in deletedset:
136 if fn in deletedset:
137 continue
137 continue
138 if value is None:
138 if value is None:
139 clean.append(fn)
139 clean.append(fn)
140 continue
140 continue
141 (node1, flag1), (node2, flag2) = value
141 (node1, flag1), (node2, flag2) = value
142 if node1 is None:
142 if node1 is None:
143 added.append(fn)
143 added.append(fn)
144 elif node2 is None:
144 elif node2 is None:
145 removed.append(fn)
145 removed.append(fn)
146 elif flag1 != flag2:
146 elif flag1 != flag2:
147 modified.append(fn)
147 modified.append(fn)
148 elif node2 not in wdirnodes:
148 elif node2 not in wdirnodes:
149 # When comparing files between two commits, we save time by
149 # When comparing files between two commits, we save time by
150 # not comparing the file contents when the nodeids differ.
150 # not comparing the file contents when the nodeids differ.
151 # Note that this means we incorrectly report a reverted change
151 # Note that this means we incorrectly report a reverted change
152 # to a file as a modification.
152 # to a file as a modification.
153 modified.append(fn)
153 modified.append(fn)
154 elif self[fn].cmp(other[fn]):
154 elif self[fn].cmp(other[fn]):
155 modified.append(fn)
155 modified.append(fn)
156 else:
156 else:
157 clean.append(fn)
157 clean.append(fn)
158
158
159 if removed:
159 if removed:
160 # need to filter files if they are already reported as removed
160 # need to filter files if they are already reported as removed
161 unknown = [fn for fn in unknown if fn not in mf1 and
161 unknown = [fn for fn in unknown if fn not in mf1 and
162 (not match or match(fn))]
162 (not match or match(fn))]
163 ignored = [fn for fn in ignored if fn not in mf1 and
163 ignored = [fn for fn in ignored if fn not in mf1 and
164 (not match or match(fn))]
164 (not match or match(fn))]
165 # if they're deleted, don't report them as removed
165 # if they're deleted, don't report them as removed
166 removed = [fn for fn in removed if fn not in deletedset]
166 removed = [fn for fn in removed if fn not in deletedset]
167
167
168 return scmutil.status(modified, added, removed, deleted, unknown,
168 return scmutil.status(modified, added, removed, deleted, unknown,
169 ignored, clean)
169 ignored, clean)
170
170
171 @propertycache
171 @propertycache
172 def substate(self):
172 def substate(self):
173 return subrepo.state(self, self._repo.ui)
173 return subrepo.state(self, self._repo.ui)
174
174
175 def subrev(self, subpath):
175 def subrev(self, subpath):
176 return self.substate[subpath][1]
176 return self.substate[subpath][1]
177
177
178 def rev(self):
178 def rev(self):
179 return self._rev
179 return self._rev
180 def node(self):
180 def node(self):
181 return self._node
181 return self._node
182 def hex(self):
182 def hex(self):
183 return hex(self.node())
183 return hex(self.node())
184 def manifest(self):
184 def manifest(self):
185 return self._manifest
185 return self._manifest
186 def manifestctx(self):
186 def manifestctx(self):
187 return self._manifestctx
187 return self._manifestctx
188 def repo(self):
188 def repo(self):
189 return self._repo
189 return self._repo
190 def phasestr(self):
190 def phasestr(self):
191 return phases.phasenames[self.phase()]
191 return phases.phasenames[self.phase()]
192 def mutable(self):
192 def mutable(self):
193 return self.phase() > phases.public
193 return self.phase() > phases.public
194
194
195 def getfileset(self, expr):
195 def getfileset(self, expr):
196 return fileset.getfileset(self, expr)
196 return fileset.getfileset(self, expr)
197
197
198 def obsolete(self):
198 def obsolete(self):
199 """True if the changeset is obsolete"""
199 """True if the changeset is obsolete"""
200 return self.rev() in obsmod.getrevs(self._repo, 'obsolete')
200 return self.rev() in obsmod.getrevs(self._repo, 'obsolete')
201
201
202 def extinct(self):
202 def extinct(self):
203 """True if the changeset is extinct"""
203 """True if the changeset is extinct"""
204 return self.rev() in obsmod.getrevs(self._repo, 'extinct')
204 return self.rev() in obsmod.getrevs(self._repo, 'extinct')
205
205
206 def unstable(self):
206 def unstable(self):
207 """True if the changeset is not obsolete but it's ancestor are"""
207 """True if the changeset is not obsolete but it's ancestor are"""
208 return self.rev() in obsmod.getrevs(self._repo, 'unstable')
208 return self.rev() in obsmod.getrevs(self._repo, 'unstable')
209
209
210 def bumped(self):
210 def bumped(self):
211 """True if the changeset try to be a successor of a public changeset
211 """True if the changeset try to be a successor of a public changeset
212
212
213 Only non-public and non-obsolete changesets may be bumped.
213 Only non-public and non-obsolete changesets may be bumped.
214 """
214 """
215 return self.rev() in obsmod.getrevs(self._repo, 'bumped')
215 return self.rev() in obsmod.getrevs(self._repo, 'bumped')
216
216
217 def divergent(self):
217 def divergent(self):
218 """Is a successors of a changeset with multiple possible successors set
218 """Is a successors of a changeset with multiple possible successors set
219
219
220 Only non-public and non-obsolete changesets may be divergent.
220 Only non-public and non-obsolete changesets may be divergent.
221 """
221 """
222 return self.rev() in obsmod.getrevs(self._repo, 'divergent')
222 return self.rev() in obsmod.getrevs(self._repo, 'divergent')
223
223
224 def troubled(self):
224 def troubled(self):
225 """True if the changeset is either unstable, bumped or divergent"""
225 """True if the changeset is either unstable, bumped or divergent"""
226 return self.unstable() or self.bumped() or self.divergent()
226 return self.unstable() or self.bumped() or self.divergent()
227
227
228 def troubles(self):
228 def troubles(self):
229 """return the list of troubles affecting this changesets.
229 """return the list of troubles affecting this changesets.
230
230
231 Troubles are returned as strings. Possible values are:
231 Troubles are returned as strings. Possible values are:
232 - orphan,
232 - orphan,
233 - bumped,
233 - bumped,
234 - divergent.
234 - content-divergent.
235 """
235 """
236 troubles = []
236 troubles = []
237 if self.unstable():
237 if self.unstable():
238 troubles.append('orphan')
238 troubles.append('orphan')
239 if self.bumped():
239 if self.bumped():
240 troubles.append('bumped')
240 troubles.append('bumped')
241 if self.divergent():
241 if self.divergent():
242 troubles.append('divergent')
242 troubles.append('content-divergent')
243 return troubles
243 return troubles
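# Illustrative only: after this rename, a changeset that is both an orphan
# and content-divergent would report
#     ctx.troubles()  ->  ['orphan', 'content-divergent']
# 'divergent' is no longer produced, so a caller matching on the old string
# would presumably need the same rename.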
244
244
245 def parents(self):
245 def parents(self):
246 """return contexts for each parent changeset"""
246 """return contexts for each parent changeset"""
247 return self._parents
247 return self._parents
248
248
249 def p1(self):
249 def p1(self):
250 return self._parents[0]
250 return self._parents[0]
251
251
252 def p2(self):
252 def p2(self):
253 parents = self._parents
253 parents = self._parents
254 if len(parents) == 2:
254 if len(parents) == 2:
255 return parents[1]
255 return parents[1]
256 return changectx(self._repo, nullrev)
256 return changectx(self._repo, nullrev)
257
257
258 def _fileinfo(self, path):
258 def _fileinfo(self, path):
259 if r'_manifest' in self.__dict__:
259 if r'_manifest' in self.__dict__:
260 try:
260 try:
261 return self._manifest[path], self._manifest.flags(path)
261 return self._manifest[path], self._manifest.flags(path)
262 except KeyError:
262 except KeyError:
263 raise error.ManifestLookupError(self._node, path,
263 raise error.ManifestLookupError(self._node, path,
264 _('not found in manifest'))
264 _('not found in manifest'))
265 if r'_manifestdelta' in self.__dict__ or path in self.files():
265 if r'_manifestdelta' in self.__dict__ or path in self.files():
266 if path in self._manifestdelta:
266 if path in self._manifestdelta:
267 return (self._manifestdelta[path],
267 return (self._manifestdelta[path],
268 self._manifestdelta.flags(path))
268 self._manifestdelta.flags(path))
269 mfl = self._repo.manifestlog
269 mfl = self._repo.manifestlog
270 try:
270 try:
271 node, flag = mfl[self._changeset.manifest].find(path)
271 node, flag = mfl[self._changeset.manifest].find(path)
272 except KeyError:
272 except KeyError:
273 raise error.ManifestLookupError(self._node, path,
273 raise error.ManifestLookupError(self._node, path,
274 _('not found in manifest'))
274 _('not found in manifest'))
275
275
276 return node, flag
276 return node, flag
277
277
278 def filenode(self, path):
278 def filenode(self, path):
279 return self._fileinfo(path)[0]
279 return self._fileinfo(path)[0]
280
280
281 def flags(self, path):
281 def flags(self, path):
282 try:
282 try:
283 return self._fileinfo(path)[1]
283 return self._fileinfo(path)[1]
284 except error.LookupError:
284 except error.LookupError:
285 return ''
285 return ''
286
286
287 def sub(self, path, allowcreate=True):
287 def sub(self, path, allowcreate=True):
288 '''return a subrepo for the stored revision of path, never wdir()'''
288 '''return a subrepo for the stored revision of path, never wdir()'''
289 return subrepo.subrepo(self, path, allowcreate=allowcreate)
289 return subrepo.subrepo(self, path, allowcreate=allowcreate)
290
290
291 def nullsub(self, path, pctx):
291 def nullsub(self, path, pctx):
292 return subrepo.nullsubrepo(self, path, pctx)
292 return subrepo.nullsubrepo(self, path, pctx)
293
293
294 def workingsub(self, path):
294 def workingsub(self, path):
295 '''return a subrepo for the stored revision, or wdir if this is a wdir
295 '''return a subrepo for the stored revision, or wdir if this is a wdir
296 context.
296 context.
297 '''
297 '''
298 return subrepo.subrepo(self, path, allowwdir=True)
298 return subrepo.subrepo(self, path, allowwdir=True)
299
299
300 def match(self, pats=None, include=None, exclude=None, default='glob',
300 def match(self, pats=None, include=None, exclude=None, default='glob',
301 listsubrepos=False, badfn=None):
301 listsubrepos=False, badfn=None):
302 r = self._repo
302 r = self._repo
303 return matchmod.match(r.root, r.getcwd(), pats,
303 return matchmod.match(r.root, r.getcwd(), pats,
304 include, exclude, default,
304 include, exclude, default,
305 auditor=r.nofsauditor, ctx=self,
305 auditor=r.nofsauditor, ctx=self,
306 listsubrepos=listsubrepos, badfn=badfn)
306 listsubrepos=listsubrepos, badfn=badfn)
307
307
308 def diff(self, ctx2=None, match=None, **opts):
308 def diff(self, ctx2=None, match=None, **opts):
309 """Returns a diff generator for the given contexts and matcher"""
309 """Returns a diff generator for the given contexts and matcher"""
310 if ctx2 is None:
310 if ctx2 is None:
311 ctx2 = self.p1()
311 ctx2 = self.p1()
312 if ctx2 is not None:
312 if ctx2 is not None:
313 ctx2 = self._repo[ctx2]
313 ctx2 = self._repo[ctx2]
314 diffopts = patch.diffopts(self._repo.ui, opts)
314 diffopts = patch.diffopts(self._repo.ui, opts)
315 return patch.diff(self._repo, ctx2, self, match=match, opts=diffopts)
315 return patch.diff(self._repo, ctx2, self, match=match, opts=diffopts)
316
316
317 def dirs(self):
317 def dirs(self):
318 return self._manifest.dirs()
318 return self._manifest.dirs()
319
319
320 def hasdir(self, dir):
320 def hasdir(self, dir):
321 return self._manifest.hasdir(dir)
321 return self._manifest.hasdir(dir)
322
322
323 def status(self, other=None, match=None, listignored=False,
323 def status(self, other=None, match=None, listignored=False,
324 listclean=False, listunknown=False, listsubrepos=False):
324 listclean=False, listunknown=False, listsubrepos=False):
325 """return status of files between two nodes or node and working
325 """return status of files between two nodes or node and working
326 directory.
326 directory.
327
327
328 If other is None, compare this node with working directory.
328 If other is None, compare this node with working directory.
329
329
330 returns (modified, added, removed, deleted, unknown, ignored, clean)
330 returns (modified, added, removed, deleted, unknown, ignored, clean)
331 """
331 """
332
332
333 ctx1 = self
333 ctx1 = self
334 ctx2 = self._repo[other]
334 ctx2 = self._repo[other]
335
335
336 # This next code block is, admittedly, fragile logic that tests for
336 # This next code block is, admittedly, fragile logic that tests for
337 # reversing the contexts and wouldn't need to exist if it weren't for
337 # reversing the contexts and wouldn't need to exist if it weren't for
338 # the fast (and common) code path of comparing the working directory
338 # the fast (and common) code path of comparing the working directory
339 # with its first parent.
339 # with its first parent.
340 #
340 #
341 # What we're aiming for here is the ability to call:
341 # What we're aiming for here is the ability to call:
342 #
342 #
343 # workingctx.status(parentctx)
343 # workingctx.status(parentctx)
344 #
344 #
345 # If we always built the manifest for each context and compared those,
345 # If we always built the manifest for each context and compared those,
346 # then we'd be done. But the special case of the above call means we
346 # then we'd be done. But the special case of the above call means we
347 # just copy the manifest of the parent.
347 # just copy the manifest of the parent.
348 reversed = False
348 reversed = False
349 if (not isinstance(ctx1, changectx)
349 if (not isinstance(ctx1, changectx)
350 and isinstance(ctx2, changectx)):
350 and isinstance(ctx2, changectx)):
351 reversed = True
351 reversed = True
352 ctx1, ctx2 = ctx2, ctx1
352 ctx1, ctx2 = ctx2, ctx1
353
353
354 match = ctx2._matchstatus(ctx1, match)
354 match = ctx2._matchstatus(ctx1, match)
355 r = scmutil.status([], [], [], [], [], [], [])
355 r = scmutil.status([], [], [], [], [], [], [])
356 r = ctx2._buildstatus(ctx1, r, match, listignored, listclean,
356 r = ctx2._buildstatus(ctx1, r, match, listignored, listclean,
357 listunknown)
357 listunknown)
358
358
359 if reversed:
359 if reversed:
360 # Reverse added and removed. Clear deleted, unknown and ignored as
360 # Reverse added and removed. Clear deleted, unknown and ignored as
361 # these make no sense to reverse.
361 # these make no sense to reverse.
362 r = scmutil.status(r.modified, r.removed, r.added, [], [], [],
362 r = scmutil.status(r.modified, r.removed, r.added, [], [], [],
363 r.clean)
363 r.clean)
364
364
365 if listsubrepos:
365 if listsubrepos:
366 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
366 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
367 try:
367 try:
368 rev2 = ctx2.subrev(subpath)
368 rev2 = ctx2.subrev(subpath)
369 except KeyError:
369 except KeyError:
370 # A subrepo that existed in node1 was deleted between
370 # A subrepo that existed in node1 was deleted between
371 # node1 and node2 (inclusive). Thus, ctx2's substate
371 # node1 and node2 (inclusive). Thus, ctx2's substate
372 # won't contain that subpath. The best we can do is ignore it.
372 # won't contain that subpath. The best we can do is ignore it.
373 rev2 = None
373 rev2 = None
374 submatch = matchmod.subdirmatcher(subpath, match)
374 submatch = matchmod.subdirmatcher(subpath, match)
375 s = sub.status(rev2, match=submatch, ignored=listignored,
375 s = sub.status(rev2, match=submatch, ignored=listignored,
376 clean=listclean, unknown=listunknown,
376 clean=listclean, unknown=listunknown,
377 listsubrepos=True)
377 listsubrepos=True)
378 for rfiles, sfiles in zip(r, s):
378 for rfiles, sfiles in zip(r, s):
379 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
379 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
380
380
381 for l in r:
381 for l in r:
382 l.sort()
382 l.sort()
383
383
384 return r
384 return r
385
385
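# A minimal usage sketch for status() above (illustrative only; `repo` is
# assumed to be an already-opened localrepository and `_example_status` is a
# hypothetical helper, not part of this module).  It exercises the
# "workingctx.status(parentctx)" call discussed in the comments and shows how
# the scmutil.status result unpacks into the seven documented lists.
def _example_status(repo):
    wctx = repo[None]              # working directory context
    st = wctx.status(repo['.'])    # working directory vs. its first parent
    # st behaves like (modified, added, removed, deleted, unknown, ignored,
    # clean); unknown/ignored/clean stay empty unless explicitly requested.
    return st.modified, st.added, st.removed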
386 def _filterederror(repo, changeid):
386 def _filterederror(repo, changeid):
387 """build an exception to be raised about a filtered changeid
387 """build an exception to be raised about a filtered changeid
388
388
389 This is extracted into a function to help extensions (e.g. evolve)
389 This is extracted into a function to help extensions (e.g. evolve)
390 experiment with various message variants."""
390 experiment with various message variants."""
391 if repo.filtername.startswith('visible'):
391 if repo.filtername.startswith('visible'):
392 msg = _("hidden revision '%s'") % changeid
392 msg = _("hidden revision '%s'") % changeid
393 hint = _('use --hidden to access hidden revisions')
393 hint = _('use --hidden to access hidden revisions')
394 return error.FilteredRepoLookupError(msg, hint=hint)
394 return error.FilteredRepoLookupError(msg, hint=hint)
395 msg = _("filtered revision '%s' (not in '%s' subset)")
395 msg = _("filtered revision '%s' (not in '%s' subset)")
396 msg %= (changeid, repo.filtername)
396 msg %= (changeid, repo.filtername)
397 return error.FilteredRepoLookupError(msg)
397 return error.FilteredRepoLookupError(msg)
398
398
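# Hedged sketch of how the two message variants above differ, assuming an
# opened repository; 'visible' is the stock hidden-changeset filter and
# 'served' another stock repoview filter name.  `_example_filterederror` is a
# hypothetical helper used only for illustration.
def _example_filterederror(repo, changeid):
    # With repo.filtername == 'visible' the message reads
    #     hidden revision '<changeid>'   (hint: use --hidden ...)
    # with any other filter (e.g. 'served') it reads
    #     filtered revision '<changeid>' (not in '<filtername>' subset)
    raise _filterederror(repo, changeid)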
399 class changectx(basectx):
399 class changectx(basectx):
400 """A changecontext object makes access to data related to a particular
400 """A changecontext object makes access to data related to a particular
401 changeset convenient. It represents a read-only context already present in
401 changeset convenient. It represents a read-only context already present in
402 the repo."""
402 the repo."""
403 def __init__(self, repo, changeid=''):
403 def __init__(self, repo, changeid=''):
404 """changeid is a revision number, node, or tag"""
404 """changeid is a revision number, node, or tag"""
405
405
406 # since basectx.__new__ already took care of copying the object, we
406 # since basectx.__new__ already took care of copying the object, we
407 # don't need to do anything in __init__, so we just exit here
407 # don't need to do anything in __init__, so we just exit here
408 if isinstance(changeid, basectx):
408 if isinstance(changeid, basectx):
409 return
409 return
410
410
411 if changeid == '':
411 if changeid == '':
412 changeid = '.'
412 changeid = '.'
413 self._repo = repo
413 self._repo = repo
414
414
415 try:
415 try:
416 if isinstance(changeid, int):
416 if isinstance(changeid, int):
417 self._node = repo.changelog.node(changeid)
417 self._node = repo.changelog.node(changeid)
418 self._rev = changeid
418 self._rev = changeid
419 return
419 return
420 if not pycompat.ispy3 and isinstance(changeid, long):
420 if not pycompat.ispy3 and isinstance(changeid, long):
421 changeid = str(changeid)
421 changeid = str(changeid)
422 if changeid == 'null':
422 if changeid == 'null':
423 self._node = nullid
423 self._node = nullid
424 self._rev = nullrev
424 self._rev = nullrev
425 return
425 return
426 if changeid == 'tip':
426 if changeid == 'tip':
427 self._node = repo.changelog.tip()
427 self._node = repo.changelog.tip()
428 self._rev = repo.changelog.rev(self._node)
428 self._rev = repo.changelog.rev(self._node)
429 return
429 return
430 if changeid == '.' or changeid == repo.dirstate.p1():
430 if changeid == '.' or changeid == repo.dirstate.p1():
431 # this is a hack to delay/avoid loading obsmarkers
431 # this is a hack to delay/avoid loading obsmarkers
432 # when we know that '.' won't be hidden
432 # when we know that '.' won't be hidden
433 self._node = repo.dirstate.p1()
433 self._node = repo.dirstate.p1()
434 self._rev = repo.unfiltered().changelog.rev(self._node)
434 self._rev = repo.unfiltered().changelog.rev(self._node)
435 return
435 return
436 if len(changeid) == 20:
436 if len(changeid) == 20:
437 try:
437 try:
438 self._node = changeid
438 self._node = changeid
439 self._rev = repo.changelog.rev(changeid)
439 self._rev = repo.changelog.rev(changeid)
440 return
440 return
441 except error.FilteredRepoLookupError:
441 except error.FilteredRepoLookupError:
442 raise
442 raise
443 except LookupError:
443 except LookupError:
444 pass
444 pass
445
445
446 try:
446 try:
447 r = int(changeid)
447 r = int(changeid)
448 if '%d' % r != changeid:
448 if '%d' % r != changeid:
449 raise ValueError
449 raise ValueError
450 l = len(repo.changelog)
450 l = len(repo.changelog)
451 if r < 0:
451 if r < 0:
452 r += l
452 r += l
453 if r < 0 or r >= l and r != wdirrev:
453 if r < 0 or r >= l and r != wdirrev:
454 raise ValueError
454 raise ValueError
455 self._rev = r
455 self._rev = r
456 self._node = repo.changelog.node(r)
456 self._node = repo.changelog.node(r)
457 return
457 return
458 except error.FilteredIndexError:
458 except error.FilteredIndexError:
459 raise
459 raise
460 except (ValueError, OverflowError, IndexError):
460 except (ValueError, OverflowError, IndexError):
461 pass
461 pass
462
462
463 if len(changeid) == 40:
463 if len(changeid) == 40:
464 try:
464 try:
465 self._node = bin(changeid)
465 self._node = bin(changeid)
466 self._rev = repo.changelog.rev(self._node)
466 self._rev = repo.changelog.rev(self._node)
467 return
467 return
468 except error.FilteredLookupError:
468 except error.FilteredLookupError:
469 raise
469 raise
470 except (TypeError, LookupError):
470 except (TypeError, LookupError):
471 pass
471 pass
472
472
473 # lookup bookmarks through the name interface
473 # lookup bookmarks through the name interface
474 try:
474 try:
475 self._node = repo.names.singlenode(repo, changeid)
475 self._node = repo.names.singlenode(repo, changeid)
476 self._rev = repo.changelog.rev(self._node)
476 self._rev = repo.changelog.rev(self._node)
477 return
477 return
478 except KeyError:
478 except KeyError:
479 pass
479 pass
480 except error.FilteredRepoLookupError:
480 except error.FilteredRepoLookupError:
481 raise
481 raise
482 except error.RepoLookupError:
482 except error.RepoLookupError:
483 pass
483 pass
484
484
485 self._node = repo.unfiltered().changelog._partialmatch(changeid)
485 self._node = repo.unfiltered().changelog._partialmatch(changeid)
486 if self._node is not None:
486 if self._node is not None:
487 self._rev = repo.changelog.rev(self._node)
487 self._rev = repo.changelog.rev(self._node)
488 return
488 return
489
489
490 # lookup failed
490 # lookup failed
491 # check if it might have come from damaged dirstate
491 # check if it might have come from damaged dirstate
492 #
492 #
493 # XXX we could avoid the unfiltered if we had a recognizable
493 # XXX we could avoid the unfiltered if we had a recognizable
494 # exception for filtered changeset access
494 # exception for filtered changeset access
495 if changeid in repo.unfiltered().dirstate.parents():
495 if changeid in repo.unfiltered().dirstate.parents():
496 msg = _("working directory has unknown parent '%s'!")
496 msg = _("working directory has unknown parent '%s'!")
497 raise error.Abort(msg % short(changeid))
497 raise error.Abort(msg % short(changeid))
498 try:
498 try:
499 if len(changeid) == 20 and nonascii(changeid):
499 if len(changeid) == 20 and nonascii(changeid):
500 changeid = hex(changeid)
500 changeid = hex(changeid)
501 except TypeError:
501 except TypeError:
502 pass
502 pass
503 except (error.FilteredIndexError, error.FilteredLookupError,
503 except (error.FilteredIndexError, error.FilteredLookupError,
504 error.FilteredRepoLookupError):
504 error.FilteredRepoLookupError):
505 raise _filterederror(repo, changeid)
505 raise _filterederror(repo, changeid)
506 except IndexError:
506 except IndexError:
507 pass
507 pass
508 raise error.RepoLookupError(
508 raise error.RepoLookupError(
509 _("unknown revision '%s'") % changeid)
509 _("unknown revision '%s'") % changeid)
510
510
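# The constructor above accepts several identifier forms.  A hedged sketch of
# values that should all resolve to the same (tip) changeset, assuming `repo`
# is a non-empty, unfiltered repository and the short prefix is unambiguous;
# `_example_changeid_forms` is a hypothetical helper for illustration.
def _example_changeid_forms(repo):
    tipnode = repo.changelog.tip()
    candidates = [
        len(repo.changelog) - 1,   # integer revision number
        'tip',                     # special name handled explicitly above
        hex(tipnode),              # 40-character hex node
        tipnode,                   # 20-byte binary node
        hex(tipnode)[:12],         # unambiguous prefix, via _partialmatch
    ]
    return [changectx(repo, c).node() for c in candidates]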
511 def __hash__(self):
511 def __hash__(self):
512 try:
512 try:
513 return hash(self._rev)
513 return hash(self._rev)
514 except AttributeError:
514 except AttributeError:
515 return id(self)
515 return id(self)
516
516
517 def __nonzero__(self):
517 def __nonzero__(self):
518 return self._rev != nullrev
518 return self._rev != nullrev
519
519
520 __bool__ = __nonzero__
520 __bool__ = __nonzero__
521
521
522 @propertycache
522 @propertycache
523 def _changeset(self):
523 def _changeset(self):
524 return self._repo.changelog.changelogrevision(self.rev())
524 return self._repo.changelog.changelogrevision(self.rev())
525
525
526 @propertycache
526 @propertycache
527 def _manifest(self):
527 def _manifest(self):
528 return self._manifestctx.read()
528 return self._manifestctx.read()
529
529
530 @property
530 @property
531 def _manifestctx(self):
531 def _manifestctx(self):
532 return self._repo.manifestlog[self._changeset.manifest]
532 return self._repo.manifestlog[self._changeset.manifest]
533
533
534 @propertycache
534 @propertycache
535 def _manifestdelta(self):
535 def _manifestdelta(self):
536 return self._manifestctx.readdelta()
536 return self._manifestctx.readdelta()
537
537
538 @propertycache
538 @propertycache
539 def _parents(self):
539 def _parents(self):
540 repo = self._repo
540 repo = self._repo
541 p1, p2 = repo.changelog.parentrevs(self._rev)
541 p1, p2 = repo.changelog.parentrevs(self._rev)
542 if p2 == nullrev:
542 if p2 == nullrev:
543 return [changectx(repo, p1)]
543 return [changectx(repo, p1)]
544 return [changectx(repo, p1), changectx(repo, p2)]
544 return [changectx(repo, p1), changectx(repo, p2)]
545
545
546 def changeset(self):
546 def changeset(self):
547 c = self._changeset
547 c = self._changeset
548 return (
548 return (
549 c.manifest,
549 c.manifest,
550 c.user,
550 c.user,
551 c.date,
551 c.date,
552 c.files,
552 c.files,
553 c.description,
553 c.description,
554 c.extra,
554 c.extra,
555 )
555 )
556 def manifestnode(self):
556 def manifestnode(self):
557 return self._changeset.manifest
557 return self._changeset.manifest
558
558
559 def user(self):
559 def user(self):
560 return self._changeset.user
560 return self._changeset.user
561 def date(self):
561 def date(self):
562 return self._changeset.date
562 return self._changeset.date
563 def files(self):
563 def files(self):
564 return self._changeset.files
564 return self._changeset.files
565 def description(self):
565 def description(self):
566 return self._changeset.description
566 return self._changeset.description
567 def branch(self):
567 def branch(self):
568 return encoding.tolocal(self._changeset.extra.get("branch"))
568 return encoding.tolocal(self._changeset.extra.get("branch"))
569 def closesbranch(self):
569 def closesbranch(self):
570 return 'close' in self._changeset.extra
570 return 'close' in self._changeset.extra
571 def extra(self):
571 def extra(self):
572 return self._changeset.extra
572 return self._changeset.extra
573 def tags(self):
573 def tags(self):
574 return self._repo.nodetags(self._node)
574 return self._repo.nodetags(self._node)
575 def bookmarks(self):
575 def bookmarks(self):
576 return self._repo.nodebookmarks(self._node)
576 return self._repo.nodebookmarks(self._node)
577 def phase(self):
577 def phase(self):
578 return self._repo._phasecache.phase(self._repo, self._rev)
578 return self._repo._phasecache.phase(self._repo, self._rev)
579 def hidden(self):
579 def hidden(self):
580 return self._rev in repoview.filterrevs(self._repo, 'visible')
580 return self._rev in repoview.filterrevs(self._repo, 'visible')
581
581
582 def children(self):
582 def children(self):
583 """return contexts for each child changeset"""
583 """return contexts for each child changeset"""
584 c = self._repo.changelog.children(self._node)
584 c = self._repo.changelog.children(self._node)
585 return [changectx(self._repo, x) for x in c]
585 return [changectx(self._repo, x) for x in c]
586
586
587 def ancestors(self):
587 def ancestors(self):
588 for a in self._repo.changelog.ancestors([self._rev]):
588 for a in self._repo.changelog.ancestors([self._rev]):
589 yield changectx(self._repo, a)
589 yield changectx(self._repo, a)
590
590
591 def descendants(self):
591 def descendants(self):
592 for d in self._repo.changelog.descendants([self._rev]):
592 for d in self._repo.changelog.descendants([self._rev]):
593 yield changectx(self._repo, d)
593 yield changectx(self._repo, d)
594
594
595 def filectx(self, path, fileid=None, filelog=None):
595 def filectx(self, path, fileid=None, filelog=None):
596 """get a file context from this changeset"""
596 """get a file context from this changeset"""
597 if fileid is None:
597 if fileid is None:
598 fileid = self.filenode(path)
598 fileid = self.filenode(path)
599 return filectx(self._repo, path, fileid=fileid,
599 return filectx(self._repo, path, fileid=fileid,
600 changectx=self, filelog=filelog)
600 changectx=self, filelog=filelog)
601
601
602 def ancestor(self, c2, warn=False):
602 def ancestor(self, c2, warn=False):
603 """return the "best" ancestor context of self and c2
603 """return the "best" ancestor context of self and c2
604
604
605 If there are multiple candidates, it will show a message and check
605 If there are multiple candidates, it will show a message and check
606 merge.preferancestor configuration before falling back to the
606 merge.preferancestor configuration before falling back to the
607 revlog ancestor."""
607 revlog ancestor."""
608 # deal with workingctxs
608 # deal with workingctxs
609 n2 = c2._node
609 n2 = c2._node
610 if n2 is None:
610 if n2 is None:
611 n2 = c2._parents[0]._node
611 n2 = c2._parents[0]._node
612 cahs = self._repo.changelog.commonancestorsheads(self._node, n2)
612 cahs = self._repo.changelog.commonancestorsheads(self._node, n2)
613 if not cahs:
613 if not cahs:
614 anc = nullid
614 anc = nullid
615 elif len(cahs) == 1:
615 elif len(cahs) == 1:
616 anc = cahs[0]
616 anc = cahs[0]
617 else:
617 else:
618 # experimental config: merge.preferancestor
618 # experimental config: merge.preferancestor
619 for r in self._repo.ui.configlist('merge', 'preferancestor', ['*']):
619 for r in self._repo.ui.configlist('merge', 'preferancestor', ['*']):
620 try:
620 try:
621 ctx = changectx(self._repo, r)
621 ctx = changectx(self._repo, r)
622 except error.RepoLookupError:
622 except error.RepoLookupError:
623 continue
623 continue
624 anc = ctx.node()
624 anc = ctx.node()
625 if anc in cahs:
625 if anc in cahs:
626 break
626 break
627 else:
627 else:
628 anc = self._repo.changelog.ancestor(self._node, n2)
628 anc = self._repo.changelog.ancestor(self._node, n2)
629 if warn:
629 if warn:
630 self._repo.ui.status(
630 self._repo.ui.status(
631 (_("note: using %s as ancestor of %s and %s\n") %
631 (_("note: using %s as ancestor of %s and %s\n") %
632 (short(anc), short(self._node), short(n2))) +
632 (short(anc), short(self._node), short(n2))) +
633 ''.join(_(" alternatively, use --config "
633 ''.join(_(" alternatively, use --config "
634 "merge.preferancestor=%s\n") %
634 "merge.preferancestor=%s\n") %
635 short(n) for n in sorted(cahs) if n != anc))
635 short(n) for n in sorted(cahs) if n != anc))
636 return changectx(self._repo, anc)
636 return changectx(self._repo, anc)
637
637
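# Configuration sketch for the merge.preferancestor knob consulted above (the
# revision identifier is illustrative):
#
#     [merge]
#     preferancestor = 2f85de624bb2
#
# With several common-ancestor heads, ancestor() picks that node when it is
# one of the candidates instead of the bare revlog ancestor, and with
# warn=True it prints the "note: using ... as ancestor of ..." message listing
# the alternatives.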
638 def descendant(self, other):
638 def descendant(self, other):
639 """True if other is descendant of this changeset"""
639 """True if other is descendant of this changeset"""
640 return self._repo.changelog.descendant(self._rev, other._rev)
640 return self._repo.changelog.descendant(self._rev, other._rev)
641
641
642 def walk(self, match):
642 def walk(self, match):
643 '''Generates matching file names.'''
643 '''Generates matching file names.'''
644
644
645 # Wrap match.bad method to have message with nodeid
645 # Wrap match.bad method to have message with nodeid
646 def bad(fn, msg):
646 def bad(fn, msg):
647 # The manifest doesn't know about subrepos, so don't complain about
647 # The manifest doesn't know about subrepos, so don't complain about
648 # paths into valid subrepos.
648 # paths into valid subrepos.
649 if any(fn == s or fn.startswith(s + '/')
649 if any(fn == s or fn.startswith(s + '/')
650 for s in self.substate):
650 for s in self.substate):
651 return
651 return
652 match.bad(fn, _('no such file in rev %s') % self)
652 match.bad(fn, _('no such file in rev %s') % self)
653
653
654 m = matchmod.badmatch(match, bad)
654 m = matchmod.badmatch(match, bad)
655 return self._manifest.walk(m)
655 return self._manifest.walk(m)
656
656
657 def matches(self, match):
657 def matches(self, match):
658 return self.walk(match)
658 return self.walk(match)
659
659
660 class basefilectx(object):
660 class basefilectx(object):
661 """A filecontext object represents the common logic for its children:
661 """A filecontext object represents the common logic for its children:
662 filectx: read-only access to a filerevision that is already present
662 filectx: read-only access to a filerevision that is already present
663 in the repo,
663 in the repo,
664 workingfilectx: a filecontext that represents files from the working
664 workingfilectx: a filecontext that represents files from the working
665 directory,
665 directory,
666 memfilectx: a filecontext that represents files in-memory,
666 memfilectx: a filecontext that represents files in-memory,
667 overlayfilectx: duplicate another filecontext with some fields overridden.
667 overlayfilectx: duplicate another filecontext with some fields overridden.
668 """
668 """
669 @propertycache
669 @propertycache
670 def _filelog(self):
670 def _filelog(self):
671 return self._repo.file(self._path)
671 return self._repo.file(self._path)
672
672
673 @propertycache
673 @propertycache
674 def _changeid(self):
674 def _changeid(self):
675 if r'_changeid' in self.__dict__:
675 if r'_changeid' in self.__dict__:
676 return self._changeid
676 return self._changeid
677 elif r'_changectx' in self.__dict__:
677 elif r'_changectx' in self.__dict__:
678 return self._changectx.rev()
678 return self._changectx.rev()
679 elif r'_descendantrev' in self.__dict__:
679 elif r'_descendantrev' in self.__dict__:
680 # this file context was created from a revision with a known
680 # this file context was created from a revision with a known
681 # descendant, we can (lazily) correct for linkrev aliases
681 # descendant, we can (lazily) correct for linkrev aliases
682 return self._adjustlinkrev(self._descendantrev)
682 return self._adjustlinkrev(self._descendantrev)
683 else:
683 else:
684 return self._filelog.linkrev(self._filerev)
684 return self._filelog.linkrev(self._filerev)
685
685
686 @propertycache
686 @propertycache
687 def _filenode(self):
687 def _filenode(self):
688 if r'_fileid' in self.__dict__:
688 if r'_fileid' in self.__dict__:
689 return self._filelog.lookup(self._fileid)
689 return self._filelog.lookup(self._fileid)
690 else:
690 else:
691 return self._changectx.filenode(self._path)
691 return self._changectx.filenode(self._path)
692
692
693 @propertycache
693 @propertycache
694 def _filerev(self):
694 def _filerev(self):
695 return self._filelog.rev(self._filenode)
695 return self._filelog.rev(self._filenode)
696
696
697 @propertycache
697 @propertycache
698 def _repopath(self):
698 def _repopath(self):
699 return self._path
699 return self._path
700
700
701 def __nonzero__(self):
701 def __nonzero__(self):
702 try:
702 try:
703 self._filenode
703 self._filenode
704 return True
704 return True
705 except error.LookupError:
705 except error.LookupError:
706 # file is missing
706 # file is missing
707 return False
707 return False
708
708
709 __bool__ = __nonzero__
709 __bool__ = __nonzero__
710
710
711 def __bytes__(self):
711 def __bytes__(self):
712 try:
712 try:
713 return "%s@%s" % (self.path(), self._changectx)
713 return "%s@%s" % (self.path(), self._changectx)
714 except error.LookupError:
714 except error.LookupError:
715 return "%s@???" % self.path()
715 return "%s@???" % self.path()
716
716
717 __str__ = encoding.strmethod(__bytes__)
717 __str__ = encoding.strmethod(__bytes__)
718
718
719 def __repr__(self):
719 def __repr__(self):
720 return "<%s %s>" % (type(self).__name__, str(self))
720 return "<%s %s>" % (type(self).__name__, str(self))
721
721
722 def __hash__(self):
722 def __hash__(self):
723 try:
723 try:
724 return hash((self._path, self._filenode))
724 return hash((self._path, self._filenode))
725 except AttributeError:
725 except AttributeError:
726 return id(self)
726 return id(self)
727
727
728 def __eq__(self, other):
728 def __eq__(self, other):
729 try:
729 try:
730 return (type(self) == type(other) and self._path == other._path
730 return (type(self) == type(other) and self._path == other._path
731 and self._filenode == other._filenode)
731 and self._filenode == other._filenode)
732 except AttributeError:
732 except AttributeError:
733 return False
733 return False
734
734
735 def __ne__(self, other):
735 def __ne__(self, other):
736 return not (self == other)
736 return not (self == other)
737
737
738 def filerev(self):
738 def filerev(self):
739 return self._filerev
739 return self._filerev
740 def filenode(self):
740 def filenode(self):
741 return self._filenode
741 return self._filenode
742 @propertycache
742 @propertycache
743 def _flags(self):
743 def _flags(self):
744 return self._changectx.flags(self._path)
744 return self._changectx.flags(self._path)
745 def flags(self):
745 def flags(self):
746 return self._flags
746 return self._flags
747 def filelog(self):
747 def filelog(self):
748 return self._filelog
748 return self._filelog
749 def rev(self):
749 def rev(self):
750 return self._changeid
750 return self._changeid
751 def linkrev(self):
751 def linkrev(self):
752 return self._filelog.linkrev(self._filerev)
752 return self._filelog.linkrev(self._filerev)
753 def node(self):
753 def node(self):
754 return self._changectx.node()
754 return self._changectx.node()
755 def hex(self):
755 def hex(self):
756 return self._changectx.hex()
756 return self._changectx.hex()
757 def user(self):
757 def user(self):
758 return self._changectx.user()
758 return self._changectx.user()
759 def date(self):
759 def date(self):
760 return self._changectx.date()
760 return self._changectx.date()
761 def files(self):
761 def files(self):
762 return self._changectx.files()
762 return self._changectx.files()
763 def description(self):
763 def description(self):
764 return self._changectx.description()
764 return self._changectx.description()
765 def branch(self):
765 def branch(self):
766 return self._changectx.branch()
766 return self._changectx.branch()
767 def extra(self):
767 def extra(self):
768 return self._changectx.extra()
768 return self._changectx.extra()
769 def phase(self):
769 def phase(self):
770 return self._changectx.phase()
770 return self._changectx.phase()
771 def phasestr(self):
771 def phasestr(self):
772 return self._changectx.phasestr()
772 return self._changectx.phasestr()
773 def manifest(self):
773 def manifest(self):
774 return self._changectx.manifest()
774 return self._changectx.manifest()
775 def changectx(self):
775 def changectx(self):
776 return self._changectx
776 return self._changectx
777 def renamed(self):
777 def renamed(self):
778 return self._copied
778 return self._copied
779 def repo(self):
779 def repo(self):
780 return self._repo
780 return self._repo
781 def size(self):
781 def size(self):
782 return len(self.data())
782 return len(self.data())
783
783
784 def path(self):
784 def path(self):
785 return self._path
785 return self._path
786
786
787 def isbinary(self):
787 def isbinary(self):
788 try:
788 try:
789 return util.binary(self.data())
789 return util.binary(self.data())
790 except IOError:
790 except IOError:
791 return False
791 return False
792 def isexec(self):
792 def isexec(self):
793 return 'x' in self.flags()
793 return 'x' in self.flags()
794 def islink(self):
794 def islink(self):
795 return 'l' in self.flags()
795 return 'l' in self.flags()
796
796
797 def isabsent(self):
797 def isabsent(self):
798 """whether this filectx represents a file not in self._changectx
798 """whether this filectx represents a file not in self._changectx
799
799
800 This is mainly for merge code to detect change/delete conflicts. This is
800 This is mainly for merge code to detect change/delete conflicts. This is
801 expected to be False for all subclasses of basefilectx."""
801 expected to be False for all subclasses of basefilectx."""
802 return False
802 return False
803
803
804 _customcmp = False
804 _customcmp = False
805 def cmp(self, fctx):
805 def cmp(self, fctx):
806 """compare with other file context
806 """compare with other file context
807
807
808 returns True if different than fctx.
808 returns True if different than fctx.
809 """
809 """
810 if fctx._customcmp:
810 if fctx._customcmp:
811 return fctx.cmp(self)
811 return fctx.cmp(self)
812
812
813 if (fctx._filenode is None
813 if (fctx._filenode is None
814 and (self._repo._encodefilterpats
814 and (self._repo._encodefilterpats
815 # if file data starts with '\1\n', empty metadata block is
815 # if file data starts with '\1\n', empty metadata block is
816 # prepended, which adds 4 bytes to filelog.size().
816 # prepended, which adds 4 bytes to filelog.size().
817 or self.size() - 4 == fctx.size())
817 or self.size() - 4 == fctx.size())
818 or self.size() == fctx.size()):
818 or self.size() == fctx.size()):
819 return self._filelog.cmp(self._filenode, fctx.data())
819 return self._filelog.cmp(self._filenode, fctx.data())
820
820
821 return True
821 return True
822
822
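# Worked note on the size heuristic above: when file data itself starts with
# '\1\n', an empty metadata block is prepended in the filelog, so
# filelog.size() reports 4 bytes more than the actual content.  That is why a
# size match *or* a match after subtracting 4 is accepted before falling back
# to the (more expensive) content comparison via filelog.cmp().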
823 def _adjustlinkrev(self, srcrev, inclusive=False):
823 def _adjustlinkrev(self, srcrev, inclusive=False):
824 """return the first ancestor of <srcrev> introducing <fnode>
824 """return the first ancestor of <srcrev> introducing <fnode>
825
825
826 If the linkrev of the file revision does not point to an ancestor of
826 If the linkrev of the file revision does not point to an ancestor of
827 srcrev, we'll walk down the ancestors until we find one introducing
827 srcrev, we'll walk down the ancestors until we find one introducing
828 this file revision.
828 this file revision.
829
829
830 :srcrev: the changeset revision we search ancestors from
830 :srcrev: the changeset revision we search ancestors from
831 :inclusive: if true, the src revision will also be checked
831 :inclusive: if true, the src revision will also be checked
832 """
832 """
833 repo = self._repo
833 repo = self._repo
834 cl = repo.unfiltered().changelog
834 cl = repo.unfiltered().changelog
835 mfl = repo.manifestlog
835 mfl = repo.manifestlog
836 # fetch the linkrev
836 # fetch the linkrev
837 lkr = self.linkrev()
837 lkr = self.linkrev()
838 # hack to reuse ancestor computation when searching for renames
838 # hack to reuse ancestor computation when searching for renames
839 memberanc = getattr(self, '_ancestrycontext', None)
839 memberanc = getattr(self, '_ancestrycontext', None)
840 iteranc = None
840 iteranc = None
841 if srcrev is None:
841 if srcrev is None:
842 # wctx case, used by workingfilectx during mergecopy
842 # wctx case, used by workingfilectx during mergecopy
843 revs = [p.rev() for p in self._repo[None].parents()]
843 revs = [p.rev() for p in self._repo[None].parents()]
844 inclusive = True # we skipped the real (revless) source
844 inclusive = True # we skipped the real (revless) source
845 else:
845 else:
846 revs = [srcrev]
846 revs = [srcrev]
847 if memberanc is None:
847 if memberanc is None:
848 memberanc = iteranc = cl.ancestors(revs, lkr,
848 memberanc = iteranc = cl.ancestors(revs, lkr,
849 inclusive=inclusive)
849 inclusive=inclusive)
850 # check if this linkrev is an ancestor of srcrev
850 # check if this linkrev is an ancestor of srcrev
851 if lkr not in memberanc:
851 if lkr not in memberanc:
852 if iteranc is None:
852 if iteranc is None:
853 iteranc = cl.ancestors(revs, lkr, inclusive=inclusive)
853 iteranc = cl.ancestors(revs, lkr, inclusive=inclusive)
854 fnode = self._filenode
854 fnode = self._filenode
855 path = self._path
855 path = self._path
856 for a in iteranc:
856 for a in iteranc:
857 ac = cl.read(a) # get changeset data (we avoid object creation)
857 ac = cl.read(a) # get changeset data (we avoid object creation)
858 if path in ac[3]: # checking the 'files' field.
858 if path in ac[3]: # checking the 'files' field.
859 # The file has been touched, check if the content is
859 # The file has been touched, check if the content is
860 # similar to the one we search for.
860 # similar to the one we search for.
861 if fnode == mfl[ac[0]].readfast().get(path):
861 if fnode == mfl[ac[0]].readfast().get(path):
862 return a
862 return a
863 # In theory, we should never get out of that loop without a result.
863 # In theory, we should never get out of that loop without a result.
864 # But if the manifest uses a buggy file revision (not a child of
864 # But if the manifest uses a buggy file revision (not a child of
865 # the one it replaces), we could. Such a buggy situation will likely
865 # the one it replaces), we could. Such a buggy situation will likely
866 # result in a crash somewhere else at some point.
866 # result in a crash somewhere else at some point.
867 return lkr
867 return lkr
868
868
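# Sketch of the "linkrev aliasing" problem _adjustlinkrev() works around: when
# identical file content is introduced by several changesets, the filelog
# stores a single linkrev, which may point at a changeset that is not an
# ancestor of the context being inspected.  Walking cl.ancestors() from srcrev
# and checking, for each changeset touching the path, whether its manifest
# records the same file node recovers the ancestor that actually introduced
# this file revision.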
869 def introrev(self):
869 def introrev(self):
870 """return the rev of the changeset which introduced this file revision
870 """return the rev of the changeset which introduced this file revision
871
871
872 This method is different from linkrev because it takes into account the
872 This method is different from linkrev because it takes into account the
873 changeset the filectx was created from. It ensures the returned
873 changeset the filectx was created from. It ensures the returned
874 revision is one of its ancestors. This prevents bugs from
874 revision is one of its ancestors. This prevents bugs from
875 'linkrev-shadowing' when a file revision is used by multiple
875 'linkrev-shadowing' when a file revision is used by multiple
876 changesets.
876 changesets.
877 """
877 """
878 lkr = self.linkrev()
878 lkr = self.linkrev()
879 attrs = vars(self)
879 attrs = vars(self)
880 noctx = not ('_changeid' in attrs or '_changectx' in attrs)
880 noctx = not ('_changeid' in attrs or '_changectx' in attrs)
881 if noctx or self.rev() == lkr:
881 if noctx or self.rev() == lkr:
882 return self.linkrev()
882 return self.linkrev()
883 return self._adjustlinkrev(self.rev(), inclusive=True)
883 return self._adjustlinkrev(self.rev(), inclusive=True)
884
884
885 def _parentfilectx(self, path, fileid, filelog):
885 def _parentfilectx(self, path, fileid, filelog):
886 """create parent filectx keeping ancestry info for _adjustlinkrev()"""
886 """create parent filectx keeping ancestry info for _adjustlinkrev()"""
887 fctx = filectx(self._repo, path, fileid=fileid, filelog=filelog)
887 fctx = filectx(self._repo, path, fileid=fileid, filelog=filelog)
888 if '_changeid' in vars(self) or '_changectx' in vars(self):
888 if '_changeid' in vars(self) or '_changectx' in vars(self):
889 # If self is associated with a changeset (probably explicitly
889 # If self is associated with a changeset (probably explicitly
890 # fed), ensure the created filectx is associated with a
890 # fed), ensure the created filectx is associated with a
891 # changeset that is an ancestor of self.changectx.
891 # changeset that is an ancestor of self.changectx.
892 # This lets us later use _adjustlinkrev to get a correct link.
892 # This lets us later use _adjustlinkrev to get a correct link.
893 fctx._descendantrev = self.rev()
893 fctx._descendantrev = self.rev()
894 fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
894 fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
895 elif '_descendantrev' in vars(self):
895 elif '_descendantrev' in vars(self):
896 # Otherwise propagate _descendantrev if we have one associated.
896 # Otherwise propagate _descendantrev if we have one associated.
897 fctx._descendantrev = self._descendantrev
897 fctx._descendantrev = self._descendantrev
898 fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
898 fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
899 return fctx
899 return fctx
900
900
901 def parents(self):
901 def parents(self):
902 _path = self._path
902 _path = self._path
903 fl = self._filelog
903 fl = self._filelog
904 parents = self._filelog.parents(self._filenode)
904 parents = self._filelog.parents(self._filenode)
905 pl = [(_path, node, fl) for node in parents if node != nullid]
905 pl = [(_path, node, fl) for node in parents if node != nullid]
906
906
907 r = fl.renamed(self._filenode)
907 r = fl.renamed(self._filenode)
908 if r:
908 if r:
909 # - In the simple rename case, both parents are nullid and pl is empty.
909 # - In the simple rename case, both parents are nullid and pl is empty.
910 # - In case of merge, only one of the parents is nullid and should
910 # - In case of merge, only one of the parents is nullid and should
911 # be replaced with the rename information. This parent is -always-
911 # be replaced with the rename information. This parent is -always-
912 # the first one.
912 # the first one.
913 #
913 #
914 # As nullid parents have always been filtered out by the previous list
914 # As nullid parents have always been filtered out by the previous list
915 # comprehension, inserting at index 0 will always result in replacing
915 # comprehension, inserting at index 0 will always result in replacing
916 # the first nullid parent with the rename information.
916 # the first nullid parent with the rename information.
917 pl.insert(0, (r[0], r[1], self._repo.file(r[0])))
917 pl.insert(0, (r[0], r[1], self._repo.file(r[0])))
918
918
919 return [self._parentfilectx(path, fnode, l) for path, fnode, l in pl]
919 return [self._parentfilectx(path, fnode, l) for path, fnode, l in pl]
920
920
921 def p1(self):
921 def p1(self):
922 return self.parents()[0]
922 return self.parents()[0]
923
923
924 def p2(self):
924 def p2(self):
925 p = self.parents()
925 p = self.parents()
926 if len(p) == 2:
926 if len(p) == 2:
927 return p[1]
927 return p[1]
928 return filectx(self._repo, self._path, fileid=-1, filelog=self._filelog)
928 return filectx(self._repo, self._path, fileid=-1, filelog=self._filelog)
929
929
930 def annotate(self, follow=False, linenumber=False, skiprevs=None,
930 def annotate(self, follow=False, linenumber=False, skiprevs=None,
931 diffopts=None):
931 diffopts=None):
932 '''returns a list of tuples of ((ctx, number), line) for each line
932 '''returns a list of tuples of ((ctx, number), line) for each line
933 in the file, where ctx is the filectx of the node where
933 in the file, where ctx is the filectx of the node where
934 that line was last changed; if the linenumber parameter is True, number is
934 that line was last changed; if the linenumber parameter is True, number is
935 the line number of its first appearance in the managed file; otherwise,
935 the line number of its first appearance in the managed file; otherwise,
936 number has the fixed value False.
936 number has the fixed value False.
937 '''
937 '''
938
938
939 def lines(text):
939 def lines(text):
940 if text.endswith("\n"):
940 if text.endswith("\n"):
941 return text.count("\n")
941 return text.count("\n")
942 return text.count("\n") + int(bool(text))
942 return text.count("\n") + int(bool(text))
943
943
944 if linenumber:
944 if linenumber:
945 def decorate(text, rev):
945 def decorate(text, rev):
946 return ([(rev, i) for i in xrange(1, lines(text) + 1)], text)
946 return ([(rev, i) for i in xrange(1, lines(text) + 1)], text)
947 else:
947 else:
948 def decorate(text, rev):
948 def decorate(text, rev):
949 return ([(rev, False)] * lines(text), text)
949 return ([(rev, False)] * lines(text), text)
950
950
951 getlog = util.lrucachefunc(lambda x: self._repo.file(x))
951 getlog = util.lrucachefunc(lambda x: self._repo.file(x))
952
952
953 def parents(f):
953 def parents(f):
954 # Cut _descendantrev here to mitigate the penalty of lazy linkrev
954 # Cut _descendantrev here to mitigate the penalty of lazy linkrev
955 # adjustment. Otherwise, p._adjustlinkrev() would walk changelog
955 # adjustment. Otherwise, p._adjustlinkrev() would walk changelog
956 # from the topmost introrev (= srcrev) down to p.linkrev() if it
956 # from the topmost introrev (= srcrev) down to p.linkrev() if it
957 # isn't an ancestor of the srcrev.
957 # isn't an ancestor of the srcrev.
958 f._changeid
958 f._changeid
959 pl = f.parents()
959 pl = f.parents()
960
960
961 # Don't return renamed parents if we aren't following.
961 # Don't return renamed parents if we aren't following.
962 if not follow:
962 if not follow:
963 pl = [p for p in pl if p.path() == f.path()]
963 pl = [p for p in pl if p.path() == f.path()]
964
964
965 # renamed filectx won't have a filelog yet, so set it
965 # renamed filectx won't have a filelog yet, so set it
966 # from the cache to save time
966 # from the cache to save time
967 for p in pl:
967 for p in pl:
968 if '_filelog' not in p.__dict__:
968 if '_filelog' not in p.__dict__:
969 p._filelog = getlog(p.path())
969 p._filelog = getlog(p.path())
970
970
971 return pl
971 return pl
972
972
973 # use linkrev to find the first changeset where self appeared
973 # use linkrev to find the first changeset where self appeared
974 base = self
974 base = self
975 introrev = self.introrev()
975 introrev = self.introrev()
976 if self.rev() != introrev:
976 if self.rev() != introrev:
977 base = self.filectx(self.filenode(), changeid=introrev)
977 base = self.filectx(self.filenode(), changeid=introrev)
978 if getattr(base, '_ancestrycontext', None) is None:
978 if getattr(base, '_ancestrycontext', None) is None:
979 cl = self._repo.changelog
979 cl = self._repo.changelog
980 if introrev is None:
980 if introrev is None:
981 # wctx is not inclusive, but works because _ancestrycontext
981 # wctx is not inclusive, but works because _ancestrycontext
982 # is used to test filelog revisions
982 # is used to test filelog revisions
983 ac = cl.ancestors([p.rev() for p in base.parents()],
983 ac = cl.ancestors([p.rev() for p in base.parents()],
984 inclusive=True)
984 inclusive=True)
985 else:
985 else:
986 ac = cl.ancestors([introrev], inclusive=True)
986 ac = cl.ancestors([introrev], inclusive=True)
987 base._ancestrycontext = ac
987 base._ancestrycontext = ac
988
988
989 # This algorithm would prefer to be recursive, but Python is a
989 # This algorithm would prefer to be recursive, but Python is a
990 # bit recursion-hostile. Instead we do an iterative
990 # bit recursion-hostile. Instead we do an iterative
991 # depth-first search.
991 # depth-first search.
992
992
993 # 1st DFS pre-calculates pcache and needed
993 # 1st DFS pre-calculates pcache and needed
994 visit = [base]
994 visit = [base]
995 pcache = {}
995 pcache = {}
996 needed = {base: 1}
996 needed = {base: 1}
997 while visit:
997 while visit:
998 f = visit.pop()
998 f = visit.pop()
999 if f in pcache:
999 if f in pcache:
1000 continue
1000 continue
1001 pl = parents(f)
1001 pl = parents(f)
1002 pcache[f] = pl
1002 pcache[f] = pl
1003 for p in pl:
1003 for p in pl:
1004 needed[p] = needed.get(p, 0) + 1
1004 needed[p] = needed.get(p, 0) + 1
1005 if p not in pcache:
1005 if p not in pcache:
1006 visit.append(p)
1006 visit.append(p)
1007
1007
1008 # 2nd DFS does the actual annotate
1008 # 2nd DFS does the actual annotate
1009 visit[:] = [base]
1009 visit[:] = [base]
1010 hist = {}
1010 hist = {}
1011 while visit:
1011 while visit:
1012 f = visit[-1]
1012 f = visit[-1]
1013 if f in hist:
1013 if f in hist:
1014 visit.pop()
1014 visit.pop()
1015 continue
1015 continue
1016
1016
1017 ready = True
1017 ready = True
1018 pl = pcache[f]
1018 pl = pcache[f]
1019 for p in pl:
1019 for p in pl:
1020 if p not in hist:
1020 if p not in hist:
1021 ready = False
1021 ready = False
1022 visit.append(p)
1022 visit.append(p)
1023 if ready:
1023 if ready:
1024 visit.pop()
1024 visit.pop()
1025 curr = decorate(f.data(), f)
1025 curr = decorate(f.data(), f)
1026 skipchild = False
1026 skipchild = False
1027 if skiprevs is not None:
1027 if skiprevs is not None:
1028 skipchild = f._changeid in skiprevs
1028 skipchild = f._changeid in skiprevs
1029 curr = _annotatepair([hist[p] for p in pl], f, curr, skipchild,
1029 curr = _annotatepair([hist[p] for p in pl], f, curr, skipchild,
1030 diffopts)
1030 diffopts)
1031 for p in pl:
1031 for p in pl:
1032 if needed[p] == 1:
1032 if needed[p] == 1:
1033 del hist[p]
1033 del hist[p]
1034 del needed[p]
1034 del needed[p]
1035 else:
1035 else:
1036 needed[p] -= 1
1036 needed[p] -= 1
1037
1037
1038 hist[f] = curr
1038 hist[f] = curr
1039 del pcache[f]
1039 del pcache[f]
1040
1040
1041 return zip(hist[base][0], hist[base][1].splitlines(True))
1041 return zip(hist[base][0], hist[base][1].splitlines(True))
1042
1042
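# Sketch of the bookkeeping used by annotate() above: the first DFS records,
# in `needed`, how many children still require each ancestor's annotate data;
# the second DFS computes hist[f] bottom-up and deletes a parent's entry as
# soon as its counter drops to zero, keeping memory proportional to the
# traversal frontier rather than to the whole file history.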
1043 def ancestors(self, followfirst=False):
1043 def ancestors(self, followfirst=False):
1044 visit = {}
1044 visit = {}
1045 c = self
1045 c = self
1046 if followfirst:
1046 if followfirst:
1047 cut = 1
1047 cut = 1
1048 else:
1048 else:
1049 cut = None
1049 cut = None
1050
1050
1051 while True:
1051 while True:
1052 for parent in c.parents()[:cut]:
1052 for parent in c.parents()[:cut]:
1053 visit[(parent.linkrev(), parent.filenode())] = parent
1053 visit[(parent.linkrev(), parent.filenode())] = parent
1054 if not visit:
1054 if not visit:
1055 break
1055 break
1056 c = visit.pop(max(visit))
1056 c = visit.pop(max(visit))
1057 yield c
1057 yield c
1058
1058
1059 def _annotatepair(parents, childfctx, child, skipchild, diffopts):
1059 def _annotatepair(parents, childfctx, child, skipchild, diffopts):
1060 r'''
1060 r'''
1061 Given parent and child fctxes and annotate data for parents, for all lines
1061 Given parent and child fctxes and annotate data for parents, for all lines
1062 in either parent that match the child, annotate the child with the parent's
1062 in either parent that match the child, annotate the child with the parent's
1063 data.
1063 data.
1064
1064
1065 Additionally, if `skipchild` is True, replace all other lines with parent
1065 Additionally, if `skipchild` is True, replace all other lines with parent
1066 annotate data as well such that child is never blamed for any lines.
1066 annotate data as well such that child is never blamed for any lines.
1067
1067
1068 >>> oldfctx = 'old'
1068 >>> oldfctx = 'old'
1069 >>> p1fctx, p2fctx, childfctx = 'p1', 'p2', 'c'
1069 >>> p1fctx, p2fctx, childfctx = 'p1', 'p2', 'c'
1070 >>> olddata = 'a\nb\n'
1070 >>> olddata = 'a\nb\n'
1071 >>> p1data = 'a\nb\nc\n'
1071 >>> p1data = 'a\nb\nc\n'
1072 >>> p2data = 'a\nc\nd\n'
1072 >>> p2data = 'a\nc\nd\n'
1073 >>> childdata = 'a\nb2\nc\nc2\nd\n'
1073 >>> childdata = 'a\nb2\nc\nc2\nd\n'
1074 >>> diffopts = mdiff.diffopts()
1074 >>> diffopts = mdiff.diffopts()
1075
1075
1076 >>> def decorate(text, rev):
1076 >>> def decorate(text, rev):
1077 ... return ([(rev, i) for i in xrange(1, text.count('\n') + 1)], text)
1077 ... return ([(rev, i) for i in xrange(1, text.count('\n') + 1)], text)
1078
1078
1079 Basic usage:
1079 Basic usage:
1080
1080
1081 >>> oldann = decorate(olddata, oldfctx)
1081 >>> oldann = decorate(olddata, oldfctx)
1082 >>> p1ann = decorate(p1data, p1fctx)
1082 >>> p1ann = decorate(p1data, p1fctx)
1083 >>> p1ann = _annotatepair([oldann], p1fctx, p1ann, False, diffopts)
1083 >>> p1ann = _annotatepair([oldann], p1fctx, p1ann, False, diffopts)
1084 >>> p1ann[0]
1084 >>> p1ann[0]
1085 [('old', 1), ('old', 2), ('p1', 3)]
1085 [('old', 1), ('old', 2), ('p1', 3)]
1086 >>> p2ann = decorate(p2data, p2fctx)
1086 >>> p2ann = decorate(p2data, p2fctx)
1087 >>> p2ann = _annotatepair([oldann], p2fctx, p2ann, False, diffopts)
1087 >>> p2ann = _annotatepair([oldann], p2fctx, p2ann, False, diffopts)
1088 >>> p2ann[0]
1088 >>> p2ann[0]
1089 [('old', 1), ('p2', 2), ('p2', 3)]
1089 [('old', 1), ('p2', 2), ('p2', 3)]
1090
1090
1091 Test with multiple parents (note the difference caused by ordering):
1091 Test with multiple parents (note the difference caused by ordering):
1092
1092
1093 >>> childann = decorate(childdata, childfctx)
1093 >>> childann = decorate(childdata, childfctx)
1094 >>> childann = _annotatepair([p1ann, p2ann], childfctx, childann, False,
1094 >>> childann = _annotatepair([p1ann, p2ann], childfctx, childann, False,
1095 ... diffopts)
1095 ... diffopts)
1096 >>> childann[0]
1096 >>> childann[0]
1097 [('old', 1), ('c', 2), ('p2', 2), ('c', 4), ('p2', 3)]
1097 [('old', 1), ('c', 2), ('p2', 2), ('c', 4), ('p2', 3)]
1098
1098
1099 >>> childann = decorate(childdata, childfctx)
1099 >>> childann = decorate(childdata, childfctx)
1100 >>> childann = _annotatepair([p2ann, p1ann], childfctx, childann, False,
1100 >>> childann = _annotatepair([p2ann, p1ann], childfctx, childann, False,
1101 ... diffopts)
1101 ... diffopts)
1102 >>> childann[0]
1102 >>> childann[0]
1103 [('old', 1), ('c', 2), ('p1', 3), ('c', 4), ('p2', 3)]
1103 [('old', 1), ('c', 2), ('p1', 3), ('c', 4), ('p2', 3)]
1104
1104
1105 Test with skipchild (note the difference caused by ordering):
1105 Test with skipchild (note the difference caused by ordering):
1106
1106
1107 >>> childann = decorate(childdata, childfctx)
1107 >>> childann = decorate(childdata, childfctx)
1108 >>> childann = _annotatepair([p1ann, p2ann], childfctx, childann, True,
1108 >>> childann = _annotatepair([p1ann, p2ann], childfctx, childann, True,
1109 ... diffopts)
1109 ... diffopts)
1110 >>> childann[0]
1110 >>> childann[0]
1111 [('old', 1), ('old', 2), ('p2', 2), ('p2', 2), ('p2', 3)]
1111 [('old', 1), ('old', 2), ('p2', 2), ('p2', 2), ('p2', 3)]
1112
1112
1113 >>> childann = decorate(childdata, childfctx)
1113 >>> childann = decorate(childdata, childfctx)
1114 >>> childann = _annotatepair([p2ann, p1ann], childfctx, childann, True,
1114 >>> childann = _annotatepair([p2ann, p1ann], childfctx, childann, True,
1115 ... diffopts)
1115 ... diffopts)
1116 >>> childann[0]
1116 >>> childann[0]
1117 [('old', 1), ('old', 2), ('p1', 3), ('p1', 3), ('p2', 3)]
1117 [('old', 1), ('old', 2), ('p1', 3), ('p1', 3), ('p2', 3)]
1118 '''
1118 '''
1119 pblocks = [(parent, mdiff.allblocks(parent[1], child[1], opts=diffopts))
1119 pblocks = [(parent, mdiff.allblocks(parent[1], child[1], opts=diffopts))
1120 for parent in parents]
1120 for parent in parents]
1121
1121
1122 if skipchild:
1122 if skipchild:
1123 # Need to iterate over the blocks twice -- make it a list
1123 # Need to iterate over the blocks twice -- make it a list
1124 pblocks = [(p, list(blocks)) for (p, blocks) in pblocks]
1124 pblocks = [(p, list(blocks)) for (p, blocks) in pblocks]
1125 # Mercurial currently prefers p2 over p1 for annotate.
1125 # Mercurial currently prefers p2 over p1 for annotate.
1126 # TODO: change this?
1126 # TODO: change this?
1127 for parent, blocks in pblocks:
1127 for parent, blocks in pblocks:
1128 for (a1, a2, b1, b2), t in blocks:
1128 for (a1, a2, b1, b2), t in blocks:
1129 # Changed blocks ('!') or blocks made only of blank lines ('~')
1129 # Changed blocks ('!') or blocks made only of blank lines ('~')
1130 # belong to the child.
1130 # belong to the child.
1131 if t == '=':
1131 if t == '=':
1132 child[0][b1:b2] = parent[0][a1:a2]
1132 child[0][b1:b2] = parent[0][a1:a2]
1133
1133
1134 if skipchild:
1134 if skipchild:
1135 # Now try and match up anything that couldn't be matched.
1135 # Now try and match up anything that couldn't be matched.
1136 # Reversing pblocks maintains bias towards p2, matching above
1136 # Reversing pblocks maintains bias towards p2, matching above
1137 # behavior.
1137 # behavior.
1138 pblocks.reverse()
1138 pblocks.reverse()
1139
1139
1140 # The heuristics are:
1140 # The heuristics are:
1141 # * Work on blocks of changed lines (effectively diff hunks with -U0).
1141 # * Work on blocks of changed lines (effectively diff hunks with -U0).
1142 # This could potentially be smarter but works well enough.
1142 # This could potentially be smarter but works well enough.
1143 # * For a non-matching section, do a best-effort fit. Match lines in
1143 # * For a non-matching section, do a best-effort fit. Match lines in
1144 # diff hunks 1:1, dropping lines as necessary.
1144 # diff hunks 1:1, dropping lines as necessary.
1145 # * Repeat the last line as a last resort.
1145 # * Repeat the last line as a last resort.
1146
1146
1147 # First, replace as much as possible without repeating the last line.
1147 # First, replace as much as possible without repeating the last line.
1148 remaining = [(parent, []) for parent, _blocks in pblocks]
1148 remaining = [(parent, []) for parent, _blocks in pblocks]
1149 for idx, (parent, blocks) in enumerate(pblocks):
1149 for idx, (parent, blocks) in enumerate(pblocks):
1150 for (a1, a2, b1, b2), _t in blocks:
1150 for (a1, a2, b1, b2), _t in blocks:
1151 if a2 - a1 >= b2 - b1:
1151 if a2 - a1 >= b2 - b1:
1152 for bk in xrange(b1, b2):
1152 for bk in xrange(b1, b2):
1153 if child[0][bk][0] == childfctx:
1153 if child[0][bk][0] == childfctx:
1154 ak = min(a1 + (bk - b1), a2 - 1)
1154 ak = min(a1 + (bk - b1), a2 - 1)
1155 child[0][bk] = parent[0][ak]
1155 child[0][bk] = parent[0][ak]
1156 else:
1156 else:
1157 remaining[idx][1].append((a1, a2, b1, b2))
1157 remaining[idx][1].append((a1, a2, b1, b2))
1158
1158
1159 # Then, look at anything left, which might involve repeating the last
1159 # Then, look at anything left, which might involve repeating the last
1160 # line.
1160 # line.
1161 for parent, blocks in remaining:
1161 for parent, blocks in remaining:
1162 for a1, a2, b1, b2 in blocks:
1162 for a1, a2, b1, b2 in blocks:
1163 for bk in xrange(b1, b2):
1163 for bk in xrange(b1, b2):
1164 if child[0][bk][0] == childfctx:
1164 if child[0][bk][0] == childfctx:
1165 ak = min(a1 + (bk - b1), a2 - 1)
1165 ak = min(a1 + (bk - b1), a2 - 1)
1166 child[0][bk] = parent[0][ak]
1166 child[0][bk] = parent[0][ak]
1167 return child
1167 return child
1168
1168
1169 class filectx(basefilectx):
1169 class filectx(basefilectx):
1170 """A filecontext object makes access to data related to a particular
1170 """A filecontext object makes access to data related to a particular
1171 filerevision convenient."""
1171 filerevision convenient."""
1172 def __init__(self, repo, path, changeid=None, fileid=None,
1172 def __init__(self, repo, path, changeid=None, fileid=None,
1173 filelog=None, changectx=None):
1173 filelog=None, changectx=None):
1174 """changeid can be a changeset revision, node, or tag.
1174 """changeid can be a changeset revision, node, or tag.
1175 fileid can be a file revision or node."""
1175 fileid can be a file revision or node."""
1176 self._repo = repo
1176 self._repo = repo
1177 self._path = path
1177 self._path = path
1178
1178
1179 assert (changeid is not None
1179 assert (changeid is not None
1180 or fileid is not None
1180 or fileid is not None
1181 or changectx is not None), \
1181 or changectx is not None), \
1182 ("bad args: changeid=%r, fileid=%r, changectx=%r"
1182 ("bad args: changeid=%r, fileid=%r, changectx=%r"
1183 % (changeid, fileid, changectx))
1183 % (changeid, fileid, changectx))
1184
1184
1185 if filelog is not None:
1185 if filelog is not None:
1186 self._filelog = filelog
1186 self._filelog = filelog
1187
1187
1188 if changeid is not None:
1188 if changeid is not None:
1189 self._changeid = changeid
1189 self._changeid = changeid
1190 if changectx is not None:
1190 if changectx is not None:
1191 self._changectx = changectx
1191 self._changectx = changectx
1192 if fileid is not None:
1192 if fileid is not None:
1193 self._fileid = fileid
1193 self._fileid = fileid
1194
1194
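# Hedged sketch of the three anchoring forms permitted by the assertion in
# __init__ above (assuming `repo` is an opened repository in which 'foo' is
# tracked at tip); `_example_filectx_forms` is a hypothetical helper.
def _example_filectx_forms(repo):
    a = filectx(repo, 'foo', changeid='tip')            # via a changeset id
    b = filectx(repo, 'foo', changectx=repo['tip'])     # via a changectx
    c = filectx(repo, 'foo', fileid=a.filenode())       # via a file node
    return a.filenode() == b.filenode() == c.filenode()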
1195 @propertycache
1195 @propertycache
1196 def _changectx(self):
1196 def _changectx(self):
1197 try:
1197 try:
1198 return changectx(self._repo, self._changeid)
1198 return changectx(self._repo, self._changeid)
1199 except error.FilteredRepoLookupError:
1199 except error.FilteredRepoLookupError:
1200 # Linkrev may point to any revision in the repository. When the
1200 # Linkrev may point to any revision in the repository. When the
1201 # repository is filtered this may lead to `filectx` trying to build
1201 # repository is filtered this may lead to `filectx` trying to build
1202 # `changectx` for a filtered revision. In such a case we fall back to
1202 # `changectx` for a filtered revision. In such a case we fall back to
1203 # creating `changectx` on the unfiltered version of the repository.
1203 # creating `changectx` on the unfiltered version of the repository.
1204 # This fallback should not be an issue because `changectx` from
1204 # This fallback should not be an issue because `changectx` from
1205 # `filectx` are not used in complex operations that care about
1205 # `filectx` are not used in complex operations that care about
1206 # filtering.
1206 # filtering.
1207 #
1207 #
1208 # This fallback is a cheap and dirty fix that prevents several
1208 # This fallback is a cheap and dirty fix that prevents several
1209 # crashes. It does not ensure the behavior is correct. However, the
1209 # crashes. It does not ensure the behavior is correct. However, the
1210 # behavior was not correct before filtering either, and "incorrect
1210 # behavior was not correct before filtering either, and "incorrect
1211 # behavior" is seen as better than "crash".
1211 # behavior" is seen as better than "crash".
1212 #
1212 #
1213 # Linkrevs have several serious troubles with filtering that are
1213 # Linkrevs have several serious troubles with filtering that are
1214 # complicated to solve. Proper handling of the issue here should be
1214 # complicated to solve. Proper handling of the issue here should be
1215 # considered when solutions to the linkrev issues are on the table.
1215 # considered when solutions to the linkrev issues are on the table.
1216 return changectx(self._repo.unfiltered(), self._changeid)
1216 return changectx(self._repo.unfiltered(), self._changeid)
1217
1217
1218 def filectx(self, fileid, changeid=None):
1218 def filectx(self, fileid, changeid=None):
1219 '''opens an arbitrary revision of the file without
1219 '''opens an arbitrary revision of the file without
1220 opening a new filelog'''
1220 opening a new filelog'''
1221 return filectx(self._repo, self._path, fileid=fileid,
1221 return filectx(self._repo, self._path, fileid=fileid,
1222 filelog=self._filelog, changeid=changeid)
1222 filelog=self._filelog, changeid=changeid)
1223
1223
1224 def rawdata(self):
1224 def rawdata(self):
1225 return self._filelog.revision(self._filenode, raw=True)
1225 return self._filelog.revision(self._filenode, raw=True)
1226
1226
1227 def rawflags(self):
1227 def rawflags(self):
1228 """low-level revlog flags"""
1228 """low-level revlog flags"""
1229 return self._filelog.flags(self._filerev)
1229 return self._filelog.flags(self._filerev)
1230
1230
1231 def data(self):
1231 def data(self):
1232 try:
1232 try:
1233 return self._filelog.read(self._filenode)
1233 return self._filelog.read(self._filenode)
1234 except error.CensoredNodeError:
1234 except error.CensoredNodeError:
1235 if self._repo.ui.config("censor", "policy") == "ignore":
1235 if self._repo.ui.config("censor", "policy") == "ignore":
1236 return ""
1236 return ""
1237 raise error.Abort(_("censored node: %s") % short(self._filenode),
1237 raise error.Abort(_("censored node: %s") % short(self._filenode),
1238 hint=_("set censor.policy to ignore errors"))
1238 hint=_("set censor.policy to ignore errors"))
1239
1239
1240 def size(self):
1240 def size(self):
1241 return self._filelog.size(self._filerev)
1241 return self._filelog.size(self._filerev)
1242
1242
1243 @propertycache
1243 @propertycache
1244 def _copied(self):
1244 def _copied(self):
1245 """check if file was actually renamed in this changeset revision
1245 """check if file was actually renamed in this changeset revision
1246
1246
1247 If a rename is logged in the file revision, we report a copy for the changeset only
1247 If a rename is logged in the file revision, we report a copy for the changeset only
1248 if the file revision's linkrev points back to the changeset in question
1248 if the file revision's linkrev points back to the changeset in question
1249 or both changeset parents contain different file revisions.
1249 or both changeset parents contain different file revisions.
1250 """
1250 """
1251
1251
1252 renamed = self._filelog.renamed(self._filenode)
1252 renamed = self._filelog.renamed(self._filenode)
1253 if not renamed:
1253 if not renamed:
1254 return renamed
1254 return renamed
1255
1255
1256 if self.rev() == self.linkrev():
1256 if self.rev() == self.linkrev():
1257 return renamed
1257 return renamed
1258
1258
1259 name = self.path()
1259 name = self.path()
1260 fnode = self._filenode
1260 fnode = self._filenode
1261 for p in self._changectx.parents():
1261 for p in self._changectx.parents():
1262 try:
1262 try:
1263 if fnode == p.filenode(name):
1263 if fnode == p.filenode(name):
1264 return None
1264 return None
1265 except error.LookupError:
1265 except error.LookupError:
1266 pass
1266 pass
1267 return renamed
1267 return renamed
1268
1268
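The heuristic above can be hard to follow from the code alone. A minimal standalone sketch of the same decision (not Mercurial code; plain dicts stand in for the parent manifests, and the rename record is assumed to be a (sourcepath, sourcenode) tuple as returned by filelog.renamed()):

def reportcopy(renamed, fnode, name, parent_manifests, rev, linkrev):
    # not renamed at all: nothing to report
    if not renamed:
        return renamed
    # the filelog revision was introduced by this very changeset:
    # trust the rename record as-is
    if rev == linkrev:
        return renamed
    # if either parent already carries this exact file node, the file is
    # not new in this changeset, so do not report a copy
    for manifest in parent_manifests:
        if manifest.get(name) == fnode:
            return None
    return renamed

# example: the first parent already has this file node, so no copy is reported
print(reportcopy(('old/name', b'\x11' * 20), b'\xaa' * 20, 'new/name',
                 [{'new/name': b'\xaa' * 20}, {}], rev=5, linkrev=3))  # -> None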
1269 def children(self):
1270 # hard for renames
1271 c = self._filelog.children(self._filenode)
1272 return [filectx(self._repo, self._path, fileid=x,
1273 filelog=self._filelog) for x in c]
1274
1275 class committablectx(basectx):
1276 """A committablectx object provides common functionality for a context that
1277 wants the ability to commit, e.g. workingctx or memctx."""
1278 def __init__(self, repo, text="", user=None, date=None, extra=None,
1279 changes=None):
1280 self._repo = repo
1281 self._rev = None
1282 self._node = None
1283 self._text = text
1284 if date:
1285 self._date = util.parsedate(date)
1286 if user:
1287 self._user = user
1288 if changes:
1289 self._status = changes
1290
1291 self._extra = {}
1292 if extra:
1293 self._extra = extra.copy()
1294 if 'branch' not in self._extra:
1295 try:
1296 branch = encoding.fromlocal(self._repo.dirstate.branch())
1297 except UnicodeDecodeError:
1298 raise error.Abort(_('branch name not in UTF-8!'))
1299 self._extra['branch'] = branch
1300 if self._extra['branch'] == '':
1301 self._extra['branch'] = 'default'
1302
1303 def __bytes__(self):
1304 return bytes(self._parents[0]) + "+"
1305
1306 __str__ = encoding.strmethod(__bytes__)
1307
1308 def __nonzero__(self):
1309 return True
1310
1311 __bool__ = __nonzero__
1312
1313 def _buildflagfunc(self):
1314 # Create a fallback function for getting file flags when the
1315 # filesystem doesn't support them
1316
1317 copiesget = self._repo.dirstate.copies().get
1318 parents = self.parents()
1319 if len(parents) < 2:
1320 # when we have one parent, it's easy: copy from parent
1321 man = parents[0].manifest()
1322 def func(f):
1323 f = copiesget(f, f)
1324 return man.flags(f)
1325 else:
1326 # merges are tricky: we try to reconstruct the unstored
1327 # result from the merge (issue1802)
1328 p1, p2 = parents
1329 pa = p1.ancestor(p2)
1330 m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()
1331
1332 def func(f):
1333 f = copiesget(f, f) # may be wrong for merges with copies
1334 fl1, fl2, fla = m1.flags(f), m2.flags(f), ma.flags(f)
1335 if fl1 == fl2:
1336 return fl1
1337 if fl1 == fla:
1338 return fl2
1339 if fl2 == fla:
1340 return fl1
1341 return '' # punt for conflicts
1342
1343 return func
1344
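As a rough illustration of the three-way flag reconstruction used in the merge branch above (a sketch, not part of Mercurial; flags are '' for a regular file, 'x' for executable, 'l' for a symlink):

def mergeflags(fl1, fl2, fla):
    # parents agree: nothing to reconcile
    if fl1 == fl2:
        return fl1
    # only one side changed the flag relative to the ancestor: keep that change
    if fl1 == fla:
        return fl2
    if fl2 == fla:
        return fl1
    # both sides changed it differently: punt, as the code above does
    return ''

assert mergeflags('x', 'x', '') == 'x'   # both parents made it executable
assert mergeflags('', 'x', '') == 'x'    # only p2 changed the flag
assert mergeflags('l', 'x', '') == ''    # conflicting changes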
1345 @propertycache
1346 def _flagfunc(self):
1347 return self._repo.dirstate.flagfunc(self._buildflagfunc)
1348
1349 @propertycache
1350 def _status(self):
1351 return self._repo.status()
1352
1353 @propertycache
1354 def _user(self):
1355 return self._repo.ui.username()
1356
1357 @propertycache
1358 def _date(self):
1359 ui = self._repo.ui
1360 date = ui.configdate('devel', 'default-date')
1361 if date is None:
1362 date = util.makedate()
1363 return date
1364
1365 def subrev(self, subpath):
1366 return None
1367
1368 def manifestnode(self):
1369 return None
1370 def user(self):
1371 return self._user or self._repo.ui.username()
1372 def date(self):
1373 return self._date
1374 def description(self):
1375 return self._text
1376 def files(self):
1377 return sorted(self._status.modified + self._status.added +
1378 self._status.removed)
1379
1380 def modified(self):
1381 return self._status.modified
1382 def added(self):
1383 return self._status.added
1384 def removed(self):
1385 return self._status.removed
1386 def deleted(self):
1387 return self._status.deleted
1388 def branch(self):
1389 return encoding.tolocal(self._extra['branch'])
1390 def closesbranch(self):
1391 return 'close' in self._extra
1392 def extra(self):
1393 return self._extra
1394
1395 def tags(self):
1396 return []
1397
1398 def bookmarks(self):
1399 b = []
1400 for p in self.parents():
1401 b.extend(p.bookmarks())
1402 return b
1403
1404 def phase(self):
1405 phase = phases.draft # default phase to draft
1406 for p in self.parents():
1407 phase = max(phase, p.phase())
1408 return phase
1409
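phase() above simply takes the maximum of the parents' phases, never going below draft. Assuming the usual ordering of the phase constants (public < draft < secret), the behaviour reduces to something like this small sketch:

PUBLIC, DRAFT, SECRET = 0, 1, 2   # assumed ordering of the phases.* constants

def pendingphase(parent_phases):
    phase = DRAFT                 # an uncommitted context is at least draft
    for p in parent_phases:
        phase = max(phase, p)
    return phase

assert pendingphase([PUBLIC]) == DRAFT          # committing on a public head
assert pendingphase([DRAFT, SECRET]) == SECRET  # a secret parent wins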
1410 def hidden(self):
1411 return False
1412
1413 def children(self):
1414 return []
1415
1416 def flags(self, path):
1417 if r'_manifest' in self.__dict__:
1418 try:
1419 return self._manifest.flags(path)
1420 except KeyError:
1421 return ''
1422
1423 try:
1424 return self._flagfunc(path)
1425 except OSError:
1426 return ''
1427
1428 def ancestor(self, c2):
1429 """return the "best" ancestor context of self and c2"""
1430 return self._parents[0].ancestor(c2) # punt on two parents for now
1431
1432 def walk(self, match):
1433 '''Generates matching file names.'''
1434 return sorted(self._repo.dirstate.walk(match, sorted(self.substate),
1435 True, False))
1436
1437 def matches(self, match):
1438 return sorted(self._repo.dirstate.matches(match))
1439
1440 def ancestors(self):
1441 for p in self._parents:
1442 yield p
1443 for a in self._repo.changelog.ancestors(
1444 [p.rev() for p in self._parents]):
1445 yield changectx(self._repo, a)
1446
1447 def markcommitted(self, node):
1448 """Perform post-commit cleanup necessary after committing this ctx
1449
1450 Specifically, this updates backing stores this working context
1451 wraps to reflect the fact that the changes reflected by this
1452 workingctx have been committed. For example, it marks
1453 modified and added files as normal in the dirstate.
1454
1455 """
1456
1457 with self._repo.dirstate.parentchange():
1458 for f in self.modified() + self.added():
1459 self._repo.dirstate.normal(f)
1460 for f in self.removed():
1461 self._repo.dirstate.drop(f)
1462 self._repo.dirstate.setparents(node)
1463
1464 # write changes out explicitly, because nesting wlock at
1465 # runtime may prevent 'wlock.release()' in 'repo.commit()'
1466 # from immediately doing so for subsequent changing files
1467 self._repo.dirstate.write(self._repo.currenttransaction())
1468
1469 def dirty(self, missing=False, merge=True, branch=True):
1470 return False
1471
1472 class workingctx(committablectx):
1473 """A workingctx object makes access to data related to
1474 the current working directory convenient.
1475 date - any valid date string or (unixtime, offset), or None.
1476 user - username string, or None.
1477 extra - a dictionary of extra values, or None.
1478 changes - a list of file lists as returned by localrepo.status()
1479 or None to use the repository status.
1480 """
1481 def __init__(self, repo, text="", user=None, date=None, extra=None,
1482 changes=None):
1483 super(workingctx, self).__init__(repo, text, user, date, extra, changes)
1484
1485 def __iter__(self):
1486 d = self._repo.dirstate
1487 for f in d:
1488 if d[f] != 'r':
1489 yield f
1490
1491 def __contains__(self, key):
1492 return self._repo.dirstate[key] not in "?r"
1493
1494 def hex(self):
1495 return hex(wdirid)
1496
1497 @propertycache
1498 def _parents(self):
1499 p = self._repo.dirstate.parents()
1500 if p[1] == nullid:
1501 p = p[:-1]
1502 return [changectx(self._repo, x) for x in p]
1503
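A small sketch of what the _parents property does with the dirstate parents (hypothetical standalone code; the assumption here is that nullid is twenty zero bytes and that the dirstate always reports two parents, the second being null outside of a merge):

NULLID = b'\0' * 20  # assumption: matches mercurial.node.nullid

def effectiveparents(dirstate_parents):
    p1, p2 = dirstate_parents
    # outside of an uncommitted merge the second parent is null; drop it so
    # callers see a single-parent working context
    if p2 == NULLID:
        return [p1]
    return [p1, p2]

assert effectiveparents((b'\x42' * 20, NULLID)) == [b'\x42' * 20]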
1504 def filectx(self, path, filelog=None):
1505 """get a file context from the working directory"""
1506 return workingfilectx(self._repo, path, workingctx=self,
1507 filelog=filelog)
1508
1509 def dirty(self, missing=False, merge=True, branch=True):
1510 "check whether a working directory is modified"
1511 # check subrepos first
1512 for s in sorted(self.substate):
1513 if self.sub(s).dirty(missing=missing):
1514 return True
1515 # check current working dir
1516 return ((merge and self.p2()) or
1517 (branch and self.branch() != self.p1().branch()) or
1518 self.modified() or self.added() or self.removed() or
1519 (missing and self.deleted()))
1520
1521 def add(self, list, prefix=""):
1522 with self._repo.wlock():
1523 ui, ds = self._repo.ui, self._repo.dirstate
1524 uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
1525 rejected = []
1526 lstat = self._repo.wvfs.lstat
1527 for f in list:
1528 # ds.pathto() returns an absolute file when this is invoked from
1529 # the keyword extension. That gets flagged as non-portable on
1530 # Windows, since it contains the drive letter and colon.
1531 scmutil.checkportable(ui, os.path.join(prefix, f))
1532 try:
1533 st = lstat(f)
1534 except OSError:
1535 ui.warn(_("%s does not exist!\n") % uipath(f))
1536 rejected.append(f)
1537 continue
1538 if st.st_size > 10000000:
1539 ui.warn(_("%s: up to %d MB of RAM may be required "
1540 "to manage this file\n"
1541 "(use 'hg revert %s' to cancel the "
1542 "pending addition)\n")
1543 % (f, 3 * st.st_size // 1000000, uipath(f)))
1544 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1545 ui.warn(_("%s not added: only files and symlinks "
1546 "supported currently\n") % uipath(f))
1547 rejected.append(f)
1548 elif ds[f] in 'amn':
1549 ui.warn(_("%s already tracked!\n") % uipath(f))
1550 elif ds[f] == 'r':
1551 ds.normallookup(f)
1552 else:
1553 ds.add(f)
1554 return rejected
1555
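The dirstate-state handling in add() is compact; here is a partial sketch of the same dispatch with the states spelled out (plain Python, not Mercurial internals; 'a' = added, 'm' = merged, 'n' = normal, 'r' = marked for removal, '?' = untracked; the type check on regular files and symlinks is omitted):

def plan_add(state, size):
    """Return (warnings, action) for adding one file, mirroring add() above."""
    warnings = []
    if size > 10000000:
        # same rough estimate as above: ~3 bytes of RAM per byte of file
        warnings.append('up to %d MB of RAM may be required' % (3 * size // 1000000))
    if state in 'amn':
        warnings.append('already tracked')
        return warnings, None
    if state == 'r':
        return warnings, 'normallookup'   # re-track a file marked for removal
    return warnings, 'add'                # untracked: schedule the addition

assert plan_add('?', 1024) == ([], 'add')
assert plan_add('r', 1024) == ([], 'normallookup')
assert plan_add('n', 20000000)[1] is None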
1556 def forget(self, files, prefix=""):
1557 with self._repo.wlock():
1558 ds = self._repo.dirstate
1559 uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
1560 rejected = []
1561 for f in files:
1562 if f not in self._repo.dirstate:
1563 self._repo.ui.warn(_("%s not tracked!\n") % uipath(f))
1564 rejected.append(f)
1565 elif self._repo.dirstate[f] != 'a':
1566 self._repo.dirstate.remove(f)
1567 else:
1568 self._repo.dirstate.drop(f)
1569 return rejected
1570
1571 def undelete(self, list):
1572 pctxs = self.parents()
1573 with self._repo.wlock():
1574 ds = self._repo.dirstate
1575 for f in list:
1576 if self._repo.dirstate[f] != 'r':
1577 self._repo.ui.warn(_("%s not removed!\n") % ds.pathto(f))
1578 else:
1579 fctx = f in pctxs[0] and pctxs[0][f] or pctxs[1][f]
1580 t = fctx.data()
1581 self._repo.wwrite(f, t, fctx.flags())
1582 self._repo.dirstate.normal(f)
1583
1584 def copy(self, source, dest):
1585 try:
1586 st = self._repo.wvfs.lstat(dest)
1587 except OSError as err:
1588 if err.errno != errno.ENOENT:
1589 raise
1590 self._repo.ui.warn(_("%s does not exist!\n")
1591 % self._repo.dirstate.pathto(dest))
1592 return
1593 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1594 self._repo.ui.warn(_("copy failed: %s is not a file or a "
1595 "symbolic link\n")
1596 % self._repo.dirstate.pathto(dest))
1597 else:
1598 with self._repo.wlock():
1599 if self._repo.dirstate[dest] in '?':
1600 self._repo.dirstate.add(dest)
1601 elif self._repo.dirstate[dest] in 'r':
1602 self._repo.dirstate.normallookup(dest)
1603 self._repo.dirstate.copy(source, dest)
1604
1605 def match(self, pats=None, include=None, exclude=None, default='glob',
1606 listsubrepos=False, badfn=None):
1607 r = self._repo
1608
1609 # Only a case-insensitive filesystem needs magic to translate user input
1610 # to actual case in the filesystem.
1611 icasefs = not util.fscasesensitive(r.root)
1612 return matchmod.match(r.root, r.getcwd(), pats, include, exclude,
1613 default, auditor=r.auditor, ctx=self,
1614 listsubrepos=listsubrepos, badfn=badfn,
1615 icasefs=icasefs)
1616
1617 def _filtersuspectsymlink(self, files):
1618 if not files or self._repo.dirstate._checklink:
1619 return files
1620
1621 # Symlink placeholders may get non-symlink-like contents
1622 # via user error or dereferencing by NFS or Samba servers,
1623 # so we filter out any placeholders that don't look like a
1624 # symlink
1625 sane = []
1626 for f in files:
1627 if self.flags(f) == 'l':
1628 d = self[f].data()
1629 if d == '' or len(d) >= 1024 or '\n' in d or util.binary(d):
1630 self._repo.ui.debug('ignoring suspect symlink placeholder'
1631 ' "%s"\n' % f)
1632 continue
1633 sane.append(f)
1634 return sane
1635
1636 def _checklookup(self, files):
1637 # check for any possibly clean files
1638 if not files:
1639 return [], [], []
1640
1641 modified = []
1642 deleted = []
1643 fixup = []
1644 pctx = self._parents[0]
1645 # do a full compare of any files that might have changed
1646 for f in sorted(files):
1647 try:
1648 # This will return True for a file that got replaced by a
1649 # directory in the interim, but fixing that is pretty hard.
1650 if (f not in pctx or self.flags(f) != pctx.flags(f)
1651 or pctx[f].cmp(self[f])):
1652 modified.append(f)
1653 else:
1654 fixup.append(f)
1655 except (IOError, OSError):
1656 # A file became inaccessible in between? Mark it as deleted,
1657 # matching dirstate behavior (issue5584).
1658 # The dirstate has more complex behavior around whether a
1659 # missing file matches a directory, etc, but we don't need to
1660 # bother with that: if f has made it to this point, we're sure
1661 # it's in the dirstate.
1662 deleted.append(f)
1663
1664 return modified, deleted, fixup
1665
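A standalone sketch of the classification done by _checklookup (hypothetical helpers: parent is a dict of known-good contents, read is a callable that may raise OSError for files that vanished):

def checklookup(files, parent, read):
    modified, deleted, fixup = [], [], []
    for f in sorted(files):
        try:
            if f not in parent or parent[f] != read(f):
                modified.append(f)      # really changed
            else:
                fixup.append(f)         # contents identical: only the stat data lied
        except OSError:
            deleted.append(f)           # vanished while we were looking (issue5584)
    return modified, deleted, fixup

contents = {'a': b'new', 'b': b'same'}
def read(f):
    if f == 'c':
        raise OSError('gone')
    return contents[f]

print(checklookup(['a', 'b', 'c'], {'a': b'old', 'b': b'same'}, read))
# -> (['a'], ['c'], ['b'])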
1666 def _poststatusfixup(self, status, fixup):
1667 """update dirstate for files that are actually clean"""
1668 poststatus = self._repo.postdsstatus()
1669 if fixup or poststatus:
1670 try:
1671 oldid = self._repo.dirstate.identity()
1672
1673 # updating the dirstate is optional
1674 # so we don't wait on the lock
1675 # wlock can invalidate the dirstate, so cache normal _after_
1676 # taking the lock
1677 with self._repo.wlock(False):
1678 if self._repo.dirstate.identity() == oldid:
1679 if fixup:
1680 normal = self._repo.dirstate.normal
1681 for f in fixup:
1682 normal(f)
1683 # write changes out explicitly, because nesting
1684 # wlock at runtime may prevent 'wlock.release()'
1685 # after this block from doing so for subsequent
1686 # changing files
1687 tr = self._repo.currenttransaction()
1688 self._repo.dirstate.write(tr)
1689
1690 if poststatus:
1691 for ps in poststatus:
1692 ps(self, status)
1693 else:
1694 # in this case, writing changes out breaks
1695 # consistency, because .hg/dirstate was
1696 # already changed simultaneously after last
1697 # caching (see also issue5584 for detail)
1698 self._repo.ui.debug('skip updating dirstate: '
1699 'identity mismatch\n')
1700 except error.LockError:
1701 pass
1702 finally:
1703 # Even if the wlock couldn't be grabbed, clear out the list.
1704 self._repo.clearpostdsstatus()
1705
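The pattern in _poststatusfixup (remember a cheap identity of the cached state, then only write back if that identity is unchanged once the lock is finally taken) is a generic optimistic-update idiom. A minimal sketch with a plain file standing in for .hg/dirstate and the locking itself omitted; all names here are hypothetical:

import os, tempfile

def identity(path):
    st = os.stat(path)
    return (st.st_mtime, st.st_size)     # cheap proxy for "has it changed?"

def optimistic_update(path, newdata):
    oldid = identity(path)               # snapshot before doing the real work
    # ... expensive computation would happen here, without any lock held ...
    if identity(path) == oldid:          # re-check right before writing
        with open(path, 'wb') as fp:
            fp.write(newdata)
        return True
    return False                         # somebody else wrote first: skip the update

with tempfile.NamedTemporaryFile(delete=False) as fp:
    fp.write(b'old')
print(optimistic_update(fp.name, b'new'))  # -> True
os.unlink(fp.name)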
1706 def _dirstatestatus(self, match=None, ignored=False, clean=False,
1707 unknown=False):
1708 '''Gets the status from the dirstate -- internal use only.'''
1709 listignored, listclean, listunknown = ignored, clean, unknown
1710 match = match or matchmod.always(self._repo.root, self._repo.getcwd())
1711 subrepos = []
1712 if '.hgsub' in self:
1713 subrepos = sorted(self.substate)
1714 cmp, s = self._repo.dirstate.status(match, subrepos, listignored,
1715 listclean, listunknown)
1716
1717 # check for any possibly clean files
1718 fixup = []
1719 if cmp:
1720 modified2, deleted2, fixup = self._checklookup(cmp)
1721 s.modified.extend(modified2)
1722 s.deleted.extend(deleted2)
1723
1724 if fixup and listclean:
1725 s.clean.extend(fixup)
1726
1727 self._poststatusfixup(s, fixup)
1728
1729 if match.always():
1730 # cache for performance
1731 if s.unknown or s.ignored or s.clean:
1732 # "_status" is cached with list*=False in the normal route
1733 self._status = scmutil.status(s.modified, s.added, s.removed,
1734 s.deleted, [], [], [])
1735 else:
1736 self._status = s
1737
1738 return s
1739
1740 @propertycache
1741 def _manifest(self):
1742 """generate a manifest corresponding to the values in self._status
1743
1744 This reuses the file nodeid from the parent, but we use special node
1745 identifiers for added and modified files. This is used by manifests
1746 merge to see that files are different and by update logic to avoid
1747 deleting newly added files.
1748 """
1749 return self._buildstatusmanifest(self._status)
1750
1751 def _buildstatusmanifest(self, status):
1752 """Builds a manifest that includes the given status results."""
1753 parents = self.parents()
1754
1755 man = parents[0].manifest().copy()
1756
1757 ff = self._flagfunc
1758 for i, l in ((addednodeid, status.added),
1759 (modifiednodeid, status.modified)):
1760 for f in l:
1761 man[f] = i
1762 try:
1763 man.setflag(f, ff(f))
1764 except OSError:
1765 pass
1766
1767 for f in status.deleted + status.removed:
1768 if f in man:
1769 del man[f]
1770
1771 return man
1772
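Conceptually, _buildstatusmanifest overlays the dirstate status onto the parent manifest: added and modified files get sentinel node ids, and removed or deleted files disappear. A sketch with plain dicts (the sentinel values are placeholders standing in for Mercurial's addednodeid/modifiednodeid, and flags are ignored):

ADDED, MODIFIED = 'added-sentinel', 'modified-sentinel'  # stand-ins, not real node ids

def buildstatusmanifest(parentman, status):
    man = dict(parentman)                       # start from the parent manifest
    for sentinel, names in ((ADDED, status['added']),
                            (MODIFIED, status['modified'])):
        for f in names:
            man[f] = sentinel                   # mark as "different from any real node"
    for f in status['deleted'] + status['removed']:
        man.pop(f, None)                        # gone from the working directory
    return man

status = {'added': ['new.txt'], 'modified': ['a.txt'], 'removed': ['b.txt'], 'deleted': []}
print(buildstatusmanifest({'a.txt': 'n1', 'b.txt': 'n2'}, status))
# -> {'a.txt': 'modified-sentinel', 'new.txt': 'added-sentinel'}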
1773 def _buildstatus(self, other, s, match, listignored, listclean,
1774 listunknown):
1775 """build a status with respect to another context
1776
1777 This includes logic for maintaining the fast path of status when
1778 comparing the working directory against its parent, which is to skip
1779 building a new manifest if self (working directory) is not comparing
1780 against its parent (repo['.']).
1781 """
1782 s = self._dirstatestatus(match, listignored, listclean, listunknown)
1783 # Filter out symlinks that, in the case of FAT32 and NTFS filesystems,
1784 # might have accidentally ended up with the entire contents of the file
1785 # they are supposed to be linking to.
1786 s.modified[:] = self._filtersuspectsymlink(s.modified)
1787 if other != self._repo['.']:
1788 s = super(workingctx, self)._buildstatus(other, s, match,
1789 listignored, listclean,
1790 listunknown)
1791 return s
1792
1793 def _matchstatus(self, other, match):
1794 """override the match method with a filter for directory patterns
1795
1796 We use inheritance to customize the match.bad method only in cases of
1797 workingctx since it belongs only to the working directory when
1798 comparing against the parent changeset.
1799
1800 If we aren't comparing against the working directory's parent, then we
1801 just use the default match object sent to us.
1802 """
1803 superself = super(workingctx, self)
1804 match = superself._matchstatus(other, match)
1805 if other != self._repo['.']:
1806 def bad(f, msg):
1807 # 'f' may be a directory pattern from 'match.files()',
1808 # so 'f not in ctx1' is not enough
1809 if f not in other and not other.hasdir(f):
1810 self._repo.ui.warn('%s: %s\n' %
1811 (self._repo.dirstate.pathto(f), msg))
1812 match.bad = bad
1813 return match
1814
1815 def markcommitted(self, node):
1816 super(workingctx, self).markcommitted(node)
1817
1818 sparse.aftercommit(self._repo, node)
1819
1820 class committablefilectx(basefilectx):
1821 """A committablefilectx provides common functionality for a file context
1822 that wants the ability to commit, e.g. workingfilectx or memfilectx."""
1823 def __init__(self, repo, path, filelog=None, ctx=None):
1824 self._repo = repo
1825 self._path = path
1826 self._changeid = None
1827 self._filerev = self._filenode = None
1828
1829 if filelog is not None:
1830 self._filelog = filelog
1831 if ctx:
1832 self._changectx = ctx
1833
1834 def __nonzero__(self):
1835 return True
1836
1837 __bool__ = __nonzero__
1838
1839 def linkrev(self):
1840 # linked to self._changectx no matter if file is modified or not
1841 return self.rev()
1842
1843 def parents(self):
1844 '''return parent filectxs, following copies if necessary'''
1845 def filenode(ctx, path):
1846 return ctx._manifest.get(path, nullid)
1847
1848 path = self._path
1849 fl = self._filelog
1850 pcl = self._changectx._parents
1851 renamed = self.renamed()
1852
1853 if renamed:
1854 pl = [renamed + (None,)]
1855 else:
1856 pl = [(path, filenode(pcl[0], path), fl)]
1857
1858 for pc in pcl[1:]:
1859 pl.append((path, filenode(pc, path), fl))
1860
1861 return [self._parentfilectx(p, fileid=n, filelog=l)
1862 for p, n, l in pl if n != nullid]
1863
1864 def children(self):
1865 return []
1866
1867 class workingfilectx(committablefilectx):
1868 """A workingfilectx object makes access to data related to a particular
1869 file in the working directory convenient."""
1870 def __init__(self, repo, path, filelog=None, workingctx=None):
1871 super(workingfilectx, self).__init__(repo, path, filelog, workingctx)
1872
1873 @propertycache
1874 def _changectx(self):
1875 return workingctx(self._repo)
1876
1877 def data(self):
1878 return self._repo.wread(self._path)
1879 def renamed(self):
1880 rp = self._repo.dirstate.copied(self._path)
1881 if not rp:
1882 return None
1883 return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
1884
1885 def size(self):
1886 return self._repo.wvfs.lstat(self._path).st_size
1887 def date(self):
1888 t, tz = self._changectx.date()
1889 try:
1890 return (self._repo.wvfs.lstat(self._path).st_mtime, tz)
1891 except OSError as err:
1892 if err.errno != errno.ENOENT:
1893 raise
1894 return (t, tz)
1895
1896 def exists(self):
1897 return self._repo.wvfs.exists(self._path)
1898
1899 def lexists(self):
1900 return self._repo.wvfs.lexists(self._path)
1901
1902 def audit(self):
1903 return self._repo.wvfs.audit(self._path)
1904
1905 def cmp(self, fctx):
1906 """compare with other file context
1907
1908 returns True if different from fctx.
1909 """
1910 # fctx should be a filectx (not a workingfilectx)
1911 # invert comparison to reuse the same code path
1912 return fctx.cmp(self)
1913
1914 def remove(self, ignoremissing=False):
1915 """wraps unlink for a repo's working directory"""
1916 self._repo.wvfs.unlinkpath(self._path, ignoremissing=ignoremissing)
1917
1918 def write(self, data, flags, backgroundclose=False):
1919 """wraps repo.wwrite"""
1920 self._repo.wwrite(self._path, data, flags,
1921 backgroundclose=backgroundclose)
1922
1923 def setflags(self, l, x):
1924 self._repo.wvfs.setflags(self._path, l, x)
1925
1926 class workingcommitctx(workingctx):
1927 """A workingcommitctx object makes access to data related to
1928 the revision being committed convenient.
1929
1930 This hides changes in the working directory, if they aren't
1931 committed in this context.
1932 """
1933 def __init__(self, repo, changes,
1934 text="", user=None, date=None, extra=None):
1935 super(workingctx, self).__init__(repo, text, user, date, extra,
1936 changes)
1937
1938 def _dirstatestatus(self, match=None, ignored=False, clean=False,
1939 unknown=False):
1940 """Return matched files only in ``self._status``
1941
1942 Uncommitted files appear "clean" via this context, even if
1943 they aren't actually so in the working directory.
1944 """
1945 match = match or matchmod.always(self._repo.root, self._repo.getcwd())
1946 if clean:
1947 clean = [f for f in self._manifest if f not in self._changedset]
1948 else:
1949 clean = []
1950 return scmutil.status([f for f in self._status.modified if match(f)],
1951 [f for f in self._status.added if match(f)],
1952 [f for f in self._status.removed if match(f)],
1953 [], [], [], clean)
1954
1955 @propertycache
1956 def _changedset(self):
1957 """Return the set of files changed in this context
1958 """
1959 changed = set(self._status.modified)
1960 changed.update(self._status.added)
1961 changed.update(self._status.removed)
1962 return changed
1963
1964 def makecachingfilectxfn(func):
1965 """Create a filectxfn that caches based on the path.
1966
1967 We can't use util.cachefunc because it uses all arguments as the cache
1968 key and this creates a cycle since the arguments include the repo and
1969 memctx.
1970 """
1971 cache = {}
1972
1973 def getfilectx(repo, memctx, path):
1974 if path not in cache:
1975 cache[path] = func(repo, memctx, path)
1976 return cache[path]
1977
1978 return getfilectx
1979
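The per-path memoization in makecachingfilectxfn matters because filectxfn may be called several times for the same path during a commit, and computing a memfilectx can be expensive (e.g. when converting). A sketch of the same idea with a dummy callback, assuming nothing beyond what is shown above:

calls = []

def expensivefilectxfn(repo, memctx, path):
    calls.append(path)                  # pretend this did real work
    return 'filectx(%s)' % path

def makecaching(func):
    cache = {}
    def getfilectx(repo, memctx, path):
        if path not in cache:
            cache[path] = func(repo, memctx, path)
        return cache[path]
    return getfilectx

cached = makecaching(expensivefilectxfn)
cached(None, None, 'a.txt')
cached(None, None, 'a.txt')             # served from the cache
assert calls == ['a.txt']               # the underlying callback ran only once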
1980 def memfilefromctx(ctx):
1981 """Given a context return a memfilectx for ctx[path]
1982
1983 This is a convenience method for building a memctx based on another
1984 context.
1985 """
1986 def getfilectx(repo, memctx, path):
1987 fctx = ctx[path]
1988 # this is weird but apparently we only keep track of one parent
1989 # (why not only store that instead of a tuple?)
1990 copied = fctx.renamed()
1991 if copied:
1992 copied = copied[0]
1993 return memfilectx(repo, path, fctx.data(),
1994 islink=fctx.islink(), isexec=fctx.isexec(),
1995 copied=copied, memctx=memctx)
1996
1997 return getfilectx
1998
1999 def memfilefrompatch(patchstore):
2000 """Given a patch (e.g. patchstore object) return a memfilectx
2001
2002 This is a convenience method for building a memctx based on a patchstore.
2003 """
2004 def getfilectx(repo, memctx, path):
2005 data, mode, copied = patchstore.getfile(path)
2006 if data is None:
2007 return None
2008 islink, isexec = mode
2009 return memfilectx(repo, path, data, islink=islink,
2010 isexec=isexec, copied=copied,
2011 memctx=memctx)
2012
2013 return getfilectx
2014
2015 class memctx(committablectx):
2016 """Use memctx to perform in-memory commits via localrepo.commitctx().
2017
2018 Revision information is supplied at initialization time, while
2019 related file data is made available through a callback
2020 mechanism. 'repo' is the current localrepo, 'parents' is a
2021 sequence of two parent revisions identifiers (pass None for every
2022 missing parent), 'text' is the commit message and 'files' lists
2023 names of files touched by the revision (normalized and relative to
2024 repository root).
2025
2026 filectxfn(repo, memctx, path) is a callable receiving the
2027 repository, the current memctx object and the normalized path of
2028 requested file, relative to repository root. It is fired by the
2029 commit function for every file in 'files', but the call order is
2030 undefined. If the file is available in the revision being
2031 committed (updated or added), filectxfn returns a memfilectx
2032 object. If the file was removed, filectxfn returns None for recent
2033 Mercurial versions. Moved files are represented by marking the source file
2034 removed and the new file added with copy information (see
2035 memfilectx).
2036
2037 user receives the committer name and defaults to current
2038 repository username, date is the commit date in any format
2039 supported by util.parsedate() and defaults to current date, extra
2040 is a dictionary of metadata or is left empty.
2041 """
2042
2043 # Mercurial <= 3.1 expects the filectxfn to raise IOError for missing files.
2044 # Extensions that need to retain compatibility across Mercurial 3.1 can use
2045 # this field to determine what to do in filectxfn.
2046 _returnnoneformissingfiles = True
2047
2048 def __init__(self, repo, parents, text, files, filectxfn, user=None,
2049 date=None, extra=None, branch=None, editor=False):
2050 super(memctx, self).__init__(repo, text, user, date, extra)
2051 self._rev = None
2052 self._node = None
2053 parents = [(p or nullid) for p in parents]
2054 p1, p2 = parents
2055 self._parents = [changectx(self._repo, p) for p in (p1, p2)]
2056 files = sorted(set(files))
2057 self._files = files
2058 if branch is not None:
2059 self._extra['branch'] = encoding.fromlocal(branch)
2060 self.substate = {}
2061
2062 if isinstance(filectxfn, patch.filestore):
2063 filectxfn = memfilefrompatch(filectxfn)
2064 elif not callable(filectxfn):
2065 # if store is not callable, wrap it in a function
2066 filectxfn = memfilefromctx(filectxfn)
2067
2068 # memoizing increases performance for e.g. vcs convert scenarios.
2069 self._filectxfn = makecachingfilectxfn(filectxfn)
2070
2071 if editor:
2072 self._text = editor(self._repo, self, [])
2073 self._repo.savecommitmessage(self._text)
2074
2075 def filectx(self, path, filelog=None):
2076 """get a file context from the working directory
2077
2078 Returns None if file doesn't exist and should be removed."""
2079 return self._filectxfn(self._repo, self, path)
2080
2081 def commit(self):
2082 """commit context to the repo"""
2083 return self._repo.commitctx(self)
2084
2085 @propertycache
2086 def _manifest(self):
2087 """generate a manifest based on the return values of filectxfn"""
2088
2089 # keep this simple for now; just worry about p1
2090 pctx = self._parents[0]
2091 man = pctx.manifest().copy()
2092
2093 for f in self._status.modified:
2094 p1node = nullid
2095 p2node = nullid
2096 p = pctx[f].parents() # if file isn't in pctx, check p2?
2097 if len(p) > 0:
2098 p1node = p[0].filenode()
2099 if len(p) > 1:
2100 p2node = p[1].filenode()
2101 man[f] = revlog.hash(self[f].data(), p1node, p2node)
2102
2103 for f in self._status.added:
2104 man[f] = revlog.hash(self[f].data(), nullid, nullid)
2105
2106 for f in self._status.removed:
2107 if f in man:
2108 del man[f]
2109
2110 return man
2111
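revlog.hash() above computes the file node id. Roughly speaking (a sketch of the idea, not a drop-in replacement for the real function), the node is a SHA-1 over the two parent nodes, sorted, followed by the file data, which is why unchanged data with unchanged parents yields the same node:

import hashlib

NULLID = b'\0' * 20  # assumption: matches mercurial.node.nullid

def nodeid(data, p1=NULLID, p2=NULLID):
    a, b = sorted((p1, p2))
    return hashlib.sha1(a + b + data).digest()

# same content and parents -> same node; a different parent -> a different node
assert nodeid(b'hello') == nodeid(b'hello')
assert nodeid(b'hello', p1=nodeid(b'v1')) != nodeid(b'hello')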
2112 @propertycache
2113 def _status(self):
2114 """Calculate exact status from ``files`` specified at construction
2115 """
2116 man1 = self.p1().manifest()
2117 p2 = self._parents[1]
2118 # "1 < len(self._parents)" can't be used for checking
2119 # the existence of the 2nd parent, because "memctx._parents" is
2120 # explicitly initialized with a list whose length is always 2.
2121 if p2.node() != nullid:
2122 man2 = p2.manifest()
2123 managing = lambda f: f in man1 or f in man2
2124 else:
2125 managing = lambda f: f in man1
2126
2127 modified, added, removed = [], [], []
2128 for f in self._files:
2129 if not managing(f):
2130 added.append(f)
2131 elif self[f]:
2132 modified.append(f)
2133 else:
2134 removed.append(f)
2135
2136 return scmutil.status(modified, added, removed, [], [], [], [])
2137
2138 class memfilectx(committablefilectx):
2138 class memfilectx(committablefilectx):
2139 """memfilectx represents an in-memory file to commit.
2139 """memfilectx represents an in-memory file to commit.
2140
2140
2141 See memctx and committablefilectx for more details.
2141 See memctx and committablefilectx for more details.
2142 """
2142 """
2143 def __init__(self, repo, path, data, islink=False,
2143 def __init__(self, repo, path, data, islink=False,
2144 isexec=False, copied=None, memctx=None):
2144 isexec=False, copied=None, memctx=None):
2145 """
2145 """
2146 path is the normalized file path relative to repository root.
2146 path is the normalized file path relative to repository root.
2147 data is the file content as a string.
2147 data is the file content as a string.
2148 islink is True if the file is a symbolic link.
2148 islink is True if the file is a symbolic link.
2149 isexec is True if the file is executable.
2149 isexec is True if the file is executable.
2150 copied is the source file path if current file was copied in the
2150 copied is the source file path if current file was copied in the
2151 revision being committed, or None."""
2151 revision being committed, or None."""
2152 super(memfilectx, self).__init__(repo, path, None, memctx)
2152 super(memfilectx, self).__init__(repo, path, None, memctx)
2153 self._data = data
2153 self._data = data
2154 self._flags = (islink and 'l' or '') + (isexec and 'x' or '')
2154 self._flags = (islink and 'l' or '') + (isexec and 'x' or '')
2155 self._copied = None
2155 self._copied = None
2156 if copied:
2156 if copied:
2157 self._copied = (copied, nullid)
2157 self._copied = (copied, nullid)
2158
2158
2159 def data(self):
2159 def data(self):
2160 return self._data
2160 return self._data
2161
2161
2162 def remove(self, ignoremissing=False):
2162 def remove(self, ignoremissing=False):
2163 """wraps unlink for a repo's working directory"""
2163 """wraps unlink for a repo's working directory"""
2164 # need to figure out what to do here
2164 # need to figure out what to do here
2165 del self._changectx[self._path]
2165 del self._changectx[self._path]
2166
2166
2167 def write(self, data, flags):
2167 def write(self, data, flags):
2168 """wraps repo.wwrite"""
2168 """wraps repo.wwrite"""
2169 self._data = data
2169 self._data = data
2170
2170
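# Illustrative sketch (hypothetical helper, not part of this module): building a
# commit entirely in memory with memctx + memfilectx.  The memfilectx signature
# matches the constructor above; the memctx signature is assumed to be
# (repo, parents, text, files, filectxfn, user=..., date=..., extra=...).
def _examplememcommit(repo):
    def filectxfn(repo, memctx, path):
        if path == 'hello.txt':
            return memfilectx(repo, path, 'hello world\n', memctx=memctx)
        return None  # any other path is treated as removed
    ctx = memctx(repo, (repo['.'].node(), None), 'in-memory commit',
                 ['hello.txt'], filectxfn, user='someone <s@example.com>')
    return ctx.commit()  # returns the node of the new changeset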
2171 class overlayfilectx(committablefilectx):
2171 class overlayfilectx(committablefilectx):
2172 """Like memfilectx but take an original filectx and optional parameters to
2172 """Like memfilectx but take an original filectx and optional parameters to
2173 override parts of it. This is useful when fctx.data() is expensive (i.e.
2173 override parts of it. This is useful when fctx.data() is expensive (i.e.
2174 flag processor is expensive) and raw data, flags, and filenode could be
2174 flag processor is expensive) and raw data, flags, and filenode could be
2175 reused (ex. rebase or mode-only amend a REVIDX_EXTSTORED file).
2175 reused (ex. rebase or mode-only amend a REVIDX_EXTSTORED file).
2176 """
2176 """
2177
2177
2178 def __init__(self, originalfctx, datafunc=None, path=None, flags=None,
2178 def __init__(self, originalfctx, datafunc=None, path=None, flags=None,
2179 copied=None, ctx=None):
2179 copied=None, ctx=None):
2180 """originalfctx: filecontext to duplicate
2180 """originalfctx: filecontext to duplicate
2181
2181
2182 datafunc: None or a function to override data (file content). It is a
2182 datafunc: None or a function to override data (file content). It is a
2183 function so the data can be computed lazily. path, flags, copied, ctx: None or an overriding value
2183 function so the data can be computed lazily. path, flags, copied, ctx: None or an overriding value
2184
2184
2185 copied could be (path, rev), or False. copied could also be just path,
2185 copied could be (path, rev), or False. copied could also be just path,
2186 and will be converted to (path, nullid). This simplifies some callers.
2186 and will be converted to (path, nullid). This simplifies some callers.
2187 """
2187 """
2188
2188
2189 if path is None:
2189 if path is None:
2190 path = originalfctx.path()
2190 path = originalfctx.path()
2191 if ctx is None:
2191 if ctx is None:
2192 ctx = originalfctx.changectx()
2192 ctx = originalfctx.changectx()
2193 ctxmatch = lambda: True
2193 ctxmatch = lambda: True
2194 else:
2194 else:
2195 ctxmatch = lambda: ctx == originalfctx.changectx()
2195 ctxmatch = lambda: ctx == originalfctx.changectx()
2196
2196
2197 repo = originalfctx.repo()
2197 repo = originalfctx.repo()
2198 flog = originalfctx.filelog()
2198 flog = originalfctx.filelog()
2199 super(overlayfilectx, self).__init__(repo, path, flog, ctx)
2199 super(overlayfilectx, self).__init__(repo, path, flog, ctx)
2200
2200
2201 if copied is None:
2201 if copied is None:
2202 copied = originalfctx.renamed()
2202 copied = originalfctx.renamed()
2203 copiedmatch = lambda: True
2203 copiedmatch = lambda: True
2204 else:
2204 else:
2205 if copied and not isinstance(copied, tuple):
2205 if copied and not isinstance(copied, tuple):
2206 # repo._filecommit will recalculate copyrev so nullid is okay
2206 # repo._filecommit will recalculate copyrev so nullid is okay
2207 copied = (copied, nullid)
2207 copied = (copied, nullid)
2208 copiedmatch = lambda: copied == originalfctx.renamed()
2208 copiedmatch = lambda: copied == originalfctx.renamed()
2209
2209
2210 # When data, copied (could affect data), ctx (could affect filelog
2210 # When data, copied (could affect data), ctx (could affect filelog
2211 # parents) are not overridden, rawdata, rawflags, and filenode may be
2211 # parents) are not overridden, rawdata, rawflags, and filenode may be
2212 # reused (repo._filecommit should double check filelog parents).
2212 # reused (repo._filecommit should double check filelog parents).
2213 #
2213 #
2214 # path, flags are not hashed in filelog (but in manifestlog) so they do
2214 # path, flags are not hashed in filelog (but in manifestlog) so they do
2215 # not affect reusability here.
2215 # not affect reusability here.
2216 #
2216 #
2217 # If ctx or copied is overridden to the same value as in originalfctx,
2217 # If ctx or copied is overridden to the same value as in originalfctx,
2218 # still consider it reusable. originalfctx.renamed() may be a bit
2218 # still consider it reusable. originalfctx.renamed() may be a bit
2219 # expensive so it's not called unless necessary. Assuming datafunc is
2219 # expensive so it's not called unless necessary. Assuming datafunc is
2220 # always expensive, do not call it for this "reusable" test.
2220 # always expensive, do not call it for this "reusable" test.
2221 reusable = datafunc is None and ctxmatch() and copiedmatch()
2221 reusable = datafunc is None and ctxmatch() and copiedmatch()
2222
2222
2223 if datafunc is None:
2223 if datafunc is None:
2224 datafunc = originalfctx.data
2224 datafunc = originalfctx.data
2225 if flags is None:
2225 if flags is None:
2226 flags = originalfctx.flags()
2226 flags = originalfctx.flags()
2227
2227
2228 self._datafunc = datafunc
2228 self._datafunc = datafunc
2229 self._flags = flags
2229 self._flags = flags
2230 self._copied = copied
2230 self._copied = copied
2231
2231
2232 if reusable:
2232 if reusable:
2233 # copy extra fields from originalfctx
2233 # copy extra fields from originalfctx
2234 attrs = ['rawdata', 'rawflags', '_filenode', '_filerev']
2234 attrs = ['rawdata', 'rawflags', '_filenode', '_filerev']
2235 for attr in attrs:
2235 for attr in attrs:
2236 if util.safehasattr(originalfctx, attr):
2236 if util.safehasattr(originalfctx, attr):
2237 setattr(self, attr, getattr(originalfctx, attr))
2237 setattr(self, attr, getattr(originalfctx, attr))
2238
2238
2239 def data(self):
2239 def data(self):
2240 return self._datafunc()
2240 return self._datafunc()
2241
2241
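# Illustrative sketch (hypothetical helper): reusing an existing filectx while
# overriding only its flags, e.g. to mark a file executable without re-reading
# or re-hashing its data when nothing else changed.
def _examplemarkexecutable(fctx):
    # data, copied and ctx are untouched, so raw data/filenode may be reused
    return overlayfilectx(fctx, flags='x')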
2242 class metadataonlyctx(committablectx):
2242 class metadataonlyctx(committablectx):
2243 """Like memctx but it's reusing the manifest of different commit.
2243 """Like memctx but it's reusing the manifest of different commit.
2244 Intended to be used by lightweight operations that are creating
2244 Intended to be used by lightweight operations that are creating
2245 metadata-only changes.
2245 metadata-only changes.
2246
2246
2247 Revision information is supplied at initialization time. 'repo' is the
2247 Revision information is supplied at initialization time. 'repo' is the
2248 current localrepo, 'ctx' is the original revision whose manifest we're reusing,
2248 current localrepo, 'ctx' is the original revision whose manifest we're reusing,
2249 'parents' is a sequence of two parent revision identifiers (pass None for
2249 'parents' is a sequence of two parent revision identifiers (pass None for
2250 every missing parent), and 'text' is the commit message.
2250 every missing parent), and 'text' is the commit message.
2251
2251
2252 user receives the committer name and defaults to current repository
2252 user receives the committer name and defaults to current repository
2253 username, date is the commit date in any format supported by
2253 username, date is the commit date in any format supported by
2254 util.parsedate() and defaults to current date, extra is a dictionary of
2254 util.parsedate() and defaults to current date, extra is a dictionary of
2255 metadata or is left empty.
2255 metadata or is left empty.
2256 """
2256 """
2257 def __new__(cls, repo, originalctx, *args, **kwargs):
2257 def __new__(cls, repo, originalctx, *args, **kwargs):
2258 return super(metadataonlyctx, cls).__new__(cls, repo)
2258 return super(metadataonlyctx, cls).__new__(cls, repo)
2259
2259
2260 def __init__(self, repo, originalctx, parents, text, user=None, date=None,
2260 def __init__(self, repo, originalctx, parents, text, user=None, date=None,
2261 extra=None, editor=False):
2261 extra=None, editor=False):
2262 super(metadataonlyctx, self).__init__(repo, text, user, date, extra)
2262 super(metadataonlyctx, self).__init__(repo, text, user, date, extra)
2263 self._rev = None
2263 self._rev = None
2264 self._node = None
2264 self._node = None
2265 self._originalctx = originalctx
2265 self._originalctx = originalctx
2266 self._manifestnode = originalctx.manifestnode()
2266 self._manifestnode = originalctx.manifestnode()
2267 parents = [(p or nullid) for p in parents]
2267 parents = [(p or nullid) for p in parents]
2268 p1, p2 = self._parents = [changectx(self._repo, p) for p in parents]
2268 p1, p2 = self._parents = [changectx(self._repo, p) for p in parents]
2269
2269
2270 # sanity check to ensure that the reused manifest parents are
2270 # sanity check to ensure that the reused manifest parents are
2271 # manifests of our commit parents
2271 # manifests of our commit parents
2272 mp1, mp2 = self.manifestctx().parents
2272 mp1, mp2 = self.manifestctx().parents
2273 if p1 != nullid and p1.manifestnode() != mp1:
2273 if p1 != nullid and p1.manifestnode() != mp1:
2274 raise RuntimeError('can\'t reuse the manifest: '
2274 raise RuntimeError('can\'t reuse the manifest: '
2275 'its p1 doesn\'t match the new ctx p1')
2275 'its p1 doesn\'t match the new ctx p1')
2276 if p2 != nullid and p2.manifestnode() != mp2:
2276 if p2 != nullid and p2.manifestnode() != mp2:
2277 raise RuntimeError('can\'t reuse the manifest: '
2277 raise RuntimeError('can\'t reuse the manifest: '
2278 'its p2 doesn\'t match the new ctx p2')
2278 'its p2 doesn\'t match the new ctx p2')
2279
2279
2280 self._files = originalctx.files()
2280 self._files = originalctx.files()
2281 self.substate = {}
2281 self.substate = {}
2282
2282
2283 if editor:
2283 if editor:
2284 self._text = editor(self._repo, self, [])
2284 self._text = editor(self._repo, self, [])
2285 self._repo.savecommitmessage(self._text)
2285 self._repo.savecommitmessage(self._text)
2286
2286
2287 def manifestnode(self):
2287 def manifestnode(self):
2288 return self._manifestnode
2288 return self._manifestnode
2289
2289
2290 @property
2290 @property
2291 def _manifestctx(self):
2291 def _manifestctx(self):
2292 return self._repo.manifestlog[self._manifestnode]
2292 return self._repo.manifestlog[self._manifestnode]
2293
2293
2294 def filectx(self, path, filelog=None):
2294 def filectx(self, path, filelog=None):
2295 return self._originalctx.filectx(path, filelog=filelog)
2295 return self._originalctx.filectx(path, filelog=filelog)
2296
2296
2297 def commit(self):
2297 def commit(self):
2298 """commit context to the repo"""
2298 """commit context to the repo"""
2299 return self._repo.commitctx(self)
2299 return self._repo.commitctx(self)
2300
2300
2301 @property
2301 @property
2302 def _manifest(self):
2302 def _manifest(self):
2303 return self._originalctx.manifest()
2303 return self._originalctx.manifest()
2304
2304
2305 @propertycache
2305 @propertycache
2306 def _status(self):
2306 def _status(self):
2307 """Calculate exact status from ``files`` specified in the ``origctx``
2307 """Calculate exact status from ``files`` specified in the ``origctx``
2308 and the parents' manifests.
2308 and the parents' manifests.
2309 """
2309 """
2310 man1 = self.p1().manifest()
2310 man1 = self.p1().manifest()
2311 p2 = self._parents[1]
2311 p2 = self._parents[1]
2312 # "1 < len(self._parents)" can't be used for checking
2312 # "1 < len(self._parents)" can't be used for checking
2313 # existence of the 2nd parent, because "metadataonlyctx._parents" is
2313 # existence of the 2nd parent, because "metadataonlyctx._parents" is
2314 # explicitly initialized as a list whose length is always 2.
2314 # explicitly initialized as a list whose length is always 2.
2315 if p2.node() != nullid:
2315 if p2.node() != nullid:
2316 man2 = p2.manifest()
2316 man2 = p2.manifest()
2317 managing = lambda f: f in man1 or f in man2
2317 managing = lambda f: f in man1 or f in man2
2318 else:
2318 else:
2319 managing = lambda f: f in man1
2319 managing = lambda f: f in man1
2320
2320
2321 modified, added, removed = [], [], []
2321 modified, added, removed = [], [], []
2322 for f in self._files:
2322 for f in self._files:
2323 if not managing(f):
2323 if not managing(f):
2324 added.append(f)
2324 added.append(f)
2325 elif self[f]:
2325 elif self[f]:
2326 modified.append(f)
2326 modified.append(f)
2327 else:
2327 else:
2328 removed.append(f)
2328 removed.append(f)
2329
2329
2330 return scmutil.status(modified, added, removed, [], [], [], [])
2330 return scmutil.status(modified, added, removed, [], [], [], [])
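# Illustrative sketch (hypothetical helper): rewriting only a changeset's
# description while reusing its manifest, the kind of lightweight, metadata-only
# change this class is meant for.
def _examplereword(repo, ctx, newtext):
    ps = ctx.parents()
    p1 = ps[0].node()
    p2 = ps[1].node() if len(ps) > 1 else None  # None is mapped to nullid above
    newctx = metadataonlyctx(repo, ctx, (p1, p2), newtext,
                             user=ctx.user(), date=ctx.date(),
                             extra=ctx.extra())
    return newctx.commit()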
@@ -1,2012 +1,2013 b''
1 # exchange.py - utility to exchange data between repos.
1 # exchange.py - utility to exchange data between repos.
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import hashlib
11 import hashlib
12
12
13 from .i18n import _
13 from .i18n import _
14 from .node import (
14 from .node import (
15 hex,
15 hex,
16 nullid,
16 nullid,
17 )
17 )
18 from . import (
18 from . import (
19 bookmarks as bookmod,
19 bookmarks as bookmod,
20 bundle2,
20 bundle2,
21 changegroup,
21 changegroup,
22 discovery,
22 discovery,
23 error,
23 error,
24 lock as lockmod,
24 lock as lockmod,
25 obsolete,
25 obsolete,
26 phases,
26 phases,
27 pushkey,
27 pushkey,
28 pycompat,
28 pycompat,
29 scmutil,
29 scmutil,
30 sslutil,
30 sslutil,
31 streamclone,
31 streamclone,
32 url as urlmod,
32 url as urlmod,
33 util,
33 util,
34 )
34 )
35
35
36 urlerr = util.urlerr
36 urlerr = util.urlerr
37 urlreq = util.urlreq
37 urlreq = util.urlreq
38
38
39 # Maps bundle version human names to changegroup versions.
39 # Maps bundle version human names to changegroup versions.
40 _bundlespeccgversions = {'v1': '01',
40 _bundlespeccgversions = {'v1': '01',
41 'v2': '02',
41 'v2': '02',
42 'packed1': 's1',
42 'packed1': 's1',
43 'bundle2': '02', #legacy
43 'bundle2': '02', #legacy
44 }
44 }
45
45
46 # Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
46 # Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
47 _bundlespecv1compengines = {'gzip', 'bzip2', 'none'}
47 _bundlespecv1compengines = {'gzip', 'bzip2', 'none'}
48
48
49 def parsebundlespec(repo, spec, strict=True, externalnames=False):
49 def parsebundlespec(repo, spec, strict=True, externalnames=False):
50 """Parse a bundle string specification into parts.
50 """Parse a bundle string specification into parts.
51
51
52 Bundle specifications denote a well-defined bundle/exchange format.
52 Bundle specifications denote a well-defined bundle/exchange format.
53 The content of a given specification should not change over time in
53 The content of a given specification should not change over time in
54 order to ensure that bundles produced by a newer version of Mercurial are
54 order to ensure that bundles produced by a newer version of Mercurial are
55 readable from an older version.
55 readable from an older version.
56
56
57 The string currently has the form:
57 The string currently has the form:
58
58
59 <compression>-<type>[;<parameter0>[;<parameter1>]]
59 <compression>-<type>[;<parameter0>[;<parameter1>]]
60
60
61 Where <compression> is one of the supported compression formats
61 Where <compression> is one of the supported compression formats
62 and <type> is (currently) a version string. A ";" can follow the type and
62 and <type> is (currently) a version string. A ";" can follow the type and
63 all text afterwards is interpreted as URI encoded, ";" delimited key=value
63 all text afterwards is interpreted as URI encoded, ";" delimited key=value
64 pairs.
64 pairs.
65
65
66 If ``strict`` is True (the default) <compression> is required. Otherwise,
66 If ``strict`` is True (the default) <compression> is required. Otherwise,
67 it is optional.
67 it is optional.
68
68
69 If ``externalnames`` is False (the default), the human-centric names will
69 If ``externalnames`` is False (the default), the human-centric names will
70 be converted to their internal representation.
70 be converted to their internal representation.
71
71
72 Returns a 3-tuple of (compression, version, parameters). Compression will
72 Returns a 3-tuple of (compression, version, parameters). Compression will
73 be ``None`` if not in strict mode and a compression isn't defined.
73 be ``None`` if not in strict mode and a compression isn't defined.
74
74
75 An ``InvalidBundleSpecification`` is raised when the specification is
75 An ``InvalidBundleSpecification`` is raised when the specification is
76 not syntactically well formed.
76 not syntactically well formed.
77
77
78 An ``UnsupportedBundleSpecification`` is raised when the compression or
78 An ``UnsupportedBundleSpecification`` is raised when the compression or
79 bundle type/version is not recognized.
79 bundle type/version is not recognized.
80
80
81 Note: this function will likely eventually return a more complex data
81 Note: this function will likely eventually return a more complex data
82 structure, including bundle2 part information.
82 structure, including bundle2 part information.
83 """
83 """
84 def parseparams(s):
84 def parseparams(s):
85 if ';' not in s:
85 if ';' not in s:
86 return s, {}
86 return s, {}
87
87
88 params = {}
88 params = {}
89 version, paramstr = s.split(';', 1)
89 version, paramstr = s.split(';', 1)
90
90
91 for p in paramstr.split(';'):
91 for p in paramstr.split(';'):
92 if '=' not in p:
92 if '=' not in p:
93 raise error.InvalidBundleSpecification(
93 raise error.InvalidBundleSpecification(
94 _('invalid bundle specification: '
94 _('invalid bundle specification: '
95 'missing "=" in parameter: %s') % p)
95 'missing "=" in parameter: %s') % p)
96
96
97 key, value = p.split('=', 1)
97 key, value = p.split('=', 1)
98 key = urlreq.unquote(key)
98 key = urlreq.unquote(key)
99 value = urlreq.unquote(value)
99 value = urlreq.unquote(value)
100 params[key] = value
100 params[key] = value
101
101
102 return version, params
102 return version, params
103
103
104
104
105 if strict and '-' not in spec:
105 if strict and '-' not in spec:
106 raise error.InvalidBundleSpecification(
106 raise error.InvalidBundleSpecification(
107 _('invalid bundle specification; '
107 _('invalid bundle specification; '
108 'must be prefixed with compression: %s') % spec)
108 'must be prefixed with compression: %s') % spec)
109
109
110 if '-' in spec:
110 if '-' in spec:
111 compression, version = spec.split('-', 1)
111 compression, version = spec.split('-', 1)
112
112
113 if compression not in util.compengines.supportedbundlenames:
113 if compression not in util.compengines.supportedbundlenames:
114 raise error.UnsupportedBundleSpecification(
114 raise error.UnsupportedBundleSpecification(
115 _('%s compression is not supported') % compression)
115 _('%s compression is not supported') % compression)
116
116
117 version, params = parseparams(version)
117 version, params = parseparams(version)
118
118
119 if version not in _bundlespeccgversions:
119 if version not in _bundlespeccgversions:
120 raise error.UnsupportedBundleSpecification(
120 raise error.UnsupportedBundleSpecification(
121 _('%s is not a recognized bundle version') % version)
121 _('%s is not a recognized bundle version') % version)
122 else:
122 else:
123 # Value could be just the compression or just the version, in which
123 # Value could be just the compression or just the version, in which
124 # case some defaults are assumed (but only when not in strict mode).
124 # case some defaults are assumed (but only when not in strict mode).
125 assert not strict
125 assert not strict
126
126
127 spec, params = parseparams(spec)
127 spec, params = parseparams(spec)
128
128
129 if spec in util.compengines.supportedbundlenames:
129 if spec in util.compengines.supportedbundlenames:
130 compression = spec
130 compression = spec
131 version = 'v1'
131 version = 'v1'
132 # Generaldelta repos require v2.
132 # Generaldelta repos require v2.
133 if 'generaldelta' in repo.requirements:
133 if 'generaldelta' in repo.requirements:
134 version = 'v2'
134 version = 'v2'
135 # Modern compression engines require v2.
135 # Modern compression engines require v2.
136 if compression not in _bundlespecv1compengines:
136 if compression not in _bundlespecv1compengines:
137 version = 'v2'
137 version = 'v2'
138 elif spec in _bundlespeccgversions:
138 elif spec in _bundlespeccgversions:
139 if spec == 'packed1':
139 if spec == 'packed1':
140 compression = 'none'
140 compression = 'none'
141 else:
141 else:
142 compression = 'bzip2'
142 compression = 'bzip2'
143 version = spec
143 version = spec
144 else:
144 else:
145 raise error.UnsupportedBundleSpecification(
145 raise error.UnsupportedBundleSpecification(
146 _('%s is not a recognized bundle specification') % spec)
146 _('%s is not a recognized bundle specification') % spec)
147
147
148 # Bundle version 1 only supports a known set of compression engines.
148 # Bundle version 1 only supports a known set of compression engines.
149 if version == 'v1' and compression not in _bundlespecv1compengines:
149 if version == 'v1' and compression not in _bundlespecv1compengines:
150 raise error.UnsupportedBundleSpecification(
150 raise error.UnsupportedBundleSpecification(
151 _('compression engine %s is not supported on v1 bundles') %
151 _('compression engine %s is not supported on v1 bundles') %
152 compression)
152 compression)
153
153
154 # The specification for packed1 can optionally declare the data formats
154 # The specification for packed1 can optionally declare the data formats
155 # required to apply it. If we see this metadata, compare against what the
155 # required to apply it. If we see this metadata, compare against what the
156 # repo supports and error if the bundle isn't compatible.
156 # repo supports and error if the bundle isn't compatible.
157 if version == 'packed1' and 'requirements' in params:
157 if version == 'packed1' and 'requirements' in params:
158 requirements = set(params['requirements'].split(','))
158 requirements = set(params['requirements'].split(','))
159 missingreqs = requirements - repo.supportedformats
159 missingreqs = requirements - repo.supportedformats
160 if missingreqs:
160 if missingreqs:
161 raise error.UnsupportedBundleSpecification(
161 raise error.UnsupportedBundleSpecification(
162 _('missing support for repository features: %s') %
162 _('missing support for repository features: %s') %
163 ', '.join(sorted(missingreqs)))
163 ', '.join(sorted(missingreqs)))
164
164
165 if not externalnames:
165 if not externalnames:
166 engine = util.compengines.forbundlename(compression)
166 engine = util.compengines.forbundlename(compression)
167 compression = engine.bundletype()[1]
167 compression = engine.bundletype()[1]
168 version = _bundlespeccgversions[version]
168 version = _bundlespeccgversions[version]
169 return compression, version, params
169 return compression, version, params
170
170
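# Illustrative sketch (hypothetical helper, values approximate): what
# parsebundlespec returns for a couple of common specs.  With the default
# externalnames=False the human names are mapped to internal ones, so
# 'gzip-v2' comes back roughly as ('GZ', '02', {}).
def _exampleparsespecs(repo):
    plain = parsebundlespec(repo, 'gzip-v2')
    packed = parsebundlespec(repo, 'none-packed1;requirements=revlogv1',
                             externalnames=True)
    # e.g. ('GZ', '02', {}) and ('none', 'packed1', {'requirements': 'revlogv1'})
    return plain, packed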
171 def readbundle(ui, fh, fname, vfs=None):
171 def readbundle(ui, fh, fname, vfs=None):
172 header = changegroup.readexactly(fh, 4)
172 header = changegroup.readexactly(fh, 4)
173
173
174 alg = None
174 alg = None
175 if not fname:
175 if not fname:
176 fname = "stream"
176 fname = "stream"
177 if not header.startswith('HG') and header.startswith('\0'):
177 if not header.startswith('HG') and header.startswith('\0'):
178 fh = changegroup.headerlessfixup(fh, header)
178 fh = changegroup.headerlessfixup(fh, header)
179 header = "HG10"
179 header = "HG10"
180 alg = 'UN'
180 alg = 'UN'
181 elif vfs:
181 elif vfs:
182 fname = vfs.join(fname)
182 fname = vfs.join(fname)
183
183
184 magic, version = header[0:2], header[2:4]
184 magic, version = header[0:2], header[2:4]
185
185
186 if magic != 'HG':
186 if magic != 'HG':
187 raise error.Abort(_('%s: not a Mercurial bundle') % fname)
187 raise error.Abort(_('%s: not a Mercurial bundle') % fname)
188 if version == '10':
188 if version == '10':
189 if alg is None:
189 if alg is None:
190 alg = changegroup.readexactly(fh, 2)
190 alg = changegroup.readexactly(fh, 2)
191 return changegroup.cg1unpacker(fh, alg)
191 return changegroup.cg1unpacker(fh, alg)
192 elif version.startswith('2'):
192 elif version.startswith('2'):
193 return bundle2.getunbundler(ui, fh, magicstring=magic + version)
193 return bundle2.getunbundler(ui, fh, magicstring=magic + version)
194 elif version == 'S1':
194 elif version == 'S1':
195 return streamclone.streamcloneapplier(fh)
195 return streamclone.streamcloneapplier(fh)
196 else:
196 else:
197 raise error.Abort(_('%s: unknown bundle version %s') % (fname, version))
197 raise error.Abort(_('%s: unknown bundle version %s') % (fname, version))
198
198
199 def getbundlespec(ui, fh):
199 def getbundlespec(ui, fh):
200 """Infer the bundlespec from a bundle file handle.
200 """Infer the bundlespec from a bundle file handle.
201
201
202 The input file handle is seeked and the original seek position is not
202 The input file handle is seeked and the original seek position is not
203 restored.
203 restored.
204 """
204 """
205 def speccompression(alg):
205 def speccompression(alg):
206 try:
206 try:
207 return util.compengines.forbundletype(alg).bundletype()[0]
207 return util.compengines.forbundletype(alg).bundletype()[0]
208 except KeyError:
208 except KeyError:
209 return None
209 return None
210
210
211 b = readbundle(ui, fh, None)
211 b = readbundle(ui, fh, None)
212 if isinstance(b, changegroup.cg1unpacker):
212 if isinstance(b, changegroup.cg1unpacker):
213 alg = b._type
213 alg = b._type
214 if alg == '_truncatedBZ':
214 if alg == '_truncatedBZ':
215 alg = 'BZ'
215 alg = 'BZ'
216 comp = speccompression(alg)
216 comp = speccompression(alg)
217 if not comp:
217 if not comp:
218 raise error.Abort(_('unknown compression algorithm: %s') % alg)
218 raise error.Abort(_('unknown compression algorithm: %s') % alg)
219 return '%s-v1' % comp
219 return '%s-v1' % comp
220 elif isinstance(b, bundle2.unbundle20):
220 elif isinstance(b, bundle2.unbundle20):
221 if 'Compression' in b.params:
221 if 'Compression' in b.params:
222 comp = speccompression(b.params['Compression'])
222 comp = speccompression(b.params['Compression'])
223 if not comp:
223 if not comp:
224 raise error.Abort(_('unknown compression algorithm: %s') % comp)
224 raise error.Abort(_('unknown compression algorithm: %s') % comp)
225 else:
225 else:
226 comp = 'none'
226 comp = 'none'
227
227
228 version = None
228 version = None
229 for part in b.iterparts():
229 for part in b.iterparts():
230 if part.type == 'changegroup':
230 if part.type == 'changegroup':
231 version = part.params['version']
231 version = part.params['version']
232 if version in ('01', '02'):
232 if version in ('01', '02'):
233 version = 'v2'
233 version = 'v2'
234 else:
234 else:
235 raise error.Abort(_('changegroup version %s does not have '
235 raise error.Abort(_('changegroup version %s does not have '
236 'a known bundlespec') % version,
236 'a known bundlespec') % version,
237 hint=_('try upgrading your Mercurial '
237 hint=_('try upgrading your Mercurial '
238 'client'))
238 'client'))
239
239
240 if not version:
240 if not version:
241 raise error.Abort(_('could not identify changegroup version in '
241 raise error.Abort(_('could not identify changegroup version in '
242 'bundle'))
242 'bundle'))
243
243
244 return '%s-%s' % (comp, version)
244 return '%s-%s' % (comp, version)
245 elif isinstance(b, streamclone.streamcloneapplier):
245 elif isinstance(b, streamclone.streamcloneapplier):
246 requirements = streamclone.readbundle1header(fh)[2]
246 requirements = streamclone.readbundle1header(fh)[2]
247 params = 'requirements=%s' % ','.join(sorted(requirements))
247 params = 'requirements=%s' % ','.join(sorted(requirements))
248 return 'none-packed1;%s' % urlreq.quote(params)
248 return 'none-packed1;%s' % urlreq.quote(params)
249 else:
249 else:
250 raise error.Abort(_('unknown bundle type: %s') % b)
250 raise error.Abort(_('unknown bundle type: %s') % b)
251
251
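# Illustrative sketch (hypothetical helper): inferring the bundlespec of a
# bundle on disk.  The file handle is consumed by getbundlespec, so open it
# just for this purpose.
def _examplebundlespec(ui, bundlepath):
    with open(bundlepath, 'rb') as fh:
        return getbundlespec(ui, fh)  # e.g. 'gzip-v2' or 'none-packed1;...'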
252 def _computeoutgoing(repo, heads, common):
252 def _computeoutgoing(repo, heads, common):
253 """Computes which revs are outgoing given a set of common
253 """Computes which revs are outgoing given a set of common
254 and a set of heads.
254 and a set of heads.
255
255
256 This is a separate function so extensions can have access to
256 This is a separate function so extensions can have access to
257 the logic.
257 the logic.
258
258
259 Returns a discovery.outgoing object.
259 Returns a discovery.outgoing object.
260 """
260 """
261 cl = repo.changelog
261 cl = repo.changelog
262 if common:
262 if common:
263 hasnode = cl.hasnode
263 hasnode = cl.hasnode
264 common = [n for n in common if hasnode(n)]
264 common = [n for n in common if hasnode(n)]
265 else:
265 else:
266 common = [nullid]
266 common = [nullid]
267 if not heads:
267 if not heads:
268 heads = cl.heads()
268 heads = cl.heads()
269 return discovery.outgoing(repo, common, heads)
269 return discovery.outgoing(repo, common, heads)
270
270
271 def _forcebundle1(op):
271 def _forcebundle1(op):
272 """return true if a pull/push must use bundle1
272 """return true if a pull/push must use bundle1
273
273
274 This function is used to allow testing of the older bundle version"""
274 This function is used to allow testing of the older bundle version"""
275 ui = op.repo.ui
275 ui = op.repo.ui
276 forcebundle1 = False
276 forcebundle1 = False
277 # The goal of this config is to allow developers to choose the bundle
277 # The goal of this config is to allow developers to choose the bundle
278 # version used during exchange. This is especially handy during tests.
278 # version used during exchange. This is especially handy during tests.
279 # Value is a list of bundle versions to pick from; the highest version
279 # Value is a list of bundle versions to pick from; the highest version
280 # should be used.
280 # should be used.
281 #
281 #
282 # developer config: devel.legacy.exchange
282 # developer config: devel.legacy.exchange
283 exchange = ui.configlist('devel', 'legacy.exchange')
283 exchange = ui.configlist('devel', 'legacy.exchange')
284 forcebundle1 = 'bundle2' not in exchange and 'bundle1' in exchange
284 forcebundle1 = 'bundle2' not in exchange and 'bundle1' in exchange
285 return forcebundle1 or not op.remote.capable('bundle2')
285 return forcebundle1 or not op.remote.capable('bundle2')
286
286
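# Illustrative sketch: the developer config read above, as it might appear in
# an hgrc used by tests (the value shown is an example, not a default):
#
#   [devel]
#   legacy.exchange = bundle1
#
# With only 'bundle1' listed, _forcebundle1() returns True and push/pull fall
# back to the bundle1 protocol even against a bundle2-capable peer.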
287 class pushoperation(object):
287 class pushoperation(object):
288 """A object that represent a single push operation
288 """A object that represent a single push operation
289
289
290 Its purpose is to carry push related state and very common operations.
290 Its purpose is to carry push related state and very common operations.
291
291
292 A new pushoperation should be created at the beginning of each push and
292 A new pushoperation should be created at the beginning of each push and
293 discarded afterward.
293 discarded afterward.
294 """
294 """
295
295
296 def __init__(self, repo, remote, force=False, revs=None, newbranch=False,
296 def __init__(self, repo, remote, force=False, revs=None, newbranch=False,
297 bookmarks=()):
297 bookmarks=()):
298 # repo we push from
298 # repo we push from
299 self.repo = repo
299 self.repo = repo
300 self.ui = repo.ui
300 self.ui = repo.ui
301 # repo we push to
301 # repo we push to
302 self.remote = remote
302 self.remote = remote
303 # force option provided
303 # force option provided
304 self.force = force
304 self.force = force
305 # revs to be pushed (None is "all")
305 # revs to be pushed (None is "all")
306 self.revs = revs
306 self.revs = revs
307 # bookmark explicitly pushed
307 # bookmark explicitly pushed
308 self.bookmarks = bookmarks
308 self.bookmarks = bookmarks
309 # allow push of new branch
309 # allow push of new branch
310 self.newbranch = newbranch
310 self.newbranch = newbranch
311 # did a local lock get acquired?
311 # did a local lock get acquired?
312 self.locallocked = None
312 self.locallocked = None
313 # step already performed
313 # steps already performed
313 # steps already performed
314 # (used to check what steps have already been performed through bundle2)
314 # (used to check what steps have already been performed through bundle2)
315 self.stepsdone = set()
316 # Integer version of the changegroup push result
316 # Integer version of the changegroup push result
317 # - None means nothing to push
317 # - None means nothing to push
318 # - 0 means HTTP error
318 # - 0 means HTTP error
319 # - 1 means we pushed and remote head count is unchanged *or*
319 # - 1 means we pushed and remote head count is unchanged *or*
320 # we have outgoing changesets but refused to push
320 # we have outgoing changesets but refused to push
321 # - other values as described by addchangegroup()
321 # - other values as described by addchangegroup()
322 self.cgresult = None
322 self.cgresult = None
323 # Boolean value for the bookmark push
323 # Boolean value for the bookmark push
324 self.bkresult = None
324 self.bkresult = None
325 # discovery.outgoing object (contains common and outgoing data)
325 # discovery.outgoing object (contains common and outgoing data)
326 self.outgoing = None
326 self.outgoing = None
327 # all remote topological heads before the push
327 # all remote topological heads before the push
328 self.remoteheads = None
328 self.remoteheads = None
329 # Details of the remote branch pre and post push
329 # Details of the remote branch pre and post push
330 #
330 #
331 # mapping: {'branch': ([remoteheads],
331 # mapping: {'branch': ([remoteheads],
332 # [newheads],
332 # [newheads],
333 # [unsyncedheads],
333 # [unsyncedheads],
334 # [discardedheads])}
334 # [discardedheads])}
335 # - branch: the branch name
335 # - branch: the branch name
336 # - remoteheads: the list of remote heads known locally
336 # - remoteheads: the list of remote heads known locally
337 # None if the branch is new
337 # None if the branch is new
338 # - newheads: the new remote heads (known locally) with outgoing pushed
338 # - newheads: the new remote heads (known locally) with outgoing pushed
339 # - unsyncedheads: the list of remote heads unknown locally.
339 # - unsyncedheads: the list of remote heads unknown locally.
340 # - discardedheads: the list of remote heads made obsolete by the push
340 # - discardedheads: the list of remote heads made obsolete by the push
341 self.pushbranchmap = None
341 self.pushbranchmap = None
342 # testable as a boolean indicating if any nodes are missing locally.
342 # testable as a boolean indicating if any nodes are missing locally.
343 self.incoming = None
343 self.incoming = None
344 # phase changes that must be pushed alongside the changesets
344 # phase changes that must be pushed alongside the changesets
345 self.outdatedphases = None
345 self.outdatedphases = None
346 # phase changes that must be pushed if the changeset push fails
346 # phase changes that must be pushed if the changeset push fails
347 self.fallbackoutdatedphases = None
347 self.fallbackoutdatedphases = None
348 # outgoing obsmarkers
348 # outgoing obsmarkers
349 self.outobsmarkers = set()
349 self.outobsmarkers = set()
350 # outgoing bookmarks
350 # outgoing bookmarks
351 self.outbookmarks = []
351 self.outbookmarks = []
352 # transaction manager
352 # transaction manager
353 self.trmanager = None
353 self.trmanager = None
354 # map { pushkey partid -> callback handling failure}
354 # map { pushkey partid -> callback handling failure}
355 # used to handle exception from mandatory pushkey part failure
355 # used to handle exception from mandatory pushkey part failure
356 self.pkfailcb = {}
356 self.pkfailcb = {}
357
357
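# Illustrative sketch (hypothetical values): the shape pushbranchmap takes once
# populated, matching the mapping documented in the comment above.
#
#   pushop.pushbranchmap = {
#       'default': ([oldremotehead], [oldremotehead, newhead], [], []),
#       'stable': (None, [newbranchhead], [], []),  # branch new on the remote
#   }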
358 @util.propertycache
358 @util.propertycache
359 def futureheads(self):
359 def futureheads(self):
360 """future remote heads if the changeset push succeeds"""
360 """future remote heads if the changeset push succeeds"""
361 return self.outgoing.missingheads
361 return self.outgoing.missingheads
362
362
363 @util.propertycache
363 @util.propertycache
364 def fallbackheads(self):
364 def fallbackheads(self):
365 """future remote heads if the changeset push fails"""
365 """future remote heads if the changeset push fails"""
366 if self.revs is None:
366 if self.revs is None:
367 # no revs targeted for push, so all common heads are relevant
367 # no revs targeted for push, so all common heads are relevant
368 return self.outgoing.commonheads
368 return self.outgoing.commonheads
369 unfi = self.repo.unfiltered()
369 unfi = self.repo.unfiltered()
370 # I want cheads = heads(::missingheads and ::commonheads)
370 # I want cheads = heads(::missingheads and ::commonheads)
371 # (missingheads is revs with secret changeset filtered out)
371 # (missingheads is revs with secret changeset filtered out)
372 #
372 #
373 # This can be expressed as:
373 # This can be expressed as:
374 # cheads = ( (missingheads and ::commonheads)
374 # cheads = ( (missingheads and ::commonheads)
375 # + (commonheads and ::missingheads))
375 # + (commonheads and ::missingheads))
376 # )
376 # )
377 #
377 #
378 # while trying to push we already computed the following:
378 # while trying to push we already computed the following:
379 # common = (::commonheads)
379 # common = (::commonheads)
380 # missing = ((commonheads::missingheads) - commonheads)
380 # missing = ((commonheads::missingheads) - commonheads)
381 #
381 #
382 # We can pick:
382 # We can pick:
383 # * missingheads part of common (::commonheads)
383 # * missingheads part of common (::commonheads)
384 common = self.outgoing.common
384 common = self.outgoing.common
385 nm = self.repo.changelog.nodemap
385 nm = self.repo.changelog.nodemap
386 cheads = [node for node in self.revs if nm[node] in common]
386 cheads = [node for node in self.revs if nm[node] in common]
387 # and
387 # and
388 # * commonheads parents on missing
388 # * commonheads parents on missing
389 revset = unfi.set('%ln and parents(roots(%ln))',
389 revset = unfi.set('%ln and parents(roots(%ln))',
390 self.outgoing.commonheads,
390 self.outgoing.commonheads,
391 self.outgoing.missing)
391 self.outgoing.missing)
392 cheads.extend(c.node() for c in revset)
392 cheads.extend(c.node() for c in revset)
393 return cheads
393 return cheads
394
394
395 @property
395 @property
396 def commonheads(self):
396 def commonheads(self):
397 """set of all common heads after changeset bundle push"""
397 """set of all common heads after changeset bundle push"""
398 if self.cgresult:
398 if self.cgresult:
399 return self.futureheads
399 return self.futureheads
400 else:
400 else:
401 return self.fallbackheads
401 return self.fallbackheads
402
402
403 # mapping of message used when pushing bookmark
403 # mapping of message used when pushing bookmark
404 bookmsgmap = {'update': (_("updating bookmark %s\n"),
404 bookmsgmap = {'update': (_("updating bookmark %s\n"),
405 _('updating bookmark %s failed!\n')),
405 _('updating bookmark %s failed!\n')),
406 'export': (_("exporting bookmark %s\n"),
406 'export': (_("exporting bookmark %s\n"),
407 _('exporting bookmark %s failed!\n')),
407 _('exporting bookmark %s failed!\n')),
408 'delete': (_("deleting remote bookmark %s\n"),
408 'delete': (_("deleting remote bookmark %s\n"),
409 _('deleting remote bookmark %s failed!\n')),
409 _('deleting remote bookmark %s failed!\n')),
410 }
410 }
411
411
412
412
413 def push(repo, remote, force=False, revs=None, newbranch=False, bookmarks=(),
413 def push(repo, remote, force=False, revs=None, newbranch=False, bookmarks=(),
414 opargs=None):
414 opargs=None):
415 '''Push outgoing changesets (limited by revs) from a local
415 '''Push outgoing changesets (limited by revs) from a local
416 repository to remote. Return an integer:
416 repository to remote. Return an integer:
417 - None means nothing to push
417 - None means nothing to push
418 - 0 means HTTP error
418 - 0 means HTTP error
419 - 1 means we pushed and remote head count is unchanged *or*
419 - 1 means we pushed and remote head count is unchanged *or*
420 we have outgoing changesets but refused to push
420 we have outgoing changesets but refused to push
421 - other values as described by addchangegroup()
421 - other values as described by addchangegroup()
422 '''
422 '''
423 if opargs is None:
423 if opargs is None:
424 opargs = {}
424 opargs = {}
425 pushop = pushoperation(repo, remote, force, revs, newbranch, bookmarks,
425 pushop = pushoperation(repo, remote, force, revs, newbranch, bookmarks,
426 **opargs)
426 **opargs)
427 if pushop.remote.local():
427 if pushop.remote.local():
428 missing = (set(pushop.repo.requirements)
428 missing = (set(pushop.repo.requirements)
429 - pushop.remote.local().supported)
429 - pushop.remote.local().supported)
430 if missing:
430 if missing:
431 msg = _("required features are not"
431 msg = _("required features are not"
432 " supported in the destination:"
432 " supported in the destination:"
433 " %s") % (', '.join(sorted(missing)))
433 " %s") % (', '.join(sorted(missing)))
434 raise error.Abort(msg)
434 raise error.Abort(msg)
435
435
436 # there are two ways to push to remote repo:
436 # there are two ways to push to remote repo:
437 #
437 #
438 # addchangegroup assumes local user can lock remote
438 # addchangegroup assumes local user can lock remote
439 # repo (local filesystem, old ssh servers).
439 # repo (local filesystem, old ssh servers).
440 #
440 #
441 # unbundle assumes local user cannot lock remote repo (new ssh
441 # unbundle assumes local user cannot lock remote repo (new ssh
442 # servers, http servers).
442 # servers, http servers).
443
443
444 if not pushop.remote.canpush():
444 if not pushop.remote.canpush():
445 raise error.Abort(_("destination does not support push"))
445 raise error.Abort(_("destination does not support push"))
446 # get local lock as we might write phase data
446 # get local lock as we might write phase data
447 localwlock = locallock = None
447 localwlock = locallock = None
448 try:
448 try:
449 # bundle2 push may receive a reply bundle touching bookmarks or other
449 # bundle2 push may receive a reply bundle touching bookmarks or other
450 # things requiring the wlock. Take it now to ensure proper ordering.
450 # things requiring the wlock. Take it now to ensure proper ordering.
451 maypushback = pushop.ui.configbool('experimental', 'bundle2.pushback')
451 maypushback = pushop.ui.configbool('experimental', 'bundle2.pushback')
452 if (not _forcebundle1(pushop)) and maypushback:
452 if (not _forcebundle1(pushop)) and maypushback:
453 localwlock = pushop.repo.wlock()
453 localwlock = pushop.repo.wlock()
454 locallock = pushop.repo.lock()
454 locallock = pushop.repo.lock()
455 pushop.locallocked = True
455 pushop.locallocked = True
456 except IOError as err:
456 except IOError as err:
457 pushop.locallocked = False
457 pushop.locallocked = False
458 if err.errno != errno.EACCES:
458 if err.errno != errno.EACCES:
459 raise
459 raise
460 # source repo cannot be locked.
460 # source repo cannot be locked.
461 # We do not abort the push, but just disable the local phase
461 # We do not abort the push, but just disable the local phase
462 # synchronisation.
462 # synchronisation.
463 msg = 'cannot lock source repository: %s\n' % err
463 msg = 'cannot lock source repository: %s\n' % err
464 pushop.ui.debug(msg)
464 pushop.ui.debug(msg)
465 try:
465 try:
466 if pushop.locallocked:
466 if pushop.locallocked:
467 pushop.trmanager = transactionmanager(pushop.repo,
467 pushop.trmanager = transactionmanager(pushop.repo,
468 'push-response',
468 'push-response',
469 pushop.remote.url())
469 pushop.remote.url())
470 pushop.repo.checkpush(pushop)
470 pushop.repo.checkpush(pushop)
471 lock = None
471 lock = None
472 unbundle = pushop.remote.capable('unbundle')
472 unbundle = pushop.remote.capable('unbundle')
473 if not unbundle:
473 if not unbundle:
474 lock = pushop.remote.lock()
474 lock = pushop.remote.lock()
475 try:
475 try:
476 _pushdiscovery(pushop)
476 _pushdiscovery(pushop)
477 if not _forcebundle1(pushop):
477 if not _forcebundle1(pushop):
478 _pushbundle2(pushop)
478 _pushbundle2(pushop)
479 _pushchangeset(pushop)
479 _pushchangeset(pushop)
480 _pushsyncphase(pushop)
480 _pushsyncphase(pushop)
481 _pushobsolete(pushop)
481 _pushobsolete(pushop)
482 _pushbookmark(pushop)
482 _pushbookmark(pushop)
483 finally:
483 finally:
484 if lock is not None:
484 if lock is not None:
485 lock.release()
485 lock.release()
486 if pushop.trmanager:
486 if pushop.trmanager:
487 pushop.trmanager.close()
487 pushop.trmanager.close()
488 finally:
488 finally:
489 if pushop.trmanager:
489 if pushop.trmanager:
490 pushop.trmanager.release()
490 pushop.trmanager.release()
491 if locallock is not None:
491 if locallock is not None:
492 locallock.release()
492 locallock.release()
493 if localwlock is not None:
493 if localwlock is not None:
494 localwlock.release()
494 localwlock.release()
495
495
496 return pushop
496 return pushop
497
497
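# Illustrative sketch (hypothetical helper): driving a push programmatically,
# roughly the way an extension might.  'dest' is assumed to be a repository
# path or URL; hg is imported locally here only to avoid an import cycle in
# this sketch.
def _examplepush(ui, repo, dest):
    from . import hg
    other = hg.peer(repo, {}, dest)
    pushop = push(repo, other, newbranch=True, bookmarks=('@',))
    return pushop.cgresult, pushop.bkresult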
498 # list of steps to perform discovery before push
498 # list of steps to perform discovery before push
499 pushdiscoveryorder = []
499 pushdiscoveryorder = []
500
500
501 # Mapping between step name and function
501 # Mapping between step name and function
502 #
502 #
503 # This exists to help extensions wrap steps if necessary
503 # This exists to help extensions wrap steps if necessary
504 pushdiscoverymapping = {}
504 pushdiscoverymapping = {}
505
505
506 def pushdiscovery(stepname):
506 def pushdiscovery(stepname):
507 """decorator for function performing discovery before push
507 """decorator for function performing discovery before push
508
508
509 The function is added to the step -> function mapping and appended to the
509 The function is added to the step -> function mapping and appended to the
510 list of steps. Beware that decorated functions will be added in order (this
510 list of steps. Beware that decorated functions will be added in order (this
511 may matter).
511 may matter).
512
512
513 You can only use this decorator for a new step; if you want to wrap a step
513 You can only use this decorator for a new step; if you want to wrap a step
514 from an extension, change the pushdiscoverymapping dictionary directly."""
514 from an extension, change the pushdiscoverymapping dictionary directly."""
515 def dec(func):
515 def dec(func):
516 assert stepname not in pushdiscoverymapping
516 assert stepname not in pushdiscoverymapping
517 pushdiscoverymapping[stepname] = func
517 pushdiscoverymapping[stepname] = func
518 pushdiscoveryorder.append(stepname)
518 pushdiscoveryorder.append(stepname)
519 return func
519 return func
520 return dec
520 return dec
521
521
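# Illustrative sketch (hypothetical step): how an extra discovery step can be
# registered with the decorator above; steps run in registration order, so
# placement in the file matters.
@pushdiscovery('examplestep')
def _pushdiscoveryexample(pushop):
    pushop.ui.debug('example discovery step ran\n')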
522 def _pushdiscovery(pushop):
522 def _pushdiscovery(pushop):
523 """Run all discovery steps"""
523 """Run all discovery steps"""
524 for stepname in pushdiscoveryorder:
524 for stepname in pushdiscoveryorder:
525 step = pushdiscoverymapping[stepname]
525 step = pushdiscoverymapping[stepname]
526 step(pushop)
526 step(pushop)
527
527
528 @pushdiscovery('changeset')
528 @pushdiscovery('changeset')
529 def _pushdiscoverychangeset(pushop):
529 def _pushdiscoverychangeset(pushop):
530 """discover the changeset that need to be pushed"""
530 """discover the changeset that need to be pushed"""
531 fci = discovery.findcommonincoming
531 fci = discovery.findcommonincoming
532 commoninc = fci(pushop.repo, pushop.remote, force=pushop.force)
532 commoninc = fci(pushop.repo, pushop.remote, force=pushop.force)
533 common, inc, remoteheads = commoninc
533 common, inc, remoteheads = commoninc
534 fco = discovery.findcommonoutgoing
534 fco = discovery.findcommonoutgoing
535 outgoing = fco(pushop.repo, pushop.remote, onlyheads=pushop.revs,
535 outgoing = fco(pushop.repo, pushop.remote, onlyheads=pushop.revs,
536 commoninc=commoninc, force=pushop.force)
536 commoninc=commoninc, force=pushop.force)
537 pushop.outgoing = outgoing
537 pushop.outgoing = outgoing
538 pushop.remoteheads = remoteheads
538 pushop.remoteheads = remoteheads
539 pushop.incoming = inc
539 pushop.incoming = inc
540
540
541 @pushdiscovery('phase')
541 @pushdiscovery('phase')
542 def _pushdiscoveryphase(pushop):
542 def _pushdiscoveryphase(pushop):
543 """discover the phase that needs to be pushed
543 """discover the phase that needs to be pushed
544
544
545 (computed for both the success and failure cases of the changeset push)"""
545 (computed for both the success and failure cases of the changeset push)"""
546 outgoing = pushop.outgoing
546 outgoing = pushop.outgoing
547 unfi = pushop.repo.unfiltered()
547 unfi = pushop.repo.unfiltered()
548 remotephases = pushop.remote.listkeys('phases')
548 remotephases = pushop.remote.listkeys('phases')
549 publishing = remotephases.get('publishing', False)
549 publishing = remotephases.get('publishing', False)
550 if (pushop.ui.configbool('ui', '_usedassubrepo')
550 if (pushop.ui.configbool('ui', '_usedassubrepo')
551 and remotephases # server supports phases
551 and remotephases # server supports phases
552 and not pushop.outgoing.missing # no changesets to be pushed
552 and not pushop.outgoing.missing # no changesets to be pushed
553 and publishing):
553 and publishing):
554 # When:
554 # When:
555 # - this is a subrepo push
555 # - this is a subrepo push
556 # - and the remote supports phases
556 # - and the remote supports phases
557 # - and no changesets are to be pushed
557 # - and no changesets are to be pushed
558 # - and the remote is publishing
558 # - and the remote is publishing
559 # We may be in the issue 3871 case!
559 # We may be in the issue 3871 case!
560 # We drop the phase synchronisation that would otherwise be done as a
560 # We drop the phase synchronisation that would otherwise be done as a
561 # courtesy to publish changesets that are locally draft but already
561 # courtesy to publish changesets that are locally draft but already
562 # present on the remote.
562 # present on the remote.
563 remotephases = {'publishing': 'True'}
563 remotephases = {'publishing': 'True'}
564 ana = phases.analyzeremotephases(pushop.repo,
564 ana = phases.analyzeremotephases(pushop.repo,
565 pushop.fallbackheads,
565 pushop.fallbackheads,
566 remotephases)
566 remotephases)
567 pheads, droots = ana
567 pheads, droots = ana
568 extracond = ''
568 extracond = ''
569 if not publishing:
569 if not publishing:
570 extracond = ' and public()'
570 extracond = ' and public()'
571 revset = 'heads((%%ln::%%ln) %s)' % extracond
571 revset = 'heads((%%ln::%%ln) %s)' % extracond
572 # Get the list of all revs that are draft on the remote but public here.
572 # Get the list of all revs that are draft on the remote but public here.
573 # XXX Beware that the revset breaks if droots is not strictly made of
573 # XXX Beware that the revset breaks if droots is not strictly made of
574 # XXX roots; we may want to ensure it is, but that is costly
574 # XXX roots; we may want to ensure it is, but that is costly
575 fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
575 fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
576 if not outgoing.missing:
576 if not outgoing.missing:
577 future = fallback
577 future = fallback
578 else:
578 else:
579 # add the changesets we are going to push as draft
579 # add the changesets we are going to push as draft
580 #
580 #
581 # should not be necessary for a publishing server, but because of an
581 # should not be necessary for a publishing server, but because of an
582 # issue fixed in xxxxx we have to do it anyway.
582 # issue fixed in xxxxx we have to do it anyway.
583 fdroots = list(unfi.set('roots(%ln + %ln::)',
583 fdroots = list(unfi.set('roots(%ln + %ln::)',
584 outgoing.missing, droots))
584 outgoing.missing, droots))
585 fdroots = [f.node() for f in fdroots]
585 fdroots = [f.node() for f in fdroots]
586 future = list(unfi.set(revset, fdroots, pushop.futureheads))
586 future = list(unfi.set(revset, fdroots, pushop.futureheads))
587 pushop.outdatedphases = future
587 pushop.outdatedphases = future
588 pushop.fallbackoutdatedphases = fallback
588 pushop.fallbackoutdatedphases = fallback
589
589
590 @pushdiscovery('obsmarker')
590 @pushdiscovery('obsmarker')
591 def _pushdiscoveryobsmarkers(pushop):
591 def _pushdiscoveryobsmarkers(pushop):
592 if (obsolete.isenabled(pushop.repo, obsolete.exchangeopt)
592 if (obsolete.isenabled(pushop.repo, obsolete.exchangeopt)
593 and pushop.repo.obsstore
593 and pushop.repo.obsstore
594 and 'obsolete' in pushop.remote.listkeys('namespaces')):
594 and 'obsolete' in pushop.remote.listkeys('namespaces')):
595 repo = pushop.repo
595 repo = pushop.repo
596 # very naive computation that can be quite expensive on big repos.
596 # very naive computation that can be quite expensive on big repos.
597 # However, evolution is currently slow on them anyway.
597 # However, evolution is currently slow on them anyway.
598 nodes = (c.node() for c in repo.set('::%ln', pushop.futureheads))
598 nodes = (c.node() for c in repo.set('::%ln', pushop.futureheads))
599 pushop.outobsmarkers = pushop.repo.obsstore.relevantmarkers(nodes)
599 pushop.outobsmarkers = pushop.repo.obsstore.relevantmarkers(nodes)

@pushdiscovery('bookmarks')
def _pushdiscoverybookmarks(pushop):
    ui = pushop.ui
    repo = pushop.repo.unfiltered()
    remote = pushop.remote
    ui.debug("checking for updated bookmarks\n")
    ancestors = ()
    if pushop.revs:
        revnums = map(repo.changelog.rev, pushop.revs)
        ancestors = repo.changelog.ancestors(revnums, inclusive=True)
    remotebookmark = remote.listkeys('bookmarks')

    explicit = set([repo._bookmarks.expandname(bookmark)
                    for bookmark in pushop.bookmarks])

    remotebookmark = bookmod.unhexlifybookmarks(remotebookmark)
    comp = bookmod.comparebookmarks(repo, repo._bookmarks, remotebookmark)

    def safehex(x):
        if x is None:
            return x
        return hex(x)

    def hexifycompbookmarks(bookmarks):
        for b, scid, dcid in bookmarks:
            yield b, safehex(scid), safehex(dcid)

    comp = [hexifycompbookmarks(marks) for marks in comp]
    addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp

    for b, scid, dcid in advsrc:
        if b in explicit:
            explicit.remove(b)
        if not ancestors or repo[scid].rev() in ancestors:
            pushop.outbookmarks.append((b, dcid, scid))
    # search added bookmark
    for b, scid, dcid in addsrc:
        if b in explicit:
            explicit.remove(b)
        pushop.outbookmarks.append((b, '', scid))
    # search for overwritten bookmark
    for b, scid, dcid in list(advdst) + list(diverge) + list(differ):
        if b in explicit:
            explicit.remove(b)
        pushop.outbookmarks.append((b, dcid, scid))
    # search for bookmark to delete
    for b, scid, dcid in adddst:
        if b in explicit:
            explicit.remove(b)
        # treat as "deleted locally"
        pushop.outbookmarks.append((b, dcid, ''))
    # identical bookmarks shouldn't get reported
    for b, scid, dcid in same:
        if b in explicit:
            explicit.remove(b)

    if explicit:
        explicit = sorted(explicit)
        # we should probably list all of them
        ui.warn(_('bookmark %s does not exist on the local '
                  'or remote repository!\n') % explicit[0])
        pushop.bkresult = 2

    pushop.outbookmarks.sort()
def _pushcheckoutgoing(pushop):
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    if not outgoing.missing:
        # nothing to push
        scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
        return False
    # something to push
    if not pushop.force:
        # if repo.obsstore == False --> no obsolete
        # then, save the iteration
        if unfi.obsstore:
            # these messages live in variables to keep the lines below the
            # 80-char limit
            mso = _("push includes obsolete changeset: %s!")
            mscd = _("push includes content-divergent changeset: %s!")
            mst = {"orphan": _("push includes orphan changeset: %s!"),
                   "bumped": _("push includes bumped changeset: %s!"),
                   "content-divergent": mscd}
            # If we are to push and there is at least one obsolete or
            # unstable changeset in missing, then at least one of the
            # missing heads will be obsolete or unstable, so checking
            # heads only is ok.
            for node in outgoing.missingheads:
                ctx = unfi[node]
                if ctx.obsolete():
                    raise error.Abort(mso % ctx)
                elif ctx.troubled():
                    raise error.Abort(mst[ctx.troubles()[0]] % ctx)

    discovery.checkheads(pushop)
    return True

# List of names of steps to perform for an outgoing bundle2, order matters.
b2partsgenorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
b2partsgenmapping = {}

def b2partsgenerator(stepname, idx=None):
    """decorator for function generating bundle2 part

    The function is added to the step -> function mapping and appended to the
    list of steps. Beware that decorated functions will be added in order
    (this may matter).

    You can only use this decorator for new steps; if you want to wrap a step
    from an extension, change the b2partsgenmapping dictionary directly."""
    def dec(func):
        assert stepname not in b2partsgenmapping
        b2partsgenmapping[stepname] = func
        if idx is None:
            b2partsgenorder.append(stepname)
        else:
            b2partsgenorder.insert(idx, stepname)
        return func
    return dec

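# Illustrative sketch only (not part of the module): how a hypothetical
# extension would use the decorator above to register an additional push
# step. The step name 'myext-note' and part type 'myext:note' are invented
# for the example; real extensions usually perform this registration when
# they are loaded, and nothing calls this helper here.
def _exampleregisterb2part():
    @b2partsgenerator('myext-note')
    def _pushb2myextnote(pushop, bundler):
        # honour the stepsdone protocol used by the generators below
        if 'myext-note' in pushop.stepsdone:
            return
        pushop.stepsdone.add('myext-note')
        bundler.newpart('myext:note', data='hello from myext')
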
def _pushb2ctxcheckheads(pushop, bundler):
    """Generate race condition checking parts

    Exists as an independent function to aid extensions
    """
    # * 'force' does not check for push races,
    # * if we don't push anything, there is nothing to check.
    if not pushop.force and pushop.outgoing.missingheads:
        allowunrelated = 'related' in bundler.capabilities.get('checkheads', ())
        emptyremote = pushop.pushbranchmap is None
        if not allowunrelated or emptyremote:
            bundler.newpart('check:heads', data=iter(pushop.remoteheads))
        else:
            affected = set()
            for branch, heads in pushop.pushbranchmap.iteritems():
                remoteheads, newheads, unsyncedheads, discardedheads = heads
                if remoteheads is not None:
                    remote = set(remoteheads)
                    affected |= set(discardedheads) & remote
                    affected |= remote - set(newheads)
            if affected:
                data = iter(sorted(affected))
                bundler.newpart('check:updated-heads', data=data)

@b2partsgenerator('changeset')
def _pushb2ctx(pushop, bundler):
    """handle changegroup push through bundle2

    addchangegroup result is stored in the ``pushop.cgresult`` attribute.
    """
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    # Send known heads to the server for race detection.
    if not _pushcheckoutgoing(pushop):
        return
    pushop.repo.prepushoutgoinghooks(pushop)

    _pushb2ctxcheckheads(pushop, bundler)

    b2caps = bundle2.bundle2caps(pushop.remote)
    version = '01'
    cgversions = b2caps.get('changegroup')
    if cgversions: # 3.1 and 3.2 ship with an empty value
        cgversions = [v for v in cgversions
                      if v in changegroup.supportedoutgoingversions(
                          pushop.repo)]
        if not cgversions:
            raise ValueError(_('no common changegroup version'))
        version = max(cgversions)
    cg = changegroup.getlocalchangegroupraw(pushop.repo, 'push',
                                            pushop.outgoing,
                                            version=version)
    cgpart = bundler.newpart('changegroup', data=cg)
    if cgversions:
        cgpart.addparam('version', version)
    if 'treemanifest' in pushop.repo.requirements:
        cgpart.addparam('treemanifest', '1')
    def handlereply(op):
        """extract addchangegroup returns from server reply"""
        cgreplies = op.records.getreplies(cgpart.id)
        assert len(cgreplies['changegroup']) == 1
        pushop.cgresult = cgreplies['changegroup'][0]['return']
    return handlereply

@b2partsgenerator('phase')
def _pushb2phases(pushop, bundler):
    """handle phase push through bundle2"""
    if 'phases' in pushop.stepsdone:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    if 'pushkey' not in b2caps:
        return
    pushop.stepsdone.add('phases')
    part2node = []

    def handlefailure(pushop, exc):
        targetid = int(exc.partid)
        for partid, node in part2node:
            if partid == targetid:
                raise error.Abort(_('updating %s to public failed') % node)

    enc = pushkey.encode
    for newremotehead in pushop.outdatedphases:
        part = bundler.newpart('pushkey')
        part.addparam('namespace', enc('phases'))
        part.addparam('key', enc(newremotehead.hex()))
        part.addparam('old', enc(str(phases.draft)))
        part.addparam('new', enc(str(phases.public)))
        part2node.append((part.id, newremotehead))
        pushop.pkfailcb[part.id] = handlefailure

    def handlereply(op):
        for partid, node in part2node:
            partrep = op.records.getreplies(partid)
            results = partrep['pushkey']
            assert len(results) <= 1
            msg = None
            if not results:
                msg = _('server ignored update of %s to public!\n') % node
            elif not int(results[0]['return']):
                msg = _('updating %s to public failed!\n') % node
            if msg is not None:
                pushop.ui.warn(msg)
    return handlereply

@b2partsgenerator('obsmarkers')
def _pushb2obsmarkers(pushop, bundler):
    if 'obsmarkers' in pushop.stepsdone:
        return
    remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
    if obsolete.commonversion(remoteversions) is None:
        return
    pushop.stepsdone.add('obsmarkers')
    if pushop.outobsmarkers:
        markers = sorted(pushop.outobsmarkers)
        bundle2.buildobsmarkerspart(bundler, markers)

@b2partsgenerator('bookmarks')
def _pushb2bookmarks(pushop, bundler):
    """handle bookmark push through bundle2"""
    if 'bookmarks' in pushop.stepsdone:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    if 'pushkey' not in b2caps:
        return
    pushop.stepsdone.add('bookmarks')
    part2book = []
    enc = pushkey.encode

    def handlefailure(pushop, exc):
        targetid = int(exc.partid)
        for partid, book, action in part2book:
            if partid == targetid:
                raise error.Abort(bookmsgmap[action][1].rstrip() % book)
        # we should not be called for a part we did not generate
        assert False

    for book, old, new in pushop.outbookmarks:
        part = bundler.newpart('pushkey')
        part.addparam('namespace', enc('bookmarks'))
        part.addparam('key', enc(book))
        part.addparam('old', enc(old))
        part.addparam('new', enc(new))
        action = 'update'
        if not old:
            action = 'export'
        elif not new:
            action = 'delete'
        part2book.append((part.id, book, action))
        pushop.pkfailcb[part.id] = handlefailure

    def handlereply(op):
        ui = pushop.ui
        for partid, book, action in part2book:
            partrep = op.records.getreplies(partid)
            results = partrep['pushkey']
            assert len(results) <= 1
            if not results:
                pushop.ui.warn(_('server ignored bookmark %s update\n') % book)
            else:
                ret = int(results[0]['return'])
                if ret:
                    ui.status(bookmsgmap[action][0] % book)
                else:
                    ui.warn(bookmsgmap[action][1] % book)
                    if pushop.bkresult is not None:
                        pushop.bkresult = 1
    return handlereply


def _pushbundle2(pushop):
    """push data to the remote using bundle2

    The only currently supported type of data is changegroup but this will
    evolve in the future."""
    bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
    pushback = (pushop.trmanager
                and pushop.ui.configbool('experimental', 'bundle2.pushback'))

    # create reply capability
    capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo,
                                                      allowpushback=pushback))
    bundler.newpart('replycaps', data=capsblob)
    replyhandlers = []
    for partgenname in b2partsgenorder:
        partgen = b2partsgenmapping[partgenname]
        ret = partgen(pushop, bundler)
        if callable(ret):
            replyhandlers.append(ret)
    # do not push if nothing to push
    if bundler.nbparts <= 1:
        return
    stream = util.chunkbuffer(bundler.getchunks())
    try:
        try:
            reply = pushop.remote.unbundle(
                stream, ['force'], pushop.remote.url())
        except error.BundleValueError as exc:
            raise error.Abort(_('missing support for %s') % exc)
        try:
            trgetter = None
            if pushback:
                trgetter = pushop.trmanager.transaction
            op = bundle2.processbundle(pushop.repo, reply, trgetter)
        except error.BundleValueError as exc:
            raise error.Abort(_('missing support for %s') % exc)
        except bundle2.AbortFromPart as exc:
            pushop.ui.status(_('remote: %s\n') % exc)
            if exc.hint is not None:
                pushop.ui.status(_('remote: %s\n') % ('(%s)' % exc.hint))
            raise error.Abort(_('push failed on remote'))
    except error.PushkeyFailed as exc:
        partid = int(exc.partid)
        if partid not in pushop.pkfailcb:
            raise
        pushop.pkfailcb[partid](pushop, exc)
    for rephand in replyhandlers:
        rephand(op)

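# Illustrative sketch only (not part of the module): the b2partsgenerator
# docstring above says that extensions wanting to wrap an existing push step
# should swap the entry in b2partsgenmapping. A hypothetical wrapper for the
# 'changeset' step could look like this; nothing installs it here.
def _examplewrapchangesetstep():
    origgen = b2partsgenmapping['changeset']

    def wrappedgen(pushop, bundler):
        pushop.ui.debug('about to build the changegroup part\n')
        # keep returning the original reply handler so _pushbundle2() still
        # collects the addchangegroup result
        return origgen(pushop, bundler)

    b2partsgenmapping['changeset'] = wrappedgen
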
def _pushchangeset(pushop):
    """Make the actual push of changeset bundle to remote repo"""
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    if not _pushcheckoutgoing(pushop):
        return
    pushop.repo.prepushoutgoinghooks(pushop)
    outgoing = pushop.outgoing
    unbundle = pushop.remote.capable('unbundle')
    # TODO: get bundlecaps from remote
    bundlecaps = None
    # create a changegroup from local
    if pushop.revs is None and not (outgoing.excluded
                                    or pushop.repo.changelog.filteredrevs):
        # push everything,
        # use the fast path, no race possible on push
        bundler = changegroup.cg1packer(pushop.repo, bundlecaps)
        cg = changegroup.getsubset(pushop.repo,
                                   outgoing,
                                   bundler,
                                   'push',
                                   fastpath=True)
    else:
        cg = changegroup.getchangegroup(pushop.repo, 'push', outgoing,
                                        bundlecaps=bundlecaps)

    # apply changegroup to remote
    if unbundle:
        # local repo finds heads on server, finds out what
        # revs it must push. once revs transferred, if server
        # finds it has different heads (someone else won
        # commit/push race), server aborts.
        if pushop.force:
            remoteheads = ['force']
        else:
            remoteheads = pushop.remoteheads
        # ssh: return remote's addchangegroup()
        # http: return remote's addchangegroup() or 0 for error
        pushop.cgresult = pushop.remote.unbundle(cg, remoteheads,
                                                 pushop.repo.url())
    else:
        # we return an integer indicating remote head count
        # change
        pushop.cgresult = pushop.remote.addchangegroup(cg, 'push',
                                                       pushop.repo.url())

def _pushsyncphase(pushop):
    """synchronise phase information locally and remotely"""
    cheads = pushop.commonheads
    # even when we don't push, exchanging phase data is useful
    remotephases = pushop.remote.listkeys('phases')
    if (pushop.ui.configbool('ui', '_usedassubrepo')
        and remotephases    # server supports phases
        and pushop.cgresult is None # nothing was pushed
        and remotephases.get('publishing', False)):
        # When:
        # - this is a subrepo push
        # - and the remote supports phases
        # - and no changeset was pushed
        # - and the remote is publishing
        # We may be in issue 3871 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        remotephases = {'publishing': 'True'}
    if not remotephases: # old server or public only reply from non-publishing
        _localphasemove(pushop, cheads)
        # don't push any phase data as there is nothing to push
    else:
        ana = phases.analyzeremotephases(pushop.repo, cheads,
                                         remotephases)
        pheads, droots = ana
        ### Apply remote phase on local
        if remotephases.get('publishing', False):
            _localphasemove(pushop, cheads)
        else: # publish = False
            _localphasemove(pushop, pheads)
            _localphasemove(pushop, cheads, phases.draft)
        ### Apply local phase on remote

        if pushop.cgresult:
            if 'phases' in pushop.stepsdone:
                # phases already pushed through bundle2
                return
            outdated = pushop.outdatedphases
        else:
            outdated = pushop.fallbackoutdatedphases

        pushop.stepsdone.add('phases')

        # filter heads already turned public by the push
        outdated = [c for c in outdated if c.node() not in pheads]
        # fallback to independent pushkey command
        for newremotehead in outdated:
            r = pushop.remote.pushkey('phases',
                                      newremotehead.hex(),
                                      str(phases.draft),
                                      str(phases.public))
            if not r:
                pushop.ui.warn(_('updating %s to public failed!\n')
                               % newremotehead)

def _localphasemove(pushop, nodes, phase=phases.public):
    """move <nodes> to <phase> in the local source repo"""
    if pushop.trmanager:
        phases.advanceboundary(pushop.repo,
                               pushop.trmanager.transaction(),
                               phase,
                               nodes)
    else:
        # repo is not locked, do not change any phases!
        # Informs the user that phases should have been moved when
        # applicable.
        actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
        phasestr = phases.phasenames[phase]
        if actualmoves:
            pushop.ui.status(_('cannot lock source repo, skipping '
                               'local %s phase update\n') % phasestr)

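# Illustrative note (assuming the usual phase constants, which are small
# ordered integers in current Mercurial: public == 0, draft == 1,
# secret == 2): the ``phase < pushop.repo[n].phase()`` test above is true
# exactly when the node currently sits in a higher (less public) phase than
# the one we want to move it to. A hypothetical stand-alone helper:
def _examplewouldadvance(repo, node, targetphase=phases.public):
    # True if moving ``node`` to ``targetphase`` would actually lower its
    # phase (sketch for illustration only; not used anywhere).
    return targetphase < repo[node].phase()
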
def _pushobsolete(pushop):
    """utility function to push obsolete markers to a remote"""
    if 'obsmarkers' in pushop.stepsdone:
        return
    repo = pushop.repo
    remote = pushop.remote
    pushop.stepsdone.add('obsmarkers')
    if pushop.outobsmarkers:
        pushop.ui.debug('try to push obsolete markers to remote\n')
        rslts = []
        remotedata = obsolete._pushkeyescape(sorted(pushop.outobsmarkers))
        for key in sorted(remotedata, reverse=True):
            # reverse sort to ensure we end with dump0
            data = remotedata[key]
            rslts.append(remote.pushkey('obsolete', key, '', data))
        if [r for r in rslts if not r]:
            msg = _('failed to push some obsolete markers!\n')
            repo.ui.warn(msg)

def _pushbookmark(pushop):
    """Update bookmark position on remote"""
    if pushop.cgresult == 0 or 'bookmarks' in pushop.stepsdone:
        return
    pushop.stepsdone.add('bookmarks')
    ui = pushop.ui
    remote = pushop.remote

    for b, old, new in pushop.outbookmarks:
        action = 'update'
        if not old:
            action = 'export'
        elif not new:
            action = 'delete'
        if remote.pushkey('bookmarks', b, old, new):
            ui.status(bookmsgmap[action][0] % b)
        else:
            ui.warn(bookmsgmap[action][1] % b)
            # discovery can have set the value from an invalid entry
            if pushop.bkresult is not None:
                pushop.bkresult = 1

class pulloperation(object):
    """An object that represents a single pull operation

    Its purpose is to carry pull-related state and very common operations.

    A new instance should be created at the beginning of each pull and
    discarded afterward.
    """

    def __init__(self, repo, remote, heads=None, force=False, bookmarks=(),
                 remotebookmarks=None, streamclonerequested=None):
        # repo we pull into
        self.repo = repo
        # repo we pull from
        self.remote = remote
        # revision we try to pull (None is "all")
        self.heads = heads
        # bookmark pulled explicitly
        self.explicitbookmarks = [repo._bookmarks.expandname(bookmark)
                                  for bookmark in bookmarks]
        # do we force pull?
        self.force = force
        # whether a streaming clone was requested
        self.streamclonerequested = streamclonerequested
        # transaction manager
        self.trmanager = None
        # set of common changeset between local and remote before pull
        self.common = None
        # set of pulled head
        self.rheads = None
        # list of missing changeset to fetch remotely
        self.fetch = None
        # remote bookmarks data
        self.remotebookmarks = remotebookmarks
        # result of changegroup pulling (used as return code by pull)
        self.cgresult = None
        # list of step already done
        self.stepsdone = set()
        # Whether we attempted a clone from pre-generated bundles.
        self.clonebundleattempted = False

    @util.propertycache
    def pulledsubset(self):
        """heads of the set of changesets targeted by the pull"""
        # compute target subset
        if self.heads is None:
            # We pulled everything possible
            # sync on everything common
            c = set(self.common)
            ret = list(self.common)
            for n in self.rheads:
                if n not in c:
                    ret.append(n)
            return ret
        else:
            # We pulled a specific subset
            # sync on this subset
            return self.heads

    @util.propertycache
    def canusebundle2(self):
        return not _forcebundle1(self)

    @util.propertycache
    def remotebundle2caps(self):
        return bundle2.bundle2caps(self.remote)

    def gettransaction(self):
        # deprecated; talk to trmanager directly
        return self.trmanager.transaction()

class transactionmanager(object):
    """An object to manage the life cycle of a transaction

    It creates the transaction on demand and calls the appropriate hooks when
    closing the transaction."""
    def __init__(self, repo, source, url):
        self.repo = repo
        self.source = source
        self.url = url
        self._tr = None

    def transaction(self):
        """Return an open transaction object, constructing if necessary"""
        if not self._tr:
            trname = '%s\n%s' % (self.source, util.hidepassword(self.url))
            self._tr = self.repo.transaction(trname)
            self._tr.hookargs['source'] = self.source
            self._tr.hookargs['url'] = self.url
        return self._tr

    def close(self):
        """close transaction if created"""
        if self._tr is not None:
            self._tr.close()

    def release(self):
        """release transaction if created"""
        if self._tr is not None:
            self._tr.release()

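# Illustrative sketch only (not part of the module): the expected way to
# drive transactionmanager, mirroring what pull() below does. The
# 'https://example.com/repo' URL is made up.
def _exampletransactionusage(repo):
    trmanager = transactionmanager(repo, 'pull', 'https://example.com/repo')
    try:
        tr = trmanager.transaction()   # lazily opens the real transaction
        # ... apply incoming data under ``tr`` ...
        trmanager.close()              # commits only if one was opened
    finally:
        trmanager.release()            # rolls back if close() was not reached
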
def pull(repo, remote, heads=None, force=False, bookmarks=(), opargs=None,
         streamclonerequested=None):
    """Fetch repository data from a remote.

    This is the main function used to retrieve data from a remote repository.

    ``repo`` is the local repository to clone into.
    ``remote`` is a peer instance.
    ``heads`` is an iterable of revisions we want to pull. ``None`` (the
      default) means to pull everything from the remote.
    ``bookmarks`` is an iterable of bookmarks requested to be pulled. By
      default, all remote bookmarks are pulled.
    ``opargs`` are additional keyword arguments to pass to ``pulloperation``
      initialization.
    ``streamclonerequested`` is a boolean indicating whether a "streaming
      clone" is requested. A "streaming clone" is essentially a raw file copy
      of revlogs from the server. This only works when the local repository is
      empty. The default value of ``None`` means to respect the server
      configuration for preferring stream clones.

    Returns the ``pulloperation`` created for this pull.
    """
    if opargs is None:
        opargs = {}
    pullop = pulloperation(repo, remote, heads, force, bookmarks=bookmarks,
                           streamclonerequested=streamclonerequested, **opargs)
    if pullop.remote.local():
        missing = set(pullop.remote.requirements) - pullop.repo.supported
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise error.Abort(msg)

    wlock = lock = None
    try:
        wlock = pullop.repo.wlock()
        lock = pullop.repo.lock()
        pullop.trmanager = transactionmanager(repo, 'pull', remote.url())
        streamclone.maybeperformlegacystreamclone(pullop)
        # This should ideally be in _pullbundle2(). However, it needs to run
        # before discovery to avoid extra work.
        _maybeapplyclonebundle(pullop)
        _pulldiscovery(pullop)
        if pullop.canusebundle2:
            _pullbundle2(pullop)
        _pullchangeset(pullop)
        _pullphase(pullop)
        _pullbookmarks(pullop)
        _pullobsolete(pullop)
        pullop.trmanager.close()
    finally:
        lockmod.release(pullop.trmanager, lock, wlock)

    return pullop

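# Illustrative sketch only (not part of the module): a typical caller of
# pull(). Peers normally come from hg.peer(); the URL below is made up and
# the local import avoids an import cycle at module load time.
def _examplepull(repo):
    from . import hg as hgmod
    other = hgmod.peer(repo, {}, 'https://example.com/repo')
    pullop = pull(repo, other, heads=None, force=False)
    return pullop.cgresult    # changegroup return code (None if nothing ran)
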
# list of steps to perform discovery before pull
pulldiscoveryorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
pulldiscoverymapping = {}

def pulldiscovery(stepname):
    """decorator for function performing discovery before pull

    The function is added to the step -> function mapping and appended to the
    list of steps. Beware that decorated functions will be added in order
    (this may matter).

    You can only use this decorator for a new step; if you want to wrap a step
    from an extension, change the pulldiscoverymapping dictionary directly."""
    def dec(func):
        assert stepname not in pulldiscoverymapping
        pulldiscoverymapping[stepname] = func
        pulldiscoveryorder.append(stepname)
        return func
    return dec

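# Illustrative sketch only (not part of the module): registering an extra
# discovery step with the decorator above. The step name 'myext-prefetch'
# is hypothetical; an extension would normally run this registration when
# it is loaded, and nothing calls this helper here.
def _exampleregisterpulldiscovery():
    @pulldiscovery('myext-prefetch')
    def _myextdiscovery(pullop):
        pullop.repo.ui.debug('myext: discovery against %s\n'
                             % pullop.remote.url())
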
def _pulldiscovery(pullop):
    """Run all discovery steps"""
    for stepname in pulldiscoveryorder:
        step = pulldiscoverymapping[stepname]
        step(pullop)

@pulldiscovery('b1:bookmarks')
def _pullbookmarkbundle1(pullop):
    """fetch bookmark data in bundle1 case

    If not using bundle2, we have to fetch bookmarks before changeset
    discovery to reduce the chance and impact of race conditions."""
    if pullop.remotebookmarks is not None:
        return
    if pullop.canusebundle2 and 'listkeys' in pullop.remotebundle2caps:
        # all known bundle2 servers now support listkeys, but let's be nice
        # with new implementations.
        return
    pullop.remotebookmarks = pullop.remote.listkeys('bookmarks')


@pulldiscovery('changegroup')
def _pulldiscoverychangegroup(pullop):
    """discovery phase for the pull

    Currently handles changeset discovery only; it will eventually handle
    all discovery at some point."""
    tmp = discovery.findcommonincoming(pullop.repo,
                                       pullop.remote,
                                       heads=pullop.heads,
                                       force=pullop.force)
    common, fetch, rheads = tmp
    nm = pullop.repo.unfiltered().changelog.nodemap
    if fetch and rheads:
        # If a remote head is filtered locally, let's drop it from the
        # unknown remote heads and put it back in common.
        #
        # This is a hackish solution to catch most of the "common but
        # locally hidden" situations. We do not perform discovery on the
        # unfiltered repository because it ends up doing a pathological
        # amount of round trips for a huge amount of changesets we do not
        # care about.
        #
        # If a set of such "common but filtered" changesets exists on the
        # server but does not include a remote head, we'll not be able to
        # detect it.
        scommon = set(common)
        filteredrheads = []
        for n in rheads:
            if n in nm:
                if n not in scommon:
                    common.append(n)
            else:
                filteredrheads.append(n)
        if not filteredrheads:
            fetch = []
        rheads = filteredrheads
    pullop.common = common
    pullop.fetch = fetch
    pullop.rheads = rheads

def _pullbundle2(pullop):
    """pull data using bundle2

    For now, the only supported data are changegroup."""
    kwargs = {'bundlecaps': caps20to10(pullop.repo)}

    # At the moment we don't do stream clones over bundle2. If that is
    # implemented then here's where the check for that will go.
    streaming = False

    # pulling changegroup
    pullop.stepsdone.add('changegroup')

    kwargs['common'] = pullop.common
    kwargs['heads'] = pullop.heads or pullop.rheads
    kwargs['cg'] = pullop.fetch
    if 'listkeys' in pullop.remotebundle2caps:
        kwargs['listkeys'] = ['phases']
        if pullop.remotebookmarks is None:
            # make sure to always include bookmark data when migrating
            # `hg incoming --bundle` to using this function.
            kwargs['listkeys'].append('bookmarks')

    # If this is a full pull / clone and the server supports the clone bundles
    # feature, tell the server whether we attempted a clone bundle. The
    # presence of this flag indicates the client supports clone bundles. This
    # will enable the server to treat clients that support clone bundles
    # differently from those that don't.
    if (pullop.remote.capable('clonebundles')
        and pullop.heads is None and list(pullop.common) == [nullid]):
        kwargs['cbattempted'] = pullop.clonebundleattempted

    if streaming:
        pullop.repo.ui.status(_('streaming all changes\n'))
    elif not pullop.fetch:
        pullop.repo.ui.status(_("no changes found\n"))
        pullop.cgresult = 0
    else:
        if pullop.heads is None and list(pullop.common) == [nullid]:
            pullop.repo.ui.status(_("requesting all changes\n"))
    if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
        remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
        if obsolete.commonversion(remoteversions) is not None:
            kwargs['obsmarkers'] = True
            pullop.stepsdone.add('obsmarkers')
    _pullbundle2extraprepare(pullop, kwargs)
    bundle = pullop.remote.getbundle('pull', **pycompat.strkwargs(kwargs))
    try:
        op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
    except bundle2.AbortFromPart as exc:
        pullop.repo.ui.status(_('remote: abort: %s\n') % exc)
        raise error.Abort(_('pull failed on remote'), hint=exc.hint)
    except error.BundleValueError as exc:
        raise error.Abort(_('missing support for %s') % exc)

    if pullop.fetch:
        pullop.cgresult = bundle2.combinechangegroupresults(op)

    # processing phases change
    for namespace, value in op.records['listkeys']:
        if namespace == 'phases':
            _pullapplyphases(pullop, value)

    # processing bookmark update
    for namespace, value in op.records['listkeys']:
        if namespace == 'bookmarks':
            pullop.remotebookmarks = value

    # bookmark data were either already there or pulled in the bundle
    if pullop.remotebookmarks is not None:
        _pullbookmarks(pullop)

def _pullbundle2extraprepare(pullop, kwargs):
    """hook function so that extensions can extend the getbundle call"""
    pass

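# Illustrative sketch only (not part of the module): the hook above exists
# so extensions can add arguments to the getbundle call. In a hypothetical
# extension module one might wrap it as follows; the 'myext-flag' argument
# is invented and a real server would have to understand it.
def _exampleextensionsetup(ui):
    from mercurial import exchange, extensions

    def _myextraprepare(orig, pullop, kwargs):
        kwargs['myext-flag'] = True
        return orig(pullop, kwargs)

    extensions.wrapfunction(exchange, '_pullbundle2extraprepare',
                            _myextraprepare)
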
def _pullchangeset(pullop):
    """pull changeset from unbundle into the local repo"""
    # We delay opening the transaction as late as possible so that we don't
    # open a transaction for nothing, and so that we don't break a future
    # useful rollback call.
    if 'changegroup' in pullop.stepsdone:
        return
    pullop.stepsdone.add('changegroup')
    if not pullop.fetch:
        pullop.repo.ui.status(_("no changes found\n"))
        pullop.cgresult = 0
        return
    tr = pullop.gettransaction()
    if pullop.heads is None and list(pullop.common) == [nullid]:
        pullop.repo.ui.status(_("requesting all changes\n"))
    elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
        # issue1320, avoid a race if remote changed after discovery
        pullop.heads = pullop.rheads

    if pullop.remote.capable('getbundle'):
        # TODO: get bundlecaps from remote
        cg = pullop.remote.getbundle('pull', common=pullop.common,
                                     heads=pullop.heads or pullop.rheads)
    elif pullop.heads is None:
        cg = pullop.remote.changegroup(pullop.fetch, 'pull')
    elif not pullop.remote.capable('changegroupsubset'):
        raise error.Abort(_("partial pull cannot be done because "
                            "other repository doesn't support "
                            "changegroupsubset."))
    else:
        cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
    bundleop = bundle2.applybundle(pullop.repo, cg, tr, 'pull',
                                   pullop.remote.url())
    pullop.cgresult = bundle2.combinechangegroupresults(bundleop)

1456 def _pullphase(pullop):
1457 def _pullphase(pullop):
1457 # Get remote phases data from remote
1458 # Get remote phases data from remote
1458 if 'phases' in pullop.stepsdone:
1459 if 'phases' in pullop.stepsdone:
1459 return
1460 return
1460 remotephases = pullop.remote.listkeys('phases')
1461 remotephases = pullop.remote.listkeys('phases')
1461 _pullapplyphases(pullop, remotephases)
1462 _pullapplyphases(pullop, remotephases)
1462
1463
1463 def _pullapplyphases(pullop, remotephases):
1464 def _pullapplyphases(pullop, remotephases):
1464 """apply phase movement from observed remote state"""
1465 """apply phase movement from observed remote state"""
1465 if 'phases' in pullop.stepsdone:
1466 if 'phases' in pullop.stepsdone:
1466 return
1467 return
1467 pullop.stepsdone.add('phases')
1468 pullop.stepsdone.add('phases')
1468 publishing = bool(remotephases.get('publishing', False))
1469 publishing = bool(remotephases.get('publishing', False))
1469 if remotephases and not publishing:
1470 if remotephases and not publishing:
1470 # remote is new and non-publishing
1471 # remote is new and non-publishing
1471 pheads, _dr = phases.analyzeremotephases(pullop.repo,
1472 pheads, _dr = phases.analyzeremotephases(pullop.repo,
1472 pullop.pulledsubset,
1473 pullop.pulledsubset,
1473 remotephases)
1474 remotephases)
1474 dheads = pullop.pulledsubset
1475 dheads = pullop.pulledsubset
1475 else:
1476 else:
1476 # Remote is old or publishing; all common changesets
1477 # Remote is old or publishing; all common changesets
1477 # should be seen as public
1478 # should be seen as public
1478 pheads = pullop.pulledsubset
1479 pheads = pullop.pulledsubset
1479 dheads = []
1480 dheads = []
1480 unfi = pullop.repo.unfiltered()
1481 unfi = pullop.repo.unfiltered()
1481 phase = unfi._phasecache.phase
1482 phase = unfi._phasecache.phase
1482 rev = unfi.changelog.nodemap.get
1483 rev = unfi.changelog.nodemap.get
1483 public = phases.public
1484 public = phases.public
1484 draft = phases.draft
1485 draft = phases.draft
1485
1486
1486 # exclude changesets already public locally and update the others
1487 # exclude changesets already public locally and update the others
1487 pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
1488 pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
1488 if pheads:
1489 if pheads:
1489 tr = pullop.gettransaction()
1490 tr = pullop.gettransaction()
1490 phases.advanceboundary(pullop.repo, tr, public, pheads)
1491 phases.advanceboundary(pullop.repo, tr, public, pheads)
1491
1492
1492 # exclude changesets already draft locally and update the others
1493 # exclude changesets already draft locally and update the others
1493 dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
1494 dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
1494 if dheads:
1495 if dheads:
1495 tr = pullop.gettransaction()
1496 tr = pullop.gettransaction()
1496 phases.advanceboundary(pullop.repo, tr, draft, dheads)
1497 phases.advanceboundary(pullop.repo, tr, draft, dheads)
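As a rough illustration of the data consumed above (values are hypothetical, not taken from this changeset): the remote 'phases' listkeys namespace is a plain string-to-string mapping, which is what drives the publishing/non-publishing branches above.

    # non-publishing remote: hex node of each remote draft root mapped to '1'
    remotephases = {'<hex-node-of-a-draft-root>': '1'}
    # publishing remote: an explicit marker, so everything pulled becomes public
    remotephases = {'publishing': 'True'}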
1497
1498
1498 def _pullbookmarks(pullop):
1499 def _pullbookmarks(pullop):
1499 """process the remote bookmark information to update the local one"""
1500 """process the remote bookmark information to update the local one"""
1500 if 'bookmarks' in pullop.stepsdone:
1501 if 'bookmarks' in pullop.stepsdone:
1501 return
1502 return
1502 pullop.stepsdone.add('bookmarks')
1503 pullop.stepsdone.add('bookmarks')
1503 repo = pullop.repo
1504 repo = pullop.repo
1504 remotebookmarks = pullop.remotebookmarks
1505 remotebookmarks = pullop.remotebookmarks
1505 remotebookmarks = bookmod.unhexlifybookmarks(remotebookmarks)
1506 remotebookmarks = bookmod.unhexlifybookmarks(remotebookmarks)
1506 bookmod.updatefromremote(repo.ui, repo, remotebookmarks,
1507 bookmod.updatefromremote(repo.ui, repo, remotebookmarks,
1507 pullop.remote.url(),
1508 pullop.remote.url(),
1508 pullop.gettransaction,
1509 pullop.gettransaction,
1509 explicit=pullop.explicitbookmarks)
1510 explicit=pullop.explicitbookmarks)
1510
1511
1511 def _pullobsolete(pullop):
1512 def _pullobsolete(pullop):
1512 """utility function to pull obsolete markers from a remote
1513 """utility function to pull obsolete markers from a remote
1513
1514
1514 The `gettransaction` argument is a function that returns the pull transaction,
1515 The `gettransaction` argument is a function that returns the pull transaction,
1515 creating one if necessary. We return the transaction to inform the calling code
1516 creating one if necessary. We return the transaction to inform the calling code
1516 that a new transaction has been created (when applicable).
1517 that a new transaction has been created (when applicable).
1517
1518
1518 Exists mostly to allow overriding for experimentation purposes."""
1519 Exists mostly to allow overriding for experimentation purposes."""
1519 if 'obsmarkers' in pullop.stepsdone:
1520 if 'obsmarkers' in pullop.stepsdone:
1520 return
1521 return
1521 pullop.stepsdone.add('obsmarkers')
1522 pullop.stepsdone.add('obsmarkers')
1522 tr = None
1523 tr = None
1523 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1524 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1524 pullop.repo.ui.debug('fetching remote obsolete markers\n')
1525 pullop.repo.ui.debug('fetching remote obsolete markers\n')
1525 remoteobs = pullop.remote.listkeys('obsolete')
1526 remoteobs = pullop.remote.listkeys('obsolete')
1526 if 'dump0' in remoteobs:
1527 if 'dump0' in remoteobs:
1527 tr = pullop.gettransaction()
1528 tr = pullop.gettransaction()
1528 markers = []
1529 markers = []
1529 for key in sorted(remoteobs, reverse=True):
1530 for key in sorted(remoteobs, reverse=True):
1530 if key.startswith('dump'):
1531 if key.startswith('dump'):
1531 data = util.b85decode(remoteobs[key])
1532 data = util.b85decode(remoteobs[key])
1532 version, newmarks = obsolete._readmarkers(data)
1533 version, newmarks = obsolete._readmarkers(data)
1533 markers += newmarks
1534 markers += newmarks
1534 if markers:
1535 if markers:
1535 pullop.repo.obsstore.add(tr, markers)
1536 pullop.repo.obsstore.add(tr, markers)
1536 pullop.repo.invalidatevolatilesets()
1537 pullop.repo.invalidatevolatilesets()
1537 return tr
1538 return tr
1538
1539
1539 def caps20to10(repo):
1540 def caps20to10(repo):
1540 """return a set with appropriate options to use bundle20 during getbundle"""
1541 """return a set with appropriate options to use bundle20 during getbundle"""
1541 caps = {'HG20'}
1542 caps = {'HG20'}
1542 capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo))
1543 capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo))
1543 caps.add('bundle2=' + urlreq.quote(capsblob))
1544 caps.add('bundle2=' + urlreq.quote(capsblob))
1544 return caps
1545 return caps
1545
1546
1546 # List of names of steps to perform for a bundle2 for getbundle, order matters.
1547 # List of names of steps to perform for a bundle2 for getbundle, order matters.
1547 getbundle2partsorder = []
1548 getbundle2partsorder = []
1548
1549
1549 # Mapping between step name and function
1550 # Mapping between step name and function
1550 #
1551 #
1551 # This exists to help extensions wrap steps if necessary
1552 # This exists to help extensions wrap steps if necessary
1552 getbundle2partsmapping = {}
1553 getbundle2partsmapping = {}
1553
1554
1554 def getbundle2partsgenerator(stepname, idx=None):
1555 def getbundle2partsgenerator(stepname, idx=None):
1555 """decorator for function generating bundle2 part for getbundle
1556 """decorator for function generating bundle2 part for getbundle
1556
1557
1557 The function is added to the step -> function mapping and appended to the
1558 The function is added to the step -> function mapping and appended to the
1558 list of steps. Beware that decorated functions will be added in order
1559 list of steps. Beware that decorated functions will be added in order
1559 (this may matter).
1560 (this may matter).
1560
1561
1561 You can only use this decorator for new steps; if you want to wrap a step
1562 You can only use this decorator for new steps; if you want to wrap a step
1562 from an extension, modify the getbundle2partsmapping dictionary directly."""
1563 from an extension, modify the getbundle2partsmapping dictionary directly."""
1563 def dec(func):
1564 def dec(func):
1564 assert stepname not in getbundle2partsmapping
1565 assert stepname not in getbundle2partsmapping
1565 getbundle2partsmapping[stepname] = func
1566 getbundle2partsmapping[stepname] = func
1566 if idx is None:
1567 if idx is None:
1567 getbundle2partsorder.append(stepname)
1568 getbundle2partsorder.append(stepname)
1568 else:
1569 else:
1569 getbundle2partsorder.insert(idx, stepname)
1570 getbundle2partsorder.insert(idx, stepname)
1570 return func
1571 return func
1571 return dec
1572 return dec
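A minimal sketch of the two usages the docstring above describes, assuming a hypothetical part name 'x-myext-data' and payload; real extension code would live in its own module rather than in this file:

    from mercurial import exchange

    @exchange.getbundle2partsgenerator('x-myext-data')
    def _getbundlemyextpart(bundler, repo, source, bundlecaps=None,
                            b2caps=None, **kwargs):
        # advisory part: clients without the extension simply ignore it
        bundler.newpart('x-myext-data', data=b'payload', mandatory=False)

    # wrapping an existing step instead: replace its entry in the mapping
    origcg = exchange.getbundle2partsmapping['changegroup']
    def _wrappedcgpart(bundler, repo, source, **kwargs):
        return origcg(bundler, repo, source, **kwargs)
    exchange.getbundle2partsmapping['changegroup'] = _wrappedcgpart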
1572
1573
1573 def bundle2requested(bundlecaps):
1574 def bundle2requested(bundlecaps):
1574 if bundlecaps is not None:
1575 if bundlecaps is not None:
1575 return any(cap.startswith('HG2') for cap in bundlecaps)
1576 return any(cap.startswith('HG2') for cap in bundlecaps)
1576 return False
1577 return False
1577
1578
1578 def getbundlechunks(repo, source, heads=None, common=None, bundlecaps=None,
1579 def getbundlechunks(repo, source, heads=None, common=None, bundlecaps=None,
1579 **kwargs):
1580 **kwargs):
1580 """Return chunks constituting a bundle's raw data.
1581 """Return chunks constituting a bundle's raw data.
1581
1582
1582 Could be a bundle HG10 or a bundle HG20 depending on bundlecaps
1583 Could be a bundle HG10 or a bundle HG20 depending on bundlecaps
1583 passed.
1584 passed.
1584
1585
1585 Returns an iterator over raw chunks (of varying sizes).
1586 Returns an iterator over raw chunks (of varying sizes).
1586 """
1587 """
1587 kwargs = pycompat.byteskwargs(kwargs)
1588 kwargs = pycompat.byteskwargs(kwargs)
1588 usebundle2 = bundle2requested(bundlecaps)
1589 usebundle2 = bundle2requested(bundlecaps)
1589 # bundle10 case
1590 # bundle10 case
1590 if not usebundle2:
1591 if not usebundle2:
1591 if bundlecaps and not kwargs.get('cg', True):
1592 if bundlecaps and not kwargs.get('cg', True):
1592 raise ValueError(_('request for bundle10 must include changegroup'))
1593 raise ValueError(_('request for bundle10 must include changegroup'))
1593
1594
1594 if kwargs:
1595 if kwargs:
1595 raise ValueError(_('unsupported getbundle arguments: %s')
1596 raise ValueError(_('unsupported getbundle arguments: %s')
1596 % ', '.join(sorted(kwargs.keys())))
1597 % ', '.join(sorted(kwargs.keys())))
1597 outgoing = _computeoutgoing(repo, heads, common)
1598 outgoing = _computeoutgoing(repo, heads, common)
1598 bundler = changegroup.getbundler('01', repo, bundlecaps)
1599 bundler = changegroup.getbundler('01', repo, bundlecaps)
1599 return changegroup.getsubsetraw(repo, outgoing, bundler, source)
1600 return changegroup.getsubsetraw(repo, outgoing, bundler, source)
1600
1601
1601 # bundle20 case
1602 # bundle20 case
1602 b2caps = {}
1603 b2caps = {}
1603 for bcaps in bundlecaps:
1604 for bcaps in bundlecaps:
1604 if bcaps.startswith('bundle2='):
1605 if bcaps.startswith('bundle2='):
1605 blob = urlreq.unquote(bcaps[len('bundle2='):])
1606 blob = urlreq.unquote(bcaps[len('bundle2='):])
1606 b2caps.update(bundle2.decodecaps(blob))
1607 b2caps.update(bundle2.decodecaps(blob))
1607 bundler = bundle2.bundle20(repo.ui, b2caps)
1608 bundler = bundle2.bundle20(repo.ui, b2caps)
1608
1609
1609 kwargs['heads'] = heads
1610 kwargs['heads'] = heads
1610 kwargs['common'] = common
1611 kwargs['common'] = common
1611
1612
1612 for name in getbundle2partsorder:
1613 for name in getbundle2partsorder:
1613 func = getbundle2partsmapping[name]
1614 func = getbundle2partsmapping[name]
1614 func(bundler, repo, source, bundlecaps=bundlecaps, b2caps=b2caps,
1615 func(bundler, repo, source, bundlecaps=bundlecaps, b2caps=b2caps,
1615 **pycompat.strkwargs(kwargs))
1616 **pycompat.strkwargs(kwargs))
1616
1617
1617 return bundler.getchunks()
1618 return bundler.getchunks()
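To make the dispatch above concrete, a small sketch with illustrative capability values (the bundle2 blob is a placeholder, not a real encoded value):

    bundlecaps = {'HG20', 'bundle2=<percent-encoded caps blob>'}
    bundle2requested(bundlecaps)   # True: the bundle20 path above is taken
    bundle2requested(None)         # False: plain '01' changegroup (bundle10)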
1618
1619
1619 @getbundle2partsgenerator('changegroup')
1620 @getbundle2partsgenerator('changegroup')
1620 def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
1621 def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
1621 b2caps=None, heads=None, common=None, **kwargs):
1622 b2caps=None, heads=None, common=None, **kwargs):
1622 """add a changegroup part to the requested bundle"""
1623 """add a changegroup part to the requested bundle"""
1623 cg = None
1624 cg = None
1624 if kwargs.get('cg', True):
1625 if kwargs.get('cg', True):
1625 # build changegroup bundle here.
1626 # build changegroup bundle here.
1626 version = '01'
1627 version = '01'
1627 cgversions = b2caps.get('changegroup')
1628 cgversions = b2caps.get('changegroup')
1628 if cgversions: # 3.1 and 3.2 ship with an empty value
1629 if cgversions: # 3.1 and 3.2 ship with an empty value
1629 cgversions = [v for v in cgversions
1630 cgversions = [v for v in cgversions
1630 if v in changegroup.supportedoutgoingversions(repo)]
1631 if v in changegroup.supportedoutgoingversions(repo)]
1631 if not cgversions:
1632 if not cgversions:
1632 raise ValueError(_('no common changegroup version'))
1633 raise ValueError(_('no common changegroup version'))
1633 version = max(cgversions)
1634 version = max(cgversions)
1634 outgoing = _computeoutgoing(repo, heads, common)
1635 outgoing = _computeoutgoing(repo, heads, common)
1635 cg = changegroup.getlocalchangegroupraw(repo, source, outgoing,
1636 cg = changegroup.getlocalchangegroupraw(repo, source, outgoing,
1636 bundlecaps=bundlecaps,
1637 bundlecaps=bundlecaps,
1637 version=version)
1638 version=version)
1638
1639
1639 if cg:
1640 if cg:
1640 part = bundler.newpart('changegroup', data=cg)
1641 part = bundler.newpart('changegroup', data=cg)
1641 if cgversions:
1642 if cgversions:
1642 part.addparam('version', version)
1643 part.addparam('version', version)
1643 part.addparam('nbchanges', str(len(outgoing.missing)), mandatory=False)
1644 part.addparam('nbchanges', str(len(outgoing.missing)), mandatory=False)
1644 if 'treemanifest' in repo.requirements:
1645 if 'treemanifest' in repo.requirements:
1645 part.addparam('treemanifest', '1')
1646 part.addparam('treemanifest', '1')
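The version negotiation above boils down to taking the highest changegroup version both sides support, defaulting to '01' when the client advertises nothing; a sketch with hypothetical version sets:

    clientversions = {'01', '02'}          # from b2caps.get('changegroup')
    serverversions = {'01', '02', '03'}    # supportedoutgoingversions(repo)
    common = [v for v in clientversions if v in serverversions]
    version = max(common) if common else '01'   # '02' in this example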
1646
1647
1647 @getbundle2partsgenerator('listkeys')
1648 @getbundle2partsgenerator('listkeys')
1648 def _getbundlelistkeysparts(bundler, repo, source, bundlecaps=None,
1649 def _getbundlelistkeysparts(bundler, repo, source, bundlecaps=None,
1649 b2caps=None, **kwargs):
1650 b2caps=None, **kwargs):
1650 """add parts containing listkeys namespaces to the requested bundle"""
1651 """add parts containing listkeys namespaces to the requested bundle"""
1651 listkeys = kwargs.get('listkeys', ())
1652 listkeys = kwargs.get('listkeys', ())
1652 for namespace in listkeys:
1653 for namespace in listkeys:
1653 part = bundler.newpart('listkeys')
1654 part = bundler.newpart('listkeys')
1654 part.addparam('namespace', namespace)
1655 part.addparam('namespace', namespace)
1655 keys = repo.listkeys(namespace).items()
1656 keys = repo.listkeys(namespace).items()
1656 part.data = pushkey.encodekeys(keys)
1657 part.data = pushkey.encodekeys(keys)
1657
1658
1658 @getbundle2partsgenerator('obsmarkers')
1659 @getbundle2partsgenerator('obsmarkers')
1659 def _getbundleobsmarkerpart(bundler, repo, source, bundlecaps=None,
1660 def _getbundleobsmarkerpart(bundler, repo, source, bundlecaps=None,
1660 b2caps=None, heads=None, **kwargs):
1661 b2caps=None, heads=None, **kwargs):
1661 """add an obsolescence markers part to the requested bundle"""
1662 """add an obsolescence markers part to the requested bundle"""
1662 if kwargs.get('obsmarkers', False):
1663 if kwargs.get('obsmarkers', False):
1663 if heads is None:
1664 if heads is None:
1664 heads = repo.heads()
1665 heads = repo.heads()
1665 subset = [c.node() for c in repo.set('::%ln', heads)]
1666 subset = [c.node() for c in repo.set('::%ln', heads)]
1666 markers = repo.obsstore.relevantmarkers(subset)
1667 markers = repo.obsstore.relevantmarkers(subset)
1667 markers = sorted(markers)
1668 markers = sorted(markers)
1668 bundle2.buildobsmarkerspart(bundler, markers)
1669 bundle2.buildobsmarkerspart(bundler, markers)
1669
1670
1670 @getbundle2partsgenerator('hgtagsfnodes')
1671 @getbundle2partsgenerator('hgtagsfnodes')
1671 def _getbundletagsfnodes(bundler, repo, source, bundlecaps=None,
1672 def _getbundletagsfnodes(bundler, repo, source, bundlecaps=None,
1672 b2caps=None, heads=None, common=None,
1673 b2caps=None, heads=None, common=None,
1673 **kwargs):
1674 **kwargs):
1674 """Transfer the .hgtags filenodes mapping.
1675 """Transfer the .hgtags filenodes mapping.
1675
1676
1676 Only values for heads in this bundle will be transferred.
1677 Only values for heads in this bundle will be transferred.
1677
1678
1678 The part data consists of pairs of 20 byte changeset node and .hgtags
1679 The part data consists of pairs of 20 byte changeset node and .hgtags
1679 filenodes raw values.
1680 filenodes raw values.
1680 """
1681 """
1681 # Don't send unless:
1682 # Don't send unless:
1682 # - changesets are being exchanged,
1683 # - changesets are being exchanged,
1683 # - the client supports it.
1684 # - the client supports it.
1684 if not (kwargs.get('cg', True) and 'hgtagsfnodes' in b2caps):
1685 if not (kwargs.get('cg', True) and 'hgtagsfnodes' in b2caps):
1685 return
1686 return
1686
1687
1687 outgoing = _computeoutgoing(repo, heads, common)
1688 outgoing = _computeoutgoing(repo, heads, common)
1688 bundle2.addparttagsfnodescache(repo, bundler, outgoing)
1689 bundle2.addparttagsfnodescache(repo, bundler, outgoing)
1689
1690
1690 def _getbookmarks(repo, **kwargs):
1691 def _getbookmarks(repo, **kwargs):
1691 """Returns bookmark to node mapping.
1692 """Returns bookmark to node mapping.
1692
1693
1693 This function is primarily used to generate `bookmarks` bundle2 part.
1694 This function is primarily used to generate `bookmarks` bundle2 part.
1694 It is a separate function in order to make it easy to wrap it
1695 It is a separate function in order to make it easy to wrap it
1695 in extensions. Passing `kwargs` to the function makes it easy to
1696 in extensions. Passing `kwargs` to the function makes it easy to
1696 add new parameters in extensions.
1697 add new parameters in extensions.
1697 """
1698 """
1698
1699
1699 return dict(bookmod.listbinbookmarks(repo))
1700 return dict(bookmod.listbinbookmarks(repo))
1700
1701
1701 def check_heads(repo, their_heads, context):
1702 def check_heads(repo, their_heads, context):
1702 """check if the heads of a repo have been modified
1703 """check if the heads of a repo have been modified
1703
1704
1704 Used by peer for unbundling.
1705 Used by peer for unbundling.
1705 """
1706 """
1706 heads = repo.heads()
1707 heads = repo.heads()
1707 heads_hash = hashlib.sha1(''.join(sorted(heads))).digest()
1708 heads_hash = hashlib.sha1(''.join(sorted(heads))).digest()
1708 if not (their_heads == ['force'] or their_heads == heads or
1709 if not (their_heads == ['force'] or their_heads == heads or
1709 their_heads == ['hashed', heads_hash]):
1710 their_heads == ['hashed', heads_hash]):
1710 # someone else committed/pushed/unbundled while we
1711 # someone else committed/pushed/unbundled while we
1711 # were transferring data
1712 # were transferring data
1712 raise error.PushRaced('repository changed while %s - '
1713 raise error.PushRaced('repository changed while %s - '
1713 'please try again' % context)
1714 'please try again' % context)
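For reference, a sketch of the two non-trivial forms of their_heads accepted above (mirroring the check itself, with no claim about which client sends which):

    import hashlib
    # hash of the heads the client observed during discovery
    their_heads = ['hashed',
                   hashlib.sha1(''.join(sorted(repo.heads()))).digest()]
    # or an explicit request to skip the race check
    their_heads = ['force']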
1714
1715
1715 def unbundle(repo, cg, heads, source, url):
1716 def unbundle(repo, cg, heads, source, url):
1716 """Apply a bundle to a repo.
1717 """Apply a bundle to a repo.
1717
1718
1718 This function makes sure the repo is locked during the application and has a
1719 This function makes sure the repo is locked during the application and has a
1719 mechanism to check that no push race occurred between the creation of the
1720 mechanism to check that no push race occurred between the creation of the
1720 bundle and its application.
1721 bundle and its application.
1721
1722
1722 If the push was raced, a PushRaced exception is raised."""
1723 If the push was raced, a PushRaced exception is raised."""
1723 r = 0
1724 r = 0
1724 # need a transaction when processing a bundle2 stream
1725 # need a transaction when processing a bundle2 stream
1725 # [wlock, lock, tr] - needs to be an array so nested functions can modify it
1726 # [wlock, lock, tr] - needs to be an array so nested functions can modify it
1726 lockandtr = [None, None, None]
1727 lockandtr = [None, None, None]
1727 recordout = None
1728 recordout = None
1728 # quick fix for output mismatch with bundle2 in 3.4
1729 # quick fix for output mismatch with bundle2 in 3.4
1729 captureoutput = repo.ui.configbool('experimental', 'bundle2-output-capture')
1730 captureoutput = repo.ui.configbool('experimental', 'bundle2-output-capture')
1730 if url.startswith('remote:http:') or url.startswith('remote:https:'):
1731 if url.startswith('remote:http:') or url.startswith('remote:https:'):
1731 captureoutput = True
1732 captureoutput = True
1732 try:
1733 try:
1733 # note: outside bundle1, 'heads' is expected to be empty and this
1734 # note: outside bundle1, 'heads' is expected to be empty and this
1734 # 'check_heads' call will be a no-op
1735 # 'check_heads' call will be a no-op
1735 check_heads(repo, heads, 'uploading changes')
1736 check_heads(repo, heads, 'uploading changes')
1736 # push can proceed
1737 # push can proceed
1737 if not isinstance(cg, bundle2.unbundle20):
1738 if not isinstance(cg, bundle2.unbundle20):
1738 # legacy case: bundle1 (changegroup 01)
1739 # legacy case: bundle1 (changegroup 01)
1739 txnname = "\n".join([source, util.hidepassword(url)])
1740 txnname = "\n".join([source, util.hidepassword(url)])
1740 with repo.lock(), repo.transaction(txnname) as tr:
1741 with repo.lock(), repo.transaction(txnname) as tr:
1741 op = bundle2.applybundle(repo, cg, tr, source, url)
1742 op = bundle2.applybundle(repo, cg, tr, source, url)
1742 r = bundle2.combinechangegroupresults(op)
1743 r = bundle2.combinechangegroupresults(op)
1743 else:
1744 else:
1744 r = None
1745 r = None
1745 try:
1746 try:
1746 def gettransaction():
1747 def gettransaction():
1747 if not lockandtr[2]:
1748 if not lockandtr[2]:
1748 lockandtr[0] = repo.wlock()
1749 lockandtr[0] = repo.wlock()
1749 lockandtr[1] = repo.lock()
1750 lockandtr[1] = repo.lock()
1750 lockandtr[2] = repo.transaction(source)
1751 lockandtr[2] = repo.transaction(source)
1751 lockandtr[2].hookargs['source'] = source
1752 lockandtr[2].hookargs['source'] = source
1752 lockandtr[2].hookargs['url'] = url
1753 lockandtr[2].hookargs['url'] = url
1753 lockandtr[2].hookargs['bundle2'] = '1'
1754 lockandtr[2].hookargs['bundle2'] = '1'
1754 return lockandtr[2]
1755 return lockandtr[2]
1755
1756
1756 # Do greedy locking by default until we're satisfied with lazy
1757 # Do greedy locking by default until we're satisfied with lazy
1757 # locking.
1758 # locking.
1758 if not repo.ui.configbool('experimental', 'bundle2lazylocking'):
1759 if not repo.ui.configbool('experimental', 'bundle2lazylocking'):
1759 gettransaction()
1760 gettransaction()
1760
1761
1761 op = bundle2.bundleoperation(repo, gettransaction,
1762 op = bundle2.bundleoperation(repo, gettransaction,
1762 captureoutput=captureoutput)
1763 captureoutput=captureoutput)
1763 try:
1764 try:
1764 op = bundle2.processbundle(repo, cg, op=op)
1765 op = bundle2.processbundle(repo, cg, op=op)
1765 finally:
1766 finally:
1766 r = op.reply
1767 r = op.reply
1767 if captureoutput and r is not None:
1768 if captureoutput and r is not None:
1768 repo.ui.pushbuffer(error=True, subproc=True)
1769 repo.ui.pushbuffer(error=True, subproc=True)
1769 def recordout(output):
1770 def recordout(output):
1770 r.newpart('output', data=output, mandatory=False)
1771 r.newpart('output', data=output, mandatory=False)
1771 if lockandtr[2] is not None:
1772 if lockandtr[2] is not None:
1772 lockandtr[2].close()
1773 lockandtr[2].close()
1773 except BaseException as exc:
1774 except BaseException as exc:
1774 exc.duringunbundle2 = True
1775 exc.duringunbundle2 = True
1775 if captureoutput and r is not None:
1776 if captureoutput and r is not None:
1776 parts = exc._bundle2salvagedoutput = r.salvageoutput()
1777 parts = exc._bundle2salvagedoutput = r.salvageoutput()
1777 def recordout(output):
1778 def recordout(output):
1778 part = bundle2.bundlepart('output', data=output,
1779 part = bundle2.bundlepart('output', data=output,
1779 mandatory=False)
1780 mandatory=False)
1780 parts.append(part)
1781 parts.append(part)
1781 raise
1782 raise
1782 finally:
1783 finally:
1783 lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0])
1784 lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0])
1784 if recordout is not None:
1785 if recordout is not None:
1785 recordout(repo.ui.popbuffer())
1786 recordout(repo.ui.popbuffer())
1786 return r
1787 return r
1787
1788
1788 def _maybeapplyclonebundle(pullop):
1789 def _maybeapplyclonebundle(pullop):
1789 """Apply a clone bundle from a remote, if possible."""
1790 """Apply a clone bundle from a remote, if possible."""
1790
1791
1791 repo = pullop.repo
1792 repo = pullop.repo
1792 remote = pullop.remote
1793 remote = pullop.remote
1793
1794
1794 if not repo.ui.configbool('ui', 'clonebundles'):
1795 if not repo.ui.configbool('ui', 'clonebundles'):
1795 return
1796 return
1796
1797
1797 # Only run if local repo is empty.
1798 # Only run if local repo is empty.
1798 if len(repo):
1799 if len(repo):
1799 return
1800 return
1800
1801
1801 if pullop.heads:
1802 if pullop.heads:
1802 return
1803 return
1803
1804
1804 if not remote.capable('clonebundles'):
1805 if not remote.capable('clonebundles'):
1805 return
1806 return
1806
1807
1807 res = remote._call('clonebundles')
1808 res = remote._call('clonebundles')
1808
1809
1809 # If we call the wire protocol command, that's good enough to record the
1810 # If we call the wire protocol command, that's good enough to record the
1810 # attempt.
1811 # attempt.
1811 pullop.clonebundleattempted = True
1812 pullop.clonebundleattempted = True
1812
1813
1813 entries = parseclonebundlesmanifest(repo, res)
1814 entries = parseclonebundlesmanifest(repo, res)
1814 if not entries:
1815 if not entries:
1815 repo.ui.note(_('no clone bundles available on remote; '
1816 repo.ui.note(_('no clone bundles available on remote; '
1816 'falling back to regular clone\n'))
1817 'falling back to regular clone\n'))
1817 return
1818 return
1818
1819
1819 entries = filterclonebundleentries(repo, entries)
1820 entries = filterclonebundleentries(repo, entries)
1820 if not entries:
1821 if not entries:
1821 # There is a thundering herd concern here. However, if a server
1822 # There is a thundering herd concern here. However, if a server
1822 # operator doesn't advertise bundles appropriate for its clients,
1823 # operator doesn't advertise bundles appropriate for its clients,
1823 # they deserve what's coming. Furthermore, from a client's
1824 # they deserve what's coming. Furthermore, from a client's
1824 # perspective, no automatic fallback would mean not being able to
1825 # perspective, no automatic fallback would mean not being able to
1825 # clone!
1826 # clone!
1826 repo.ui.warn(_('no compatible clone bundles available on server; '
1827 repo.ui.warn(_('no compatible clone bundles available on server; '
1827 'falling back to regular clone\n'))
1828 'falling back to regular clone\n'))
1828 repo.ui.warn(_('(you may want to report this to the server '
1829 repo.ui.warn(_('(you may want to report this to the server '
1829 'operator)\n'))
1830 'operator)\n'))
1830 return
1831 return
1831
1832
1832 entries = sortclonebundleentries(repo.ui, entries)
1833 entries = sortclonebundleentries(repo.ui, entries)
1833
1834
1834 url = entries[0]['URL']
1835 url = entries[0]['URL']
1835 repo.ui.status(_('applying clone bundle from %s\n') % url)
1836 repo.ui.status(_('applying clone bundle from %s\n') % url)
1836 if trypullbundlefromurl(repo.ui, repo, url):
1837 if trypullbundlefromurl(repo.ui, repo, url):
1837 repo.ui.status(_('finished applying clone bundle\n'))
1838 repo.ui.status(_('finished applying clone bundle\n'))
1838 # Bundle failed.
1839 # Bundle failed.
1839 #
1840 #
1840 # We abort by default to avoid the thundering herd of
1841 # We abort by default to avoid the thundering herd of
1841 # clients flooding a server that was expecting expensive
1842 # clients flooding a server that was expecting expensive
1842 # clone load to be offloaded.
1843 # clone load to be offloaded.
1843 elif repo.ui.configbool('ui', 'clonebundlefallback'):
1844 elif repo.ui.configbool('ui', 'clonebundlefallback'):
1844 repo.ui.warn(_('falling back to normal clone\n'))
1845 repo.ui.warn(_('falling back to normal clone\n'))
1845 else:
1846 else:
1846 raise error.Abort(_('error applying bundle'),
1847 raise error.Abort(_('error applying bundle'),
1847 hint=_('if this error persists, consider contacting '
1848 hint=_('if this error persists, consider contacting '
1848 'the server operator or disable clone '
1849 'the server operator or disable clone '
1849 'bundles via '
1850 'bundles via '
1850 '"--config ui.clonebundles=false"'))
1851 '"--config ui.clonebundles=false"'))
1851
1852
1852 def parseclonebundlesmanifest(repo, s):
1853 def parseclonebundlesmanifest(repo, s):
1853 """Parses the raw text of a clone bundles manifest.
1854 """Parses the raw text of a clone bundles manifest.
1854
1855
1855 Returns a list of dicts. The dicts have a ``URL`` key corresponding
1856 Returns a list of dicts. The dicts have a ``URL`` key corresponding
1856 to the URL and other keys are the attributes for the entry.
1857 to the URL and other keys are the attributes for the entry.
1857 """
1858 """
1858 m = []
1859 m = []
1859 for line in s.splitlines():
1860 for line in s.splitlines():
1860 fields = line.split()
1861 fields = line.split()
1861 if not fields:
1862 if not fields:
1862 continue
1863 continue
1863 attrs = {'URL': fields[0]}
1864 attrs = {'URL': fields[0]}
1864 for rawattr in fields[1:]:
1865 for rawattr in fields[1:]:
1865 key, value = rawattr.split('=', 1)
1866 key, value = rawattr.split('=', 1)
1866 key = urlreq.unquote(key)
1867 key = urlreq.unquote(key)
1867 value = urlreq.unquote(value)
1868 value = urlreq.unquote(value)
1868 attrs[key] = value
1869 attrs[key] = value
1869
1870
1870 # Parse BUNDLESPEC into components. This makes client-side
1871 # Parse BUNDLESPEC into components. This makes client-side
1871 # preferences easier to specify since you can prefer a single
1872 # preferences easier to specify since you can prefer a single
1872 # component of the BUNDLESPEC.
1873 # component of the BUNDLESPEC.
1873 if key == 'BUNDLESPEC':
1874 if key == 'BUNDLESPEC':
1874 try:
1875 try:
1875 comp, version, params = parsebundlespec(repo, value,
1876 comp, version, params = parsebundlespec(repo, value,
1876 externalnames=True)
1877 externalnames=True)
1877 attrs['COMPRESSION'] = comp
1878 attrs['COMPRESSION'] = comp
1878 attrs['VERSION'] = version
1879 attrs['VERSION'] = version
1879 except error.InvalidBundleSpecification:
1880 except error.InvalidBundleSpecification:
1880 pass
1881 pass
1881 except error.UnsupportedBundleSpecification:
1882 except error.UnsupportedBundleSpecification:
1882 pass
1883 pass
1883
1884
1884 m.append(attrs)
1885 m.append(attrs)
1885
1886
1886 return m
1887 return m
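An illustrative manifest line and its parsed form (URL and attribute values are made up); each line is a URL followed by space-separated, percent-encoded KEY=VALUE attributes:

    line = 'https://example.com/full.hg.gz BUNDLESPEC=gzip-v2 REQUIRESNI=true'
    # parseclonebundlesmanifest(repo, line) would yield roughly:
    # [{'URL': 'https://example.com/full.hg.gz',
    #   'BUNDLESPEC': 'gzip-v2',
    #   'COMPRESSION': 'gzip',   # split out of BUNDLESPEC for preferences
    #   'VERSION': 'v2',
    #   'REQUIRESNI': 'true'}]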
1887
1888
1888 def filterclonebundleentries(repo, entries):
1889 def filterclonebundleentries(repo, entries):
1889 """Remove incompatible clone bundle manifest entries.
1890 """Remove incompatible clone bundle manifest entries.
1890
1891
1891 Accepts a list of entries parsed with ``parseclonebundlesmanifest``
1892 Accepts a list of entries parsed with ``parseclonebundlesmanifest``
1892 and returns a new list consisting of only the entries that this client
1893 and returns a new list consisting of only the entries that this client
1893 should be able to apply.
1894 should be able to apply.
1894
1895
1895 There is no guarantee we'll be able to apply all returned entries because
1896 There is no guarantee we'll be able to apply all returned entries because
1896 the metadata we use to filter on may be missing or wrong.
1897 the metadata we use to filter on may be missing or wrong.
1897 """
1898 """
1898 newentries = []
1899 newentries = []
1899 for entry in entries:
1900 for entry in entries:
1900 spec = entry.get('BUNDLESPEC')
1901 spec = entry.get('BUNDLESPEC')
1901 if spec:
1902 if spec:
1902 try:
1903 try:
1903 parsebundlespec(repo, spec, strict=True)
1904 parsebundlespec(repo, spec, strict=True)
1904 except error.InvalidBundleSpecification as e:
1905 except error.InvalidBundleSpecification as e:
1905 repo.ui.debug(str(e) + '\n')
1906 repo.ui.debug(str(e) + '\n')
1906 continue
1907 continue
1907 except error.UnsupportedBundleSpecification as e:
1908 except error.UnsupportedBundleSpecification as e:
1908 repo.ui.debug('filtering %s because unsupported bundle '
1909 repo.ui.debug('filtering %s because unsupported bundle '
1909 'spec: %s\n' % (entry['URL'], str(e)))
1910 'spec: %s\n' % (entry['URL'], str(e)))
1910 continue
1911 continue
1911
1912
1912 if 'REQUIRESNI' in entry and not sslutil.hassni:
1913 if 'REQUIRESNI' in entry and not sslutil.hassni:
1913 repo.ui.debug('filtering %s because SNI not supported\n' %
1914 repo.ui.debug('filtering %s because SNI not supported\n' %
1914 entry['URL'])
1915 entry['URL'])
1915 continue
1916 continue
1916
1917
1917 newentries.append(entry)
1918 newentries.append(entry)
1918
1919
1919 return newentries
1920 return newentries
1920
1921
1921 class clonebundleentry(object):
1922 class clonebundleentry(object):
1922 """Represents an item in a clone bundles manifest.
1923 """Represents an item in a clone bundles manifest.
1923
1924
1924 This rich class is needed to support sorting since sorted() in Python 3
1925 This rich class is needed to support sorting since sorted() in Python 3
1925 doesn't support ``cmp`` and our comparison is complex enough that ``key=``
1926 doesn't support ``cmp`` and our comparison is complex enough that ``key=``
1926 won't work.
1927 won't work.
1927 """
1928 """
1928
1929
1929 def __init__(self, value, prefers):
1930 def __init__(self, value, prefers):
1930 self.value = value
1931 self.value = value
1931 self.prefers = prefers
1932 self.prefers = prefers
1932
1933
1933 def _cmp(self, other):
1934 def _cmp(self, other):
1934 for prefkey, prefvalue in self.prefers:
1935 for prefkey, prefvalue in self.prefers:
1935 avalue = self.value.get(prefkey)
1936 avalue = self.value.get(prefkey)
1936 bvalue = other.value.get(prefkey)
1937 bvalue = other.value.get(prefkey)
1937
1938
1938 # Special case for b missing attribute and a matches exactly.
1939 # Special case for b missing attribute and a matches exactly.
1939 if avalue is not None and bvalue is None and avalue == prefvalue:
1940 if avalue is not None and bvalue is None and avalue == prefvalue:
1940 return -1
1941 return -1
1941
1942
1942 # Special case for a missing attribute and b matches exactly.
1943 # Special case for a missing attribute and b matches exactly.
1943 if bvalue is not None and avalue is None and bvalue == prefvalue:
1944 if bvalue is not None and avalue is None and bvalue == prefvalue:
1944 return 1
1945 return 1
1945
1946
1946 # We can't compare unless attribute present on both.
1947 # We can't compare unless attribute present on both.
1947 if avalue is None or bvalue is None:
1948 if avalue is None or bvalue is None:
1948 continue
1949 continue
1949
1950
1950 # Same values should fall back to next attribute.
1951 # Same values should fall back to next attribute.
1951 if avalue == bvalue:
1952 if avalue == bvalue:
1952 continue
1953 continue
1953
1954
1954 # Exact matches come first.
1955 # Exact matches come first.
1955 if avalue == prefvalue:
1956 if avalue == prefvalue:
1956 return -1
1957 return -1
1957 if bvalue == prefvalue:
1958 if bvalue == prefvalue:
1958 return 1
1959 return 1
1959
1960
1960 # Fall back to next attribute.
1961 # Fall back to next attribute.
1961 continue
1962 continue
1962
1963
1963 # If we got here we couldn't sort by attributes and prefers. Fall
1964 # If we got here we couldn't sort by attributes and prefers. Fall
1964 # back to index order.
1965 # back to index order.
1965 return 0
1966 return 0
1966
1967
1967 def __lt__(self, other):
1968 def __lt__(self, other):
1968 return self._cmp(other) < 0
1969 return self._cmp(other) < 0
1969
1970
1970 def __gt__(self, other):
1971 def __gt__(self, other):
1971 return self._cmp(other) > 0
1972 return self._cmp(other) > 0
1972
1973
1973 def __eq__(self, other):
1974 def __eq__(self, other):
1974 return self._cmp(other) == 0
1975 return self._cmp(other) == 0
1975
1976
1976 def __le__(self, other):
1977 def __le__(self, other):
1977 return self._cmp(other) <= 0
1978 return self._cmp(other) <= 0
1978
1979
1979 def __ge__(self, other):
1980 def __ge__(self, other):
1980 return self._cmp(other) >= 0
1981 return self._cmp(other) >= 0
1981
1982
1982 def __ne__(self, other):
1983 def __ne__(self, other):
1983 return self._cmp(other) != 0
1984 return self._cmp(other) != 0
1984
1985
1985 def sortclonebundleentries(ui, entries):
1986 def sortclonebundleentries(ui, entries):
1986 prefers = ui.configlist('ui', 'clonebundleprefers')
1987 prefers = ui.configlist('ui', 'clonebundleprefers')
1987 if not prefers:
1988 if not prefers:
1988 return list(entries)
1989 return list(entries)
1989
1990
1990 prefers = [p.split('=', 1) for p in prefers]
1991 prefers = [p.split('=', 1) for p in prefers]
1991
1992
1992 items = sorted(clonebundleentry(v, prefers) for v in entries)
1993 items = sorted(clonebundleentry(v, prefers) for v in entries)
1993 return [i.value for i in items]
1994 return [i.value for i in items]
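For completeness, a sketch of where the preferences above come from; the attribute values are hypothetical:

    # hgrc:
    #   [ui]
    #   clonebundleprefers = VERSION=v2, COMPRESSION=zstd
    #
    # ui.configlist('ui', 'clonebundleprefers') yields
    #   ['VERSION=v2', 'COMPRESSION=zstd']
    # and the split above produces
    #   [('VERSION', 'v2'), ('COMPRESSION', 'zstd')]
    # which clonebundleentry._cmp consults attribute by attribute.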
1994
1995
1995 def trypullbundlefromurl(ui, repo, url):
1996 def trypullbundlefromurl(ui, repo, url):
1996 """Attempt to apply a bundle from a URL."""
1997 """Attempt to apply a bundle from a URL."""
1997 with repo.lock(), repo.transaction('bundleurl') as tr:
1998 with repo.lock(), repo.transaction('bundleurl') as tr:
1998 try:
1999 try:
1999 fh = urlmod.open(ui, url)
2000 fh = urlmod.open(ui, url)
2000 cg = readbundle(ui, fh, 'stream')
2001 cg = readbundle(ui, fh, 'stream')
2001
2002
2002 if isinstance(cg, streamclone.streamcloneapplier):
2003 if isinstance(cg, streamclone.streamcloneapplier):
2003 cg.apply(repo)
2004 cg.apply(repo)
2004 else:
2005 else:
2005 bundle2.applybundle(repo, cg, tr, 'clonebundles', url)
2006 bundle2.applybundle(repo, cg, tr, 'clonebundles', url)
2006 return True
2007 return True
2007 except urlerr.httperror as e:
2008 except urlerr.httperror as e:
2008 ui.warn(_('HTTP error fetching bundle: %s\n') % str(e))
2009 ui.warn(_('HTTP error fetching bundle: %s\n') % str(e))
2009 except urlerr.urlerror as e:
2010 except urlerr.urlerror as e:
2010 ui.warn(_('error fetching bundle: %s\n') % e.reason)
2011 ui.warn(_('error fetching bundle: %s\n') % e.reason)
2011
2012
2012 return False
2013 return False
@@ -1,1270 +1,1270 b''
1 This test file tests the various templates related to obsmarkers.
1 This test file tests the various templates related to obsmarkers.
2
2
3 Global setup
3 Global setup
4 ============
4 ============
5
5
6 $ . $TESTDIR/testlib/obsmarker-common.sh
6 $ . $TESTDIR/testlib/obsmarker-common.sh
7 $ cat >> $HGRCPATH <<EOF
7 $ cat >> $HGRCPATH <<EOF
8 > [ui]
8 > [ui]
9 > interactive = true
9 > interactive = true
10 > [phases]
10 > [phases]
11 > publish=False
11 > publish=False
12 > [experimental]
12 > [experimental]
13 > evolution=all
13 > evolution=all
14 > [alias]
14 > [alias]
15 > tlog = log -G -T '{node|short}\
15 > tlog = log -G -T '{node|short}\
16 > {if(predecessors, "\n Predecessors: {predecessors}")}\
16 > {if(predecessors, "\n Predecessors: {predecessors}")}\
17 > {if(predecessors, "\n semi-colon: {join(predecessors, "; ")}")}\
17 > {if(predecessors, "\n semi-colon: {join(predecessors, "; ")}")}\
18 > {if(predecessors, "\n json: {predecessors|json}")}\
18 > {if(predecessors, "\n json: {predecessors|json}")}\
19 > {if(predecessors, "\n map: {join(predecessors % "{rev}:{node}", " ")}")}\
19 > {if(predecessors, "\n map: {join(predecessors % "{rev}:{node}", " ")}")}\
20 > {if(successorssets, "\n Successors: {successorssets}")}\
20 > {if(successorssets, "\n Successors: {successorssets}")}\
21 > {if(successorssets, "\n multi-line: {join(successorssets, "\n multi-line: ")}")}\
21 > {if(successorssets, "\n multi-line: {join(successorssets, "\n multi-line: ")}")}\
22 > {if(successorssets, "\n json: {successorssets|json}")}\n'
22 > {if(successorssets, "\n json: {successorssets|json}")}\n'
23 > EOF
23 > EOF
24
24
25 Test templates on amended commit
25 Test templates on amended commit
26 ================================
26 ================================
27
27
28 Test setup
28 Test setup
29 ----------
29 ----------
30
30
31 $ hg init $TESTTMP/templates-local-amend
31 $ hg init $TESTTMP/templates-local-amend
32 $ cd $TESTTMP/templates-local-amend
32 $ cd $TESTTMP/templates-local-amend
33 $ mkcommit ROOT
33 $ mkcommit ROOT
34 $ mkcommit A0
34 $ mkcommit A0
35 $ echo 42 >> A0
35 $ echo 42 >> A0
36 $ hg commit --amend -m "A1"
36 $ hg commit --amend -m "A1"
37 $ hg commit --amend -m "A2"
37 $ hg commit --amend -m "A2"
38
38
39 $ hg log --hidden -G
39 $ hg log --hidden -G
40 @ changeset: 4:d004c8f274b9
40 @ changeset: 4:d004c8f274b9
41 | tag: tip
41 | tag: tip
42 | parent: 0:ea207398892e
42 | parent: 0:ea207398892e
43 | user: test
43 | user: test
44 | date: Thu Jan 01 00:00:00 1970 +0000
44 | date: Thu Jan 01 00:00:00 1970 +0000
45 | summary: A2
45 | summary: A2
46 |
46 |
47 | x changeset: 3:a468dc9b3633
47 | x changeset: 3:a468dc9b3633
48 |/ parent: 0:ea207398892e
48 |/ parent: 0:ea207398892e
49 | user: test
49 | user: test
50 | date: Thu Jan 01 00:00:00 1970 +0000
50 | date: Thu Jan 01 00:00:00 1970 +0000
51 | summary: A1
51 | summary: A1
52 |
52 |
53 | x changeset: 2:f137d23bb3e1
53 | x changeset: 2:f137d23bb3e1
54 | | user: test
54 | | user: test
55 | | date: Thu Jan 01 00:00:00 1970 +0000
55 | | date: Thu Jan 01 00:00:00 1970 +0000
56 | | summary: temporary amend commit for 471f378eab4c
56 | | summary: temporary amend commit for 471f378eab4c
57 | |
57 | |
58 | x changeset: 1:471f378eab4c
58 | x changeset: 1:471f378eab4c
59 |/ user: test
59 |/ user: test
60 | date: Thu Jan 01 00:00:00 1970 +0000
60 | date: Thu Jan 01 00:00:00 1970 +0000
61 | summary: A0
61 | summary: A0
62 |
62 |
63 o changeset: 0:ea207398892e
63 o changeset: 0:ea207398892e
64 user: test
64 user: test
65 date: Thu Jan 01 00:00:00 1970 +0000
65 date: Thu Jan 01 00:00:00 1970 +0000
66 summary: ROOT
66 summary: ROOT
67
67
68 Check templates
68 Check templates
69 ---------------
69 ---------------
70 $ hg up 'desc(A0)' --hidden
70 $ hg up 'desc(A0)' --hidden
71 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
71 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
72
72
73 Predecessors template should show current revision as it is the working copy
73 Predecessors template should show current revision as it is the working copy
74 $ hg tlog
74 $ hg tlog
75 o d004c8f274b9
75 o d004c8f274b9
76 | Predecessors: 1:471f378eab4c
76 | Predecessors: 1:471f378eab4c
77 | semi-colon: 1:471f378eab4c
77 | semi-colon: 1:471f378eab4c
78 | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
78 | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
79 | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
79 | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
80 | @ 471f378eab4c
80 | @ 471f378eab4c
81 |/ Successors: 4:d004c8f274b9
81 |/ Successors: 4:d004c8f274b9
82 | multi-line: 4:d004c8f274b9
82 | multi-line: 4:d004c8f274b9
83 | json: [["d004c8f274b9ec480a47a93c10dac5eee63adb78"]]
83 | json: [["d004c8f274b9ec480a47a93c10dac5eee63adb78"]]
84 o ea207398892e
84 o ea207398892e
85
85
86 $ hg up 'desc(A1)' --hidden
86 $ hg up 'desc(A1)' --hidden
87 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
87 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
88
88
89 Predecessors template should show current revision as it is the working copy
89 Predecessors template should show current revision as it is the working copy
90 $ hg tlog
90 $ hg tlog
91 o d004c8f274b9
91 o d004c8f274b9
92 | Predecessors: 3:a468dc9b3633
92 | Predecessors: 3:a468dc9b3633
93 | semi-colon: 3:a468dc9b3633
93 | semi-colon: 3:a468dc9b3633
94 | json: ["a468dc9b36338b14fdb7825f55ce3df4e71517ad"]
94 | json: ["a468dc9b36338b14fdb7825f55ce3df4e71517ad"]
95 | map: 3:a468dc9b36338b14fdb7825f55ce3df4e71517ad
95 | map: 3:a468dc9b36338b14fdb7825f55ce3df4e71517ad
96 | @ a468dc9b3633
96 | @ a468dc9b3633
97 |/ Successors: 4:d004c8f274b9
97 |/ Successors: 4:d004c8f274b9
98 | multi-line: 4:d004c8f274b9
98 | multi-line: 4:d004c8f274b9
99 | json: [["d004c8f274b9ec480a47a93c10dac5eee63adb78"]]
99 | json: [["d004c8f274b9ec480a47a93c10dac5eee63adb78"]]
100 o ea207398892e
100 o ea207398892e
101
101
102 Predecessors template should show all the predecessors as we force their display
102 Predecessors template should show all the predecessors as we force their display
103 with --hidden
103 with --hidden
104 $ hg tlog --hidden
104 $ hg tlog --hidden
105 o d004c8f274b9
105 o d004c8f274b9
106 | Predecessors: 3:a468dc9b3633
106 | Predecessors: 3:a468dc9b3633
107 | semi-colon: 3:a468dc9b3633
107 | semi-colon: 3:a468dc9b3633
108 | json: ["a468dc9b36338b14fdb7825f55ce3df4e71517ad"]
108 | json: ["a468dc9b36338b14fdb7825f55ce3df4e71517ad"]
109 | map: 3:a468dc9b36338b14fdb7825f55ce3df4e71517ad
109 | map: 3:a468dc9b36338b14fdb7825f55ce3df4e71517ad
110 | @ a468dc9b3633
110 | @ a468dc9b3633
111 |/ Predecessors: 1:471f378eab4c
111 |/ Predecessors: 1:471f378eab4c
112 | semi-colon: 1:471f378eab4c
112 | semi-colon: 1:471f378eab4c
113 | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
113 | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
114 | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
114 | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
115 | Successors: 4:d004c8f274b9
115 | Successors: 4:d004c8f274b9
116 | multi-line: 4:d004c8f274b9
116 | multi-line: 4:d004c8f274b9
117 | json: [["d004c8f274b9ec480a47a93c10dac5eee63adb78"]]
117 | json: [["d004c8f274b9ec480a47a93c10dac5eee63adb78"]]
118 | x f137d23bb3e1
118 | x f137d23bb3e1
119 | |
119 | |
120 | x 471f378eab4c
120 | x 471f378eab4c
121 |/ Successors: 3:a468dc9b3633
121 |/ Successors: 3:a468dc9b3633
122 | multi-line: 3:a468dc9b3633
122 | multi-line: 3:a468dc9b3633
123 | json: [["a468dc9b36338b14fdb7825f55ce3df4e71517ad"]]
123 | json: [["a468dc9b36338b14fdb7825f55ce3df4e71517ad"]]
124 o ea207398892e
124 o ea207398892e
125
125
126
126
127 Predecessors template shouldn't show anything as all obsolete commits are not
127 Predecessors template shouldn't show anything as all obsolete commits are not
128 visible.
128 visible.
129 $ hg up 'desc(A2)'
129 $ hg up 'desc(A2)'
130 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
130 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
131 $ hg tlog
131 $ hg tlog
132 @ d004c8f274b9
132 @ d004c8f274b9
133 |
133 |
134 o ea207398892e
134 o ea207398892e
135
135
136 $ hg tlog --hidden
136 $ hg tlog --hidden
137 @ d004c8f274b9
137 @ d004c8f274b9
138 | Predecessors: 3:a468dc9b3633
138 | Predecessors: 3:a468dc9b3633
139 | semi-colon: 3:a468dc9b3633
139 | semi-colon: 3:a468dc9b3633
140 | json: ["a468dc9b36338b14fdb7825f55ce3df4e71517ad"]
140 | json: ["a468dc9b36338b14fdb7825f55ce3df4e71517ad"]
141 | map: 3:a468dc9b36338b14fdb7825f55ce3df4e71517ad
141 | map: 3:a468dc9b36338b14fdb7825f55ce3df4e71517ad
142 | x a468dc9b3633
142 | x a468dc9b3633
143 |/ Predecessors: 1:471f378eab4c
143 |/ Predecessors: 1:471f378eab4c
144 | semi-colon: 1:471f378eab4c
144 | semi-colon: 1:471f378eab4c
145 | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
145 | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
146 | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
146 | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
147 | Successors: 4:d004c8f274b9
147 | Successors: 4:d004c8f274b9
148 | multi-line: 4:d004c8f274b9
148 | multi-line: 4:d004c8f274b9
149 | json: [["d004c8f274b9ec480a47a93c10dac5eee63adb78"]]
149 | json: [["d004c8f274b9ec480a47a93c10dac5eee63adb78"]]
150 | x f137d23bb3e1
150 | x f137d23bb3e1
151 | |
151 | |
152 | x 471f378eab4c
152 | x 471f378eab4c
153 |/ Successors: 3:a468dc9b3633
153 |/ Successors: 3:a468dc9b3633
154 | multi-line: 3:a468dc9b3633
154 | multi-line: 3:a468dc9b3633
155 | json: [["a468dc9b36338b14fdb7825f55ce3df4e71517ad"]]
155 | json: [["a468dc9b36338b14fdb7825f55ce3df4e71517ad"]]
156 o ea207398892e
156 o ea207398892e
157
157
158
158
159 Test templates with split commit
159 Test templates with split commit
160 ================================
160 ================================
161
161
162 $ hg init $TESTTMP/templates-local-split
162 $ hg init $TESTTMP/templates-local-split
163 $ cd $TESTTMP/templates-local-split
163 $ cd $TESTTMP/templates-local-split
164 $ mkcommit ROOT
164 $ mkcommit ROOT
165 $ echo 42 >> a
165 $ echo 42 >> a
166 $ echo 43 >> b
166 $ echo 43 >> b
167 $ hg commit -A -m "A0"
167 $ hg commit -A -m "A0"
168 adding a
168 adding a
169 adding b
169 adding b
170 $ hg log --hidden -G
170 $ hg log --hidden -G
171 @ changeset: 1:471597cad322
171 @ changeset: 1:471597cad322
172 | tag: tip
172 | tag: tip
173 | user: test
173 | user: test
174 | date: Thu Jan 01 00:00:00 1970 +0000
174 | date: Thu Jan 01 00:00:00 1970 +0000
175 | summary: A0
175 | summary: A0
176 |
176 |
177 o changeset: 0:ea207398892e
177 o changeset: 0:ea207398892e
178 user: test
178 user: test
179 date: Thu Jan 01 00:00:00 1970 +0000
179 date: Thu Jan 01 00:00:00 1970 +0000
180 summary: ROOT
180 summary: ROOT
181
181
182 # Simulate a split
182 # Simulate a split
183 $ hg up -r "desc(ROOT)"
183 $ hg up -r "desc(ROOT)"
184 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
184 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
185 $ echo 42 >> a
185 $ echo 42 >> a
186 $ hg commit -A -m "A0"
186 $ hg commit -A -m "A0"
187 adding a
187 adding a
188 created new head
188 created new head
189 $ echo 43 >> b
189 $ echo 43 >> b
190 $ hg commit -A -m "A0"
190 $ hg commit -A -m "A0"
191 adding b
191 adding b
192 $ hg debugobsolete `getid "1"` `getid "2"` `getid "3"`
192 $ hg debugobsolete `getid "1"` `getid "2"` `getid "3"`
193 obsoleted 1 changesets
193 obsoleted 1 changesets
194
194
195 $ hg log --hidden -G
195 $ hg log --hidden -G
196 @ changeset: 3:f257fde29c7a
196 @ changeset: 3:f257fde29c7a
197 | tag: tip
197 | tag: tip
198 | user: test
198 | user: test
199 | date: Thu Jan 01 00:00:00 1970 +0000
199 | date: Thu Jan 01 00:00:00 1970 +0000
200 | summary: A0
200 | summary: A0
201 |
201 |
202 o changeset: 2:337fec4d2edc
202 o changeset: 2:337fec4d2edc
203 | parent: 0:ea207398892e
203 | parent: 0:ea207398892e
204 | user: test
204 | user: test
205 | date: Thu Jan 01 00:00:00 1970 +0000
205 | date: Thu Jan 01 00:00:00 1970 +0000
206 | summary: A0
206 | summary: A0
207 |
207 |
208 | x changeset: 1:471597cad322
208 | x changeset: 1:471597cad322
209 |/ user: test
209 |/ user: test
210 | date: Thu Jan 01 00:00:00 1970 +0000
210 | date: Thu Jan 01 00:00:00 1970 +0000
211 | summary: A0
211 | summary: A0
212 |
212 |
213 o changeset: 0:ea207398892e
213 o changeset: 0:ea207398892e
214 user: test
214 user: test
215 date: Thu Jan 01 00:00:00 1970 +0000
215 date: Thu Jan 01 00:00:00 1970 +0000
216 summary: ROOT
216 summary: ROOT
217
217
218 Check templates
218 Check templates
219 ---------------
219 ---------------
220
220
221 $ hg up 'obsolete()' --hidden
221 $ hg up 'obsolete()' --hidden
222 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
222 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
223
223
224 Predecessors template should show current revision as it is the working copy
224 Predecessors template should show current revision as it is the working copy
225 $ hg tlog
225 $ hg tlog
226 o f257fde29c7a
226 o f257fde29c7a
227 | Predecessors: 1:471597cad322
227 | Predecessors: 1:471597cad322
228 | semi-colon: 1:471597cad322
228 | semi-colon: 1:471597cad322
229 | json: ["471597cad322d1f659bb169751be9133dad92ef3"]
229 | json: ["471597cad322d1f659bb169751be9133dad92ef3"]
230 | map: 1:471597cad322d1f659bb169751be9133dad92ef3
230 | map: 1:471597cad322d1f659bb169751be9133dad92ef3
231 o 337fec4d2edc
231 o 337fec4d2edc
232 | Predecessors: 1:471597cad322
232 | Predecessors: 1:471597cad322
233 | semi-colon: 1:471597cad322
233 | semi-colon: 1:471597cad322
234 | json: ["471597cad322d1f659bb169751be9133dad92ef3"]
234 | json: ["471597cad322d1f659bb169751be9133dad92ef3"]
235 | map: 1:471597cad322d1f659bb169751be9133dad92ef3
235 | map: 1:471597cad322d1f659bb169751be9133dad92ef3
236 | @ 471597cad322
236 | @ 471597cad322
237 |/ Successors: 2:337fec4d2edc 3:f257fde29c7a
237 |/ Successors: 2:337fec4d2edc 3:f257fde29c7a
238 | multi-line: 2:337fec4d2edc 3:f257fde29c7a
238 | multi-line: 2:337fec4d2edc 3:f257fde29c7a
239 | json: [["337fec4d2edcf0e7a467e35f818234bc620068b5", "f257fde29c7a847c9b607f6e958656d0df0fb15c"]]
239 | json: [["337fec4d2edcf0e7a467e35f818234bc620068b5", "f257fde29c7a847c9b607f6e958656d0df0fb15c"]]
240 o ea207398892e
240 o ea207398892e
241
241
242 $ hg up f257fde29c7a
242 $ hg up f257fde29c7a
243 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
243 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
244
244
245 Predecessors template should not show a predecessor as it's not displayed in
245 Predecessors template should not show a predecessor as it's not displayed in
246 the log
246 the log
247 $ hg tlog
247 $ hg tlog
248 @ f257fde29c7a
248 @ f257fde29c7a
249 |
249 |
250 o 337fec4d2edc
250 o 337fec4d2edc
251 |
251 |
252 o ea207398892e
252 o ea207398892e
253
253
254 Predecessors template should show both predecessors as we force their display
254 Predecessors template should show both predecessors as we force their display
255 with --hidden
255 with --hidden
256 $ hg tlog --hidden
256 $ hg tlog --hidden
257 @ f257fde29c7a
257 @ f257fde29c7a
258 | Predecessors: 1:471597cad322
258 | Predecessors: 1:471597cad322
259 | semi-colon: 1:471597cad322
259 | semi-colon: 1:471597cad322
260 | json: ["471597cad322d1f659bb169751be9133dad92ef3"]
260 | json: ["471597cad322d1f659bb169751be9133dad92ef3"]
261 | map: 1:471597cad322d1f659bb169751be9133dad92ef3
261 | map: 1:471597cad322d1f659bb169751be9133dad92ef3
262 o 337fec4d2edc
262 o 337fec4d2edc
263 | Predecessors: 1:471597cad322
263 | Predecessors: 1:471597cad322
264 | semi-colon: 1:471597cad322
264 | semi-colon: 1:471597cad322
265 | json: ["471597cad322d1f659bb169751be9133dad92ef3"]
265 | json: ["471597cad322d1f659bb169751be9133dad92ef3"]
266 | map: 1:471597cad322d1f659bb169751be9133dad92ef3
266 | map: 1:471597cad322d1f659bb169751be9133dad92ef3
267 | x 471597cad322
267 | x 471597cad322
268 |/ Successors: 2:337fec4d2edc 3:f257fde29c7a
268 |/ Successors: 2:337fec4d2edc 3:f257fde29c7a
269 | multi-line: 2:337fec4d2edc 3:f257fde29c7a
269 | multi-line: 2:337fec4d2edc 3:f257fde29c7a
270 | json: [["337fec4d2edcf0e7a467e35f818234bc620068b5", "f257fde29c7a847c9b607f6e958656d0df0fb15c"]]
270 | json: [["337fec4d2edcf0e7a467e35f818234bc620068b5", "f257fde29c7a847c9b607f6e958656d0df0fb15c"]]
271 o ea207398892e
271 o ea207398892e
272
272
273 Test templates with folded commit
273 Test templates with folded commit
274 =================================
274 =================================
275
275
276 Test setup
276 Test setup
277 ----------
277 ----------
278
278
279 $ hg init $TESTTMP/templates-local-fold
279 $ hg init $TESTTMP/templates-local-fold
280 $ cd $TESTTMP/templates-local-fold
280 $ cd $TESTTMP/templates-local-fold
281 $ mkcommit ROOT
281 $ mkcommit ROOT
282 $ mkcommit A0
282 $ mkcommit A0
283 $ mkcommit B0
283 $ mkcommit B0
284 $ hg log --hidden -G
284 $ hg log --hidden -G
285 @ changeset: 2:0dec01379d3b
285 @ changeset: 2:0dec01379d3b
286 | tag: tip
286 | tag: tip
287 | user: test
287 | user: test
288 | date: Thu Jan 01 00:00:00 1970 +0000
288 | date: Thu Jan 01 00:00:00 1970 +0000
289 | summary: B0
289 | summary: B0
290 |
290 |
291 o changeset: 1:471f378eab4c
291 o changeset: 1:471f378eab4c
292 | user: test
292 | user: test
293 | date: Thu Jan 01 00:00:00 1970 +0000
293 | date: Thu Jan 01 00:00:00 1970 +0000
294 | summary: A0
294 | summary: A0
295 |
295 |
296 o changeset: 0:ea207398892e
296 o changeset: 0:ea207398892e
297 user: test
297 user: test
298 date: Thu Jan 01 00:00:00 1970 +0000
298 date: Thu Jan 01 00:00:00 1970 +0000
299 summary: ROOT
299 summary: ROOT
300
300
301 Simulate a fold
301 Simulate a fold
302 $ hg up -r "desc(ROOT)"
302 $ hg up -r "desc(ROOT)"
303 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
303 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
304 $ echo "A0" > A0
304 $ echo "A0" > A0
305 $ echo "B0" > B0
305 $ echo "B0" > B0
306 $ hg commit -A -m "C0"
306 $ hg commit -A -m "C0"
307 adding A0
307 adding A0
308 adding B0
308 adding B0
309 created new head
309 created new head
310 $ hg debugobsolete `getid "desc(A0)"` `getid "desc(C0)"`
310 $ hg debugobsolete `getid "desc(A0)"` `getid "desc(C0)"`
311 obsoleted 1 changesets
311 obsoleted 1 changesets
312 $ hg debugobsolete `getid "desc(B0)"` `getid "desc(C0)"`
312 $ hg debugobsolete `getid "desc(B0)"` `getid "desc(C0)"`
313 obsoleted 1 changesets
313 obsoleted 1 changesets
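
A fold is not recorded as a single marker: it is the two markers just created, one per folded changeset, both pointing at the same successor (C0, eb5a0daa2192).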
314
314
315 $ hg log --hidden -G
315 $ hg log --hidden -G
316 @ changeset: 3:eb5a0daa2192
316 @ changeset: 3:eb5a0daa2192
317 | tag: tip
317 | tag: tip
318 | parent: 0:ea207398892e
318 | parent: 0:ea207398892e
319 | user: test
319 | user: test
320 | date: Thu Jan 01 00:00:00 1970 +0000
320 | date: Thu Jan 01 00:00:00 1970 +0000
321 | summary: C0
321 | summary: C0
322 |
322 |
323 | x changeset: 2:0dec01379d3b
323 | x changeset: 2:0dec01379d3b
324 | | user: test
324 | | user: test
325 | | date: Thu Jan 01 00:00:00 1970 +0000
325 | | date: Thu Jan 01 00:00:00 1970 +0000
326 | | summary: B0
326 | | summary: B0
327 | |
327 | |
328 | x changeset: 1:471f378eab4c
328 | x changeset: 1:471f378eab4c
329 |/ user: test
329 |/ user: test
330 | date: Thu Jan 01 00:00:00 1970 +0000
330 | date: Thu Jan 01 00:00:00 1970 +0000
331 | summary: A0
331 | summary: A0
332 |
332 |
333 o changeset: 0:ea207398892e
333 o changeset: 0:ea207398892e
334 user: test
334 user: test
335 date: Thu Jan 01 00:00:00 1970 +0000
335 date: Thu Jan 01 00:00:00 1970 +0000
336 summary: ROOT
336 summary: ROOT
337
337
338 Check templates
338 Check templates
339 ---------------
339 ---------------
340
340
341 $ hg up 'desc(A0)' --hidden
341 $ hg up 'desc(A0)' --hidden
342 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
342 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
343
343
344 Predecessors template should show current revision as it is the working copy
344 Predecessors template should show current revision as it is the working copy
345 $ hg tlog
345 $ hg tlog
346 o eb5a0daa2192
346 o eb5a0daa2192
347 | Predecessors: 1:471f378eab4c
347 | Predecessors: 1:471f378eab4c
348 | semi-colon: 1:471f378eab4c
348 | semi-colon: 1:471f378eab4c
349 | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
349 | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
350 | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
350 | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
351 | @ 471f378eab4c
351 | @ 471f378eab4c
352 |/ Successors: 3:eb5a0daa2192
352 |/ Successors: 3:eb5a0daa2192
353 | multi-line: 3:eb5a0daa2192
353 | multi-line: 3:eb5a0daa2192
354 | json: [["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]]
354 | json: [["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]]
355 o ea207398892e
355 o ea207398892e
356
356
357 $ hg up 'desc(B0)' --hidden
357 $ hg up 'desc(B0)' --hidden
358 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
358 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
359
359
360 Predecessors template should show both predecessors as they are both
360 Predecessors template should show both predecessors as they are both
361 displayed
361 displayed
362 $ hg tlog
362 $ hg tlog
363 o eb5a0daa2192
363 o eb5a0daa2192
364 | Predecessors: 2:0dec01379d3b 1:471f378eab4c
364 | Predecessors: 2:0dec01379d3b 1:471f378eab4c
365 | semi-colon: 2:0dec01379d3b; 1:471f378eab4c
365 | semi-colon: 2:0dec01379d3b; 1:471f378eab4c
366 | json: ["0dec01379d3be6318c470ead31b1fe7ae7cb53d5", "471f378eab4c5e25f6c77f785b27c936efb22874"]
366 | json: ["0dec01379d3be6318c470ead31b1fe7ae7cb53d5", "471f378eab4c5e25f6c77f785b27c936efb22874"]
367 | map: 2:0dec01379d3be6318c470ead31b1fe7ae7cb53d5 1:471f378eab4c5e25f6c77f785b27c936efb22874
367 | map: 2:0dec01379d3be6318c470ead31b1fe7ae7cb53d5 1:471f378eab4c5e25f6c77f785b27c936efb22874
368 | @ 0dec01379d3b
368 | @ 0dec01379d3b
369 | | Successors: 3:eb5a0daa2192
369 | | Successors: 3:eb5a0daa2192
370 | | multi-line: 3:eb5a0daa2192
370 | | multi-line: 3:eb5a0daa2192
371 | | json: [["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]]
371 | | json: [["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]]
372 | x 471f378eab4c
372 | x 471f378eab4c
373 |/ Successors: 3:eb5a0daa2192
373 |/ Successors: 3:eb5a0daa2192
374 | multi-line: 3:eb5a0daa2192
374 | multi-line: 3:eb5a0daa2192
375 | json: [["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]]
375 | json: [["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]]
376 o ea207398892e
376 o ea207398892e
377
377
378 $ hg up 'desc(C0)'
378 $ hg up 'desc(C0)'
379 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
379 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
380
380
381 Predecessors template should not show predecessors as they are not displayed in
381 Predecessors template should not show predecessors as they are not displayed in
382 the log
382 the log
383 $ hg tlog
383 $ hg tlog
384 @ eb5a0daa2192
384 @ eb5a0daa2192
385 |
385 |
386 o ea207398892e
386 o ea207398892e
387
387
388 Predecessors template should show both predecessors as we force their display
388 Predecessors template should show both predecessors as we force their display
389 with --hidden
389 with --hidden
390 $ hg tlog --hidden
390 $ hg tlog --hidden
391 @ eb5a0daa2192
391 @ eb5a0daa2192
392 | Predecessors: 2:0dec01379d3b 1:471f378eab4c
392 | Predecessors: 2:0dec01379d3b 1:471f378eab4c
393 | semi-colon: 2:0dec01379d3b; 1:471f378eab4c
393 | semi-colon: 2:0dec01379d3b; 1:471f378eab4c
394 | json: ["0dec01379d3be6318c470ead31b1fe7ae7cb53d5", "471f378eab4c5e25f6c77f785b27c936efb22874"]
394 | json: ["0dec01379d3be6318c470ead31b1fe7ae7cb53d5", "471f378eab4c5e25f6c77f785b27c936efb22874"]
395 | map: 2:0dec01379d3be6318c470ead31b1fe7ae7cb53d5 1:471f378eab4c5e25f6c77f785b27c936efb22874
395 | map: 2:0dec01379d3be6318c470ead31b1fe7ae7cb53d5 1:471f378eab4c5e25f6c77f785b27c936efb22874
396 | x 0dec01379d3b
396 | x 0dec01379d3b
397 | | Successors: 3:eb5a0daa2192
397 | | Successors: 3:eb5a0daa2192
398 | | multi-line: 3:eb5a0daa2192
398 | | multi-line: 3:eb5a0daa2192
399 | | json: [["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]]
399 | | json: [["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]]
400 | x 471f378eab4c
400 | x 471f378eab4c
401 |/ Successors: 3:eb5a0daa2192
401 |/ Successors: 3:eb5a0daa2192
402 | multi-line: 3:eb5a0daa2192
402 | multi-line: 3:eb5a0daa2192
403 | json: [["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]]
403 | json: [["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]]
404 o ea207398892e
404 o ea207398892e
405
405
406
406
407 Test templates with divergence
407 Test templates with divergence
408 ==============================
408 ==============================
409
409
410 Test setup
410 Test setup
411 ----------
411 ----------
412
412
413 $ hg init $TESTTMP/templates-local-divergence
413 $ hg init $TESTTMP/templates-local-divergence
414 $ cd $TESTTMP/templates-local-divergence
414 $ cd $TESTTMP/templates-local-divergence
415 $ mkcommit ROOT
415 $ mkcommit ROOT
416 $ mkcommit A0
416 $ mkcommit A0
417 $ hg commit --amend -m "A1"
417 $ hg commit --amend -m "A1"
418 $ hg log --hidden -G
418 $ hg log --hidden -G
419 @ changeset: 2:fdf9bde5129a
419 @ changeset: 2:fdf9bde5129a
420 | tag: tip
420 | tag: tip
421 | parent: 0:ea207398892e
421 | parent: 0:ea207398892e
422 | user: test
422 | user: test
423 | date: Thu Jan 01 00:00:00 1970 +0000
423 | date: Thu Jan 01 00:00:00 1970 +0000
424 | summary: A1
424 | summary: A1
425 |
425 |
426 | x changeset: 1:471f378eab4c
426 | x changeset: 1:471f378eab4c
427 |/ user: test
427 |/ user: test
428 | date: Thu Jan 01 00:00:00 1970 +0000
428 | date: Thu Jan 01 00:00:00 1970 +0000
429 | summary: A0
429 | summary: A0
430 |
430 |
431 o changeset: 0:ea207398892e
431 o changeset: 0:ea207398892e
432 user: test
432 user: test
433 date: Thu Jan 01 00:00:00 1970 +0000
433 date: Thu Jan 01 00:00:00 1970 +0000
434 summary: ROOT
434 summary: ROOT
435
435
436 $ hg update --hidden 'desc(A0)'
436 $ hg update --hidden 'desc(A0)'
437 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
437 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
438 $ hg commit --amend -m "A2"
438 $ hg commit --amend -m "A2"
439 $ hg log --hidden -G
439 $ hg log --hidden -G
440 @ changeset: 3:65b757b745b9
440 @ changeset: 3:65b757b745b9
441 | tag: tip
441 | tag: tip
442 | parent: 0:ea207398892e
442 | parent: 0:ea207398892e
443 | user: test
443 | user: test
444 | date: Thu Jan 01 00:00:00 1970 +0000
444 | date: Thu Jan 01 00:00:00 1970 +0000
445 | instability: divergent
445 | instability: content-divergent
446 | summary: A2
446 | summary: A2
447 |
447 |
448 | o changeset: 2:fdf9bde5129a
448 | o changeset: 2:fdf9bde5129a
449 |/ parent: 0:ea207398892e
449 |/ parent: 0:ea207398892e
450 | user: test
450 | user: test
451 | date: Thu Jan 01 00:00:00 1970 +0000
451 | date: Thu Jan 01 00:00:00 1970 +0000
452 | instability: divergent
452 | instability: content-divergent
453 | summary: A1
453 | summary: A1
454 |
454 |
455 | x changeset: 1:471f378eab4c
455 | x changeset: 1:471f378eab4c
456 |/ user: test
456 |/ user: test
457 | date: Thu Jan 01 00:00:00 1970 +0000
457 | date: Thu Jan 01 00:00:00 1970 +0000
458 | summary: A0
458 | summary: A0
459 |
459 |
460 o changeset: 0:ea207398892e
460 o changeset: 0:ea207398892e
461 user: test
461 user: test
462 date: Thu Jan 01 00:00:00 1970 +0000
462 date: Thu Jan 01 00:00:00 1970 +0000
463 summary: ROOT
463 summary: ROOT
464
464
465 $ hg commit --amend -m 'A3'
465 $ hg commit --amend -m 'A3'
466 $ hg log --hidden -G
466 $ hg log --hidden -G
467 @ changeset: 4:019fadeab383
467 @ changeset: 4:019fadeab383
468 | tag: tip
468 | tag: tip
469 | parent: 0:ea207398892e
469 | parent: 0:ea207398892e
470 | user: test
470 | user: test
471 | date: Thu Jan 01 00:00:00 1970 +0000
471 | date: Thu Jan 01 00:00:00 1970 +0000
472 | instability: divergent
472 | instability: content-divergent
473 | summary: A3
473 | summary: A3
474 |
474 |
475 | x changeset: 3:65b757b745b9
475 | x changeset: 3:65b757b745b9
476 |/ parent: 0:ea207398892e
476 |/ parent: 0:ea207398892e
477 | user: test
477 | user: test
478 | date: Thu Jan 01 00:00:00 1970 +0000
478 | date: Thu Jan 01 00:00:00 1970 +0000
479 | summary: A2
479 | summary: A2
480 |
480 |
481 | o changeset: 2:fdf9bde5129a
481 | o changeset: 2:fdf9bde5129a
482 |/ parent: 0:ea207398892e
482 |/ parent: 0:ea207398892e
483 | user: test
483 | user: test
484 | date: Thu Jan 01 00:00:00 1970 +0000
484 | date: Thu Jan 01 00:00:00 1970 +0000
485 | instability: divergent
485 | instability: content-divergent
486 | summary: A1
486 | summary: A1
487 |
487 |
488 | x changeset: 1:471f378eab4c
488 | x changeset: 1:471f378eab4c
489 |/ user: test
489 |/ user: test
490 | date: Thu Jan 01 00:00:00 1970 +0000
490 | date: Thu Jan 01 00:00:00 1970 +0000
491 | summary: A0
491 | summary: A0
492 |
492 |
493 o changeset: 0:ea207398892e
493 o changeset: 0:ea207398892e
494 user: test
494 user: test
495 date: Thu Jan 01 00:00:00 1970 +0000
495 date: Thu Jan 01 00:00:00 1970 +0000
496 summary: ROOT
496 summary: ROOT
497
497
498
498
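At this point A0 has two surviving successor chains, A0 -> A1 and A0 -> A2 -> A3. Because fdf9bde5129a and 019fadeab383 are competing rewrites of the same predecessor and neither descends from the other, both are reported as content-divergent in the log above.
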
499 Check templates
499 Check templates
500 ---------------
500 ---------------
501
501
502 $ hg up 'desc(A0)' --hidden
502 $ hg up 'desc(A0)' --hidden
503 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
503 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
504
504
505 Predecessors template should show current revision as it is the working copy
505 Predecessors template should show current revision as it is the working copy
506 $ hg tlog
506 $ hg tlog
507 o 019fadeab383
507 o 019fadeab383
508 | Predecessors: 1:471f378eab4c
508 | Predecessors: 1:471f378eab4c
509 | semi-colon: 1:471f378eab4c
509 | semi-colon: 1:471f378eab4c
510 | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
510 | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
511 | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
511 | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
512 | o fdf9bde5129a
512 | o fdf9bde5129a
513 |/ Predecessors: 1:471f378eab4c
513 |/ Predecessors: 1:471f378eab4c
514 | semi-colon: 1:471f378eab4c
514 | semi-colon: 1:471f378eab4c
515 | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
515 | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
516 | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
516 | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
517 | @ 471f378eab4c
517 | @ 471f378eab4c
518 |/ Successors: 2:fdf9bde5129a; 4:019fadeab383
518 |/ Successors: 2:fdf9bde5129a; 4:019fadeab383
519 | multi-line: 2:fdf9bde5129a
519 | multi-line: 2:fdf9bde5129a
520 | multi-line: 4:019fadeab383
520 | multi-line: 4:019fadeab383
521 | json: [["fdf9bde5129a28d4548fadd3f62b265cdd3b7a2e"], ["019fadeab383f6699fa83ad7bdb4d82ed2c0e5ab"]]
521 | json: [["fdf9bde5129a28d4548fadd3f62b265cdd3b7a2e"], ["019fadeab383f6699fa83ad7bdb4d82ed2c0e5ab"]]
522 o ea207398892e
522 o ea207398892e
523
523
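Note the two notations above: successors that belong to different rewrite attempts (divergence) are separated by a semi-colon and appear as separate inner lists in the json output, while the several pieces of a single successors set (as in a split) are space-separated inside one list.
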
524 $ hg up 'desc(A1)'
524 $ hg up 'desc(A1)'
525 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
525 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
526
526
527 Predecessors template should not show predecessors as they are not displayed in
527 Predecessors template should not show predecessors as they are not displayed in
528 the log
528 the log
529 $ hg tlog
529 $ hg tlog
530 o 019fadeab383
530 o 019fadeab383
531 |
531 |
532 | @ fdf9bde5129a
532 | @ fdf9bde5129a
533 |/
533 |/
534 o ea207398892e
534 o ea207398892e
535
535
536 Predecessors template should show the predecessors as we force their display with
536 Predecessors template should show the predecessors as we force their display with
537 --hidden
537 --hidden
538 $ hg tlog --hidden
538 $ hg tlog --hidden
539 o 019fadeab383
539 o 019fadeab383
540 | Predecessors: 3:65b757b745b9
540 | Predecessors: 3:65b757b745b9
541 | semi-colon: 3:65b757b745b9
541 | semi-colon: 3:65b757b745b9
542 | json: ["65b757b745b935093c87a2bccd877521cccffcbd"]
542 | json: ["65b757b745b935093c87a2bccd877521cccffcbd"]
543 | map: 3:65b757b745b935093c87a2bccd877521cccffcbd
543 | map: 3:65b757b745b935093c87a2bccd877521cccffcbd
544 | x 65b757b745b9
544 | x 65b757b745b9
545 |/ Predecessors: 1:471f378eab4c
545 |/ Predecessors: 1:471f378eab4c
546 | semi-colon: 1:471f378eab4c
546 | semi-colon: 1:471f378eab4c
547 | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
547 | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
548 | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
548 | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
549 | Successors: 4:019fadeab383
549 | Successors: 4:019fadeab383
550 | multi-line: 4:019fadeab383
550 | multi-line: 4:019fadeab383
551 | json: [["019fadeab383f6699fa83ad7bdb4d82ed2c0e5ab"]]
551 | json: [["019fadeab383f6699fa83ad7bdb4d82ed2c0e5ab"]]
552 | @ fdf9bde5129a
552 | @ fdf9bde5129a
553 |/ Predecessors: 1:471f378eab4c
553 |/ Predecessors: 1:471f378eab4c
554 | semi-colon: 1:471f378eab4c
554 | semi-colon: 1:471f378eab4c
555 | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
555 | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
556 | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
556 | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
557 | x 471f378eab4c
557 | x 471f378eab4c
558 |/ Successors: 2:fdf9bde5129a; 3:65b757b745b9
558 |/ Successors: 2:fdf9bde5129a; 3:65b757b745b9
559 | multi-line: 2:fdf9bde5129a
559 | multi-line: 2:fdf9bde5129a
560 | multi-line: 3:65b757b745b9
560 | multi-line: 3:65b757b745b9
561 | json: [["fdf9bde5129a28d4548fadd3f62b265cdd3b7a2e"], ["65b757b745b935093c87a2bccd877521cccffcbd"]]
561 | json: [["fdf9bde5129a28d4548fadd3f62b265cdd3b7a2e"], ["65b757b745b935093c87a2bccd877521cccffcbd"]]
562 o ea207398892e
562 o ea207398892e
563
563
564
564
565 Test templates with amended + folded commit
565 Test templates with amended + folded commit
566 ===========================================
566 ===========================================
567
567
568 Test setup
568 Test setup
569 ----------
569 ----------
570
570
571 $ hg init $TESTTMP/templates-local-amend-fold
571 $ hg init $TESTTMP/templates-local-amend-fold
572 $ cd $TESTTMP/templates-local-amend-fold
572 $ cd $TESTTMP/templates-local-amend-fold
573 $ mkcommit ROOT
573 $ mkcommit ROOT
574 $ mkcommit A0
574 $ mkcommit A0
575 $ mkcommit B0
575 $ mkcommit B0
576 $ hg commit --amend -m "B1"
576 $ hg commit --amend -m "B1"
577 $ hg log --hidden -G
577 $ hg log --hidden -G
578 @ changeset: 3:b7ea6d14e664
578 @ changeset: 3:b7ea6d14e664
579 | tag: tip
579 | tag: tip
580 | parent: 1:471f378eab4c
580 | parent: 1:471f378eab4c
581 | user: test
581 | user: test
582 | date: Thu Jan 01 00:00:00 1970 +0000
582 | date: Thu Jan 01 00:00:00 1970 +0000
583 | summary: B1
583 | summary: B1
584 |
584 |
585 | x changeset: 2:0dec01379d3b
585 | x changeset: 2:0dec01379d3b
586 |/ user: test
586 |/ user: test
587 | date: Thu Jan 01 00:00:00 1970 +0000
587 | date: Thu Jan 01 00:00:00 1970 +0000
588 | summary: B0
588 | summary: B0
589 |
589 |
590 o changeset: 1:471f378eab4c
590 o changeset: 1:471f378eab4c
591 | user: test
591 | user: test
592 | date: Thu Jan 01 00:00:00 1970 +0000
592 | date: Thu Jan 01 00:00:00 1970 +0000
593 | summary: A0
593 | summary: A0
594 |
594 |
595 o changeset: 0:ea207398892e
595 o changeset: 0:ea207398892e
596 user: test
596 user: test
597 date: Thu Jan 01 00:00:00 1970 +0000
597 date: Thu Jan 01 00:00:00 1970 +0000
598 summary: ROOT
598 summary: ROOT
599
599
600 Simulate a fold
600 Simulate a fold
601 $ hg up -r "desc(ROOT)"
601 $ hg up -r "desc(ROOT)"
602 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
602 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
603 $ echo "A0" > A0
603 $ echo "A0" > A0
604 $ echo "B0" > B0
604 $ echo "B0" > B0
605 $ hg commit -A -m "C0"
605 $ hg commit -A -m "C0"
606 adding A0
606 adding A0
607 adding B0
607 adding B0
608 created new head
608 created new head
609 $ hg debugobsolete `getid "desc(A0)"` `getid "desc(C0)"`
609 $ hg debugobsolete `getid "desc(A0)"` `getid "desc(C0)"`
610 obsoleted 1 changesets
610 obsoleted 1 changesets
611 $ hg debugobsolete `getid "desc(B1)"` `getid "desc(C0)"`
611 $ hg debugobsolete `getid "desc(B1)"` `getid "desc(C0)"`
612 obsoleted 1 changesets
612 obsoleted 1 changesets
613
613
614 $ hg log --hidden -G
614 $ hg log --hidden -G
615 @ changeset: 4:eb5a0daa2192
615 @ changeset: 4:eb5a0daa2192
616 | tag: tip
616 | tag: tip
617 | parent: 0:ea207398892e
617 | parent: 0:ea207398892e
618 | user: test
618 | user: test
619 | date: Thu Jan 01 00:00:00 1970 +0000
619 | date: Thu Jan 01 00:00:00 1970 +0000
620 | summary: C0
620 | summary: C0
621 |
621 |
622 | x changeset: 3:b7ea6d14e664
622 | x changeset: 3:b7ea6d14e664
623 | | parent: 1:471f378eab4c
623 | | parent: 1:471f378eab4c
624 | | user: test
624 | | user: test
625 | | date: Thu Jan 01 00:00:00 1970 +0000
625 | | date: Thu Jan 01 00:00:00 1970 +0000
626 | | summary: B1
626 | | summary: B1
627 | |
627 | |
628 | | x changeset: 2:0dec01379d3b
628 | | x changeset: 2:0dec01379d3b
629 | |/ user: test
629 | |/ user: test
630 | | date: Thu Jan 01 00:00:00 1970 +0000
630 | | date: Thu Jan 01 00:00:00 1970 +0000
631 | | summary: B0
631 | | summary: B0
632 | |
632 | |
633 | x changeset: 1:471f378eab4c
633 | x changeset: 1:471f378eab4c
634 |/ user: test
634 |/ user: test
635 | date: Thu Jan 01 00:00:00 1970 +0000
635 | date: Thu Jan 01 00:00:00 1970 +0000
636 | summary: A0
636 | summary: A0
637 |
637 |
638 o changeset: 0:ea207398892e
638 o changeset: 0:ea207398892e
639 user: test
639 user: test
640 date: Thu Jan 01 00:00:00 1970 +0000
640 date: Thu Jan 01 00:00:00 1970 +0000
641 summary: ROOT
641 summary: ROOT
642
642
643 Check templates
643 Check templates
644 ---------------
644 ---------------
645
645
646 $ hg up 'desc(A0)' --hidden
646 $ hg up 'desc(A0)' --hidden
647 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
647 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
648
648
649 Predecessors template should show current revision as it is the working copy
649 Predecessors template should show current revision as it is the working copy
650 $ hg tlog
650 $ hg tlog
651 o eb5a0daa2192
651 o eb5a0daa2192
652 | Predecessors: 1:471f378eab4c
652 | Predecessors: 1:471f378eab4c
653 | semi-colon: 1:471f378eab4c
653 | semi-colon: 1:471f378eab4c
654 | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
654 | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
655 | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
655 | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
656 | @ 471f378eab4c
656 | @ 471f378eab4c
657 |/ Successors: 4:eb5a0daa2192
657 |/ Successors: 4:eb5a0daa2192
658 | multi-line: 4:eb5a0daa2192
658 | multi-line: 4:eb5a0daa2192
659 | json: [["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]]
659 | json: [["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]]
660 o ea207398892e
660 o ea207398892e
661
661
662 $ hg up 'desc(B0)' --hidden
662 $ hg up 'desc(B0)' --hidden
663 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
663 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
664
664
665 Predecessors template should show both predecessors as they are visible
665 Predecessors template should show both predecessors as they are visible
666 $ hg tlog
666 $ hg tlog
667 o eb5a0daa2192
667 o eb5a0daa2192
668 | Predecessors: 2:0dec01379d3b 1:471f378eab4c
668 | Predecessors: 2:0dec01379d3b 1:471f378eab4c
669 | semi-colon: 2:0dec01379d3b; 1:471f378eab4c
669 | semi-colon: 2:0dec01379d3b; 1:471f378eab4c
670 | json: ["0dec01379d3be6318c470ead31b1fe7ae7cb53d5", "471f378eab4c5e25f6c77f785b27c936efb22874"]
670 | json: ["0dec01379d3be6318c470ead31b1fe7ae7cb53d5", "471f378eab4c5e25f6c77f785b27c936efb22874"]
671 | map: 2:0dec01379d3be6318c470ead31b1fe7ae7cb53d5 1:471f378eab4c5e25f6c77f785b27c936efb22874
671 | map: 2:0dec01379d3be6318c470ead31b1fe7ae7cb53d5 1:471f378eab4c5e25f6c77f785b27c936efb22874
672 | @ 0dec01379d3b
672 | @ 0dec01379d3b
673 | | Successors: 4:eb5a0daa2192
673 | | Successors: 4:eb5a0daa2192
674 | | multi-line: 4:eb5a0daa2192
674 | | multi-line: 4:eb5a0daa2192
675 | | json: [["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]]
675 | | json: [["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]]
676 | x 471f378eab4c
676 | x 471f378eab4c
677 |/ Successors: 4:eb5a0daa2192
677 |/ Successors: 4:eb5a0daa2192
678 | multi-line: 4:eb5a0daa2192
678 | multi-line: 4:eb5a0daa2192
679 | json: [["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]]
679 | json: [["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]]
680 o ea207398892e
680 o ea207398892e
681
681
682 $ hg up 'desc(B1)' --hidden
682 $ hg up 'desc(B1)' --hidden
683 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
683 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
684
684
685 Predecessors template should show both predecessors as they are visible
685 Predecessors template should show both predecessors as they are visible
686 $ hg tlog
686 $ hg tlog
687 o eb5a0daa2192
687 o eb5a0daa2192
688 | Predecessors: 1:471f378eab4c 3:b7ea6d14e664
688 | Predecessors: 1:471f378eab4c 3:b7ea6d14e664
689 | semi-colon: 1:471f378eab4c; 3:b7ea6d14e664
689 | semi-colon: 1:471f378eab4c; 3:b7ea6d14e664
690 | json: ["471f378eab4c5e25f6c77f785b27c936efb22874", "b7ea6d14e664bdc8922221f7992631b50da3fb07"]
690 | json: ["471f378eab4c5e25f6c77f785b27c936efb22874", "b7ea6d14e664bdc8922221f7992631b50da3fb07"]
691 | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874 3:b7ea6d14e664bdc8922221f7992631b50da3fb07
691 | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874 3:b7ea6d14e664bdc8922221f7992631b50da3fb07
692 | @ b7ea6d14e664
692 | @ b7ea6d14e664
693 | | Successors: 4:eb5a0daa2192
693 | | Successors: 4:eb5a0daa2192
694 | | multi-line: 4:eb5a0daa2192
694 | | multi-line: 4:eb5a0daa2192
695 | | json: [["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]]
695 | | json: [["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]]
696 | x 471f378eab4c
696 | x 471f378eab4c
697 |/ Successors: 4:eb5a0daa2192
697 |/ Successors: 4:eb5a0daa2192
698 | multi-line: 4:eb5a0daa2192
698 | multi-line: 4:eb5a0daa2192
699 | json: [["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]]
699 | json: [["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]]
700 o ea207398892e
700 o ea207398892e
701
701
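Notice how the Predecessors line of eb5a0daa2192 resolves to the closest predecessors that are currently displayed: with B0 checked out it reported 2:0dec01379d3b, while with B1 checked out it reports 3:b7ea6d14e664 instead.
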
702 $ hg up 'desc(C0)'
702 $ hg up 'desc(C0)'
703 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
703 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
704
704
705 Predecessors template should show no predecessors as they are both not visible
705 Predecessors template should show no predecessors as they are both not visible
706 $ hg tlog
706 $ hg tlog
707 @ eb5a0daa2192
707 @ eb5a0daa2192
708 |
708 |
709 o ea207398892e
709 o ea207398892e
710
710
711 Predecessors template should show all predecessors as we force their display
711 Predecessors template should show all predecessors as we force their display
712 with --hidden
712 with --hidden
713 $ hg tlog --hidden
713 $ hg tlog --hidden
714 @ eb5a0daa2192
714 @ eb5a0daa2192
715 | Predecessors: 1:471f378eab4c 3:b7ea6d14e664
715 | Predecessors: 1:471f378eab4c 3:b7ea6d14e664
716 | semi-colon: 1:471f378eab4c; 3:b7ea6d14e664
716 | semi-colon: 1:471f378eab4c; 3:b7ea6d14e664
717 | json: ["471f378eab4c5e25f6c77f785b27c936efb22874", "b7ea6d14e664bdc8922221f7992631b50da3fb07"]
717 | json: ["471f378eab4c5e25f6c77f785b27c936efb22874", "b7ea6d14e664bdc8922221f7992631b50da3fb07"]
718 | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874 3:b7ea6d14e664bdc8922221f7992631b50da3fb07
718 | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874 3:b7ea6d14e664bdc8922221f7992631b50da3fb07
719 | x b7ea6d14e664
719 | x b7ea6d14e664
720 | | Predecessors: 2:0dec01379d3b
720 | | Predecessors: 2:0dec01379d3b
721 | | semi-colon: 2:0dec01379d3b
721 | | semi-colon: 2:0dec01379d3b
722 | | json: ["0dec01379d3be6318c470ead31b1fe7ae7cb53d5"]
722 | | json: ["0dec01379d3be6318c470ead31b1fe7ae7cb53d5"]
723 | | map: 2:0dec01379d3be6318c470ead31b1fe7ae7cb53d5
723 | | map: 2:0dec01379d3be6318c470ead31b1fe7ae7cb53d5
724 | | Successors: 4:eb5a0daa2192
724 | | Successors: 4:eb5a0daa2192
725 | | multi-line: 4:eb5a0daa2192
725 | | multi-line: 4:eb5a0daa2192
726 | | json: [["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]]
726 | | json: [["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]]
727 | | x 0dec01379d3b
727 | | x 0dec01379d3b
728 | |/ Successors: 3:b7ea6d14e664
728 | |/ Successors: 3:b7ea6d14e664
729 | | multi-line: 3:b7ea6d14e664
729 | | multi-line: 3:b7ea6d14e664
730 | | json: [["b7ea6d14e664bdc8922221f7992631b50da3fb07"]]
730 | | json: [["b7ea6d14e664bdc8922221f7992631b50da3fb07"]]
731 | x 471f378eab4c
731 | x 471f378eab4c
732 |/ Successors: 4:eb5a0daa2192
732 |/ Successors: 4:eb5a0daa2192
733 | multi-line: 4:eb5a0daa2192
733 | multi-line: 4:eb5a0daa2192
734 | json: [["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]]
734 | json: [["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]]
735 o ea207398892e
735 o ea207398892e
736
736
737
737
738 Test template with pushed and pulled obs markers
738 Test template with pushed and pulled obs markers
739 ================================================
739 ================================================
740
740
741 Test setup
741 Test setup
742 ----------
742 ----------
743
743
744 $ hg init $TESTTMP/templates-local-remote-markers-1
744 $ hg init $TESTTMP/templates-local-remote-markers-1
745 $ cd $TESTTMP/templates-local-remote-markers-1
745 $ cd $TESTTMP/templates-local-remote-markers-1
746 $ mkcommit ROOT
746 $ mkcommit ROOT
747 $ mkcommit A0
747 $ mkcommit A0
748 $ hg clone $TESTTMP/templates-local-remote-markers-1 $TESTTMP/templates-local-remote-markers-2
748 $ hg clone $TESTTMP/templates-local-remote-markers-1 $TESTTMP/templates-local-remote-markers-2
749 updating to branch default
749 updating to branch default
750 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
750 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
751 $ cd $TESTTMP/templates-local-remote-markers-2
751 $ cd $TESTTMP/templates-local-remote-markers-2
752 $ hg log --hidden -G
752 $ hg log --hidden -G
753 @ changeset: 1:471f378eab4c
753 @ changeset: 1:471f378eab4c
754 | tag: tip
754 | tag: tip
755 | user: test
755 | user: test
756 | date: Thu Jan 01 00:00:00 1970 +0000
756 | date: Thu Jan 01 00:00:00 1970 +0000
757 | summary: A0
757 | summary: A0
758 |
758 |
759 o changeset: 0:ea207398892e
759 o changeset: 0:ea207398892e
760 user: test
760 user: test
761 date: Thu Jan 01 00:00:00 1970 +0000
761 date: Thu Jan 01 00:00:00 1970 +0000
762 summary: ROOT
762 summary: ROOT
763
763
764 $ cd $TESTTMP/templates-local-remote-markers-1
764 $ cd $TESTTMP/templates-local-remote-markers-1
765 $ hg commit --amend -m "A1"
765 $ hg commit --amend -m "A1"
766 $ hg commit --amend -m "A2"
766 $ hg commit --amend -m "A2"
767 $ hg log --hidden -G
767 $ hg log --hidden -G
768 @ changeset: 3:7a230b46bf61
768 @ changeset: 3:7a230b46bf61
769 | tag: tip
769 | tag: tip
770 | parent: 0:ea207398892e
770 | parent: 0:ea207398892e
771 | user: test
771 | user: test
772 | date: Thu Jan 01 00:00:00 1970 +0000
772 | date: Thu Jan 01 00:00:00 1970 +0000
773 | summary: A2
773 | summary: A2
774 |
774 |
775 | x changeset: 2:fdf9bde5129a
775 | x changeset: 2:fdf9bde5129a
776 |/ parent: 0:ea207398892e
776 |/ parent: 0:ea207398892e
777 | user: test
777 | user: test
778 | date: Thu Jan 01 00:00:00 1970 +0000
778 | date: Thu Jan 01 00:00:00 1970 +0000
779 | summary: A1
779 | summary: A1
780 |
780 |
781 | x changeset: 1:471f378eab4c
781 | x changeset: 1:471f378eab4c
782 |/ user: test
782 |/ user: test
783 | date: Thu Jan 01 00:00:00 1970 +0000
783 | date: Thu Jan 01 00:00:00 1970 +0000
784 | summary: A0
784 | summary: A0
785 |
785 |
786 o changeset: 0:ea207398892e
786 o changeset: 0:ea207398892e
787 user: test
787 user: test
788 date: Thu Jan 01 00:00:00 1970 +0000
788 date: Thu Jan 01 00:00:00 1970 +0000
789 summary: ROOT
789 summary: ROOT
790
790
791 $ cd $TESTTMP/templates-local-remote-markers-2
791 $ cd $TESTTMP/templates-local-remote-markers-2
792 $ hg pull
792 $ hg pull
793 pulling from $TESTTMP/templates-local-remote-markers-1 (glob)
793 pulling from $TESTTMP/templates-local-remote-markers-1 (glob)
794 searching for changes
794 searching for changes
795 adding changesets
795 adding changesets
796 adding manifests
796 adding manifests
797 adding file changes
797 adding file changes
798 added 1 changesets with 0 changes to 1 files (+1 heads)
798 added 1 changesets with 0 changes to 1 files (+1 heads)
799 2 new obsolescence markers
799 2 new obsolescence markers
800 obsoleted 1 changesets
800 obsoleted 1 changesets
801 (run 'hg heads' to see heads, 'hg merge' to merge)
801 (run 'hg heads' to see heads, 'hg merge' to merge)
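
Obsolescence markers are exchanged together with changesets: the pull above brought one changeset (A2) plus the two markers, so this clone now knows the whole A0 -> A1 -> A2 history even though A1 itself was never pulled.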
802 $ hg log --hidden -G
802 $ hg log --hidden -G
803 o changeset: 2:7a230b46bf61
803 o changeset: 2:7a230b46bf61
804 | tag: tip
804 | tag: tip
805 | parent: 0:ea207398892e
805 | parent: 0:ea207398892e
806 | user: test
806 | user: test
807 | date: Thu Jan 01 00:00:00 1970 +0000
807 | date: Thu Jan 01 00:00:00 1970 +0000
808 | summary: A2
808 | summary: A2
809 |
809 |
810 | @ changeset: 1:471f378eab4c
810 | @ changeset: 1:471f378eab4c
811 |/ user: test
811 |/ user: test
812 | date: Thu Jan 01 00:00:00 1970 +0000
812 | date: Thu Jan 01 00:00:00 1970 +0000
813 | summary: A0
813 | summary: A0
814 |
814 |
815 o changeset: 0:ea207398892e
815 o changeset: 0:ea207398892e
816 user: test
816 user: test
817 date: Thu Jan 01 00:00:00 1970 +0000
817 date: Thu Jan 01 00:00:00 1970 +0000
818 summary: ROOT
818 summary: ROOT
819
819
820
820
821 $ hg debugobsolete
821 $ hg debugobsolete
822 471f378eab4c5e25f6c77f785b27c936efb22874 fdf9bde5129a28d4548fadd3f62b265cdd3b7a2e 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
822 471f378eab4c5e25f6c77f785b27c936efb22874 fdf9bde5129a28d4548fadd3f62b265cdd3b7a2e 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
823 fdf9bde5129a28d4548fadd3f62b265cdd3b7a2e 7a230b46bf61e50b30308c6cfd7bd1269ef54702 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
823 fdf9bde5129a28d4548fadd3f62b265cdd3b7a2e 7a230b46bf61e50b30308c6cfd7bd1269ef54702 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
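
Each line of the listing reads, left to right:

    <predecessor node> <successor node(s)> <flags> (<date>) {<metadata>}

with flags 0 meaning that no special flag is set.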
824
824
825 Check templates
825 Check templates
826 ---------------
826 ---------------
827
827
828 Predecessors template should show current revision as it is the working copy
828 Predecessors template should show current revision as it is the working copy
829 $ hg tlog
829 $ hg tlog
830 o 7a230b46bf61
830 o 7a230b46bf61
831 | Predecessors: 1:471f378eab4c
831 | Predecessors: 1:471f378eab4c
832 | semi-colon: 1:471f378eab4c
832 | semi-colon: 1:471f378eab4c
833 | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
833 | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
834 | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
834 | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
835 | @ 471f378eab4c
835 | @ 471f378eab4c
836 |/ Successors: 2:7a230b46bf61
836 |/ Successors: 2:7a230b46bf61
837 | multi-line: 2:7a230b46bf61
837 | multi-line: 2:7a230b46bf61
838 | json: [["7a230b46bf61e50b30308c6cfd7bd1269ef54702"]]
838 | json: [["7a230b46bf61e50b30308c6cfd7bd1269ef54702"]]
839 o ea207398892e
839 o ea207398892e
840
840
841 $ hg up 'desc(A2)'
841 $ hg up 'desc(A2)'
842 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
842 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
843
843
844 Predecessors template should show no predecessors as they are not visible
844 Predecessors template should show no predecessors as they are not visible
845 $ hg tlog
845 $ hg tlog
846 @ 7a230b46bf61
846 @ 7a230b46bf61
847 |
847 |
848 o ea207398892e
848 o ea207398892e
849
849
850 Predecessors template should show all predecessors as we force their display
850 Predecessors template should show all predecessors as we force their display
851 with --hidden
851 with --hidden
852 $ hg tlog --hidden
852 $ hg tlog --hidden
853 @ 7a230b46bf61
853 @ 7a230b46bf61
854 | Predecessors: 1:471f378eab4c
854 | Predecessors: 1:471f378eab4c
855 | semi-colon: 1:471f378eab4c
855 | semi-colon: 1:471f378eab4c
856 | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
856 | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
857 | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
857 | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
858 | x 471f378eab4c
858 | x 471f378eab4c
859 |/ Successors: 2:7a230b46bf61
859 |/ Successors: 2:7a230b46bf61
860 | multi-line: 2:7a230b46bf61
860 | multi-line: 2:7a230b46bf61
861 | json: [["7a230b46bf61e50b30308c6cfd7bd1269ef54702"]]
861 | json: [["7a230b46bf61e50b30308c6cfd7bd1269ef54702"]]
862 o ea207398892e
862 o ea207398892e
863
863
864
864
865 Test template with obsmarkers cycle
865 Test template with obsmarkers cycle
866 ===================================
866 ===================================
867
867
868 Test setup
868 Test setup
869 ----------
869 ----------
870
870
871 $ hg init $TESTTMP/templates-local-cycle
871 $ hg init $TESTTMP/templates-local-cycle
872 $ cd $TESTTMP/templates-local-cycle
872 $ cd $TESTTMP/templates-local-cycle
873 $ mkcommit ROOT
873 $ mkcommit ROOT
874 $ mkcommit A0
874 $ mkcommit A0
875 $ mkcommit B0
875 $ mkcommit B0
876 $ hg up -r 0
876 $ hg up -r 0
877 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
877 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
878 $ mkcommit C0
878 $ mkcommit C0
879 created new head
879 created new head
880
880
881 Create the cycle
881 Create the cycle
882
882
883 $ hg debugobsolete `getid "desc(A0)"` `getid "desc(B0)"`
883 $ hg debugobsolete `getid "desc(A0)"` `getid "desc(B0)"`
884 obsoleted 1 changesets
884 obsoleted 1 changesets
885 $ hg debugobsolete `getid "desc(B0)"` `getid "desc(C0)"`
885 $ hg debugobsolete `getid "desc(B0)"` `getid "desc(C0)"`
886 obsoleted 1 changesets
886 obsoleted 1 changesets
887 $ hg debugobsolete `getid "desc(B0)"` `getid "desc(A0)"`
887 $ hg debugobsolete `getid "desc(B0)"` `getid "desc(A0)"`
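
The markers now form a cycle between A0 and B0 (A0 -> B0 and B0 -> A0), with B0 also recorded as rewritten into C0. The last call prints no 'obsoleted' message because A0 was already obsolete when that marker was added.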
888
888
889 Check templates
889 Check templates
890 ---------------
890 ---------------
891
891
892 $ hg tlog
892 $ hg tlog
893 @ f897c6137566
893 @ f897c6137566
894 |
894 |
895 o ea207398892e
895 o ea207398892e
896
896
897
897
898 $ hg up -r "desc(B0)" --hidden
898 $ hg up -r "desc(B0)" --hidden
899 2 files updated, 0 files merged, 1 files removed, 0 files unresolved
899 2 files updated, 0 files merged, 1 files removed, 0 files unresolved
900 $ hg tlog
900 $ hg tlog
901 o f897c6137566
901 o f897c6137566
902 | Predecessors: 2:0dec01379d3b
902 | Predecessors: 2:0dec01379d3b
903 | semi-colon: 2:0dec01379d3b
903 | semi-colon: 2:0dec01379d3b
904 | json: ["0dec01379d3be6318c470ead31b1fe7ae7cb53d5"]
904 | json: ["0dec01379d3be6318c470ead31b1fe7ae7cb53d5"]
905 | map: 2:0dec01379d3be6318c470ead31b1fe7ae7cb53d5
905 | map: 2:0dec01379d3be6318c470ead31b1fe7ae7cb53d5
906 | @ 0dec01379d3b
906 | @ 0dec01379d3b
907 | | Predecessors: 1:471f378eab4c
907 | | Predecessors: 1:471f378eab4c
908 | | semi-colon: 1:471f378eab4c
908 | | semi-colon: 1:471f378eab4c
909 | | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
909 | | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
910 | | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
910 | | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
911 | | Successors: 3:f897c6137566; 1:471f378eab4c
911 | | Successors: 3:f897c6137566; 1:471f378eab4c
912 | | multi-line: 3:f897c6137566
912 | | multi-line: 3:f897c6137566
913 | | multi-line: 1:471f378eab4c
913 | | multi-line: 1:471f378eab4c
914 | | json: [["f897c6137566320b081514b4c7227ecc3d384b39"], ["471f378eab4c5e25f6c77f785b27c936efb22874"]]
914 | | json: [["f897c6137566320b081514b4c7227ecc3d384b39"], ["471f378eab4c5e25f6c77f785b27c936efb22874"]]
915 | x 471f378eab4c
915 | x 471f378eab4c
916 |/ Predecessors: 2:0dec01379d3b
916 |/ Predecessors: 2:0dec01379d3b
917 | semi-colon: 2:0dec01379d3b
917 | semi-colon: 2:0dec01379d3b
918 | json: ["0dec01379d3be6318c470ead31b1fe7ae7cb53d5"]
918 | json: ["0dec01379d3be6318c470ead31b1fe7ae7cb53d5"]
919 | map: 2:0dec01379d3be6318c470ead31b1fe7ae7cb53d5
919 | map: 2:0dec01379d3be6318c470ead31b1fe7ae7cb53d5
920 | Successors: 2:0dec01379d3b
920 | Successors: 2:0dec01379d3b
921 | multi-line: 2:0dec01379d3b
921 | multi-line: 2:0dec01379d3b
922 | json: [["0dec01379d3be6318c470ead31b1fe7ae7cb53d5"]]
922 | json: [["0dec01379d3be6318c470ead31b1fe7ae7cb53d5"]]
923 o ea207398892e
923 o ea207398892e
924
924
925
925
926 $ hg up -r "desc(A0)" --hidden
926 $ hg up -r "desc(A0)" --hidden
927 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
927 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
928 $ hg tlog
928 $ hg tlog
929 o f897c6137566
929 o f897c6137566
930 | Predecessors: 1:471f378eab4c
930 | Predecessors: 1:471f378eab4c
931 | semi-colon: 1:471f378eab4c
931 | semi-colon: 1:471f378eab4c
932 | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
932 | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
933 | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
933 | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
934 | @ 471f378eab4c
934 | @ 471f378eab4c
935 |/
935 |/
936 o ea207398892e
936 o ea207398892e
937
937
938
938
939 $ hg up -r "desc(ROOT)" --hidden
939 $ hg up -r "desc(ROOT)" --hidden
940 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
940 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
941 $ hg tlog
941 $ hg tlog
942 o f897c6137566
942 o f897c6137566
943 |
943 |
944 @ ea207398892e
944 @ ea207398892e
945
945
946
946
947 $ hg tlog --hidden
947 $ hg tlog --hidden
948 o f897c6137566
948 o f897c6137566
949 | Predecessors: 2:0dec01379d3b
949 | Predecessors: 2:0dec01379d3b
950 | semi-colon: 2:0dec01379d3b
950 | semi-colon: 2:0dec01379d3b
951 | json: ["0dec01379d3be6318c470ead31b1fe7ae7cb53d5"]
951 | json: ["0dec01379d3be6318c470ead31b1fe7ae7cb53d5"]
952 | map: 2:0dec01379d3be6318c470ead31b1fe7ae7cb53d5
952 | map: 2:0dec01379d3be6318c470ead31b1fe7ae7cb53d5
953 | x 0dec01379d3b
953 | x 0dec01379d3b
954 | | Predecessors: 1:471f378eab4c
954 | | Predecessors: 1:471f378eab4c
955 | | semi-colon: 1:471f378eab4c
955 | | semi-colon: 1:471f378eab4c
956 | | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
956 | | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
957 | | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
957 | | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
958 | | Successors: 3:f897c6137566; 1:471f378eab4c
958 | | Successors: 3:f897c6137566; 1:471f378eab4c
959 | | multi-line: 3:f897c6137566
959 | | multi-line: 3:f897c6137566
960 | | multi-line: 1:471f378eab4c
960 | | multi-line: 1:471f378eab4c
961 | | json: [["f897c6137566320b081514b4c7227ecc3d384b39"], ["471f378eab4c5e25f6c77f785b27c936efb22874"]]
961 | | json: [["f897c6137566320b081514b4c7227ecc3d384b39"], ["471f378eab4c5e25f6c77f785b27c936efb22874"]]
962 | x 471f378eab4c
962 | x 471f378eab4c
963 |/ Predecessors: 2:0dec01379d3b
963 |/ Predecessors: 2:0dec01379d3b
964 | semi-colon: 2:0dec01379d3b
964 | semi-colon: 2:0dec01379d3b
965 | json: ["0dec01379d3be6318c470ead31b1fe7ae7cb53d5"]
965 | json: ["0dec01379d3be6318c470ead31b1fe7ae7cb53d5"]
966 | map: 2:0dec01379d3be6318c470ead31b1fe7ae7cb53d5
966 | map: 2:0dec01379d3be6318c470ead31b1fe7ae7cb53d5
967 | Successors: 2:0dec01379d3b
967 | Successors: 2:0dec01379d3b
968 | multi-line: 2:0dec01379d3b
968 | multi-line: 2:0dec01379d3b
969 | json: [["0dec01379d3be6318c470ead31b1fe7ae7cb53d5"]]
969 | json: [["0dec01379d3be6318c470ead31b1fe7ae7cb53d5"]]
970 @ ea207398892e
970 @ ea207398892e
971
971
972 Test template with split + divergence with cycles
972 Test template with split + divergence with cycles
973 =================================================
973 =================================================
974
974
975 $ hg log -G
975 $ hg log -G
976 o changeset: 3:f897c6137566
976 o changeset: 3:f897c6137566
977 | tag: tip
977 | tag: tip
978 | parent: 0:ea207398892e
978 | parent: 0:ea207398892e
979 | user: test
979 | user: test
980 | date: Thu Jan 01 00:00:00 1970 +0000
980 | date: Thu Jan 01 00:00:00 1970 +0000
981 | summary: C0
981 | summary: C0
982 |
982 |
983 @ changeset: 0:ea207398892e
983 @ changeset: 0:ea207398892e
984 user: test
984 user: test
985 date: Thu Jan 01 00:00:00 1970 +0000
985 date: Thu Jan 01 00:00:00 1970 +0000
986 summary: ROOT
986 summary: ROOT
987
987
988 $ hg up
988 $ hg up
989 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
989 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
990
990
991 Create a commit with three files
991 Create a commit with three files
992 $ touch A B C
992 $ touch A B C
993 $ hg commit -A -m "Add A,B,C" A B C
993 $ hg commit -A -m "Add A,B,C" A B C
994
994
995 Split it
995 Split it
996 $ hg up 3
996 $ hg up 3
997 0 files updated, 0 files merged, 3 files removed, 0 files unresolved
997 0 files updated, 0 files merged, 3 files removed, 0 files unresolved
998 $ touch A
998 $ touch A
999 $ hg commit -A -m "Add A,B,C" A
999 $ hg commit -A -m "Add A,B,C" A
1000 created new head
1000 created new head
1001
1001
1002 $ touch B
1002 $ touch B
1003 $ hg commit -A -m "Add A,B,C" B
1003 $ hg commit -A -m "Add A,B,C" B
1004
1004
1005 $ touch C
1005 $ touch C
1006 $ hg commit -A -m "Add A,B,C" C
1006 $ hg commit -A -m "Add A,B,C" C
1007
1007
1008 $ hg log -G
1008 $ hg log -G
1009 @ changeset: 7:ba2ed02b0c9a
1009 @ changeset: 7:ba2ed02b0c9a
1010 | tag: tip
1010 | tag: tip
1011 | user: test
1011 | user: test
1012 | date: Thu Jan 01 00:00:00 1970 +0000
1012 | date: Thu Jan 01 00:00:00 1970 +0000
1013 | summary: Add A,B,C
1013 | summary: Add A,B,C
1014 |
1014 |
1015 o changeset: 6:4a004186e638
1015 o changeset: 6:4a004186e638
1016 | user: test
1016 | user: test
1017 | date: Thu Jan 01 00:00:00 1970 +0000
1017 | date: Thu Jan 01 00:00:00 1970 +0000
1018 | summary: Add A,B,C
1018 | summary: Add A,B,C
1019 |
1019 |
1020 o changeset: 5:dd800401bd8c
1020 o changeset: 5:dd800401bd8c
1021 | parent: 3:f897c6137566
1021 | parent: 3:f897c6137566
1022 | user: test
1022 | user: test
1023 | date: Thu Jan 01 00:00:00 1970 +0000
1023 | date: Thu Jan 01 00:00:00 1970 +0000
1024 | summary: Add A,B,C
1024 | summary: Add A,B,C
1025 |
1025 |
1026 | o changeset: 4:9bd10a0775e4
1026 | o changeset: 4:9bd10a0775e4
1027 |/ user: test
1027 |/ user: test
1028 | date: Thu Jan 01 00:00:00 1970 +0000
1028 | date: Thu Jan 01 00:00:00 1970 +0000
1029 | summary: Add A,B,C
1029 | summary: Add A,B,C
1030 |
1030 |
1031 o changeset: 3:f897c6137566
1031 o changeset: 3:f897c6137566
1032 | parent: 0:ea207398892e
1032 | parent: 0:ea207398892e
1033 | user: test
1033 | user: test
1034 | date: Thu Jan 01 00:00:00 1970 +0000
1034 | date: Thu Jan 01 00:00:00 1970 +0000
1035 | summary: C0
1035 | summary: C0
1036 |
1036 |
1037 o changeset: 0:ea207398892e
1037 o changeset: 0:ea207398892e
1038 user: test
1038 user: test
1039 date: Thu Jan 01 00:00:00 1970 +0000
1039 date: Thu Jan 01 00:00:00 1970 +0000
1040 summary: ROOT
1040 summary: ROOT
1041
1041
1042 $ hg debugobsolete `getid "4"` `getid "5"` `getid "6"` `getid "7"`
1042 $ hg debugobsolete `getid "4"` `getid "5"` `getid "6"` `getid "7"`
1043 obsoleted 1 changesets
1043 obsoleted 1 changesets
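
A single marker with one predecessor (revision 4) and three successors (5, 6 and 7) is how a split is recorded; this is why the successors of 9bd10a0775e4 later appear as one space-separated group rather than as semi-colon separated alternatives.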
1044 $ hg log -G
1044 $ hg log -G
1045 @ changeset: 7:ba2ed02b0c9a
1045 @ changeset: 7:ba2ed02b0c9a
1046 | tag: tip
1046 | tag: tip
1047 | user: test
1047 | user: test
1048 | date: Thu Jan 01 00:00:00 1970 +0000
1048 | date: Thu Jan 01 00:00:00 1970 +0000
1049 | summary: Add A,B,C
1049 | summary: Add A,B,C
1050 |
1050 |
1051 o changeset: 6:4a004186e638
1051 o changeset: 6:4a004186e638
1052 | user: test
1052 | user: test
1053 | date: Thu Jan 01 00:00:00 1970 +0000
1053 | date: Thu Jan 01 00:00:00 1970 +0000
1054 | summary: Add A,B,C
1054 | summary: Add A,B,C
1055 |
1055 |
1056 o changeset: 5:dd800401bd8c
1056 o changeset: 5:dd800401bd8c
1057 | parent: 3:f897c6137566
1057 | parent: 3:f897c6137566
1058 | user: test
1058 | user: test
1059 | date: Thu Jan 01 00:00:00 1970 +0000
1059 | date: Thu Jan 01 00:00:00 1970 +0000
1060 | summary: Add A,B,C
1060 | summary: Add A,B,C
1061 |
1061 |
1062 o changeset: 3:f897c6137566
1062 o changeset: 3:f897c6137566
1063 | parent: 0:ea207398892e
1063 | parent: 0:ea207398892e
1064 | user: test
1064 | user: test
1065 | date: Thu Jan 01 00:00:00 1970 +0000
1065 | date: Thu Jan 01 00:00:00 1970 +0000
1066 | summary: C0
1066 | summary: C0
1067 |
1067 |
1068 o changeset: 0:ea207398892e
1068 o changeset: 0:ea207398892e
1069 user: test
1069 user: test
1070 date: Thu Jan 01 00:00:00 1970 +0000
1070 date: Thu Jan 01 00:00:00 1970 +0000
1071 summary: ROOT
1071 summary: ROOT
1072
1072
1073 Diverge one of the split commits
1073 Diverge one of the split commits
1074
1074
1075 $ hg up 6
1075 $ hg up 6
1076 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
1076 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
1077 $ hg commit --amend -m "Add only B"
1077 $ hg commit --amend -m "Add only B"
1078
1078
1079 $ hg up 6 --hidden
1079 $ hg up 6 --hidden
1080 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
1080 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
1081 $ hg commit --amend -m "Add B only"
1081 $ hg commit --amend -m "Add B only"
1082
1082
1083 $ hg log -G
1083 $ hg log -G
1084 @ changeset: 9:0b997eb7ceee
1084 @ changeset: 9:0b997eb7ceee
1085 | tag: tip
1085 | tag: tip
1086 | parent: 5:dd800401bd8c
1086 | parent: 5:dd800401bd8c
1087 | user: test
1087 | user: test
1088 | date: Thu Jan 01 00:00:00 1970 +0000
1088 | date: Thu Jan 01 00:00:00 1970 +0000
1089 | instability: divergent
1089 | instability: content-divergent
1090 | summary: Add B only
1090 | summary: Add B only
1091 |
1091 |
1092 | o changeset: 8:b18bc8331526
1092 | o changeset: 8:b18bc8331526
1093 |/ parent: 5:dd800401bd8c
1093 |/ parent: 5:dd800401bd8c
1094 | user: test
1094 | user: test
1095 | date: Thu Jan 01 00:00:00 1970 +0000
1095 | date: Thu Jan 01 00:00:00 1970 +0000
1096 | instability: divergent
1096 | instability: content-divergent
1097 | summary: Add only B
1097 | summary: Add only B
1098 |
1098 |
1099 | o changeset: 7:ba2ed02b0c9a
1099 | o changeset: 7:ba2ed02b0c9a
1100 | | user: test
1100 | | user: test
1101 | | date: Thu Jan 01 00:00:00 1970 +0000
1101 | | date: Thu Jan 01 00:00:00 1970 +0000
1102 | | instability: orphan, divergent
1102 | | instability: orphan, content-divergent
1103 | | summary: Add A,B,C
1103 | | summary: Add A,B,C
1104 | |
1104 | |
1105 | x changeset: 6:4a004186e638
1105 | x changeset: 6:4a004186e638
1106 |/ user: test
1106 |/ user: test
1107 | date: Thu Jan 01 00:00:00 1970 +0000
1107 | date: Thu Jan 01 00:00:00 1970 +0000
1108 | summary: Add A,B,C
1108 | summary: Add A,B,C
1109 |
1109 |
1110 o changeset: 5:dd800401bd8c
1110 o changeset: 5:dd800401bd8c
1111 | parent: 3:f897c6137566
1111 | parent: 3:f897c6137566
1112 | user: test
1112 | user: test
1113 | date: Thu Jan 01 00:00:00 1970 +0000
1113 | date: Thu Jan 01 00:00:00 1970 +0000
1114 | instability: divergent
1114 | instability: content-divergent
1115 | summary: Add A,B,C
1115 | summary: Add A,B,C
1116 |
1116 |
1117 o changeset: 3:f897c6137566
1117 o changeset: 3:f897c6137566
1118 | parent: 0:ea207398892e
1118 | parent: 0:ea207398892e
1119 | user: test
1119 | user: test
1120 | date: Thu Jan 01 00:00:00 1970 +0000
1120 | date: Thu Jan 01 00:00:00 1970 +0000
1121 | summary: C0
1121 | summary: C0
1122 |
1122 |
1123 o changeset: 0:ea207398892e
1123 o changeset: 0:ea207398892e
1124 user: test
1124 user: test
1125 date: Thu Jan 01 00:00:00 1970 +0000
1125 date: Thu Jan 01 00:00:00 1970 +0000
1126 summary: ROOT
1126 summary: ROOT
1127
1127
1128
1128
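ba2ed02b0c9a is now an orphan because its parent 4a004186e638 has become obsolete, and -- like dd800401bd8c, b18bc8331526 and 0b997eb7ceee -- it is also content-divergent, since amending 4a004186e638 twice left two competing successor sets for the original split commit.
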
1129 Check templates
1129 Check templates
1130 ---------------
1130 ---------------
1131
1131
1132 $ hg tlog
1132 $ hg tlog
1133 @ 0b997eb7ceee
1133 @ 0b997eb7ceee
1134 | Predecessors: 6:4a004186e638
1134 | Predecessors: 6:4a004186e638
1135 | semi-colon: 6:4a004186e638
1135 | semi-colon: 6:4a004186e638
1136 | json: ["4a004186e63889f20cb16434fcbd72220bd1eace"]
1136 | json: ["4a004186e63889f20cb16434fcbd72220bd1eace"]
1137 | map: 6:4a004186e63889f20cb16434fcbd72220bd1eace
1137 | map: 6:4a004186e63889f20cb16434fcbd72220bd1eace
1138 | o b18bc8331526
1138 | o b18bc8331526
1139 |/ Predecessors: 6:4a004186e638
1139 |/ Predecessors: 6:4a004186e638
1140 | semi-colon: 6:4a004186e638
1140 | semi-colon: 6:4a004186e638
1141 | json: ["4a004186e63889f20cb16434fcbd72220bd1eace"]
1141 | json: ["4a004186e63889f20cb16434fcbd72220bd1eace"]
1142 | map: 6:4a004186e63889f20cb16434fcbd72220bd1eace
1142 | map: 6:4a004186e63889f20cb16434fcbd72220bd1eace
1143 | o ba2ed02b0c9a
1143 | o ba2ed02b0c9a
1144 | |
1144 | |
1145 | x 4a004186e638
1145 | x 4a004186e638
1146 |/ Successors: 8:b18bc8331526; 9:0b997eb7ceee
1146 |/ Successors: 8:b18bc8331526; 9:0b997eb7ceee
1147 | multi-line: 8:b18bc8331526
1147 | multi-line: 8:b18bc8331526
1148 | multi-line: 9:0b997eb7ceee
1148 | multi-line: 9:0b997eb7ceee
1149 | json: [["b18bc8331526a22cbb1801022bd1555bf291c48b"], ["0b997eb7ceeee06200a02f8aab185979092d514e"]]
1149 | json: [["b18bc8331526a22cbb1801022bd1555bf291c48b"], ["0b997eb7ceeee06200a02f8aab185979092d514e"]]
1150 o dd800401bd8c
1150 o dd800401bd8c
1151 |
1151 |
1152 o f897c6137566
1152 o f897c6137566
1153 |
1153 |
1154 o ea207398892e
1154 o ea207398892e
1155
1155
1156 $ hg tlog --hidden
1156 $ hg tlog --hidden
1157 @ 0b997eb7ceee
1157 @ 0b997eb7ceee
1158 | Predecessors: 6:4a004186e638
1158 | Predecessors: 6:4a004186e638
1159 | semi-colon: 6:4a004186e638
1159 | semi-colon: 6:4a004186e638
1160 | json: ["4a004186e63889f20cb16434fcbd72220bd1eace"]
1160 | json: ["4a004186e63889f20cb16434fcbd72220bd1eace"]
1161 | map: 6:4a004186e63889f20cb16434fcbd72220bd1eace
1161 | map: 6:4a004186e63889f20cb16434fcbd72220bd1eace
1162 | o b18bc8331526
1162 | o b18bc8331526
1163 |/ Predecessors: 6:4a004186e638
1163 |/ Predecessors: 6:4a004186e638
1164 | semi-colon: 6:4a004186e638
1164 | semi-colon: 6:4a004186e638
1165 | json: ["4a004186e63889f20cb16434fcbd72220bd1eace"]
1165 | json: ["4a004186e63889f20cb16434fcbd72220bd1eace"]
1166 | map: 6:4a004186e63889f20cb16434fcbd72220bd1eace
1166 | map: 6:4a004186e63889f20cb16434fcbd72220bd1eace
1167 | o ba2ed02b0c9a
1167 | o ba2ed02b0c9a
1168 | | Predecessors: 4:9bd10a0775e4
1168 | | Predecessors: 4:9bd10a0775e4
1169 | | semi-colon: 4:9bd10a0775e4
1169 | | semi-colon: 4:9bd10a0775e4
1170 | | json: ["9bd10a0775e478708cada5f176ec6de654359ce7"]
1170 | | json: ["9bd10a0775e478708cada5f176ec6de654359ce7"]
1171 | | map: 4:9bd10a0775e478708cada5f176ec6de654359ce7
1171 | | map: 4:9bd10a0775e478708cada5f176ec6de654359ce7
1172 | x 4a004186e638
1172 | x 4a004186e638
1173 |/ Predecessors: 4:9bd10a0775e4
1173 |/ Predecessors: 4:9bd10a0775e4
1174 | semi-colon: 4:9bd10a0775e4
1174 | semi-colon: 4:9bd10a0775e4
1175 | json: ["9bd10a0775e478708cada5f176ec6de654359ce7"]
1175 | json: ["9bd10a0775e478708cada5f176ec6de654359ce7"]
1176 | map: 4:9bd10a0775e478708cada5f176ec6de654359ce7
1176 | map: 4:9bd10a0775e478708cada5f176ec6de654359ce7
1177 | Successors: 8:b18bc8331526; 9:0b997eb7ceee
1177 | Successors: 8:b18bc8331526; 9:0b997eb7ceee
1178 | multi-line: 8:b18bc8331526
1178 | multi-line: 8:b18bc8331526
1179 | multi-line: 9:0b997eb7ceee
1179 | multi-line: 9:0b997eb7ceee
1180 | json: [["b18bc8331526a22cbb1801022bd1555bf291c48b"], ["0b997eb7ceeee06200a02f8aab185979092d514e"]]
1180 | json: [["b18bc8331526a22cbb1801022bd1555bf291c48b"], ["0b997eb7ceeee06200a02f8aab185979092d514e"]]
1181 o dd800401bd8c
1181 o dd800401bd8c
1182 | Predecessors: 4:9bd10a0775e4
1182 | Predecessors: 4:9bd10a0775e4
1183 | semi-colon: 4:9bd10a0775e4
1183 | semi-colon: 4:9bd10a0775e4
1184 | json: ["9bd10a0775e478708cada5f176ec6de654359ce7"]
1184 | json: ["9bd10a0775e478708cada5f176ec6de654359ce7"]
1185 | map: 4:9bd10a0775e478708cada5f176ec6de654359ce7
1185 | map: 4:9bd10a0775e478708cada5f176ec6de654359ce7
1186 | x 9bd10a0775e4
1186 | x 9bd10a0775e4
1187 |/ Successors: 5:dd800401bd8c 6:4a004186e638 7:ba2ed02b0c9a
1187 |/ Successors: 5:dd800401bd8c 6:4a004186e638 7:ba2ed02b0c9a
1188 | multi-line: 5:dd800401bd8c 6:4a004186e638 7:ba2ed02b0c9a
1188 | multi-line: 5:dd800401bd8c 6:4a004186e638 7:ba2ed02b0c9a
1189 | json: [["dd800401bd8c79d815329277739e433e883f784e", "4a004186e63889f20cb16434fcbd72220bd1eace", "ba2ed02b0c9a56b9fdbc4e79c7e57866984d8a1f"]]
1189 | json: [["dd800401bd8c79d815329277739e433e883f784e", "4a004186e63889f20cb16434fcbd72220bd1eace", "ba2ed02b0c9a56b9fdbc4e79c7e57866984d8a1f"]]
1190 o f897c6137566
1190 o f897c6137566
1191 | Predecessors: 2:0dec01379d3b
1191 | Predecessors: 2:0dec01379d3b
1192 | semi-colon: 2:0dec01379d3b
1192 | semi-colon: 2:0dec01379d3b
1193 | json: ["0dec01379d3be6318c470ead31b1fe7ae7cb53d5"]
1193 | json: ["0dec01379d3be6318c470ead31b1fe7ae7cb53d5"]
1194 | map: 2:0dec01379d3be6318c470ead31b1fe7ae7cb53d5
1194 | map: 2:0dec01379d3be6318c470ead31b1fe7ae7cb53d5
1195 | x 0dec01379d3b
1195 | x 0dec01379d3b
1196 | | Predecessors: 1:471f378eab4c
1196 | | Predecessors: 1:471f378eab4c
1197 | | semi-colon: 1:471f378eab4c
1197 | | semi-colon: 1:471f378eab4c
1198 | | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
1198 | | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
1199 | | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
1199 | | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
1200 | | Successors: 3:f897c6137566; 1:471f378eab4c
1200 | | Successors: 3:f897c6137566; 1:471f378eab4c
1201 | | multi-line: 3:f897c6137566
1201 | | multi-line: 3:f897c6137566
1202 | | multi-line: 1:471f378eab4c
1202 | | multi-line: 1:471f378eab4c
1203 | | json: [["f897c6137566320b081514b4c7227ecc3d384b39"], ["471f378eab4c5e25f6c77f785b27c936efb22874"]]
1203 | | json: [["f897c6137566320b081514b4c7227ecc3d384b39"], ["471f378eab4c5e25f6c77f785b27c936efb22874"]]
1204 | x 471f378eab4c
1204 | x 471f378eab4c
1205 |/ Predecessors: 2:0dec01379d3b
1205 |/ Predecessors: 2:0dec01379d3b
1206 | semi-colon: 2:0dec01379d3b
1206 | semi-colon: 2:0dec01379d3b
1207 | json: ["0dec01379d3be6318c470ead31b1fe7ae7cb53d5"]
1207 | json: ["0dec01379d3be6318c470ead31b1fe7ae7cb53d5"]
1208 | map: 2:0dec01379d3be6318c470ead31b1fe7ae7cb53d5
1208 | map: 2:0dec01379d3be6318c470ead31b1fe7ae7cb53d5
1209 | Successors: 2:0dec01379d3b
1209 | Successors: 2:0dec01379d3b
1210 | multi-line: 2:0dec01379d3b
1210 | multi-line: 2:0dec01379d3b
1211 | json: [["0dec01379d3be6318c470ead31b1fe7ae7cb53d5"]]
1211 | json: [["0dec01379d3be6318c470ead31b1fe7ae7cb53d5"]]
1212 o ea207398892e
1212 o ea207398892e
1213
1213
1214 $ hg up --hidden 4
1214 $ hg up --hidden 4
1215 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1215 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1216 $ hg rebase -r 7 -d 8 --config extensions.rebase=
1216 $ hg rebase -r 7 -d 8 --config extensions.rebase=
1217 rebasing 7:ba2ed02b0c9a "Add A,B,C"
1217 rebasing 7:ba2ed02b0c9a "Add A,B,C"
1218 $ hg tlog
1218 $ hg tlog
1219 o eceed8f98ffc
1219 o eceed8f98ffc
1220 | Predecessors: 4:9bd10a0775e4
1220 | Predecessors: 4:9bd10a0775e4
1221 | semi-colon: 4:9bd10a0775e4
1221 | semi-colon: 4:9bd10a0775e4
1222 | json: ["9bd10a0775e478708cada5f176ec6de654359ce7"]
1222 | json: ["9bd10a0775e478708cada5f176ec6de654359ce7"]
1223 | map: 4:9bd10a0775e478708cada5f176ec6de654359ce7
1223 | map: 4:9bd10a0775e478708cada5f176ec6de654359ce7
1224 | o 0b997eb7ceee
1224 | o 0b997eb7ceee
1225 | | Predecessors: 4:9bd10a0775e4
1225 | | Predecessors: 4:9bd10a0775e4
1226 | | semi-colon: 4:9bd10a0775e4
1226 | | semi-colon: 4:9bd10a0775e4
1227 | | json: ["9bd10a0775e478708cada5f176ec6de654359ce7"]
1227 | | json: ["9bd10a0775e478708cada5f176ec6de654359ce7"]
1228 | | map: 4:9bd10a0775e478708cada5f176ec6de654359ce7
1228 | | map: 4:9bd10a0775e478708cada5f176ec6de654359ce7
1229 o | b18bc8331526
1229 o | b18bc8331526
1230 |/ Predecessors: 4:9bd10a0775e4
1230 |/ Predecessors: 4:9bd10a0775e4
1231 | semi-colon: 4:9bd10a0775e4
1231 | semi-colon: 4:9bd10a0775e4
1232 | json: ["9bd10a0775e478708cada5f176ec6de654359ce7"]
1232 | json: ["9bd10a0775e478708cada5f176ec6de654359ce7"]
1233 | map: 4:9bd10a0775e478708cada5f176ec6de654359ce7
1233 | map: 4:9bd10a0775e478708cada5f176ec6de654359ce7
1234 o dd800401bd8c
1234 o dd800401bd8c
1235 | Predecessors: 4:9bd10a0775e4
1235 | Predecessors: 4:9bd10a0775e4
1236 | semi-colon: 4:9bd10a0775e4
1236 | semi-colon: 4:9bd10a0775e4
1237 | json: ["9bd10a0775e478708cada5f176ec6de654359ce7"]
1237 | json: ["9bd10a0775e478708cada5f176ec6de654359ce7"]
1238 | map: 4:9bd10a0775e478708cada5f176ec6de654359ce7
1238 | map: 4:9bd10a0775e478708cada5f176ec6de654359ce7
1239 | @ 9bd10a0775e4
1239 | @ 9bd10a0775e4
1240 |/ Successors: 5:dd800401bd8c 9:0b997eb7ceee 10:eceed8f98ffc; 5:dd800401bd8c 8:b18bc8331526 10:eceed8f98ffc
1240 |/ Successors: 5:dd800401bd8c 9:0b997eb7ceee 10:eceed8f98ffc; 5:dd800401bd8c 8:b18bc8331526 10:eceed8f98ffc
1241 | multi-line: 5:dd800401bd8c 9:0b997eb7ceee 10:eceed8f98ffc
1241 | multi-line: 5:dd800401bd8c 9:0b997eb7ceee 10:eceed8f98ffc
1242 | multi-line: 5:dd800401bd8c 8:b18bc8331526 10:eceed8f98ffc
1242 | multi-line: 5:dd800401bd8c 8:b18bc8331526 10:eceed8f98ffc
1243 | json: [["dd800401bd8c79d815329277739e433e883f784e", "0b997eb7ceeee06200a02f8aab185979092d514e", "eceed8f98ffc4186032e29a6542ab98888ebf68d"], ["dd800401bd8c79d815329277739e433e883f784e", "b18bc8331526a22cbb1801022bd1555bf291c48b", "eceed8f98ffc4186032e29a6542ab98888ebf68d"]]
1243 | json: [["dd800401bd8c79d815329277739e433e883f784e", "0b997eb7ceeee06200a02f8aab185979092d514e", "eceed8f98ffc4186032e29a6542ab98888ebf68d"], ["dd800401bd8c79d815329277739e433e883f784e", "b18bc8331526a22cbb1801022bd1555bf291c48b", "eceed8f98ffc4186032e29a6542ab98888ebf68d"]]
1244 o f897c6137566
1244 o f897c6137566
1245 |
1245 |
1246 o ea207398892e
1246 o ea207398892e
1247
1247
1248 Test templates with pruned commits
1248 Test templates with pruned commits
1249 ==================================
1249 ==================================
1250
1250
1251 Test setup
1251 Test setup
1252 ----------
1252 ----------
1253
1253
1254 $ hg init $TESTTMP/templates-local-prune
1254 $ hg init $TESTTMP/templates-local-prune
1255 $ cd $TESTTMP/templates-local-prune
1255 $ cd $TESTTMP/templates-local-prune
1256 $ mkcommit ROOT
1256 $ mkcommit ROOT
1257 $ mkcommit A0
1257 $ mkcommit A0
1258 $ hg debugobsolete --record-parent `getid "."`
1258 $ hg debugobsolete --record-parent `getid "."`
1259 obsoleted 1 changesets
1259 obsoleted 1 changesets
1260
1260
1261 Check output
1261 Check output
1262 ------------
1262 ------------
1263
1263
1264 $ hg up "desc(A0)" --hidden
1264 $ hg up "desc(A0)" --hidden
1265 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
1265 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
1266 $ hg tlog
1266 $ hg tlog
1267 @ 471f378eab4c
1267 @ 471f378eab4c
1268 |
1268 |
1269 o ea207398892e
1269 o ea207398892e
1270
1270
@@ -1,685 +1,685 b''
1 Test file dedicated to testing the divergent troubles from obsolete changesets.
1 Test file dedicated to testing the divergent troubles from obsolete changesets.
2
2
3 This is the most complex trouble by far, so we isolate it in a dedicated
3 This is the most complex trouble by far, so we isolate it in a dedicated
4 file.
4 file.
5
5
6 Enable obsolete
6 Enable obsolete
7
7
8 $ cat >> $HGRCPATH << EOF
8 $ cat >> $HGRCPATH << EOF
9 > [ui]
9 > [ui]
10 > logtemplate = {rev}:{node|short} {desc}\n
10 > logtemplate = {rev}:{node|short} {desc}\n
11 > [experimental]
11 > [experimental]
12 > evolution=createmarkers
12 > evolution=createmarkers
13 > [extensions]
13 > [extensions]
14 > drawdag=$TESTDIR/drawdag.py
14 > drawdag=$TESTDIR/drawdag.py
15 > [alias]
15 > [alias]
16 > debugobsolete = debugobsolete -d '0 0'
16 > debugobsolete = debugobsolete -d '0 0'
17 > [phases]
17 > [phases]
18 > publish=False
18 > publish=False
19 > EOF
19 > EOF
20
20
21
21
22 $ mkcommit() {
22 $ mkcommit() {
23 > echo "$1" > "$1"
23 > echo "$1" > "$1"
24 > hg add "$1"
24 > hg add "$1"
25 > hg ci -m "$1"
25 > hg ci -m "$1"
26 > }
26 > }
27 $ getid() {
27 $ getid() {
28 > hg log --hidden -r "desc('$1')" -T '{node}\n'
28 > hg log --hidden -r "desc('$1')" -T '{node}\n'
29 > }
29 > }
30
30
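A minimal usage sketch of the two helpers above, assuming changesets described "A_0" and "A_1" exist (as in the reference repo created just below): getid turns a description into the full node hash that hg debugobsolete expects.

  $ hg debugobsolete `getid A_0` `getid A_1`   # record: A_0 was rewritten into A_1
  $ hg debugobsolete                           # list every marker recorded so far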
31 setup repo
31 setup repo
32
32
33 $ hg init reference
33 $ hg init reference
34 $ cd reference
34 $ cd reference
35 $ mkcommit base
35 $ mkcommit base
36 $ mkcommit A_0
36 $ mkcommit A_0
37 $ hg up 0
37 $ hg up 0
38 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
38 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
39 $ mkcommit A_1
39 $ mkcommit A_1
40 created new head
40 created new head
41 $ hg up 0
41 $ hg up 0
42 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
42 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
43 $ mkcommit A_2
43 $ mkcommit A_2
44 created new head
44 created new head
45 $ hg up 0
45 $ hg up 0
46 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
46 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
47 $ cd ..
47 $ cd ..
48
48
49
49
50 $ newcase() {
50 $ newcase() {
51 > hg clone -u 0 -q reference $1
51 > hg clone -u 0 -q reference $1
52 > cd $1
52 > cd $1
53 > }
53 > }
54
54
55 direct divergence
55 direct divergence
56 -----------------
56 -----------------
57
57
58 A_0 has two direct and divergent successors A_1 and A_2
58 A_0 has two direct and divergent successors A_1 and A_2
59
59
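In marker terms, the situation below is created by giving the same predecessor (A_0) two competing successors (A_1 and A_2). A minimal sketch of how that state is usually diagnosed in a live repository, using only commands exercised in this file:

  $ hg debugobsolete                              # two markers share the predecessor A_0
  $ hg log -r 'divergent()'                       # both competing successors are reported
  $ hg debugsuccessorssets --hidden 'desc(A_0)'   # A_0 resolves to two successors sets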
60 $ newcase direct
60 $ newcase direct
61 $ hg debugobsolete `getid A_0` `getid A_1`
61 $ hg debugobsolete `getid A_0` `getid A_1`
62 obsoleted 1 changesets
62 obsoleted 1 changesets
63 $ hg debugobsolete `getid A_0` `getid A_2`
63 $ hg debugobsolete `getid A_0` `getid A_2`
64 $ hg log -G --hidden
64 $ hg log -G --hidden
65 o 3:392fd25390da A_2
65 o 3:392fd25390da A_2
66 |
66 |
67 | o 2:82623d38b9ba A_1
67 | o 2:82623d38b9ba A_1
68 |/
68 |/
69 | x 1:007dc284c1f8 A_0
69 | x 1:007dc284c1f8 A_0
70 |/
70 |/
71 @ 0:d20a80d4def3 base
71 @ 0:d20a80d4def3 base
72
72
73 $ hg debugsuccessorssets --hidden 'all()'
73 $ hg debugsuccessorssets --hidden 'all()'
74 d20a80d4def3
74 d20a80d4def3
75 d20a80d4def3
75 d20a80d4def3
76 007dc284c1f8
76 007dc284c1f8
77 82623d38b9ba
77 82623d38b9ba
78 392fd25390da
78 392fd25390da
79 82623d38b9ba
79 82623d38b9ba
80 82623d38b9ba
80 82623d38b9ba
81 392fd25390da
81 392fd25390da
82 392fd25390da
82 392fd25390da
83 $ hg log -r 'divergent()'
83 $ hg log -r 'divergent()'
84 2:82623d38b9ba A_1
84 2:82623d38b9ba A_1
85 3:392fd25390da A_2
85 3:392fd25390da A_2
86 $ hg debugsuccessorssets 'all()' --closest
86 $ hg debugsuccessorssets 'all()' --closest
87 d20a80d4def3
87 d20a80d4def3
88 d20a80d4def3
88 d20a80d4def3
89 82623d38b9ba
89 82623d38b9ba
90 82623d38b9ba
90 82623d38b9ba
91 392fd25390da
91 392fd25390da
92 392fd25390da
92 392fd25390da
93 $ hg debugsuccessorssets 'all()' --closest --hidden
93 $ hg debugsuccessorssets 'all()' --closest --hidden
94 d20a80d4def3
94 d20a80d4def3
95 d20a80d4def3
95 d20a80d4def3
96 007dc284c1f8
96 007dc284c1f8
97 82623d38b9ba
97 82623d38b9ba
98 392fd25390da
98 392fd25390da
99 82623d38b9ba
99 82623d38b9ba
100 82623d38b9ba
100 82623d38b9ba
101 392fd25390da
101 392fd25390da
102 392fd25390da
102 392fd25390da
103
103
104 check that mercurial refuses to push
104 check that mercurial refuses to push
105
105
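The changesets that would trigger this abort can be previewed before pushing; a minimal sketch, reusing the ../other path created below and the stock outgoing() revset (an empty result means divergence would not block the push):

  $ hg log -r 'divergent() and outgoing("../other")'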
106 $ hg init ../other
106 $ hg init ../other
107 $ hg push ../other
107 $ hg push ../other
108 pushing to ../other
108 pushing to ../other
109 searching for changes
109 searching for changes
110 abort: push includes divergent changeset: 392fd25390da!
110 abort: push includes content-divergent changeset: 392fd25390da!
111 [255]
111 [255]
112
112
113 $ cd ..
113 $ cd ..
114
114
115
115
116 indirect divergence with known changeset
116 indirect divergence with known changeset
117 -------------------------------------------
117 -------------------------------------------
118
118
119 $ newcase indirect_known
119 $ newcase indirect_known
120 $ hg debugobsolete `getid A_0` `getid A_1`
120 $ hg debugobsolete `getid A_0` `getid A_1`
121 obsoleted 1 changesets
121 obsoleted 1 changesets
122 $ hg debugobsolete `getid A_0` `getid A_2`
122 $ hg debugobsolete `getid A_0` `getid A_2`
123 $ mkcommit A_3
123 $ mkcommit A_3
124 created new head
124 created new head
125 $ hg debugobsolete `getid A_2` `getid A_3`
125 $ hg debugobsolete `getid A_2` `getid A_3`
126 obsoleted 1 changesets
126 obsoleted 1 changesets
127 $ hg log -G --hidden
127 $ hg log -G --hidden
128 @ 4:01f36c5a8fda A_3
128 @ 4:01f36c5a8fda A_3
129 |
129 |
130 | x 3:392fd25390da A_2
130 | x 3:392fd25390da A_2
131 |/
131 |/
132 | o 2:82623d38b9ba A_1
132 | o 2:82623d38b9ba A_1
133 |/
133 |/
134 | x 1:007dc284c1f8 A_0
134 | x 1:007dc284c1f8 A_0
135 |/
135 |/
136 o 0:d20a80d4def3 base
136 o 0:d20a80d4def3 base
137
137
138 $ hg debugsuccessorssets --hidden 'all()'
138 $ hg debugsuccessorssets --hidden 'all()'
139 d20a80d4def3
139 d20a80d4def3
140 d20a80d4def3
140 d20a80d4def3
141 007dc284c1f8
141 007dc284c1f8
142 82623d38b9ba
142 82623d38b9ba
143 01f36c5a8fda
143 01f36c5a8fda
144 82623d38b9ba
144 82623d38b9ba
145 82623d38b9ba
145 82623d38b9ba
146 392fd25390da
146 392fd25390da
147 01f36c5a8fda
147 01f36c5a8fda
148 01f36c5a8fda
148 01f36c5a8fda
149 01f36c5a8fda
149 01f36c5a8fda
150 $ hg log -r 'divergent()'
150 $ hg log -r 'divergent()'
151 2:82623d38b9ba A_1
151 2:82623d38b9ba A_1
152 4:01f36c5a8fda A_3
152 4:01f36c5a8fda A_3
153 $ hg debugsuccessorssets 'all()' --closest
153 $ hg debugsuccessorssets 'all()' --closest
154 d20a80d4def3
154 d20a80d4def3
155 d20a80d4def3
155 d20a80d4def3
156 82623d38b9ba
156 82623d38b9ba
157 82623d38b9ba
157 82623d38b9ba
158 01f36c5a8fda
158 01f36c5a8fda
159 01f36c5a8fda
159 01f36c5a8fda
160 $ hg debugsuccessorssets 'all()' --closest --hidden
160 $ hg debugsuccessorssets 'all()' --closest --hidden
161 d20a80d4def3
161 d20a80d4def3
162 d20a80d4def3
162 d20a80d4def3
163 007dc284c1f8
163 007dc284c1f8
164 82623d38b9ba
164 82623d38b9ba
165 392fd25390da
165 392fd25390da
166 82623d38b9ba
166 82623d38b9ba
167 82623d38b9ba
167 82623d38b9ba
168 392fd25390da
168 392fd25390da
169 392fd25390da
169 392fd25390da
170 01f36c5a8fda
170 01f36c5a8fda
171 01f36c5a8fda
171 01f36c5a8fda
172 $ cd ..
172 $ cd ..
173
173
174
174
175 indirect divergence with unknown changeset
175 indirect divergence with unknown changeset
176 -------------------------------------------
176 -------------------------------------------
177
177
178 $ newcase indirect_unknown
178 $ newcase indirect_unknown
179 $ hg debugobsolete `getid A_0` aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
179 $ hg debugobsolete `getid A_0` aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
180 obsoleted 1 changesets
180 obsoleted 1 changesets
181 $ hg debugobsolete aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa `getid A_1`
181 $ hg debugobsolete aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa `getid A_1`
182 $ hg debugobsolete `getid A_0` `getid A_2`
182 $ hg debugobsolete `getid A_0` `getid A_2`
183 $ hg log -G --hidden
183 $ hg log -G --hidden
184 o 3:392fd25390da A_2
184 o 3:392fd25390da A_2
185 |
185 |
186 | o 2:82623d38b9ba A_1
186 | o 2:82623d38b9ba A_1
187 |/
187 |/
188 | x 1:007dc284c1f8 A_0
188 | x 1:007dc284c1f8 A_0
189 |/
189 |/
190 @ 0:d20a80d4def3 base
190 @ 0:d20a80d4def3 base
191
191
192 $ hg debugsuccessorssets --hidden 'all()'
192 $ hg debugsuccessorssets --hidden 'all()'
193 d20a80d4def3
193 d20a80d4def3
194 d20a80d4def3
194 d20a80d4def3
195 007dc284c1f8
195 007dc284c1f8
196 82623d38b9ba
196 82623d38b9ba
197 392fd25390da
197 392fd25390da
198 82623d38b9ba
198 82623d38b9ba
199 82623d38b9ba
199 82623d38b9ba
200 392fd25390da
200 392fd25390da
201 392fd25390da
201 392fd25390da
202 $ hg log -r 'divergent()'
202 $ hg log -r 'divergent()'
203 2:82623d38b9ba A_1
203 2:82623d38b9ba A_1
204 3:392fd25390da A_2
204 3:392fd25390da A_2
205 $ hg debugsuccessorssets 'all()' --closest
205 $ hg debugsuccessorssets 'all()' --closest
206 d20a80d4def3
206 d20a80d4def3
207 d20a80d4def3
207 d20a80d4def3
208 82623d38b9ba
208 82623d38b9ba
209 82623d38b9ba
209 82623d38b9ba
210 392fd25390da
210 392fd25390da
211 392fd25390da
211 392fd25390da
212 $ hg debugsuccessorssets 'all()' --closest --hidden
212 $ hg debugsuccessorssets 'all()' --closest --hidden
213 d20a80d4def3
213 d20a80d4def3
214 d20a80d4def3
214 d20a80d4def3
215 007dc284c1f8
215 007dc284c1f8
216 82623d38b9ba
216 82623d38b9ba
217 392fd25390da
217 392fd25390da
218 82623d38b9ba
218 82623d38b9ba
219 82623d38b9ba
219 82623d38b9ba
220 392fd25390da
220 392fd25390da
221 392fd25390da
221 392fd25390da
222 $ cd ..
222 $ cd ..
223
223
224 do not take unknown nodes into account if they are final
224 do not take unknown nodes into account if they are final
225 -----------------------------------------------------
225 -----------------------------------------------------
226
226
227 $ newcase final-unknown
227 $ newcase final-unknown
228 $ hg debugobsolete `getid A_0` `getid A_1`
228 $ hg debugobsolete `getid A_0` `getid A_1`
229 obsoleted 1 changesets
229 obsoleted 1 changesets
230 $ hg debugobsolete `getid A_1` `getid A_2`
230 $ hg debugobsolete `getid A_1` `getid A_2`
231 obsoleted 1 changesets
231 obsoleted 1 changesets
232 $ hg debugobsolete `getid A_0` bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb
232 $ hg debugobsolete `getid A_0` bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb
233 $ hg debugobsolete bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb cccccccccccccccccccccccccccccccccccccccc
233 $ hg debugobsolete bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb cccccccccccccccccccccccccccccccccccccccc
234 $ hg debugobsolete `getid A_1` dddddddddddddddddddddddddddddddddddddddd
234 $ hg debugobsolete `getid A_1` dddddddddddddddddddddddddddddddddddddddd
235
235
236 $ hg debugsuccessorssets --hidden 'desc('A_0')'
236 $ hg debugsuccessorssets --hidden 'desc('A_0')'
237 007dc284c1f8
237 007dc284c1f8
238 392fd25390da
238 392fd25390da
239 $ hg debugsuccessorssets 'desc('A_0')' --closest
239 $ hg debugsuccessorssets 'desc('A_0')' --closest
240 $ hg debugsuccessorssets 'desc('A_0')' --closest --hidden
240 $ hg debugsuccessorssets 'desc('A_0')' --closest --hidden
241 007dc284c1f8
241 007dc284c1f8
242 82623d38b9ba
242 82623d38b9ba
243
243
244 $ cd ..
244 $ cd ..
245
245
246 divergence that converges again is not divergence anymore
246 divergence that converges again is not divergence anymore
247 -----------------------------------------------------
247 -----------------------------------------------------
248
248
249 $ newcase converged_divergence
249 $ newcase converged_divergence
250 $ hg debugobsolete `getid A_0` `getid A_1`
250 $ hg debugobsolete `getid A_0` `getid A_1`
251 obsoleted 1 changesets
251 obsoleted 1 changesets
252 $ hg debugobsolete `getid A_0` `getid A_2`
252 $ hg debugobsolete `getid A_0` `getid A_2`
253 $ mkcommit A_3
253 $ mkcommit A_3
254 created new head
254 created new head
255 $ hg debugobsolete `getid A_1` `getid A_3`
255 $ hg debugobsolete `getid A_1` `getid A_3`
256 obsoleted 1 changesets
256 obsoleted 1 changesets
257 $ hg debugobsolete `getid A_2` `getid A_3`
257 $ hg debugobsolete `getid A_2` `getid A_3`
258 obsoleted 1 changesets
258 obsoleted 1 changesets
259 $ hg log -G --hidden
259 $ hg log -G --hidden
260 @ 4:01f36c5a8fda A_3
260 @ 4:01f36c5a8fda A_3
261 |
261 |
262 | x 3:392fd25390da A_2
262 | x 3:392fd25390da A_2
263 |/
263 |/
264 | x 2:82623d38b9ba A_1
264 | x 2:82623d38b9ba A_1
265 |/
265 |/
266 | x 1:007dc284c1f8 A_0
266 | x 1:007dc284c1f8 A_0
267 |/
267 |/
268 o 0:d20a80d4def3 base
268 o 0:d20a80d4def3 base
269
269
270 $ hg debugsuccessorssets --hidden 'all()'
270 $ hg debugsuccessorssets --hidden 'all()'
271 d20a80d4def3
271 d20a80d4def3
272 d20a80d4def3
272 d20a80d4def3
273 007dc284c1f8
273 007dc284c1f8
274 01f36c5a8fda
274 01f36c5a8fda
275 82623d38b9ba
275 82623d38b9ba
276 01f36c5a8fda
276 01f36c5a8fda
277 392fd25390da
277 392fd25390da
278 01f36c5a8fda
278 01f36c5a8fda
279 01f36c5a8fda
279 01f36c5a8fda
280 01f36c5a8fda
280 01f36c5a8fda
281 $ hg log -r 'divergent()'
281 $ hg log -r 'divergent()'
282 $ hg debugsuccessorssets 'all()' --closest
282 $ hg debugsuccessorssets 'all()' --closest
283 d20a80d4def3
283 d20a80d4def3
284 d20a80d4def3
284 d20a80d4def3
285 01f36c5a8fda
285 01f36c5a8fda
286 01f36c5a8fda
286 01f36c5a8fda
287 $ hg debugsuccessorssets 'all()' --closest --hidden
287 $ hg debugsuccessorssets 'all()' --closest --hidden
288 d20a80d4def3
288 d20a80d4def3
289 d20a80d4def3
289 d20a80d4def3
290 007dc284c1f8
290 007dc284c1f8
291 82623d38b9ba
291 82623d38b9ba
292 392fd25390da
292 392fd25390da
293 82623d38b9ba
293 82623d38b9ba
294 82623d38b9ba
294 82623d38b9ba
295 392fd25390da
295 392fd25390da
296 392fd25390da
296 392fd25390da
297 01f36c5a8fda
297 01f36c5a8fda
298 01f36c5a8fda
298 01f36c5a8fda
299 $ cd ..
299 $ cd ..
300
300
301 split is not divergence
301 split is not divergence
302 -----------------------------
302 -----------------------------
303
303
304 $ newcase split
304 $ newcase split
305 $ hg debugobsolete `getid A_0` `getid A_1` `getid A_2`
305 $ hg debugobsolete `getid A_0` `getid A_1` `getid A_2`
306 obsoleted 1 changesets
306 obsoleted 1 changesets
307 $ hg log -G --hidden
307 $ hg log -G --hidden
308 o 3:392fd25390da A_2
308 o 3:392fd25390da A_2
309 |
309 |
310 | o 2:82623d38b9ba A_1
310 | o 2:82623d38b9ba A_1
311 |/
311 |/
312 | x 1:007dc284c1f8 A_0
312 | x 1:007dc284c1f8 A_0
313 |/
313 |/
314 @ 0:d20a80d4def3 base
314 @ 0:d20a80d4def3 base
315
315
316 $ hg debugsuccessorssets --hidden 'all()'
316 $ hg debugsuccessorssets --hidden 'all()'
317 d20a80d4def3
317 d20a80d4def3
318 d20a80d4def3
318 d20a80d4def3
319 007dc284c1f8
319 007dc284c1f8
320 82623d38b9ba 392fd25390da
320 82623d38b9ba 392fd25390da
321 82623d38b9ba
321 82623d38b9ba
322 82623d38b9ba
322 82623d38b9ba
323 392fd25390da
323 392fd25390da
324 392fd25390da
324 392fd25390da
325 $ hg log -r 'divergent()'
325 $ hg log -r 'divergent()'
326 $ hg debugsuccessorssets 'all()' --closest
326 $ hg debugsuccessorssets 'all()' --closest
327 d20a80d4def3
327 d20a80d4def3
328 d20a80d4def3
328 d20a80d4def3
329 82623d38b9ba
329 82623d38b9ba
330 82623d38b9ba
330 82623d38b9ba
331 392fd25390da
331 392fd25390da
332 392fd25390da
332 392fd25390da
333 $ hg debugsuccessorssets 'all()' --closest --hidden
333 $ hg debugsuccessorssets 'all()' --closest --hidden
334 d20a80d4def3
334 d20a80d4def3
335 d20a80d4def3
335 d20a80d4def3
336 007dc284c1f8
336 007dc284c1f8
337 82623d38b9ba 392fd25390da
337 82623d38b9ba 392fd25390da
338 82623d38b9ba
338 82623d38b9ba
339 82623d38b9ba
339 82623d38b9ba
340 392fd25390da
340 392fd25390da
341 392fd25390da
341 392fd25390da
342
342
343 Even when subsequent rewriting happens
343 Even when subsequent rewriting happens
344
344
345 $ mkcommit A_3
345 $ mkcommit A_3
346 created new head
346 created new head
347 $ hg debugobsolete `getid A_1` `getid A_3`
347 $ hg debugobsolete `getid A_1` `getid A_3`
348 obsoleted 1 changesets
348 obsoleted 1 changesets
349 $ hg up 0
349 $ hg up 0
350 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
350 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
351 $ mkcommit A_4
351 $ mkcommit A_4
352 created new head
352 created new head
353 $ hg debugobsolete `getid A_2` `getid A_4`
353 $ hg debugobsolete `getid A_2` `getid A_4`
354 obsoleted 1 changesets
354 obsoleted 1 changesets
355 $ hg up 0
355 $ hg up 0
356 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
356 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
357 $ mkcommit A_5
357 $ mkcommit A_5
358 created new head
358 created new head
359 $ hg debugobsolete `getid A_4` `getid A_5`
359 $ hg debugobsolete `getid A_4` `getid A_5`
360 obsoleted 1 changesets
360 obsoleted 1 changesets
361 $ hg log -G --hidden
361 $ hg log -G --hidden
362 @ 6:e442cfc57690 A_5
362 @ 6:e442cfc57690 A_5
363 |
363 |
364 | x 5:6a411f0d7a0a A_4
364 | x 5:6a411f0d7a0a A_4
365 |/
365 |/
366 | o 4:01f36c5a8fda A_3
366 | o 4:01f36c5a8fda A_3
367 |/
367 |/
368 | x 3:392fd25390da A_2
368 | x 3:392fd25390da A_2
369 |/
369 |/
370 | x 2:82623d38b9ba A_1
370 | x 2:82623d38b9ba A_1
371 |/
371 |/
372 | x 1:007dc284c1f8 A_0
372 | x 1:007dc284c1f8 A_0
373 |/
373 |/
374 o 0:d20a80d4def3 base
374 o 0:d20a80d4def3 base
375
375
376 $ hg debugsuccessorssets --hidden 'all()'
376 $ hg debugsuccessorssets --hidden 'all()'
377 d20a80d4def3
377 d20a80d4def3
378 d20a80d4def3
378 d20a80d4def3
379 007dc284c1f8
379 007dc284c1f8
380 01f36c5a8fda e442cfc57690
380 01f36c5a8fda e442cfc57690
381 82623d38b9ba
381 82623d38b9ba
382 01f36c5a8fda
382 01f36c5a8fda
383 392fd25390da
383 392fd25390da
384 e442cfc57690
384 e442cfc57690
385 01f36c5a8fda
385 01f36c5a8fda
386 01f36c5a8fda
386 01f36c5a8fda
387 6a411f0d7a0a
387 6a411f0d7a0a
388 e442cfc57690
388 e442cfc57690
389 e442cfc57690
389 e442cfc57690
390 e442cfc57690
390 e442cfc57690
391 $ hg debugsuccessorssets 'all()' --closest
391 $ hg debugsuccessorssets 'all()' --closest
392 d20a80d4def3
392 d20a80d4def3
393 d20a80d4def3
393 d20a80d4def3
394 01f36c5a8fda
394 01f36c5a8fda
395 01f36c5a8fda
395 01f36c5a8fda
396 e442cfc57690
396 e442cfc57690
397 e442cfc57690
397 e442cfc57690
398 $ hg debugsuccessorssets 'all()' --closest --hidden
398 $ hg debugsuccessorssets 'all()' --closest --hidden
399 d20a80d4def3
399 d20a80d4def3
400 d20a80d4def3
400 d20a80d4def3
401 007dc284c1f8
401 007dc284c1f8
402 82623d38b9ba 392fd25390da
402 82623d38b9ba 392fd25390da
403 82623d38b9ba
403 82623d38b9ba
404 82623d38b9ba
404 82623d38b9ba
405 392fd25390da
405 392fd25390da
406 392fd25390da
406 392fd25390da
407 01f36c5a8fda
407 01f36c5a8fda
408 01f36c5a8fda
408 01f36c5a8fda
409 6a411f0d7a0a
409 6a411f0d7a0a
410 e442cfc57690
410 e442cfc57690
411 e442cfc57690
411 e442cfc57690
412 e442cfc57690
412 e442cfc57690
413 $ hg log -r 'divergent()'
413 $ hg log -r 'divergent()'
414
414
415 Check a more complex obsolescence graph (with divergence)
415 Check a more complex obsolescence graph (with divergence)
416
416
417 $ mkcommit B_0; hg up 0
417 $ mkcommit B_0; hg up 0
418 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
418 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
419 $ hg debugobsolete `getid B_0` `getid A_2`
419 $ hg debugobsolete `getid B_0` `getid A_2`
420 obsoleted 1 changesets
420 obsoleted 1 changesets
421 $ mkcommit A_7; hg up 0
421 $ mkcommit A_7; hg up 0
422 created new head
422 created new head
423 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
423 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
424 $ mkcommit A_8; hg up 0
424 $ mkcommit A_8; hg up 0
425 created new head
425 created new head
426 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
426 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
427 $ hg debugobsolete `getid A_5` `getid A_7` `getid A_8`
427 $ hg debugobsolete `getid A_5` `getid A_7` `getid A_8`
428 obsoleted 1 changesets
428 obsoleted 1 changesets
429 $ mkcommit A_9; hg up 0
429 $ mkcommit A_9; hg up 0
430 created new head
430 created new head
431 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
431 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
432 $ hg debugobsolete `getid A_5` `getid A_9`
432 $ hg debugobsolete `getid A_5` `getid A_9`
433 $ hg log -G --hidden
433 $ hg log -G --hidden
434 o 10:bed64f5d2f5a A_9
434 o 10:bed64f5d2f5a A_9
435 |
435 |
436 | o 9:14608b260df8 A_8
436 | o 9:14608b260df8 A_8
437 |/
437 |/
438 | o 8:7ae126973a96 A_7
438 | o 8:7ae126973a96 A_7
439 |/
439 |/
440 | x 7:3750ebee865d B_0
440 | x 7:3750ebee865d B_0
441 | |
441 | |
442 | x 6:e442cfc57690 A_5
442 | x 6:e442cfc57690 A_5
443 |/
443 |/
444 | x 5:6a411f0d7a0a A_4
444 | x 5:6a411f0d7a0a A_4
445 |/
445 |/
446 | o 4:01f36c5a8fda A_3
446 | o 4:01f36c5a8fda A_3
447 |/
447 |/
448 | x 3:392fd25390da A_2
448 | x 3:392fd25390da A_2
449 |/
449 |/
450 | x 2:82623d38b9ba A_1
450 | x 2:82623d38b9ba A_1
451 |/
451 |/
452 | x 1:007dc284c1f8 A_0
452 | x 1:007dc284c1f8 A_0
453 |/
453 |/
454 @ 0:d20a80d4def3 base
454 @ 0:d20a80d4def3 base
455
455
456 $ hg debugsuccessorssets --hidden 'all()'
456 $ hg debugsuccessorssets --hidden 'all()'
457 d20a80d4def3
457 d20a80d4def3
458 d20a80d4def3
458 d20a80d4def3
459 007dc284c1f8
459 007dc284c1f8
460 01f36c5a8fda bed64f5d2f5a
460 01f36c5a8fda bed64f5d2f5a
461 01f36c5a8fda 7ae126973a96 14608b260df8
461 01f36c5a8fda 7ae126973a96 14608b260df8
462 82623d38b9ba
462 82623d38b9ba
463 01f36c5a8fda
463 01f36c5a8fda
464 392fd25390da
464 392fd25390da
465 bed64f5d2f5a
465 bed64f5d2f5a
466 7ae126973a96 14608b260df8
466 7ae126973a96 14608b260df8
467 01f36c5a8fda
467 01f36c5a8fda
468 01f36c5a8fda
468 01f36c5a8fda
469 6a411f0d7a0a
469 6a411f0d7a0a
470 bed64f5d2f5a
470 bed64f5d2f5a
471 7ae126973a96 14608b260df8
471 7ae126973a96 14608b260df8
472 e442cfc57690
472 e442cfc57690
473 bed64f5d2f5a
473 bed64f5d2f5a
474 7ae126973a96 14608b260df8
474 7ae126973a96 14608b260df8
475 3750ebee865d
475 3750ebee865d
476 bed64f5d2f5a
476 bed64f5d2f5a
477 7ae126973a96 14608b260df8
477 7ae126973a96 14608b260df8
478 7ae126973a96
478 7ae126973a96
479 7ae126973a96
479 7ae126973a96
480 14608b260df8
480 14608b260df8
481 14608b260df8
481 14608b260df8
482 bed64f5d2f5a
482 bed64f5d2f5a
483 bed64f5d2f5a
483 bed64f5d2f5a
484 $ hg debugsuccessorssets 'all()' --closest
484 $ hg debugsuccessorssets 'all()' --closest
485 d20a80d4def3
485 d20a80d4def3
486 d20a80d4def3
486 d20a80d4def3
487 01f36c5a8fda
487 01f36c5a8fda
488 01f36c5a8fda
488 01f36c5a8fda
489 7ae126973a96
489 7ae126973a96
490 7ae126973a96
490 7ae126973a96
491 14608b260df8
491 14608b260df8
492 14608b260df8
492 14608b260df8
493 bed64f5d2f5a
493 bed64f5d2f5a
494 bed64f5d2f5a
494 bed64f5d2f5a
495 $ hg debugsuccessorssets 'all()' --closest --hidden
495 $ hg debugsuccessorssets 'all()' --closest --hidden
496 d20a80d4def3
496 d20a80d4def3
497 d20a80d4def3
497 d20a80d4def3
498 007dc284c1f8
498 007dc284c1f8
499 82623d38b9ba 392fd25390da
499 82623d38b9ba 392fd25390da
500 82623d38b9ba
500 82623d38b9ba
501 82623d38b9ba
501 82623d38b9ba
502 392fd25390da
502 392fd25390da
503 392fd25390da
503 392fd25390da
504 01f36c5a8fda
504 01f36c5a8fda
505 01f36c5a8fda
505 01f36c5a8fda
506 6a411f0d7a0a
506 6a411f0d7a0a
507 e442cfc57690
507 e442cfc57690
508 e442cfc57690
508 e442cfc57690
509 e442cfc57690
509 e442cfc57690
510 3750ebee865d
510 3750ebee865d
511 392fd25390da
511 392fd25390da
512 7ae126973a96
512 7ae126973a96
513 7ae126973a96
513 7ae126973a96
514 14608b260df8
514 14608b260df8
515 14608b260df8
515 14608b260df8
516 bed64f5d2f5a
516 bed64f5d2f5a
517 bed64f5d2f5a
517 bed64f5d2f5a
518 $ hg log -r 'divergent()'
518 $ hg log -r 'divergent()'
519 4:01f36c5a8fda A_3
519 4:01f36c5a8fda A_3
520 8:7ae126973a96 A_7
520 8:7ae126973a96 A_7
521 9:14608b260df8 A_8
521 9:14608b260df8 A_8
522 10:bed64f5d2f5a A_9
522 10:bed64f5d2f5a A_9
523
523
524 fix the divergence
524 fix the divergence
525
525
526 $ mkcommit A_A; hg up 0
526 $ mkcommit A_A; hg up 0
527 created new head
527 created new head
528 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
528 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
529 $ hg debugobsolete `getid A_9` `getid A_A`
529 $ hg debugobsolete `getid A_9` `getid A_A`
530 obsoleted 1 changesets
530 obsoleted 1 changesets
531 $ hg debugobsolete `getid A_7` `getid A_A`
531 $ hg debugobsolete `getid A_7` `getid A_A`
532 obsoleted 1 changesets
532 obsoleted 1 changesets
533 $ hg debugobsolete `getid A_8` `getid A_A`
533 $ hg debugobsolete `getid A_8` `getid A_A`
534 obsoleted 1 changesets
534 obsoleted 1 changesets
535 $ hg log -G --hidden
535 $ hg log -G --hidden
536 o 11:a139f71be9da A_A
536 o 11:a139f71be9da A_A
537 |
537 |
538 | x 10:bed64f5d2f5a A_9
538 | x 10:bed64f5d2f5a A_9
539 |/
539 |/
540 | x 9:14608b260df8 A_8
540 | x 9:14608b260df8 A_8
541 |/
541 |/
542 | x 8:7ae126973a96 A_7
542 | x 8:7ae126973a96 A_7
543 |/
543 |/
544 | x 7:3750ebee865d B_0
544 | x 7:3750ebee865d B_0
545 | |
545 | |
546 | x 6:e442cfc57690 A_5
546 | x 6:e442cfc57690 A_5
547 |/
547 |/
548 | x 5:6a411f0d7a0a A_4
548 | x 5:6a411f0d7a0a A_4
549 |/
549 |/
550 | o 4:01f36c5a8fda A_3
550 | o 4:01f36c5a8fda A_3
551 |/
551 |/
552 | x 3:392fd25390da A_2
552 | x 3:392fd25390da A_2
553 |/
553 |/
554 | x 2:82623d38b9ba A_1
554 | x 2:82623d38b9ba A_1
555 |/
555 |/
556 | x 1:007dc284c1f8 A_0
556 | x 1:007dc284c1f8 A_0
557 |/
557 |/
558 @ 0:d20a80d4def3 base
558 @ 0:d20a80d4def3 base
559
559
560 $ hg debugsuccessorssets --hidden 'all()'
560 $ hg debugsuccessorssets --hidden 'all()'
561 d20a80d4def3
561 d20a80d4def3
562 d20a80d4def3
562 d20a80d4def3
563 007dc284c1f8
563 007dc284c1f8
564 01f36c5a8fda a139f71be9da
564 01f36c5a8fda a139f71be9da
565 82623d38b9ba
565 82623d38b9ba
566 01f36c5a8fda
566 01f36c5a8fda
567 392fd25390da
567 392fd25390da
568 a139f71be9da
568 a139f71be9da
569 01f36c5a8fda
569 01f36c5a8fda
570 01f36c5a8fda
570 01f36c5a8fda
571 6a411f0d7a0a
571 6a411f0d7a0a
572 a139f71be9da
572 a139f71be9da
573 e442cfc57690
573 e442cfc57690
574 a139f71be9da
574 a139f71be9da
575 3750ebee865d
575 3750ebee865d
576 a139f71be9da
576 a139f71be9da
577 7ae126973a96
577 7ae126973a96
578 a139f71be9da
578 a139f71be9da
579 14608b260df8
579 14608b260df8
580 a139f71be9da
580 a139f71be9da
581 bed64f5d2f5a
581 bed64f5d2f5a
582 a139f71be9da
582 a139f71be9da
583 a139f71be9da
583 a139f71be9da
584 a139f71be9da
584 a139f71be9da
585 $ hg debugsuccessorssets 'all()' --closest
585 $ hg debugsuccessorssets 'all()' --closest
586 d20a80d4def3
586 d20a80d4def3
587 d20a80d4def3
587 d20a80d4def3
588 01f36c5a8fda
588 01f36c5a8fda
589 01f36c5a8fda
589 01f36c5a8fda
590 a139f71be9da
590 a139f71be9da
591 a139f71be9da
591 a139f71be9da
592 $ hg debugsuccessorssets 'all()' --closest --hidden
592 $ hg debugsuccessorssets 'all()' --closest --hidden
593 d20a80d4def3
593 d20a80d4def3
594 d20a80d4def3
594 d20a80d4def3
595 007dc284c1f8
595 007dc284c1f8
596 82623d38b9ba 392fd25390da
596 82623d38b9ba 392fd25390da
597 82623d38b9ba
597 82623d38b9ba
598 82623d38b9ba
598 82623d38b9ba
599 392fd25390da
599 392fd25390da
600 392fd25390da
600 392fd25390da
601 01f36c5a8fda
601 01f36c5a8fda
602 01f36c5a8fda
602 01f36c5a8fda
603 6a411f0d7a0a
603 6a411f0d7a0a
604 e442cfc57690
604 e442cfc57690
605 e442cfc57690
605 e442cfc57690
606 e442cfc57690
606 e442cfc57690
607 3750ebee865d
607 3750ebee865d
608 392fd25390da
608 392fd25390da
609 7ae126973a96
609 7ae126973a96
610 a139f71be9da
610 a139f71be9da
611 14608b260df8
611 14608b260df8
612 a139f71be9da
612 a139f71be9da
613 bed64f5d2f5a
613 bed64f5d2f5a
614 a139f71be9da
614 a139f71be9da
615 a139f71be9da
615 a139f71be9da
616 a139f71be9da
616 a139f71be9da
617 $ hg log -r 'divergent()'
617 $ hg log -r 'divergent()'
618
618
619 $ cd ..
619 $ cd ..
620
620
621
621
622 Subset does not diverge
622 Subset does not diverge
623 ------------------------------
623 ------------------------------
624
624
625 Do not report divergent successors-set if it is a subset of another
625 Do not report divergent successors-set if it is a subset of another
626 successors-set. (report [A,B] not [A] + [A,B])
626 successors-set. (report [A,B] not [A] + [A,B])
627
627
628 $ newcase subset
628 $ newcase subset
629 $ hg debugobsolete `getid A_0` `getid A_2`
629 $ hg debugobsolete `getid A_0` `getid A_2`
630 obsoleted 1 changesets
630 obsoleted 1 changesets
631 $ hg debugobsolete `getid A_0` `getid A_1` `getid A_2`
631 $ hg debugobsolete `getid A_0` `getid A_1` `getid A_2`
632 $ hg debugsuccessorssets --hidden 'desc('A_0')'
632 $ hg debugsuccessorssets --hidden 'desc('A_0')'
633 007dc284c1f8
633 007dc284c1f8
634 82623d38b9ba 392fd25390da
634 82623d38b9ba 392fd25390da
635 $ hg debugsuccessorssets 'desc('A_0')' --closest
635 $ hg debugsuccessorssets 'desc('A_0')' --closest
636 $ hg debugsuccessorssets 'desc('A_0')' --closest --hidden
636 $ hg debugsuccessorssets 'desc('A_0')' --closest --hidden
637 007dc284c1f8
637 007dc284c1f8
638 82623d38b9ba 392fd25390da
638 82623d38b9ba 392fd25390da
639
639
640 $ cd ..
640 $ cd ..
641
641
642 Use the scmutil.cleanupnodes API to create divergence
642 Use the scmutil.cleanupnodes API to create divergence
643
643
644 $ hg init cleanupnodes
644 $ hg init cleanupnodes
645 $ cd cleanupnodes
645 $ cd cleanupnodes
646 $ hg debugdrawdag <<'EOS'
646 $ hg debugdrawdag <<'EOS'
647 > B1 B3 B4
647 > B1 B3 B4
648 > | \|
648 > | \|
649 > A Z
649 > A Z
650 > EOS
650 > EOS
651
651
652 $ hg update -q B1
652 $ hg update -q B1
653 $ echo 3 >> B
653 $ echo 3 >> B
654 $ hg commit --amend -m B2
654 $ hg commit --amend -m B2
655 $ cat > $TESTTMP/scmutilcleanup.py <<EOF
655 $ cat > $TESTTMP/scmutilcleanup.py <<EOF
656 > from mercurial import registrar, scmutil
656 > from mercurial import registrar, scmutil
657 > cmdtable = {}
657 > cmdtable = {}
658 > command = registrar.command(cmdtable)
658 > command = registrar.command(cmdtable)
659 > @command('cleanup')
659 > @command('cleanup')
660 > def cleanup(ui, repo):
660 > def cleanup(ui, repo):
661 > def node(expr):
661 > def node(expr):
662 > unfi = repo.unfiltered()
662 > unfi = repo.unfiltered()
663 > rev = unfi.revs(expr).first()
663 > rev = unfi.revs(expr).first()
664 > return unfi.changelog.node(rev)
664 > return unfi.changelog.node(rev)
665 > with repo.wlock(), repo.lock(), repo.transaction('delayedstrip'):
665 > with repo.wlock(), repo.lock(), repo.transaction('delayedstrip'):
666 > mapping = {node('desc(B1)'): [node('desc(B3)')],
666 > mapping = {node('desc(B1)'): [node('desc(B3)')],
667 > node('desc(B3)'): [node('desc(B4)')]}
667 > node('desc(B3)'): [node('desc(B4)')]}
668 > scmutil.cleanupnodes(repo, mapping, 'test')
668 > scmutil.cleanupnodes(repo, mapping, 'test')
669 > EOF
669 > EOF
670
670
671 $ rm .hg/localtags
671 $ rm .hg/localtags
672 $ hg cleanup --config extensions.t=$TESTTMP/scmutilcleanup.py
672 $ hg cleanup --config extensions.t=$TESTTMP/scmutilcleanup.py
673 $ hg log -G -T '{rev}:{node|short} {desc} {troubles}' -r 'sort(all(), topo)'
673 $ hg log -G -T '{rev}:{node|short} {desc} {troubles}' -r 'sort(all(), topo)'
674 @ 5:1a2a9b5b0030 B2 divergent
674 @ 5:1a2a9b5b0030 B2 content-divergent
675 |
675 |
676 | o 4:70d5a63ca112 B4 divergent
676 | o 4:70d5a63ca112 B4 content-divergent
677 | |
677 | |
678 | o 1:48b9aae0607f Z
678 | o 1:48b9aae0607f Z
679 |
679 |
680 o 0:426bada5c675 A
680 o 0:426bada5c675 A
681
681
682 $ hg debugobsolete
682 $ hg debugobsolete
683 a178212c3433c4e77b573f6011e29affb8aefa33 1a2a9b5b0030632400aa78e00388c20f99d3ec44 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
683 a178212c3433c4e77b573f6011e29affb8aefa33 1a2a9b5b0030632400aa78e00388c20f99d3ec44 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
684 a178212c3433c4e77b573f6011e29affb8aefa33 ad6478fb94ecec98b86daae98722865d494ac561 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
684 a178212c3433c4e77b573f6011e29affb8aefa33 ad6478fb94ecec98b86daae98722865d494ac561 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
685 ad6478fb94ecec98b86daae98722865d494ac561 70d5a63ca112acb3764bc1d7320ca90ea688d671 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
685 ad6478fb94ecec98b86daae98722865d494ac561 70d5a63ca112acb3764bc1d7320ca90ea688d671 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
@@ -1,1158 +1,1158 b''
1 ==========================
1 ==========================
2 Test rebase with obsolete
2 Test rebase with obsolete
3 ==========================
3 ==========================
4
4
5 Enable obsolete
5 Enable obsolete
6
6
7 $ cat >> $HGRCPATH << EOF
7 $ cat >> $HGRCPATH << EOF
8 > [ui]
8 > [ui]
9 > logtemplate= {rev}:{node|short} {desc|firstline}
9 > logtemplate= {rev}:{node|short} {desc|firstline}
10 > [experimental]
10 > [experimental]
11 > evolution=createmarkers,allowunstable
11 > evolution=createmarkers,allowunstable
12 > [phases]
12 > [phases]
13 > publish=False
13 > publish=False
14 > [extensions]
14 > [extensions]
15 > rebase=
15 > rebase=
16 > drawdag=$TESTDIR/drawdag.py
16 > drawdag=$TESTDIR/drawdag.py
17 > EOF
17 > EOF
18
18
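A quick sketch of how to confirm what this configuration enables: createmarkers turns on obsolescence markers, and allowunstable lets commands such as rebase leave unstable (orphan) descendants behind instead of aborting.

  $ hg config experimental.evolution   # should echo: createmarkers,allowunstable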
19 Setup rebase canonical repo
19 Setup rebase canonical repo
20
20
21 $ hg init base
21 $ hg init base
22 $ cd base
22 $ cd base
23 $ hg unbundle "$TESTDIR/bundles/rebase.hg"
23 $ hg unbundle "$TESTDIR/bundles/rebase.hg"
24 adding changesets
24 adding changesets
25 adding manifests
25 adding manifests
26 adding file changes
26 adding file changes
27 added 8 changesets with 7 changes to 7 files (+2 heads)
27 added 8 changesets with 7 changes to 7 files (+2 heads)
28 (run 'hg heads' to see heads, 'hg merge' to merge)
28 (run 'hg heads' to see heads, 'hg merge' to merge)
29 $ hg up tip
29 $ hg up tip
30 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
30 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
31 $ hg log -G
31 $ hg log -G
32 @ 7:02de42196ebe H
32 @ 7:02de42196ebe H
33 |
33 |
34 | o 6:eea13746799a G
34 | o 6:eea13746799a G
35 |/|
35 |/|
36 o | 5:24b6387c8c8c F
36 o | 5:24b6387c8c8c F
37 | |
37 | |
38 | o 4:9520eea781bc E
38 | o 4:9520eea781bc E
39 |/
39 |/
40 | o 3:32af7686d403 D
40 | o 3:32af7686d403 D
41 | |
41 | |
42 | o 2:5fddd98957c8 C
42 | o 2:5fddd98957c8 C
43 | |
43 | |
44 | o 1:42ccdea3bb16 B
44 | o 1:42ccdea3bb16 B
45 |/
45 |/
46 o 0:cd010b8cd998 A
46 o 0:cd010b8cd998 A
47
47
48 $ cd ..
48 $ cd ..
49
49
50 simple rebase
50 simple rebase
51 ---------------------------------
51 ---------------------------------
52
52
53 $ hg clone base simple
53 $ hg clone base simple
54 updating to branch default
54 updating to branch default
55 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
55 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
56 $ cd simple
56 $ cd simple
57 $ hg up 32af7686d403
57 $ hg up 32af7686d403
58 3 files updated, 0 files merged, 2 files removed, 0 files unresolved
58 3 files updated, 0 files merged, 2 files removed, 0 files unresolved
59 $ hg rebase -d eea13746799a
59 $ hg rebase -d eea13746799a
60 rebasing 1:42ccdea3bb16 "B"
60 rebasing 1:42ccdea3bb16 "B"
61 rebasing 2:5fddd98957c8 "C"
61 rebasing 2:5fddd98957c8 "C"
62 rebasing 3:32af7686d403 "D"
62 rebasing 3:32af7686d403 "D"
63 $ hg log -G
63 $ hg log -G
64 @ 10:8eeb3c33ad33 D
64 @ 10:8eeb3c33ad33 D
65 |
65 |
66 o 9:2327fea05063 C
66 o 9:2327fea05063 C
67 |
67 |
68 o 8:e4e5be0395b2 B
68 o 8:e4e5be0395b2 B
69 |
69 |
70 | o 7:02de42196ebe H
70 | o 7:02de42196ebe H
71 | |
71 | |
72 o | 6:eea13746799a G
72 o | 6:eea13746799a G
73 |\|
73 |\|
74 | o 5:24b6387c8c8c F
74 | o 5:24b6387c8c8c F
75 | |
75 | |
76 o | 4:9520eea781bc E
76 o | 4:9520eea781bc E
77 |/
77 |/
78 o 0:cd010b8cd998 A
78 o 0:cd010b8cd998 A
79
79
80 $ hg log --hidden -G
80 $ hg log --hidden -G
81 @ 10:8eeb3c33ad33 D
81 @ 10:8eeb3c33ad33 D
82 |
82 |
83 o 9:2327fea05063 C
83 o 9:2327fea05063 C
84 |
84 |
85 o 8:e4e5be0395b2 B
85 o 8:e4e5be0395b2 B
86 |
86 |
87 | o 7:02de42196ebe H
87 | o 7:02de42196ebe H
88 | |
88 | |
89 o | 6:eea13746799a G
89 o | 6:eea13746799a G
90 |\|
90 |\|
91 | o 5:24b6387c8c8c F
91 | o 5:24b6387c8c8c F
92 | |
92 | |
93 o | 4:9520eea781bc E
93 o | 4:9520eea781bc E
94 |/
94 |/
95 | x 3:32af7686d403 D
95 | x 3:32af7686d403 D
96 | |
96 | |
97 | x 2:5fddd98957c8 C
97 | x 2:5fddd98957c8 C
98 | |
98 | |
99 | x 1:42ccdea3bb16 B
99 | x 1:42ccdea3bb16 B
100 |/
100 |/
101 o 0:cd010b8cd998 A
101 o 0:cd010b8cd998 A
102
102
103 $ hg debugobsolete
103 $ hg debugobsolete
104 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 e4e5be0395b2cbd471ed22a26b1b6a1a0658a794 0 (*) {'user': 'test'} (glob)
104 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 e4e5be0395b2cbd471ed22a26b1b6a1a0658a794 0 (*) {'user': 'test'} (glob)
105 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 2327fea05063f39961b14cb69435a9898dc9a245 0 (*) {'user': 'test'} (glob)
105 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 2327fea05063f39961b14cb69435a9898dc9a245 0 (*) {'user': 'test'} (glob)
106 32af7686d403cf45b5d95f2d70cebea587ac806a 8eeb3c33ad33d452c89e5dcf611c347f978fb42b 0 (*) {'user': 'test'} (glob)
106 32af7686d403cf45b5d95f2d70cebea587ac806a 8eeb3c33ad33d452c89e5dcf611c347f978fb42b 0 (*) {'user': 'test'} (glob)
107
107
108
108
109 $ cd ..
109 $ cd ..
110
110
111 empty changeset
111 empty changeset
112 ---------------------------------
112 ---------------------------------
113
113
114 $ hg clone base empty
114 $ hg clone base empty
115 updating to branch default
115 updating to branch default
116 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
116 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
117 $ cd empty
117 $ cd empty
118 $ hg up eea13746799a
118 $ hg up eea13746799a
119 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
119 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
120
120
121 We make a copy of both the first changeset in the rebased set and another one in the
121 We make a copy of both the first changeset in the rebased set and another one in the
122 set.
122 set.
123
123
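Because B and D were grafted first, the rebase below finds nothing left to commit for them; they are dropped and recorded as pruned, which is why the hg debugobsolete output further down shows markers with no successor hash (the "... 0 {parent}" entries). A minimal sketch for listing every obsoleted changeset afterwards:

  $ hg log --hidden -r 'obsolete()'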
124 $ hg graft 42ccdea3bb16 32af7686d403
124 $ hg graft 42ccdea3bb16 32af7686d403
125 grafting 1:42ccdea3bb16 "B"
125 grafting 1:42ccdea3bb16 "B"
126 grafting 3:32af7686d403 "D"
126 grafting 3:32af7686d403 "D"
127 $ hg rebase -s 42ccdea3bb16 -d .
127 $ hg rebase -s 42ccdea3bb16 -d .
128 rebasing 1:42ccdea3bb16 "B"
128 rebasing 1:42ccdea3bb16 "B"
129 note: rebase of 1:42ccdea3bb16 created no changes to commit
129 note: rebase of 1:42ccdea3bb16 created no changes to commit
130 rebasing 2:5fddd98957c8 "C"
130 rebasing 2:5fddd98957c8 "C"
131 rebasing 3:32af7686d403 "D"
131 rebasing 3:32af7686d403 "D"
132 note: rebase of 3:32af7686d403 created no changes to commit
132 note: rebase of 3:32af7686d403 created no changes to commit
133 $ hg log -G
133 $ hg log -G
134 o 10:5ae4c968c6ac C
134 o 10:5ae4c968c6ac C
135 |
135 |
136 @ 9:08483444fef9 D
136 @ 9:08483444fef9 D
137 |
137 |
138 o 8:8877864f1edb B
138 o 8:8877864f1edb B
139 |
139 |
140 | o 7:02de42196ebe H
140 | o 7:02de42196ebe H
141 | |
141 | |
142 o | 6:eea13746799a G
142 o | 6:eea13746799a G
143 |\|
143 |\|
144 | o 5:24b6387c8c8c F
144 | o 5:24b6387c8c8c F
145 | |
145 | |
146 o | 4:9520eea781bc E
146 o | 4:9520eea781bc E
147 |/
147 |/
148 o 0:cd010b8cd998 A
148 o 0:cd010b8cd998 A
149
149
150 $ hg log --hidden -G
150 $ hg log --hidden -G
151 o 10:5ae4c968c6ac C
151 o 10:5ae4c968c6ac C
152 |
152 |
153 @ 9:08483444fef9 D
153 @ 9:08483444fef9 D
154 |
154 |
155 o 8:8877864f1edb B
155 o 8:8877864f1edb B
156 |
156 |
157 | o 7:02de42196ebe H
157 | o 7:02de42196ebe H
158 | |
158 | |
159 o | 6:eea13746799a G
159 o | 6:eea13746799a G
160 |\|
160 |\|
161 | o 5:24b6387c8c8c F
161 | o 5:24b6387c8c8c F
162 | |
162 | |
163 o | 4:9520eea781bc E
163 o | 4:9520eea781bc E
164 |/
164 |/
165 | x 3:32af7686d403 D
165 | x 3:32af7686d403 D
166 | |
166 | |
167 | x 2:5fddd98957c8 C
167 | x 2:5fddd98957c8 C
168 | |
168 | |
169 | x 1:42ccdea3bb16 B
169 | x 1:42ccdea3bb16 B
170 |/
170 |/
171 o 0:cd010b8cd998 A
171 o 0:cd010b8cd998 A
172
172
173 $ hg debugobsolete
173 $ hg debugobsolete
174 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 {cd010b8cd998f3981a5a8115f94f8da4ab506089} (*) {'user': 'test'} (glob)
174 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 {cd010b8cd998f3981a5a8115f94f8da4ab506089} (*) {'user': 'test'} (glob)
175 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 5ae4c968c6aca831df823664e706c9d4aa34473d 0 (*) {'user': 'test'} (glob)
175 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 5ae4c968c6aca831df823664e706c9d4aa34473d 0 (*) {'user': 'test'} (glob)
176 32af7686d403cf45b5d95f2d70cebea587ac806a 0 {5fddd98957c8a54a4d436dfe1da9d87f21a1b97b} (*) {'user': 'test'} (glob)
176 32af7686d403cf45b5d95f2d70cebea587ac806a 0 {5fddd98957c8a54a4d436dfe1da9d87f21a1b97b} (*) {'user': 'test'} (glob)
177
177
178
178
179 More complex case where part of the rebase set was already rebased
179 More complex case where part of the rebase set was already rebased
180
180
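Before re-running a rebase over a set that is partly rebased already, the members that are already obsolete can be listed with stock revsets; a minimal sketch, meant to be run after the first of the two rebase calls below:

  $ hg log --hidden -r 'obsolete() and desc(D)'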
181 $ hg rebase --rev 'desc(D)' --dest 'desc(H)'
181 $ hg rebase --rev 'desc(D)' --dest 'desc(H)'
182 rebasing 9:08483444fef9 "D"
182 rebasing 9:08483444fef9 "D"
183 $ hg debugobsolete
183 $ hg debugobsolete
184 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 {cd010b8cd998f3981a5a8115f94f8da4ab506089} (*) {'user': 'test'} (glob)
184 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 {cd010b8cd998f3981a5a8115f94f8da4ab506089} (*) {'user': 'test'} (glob)
185 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 5ae4c968c6aca831df823664e706c9d4aa34473d 0 (*) {'user': 'test'} (glob)
185 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 5ae4c968c6aca831df823664e706c9d4aa34473d 0 (*) {'user': 'test'} (glob)
186 32af7686d403cf45b5d95f2d70cebea587ac806a 0 {5fddd98957c8a54a4d436dfe1da9d87f21a1b97b} (*) {'user': 'test'} (glob)
186 32af7686d403cf45b5d95f2d70cebea587ac806a 0 {5fddd98957c8a54a4d436dfe1da9d87f21a1b97b} (*) {'user': 'test'} (glob)
187 08483444fef91d6224f6655ee586a65d263ad34c 4596109a6a4328c398bde3a4a3b6737cfade3003 0 (*) {'user': 'test'} (glob)
187 08483444fef91d6224f6655ee586a65d263ad34c 4596109a6a4328c398bde3a4a3b6737cfade3003 0 (*) {'user': 'test'} (glob)
188 $ hg log -G
188 $ hg log -G
189 @ 11:4596109a6a43 D
189 @ 11:4596109a6a43 D
190 |
190 |
191 | o 10:5ae4c968c6ac C
191 | o 10:5ae4c968c6ac C
192 | |
192 | |
193 | x 9:08483444fef9 D
193 | x 9:08483444fef9 D
194 | |
194 | |
195 | o 8:8877864f1edb B
195 | o 8:8877864f1edb B
196 | |
196 | |
197 o | 7:02de42196ebe H
197 o | 7:02de42196ebe H
198 | |
198 | |
199 | o 6:eea13746799a G
199 | o 6:eea13746799a G
200 |/|
200 |/|
201 o | 5:24b6387c8c8c F
201 o | 5:24b6387c8c8c F
202 | |
202 | |
203 | o 4:9520eea781bc E
203 | o 4:9520eea781bc E
204 |/
204 |/
205 o 0:cd010b8cd998 A
205 o 0:cd010b8cd998 A
206
206
207 $ hg rebase --source 'desc(B)' --dest 'tip' --config experimental.rebaseskipobsolete=True
207 $ hg rebase --source 'desc(B)' --dest 'tip' --config experimental.rebaseskipobsolete=True
208 rebasing 8:8877864f1edb "B"
208 rebasing 8:8877864f1edb "B"
209 note: not rebasing 9:08483444fef9 "D", already in destination as 11:4596109a6a43 "D"
209 note: not rebasing 9:08483444fef9 "D", already in destination as 11:4596109a6a43 "D"
210 rebasing 10:5ae4c968c6ac "C"
210 rebasing 10:5ae4c968c6ac "C"
211 $ hg debugobsolete
211 $ hg debugobsolete
212 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 {cd010b8cd998f3981a5a8115f94f8da4ab506089} (*) {'user': 'test'} (glob)
212 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 {cd010b8cd998f3981a5a8115f94f8da4ab506089} (*) {'user': 'test'} (glob)
213 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 5ae4c968c6aca831df823664e706c9d4aa34473d 0 (*) {'user': 'test'} (glob)
213 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 5ae4c968c6aca831df823664e706c9d4aa34473d 0 (*) {'user': 'test'} (glob)
214 32af7686d403cf45b5d95f2d70cebea587ac806a 0 {5fddd98957c8a54a4d436dfe1da9d87f21a1b97b} (*) {'user': 'test'} (glob)
214 32af7686d403cf45b5d95f2d70cebea587ac806a 0 {5fddd98957c8a54a4d436dfe1da9d87f21a1b97b} (*) {'user': 'test'} (glob)
215 08483444fef91d6224f6655ee586a65d263ad34c 4596109a6a4328c398bde3a4a3b6737cfade3003 0 (*) {'user': 'test'} (glob)
215 08483444fef91d6224f6655ee586a65d263ad34c 4596109a6a4328c398bde3a4a3b6737cfade3003 0 (*) {'user': 'test'} (glob)
216 8877864f1edb05d0e07dc4ba77b67a80a7b86672 462a34d07e599b87ea08676a449373fe4e2e1347 0 (*) {'user': 'test'} (glob)
216 8877864f1edb05d0e07dc4ba77b67a80a7b86672 462a34d07e599b87ea08676a449373fe4e2e1347 0 (*) {'user': 'test'} (glob)
217 5ae4c968c6aca831df823664e706c9d4aa34473d 98f6af4ee9539e14da4465128f894c274900b6e5 0 (*) {'user': 'test'} (glob)
217 5ae4c968c6aca831df823664e706c9d4aa34473d 98f6af4ee9539e14da4465128f894c274900b6e5 0 (*) {'user': 'test'} (glob)
218 $ hg log --rev 'divergent()'
218 $ hg log --rev 'divergent()'
219 $ hg log -G
219 $ hg log -G
220 o 13:98f6af4ee953 C
220 o 13:98f6af4ee953 C
221 |
221 |
222 o 12:462a34d07e59 B
222 o 12:462a34d07e59 B
223 |
223 |
224 @ 11:4596109a6a43 D
224 @ 11:4596109a6a43 D
225 |
225 |
226 o 7:02de42196ebe H
226 o 7:02de42196ebe H
227 |
227 |
228 | o 6:eea13746799a G
228 | o 6:eea13746799a G
229 |/|
229 |/|
230 o | 5:24b6387c8c8c F
230 o | 5:24b6387c8c8c F
231 | |
231 | |
232 | o 4:9520eea781bc E
232 | o 4:9520eea781bc E
233 |/
233 |/
234 o 0:cd010b8cd998 A
234 o 0:cd010b8cd998 A
235
235
236 $ hg log --style default --debug -r 4596109a6a4328c398bde3a4a3b6737cfade3003
236 $ hg log --style default --debug -r 4596109a6a4328c398bde3a4a3b6737cfade3003
237 changeset: 11:4596109a6a4328c398bde3a4a3b6737cfade3003
237 changeset: 11:4596109a6a4328c398bde3a4a3b6737cfade3003
238 phase: draft
238 phase: draft
239 parent: 7:02de42196ebee42ef284b6780a87cdc96e8eaab6
239 parent: 7:02de42196ebee42ef284b6780a87cdc96e8eaab6
240 parent: -1:0000000000000000000000000000000000000000
240 parent: -1:0000000000000000000000000000000000000000
241 manifest: 11:a91006e3a02f1edf631f7018e6e5684cf27dd905
241 manifest: 11:a91006e3a02f1edf631f7018e6e5684cf27dd905
242 user: Nicolas Dumazet <nicdumz.commits@gmail.com>
242 user: Nicolas Dumazet <nicdumz.commits@gmail.com>
243 date: Sat Apr 30 15:24:48 2011 +0200
243 date: Sat Apr 30 15:24:48 2011 +0200
244 files+: D
244 files+: D
245 extra: branch=default
245 extra: branch=default
246 extra: rebase_source=08483444fef91d6224f6655ee586a65d263ad34c
246 extra: rebase_source=08483444fef91d6224f6655ee586a65d263ad34c
247 extra: source=32af7686d403cf45b5d95f2d70cebea587ac806a
247 extra: source=32af7686d403cf45b5d95f2d70cebea587ac806a
248 description:
248 description:
249 D
249 D
250
250
251
251
252 $ hg up -qr 'desc(G)'
252 $ hg up -qr 'desc(G)'
253 $ hg graft 4596109a6a4328c398bde3a4a3b6737cfade3003
253 $ hg graft 4596109a6a4328c398bde3a4a3b6737cfade3003
254 grafting 11:4596109a6a43 "D"
254 grafting 11:4596109a6a43 "D"
255 $ hg up -qr 'desc(E)'
255 $ hg up -qr 'desc(E)'
256 $ hg rebase -s tip -d .
256 $ hg rebase -s tip -d .
257 rebasing 14:9e36056a46e3 "D" (tip)
257 rebasing 14:9e36056a46e3 "D" (tip)
258 $ hg log --style default --debug -r tip
258 $ hg log --style default --debug -r tip
259 changeset: 15:627d4614809036ba22b9e7cb31638ddc06ab99ab
259 changeset: 15:627d4614809036ba22b9e7cb31638ddc06ab99ab
260 tag: tip
260 tag: tip
261 phase: draft
261 phase: draft
262 parent: 4:9520eea781bcca16c1e15acc0ba14335a0e8e5ba
262 parent: 4:9520eea781bcca16c1e15acc0ba14335a0e8e5ba
263 parent: -1:0000000000000000000000000000000000000000
263 parent: -1:0000000000000000000000000000000000000000
264 manifest: 15:648e8ede73ae3e497d093d3a4c8fcc2daa864f42
264 manifest: 15:648e8ede73ae3e497d093d3a4c8fcc2daa864f42
265 user: Nicolas Dumazet <nicdumz.commits@gmail.com>
265 user: Nicolas Dumazet <nicdumz.commits@gmail.com>
266 date: Sat Apr 30 15:24:48 2011 +0200
266 date: Sat Apr 30 15:24:48 2011 +0200
267 files+: D
267 files+: D
268 extra: branch=default
268 extra: branch=default
269 extra: intermediate-source=4596109a6a4328c398bde3a4a3b6737cfade3003
269 extra: intermediate-source=4596109a6a4328c398bde3a4a3b6737cfade3003
270 extra: rebase_source=9e36056a46e37c9776168c7375734eebc70e294f
270 extra: rebase_source=9e36056a46e37c9776168c7375734eebc70e294f
271 extra: source=32af7686d403cf45b5d95f2d70cebea587ac806a
271 extra: source=32af7686d403cf45b5d95f2d70cebea587ac806a
272 description:
272 description:
273 D
273 D
274
274
275
275
276 Start rebase from a commit that is obsolete but not hidden only because it's
276 Start rebase from a commit that is obsolete but not hidden only because it's
277 a working copy parent. We should be moved back to the starting commit as usual
277 a working copy parent. We should be moved back to the starting commit as usual
278 even though it is hidden (until we're moved there).
278 even though it is hidden (until we're moved there).
279
279
280 $ hg --hidden up -qr 'first(hidden())'
280 $ hg --hidden up -qr 'first(hidden())'
281 $ hg rebase --rev 13 --dest 15
281 $ hg rebase --rev 13 --dest 15
282 rebasing 13:98f6af4ee953 "C"
282 rebasing 13:98f6af4ee953 "C"
283 $ hg log -G
283 $ hg log -G
284 o 16:294a2b93eb4d C
284 o 16:294a2b93eb4d C
285 |
285 |
286 o 15:627d46148090 D
286 o 15:627d46148090 D
287 |
287 |
288 | o 12:462a34d07e59 B
288 | o 12:462a34d07e59 B
289 | |
289 | |
290 | o 11:4596109a6a43 D
290 | o 11:4596109a6a43 D
291 | |
291 | |
292 | o 7:02de42196ebe H
292 | o 7:02de42196ebe H
293 | |
293 | |
294 +---o 6:eea13746799a G
294 +---o 6:eea13746799a G
295 | |/
295 | |/
296 | o 5:24b6387c8c8c F
296 | o 5:24b6387c8c8c F
297 | |
297 | |
298 o | 4:9520eea781bc E
298 o | 4:9520eea781bc E
299 |/
299 |/
300 | @ 1:42ccdea3bb16 B
300 | @ 1:42ccdea3bb16 B
301 |/
301 |/
302 o 0:cd010b8cd998 A
302 o 0:cd010b8cd998 A
303
303
304
304
305 $ cd ..
305 $ cd ..
306
306
307 collapse rebase
307 collapse rebase
308 ---------------------------------
308 ---------------------------------
309
309
310 $ hg clone base collapse
310 $ hg clone base collapse
311 updating to branch default
311 updating to branch default
312 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
312 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
313 $ cd collapse
313 $ cd collapse
314 $ hg rebase -s 42ccdea3bb16 -d eea13746799a --collapse
314 $ hg rebase -s 42ccdea3bb16 -d eea13746799a --collapse
315 rebasing 1:42ccdea3bb16 "B"
315 rebasing 1:42ccdea3bb16 "B"
316 rebasing 2:5fddd98957c8 "C"
316 rebasing 2:5fddd98957c8 "C"
317 rebasing 3:32af7686d403 "D"
317 rebasing 3:32af7686d403 "D"
318 $ hg log -G
318 $ hg log -G
319 o 8:4dc2197e807b Collapsed revision
319 o 8:4dc2197e807b Collapsed revision
320 |
320 |
321 | @ 7:02de42196ebe H
321 | @ 7:02de42196ebe H
322 | |
322 | |
323 o | 6:eea13746799a G
323 o | 6:eea13746799a G
324 |\|
324 |\|
325 | o 5:24b6387c8c8c F
325 | o 5:24b6387c8c8c F
326 | |
326 | |
327 o | 4:9520eea781bc E
327 o | 4:9520eea781bc E
328 |/
328 |/
329 o 0:cd010b8cd998 A
329 o 0:cd010b8cd998 A
330
330
331 $ hg log --hidden -G
331 $ hg log --hidden -G
332 o 8:4dc2197e807b Collapsed revision
332 o 8:4dc2197e807b Collapsed revision
333 |
333 |
334 | @ 7:02de42196ebe H
334 | @ 7:02de42196ebe H
335 | |
335 | |
336 o | 6:eea13746799a G
336 o | 6:eea13746799a G
337 |\|
337 |\|
338 | o 5:24b6387c8c8c F
338 | o 5:24b6387c8c8c F
339 | |
339 | |
340 o | 4:9520eea781bc E
340 o | 4:9520eea781bc E
341 |/
341 |/
342 | x 3:32af7686d403 D
342 | x 3:32af7686d403 D
343 | |
343 | |
344 | x 2:5fddd98957c8 C
344 | x 2:5fddd98957c8 C
345 | |
345 | |
346 | x 1:42ccdea3bb16 B
346 | x 1:42ccdea3bb16 B
347 |/
347 |/
348 o 0:cd010b8cd998 A
348 o 0:cd010b8cd998 A
349
349
350 $ hg id --debug -r tip
350 $ hg id --debug -r tip
351 4dc2197e807bae9817f09905b50ab288be2dbbcf tip
351 4dc2197e807bae9817f09905b50ab288be2dbbcf tip
352 $ hg debugobsolete
352 $ hg debugobsolete
353 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 4dc2197e807bae9817f09905b50ab288be2dbbcf 0 (*) {'user': 'test'} (glob)
353 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 4dc2197e807bae9817f09905b50ab288be2dbbcf 0 (*) {'user': 'test'} (glob)
354 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 4dc2197e807bae9817f09905b50ab288be2dbbcf 0 (*) {'user': 'test'} (glob)
354 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 4dc2197e807bae9817f09905b50ab288be2dbbcf 0 (*) {'user': 'test'} (glob)
355 32af7686d403cf45b5d95f2d70cebea587ac806a 4dc2197e807bae9817f09905b50ab288be2dbbcf 0 (*) {'user': 'test'} (glob)
355 32af7686d403cf45b5d95f2d70cebea587ac806a 4dc2197e807bae9817f09905b50ab288be2dbbcf 0 (*) {'user': 'test'} (glob)
356
356
357 $ cd ..
357 $ cd ..
358
358
359 Rebase set has hidden descendants
359 Rebase set has hidden descendants
360 ---------------------------------
360 ---------------------------------
361
361
362 We rebase a changeset which has hidden descendants. The hidden descendants must
362 We rebase a changeset which has hidden descendants. The hidden descendants must
363 not be rebased.
363 not be rebased.
364
364
365 $ hg clone base hidden
365 $ hg clone base hidden
366 updating to branch default
366 updating to branch default
367 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
367 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
368 $ cd hidden
368 $ cd hidden
369 $ hg rebase -s 5fddd98957c8 -d eea13746799a
369 $ hg rebase -s 5fddd98957c8 -d eea13746799a
370 rebasing 2:5fddd98957c8 "C"
370 rebasing 2:5fddd98957c8 "C"
371 rebasing 3:32af7686d403 "D"
371 rebasing 3:32af7686d403 "D"
372 $ hg rebase -s 42ccdea3bb16 -d 02de42196ebe
372 $ hg rebase -s 42ccdea3bb16 -d 02de42196ebe
373 rebasing 1:42ccdea3bb16 "B"
373 rebasing 1:42ccdea3bb16 "B"
374 $ hg log -G
374 $ hg log -G
375 o 10:7c6027df6a99 B
375 o 10:7c6027df6a99 B
376 |
376 |
377 | o 9:cf44d2f5a9f4 D
377 | o 9:cf44d2f5a9f4 D
378 | |
378 | |
379 | o 8:e273c5e7d2d2 C
379 | o 8:e273c5e7d2d2 C
380 | |
380 | |
381 @ | 7:02de42196ebe H
381 @ | 7:02de42196ebe H
382 | |
382 | |
383 | o 6:eea13746799a G
383 | o 6:eea13746799a G
384 |/|
384 |/|
385 o | 5:24b6387c8c8c F
385 o | 5:24b6387c8c8c F
386 | |
386 | |
387 | o 4:9520eea781bc E
387 | o 4:9520eea781bc E
388 |/
388 |/
389 o 0:cd010b8cd998 A
389 o 0:cd010b8cd998 A
390
390
391 $ hg log --hidden -G
391 $ hg log --hidden -G
392 o 10:7c6027df6a99 B
392 o 10:7c6027df6a99 B
393 |
393 |
394 | o 9:cf44d2f5a9f4 D
394 | o 9:cf44d2f5a9f4 D
395 | |
395 | |
396 | o 8:e273c5e7d2d2 C
396 | o 8:e273c5e7d2d2 C
397 | |
397 | |
398 @ | 7:02de42196ebe H
398 @ | 7:02de42196ebe H
399 | |
399 | |
400 | o 6:eea13746799a G
400 | o 6:eea13746799a G
401 |/|
401 |/|
402 o | 5:24b6387c8c8c F
402 o | 5:24b6387c8c8c F
403 | |
403 | |
404 | o 4:9520eea781bc E
404 | o 4:9520eea781bc E
405 |/
405 |/
406 | x 3:32af7686d403 D
406 | x 3:32af7686d403 D
407 | |
407 | |
408 | x 2:5fddd98957c8 C
408 | x 2:5fddd98957c8 C
409 | |
409 | |
410 | x 1:42ccdea3bb16 B
410 | x 1:42ccdea3bb16 B
411 |/
411 |/
412 o 0:cd010b8cd998 A
412 o 0:cd010b8cd998 A
413
413
414 $ hg debugobsolete
414 $ hg debugobsolete
415 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b e273c5e7d2d29df783dce9f9eaa3ac4adc69c15d 0 (*) {'user': 'test'} (glob)
415 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b e273c5e7d2d29df783dce9f9eaa3ac4adc69c15d 0 (*) {'user': 'test'} (glob)
416 32af7686d403cf45b5d95f2d70cebea587ac806a cf44d2f5a9f4297a62be94cbdd3dff7c7dc54258 0 (*) {'user': 'test'} (glob)
416 32af7686d403cf45b5d95f2d70cebea587ac806a cf44d2f5a9f4297a62be94cbdd3dff7c7dc54258 0 (*) {'user': 'test'} (glob)
417 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 7c6027df6a99d93f461868e5433f63bde20b6dfb 0 (*) {'user': 'test'} (glob)
417 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 7c6027df6a99d93f461868e5433f63bde20b6dfb 0 (*) {'user': 'test'} (glob)
418
418
419 Test that rewriting which leaves instability behind is allowed
419 Test that rewriting which leaves instability behind is allowed
420 ---------------------------------------------------------------------
420 ---------------------------------------------------------------------
421
421
422 $ hg log -r 'children(8)'
422 $ hg log -r 'children(8)'
423 9:cf44d2f5a9f4 D (no-eol)
423 9:cf44d2f5a9f4 D (no-eol)
424 $ hg rebase -r 8
424 $ hg rebase -r 8
425 rebasing 8:e273c5e7d2d2 "C"
425 rebasing 8:e273c5e7d2d2 "C"
426 $ hg log -G
426 $ hg log -G
427 o 11:0d8f238b634c C
427 o 11:0d8f238b634c C
428 |
428 |
429 o 10:7c6027df6a99 B
429 o 10:7c6027df6a99 B
430 |
430 |
431 | o 9:cf44d2f5a9f4 D
431 | o 9:cf44d2f5a9f4 D
432 | |
432 | |
433 | x 8:e273c5e7d2d2 C
433 | x 8:e273c5e7d2d2 C
434 | |
434 | |
435 @ | 7:02de42196ebe H
435 @ | 7:02de42196ebe H
436 | |
436 | |
437 | o 6:eea13746799a G
437 | o 6:eea13746799a G
438 |/|
438 |/|
439 o | 5:24b6387c8c8c F
439 o | 5:24b6387c8c8c F
440 | |
440 | |
441 | o 4:9520eea781bc E
441 | o 4:9520eea781bc E
442 |/
442 |/
443 o 0:cd010b8cd998 A
443 o 0:cd010b8cd998 A
444
444
445
445
446
446
447 Test multiple root handling
447 Test multiple root handling
448 ------------------------------------
448 ------------------------------------
449
449
450 $ hg rebase --dest 4 --rev '7+11+9'
450 $ hg rebase --dest 4 --rev '7+11+9'
451 rebasing 9:cf44d2f5a9f4 "D"
451 rebasing 9:cf44d2f5a9f4 "D"
452 rebasing 7:02de42196ebe "H"
452 rebasing 7:02de42196ebe "H"
453 not rebasing ignored 10:7c6027df6a99 "B"
453 not rebasing ignored 10:7c6027df6a99 "B"
454 rebasing 11:0d8f238b634c "C" (tip)
454 rebasing 11:0d8f238b634c "C" (tip)
455 $ hg log -G
455 $ hg log -G
456 o 14:1e8370e38cca C
456 o 14:1e8370e38cca C
457 |
457 |
458 @ 13:bfe264faf697 H
458 @ 13:bfe264faf697 H
459 |
459 |
460 | o 12:102b4c1d889b D
460 | o 12:102b4c1d889b D
461 |/
461 |/
462 | o 10:7c6027df6a99 B
462 | o 10:7c6027df6a99 B
463 | |
463 | |
464 | x 7:02de42196ebe H
464 | x 7:02de42196ebe H
465 | |
465 | |
466 +---o 6:eea13746799a G
466 +---o 6:eea13746799a G
467 | |/
467 | |/
468 | o 5:24b6387c8c8c F
468 | o 5:24b6387c8c8c F
469 | |
469 | |
470 o | 4:9520eea781bc E
470 o | 4:9520eea781bc E
471 |/
471 |/
472 o 0:cd010b8cd998 A
472 o 0:cd010b8cd998 A
473
473
474 $ cd ..
474 $ cd ..
475
475
476 Detach both parents
476 Detach both parents
477
477
478 $ hg init double-detach
478 $ hg init double-detach
479 $ cd double-detach
479 $ cd double-detach
480
480
481 $ hg debugdrawdag <<EOF
481 $ hg debugdrawdag <<EOF
482 > F
482 > F
483 > /|
483 > /|
484 > C E
484 > C E
485 > | |
485 > | |
486 > B D G
486 > B D G
487 > \|/
487 > \|/
488 > A
488 > A
489 > EOF
489 > EOF
490
490
491 BROKEN: This raises an exception
491 BROKEN: This raises an exception
492 $ hg rebase -d G -r 'B + D + F' 2>&1 | grep '^AssertionError'
492 $ hg rebase -d G -r 'B + D + F' 2>&1 | grep '^AssertionError'
493 AssertionError: no base found to rebase on (defineparents called wrong)
493 AssertionError: no base found to rebase on (defineparents called wrong)
494
494
495 $ cd ..
495 $ cd ..
496
496
497 Test rebase dropping a merge
497 Test rebase dropping a merge
498
498
499 (setup)
499 (setup)
500
500
501 $ hg init dropmerge
501 $ hg init dropmerge
502 $ cd dropmerge
502 $ cd dropmerge
503 $ hg unbundle "$TESTDIR/bundles/rebase.hg"
503 $ hg unbundle "$TESTDIR/bundles/rebase.hg"
504 adding changesets
504 adding changesets
505 adding manifests
505 adding manifests
506 adding file changes
506 adding file changes
507 added 8 changesets with 7 changes to 7 files (+2 heads)
507 added 8 changesets with 7 changes to 7 files (+2 heads)
508 (run 'hg heads' to see heads, 'hg merge' to merge)
508 (run 'hg heads' to see heads, 'hg merge' to merge)
509 $ hg up 3
509 $ hg up 3
510 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
510 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
511 $ hg merge 7
511 $ hg merge 7
512 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
512 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
513 (branch merge, don't forget to commit)
513 (branch merge, don't forget to commit)
514 $ hg ci -m 'M'
514 $ hg ci -m 'M'
515 $ echo I > I
515 $ echo I > I
516 $ hg add I
516 $ hg add I
517 $ hg ci -m I
517 $ hg ci -m I
518 $ hg log -G
518 $ hg log -G
519 @ 9:4bde274eefcf I
519 @ 9:4bde274eefcf I
520 |
520 |
521 o 8:53a6a128b2b7 M
521 o 8:53a6a128b2b7 M
522 |\
522 |\
523 | o 7:02de42196ebe H
523 | o 7:02de42196ebe H
524 | |
524 | |
525 | | o 6:eea13746799a G
525 | | o 6:eea13746799a G
526 | |/|
526 | |/|
527 | o | 5:24b6387c8c8c F
527 | o | 5:24b6387c8c8c F
528 | | |
528 | | |
529 | | o 4:9520eea781bc E
529 | | o 4:9520eea781bc E
530 | |/
530 | |/
531 o | 3:32af7686d403 D
531 o | 3:32af7686d403 D
532 | |
532 | |
533 o | 2:5fddd98957c8 C
533 o | 2:5fddd98957c8 C
534 | |
534 | |
535 o | 1:42ccdea3bb16 B
535 o | 1:42ccdea3bb16 B
536 |/
536 |/
537 o 0:cd010b8cd998 A
537 o 0:cd010b8cd998 A
538
538
539 (actual test)
539 (actual test)
540
540
541 $ hg rebase --dest 6 --rev '((desc(H) + desc(D))::) - desc(M)'
541 $ hg rebase --dest 6 --rev '((desc(H) + desc(D))::) - desc(M)'
542 rebasing 3:32af7686d403 "D"
542 rebasing 3:32af7686d403 "D"
543 rebasing 7:02de42196ebe "H"
543 rebasing 7:02de42196ebe "H"
544 not rebasing ignored 8:53a6a128b2b7 "M"
544 not rebasing ignored 8:53a6a128b2b7 "M"
545 rebasing 9:4bde274eefcf "I" (tip)
545 rebasing 9:4bde274eefcf "I" (tip)
546 $ hg log -G
546 $ hg log -G
547 @ 12:acd174b7ab39 I
547 @ 12:acd174b7ab39 I
548 |
548 |
549 o 11:6c11a6218c97 H
549 o 11:6c11a6218c97 H
550 |
550 |
551 | o 10:b5313c85b22e D
551 | o 10:b5313c85b22e D
552 |/
552 |/
553 | o 8:53a6a128b2b7 M
553 | o 8:53a6a128b2b7 M
554 | |\
554 | |\
555 | | x 7:02de42196ebe H
555 | | x 7:02de42196ebe H
556 | | |
556 | | |
557 o---+ 6:eea13746799a G
557 o---+ 6:eea13746799a G
558 | | |
558 | | |
559 | | o 5:24b6387c8c8c F
559 | | o 5:24b6387c8c8c F
560 | | |
560 | | |
561 o---+ 4:9520eea781bc E
561 o---+ 4:9520eea781bc E
562 / /
562 / /
563 x | 3:32af7686d403 D
563 x | 3:32af7686d403 D
564 | |
564 | |
565 o | 2:5fddd98957c8 C
565 o | 2:5fddd98957c8 C
566 | |
566 | |
567 o | 1:42ccdea3bb16 B
567 o | 1:42ccdea3bb16 B
568 |/
568 |/
569 o 0:cd010b8cd998 A
569 o 0:cd010b8cd998 A
570
570
571
571
572 Test hidden changesets in the rebase set (issue4504)
572 Test hidden changesets in the rebase set (issue4504)
573
573
574 $ hg up --hidden 9
574 $ hg up --hidden 9
575 3 files updated, 0 files merged, 1 files removed, 0 files unresolved
575 3 files updated, 0 files merged, 1 files removed, 0 files unresolved
576 $ echo J > J
576 $ echo J > J
577 $ hg add J
577 $ hg add J
578 $ hg commit -m J
578 $ hg commit -m J
579 $ hg debugobsolete `hg log --rev . -T '{node}'`
579 $ hg debugobsolete `hg log --rev . -T '{node}'`
580 obsoleted 1 changesets
580 obsoleted 1 changesets
581
581
582 $ hg rebase --rev .~1::. --dest 'max(desc(D))' --traceback --config experimental.rebaseskipobsolete=off
582 $ hg rebase --rev .~1::. --dest 'max(desc(D))' --traceback --config experimental.rebaseskipobsolete=off
583 rebasing 9:4bde274eefcf "I"
583 rebasing 9:4bde274eefcf "I"
584 rebasing 13:06edfc82198f "J" (tip)
584 rebasing 13:06edfc82198f "J" (tip)
585 $ hg log -G
585 $ hg log -G
586 @ 15:5ae8a643467b J
586 @ 15:5ae8a643467b J
587 |
587 |
588 o 14:9ad579b4a5de I
588 o 14:9ad579b4a5de I
589 |
589 |
590 | o 12:acd174b7ab39 I
590 | o 12:acd174b7ab39 I
591 | |
591 | |
592 | o 11:6c11a6218c97 H
592 | o 11:6c11a6218c97 H
593 | |
593 | |
594 o | 10:b5313c85b22e D
594 o | 10:b5313c85b22e D
595 |/
595 |/
596 | o 8:53a6a128b2b7 M
596 | o 8:53a6a128b2b7 M
597 | |\
597 | |\
598 | | x 7:02de42196ebe H
598 | | x 7:02de42196ebe H
599 | | |
599 | | |
600 o---+ 6:eea13746799a G
600 o---+ 6:eea13746799a G
601 | | |
601 | | |
602 | | o 5:24b6387c8c8c F
602 | | o 5:24b6387c8c8c F
603 | | |
603 | | |
604 o---+ 4:9520eea781bc E
604 o---+ 4:9520eea781bc E
605 / /
605 / /
606 x | 3:32af7686d403 D
606 x | 3:32af7686d403 D
607 | |
607 | |
608 o | 2:5fddd98957c8 C
608 o | 2:5fddd98957c8 C
609 | |
609 | |
610 o | 1:42ccdea3bb16 B
610 o | 1:42ccdea3bb16 B
611 |/
611 |/
612 o 0:cd010b8cd998 A
612 o 0:cd010b8cd998 A
613
613
614 $ hg up 14 -C
614 $ hg up 14 -C
615 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
615 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
616 $ echo "K" > K
616 $ echo "K" > K
617 $ hg add K
617 $ hg add K
618 $ hg commit --amend -m "K"
618 $ hg commit --amend -m "K"
619 $ echo "L" > L
619 $ echo "L" > L
620 $ hg add L
620 $ hg add L
621 $ hg commit -m "L"
621 $ hg commit -m "L"
622 $ hg up '.^'
622 $ hg up '.^'
623 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
623 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
624 $ echo "M" > M
624 $ echo "M" > M
625 $ hg add M
625 $ hg add M
626 $ hg commit --amend -m "M"
626 $ hg commit --amend -m "M"
627 $ hg log -G
627 $ hg log -G
628 @ 20:bfaedf8eb73b M
628 @ 20:bfaedf8eb73b M
629 |
629 |
630 | o 18:97219452e4bd L
630 | o 18:97219452e4bd L
631 | |
631 | |
632 | x 17:fc37a630c901 K
632 | x 17:fc37a630c901 K
633 |/
633 |/
634 | o 15:5ae8a643467b J
634 | o 15:5ae8a643467b J
635 | |
635 | |
636 | x 14:9ad579b4a5de I
636 | x 14:9ad579b4a5de I
637 |/
637 |/
638 | o 12:acd174b7ab39 I
638 | o 12:acd174b7ab39 I
639 | |
639 | |
640 | o 11:6c11a6218c97 H
640 | o 11:6c11a6218c97 H
641 | |
641 | |
642 o | 10:b5313c85b22e D
642 o | 10:b5313c85b22e D
643 |/
643 |/
644 | o 8:53a6a128b2b7 M
644 | o 8:53a6a128b2b7 M
645 | |\
645 | |\
646 | | x 7:02de42196ebe H
646 | | x 7:02de42196ebe H
647 | | |
647 | | |
648 o---+ 6:eea13746799a G
648 o---+ 6:eea13746799a G
649 | | |
649 | | |
650 | | o 5:24b6387c8c8c F
650 | | o 5:24b6387c8c8c F
651 | | |
651 | | |
652 o---+ 4:9520eea781bc E
652 o---+ 4:9520eea781bc E
653 / /
653 / /
654 x | 3:32af7686d403 D
654 x | 3:32af7686d403 D
655 | |
655 | |
656 o | 2:5fddd98957c8 C
656 o | 2:5fddd98957c8 C
657 | |
657 | |
658 o | 1:42ccdea3bb16 B
658 o | 1:42ccdea3bb16 B
659 |/
659 |/
660 o 0:cd010b8cd998 A
660 o 0:cd010b8cd998 A
661
661
662 $ hg rebase -s 14 -d 18 --config experimental.rebaseskipobsolete=True
662 $ hg rebase -s 14 -d 18 --config experimental.rebaseskipobsolete=True
663 note: not rebasing 14:9ad579b4a5de "I", already in destination as 17:fc37a630c901 "K"
663 note: not rebasing 14:9ad579b4a5de "I", already in destination as 17:fc37a630c901 "K"
664 rebasing 15:5ae8a643467b "J"
664 rebasing 15:5ae8a643467b "J"
665
665
666 $ cd ..
666 $ cd ..
667
667
668 Skip obsolete changeset even with multiple hops
668 Skip obsolete changeset even with multiple hops
669 -----------------------------------------------
669 -----------------------------------------------
670
670
671 setup
671 setup
672
672
673 $ hg init obsskip
673 $ hg init obsskip
674 $ cd obsskip
674 $ cd obsskip
675 $ cat << EOF >> .hg/hgrc
675 $ cat << EOF >> .hg/hgrc
676 > [experimental]
676 > [experimental]
677 > rebaseskipobsolete = True
677 > rebaseskipobsolete = True
678 > [extensions]
678 > [extensions]
679 > strip =
679 > strip =
680 > EOF
680 > EOF
681 $ echo A > A
681 $ echo A > A
682 $ hg add A
682 $ hg add A
683 $ hg commit -m A
683 $ hg commit -m A
684 $ echo B > B
684 $ echo B > B
685 $ hg add B
685 $ hg add B
686 $ hg commit -m B0
686 $ hg commit -m B0
687 $ hg commit --amend -m B1
687 $ hg commit --amend -m B1
688 $ hg commit --amend -m B2
688 $ hg commit --amend -m B2
689 $ hg up --hidden 'desc(B0)'
689 $ hg up --hidden 'desc(B0)'
690 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
690 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
691 $ echo C > C
691 $ echo C > C
692 $ hg add C
692 $ hg add C
693 $ hg commit -m C
693 $ hg commit -m C
694
694
695 Rebase finds its way through a chain of markers
695 Rebase finds its way through a chain of markers
696
696
697 $ hg rebase -d 'desc(B2)'
697 $ hg rebase -d 'desc(B2)'
698 note: not rebasing 1:a8b11f55fb19 "B0", already in destination as 3:261e70097290 "B2"
698 note: not rebasing 1:a8b11f55fb19 "B0", already in destination as 3:261e70097290 "B2"
699 rebasing 4:212cb178bcbb "C" (tip)
699 rebasing 4:212cb178bcbb "C" (tip)
700
700
701 Even when the chain includes a missing node
701 Even when the chain includes a missing node
702
702
703 $ hg up --hidden 'desc(B0)'
703 $ hg up --hidden 'desc(B0)'
704 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
704 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
705 $ echo D > D
705 $ echo D > D
706 $ hg add D
706 $ hg add D
707 $ hg commit -m D
707 $ hg commit -m D
708 $ hg --hidden strip -r 'desc(B1)'
708 $ hg --hidden strip -r 'desc(B1)'
709 saved backup bundle to $TESTTMP/obsskip/.hg/strip-backup/86f6414ccda7-b1c452ee-backup.hg (glob)
709 saved backup bundle to $TESTTMP/obsskip/.hg/strip-backup/86f6414ccda7-b1c452ee-backup.hg (glob)
710
710
711 $ hg rebase -d 'desc(B2)'
711 $ hg rebase -d 'desc(B2)'
712 note: not rebasing 1:a8b11f55fb19 "B0", already in destination as 2:261e70097290 "B2"
712 note: not rebasing 1:a8b11f55fb19 "B0", already in destination as 2:261e70097290 "B2"
713 rebasing 5:1a79b7535141 "D" (tip)
713 rebasing 5:1a79b7535141 "D" (tip)
714 $ hg up 4
714 $ hg up 4
715 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
715 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
716 $ echo "O" > O
716 $ echo "O" > O
717 $ hg add O
717 $ hg add O
718 $ hg commit -m O
718 $ hg commit -m O
719 $ echo "P" > P
719 $ echo "P" > P
720 $ hg add P
720 $ hg add P
721 $ hg commit -m P
721 $ hg commit -m P
722 $ hg log -G
722 $ hg log -G
723 @ 8:8d47583e023f P
723 @ 8:8d47583e023f P
724 |
724 |
725 o 7:360bbaa7d3ce O
725 o 7:360bbaa7d3ce O
726 |
726 |
727 | o 6:9c48361117de D
727 | o 6:9c48361117de D
728 | |
728 | |
729 o | 4:ff2c4d47b71d C
729 o | 4:ff2c4d47b71d C
730 |/
730 |/
731 o 2:261e70097290 B2
731 o 2:261e70097290 B2
732 |
732 |
733 o 0:4a2df7238c3b A
733 o 0:4a2df7238c3b A
734
734
735 $ hg debugobsolete `hg log -r 7 -T '{node}\n'` --config experimental.evolution=all
735 $ hg debugobsolete `hg log -r 7 -T '{node}\n'` --config experimental.evolution=all
736 obsoleted 1 changesets
736 obsoleted 1 changesets
737 $ hg rebase -d 6 -r "4::"
737 $ hg rebase -d 6 -r "4::"
738 rebasing 4:ff2c4d47b71d "C"
738 rebasing 4:ff2c4d47b71d "C"
739 note: not rebasing 7:360bbaa7d3ce "O", it has no successor
739 note: not rebasing 7:360bbaa7d3ce "O", it has no successor
740 rebasing 8:8d47583e023f "P" (tip)
740 rebasing 8:8d47583e023f "P" (tip)
741
741
742 If all the changesets to be rebased are obsolete and present in the destination, we
742 If all the changesets to be rebased are obsolete and present in the destination, we
743 should display a friendly error message
743 should display a friendly error message
744
744
745 $ hg log -G
745 $ hg log -G
746 @ 10:121d9e3bc4c6 P
746 @ 10:121d9e3bc4c6 P
747 |
747 |
748 o 9:4be60e099a77 C
748 o 9:4be60e099a77 C
749 |
749 |
750 o 6:9c48361117de D
750 o 6:9c48361117de D
751 |
751 |
752 o 2:261e70097290 B2
752 o 2:261e70097290 B2
753 |
753 |
754 o 0:4a2df7238c3b A
754 o 0:4a2df7238c3b A
755
755
756
756
757 $ hg up 9
757 $ hg up 9
758 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
758 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
759 $ echo "non-relevant change" > nonrelevant
759 $ echo "non-relevant change" > nonrelevant
760 $ hg add nonrelevant
760 $ hg add nonrelevant
761 $ hg commit -m nonrelevant
761 $ hg commit -m nonrelevant
762 created new head
762 created new head
763 $ hg debugobsolete `hg log -r 11 -T '{node}\n'` --config experimental.evolution=all
763 $ hg debugobsolete `hg log -r 11 -T '{node}\n'` --config experimental.evolution=all
764 obsoleted 1 changesets
764 obsoleted 1 changesets
765 $ hg rebase -r . -d 10
765 $ hg rebase -r . -d 10
766 note: not rebasing 11:f44da1f4954c "nonrelevant" (tip), it has no successor
766 note: not rebasing 11:f44da1f4954c "nonrelevant" (tip), it has no successor
767
767
768 If a rebase is going to create divergence, it should abort
768 If a rebase is going to create divergence, it should abort
769
769
770 $ hg log -G
770 $ hg log -G
771 @ 11:f44da1f4954c nonrelevant
771 @ 11:f44da1f4954c nonrelevant
772 |
772 |
773 | o 10:121d9e3bc4c6 P
773 | o 10:121d9e3bc4c6 P
774 |/
774 |/
775 o 9:4be60e099a77 C
775 o 9:4be60e099a77 C
776 |
776 |
777 o 6:9c48361117de D
777 o 6:9c48361117de D
778 |
778 |
779 o 2:261e70097290 B2
779 o 2:261e70097290 B2
780 |
780 |
781 o 0:4a2df7238c3b A
781 o 0:4a2df7238c3b A
782
782
783
783
784 $ hg up 9
784 $ hg up 9
785 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
785 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
786 $ echo "john" > doe
786 $ echo "john" > doe
787 $ hg add doe
787 $ hg add doe
788 $ hg commit -m "john doe"
788 $ hg commit -m "john doe"
789 created new head
789 created new head
790 $ hg up 10
790 $ hg up 10
791 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
791 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
792 $ echo "foo" > bar
792 $ echo "foo" > bar
793 $ hg add bar
793 $ hg add bar
794 $ hg commit --amend -m "10'"
794 $ hg commit --amend -m "10'"
795 $ hg up 10 --hidden
795 $ hg up 10 --hidden
796 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
796 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
797 $ echo "bar" > foo
797 $ echo "bar" > foo
798 $ hg add foo
798 $ hg add foo
799 $ hg commit -m "bar foo"
799 $ hg commit -m "bar foo"
800 $ hg log -G
800 $ hg log -G
801 @ 15:73568ab6879d bar foo
801 @ 15:73568ab6879d bar foo
802 |
802 |
803 | o 14:77d874d096a2 10'
803 | o 14:77d874d096a2 10'
804 | |
804 | |
805 | | o 12:3eb461388009 john doe
805 | | o 12:3eb461388009 john doe
806 | |/
806 | |/
807 x | 10:121d9e3bc4c6 P
807 x | 10:121d9e3bc4c6 P
808 |/
808 |/
809 o 9:4be60e099a77 C
809 o 9:4be60e099a77 C
810 |
810 |
811 o 6:9c48361117de D
811 o 6:9c48361117de D
812 |
812 |
813 o 2:261e70097290 B2
813 o 2:261e70097290 B2
814 |
814 |
815 o 0:4a2df7238c3b A
815 o 0:4a2df7238c3b A
816
816
817 $ hg summary
817 $ hg summary
818 parent: 15:73568ab6879d tip (orphan)
818 parent: 15:73568ab6879d tip (orphan)
819 bar foo
819 bar foo
820 branch: default
820 branch: default
821 commit: (clean)
821 commit: (clean)
822 update: 2 new changesets, 3 branch heads (merge)
822 update: 2 new changesets, 3 branch heads (merge)
823 phases: 8 draft
823 phases: 8 draft
824 orphan: 1 changesets
824 orphan: 1 changesets
825 $ hg rebase -s 10 -d 12
825 $ hg rebase -s 10 -d 12
826 abort: this rebase will cause divergences from: 121d9e3bc4c6
826 abort: this rebase will cause divergences from: 121d9e3bc4c6
827 (to force the rebase please set experimental.allowdivergence=True)
827 (to force the rebase please set experimental.allowdivergence=True)
828 [255]
828 [255]
829 $ hg log -G
829 $ hg log -G
830 @ 15:73568ab6879d bar foo
830 @ 15:73568ab6879d bar foo
831 |
831 |
832 | o 14:77d874d096a2 10'
832 | o 14:77d874d096a2 10'
833 | |
833 | |
834 | | o 12:3eb461388009 john doe
834 | | o 12:3eb461388009 john doe
835 | |/
835 | |/
836 x | 10:121d9e3bc4c6 P
836 x | 10:121d9e3bc4c6 P
837 |/
837 |/
838 o 9:4be60e099a77 C
838 o 9:4be60e099a77 C
839 |
839 |
840 o 6:9c48361117de D
840 o 6:9c48361117de D
841 |
841 |
842 o 2:261e70097290 B2
842 o 2:261e70097290 B2
843 |
843 |
844 o 0:4a2df7238c3b A
844 o 0:4a2df7238c3b A
845
845
846 With experimental.allowdivergence=True, rebase can create divergence
846 With experimental.allowdivergence=True, rebase can create divergence
847
847
848 $ hg rebase -s 10 -d 12 --config experimental.allowdivergence=True
848 $ hg rebase -s 10 -d 12 --config experimental.allowdivergence=True
849 rebasing 10:121d9e3bc4c6 "P"
849 rebasing 10:121d9e3bc4c6 "P"
850 rebasing 15:73568ab6879d "bar foo" (tip)
850 rebasing 15:73568ab6879d "bar foo" (tip)
851 $ hg summary
851 $ hg summary
852 parent: 17:61bd55f69bc4 tip
852 parent: 17:61bd55f69bc4 tip
853 bar foo
853 bar foo
854 branch: default
854 branch: default
855 commit: (clean)
855 commit: (clean)
856 update: 1 new changesets, 2 branch heads (merge)
856 update: 1 new changesets, 2 branch heads (merge)
857 phases: 8 draft
857 phases: 8 draft
858 divergent: 2 changesets
858 content-divergent: 2 changesets
859
859
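As an aside (not part of the original test), the hint above points at the
experimental.allowdivergence option. A minimal sketch, assuming the same
[experimental] hgrc section used elsewhere in this test, of enabling it
persistently for a repository instead of passing --config on each run:

  $ cat >> .hg/hgrc << EOF
  > [experimental]
  > allowdivergence = True
  > EOF

With that in place, the --config experimental.allowdivergence=True override
used above would not be needed.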
860 rebase --continue + skipped revs because their successors are in the destination
860 rebase --continue + skipped revs because their successors are in the destination
861 We make a change in trunk and work on conflicting changes to make the rebase abort.
861 We make a change in trunk and work on conflicting changes to make the rebase abort.
862
862
863 $ hg log -G -r 17::
863 $ hg log -G -r 17::
864 @ 17:61bd55f69bc4 bar foo
864 @ 17:61bd55f69bc4 bar foo
865 |
865 |
866 ~
866 ~
867
867
868 Create the two changes in trunk
868 Create the two changes in trunk
869 $ printf "a" > willconflict
869 $ printf "a" > willconflict
870 $ hg add willconflict
870 $ hg add willconflict
871 $ hg commit -m "willconflict first version"
871 $ hg commit -m "willconflict first version"
872
872
873 $ printf "dummy" > C
873 $ printf "dummy" > C
874 $ hg commit -m "dummy change successor"
874 $ hg commit -m "dummy change successor"
875
875
876 Create the changes that we will rebase
876 Create the changes that we will rebase
877 $ hg update -C 17 -q
877 $ hg update -C 17 -q
878 $ printf "b" > willconflict
878 $ printf "b" > willconflict
879 $ hg add willconflict
879 $ hg add willconflict
880 $ hg commit -m "willconflict second version"
880 $ hg commit -m "willconflict second version"
881 created new head
881 created new head
882 $ printf "dummy" > K
882 $ printf "dummy" > K
883 $ hg add K
883 $ hg add K
884 $ hg commit -m "dummy change"
884 $ hg commit -m "dummy change"
885 $ printf "dummy" > L
885 $ printf "dummy" > L
886 $ hg add L
886 $ hg add L
887 $ hg commit -m "dummy change"
887 $ hg commit -m "dummy change"
888 $ hg debugobsolete `hg log -r ".^" -T '{node}'` `hg log -r 19 -T '{node}'` --config experimental.evolution=all
888 $ hg debugobsolete `hg log -r ".^" -T '{node}'` `hg log -r 19 -T '{node}'` --config experimental.evolution=all
889 obsoleted 1 changesets
889 obsoleted 1 changesets
890
890
891 $ hg log -G -r 17::
891 $ hg log -G -r 17::
892 @ 22:7bdc8a87673d dummy change
892 @ 22:7bdc8a87673d dummy change
893 |
893 |
894 x 21:8b31da3c4919 dummy change
894 x 21:8b31da3c4919 dummy change
895 |
895 |
896 o 20:b82fb57ea638 willconflict second version
896 o 20:b82fb57ea638 willconflict second version
897 |
897 |
898 | o 19:601db7a18f51 dummy change successor
898 | o 19:601db7a18f51 dummy change successor
899 | |
899 | |
900 | o 18:357ddf1602d5 willconflict first version
900 | o 18:357ddf1602d5 willconflict first version
901 |/
901 |/
902 o 17:61bd55f69bc4 bar foo
902 o 17:61bd55f69bc4 bar foo
903 |
903 |
904 ~
904 ~
905 $ hg rebase -r ".^^ + .^ + ." -d 19
905 $ hg rebase -r ".^^ + .^ + ." -d 19
906 rebasing 20:b82fb57ea638 "willconflict second version"
906 rebasing 20:b82fb57ea638 "willconflict second version"
907 merging willconflict
907 merging willconflict
908 warning: conflicts while merging willconflict! (edit, then use 'hg resolve --mark')
908 warning: conflicts while merging willconflict! (edit, then use 'hg resolve --mark')
909 unresolved conflicts (see hg resolve, then hg rebase --continue)
909 unresolved conflicts (see hg resolve, then hg rebase --continue)
910 [1]
910 [1]
911
911
912 $ hg resolve --mark willconflict
912 $ hg resolve --mark willconflict
913 (no more unresolved files)
913 (no more unresolved files)
914 continue: hg rebase --continue
914 continue: hg rebase --continue
915 $ hg rebase --continue
915 $ hg rebase --continue
916 rebasing 20:b82fb57ea638 "willconflict second version"
916 rebasing 20:b82fb57ea638 "willconflict second version"
917 note: not rebasing 21:8b31da3c4919 "dummy change", already in destination as 19:601db7a18f51 "dummy change successor"
917 note: not rebasing 21:8b31da3c4919 "dummy change", already in destination as 19:601db7a18f51 "dummy change successor"
918 rebasing 22:7bdc8a87673d "dummy change" (tip)
918 rebasing 22:7bdc8a87673d "dummy change" (tip)
919 $ cd ..
919 $ cd ..
920
920
921 Rebase merge where successor of one parent is equal to destination (issue5198)
921 Rebase merge where successor of one parent is equal to destination (issue5198)
922
922
923 $ hg init p1-succ-is-dest
923 $ hg init p1-succ-is-dest
924 $ cd p1-succ-is-dest
924 $ cd p1-succ-is-dest
925
925
926 $ hg debugdrawdag <<EOF
926 $ hg debugdrawdag <<EOF
927 > F
927 > F
928 > /|
928 > /|
929 > E D B # replace: D -> B
929 > E D B # replace: D -> B
930 > \|/
930 > \|/
931 > A
931 > A
932 > EOF
932 > EOF
933
933
934 $ hg rebase -d B -s D
934 $ hg rebase -d B -s D
935 note: not rebasing 2:b18e25de2cf5 "D" (D), already in destination as 1:112478962961 "B"
935 note: not rebasing 2:b18e25de2cf5 "D" (D), already in destination as 1:112478962961 "B"
936 rebasing 4:66f1a38021c9 "F" (F tip)
936 rebasing 4:66f1a38021c9 "F" (F tip)
937 $ hg log -G
937 $ hg log -G
938 o 5:50e9d60b99c6 F
938 o 5:50e9d60b99c6 F
939 |\
939 |\
940 | | x 4:66f1a38021c9 F
940 | | x 4:66f1a38021c9 F
941 | |/|
941 | |/|
942 | o | 3:7fb047a69f22 E
942 | o | 3:7fb047a69f22 E
943 | | |
943 | | |
944 | | x 2:b18e25de2cf5 D
944 | | x 2:b18e25de2cf5 D
945 | |/
945 | |/
946 o | 1:112478962961 B
946 o | 1:112478962961 B
947 |/
947 |/
948 o 0:426bada5c675 A
948 o 0:426bada5c675 A
949
949
950 $ cd ..
950 $ cd ..
951
951
952 Rebase merge where successor of other parent is equal to destination
952 Rebase merge where successor of other parent is equal to destination
953
953
954 $ hg init p2-succ-is-dest
954 $ hg init p2-succ-is-dest
955 $ cd p2-succ-is-dest
955 $ cd p2-succ-is-dest
956
956
957 $ hg debugdrawdag <<EOF
957 $ hg debugdrawdag <<EOF
958 > F
958 > F
959 > /|
959 > /|
960 > E D B # replace: E -> B
960 > E D B # replace: E -> B
961 > \|/
961 > \|/
962 > A
962 > A
963 > EOF
963 > EOF
964
964
965 BROKEN: Raises an exception
965 BROKEN: Raises an exception
966 $ hg rebase -d B -s E 2>&1 | grep AssertionError:
966 $ hg rebase -d B -s E 2>&1 | grep AssertionError:
967 AssertionError: no base found to rebase on (defineparents called wrong)
967 AssertionError: no base found to rebase on (defineparents called wrong)
968 $ hg log -G
968 $ hg log -G
969 o 4:66f1a38021c9 F
969 o 4:66f1a38021c9 F
970 |\
970 |\
971 | x 3:7fb047a69f22 E
971 | x 3:7fb047a69f22 E
972 | |
972 | |
973 o | 2:b18e25de2cf5 D
973 o | 2:b18e25de2cf5 D
974 |/
974 |/
975 | o 1:112478962961 B
975 | o 1:112478962961 B
976 |/
976 |/
977 o 0:426bada5c675 A
977 o 0:426bada5c675 A
978
978
979 $ cd ..
979 $ cd ..
980
980
981 Rebase merge where successor of one parent is ancestor of destination
981 Rebase merge where successor of one parent is ancestor of destination
982
982
983 $ hg init p1-succ-in-dest
983 $ hg init p1-succ-in-dest
984 $ cd p1-succ-in-dest
984 $ cd p1-succ-in-dest
985
985
986 $ hg debugdrawdag <<EOF
986 $ hg debugdrawdag <<EOF
987 > F C
987 > F C
988 > /| |
988 > /| |
989 > E D B # replace: D -> B
989 > E D B # replace: D -> B
990 > \|/
990 > \|/
991 > A
991 > A
992 > EOF
992 > EOF
993
993
994 $ hg rebase -d C -s D
994 $ hg rebase -d C -s D
995 note: not rebasing 2:b18e25de2cf5 "D" (D), already in destination as 1:112478962961 "B"
995 note: not rebasing 2:b18e25de2cf5 "D" (D), already in destination as 1:112478962961 "B"
996 rebasing 5:66f1a38021c9 "F" (F tip)
996 rebasing 5:66f1a38021c9 "F" (F tip)
997 BROKEN: not rebased on top of requested destination (C)
997 BROKEN: not rebased on top of requested destination (C)
998 $ hg log -G
998 $ hg log -G
999 o 6:50e9d60b99c6 F
999 o 6:50e9d60b99c6 F
1000 |\
1000 |\
1001 | | x 5:66f1a38021c9 F
1001 | | x 5:66f1a38021c9 F
1002 | |/|
1002 | |/|
1003 +-----o 4:26805aba1e60 C
1003 +-----o 4:26805aba1e60 C
1004 | | |
1004 | | |
1005 | o | 3:7fb047a69f22 E
1005 | o | 3:7fb047a69f22 E
1006 | | |
1006 | | |
1007 | | x 2:b18e25de2cf5 D
1007 | | x 2:b18e25de2cf5 D
1008 | |/
1008 | |/
1009 o | 1:112478962961 B
1009 o | 1:112478962961 B
1010 |/
1010 |/
1011 o 0:426bada5c675 A
1011 o 0:426bada5c675 A
1012
1012
1013 $ cd ..
1013 $ cd ..
1014
1014
1015 Rebase merge where successor of other parent is ancestor of destination
1015 Rebase merge where successor of other parent is ancestor of destination
1016
1016
1017 $ hg init p2-succ-in-dest
1017 $ hg init p2-succ-in-dest
1018 $ cd p2-succ-in-dest
1018 $ cd p2-succ-in-dest
1019
1019
1020 $ hg debugdrawdag <<EOF
1020 $ hg debugdrawdag <<EOF
1021 > F C
1021 > F C
1022 > /| |
1022 > /| |
1023 > E D B # replace: E -> B
1023 > E D B # replace: E -> B
1024 > \|/
1024 > \|/
1025 > A
1025 > A
1026 > EOF
1026 > EOF
1027
1027
1028 BROKEN: Raises an exception
1028 BROKEN: Raises an exception
1029 $ hg rebase -d C -s E 2>&1 | grep AssertionError:
1029 $ hg rebase -d C -s E 2>&1 | grep AssertionError:
1030 AssertionError: no base found to rebase on (defineparents called wrong)
1030 AssertionError: no base found to rebase on (defineparents called wrong)
1031 $ hg log -G
1031 $ hg log -G
1032 o 5:66f1a38021c9 F
1032 o 5:66f1a38021c9 F
1033 |\
1033 |\
1034 | | o 4:26805aba1e60 C
1034 | | o 4:26805aba1e60 C
1035 | | |
1035 | | |
1036 | x | 3:7fb047a69f22 E
1036 | x | 3:7fb047a69f22 E
1037 | | |
1037 | | |
1038 o | | 2:b18e25de2cf5 D
1038 o | | 2:b18e25de2cf5 D
1039 |/ /
1039 |/ /
1040 | o 1:112478962961 B
1040 | o 1:112478962961 B
1041 |/
1041 |/
1042 o 0:426bada5c675 A
1042 o 0:426bada5c675 A
1043
1043
1044 $ cd ..
1044 $ cd ..
1045
1045
1046 Rebase merge where successor of one parent is ancestor of destination
1046 Rebase merge where successor of one parent is ancestor of destination
1047
1047
1048 $ hg init p1-succ-in-dest-b
1048 $ hg init p1-succ-in-dest-b
1049 $ cd p1-succ-in-dest-b
1049 $ cd p1-succ-in-dest-b
1050
1050
1051 $ hg debugdrawdag <<EOF
1051 $ hg debugdrawdag <<EOF
1052 > F C
1052 > F C
1053 > /| |
1053 > /| |
1054 > E D B # replace: E -> B
1054 > E D B # replace: E -> B
1055 > \|/
1055 > \|/
1056 > A
1056 > A
1057 > EOF
1057 > EOF
1058
1058
1059 $ hg rebase -d C -b F
1059 $ hg rebase -d C -b F
1060 rebasing 2:b18e25de2cf5 "D" (D)
1060 rebasing 2:b18e25de2cf5 "D" (D)
1061 note: not rebasing 3:7fb047a69f22 "E" (E), already in destination as 1:112478962961 "B"
1061 note: not rebasing 3:7fb047a69f22 "E" (E), already in destination as 1:112478962961 "B"
1062 rebasing 5:66f1a38021c9 "F" (F tip)
1062 rebasing 5:66f1a38021c9 "F" (F tip)
1063 $ hg log -G
1063 $ hg log -G
1064 o 7:9ed45af61fa0 F
1064 o 7:9ed45af61fa0 F
1065 |
1065 |
1066 o 6:8f47515dda15 D
1066 o 6:8f47515dda15 D
1067 |
1067 |
1068 | x 5:66f1a38021c9 F
1068 | x 5:66f1a38021c9 F
1069 | |\
1069 | |\
1070 o | | 4:26805aba1e60 C
1070 o | | 4:26805aba1e60 C
1071 | | |
1071 | | |
1072 | | x 3:7fb047a69f22 E
1072 | | x 3:7fb047a69f22 E
1073 | | |
1073 | | |
1074 | x | 2:b18e25de2cf5 D
1074 | x | 2:b18e25de2cf5 D
1075 | |/
1075 | |/
1076 o / 1:112478962961 B
1076 o / 1:112478962961 B
1077 |/
1077 |/
1078 o 0:426bada5c675 A
1078 o 0:426bada5c675 A
1079
1079
1080 $ cd ..
1080 $ cd ..
1081
1081
1082 Rebase merge where successor of other parent is ancestor of destination
1082 Rebase merge where successor of other parent is ancestor of destination
1083
1083
1084 $ hg init p2-succ-in-dest-b
1084 $ hg init p2-succ-in-dest-b
1085 $ cd p2-succ-in-dest-b
1085 $ cd p2-succ-in-dest-b
1086
1086
1087 $ hg debugdrawdag <<EOF
1087 $ hg debugdrawdag <<EOF
1088 > F C
1088 > F C
1089 > /| |
1089 > /| |
1090 > E D B # replace: D -> B
1090 > E D B # replace: D -> B
1091 > \|/
1091 > \|/
1092 > A
1092 > A
1093 > EOF
1093 > EOF
1094
1094
1095 $ hg rebase -d C -b F
1095 $ hg rebase -d C -b F
1096 note: not rebasing 2:b18e25de2cf5 "D" (D), already in destination as 1:112478962961 "B"
1096 note: not rebasing 2:b18e25de2cf5 "D" (D), already in destination as 1:112478962961 "B"
1097 rebasing 3:7fb047a69f22 "E" (E)
1097 rebasing 3:7fb047a69f22 "E" (E)
1098 rebasing 5:66f1a38021c9 "F" (F tip)
1098 rebasing 5:66f1a38021c9 "F" (F tip)
1099 BROKEN: This should have resulted in a rebased F with one parent, just like in
1099 BROKEN: This should have resulted in a rebased F with one parent, just like in
1100 the test case above
1100 the test case above
1101 $ hg log -G
1101 $ hg log -G
1102 o 7:c1e6f26e339d F
1102 o 7:c1e6f26e339d F
1103 |\
1103 |\
1104 | o 6:533690786a86 E
1104 | o 6:533690786a86 E
1105 |/
1105 |/
1106 | x 5:66f1a38021c9 F
1106 | x 5:66f1a38021c9 F
1107 | |\
1107 | |\
1108 o | | 4:26805aba1e60 C
1108 o | | 4:26805aba1e60 C
1109 | | |
1109 | | |
1110 | | x 3:7fb047a69f22 E
1110 | | x 3:7fb047a69f22 E
1111 | | |
1111 | | |
1112 | x | 2:b18e25de2cf5 D
1112 | x | 2:b18e25de2cf5 D
1113 | |/
1113 | |/
1114 o / 1:112478962961 B
1114 o / 1:112478962961 B
1115 |/
1115 |/
1116 o 0:426bada5c675 A
1116 o 0:426bada5c675 A
1117
1117
1118 $ cd ..
1118 $ cd ..
1119
1119
1120 Test that the bookmark is moved and the working dir is updated when all changesets
1120 Test that the bookmark is moved and the working dir is updated when all changesets
1121 have equivalents in the destination
1121 have equivalents in the destination
1122 $ hg init rbsrepo && cd rbsrepo
1122 $ hg init rbsrepo && cd rbsrepo
1123 $ echo "[experimental]" > .hg/hgrc
1123 $ echo "[experimental]" > .hg/hgrc
1124 $ echo "evolution=all" >> .hg/hgrc
1124 $ echo "evolution=all" >> .hg/hgrc
1125 $ echo "rebaseskipobsolete=on" >> .hg/hgrc
1125 $ echo "rebaseskipobsolete=on" >> .hg/hgrc
1126 $ echo root > root && hg ci -Am root
1126 $ echo root > root && hg ci -Am root
1127 adding root
1127 adding root
1128 $ echo a > a && hg ci -Am a
1128 $ echo a > a && hg ci -Am a
1129 adding a
1129 adding a
1130 $ hg up 0
1130 $ hg up 0
1131 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
1131 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
1132 $ echo b > b && hg ci -Am b
1132 $ echo b > b && hg ci -Am b
1133 adding b
1133 adding b
1134 created new head
1134 created new head
1135 $ hg rebase -r 2 -d 1
1135 $ hg rebase -r 2 -d 1
1136 rebasing 2:1e9a3c00cbe9 "b" (tip)
1136 rebasing 2:1e9a3c00cbe9 "b" (tip)
1137 $ hg log -r . # working dir is at rev 3 (successor of 2)
1137 $ hg log -r . # working dir is at rev 3 (successor of 2)
1138 3:be1832deae9a b (no-eol)
1138 3:be1832deae9a b (no-eol)
1139 $ hg book -r 2 mybook --hidden # rev 2 has a bookmark on it now
1139 $ hg book -r 2 mybook --hidden # rev 2 has a bookmark on it now
1140 $ hg up 2 && hg log -r . # working dir is at rev 2 again
1140 $ hg up 2 && hg log -r . # working dir is at rev 2 again
1141 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
1141 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
1142 2:1e9a3c00cbe9 b (no-eol)
1142 2:1e9a3c00cbe9 b (no-eol)
1143 $ hg rebase -r 2 -d 3 --config experimental.evolution.track-operation=1
1143 $ hg rebase -r 2 -d 3 --config experimental.evolution.track-operation=1
1144 note: not rebasing 2:1e9a3c00cbe9 "b" (mybook), already in destination as 3:be1832deae9a "b"
1144 note: not rebasing 2:1e9a3c00cbe9 "b" (mybook), already in destination as 3:be1832deae9a "b"
1145 Check that working directory was updated to rev 3 although rev 2 was skipped
1145 Check that working directory was updated to rev 3 although rev 2 was skipped
1146 during the rebase operation
1146 during the rebase operation
1147 $ hg log -r .
1147 $ hg log -r .
1148 3:be1832deae9a b (no-eol)
1148 3:be1832deae9a b (no-eol)
1149
1149
1150 Check that bookmark was not moved to rev 3 if rev 2 was skipped during the
1150 Check that bookmark was not moved to rev 3 if rev 2 was skipped during the
1151 rebase operation. This makes sense because if rev 2 has a successor, the
1151 rebase operation. This makes sense because if rev 2 has a successor, the
1152 operation generating that successor (e.g. rebase) should be responsible for
1152 operation generating that successor (e.g. rebase) should be responsible for
1153 moving bookmarks. If the bookmark is on a precursor, like rev 2, that means the
1153 moving bookmarks. If the bookmark is on a precursor, like rev 2, that means the
1154 user manually moved it back. In that case we should not move it again.
1154 user manually moved it back. In that case we should not move it again.
1155 $ hg bookmarks
1155 $ hg bookmarks
1156 mybook 2:1e9a3c00cbe9
1156 mybook 2:1e9a3c00cbe9
1157 $ hg debugobsolete --rev tip
1157 $ hg debugobsolete --rev tip
1158 1e9a3c00cbe90d236ac05ef61efcc5e40b7412bc be1832deae9ac531caa7438b8dcf6055a122cd8e 0 (*) {'user': 'test'} (glob)
1158 1e9a3c00cbe90d236ac05ef61efcc5e40b7412bc be1832deae9ac531caa7438b8dcf6055a122cd8e 0 (*) {'user': 'test'} (glob)
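As a closing aside (not part of the original test, and purely hypothetical): if
the user did want mybook to follow the rewrite onto its successor, a manual,
forced bookmark move would look something like the following; -f is needed
because the bookmark already exists.

  $ hg bookmark -f -r 3 mybook

Rebase deliberately leaves that decision to the user, as explained above.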