##// END OF EJS Templates
dirstate: fix a potential traceback when in `copy` and `rename`...
marmoute -
r51225:9fc0d244 stable
parent child Browse files
Show More
@@ -1,8013 +1,8013 b''
# commands.py - command processing for mercurial
#
# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.


import os
import re
import sys

from .i18n import _
from .node import (
    hex,
    nullid,
    nullrev,
    short,
    wdirrev,
)
from .pycompat import open
from . import (
    archival,
    bookmarks,
    bundle2,
    bundlecaches,
    changegroup,
    cmdutil,
    copies,
    debugcommands as debugcommandsmod,
    destutil,
    discovery,
    encoding,
    error,
    exchange,
    extensions,
    filemerge,
    formatter,
    graphmod,
    grep as grepmod,
    hbisect,
    help,
    hg,
    logcmdutil,
    merge as mergemod,
    mergestate as mergestatemod,
    narrowspec,
    obsolete,
    obsutil,
    patch,
    phases,
    pycompat,
    rcutil,
    registrar,
    requirements,
    revsetlang,
    rewriteutil,
    scmutil,
    server,
    shelve as shelvemod,
    state as statemod,
    streamclone,
    tags as tagsmod,
    ui as uimod,
    util,
    verify as verifymod,
    vfs as vfsmod,
    wireprotoserver,
)
from .utils import (
    dateutil,
    stringutil,
    urlutil,
)

# Single command table shared with the debug commands: start from the
# debug command table and register everything below into the same dict.
table = {}
table.update(debugcommandsmod.command._table)

command = registrar.command(table)
INTENT_READONLY = registrar.INTENT_READONLY

# common command options

globalopts = [
    (
        b'R',
        b'repository',
        b'',
        _(b'repository root directory or name of overlay bundle file'),
        _(b'REPO'),
    ),
    (b'', b'cwd', b'', _(b'change working directory'), _(b'DIR')),
    (
        b'y',
        b'noninteractive',
        None,
        _(
            b'do not prompt, automatically pick the first choice for all prompts'
        ),
    ),
    (b'q', b'quiet', None, _(b'suppress output')),
    (b'v', b'verbose', None, _(b'enable additional output')),
    (
        b'',
        b'color',
        b'',
        # i18n: 'always', 'auto', 'never', and 'debug' are keywords
        # and should not be translated
        _(b"when to colorize (boolean, always, auto, never, or debug)"),
        _(b'TYPE'),
    ),
    (
        b'',
        b'config',
        [],
        _(b'set/override config option (use \'section.name=value\')'),
        _(b'CONFIG'),
    ),
    (b'', b'debug', None, _(b'enable debugging output')),
    (b'', b'debugger', None, _(b'start debugger')),
    (
        b'',
        b'encoding',
        encoding.encoding,
        _(b'set the charset encoding'),
        _(b'ENCODE'),
    ),
    (
        b'',
        b'encodingmode',
        encoding.encodingmode,
        _(b'set the charset encoding mode'),
        _(b'MODE'),
    ),
    (b'', b'traceback', None, _(b'always print a traceback on exception')),
    (b'', b'time', None, _(b'time how long the command takes')),
    (b'', b'profile', None, _(b'print command execution profile')),
    (b'', b'version', None, _(b'output version information and exit')),
    (b'h', b'help', None, _(b'display help and exit')),
    (b'', b'hidden', False, _(b'consider hidden changesets')),
    (
        b'',
        b'pager',
        b'auto',
        _(b"when to paginate (boolean, always, auto, or never)"),
        _(b'TYPE'),
    ),
]

# Re-export the shared option groups defined in cmdutil so the command
# declarations below can compose them by short name.
dryrunopts = cmdutil.dryrunopts
remoteopts = cmdutil.remoteopts
walkopts = cmdutil.walkopts
commitopts = cmdutil.commitopts
commitopts2 = cmdutil.commitopts2
commitopts3 = cmdutil.commitopts3
formatteropts = cmdutil.formatteropts
templateopts = cmdutil.templateopts
logopts = cmdutil.logopts
diffopts = cmdutil.diffopts
diffwsopts = cmdutil.diffwsopts
diffopts2 = cmdutil.diffopts2
mergetoolopts = cmdutil.mergetoolopts
similarityopts = cmdutil.similarityopts
subrepoopts = cmdutil.subrepoopts
debugrevlogopts = cmdutil.debugrevlogopts
166
166
167 # Commands start here, listed alphabetically
167 # Commands start here, listed alphabetically
168
168
169
169
@command(
    b'abort',
    dryrunopts,
    helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
    helpbasic=True,
)
def abort(ui, repo, **opts):
    """abort an unfinished operation (EXPERIMENTAL)

    Aborts a multistep operation like graft, histedit, rebase, merge,
    and unshelve if they are in an unfinished state.

    use --dry-run/-n to dry run the command.
    """
    dryrun = opts.get('dry_run')
    # Whatever multistep operation is currently in progress, if any.
    abortstate = cmdutil.getunfinishedstate(repo)
    if not abortstate:
        raise error.StateError(_(b'no operation in progress'))
    if not abortstate.abortfunc:
        # The operation is in progress but has no abort handler registered.
        raise error.InputError(
            (
                _(b"%s in progress but does not support 'hg abort'")
                % (abortstate._opname)
            ),
            hint=abortstate.hint(),
        )
    if dryrun:
        ui.status(
            _(b'%s in progress, will be aborted\n') % (abortstate._opname)
        )
        return
    return abortstate.abortfunc(ui, repo)
202
202
203
203
@command(
    b'add',
    walkopts + subrepoopts + dryrunopts,
    _(b'[OPTION]... [FILE]...'),
    helpcategory=command.CATEGORY_WORKING_DIRECTORY,
    helpbasic=True,
    inferrepo=True,
)
def add(ui, repo, *pats, **opts):
    """add the specified files on the next commit

    Schedule files to be version controlled and added to the
    repository.

    The files will be added to the repository at the next commit. To
    undo an add before that, see :hg:`forget`.

    If no names are given, add all files to the repository (except
    files matching ``.hgignore``).

    .. container:: verbose

       Examples:

         - New (unknown) files are added
           automatically by :hg:`add`::

             $ ls
             foo.c
             $ hg status
             ? foo.c
             $ hg add
             adding foo.c
             $ hg status
             A foo.c

         - Specific files to be added can be specified::

             $ ls
             bar.c  foo.c
             $ hg status
             ? bar.c
             ? foo.c
             $ hg add bar.c
             $ hg status
             A bar.c
             ? foo.c

    Returns 0 if all files are successfully added.
    """

    # Take the wlock and mark the dirstate as being modified for the
    # duration of the add so concurrent readers see a consistent state.
    with repo.wlock(), repo.dirstate.changing_files(repo):
        m = scmutil.match(repo[None], pats, pycompat.byteskwargs(opts))
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        rejected = cmdutil.add(ui, repo, m, b"", uipathfn, False, **opts)
    return rejected and 1 or 0
260
260
261
261
@command(
    b'addremove',
    similarityopts + subrepoopts + walkopts + dryrunopts,
    _(b'[OPTION]... [FILE]...'),
    helpcategory=command.CATEGORY_WORKING_DIRECTORY,
    inferrepo=True,
)
def addremove(ui, repo, *pats, **opts):
    """add all new files, delete all missing files

    Add all new files and remove all missing files from the
    repository.

    Unless names are given, new files are ignored if they match any of
    the patterns in ``.hgignore``. As with add, these changes take
    effect at the next commit.

    Use the -s/--similarity option to detect renamed files. This
    option takes a percentage between 0 (disabled) and 100 (files must
    be identical) as its parameter. With a parameter greater than 0,
    this compares every removed file with every added file and records
    those similar enough as renames. Detecting renamed files this way
    can be expensive. After using this option, :hg:`status -C` can be
    used to check which files were identified as moved or renamed. If
    not specified, -s/--similarity defaults to 100 and only renames of
    identical files are detected.

    .. container:: verbose

       Examples:

         - A number of files (bar.c and foo.c) are new,
           while foobar.c has been removed (without using :hg:`remove`)
           from the repository::

             $ ls
             bar.c foo.c
             $ hg status
             ! foobar.c
             ? bar.c
             ? foo.c
             $ hg addremove
             adding bar.c
             adding foo.c
             removing foobar.c
             $ hg status
             A bar.c
             A foo.c
             R foobar.c

         - A file foobar.c was moved to foo.c without using :hg:`rename`.
           Afterwards, it was edited slightly::

             $ ls
             foo.c
             $ hg status
             ! foobar.c
             ? foo.c
             $ hg addremove --similarity 90
             removing foobar.c
             adding foo.c
             recording removal of foobar.c as rename to foo.c (94% similar)
             $ hg status -C
             A foo.c
               foobar.c
             R foobar.c

    Returns 0 if all files are successfully added.
    """
    opts = pycompat.byteskwargs(opts)
    # Default to exact-match rename detection when no similarity is given.
    if not opts.get(b'similarity'):
        opts[b'similarity'] = b'100'
    with repo.wlock(), repo.dirstate.changing_files(repo):
        matcher = scmutil.match(repo[None], pats, opts)
        relative = scmutil.anypats(pats, opts)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=relative)
        return scmutil.addremove(repo, matcher, b"", uipathfn, opts)
339
339
340
340
@command(
    b'annotate|blame',
    [
        (b'r', b'rev', b'', _(b'annotate the specified revision'), _(b'REV')),
        (
            b'',
            b'follow',
            None,
            _(b'follow copies/renames and list the filename (DEPRECATED)'),
        ),
        (b'', b'no-follow', None, _(b"don't follow copies and renames")),
        (b'a', b'text', None, _(b'treat all files as text')),
        (b'u', b'user', None, _(b'list the author (long with -v)')),
        (b'f', b'file', None, _(b'list the filename')),
        (b'd', b'date', None, _(b'list the date (short with -q)')),
        (b'n', b'number', None, _(b'list the revision number (default)')),
        (b'c', b'changeset', None, _(b'list the changeset')),
        (
            b'l',
            b'line-number',
            None,
            _(b'show line number at the first appearance'),
        ),
        (
            b'',
            b'skip',
            [],
            _(b'revset to not display (EXPERIMENTAL)'),
            _(b'REV'),
        ),
    ]
    + diffwsopts
    + walkopts
    + formatteropts,
    _(b'[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'),
    helpcategory=command.CATEGORY_FILE_CONTENTS,
    helpbasic=True,
    inferrepo=True,
)
def annotate(ui, repo, *pats, **opts):
    """show changeset information by line for each file

    List changes in files, showing the revision id responsible for
    each line.

    This command is useful for discovering when a change was made and
    by whom.

    If you include --file, --user, or --date, the revision number is
    suppressed unless you also include --number.

    Without the -a/--text option, annotate will avoid processing files
    it detects as binary. With -a, annotate will annotate the file
    anyway, although the results will probably be neither useful
    nor desirable.

    .. container:: verbose

       Template:

       The following keywords are supported in addition to the common template
       keywords and functions. See also :hg:`help templates`.

       :lines:   List of lines with annotation data.
       :path:    String. Repository-absolute path of the specified file.

       And each entry of ``{lines}`` provides the following sub-keywords in
       addition to ``{date}``, ``{node}``, ``{rev}``, ``{user}``, etc.

       :line:    String. Line content.
       :lineno:  Integer. Line number at that revision.
       :path:    String. Repository-absolute path of the file at that revision.

       See :hg:`help templates.operators` for the list expansion syntax.

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)
    if not pats:
        raise error.InputError(
            _(b'at least one filename or pattern is required')
        )

    if opts.get(b'follow'):
        # --follow is deprecated and now just an alias for -f/--file
        # to mimic the behavior of Mercurial before version 1.5
        opts[b'file'] = True

    # With no explicit column selection, default to showing the revision
    # number.
    if (
        not opts.get(b'user')
        and not opts.get(b'changeset')
        and not opts.get(b'date')
        and not opts.get(b'file')
    ):
        opts[b'number'] = True

    linenumber = opts.get(b'line_number') is not None
    if (
        linenumber
        and (not opts.get(b'changeset'))
        and (not opts.get(b'number'))
    ):
        raise error.InputError(_(b'at least one of -n/-c is required for -l'))

    rev = opts.get(b'rev')
    if rev:
        repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
    ctx = logcmdutil.revsingle(repo, rev)

    ui.pager(b'annotate')
    rootfm = ui.formatter(b'annotate', opts)
    if ui.debugflag:
        # Show full node hashes in debug mode.
        shorthex = pycompat.identity
    else:

        def shorthex(h):
            return h[:12]

    if ui.quiet:
        datefunc = dateutil.shortdate
    else:
        datefunc = dateutil.datestr
    if ctx.rev() is None:
        # Annotating the working directory: map the wdir pseudo-rev onto
        # its first parent, with a "+" marker except in changeset mode.
        if opts.get(b'changeset'):
            # omit "+" suffix which is appended to node hex
            def formatrev(rev):
                if rev == wdirrev:
                    return b'%d' % ctx.p1().rev()
                else:
                    return b'%d' % rev

        else:

            def formatrev(rev):
                if rev == wdirrev:
                    return b'%d+' % ctx.p1().rev()
                else:
                    return b'%d ' % rev

        def formathex(h):
            if h == repo.nodeconstants.wdirhex:
                return b'%s+' % shorthex(hex(ctx.p1().node()))
            else:
                return b'%s ' % shorthex(h)

    else:
        formatrev = b'%d'.__mod__
        formathex = shorthex

    # One entry per displayable column: (name, separator, getter, formatter).
    opmap = [
        (b'user', b' ', lambda x: x.fctx.user(), ui.shortuser),
        (b'rev', b' ', lambda x: scmutil.intrev(x.fctx), formatrev),
        (b'node', b' ', lambda x: hex(scmutil.binnode(x.fctx)), formathex),
        (b'date', b' ', lambda x: x.fctx.date(), util.cachefunc(datefunc)),
        (b'path', b' ', lambda x: x.fctx.path(), pycompat.bytestr),
        (b'lineno', b':', lambda x: x.lineno, pycompat.bytestr),
    ]
    # Column name -> option name, where the two differ.
    opnamemap = {
        b'rev': b'number',
        b'node': b'changeset',
        b'path': b'file',
        b'lineno': b'line_number',
    }

    if rootfm.isplain():

        def makefunc(get, fmt):
            return lambda x: fmt(get(x))

    else:

        def makefunc(get, fmt):
            return get

    datahint = rootfm.datahint()
    funcmap = [
        (makefunc(get, fmt), sep)
        for fn, sep, get, fmt in opmap
        if opts.get(opnamemap.get(fn, fn)) or fn in datahint
    ]
    funcmap[0] = (funcmap[0][0], b'')  # no separator in front of first column
    fields = b' '.join(
        fn
        for fn, sep, get, fmt in opmap
        if opts.get(opnamemap.get(fn, fn)) or fn in datahint
    )

    def bad(x, y):
        raise error.InputError(b"%s: %s" % (x, y))

    m = scmutil.match(ctx, pats, opts, badfn=bad)

    follow = not opts.get(b'no_follow')
    diffopts = patch.difffeatureopts(
        ui, opts, section=b'annotate', whitespace=True
    )
    skiprevs = opts.get(b'skip')
    if skiprevs:
        skiprevs = logcmdutil.revrange(repo, skiprevs)

    uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
    for abs in ctx.walk(m):
        fctx = ctx[abs]
        rootfm.startitem()
        rootfm.data(path=abs)
        if not opts.get(b'text') and fctx.isbinary():
            rootfm.plain(_(b"%s: binary file\n") % uipathfn(abs))
            continue

        fm = rootfm.nested(b'lines', tmpl=b'{rev}: {line}')
        lines = fctx.annotate(
            follow=follow, skiprevs=skiprevs, diffopts=diffopts
        )
        if not lines:
            fm.end()
            continue
        formats = []
        pieces = []

        # Compute each selected column for every line, padding plain-text
        # output so the columns line up.
        for f, sep in funcmap:
            l = [f(n) for n in lines]
            if fm.isplain():
                sizes = [encoding.colwidth(x) for x in l]
                ml = max(sizes)
                formats.append([sep + b' ' * (ml - w) + b'%s' for w in sizes])
            else:
                formats.append([b'%s'] * len(l))
            pieces.append(l)

        for f, p, n in zip(zip(*formats), zip(*pieces), lines):
            fm.startitem()
            fm.context(fctx=n.fctx)
            fm.write(fields, b"".join(f), *p)
            if n.skip:
                fmt = b"* %s"
            else:
                fmt = b": %s"
            fm.write(b'line', fmt, n.text)

        # Keep the output newline-terminated even if the file is not.
        if not lines[-1].text.endswith(b'\n'):
            fm.plain(b'\n')
        fm.end()

    rootfm.end()
585
585
586
586
@command(
    b'archive',
    [
        (b'', b'no-decode', None, _(b'do not pass files through decoders')),
        (
            b'p',
            b'prefix',
            b'',
            _(b'directory prefix for files in archive'),
            _(b'PREFIX'),
        ),
        (b'r', b'rev', b'', _(b'revision to distribute'), _(b'REV')),
        (b't', b'type', b'', _(b'type of distribution to create'), _(b'TYPE')),
    ]
    + subrepoopts
    + walkopts,
    _(b'[OPTION]... DEST'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def archive(ui, repo, dest, **opts):
    """create an unversioned archive of a repository revision

    By default, the revision used is the parent of the working
    directory; use -r/--rev to specify a different revision.

    The archive type is automatically detected based on file
    extension (to override, use -t/--type).

    .. container:: verbose

      Examples:

      - create a zip file containing the 1.0 release::

          hg archive -r 1.0 project-1.0.zip

      - create a tarball excluding .hg files::

          hg archive project.tar.gz -X ".hg*"

    Valid types are:

    :``files``: a directory full of files (default)
    :``tar``: tar archive, uncompressed
    :``tbz2``: tar archive, compressed using bzip2
    :``tgz``: tar archive, compressed using gzip
    :``txz``: tar archive, compressed using lzma (only in Python 3)
    :``uzip``: zip archive, uncompressed
    :``zip``: zip archive, compressed using deflate

    The exact name of the destination archive or directory is given
    using a format string; see :hg:`help export` for details.

    Each member added to an archive file has a directory prefix
    prepended. Use -p/--prefix to specify a format string for the
    prefix. The default is the basename of the archive, with suffixes
    removed.

    Returns 0 on success.
    """

    opts = pycompat.byteskwargs(opts)
    rev = opts.get(b'rev')
    if rev:
        # allow hash-like hidden revisions to be resolved without a warning
        repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
    ctx = logcmdutil.revsingle(repo, rev)
    if not ctx:
        raise error.InputError(
            _(b'no working directory: please specify a revision')
        )
    node = ctx.node()
    # expand format-string escapes (e.g. %h) in the destination name
    dest = cmdutil.makefilename(ctx, dest)
    if os.path.realpath(dest) == repo.root:
        raise error.InputError(_(b'repository root cannot be destination'))

    kind = opts.get(b'type') or archival.guesskind(dest) or b'files'
    prefix = opts.get(b'prefix')

    if dest == b'-':
        # b'-' means stream the archive to stdout; a plain-files archive
        # cannot be represented as a single byte stream
        if kind == b'files':
            raise error.InputError(_(b'cannot archive plain files to stdout'))
        dest = cmdutil.makefileobj(ctx, dest)
        if not prefix:
            prefix = os.path.basename(repo.root) + b'-%h'

    prefix = cmdutil.makefilename(ctx, prefix)
    match = scmutil.match(ctx, [], opts)
    archival.archive(
        repo,
        dest,
        node,
        kind,
        not opts.get(b'no_decode'),
        match,
        prefix,
        subrepos=opts.get(b'subrepos'),
    )
684
684
685
685
@command(
    b'backout',
    [
        (
            b'',
            b'merge',
            None,
            _(b'merge with old dirstate parent after backout'),
        ),
        (
            b'',
            b'commit',
            None,
            _(b'commit if no conflicts were encountered (DEPRECATED)'),
        ),
        (b'', b'no-commit', None, _(b'do not commit')),
        (
            b'',
            b'parent',
            b'',
            _(b'parent to choose when backing out merge (DEPRECATED)'),
            _(b'REV'),
        ),
        (b'r', b'rev', b'', _(b'revision to backout'), _(b'REV')),
        (b'e', b'edit', False, _(b'invoke editor on commit messages')),
    ]
    + mergetoolopts
    + walkopts
    + commitopts
    + commitopts2,
    _(b'[OPTION]... [-r] REV'),
    helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
)
def backout(ui, repo, node=None, rev=None, **opts):
    """reverse effect of earlier changeset

    Prepare a new changeset with the effect of REV undone in the
    current working directory. If no conflicts were encountered,
    it will be committed immediately.

    If REV is the parent of the working directory, then this new changeset
    is committed automatically (unless --no-commit is specified).

    .. note::

       :hg:`backout` cannot be used to fix either an unwanted or
       incorrect merge.

    .. container:: verbose

      Examples:

      - Reverse the effect of the parent of the working directory.
        This backout will be committed immediately::

          hg backout -r .

      - Reverse the effect of previous bad revision 23::

          hg backout -r 23

      - Reverse the effect of previous bad revision 23 and
        leave changes uncommitted::

          hg backout -r 23 --no-commit
          hg commit -m "Backout revision 23"

      By default, the pending changeset will have one parent,
      maintaining a linear history. With --merge, the pending
      changeset will instead have two parents: the old parent of the
      working directory and a new child of REV that simply undoes REV.

      Before version 1.7, the behavior without --merge was equivalent
      to specifying --merge followed by :hg:`update --clean .` to
      cancel the merge and leave the child of REV as a head to be
      merged separately.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    See :hg:`help revert` for a way to restore files to the state
    of another revision.

    Returns 0 on success, 1 if nothing to backout or there are unresolved
    files.
    """
    # take both locks up front; the real work happens in _dobackout
    with repo.wlock(), repo.lock():
        return _dobackout(ui, repo, node, rev, **opts)
773
773
774
774
def _dobackout(ui, repo, node=None, rev=None, **opts):
    """Implementation of :hg:`backout`; caller must hold wlock and lock.

    Returns 0 on success, 1 if nothing changed or there are unresolved
    file merges. ``node``/``rev`` both name the revision to back out;
    exactly one must be supplied.
    """
    cmdutil.check_incompatible_arguments(opts, 'no_commit', ['commit', 'merge'])
    opts = pycompat.byteskwargs(opts)

    if rev and node:
        raise error.InputError(_(b"please specify just one revision"))

    if not rev:
        rev = node

    if not rev:
        raise error.InputError(_(b"please specify a revision to backout"))

    date = opts.get(b'date')
    if date:
        opts[b'date'] = dateutil.parsedate(date)

    cmdutil.checkunfinished(repo)
    cmdutil.bailifchanged(repo)
    ctx = logcmdutil.revsingle(repo, rev)
    node = ctx.node()

    op1, op2 = repo.dirstate.parents()
    if not repo.changelog.isancestor(node, op1):
        raise error.InputError(
            _(b'cannot backout change that is not an ancestor')
        )

    p1, p2 = repo.changelog.parents(node)
    if p1 == repo.nullid:
        raise error.InputError(_(b'cannot backout a change with no parents'))
    if p2 != repo.nullid:
        # backing out a merge: --parent picks which side to revert towards
        if not opts.get(b'parent'):
            raise error.InputError(_(b'cannot backout a merge changeset'))
        p = repo.lookup(opts[b'parent'])
        if p not in (p1, p2):
            raise error.InputError(
                _(b'%s is not a parent of %s') % (short(p), short(node))
            )
        parent = p
    else:
        if opts.get(b'parent'):
            raise error.InputError(
                _(b'cannot use --parent on non-merge changeset')
            )
        parent = p1

    # the backout should appear on the same branch
    branch = repo.dirstate.branch()
    bheads = repo.branchheads(branch)
    rctx = scmutil.revsingle(repo, hex(parent))
    if not opts.get(b'merge') and op1 != node:
        with repo.transaction(b"backout"):
            overrides = {(b'ui', b'forcemerge'): opts.get(b'tool', b'')}
            with ui.configoverride(overrides, b'backout'):
                stats = mergemod.back_out(ctx, parent=repo[parent])
                repo.setparents(op1, op2)
        hg._showstats(repo, stats)
        if stats.unresolvedcount:
            repo.ui.status(
                _(b"use 'hg resolve' to retry unresolved file merges\n")
            )
            return 1
    else:
        hg.clean(repo, node, show_stats=False)
        repo.dirstate.setbranch(branch, repo.currenttransaction())
        cmdutil.revert(ui, repo, rctx)

    if opts.get(b'no_commit'):
        msg = _(b"changeset %s backed out, don't forget to commit.\n")
        ui.status(msg % short(node))
        return 0

    def commitfunc(ui, repo, message, match, opts):
        # commit callback: fills in a default message and editor when the
        # user supplied none
        editform = b'backout'
        e = cmdutil.getcommiteditor(
            editform=editform, **pycompat.strkwargs(opts)
        )
        if not message:
            # we don't translate commit messages
            message = b"Backed out changeset %s" % short(node)
            e = cmdutil.getcommiteditor(edit=True, editform=editform)
        return repo.commit(
            message, opts.get(b'user'), opts.get(b'date'), match, editor=e
        )

    # save to detect changes
    tip = repo.changelog.tip()

    newnode = cmdutil.commit(ui, repo, commitfunc, [], opts)
    if not newnode:
        ui.status(_(b"nothing changed\n"))
        return 1
    cmdutil.commitstatus(repo, newnode, branch, bheads, tip)

    def nice(node):
        # "rev:shorthash" label for status messages
        return b'%d:%s' % (repo.changelog.rev(node), short(node))

    ui.status(
        _(b'changeset %s backs out changeset %s\n')
        % (nice(newnode), nice(node))
    )
    if opts.get(b'merge') and op1 != node:
        hg.clean(repo, op1, show_stats=False)
        ui.status(_(b'merging with changeset %s\n') % nice(newnode))
        overrides = {(b'ui', b'forcemerge'): opts.get(b'tool', b'')}
        with ui.configoverride(overrides, b'backout'):
            return hg.merge(repo[b'tip'])
    return 0
884
884
885
885
@command(
    b'bisect',
    [
        (b'r', b'reset', False, _(b'reset bisect state')),
        (b'g', b'good', False, _(b'mark changeset good')),
        (b'b', b'bad', False, _(b'mark changeset bad')),
        (b's', b'skip', False, _(b'skip testing changeset')),
        (b'e', b'extend', False, _(b'extend the bisect range')),
        (
            b'c',
            b'command',
            b'',
            _(b'use command to check changeset state'),
            _(b'CMD'),
        ),
        (b'U', b'noupdate', False, _(b'do not update to target')),
    ],
    _(b"[-gbsr] [-U] [-c CMD] [REV]"),
    helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
)
def bisect(
    ui,
    repo,
    positional_1=None,
    positional_2=None,
    command=None,
    reset=None,
    good=None,
    bad=None,
    skip=None,
    extend=None,
    noupdate=None,
):
    """subdivision search of changesets

    This command helps to find changesets which introduce problems. To
    use, mark the earliest changeset you know exhibits the problem as
    bad, then mark the latest changeset which is free from the problem
    as good. Bisect will update your working directory to a revision
    for testing (unless the -U/--noupdate option is specified). Once
    you have performed tests, mark the working directory as good or
    bad, and bisect will either update to another candidate changeset
    or announce that it has found the bad revision.

    As a shortcut, you can also use the revision argument to mark a
    revision as good or bad without checking it out first.

    If you supply a command, it will be used for automatic bisection.
    The environment variable HG_NODE will contain the ID of the
    changeset being tested. The exit status of the command will be
    used to mark revisions as good or bad: status 0 means good, 125
    means to skip the revision, 127 (command not found) will abort the
    bisection, and any other non-zero exit status means the revision
    is bad.

    .. container:: verbose

      Some examples:

      - start a bisection with known bad revision 34, and good revision 12::

          hg bisect --bad 34
          hg bisect --good 12

      - advance the current bisection by marking current revision as good or
        bad::

          hg bisect --good
          hg bisect --bad

      - mark the current revision, or a known revision, to be skipped (e.g. if
        that revision is not usable because of another issue)::

          hg bisect --skip
          hg bisect --skip 23

      - skip all revisions that do not touch directories ``foo`` or ``bar``::

          hg bisect --skip "!( file('path:foo') & file('path:bar') )"

      - forget the current bisection::

          hg bisect --reset

      - use 'make && make tests' to automatically find the first broken
        revision::

          hg bisect --reset
          hg bisect --bad 34
          hg bisect --good 12
          hg bisect --command "make && make tests"

      - see all changesets whose states are already known in the current
        bisection::

          hg log -r "bisect(pruned)"

      - see the changeset currently being bisected (especially useful
        if running with -U/--noupdate)::

          hg log -r "bisect(current)"

      - see all changesets that took part in the current bisection::

          hg log -r "bisect(range)"

      - you can even get a nice graph::

          hg log --graph -r "bisect(range)"

      See :hg:`help revisions.bisect` for more about the `bisect()` predicate.

    Returns 0 on success.
    """
    rev = []
    # backward compatibility
    if positional_1 in (b"good", b"bad", b"reset", b"init"):
        ui.warn(_(b"(use of 'hg bisect <cmd>' is deprecated)\n"))
        cmd = positional_1
        rev.append(positional_2)
        if cmd == b"good":
            good = True
        elif cmd == b"bad":
            bad = True
        else:
            reset = True
    elif positional_2:
        raise error.InputError(_(b'incompatible arguments'))
    elif positional_1 is not None:
        rev.append(positional_1)

    # the state-changing flags are mutually exclusive
    incompatibles = {
        b'--bad': bad,
        b'--command': bool(command),
        b'--extend': extend,
        b'--good': good,
        b'--reset': reset,
        b'--skip': skip,
    }

    enabled = [x for x in incompatibles if incompatibles[x]]

    if len(enabled) > 1:
        raise error.InputError(
            _(b'%s and %s are incompatible') % tuple(sorted(enabled)[0:2])
        )

    if reset:
        hbisect.resetstate(repo)
        return

    state = hbisect.load_state(repo)

    if rev:
        revs = logcmdutil.revrange(repo, rev)
        goodnodes = state[b'good']
        badnodes = state[b'bad']
        if goodnodes and badnodes:
            # restrict to revisions between the known-good and known-bad sets
            candidates = repo.revs(b'(%ln)::(%ln)', goodnodes, badnodes)
            candidates += repo.revs(b'(%ln)::(%ln)', badnodes, goodnodes)
            revs = candidates & revs
        nodes = [repo.changelog.node(i) for i in revs]
    else:
        nodes = [repo.lookup(b'.')]

    # update state
    if good or bad or skip:
        if good:
            state[b'good'] += nodes
        elif bad:
            state[b'bad'] += nodes
        elif skip:
            state[b'skip'] += nodes
        hbisect.save_state(repo, state)
        if not (state[b'good'] and state[b'bad']):
            # bisection cannot start until both a good and a bad node exist
            return

    def mayupdate(repo, node, show_stats=True):
        """common used update sequence"""
        if noupdate:
            return
        cmdutil.checkunfinished(repo)
        cmdutil.bailifchanged(repo)
        return hg.clean(repo, node, show_stats=show_stats)

    displayer = logcmdutil.changesetdisplayer(ui, repo, {})

    if command:
        # automatic mode: run `command` on each candidate and classify it
        # by exit status (0 good, 125 skip, 127/negative abort, else bad)
        changesets = 1
        if noupdate:
            try:
                node = state[b'current'][0]
            except LookupError:
                raise error.StateError(
                    _(
                        b'current bisect revision is unknown - '
                        b'start a new bisect to fix'
                    )
                )
        else:
            node, p2 = repo.dirstate.parents()
            if p2 != repo.nullid:
                raise error.StateError(_(b'current bisect revision is a merge'))
        if rev:
            if not nodes:
                raise error.InputError(_(b'empty revision set'))
            node = repo[nodes[-1]].node()
        with hbisect.restore_state(repo, state, node):
            while changesets:
                # update state
                state[b'current'] = [node]
                hbisect.save_state(repo, state)
                status = ui.system(
                    command,
                    environ={b'HG_NODE': hex(node)},
                    blockedtag=b'bisect_check',
                )
                if status == 125:
                    transition = b"skip"
                elif status == 0:
                    transition = b"good"
                # status < 0 means process was killed
                elif status == 127:
                    raise error.Abort(_(b"failed to execute %s") % command)
                elif status < 0:
                    raise error.Abort(_(b"%s killed") % command)
                else:
                    transition = b"bad"
                state[transition].append(node)
                ctx = repo[node]
                summary = cmdutil.format_changeset_summary(ui, ctx, b'bisect')
                ui.status(_(b'changeset %s: %s\n') % (summary, transition))
                hbisect.checkstate(state)
                # bisect
                nodes, changesets, bgood = hbisect.bisect(repo, state)
                # update to next check
                node = nodes[0]
                mayupdate(repo, node, show_stats=False)
        hbisect.printresult(ui, repo, state, displayer, nodes, bgood)
        return

    hbisect.checkstate(state)

    # actually bisect
    nodes, changesets, good = hbisect.bisect(repo, state)
    if extend:
        if not changesets:
            extendctx = hbisect.extendrange(repo, state, nodes, good)
            if extendctx is not None:
                ui.write(
                    _(b"Extending search to changeset %s\n")
                    % cmdutil.format_changeset_summary(ui, extendctx, b'bisect')
                )
                state[b'current'] = [extendctx.node()]
                hbisect.save_state(repo, state)
                return mayupdate(repo, extendctx.node())
        raise error.StateError(_(b"nothing to extend"))

    if changesets == 0:
        hbisect.printresult(ui, repo, state, displayer, nodes, good)
    else:
        assert len(nodes) == 1  # only a single node can be tested next
        node = nodes[0]
        # compute the approximate number of remaining tests
        tests, size = 0, 2
        while size <= changesets:
            tests, size = tests + 1, size * 2
        rev = repo.changelog.rev(node)
        summary = cmdutil.format_changeset_summary(ui, repo[rev], b'bisect')
        ui.write(
            _(
                b"Testing changeset %s "
                b"(%d changesets remaining, ~%d tests)\n"
            )
            % (summary, changesets, tests)
        )
        state[b'current'] = [node]
        hbisect.save_state(repo, state)
        return mayupdate(repo, node)
1165
1165
1166
1166
@command(
    b'bookmarks|bookmark',
    [
        (b'f', b'force', False, _(b'force')),
        (b'r', b'rev', b'', _(b'revision for bookmark action'), _(b'REV')),
        (b'd', b'delete', False, _(b'delete a given bookmark')),
        (b'm', b'rename', b'', _(b'rename a given bookmark'), _(b'OLD')),
        (b'i', b'inactive', False, _(b'mark a bookmark inactive')),
        (b'l', b'list', False, _(b'list existing bookmarks')),
    ]
    + formatteropts,
    _(b'hg bookmarks [OPTIONS]... [NAME]...'),
    helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
)
def bookmark(ui, repo, *names, **opts):
    """create a new bookmark or list existing bookmarks

    Bookmarks are labels on changesets to help track lines of development.
    Bookmarks are unversioned and can be moved, renamed and deleted.
    Deleting or moving a bookmark has no effect on the associated changesets.

    Creating or updating to a bookmark causes it to be marked as 'active'.
    The active bookmark is indicated with a '*'.
    When a commit is made, the active bookmark will advance to the new commit.
    A plain :hg:`update` will also advance an active bookmark, if possible.
    Updating away from a bookmark will cause it to be deactivated.

    Bookmarks can be pushed and pulled between repositories (see
    :hg:`help push` and :hg:`help pull`). If a shared bookmark has
    diverged, a new 'divergent bookmark' of the form 'name@path' will
    be created. Using :hg:`merge` will resolve the divergence.

    Specifying bookmark as '.' to -m/-d/-l options is equivalent to specifying
    the active bookmark's name.

    A bookmark named '@' has the special property that :hg:`clone` will
    check it out by default if it exists.

    .. container:: verbose

      Template:

      The following keywords are supported in addition to the common template
      keywords and functions such as ``{bookmark}``. See also
      :hg:`help templates`.

      :active:  Boolean. True if the bookmark is active.

      Examples:

      - create an active bookmark for a new line of development::

          hg book new-feature

      - create an inactive bookmark as a place marker::

          hg book -i reviewed

      - create an inactive bookmark on another changeset::

          hg book -r .^ tested

      - rename bookmark turkey to dinner::

          hg book -m turkey dinner

      - move the '@' bookmark from another branch::

          hg book -f @

      - print only the active bookmark name::

          hg book -ql .
    """
    opts = pycompat.byteskwargs(opts)
    force = opts.get(b'force')
    rev = opts.get(b'rev')
    inactive = opts.get(b'inactive')  # meaning add/rename to inactive bookmark

    # -d/-m/-l are mutually exclusive; each of them also excludes --rev.
    action = cmdutil.check_at_most_one_arg(opts, b'delete', b'rename', b'list')
    if action:
        cmdutil.check_incompatible_arguments(opts, action, [b'rev'])
    elif names or rev:
        action = b'add'
    elif inactive:
        action = b'inactive'  # meaning deactivate
    else:
        action = b'list'

    cmdutil.check_incompatible_arguments(
        opts, b'inactive', [b'delete', b'list']
    )
    if not names and action in {b'add', b'delete'}:
        raise error.InputError(_(b"bookmark name required"))

    if action in {b'add', b'delete', b'rename', b'inactive'}:
        # mutating actions need locks and a transaction; listing does not
        with repo.wlock(), repo.lock(), repo.transaction(b'bookmark') as tr:
            if action == b'delete':
                # '.' expands to the active bookmark's name
                names = pycompat.maplist(repo._bookmarks.expandname, names)
                bookmarks.delete(repo, tr, names)
            elif action == b'rename':
                if not names:
                    raise error.InputError(_(b"new bookmark name required"))
                elif len(names) > 1:
                    raise error.InputError(
                        _(b"only one new bookmark name allowed")
                    )
                oldname = repo._bookmarks.expandname(opts[b'rename'])
                bookmarks.rename(repo, tr, oldname, names[0], force, inactive)
            elif action == b'add':
                bookmarks.addbookmarks(repo, tr, names, rev, force, inactive)
            elif action == b'inactive':
                if len(repo._bookmarks) == 0:
                    ui.status(_(b"no bookmarks set\n"))
                elif not repo._activebookmark:
                    ui.status(_(b"no active bookmark\n"))
                else:
                    bookmarks.deactivate(repo)
    elif action == b'list':
        names = pycompat.maplist(repo._bookmarks.expandname, names)
        with ui.formatter(b'bookmarks', opts) as fm:
            bookmarks.printbookmarks(ui, repo, fm, names)
    else:
        raise error.ProgrammingError(b'invalid action: %s' % action)
1291
1291
1292
1292
@command(
    b'branch',
    [
        (
            b'f',
            b'force',
            None,
            _(b'set branch name even if it shadows an existing branch'),
        ),
        (b'C', b'clean', None, _(b'reset branch name to parent branch name')),
        (
            b'r',
            b'rev',
            [],
            _(b'change branches of the given revs (EXPERIMENTAL)'),
        ),
    ],
    _(b'[-fC] [NAME]'),
    helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
)
def branch(ui, repo, label=None, **opts):
    """set or show the current branch name

    .. note::

       Branch names are permanent and global. Use :hg:`bookmark` to create a
       light-weight bookmark instead. See :hg:`help glossary` for more
       information about named branches and bookmarks.

    With no argument, show the current branch name. With one argument,
    set the working directory branch name (the branch will not exist
    in the repository until the next commit). Standard practice
    recommends that primary development take place on the 'default'
    branch.

    Unless -f/--force is specified, branch will not let you set a
    branch name that already exists.

    Use -C/--clean to reset the working directory branch to that of
    the parent of the working directory, negating a previous branch
    change.

    Use the command :hg:`update` to switch to an existing branch. Use
    :hg:`commit --close-branch` to mark this branch head as closed.
    When all heads of a branch are closed, the branch will be
    considered closed.

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)
    revs = opts.get(b'rev')
    if label:
        label = label.strip()

    # no new name and no --clean: just print the current branch
    if not opts.get(b'clean') and not label:
        if revs:
            raise error.InputError(
                _(b"no branch name specified for the revisions")
            )
        ui.write(b"%s\n" % repo.dirstate.branch())
        return

    with repo.wlock():
        if opts.get(b'clean'):
            # reset to the first parent's branch name
            label = repo[b'.'].branch()
            repo.dirstate.setbranch(label, repo.currenttransaction())
            ui.status(_(b'reset working directory to branch %s\n') % label)
        elif label:

            scmutil.checknewlabel(repo, label, b'branch')
            if revs:
                # experimental: rewrite the branch of existing revisions
                return cmdutil.changebranch(ui, repo, revs, label, opts)

            # refuse to shadow an existing branch unless forced or the
            # name matches a working-directory parent's branch
            if not opts.get(b'force') and label in repo.branchmap():
                if label not in [p.branch() for p in repo[None].parents()]:
                    raise error.InputError(
                        _(b'a branch of the same name already exists'),
                        # i18n: "it" refers to an existing branch
                        hint=_(b"use 'hg update' to switch to it"),
                    )

            repo.dirstate.setbranch(label, repo.currenttransaction())
            ui.status(_(b'marked working directory as branch %s\n') % label)

            # find any open named branches aside from default
            for n, h, t, c in repo.branchmap().iterbranches():
                if n != b"default" and not c:
                    return 0
            ui.status(
                _(
                    b'(branches are permanent and global, '
                    b'did you want a bookmark?)\n'
                )
            )
1387
1387
1388
1388
@command(
    b'branches',
    [
        (
            b'a',
            b'active',
            False,
            _(b'show only branches that have unmerged heads (DEPRECATED)'),
        ),
        (b'c', b'closed', False, _(b'show normal and closed branches')),
        (b'r', b'rev', [], _(b'show branch name(s) of the given rev')),
    ]
    + formatteropts,
    _(b'[-c]'),
    helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
    intents={INTENT_READONLY},
)
def branches(ui, repo, active=False, closed=False, **opts):
    """list repository named branches

    List the repository's named branches, indicating which ones are
    inactive. If -c/--closed is specified, also list branches which have
    been marked closed (see :hg:`commit --close-branch`).

    Use the command :hg:`update` to switch to an existing branch.

    .. container:: verbose

      Template:

      The following keywords are supported in addition to the common template
      keywords and functions such as ``{branch}``. See also
      :hg:`help templates`.

      :active:  Boolean. True if the branch is active.
      :closed:  Boolean. True if the branch is closed.
      :current: Boolean. True if it is the current branch.

    Returns 0.
    """

    opts = pycompat.byteskwargs(opts)
    revs = opts.get(b'rev')
    selectedbranches = None
    if revs:
        # restrict the listing to branches containing the given revisions
        revs = logcmdutil.revrange(repo, revs)
        getbi = repo.revbranchcache().branchinfo
        selectedbranches = {getbi(r)[0] for r in revs}

    ui.pager(b'branches')
    fm = ui.formatter(b'branches', opts)
    hexfunc = fm.hexfunc

    allheads = set(repo.heads())
    branches = []
    for tag, heads, tip, isclosed in repo.branchmap().iterbranches():
        if selectedbranches is not None and tag not in selectedbranches:
            continue
        # a branch is active if it has at least one open topological head
        isactive = False
        if not isclosed:
            openheads = set(repo.branchmap().iteropen(heads))
            isactive = bool(openheads & allheads)
        branches.append((tag, repo[tip], isactive, not isclosed))
    # active branches first, then by descending tip revision
    branches.sort(key=lambda i: (i[2], i[1].rev(), i[0], i[3]), reverse=True)

    for tag, ctx, isactive, isopen in branches:
        if active and not isactive:
            continue
        if isactive:
            label = b'branches.active'
            notice = b''
        elif not isopen:
            if not closed:
                continue
            label = b'branches.closed'
            notice = _(b' (closed)')
        else:
            label = b'branches.inactive'
            notice = _(b' (inactive)')
        current = tag == repo.dirstate.branch()
        if current:
            label = b'branches.current'

        fm.startitem()
        fm.write(b'branch', b'%s', tag, label=label)
        rev = ctx.rev()
        # pad so the "rev:node" column lines up across rows
        padsize = max(31 - len(b"%d" % rev) - encoding.colwidth(tag), 0)
        fmt = b' ' * padsize + b' %d:%s'
        fm.condwrite(
            not ui.quiet,
            b'rev node',
            fmt,
            rev,
            hexfunc(ctx.node()),
            label=b'log.changeset changeset.%s' % ctx.phasestr(),
        )
        fm.context(ctx=ctx)
        fm.data(active=isactive, closed=not isopen, current=current)
        if not ui.quiet:
            fm.plain(notice)
        fm.plain(b'\n')
    fm.end()
1491
1491
1492
1492
1493 @command(
1493 @command(
1494 b'bundle',
1494 b'bundle',
1495 [
1495 [
1496 (
1496 (
1497 b'',
1497 b'',
1498 b'exact',
1498 b'exact',
1499 None,
1499 None,
1500 _(b'compute the base from the revision specified'),
1500 _(b'compute the base from the revision specified'),
1501 ),
1501 ),
1502 (
1502 (
1503 b'f',
1503 b'f',
1504 b'force',
1504 b'force',
1505 None,
1505 None,
1506 _(b'run even when the destination is unrelated'),
1506 _(b'run even when the destination is unrelated'),
1507 ),
1507 ),
1508 (
1508 (
1509 b'r',
1509 b'r',
1510 b'rev',
1510 b'rev',
1511 [],
1511 [],
1512 _(b'a changeset intended to be added to the destination'),
1512 _(b'a changeset intended to be added to the destination'),
1513 _(b'REV'),
1513 _(b'REV'),
1514 ),
1514 ),
1515 (
1515 (
1516 b'b',
1516 b'b',
1517 b'branch',
1517 b'branch',
1518 [],
1518 [],
1519 _(b'a specific branch you would like to bundle'),
1519 _(b'a specific branch you would like to bundle'),
1520 _(b'BRANCH'),
1520 _(b'BRANCH'),
1521 ),
1521 ),
1522 (
1522 (
1523 b'',
1523 b'',
1524 b'base',
1524 b'base',
1525 [],
1525 [],
1526 _(b'a base changeset assumed to be available at the destination'),
1526 _(b'a base changeset assumed to be available at the destination'),
1527 _(b'REV'),
1527 _(b'REV'),
1528 ),
1528 ),
1529 (b'a', b'all', None, _(b'bundle all changesets in the repository')),
1529 (b'a', b'all', None, _(b'bundle all changesets in the repository')),
1530 (
1530 (
1531 b't',
1531 b't',
1532 b'type',
1532 b'type',
1533 b'bzip2',
1533 b'bzip2',
1534 _(b'bundle compression type to use'),
1534 _(b'bundle compression type to use'),
1535 _(b'TYPE'),
1535 _(b'TYPE'),
1536 ),
1536 ),
1537 ]
1537 ]
1538 + remoteopts,
1538 + remoteopts,
1539 _(b'[-f] [-t BUNDLESPEC] [-a] [-r REV]... [--base REV]... FILE [DEST]...'),
1539 _(b'[-f] [-t BUNDLESPEC] [-a] [-r REV]... [--base REV]... FILE [DEST]...'),
1540 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1540 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1541 )
1541 )
1542 def bundle(ui, repo, fname, *dests, **opts):
1542 def bundle(ui, repo, fname, *dests, **opts):
1543 """create a bundle file
1543 """create a bundle file
1544
1544
1545 Generate a bundle file containing data to be transferred to another
1545 Generate a bundle file containing data to be transferred to another
1546 repository.
1546 repository.
1547
1547
1548 To create a bundle containing all changesets, use -a/--all
1548 To create a bundle containing all changesets, use -a/--all
1549 (or --base null). Otherwise, hg assumes the destination will have
1549 (or --base null). Otherwise, hg assumes the destination will have
1550 all the nodes you specify with --base parameters. Otherwise, hg
1550 all the nodes you specify with --base parameters. Otherwise, hg
1551 will assume the repository has all the nodes in destination, or
1551 will assume the repository has all the nodes in destination, or
1552 default-push/default if no destination is specified, where destination
1552 default-push/default if no destination is specified, where destination
1553 is the repositories you provide through DEST option.
1553 is the repositories you provide through DEST option.
1554
1554
1555 You can change bundle format with the -t/--type option. See
1555 You can change bundle format with the -t/--type option. See
1556 :hg:`help bundlespec` for documentation on this format. By default,
1556 :hg:`help bundlespec` for documentation on this format. By default,
1557 the most appropriate format is used and compression defaults to
1557 the most appropriate format is used and compression defaults to
1558 bzip2.
1558 bzip2.
1559
1559
1560 The bundle file can then be transferred using conventional means
1560 The bundle file can then be transferred using conventional means
1561 and applied to another repository with the unbundle or pull
1561 and applied to another repository with the unbundle or pull
1562 command. This is useful when direct push and pull are not
1562 command. This is useful when direct push and pull are not
1563 available or when exporting an entire repository is undesirable.
1563 available or when exporting an entire repository is undesirable.
1564
1564
1565 Applying bundles preserves all changeset contents including
1565 Applying bundles preserves all changeset contents including
1566 permissions, copy/rename information, and revision history.
1566 permissions, copy/rename information, and revision history.
1567
1567
1568 Returns 0 on success, 1 if no changes found.
1568 Returns 0 on success, 1 if no changes found.
1569 """
1569 """
1570 opts = pycompat.byteskwargs(opts)
1570 opts = pycompat.byteskwargs(opts)
1571
1571
1572 revs = None
1572 revs = None
1573 if b'rev' in opts:
1573 if b'rev' in opts:
1574 revstrings = opts[b'rev']
1574 revstrings = opts[b'rev']
1575 revs = logcmdutil.revrange(repo, revstrings)
1575 revs = logcmdutil.revrange(repo, revstrings)
1576 if revstrings and not revs:
1576 if revstrings and not revs:
1577 raise error.InputError(_(b'no commits to bundle'))
1577 raise error.InputError(_(b'no commits to bundle'))
1578
1578
1579 bundletype = opts.get(b'type', b'bzip2').lower()
1579 bundletype = opts.get(b'type', b'bzip2').lower()
1580 try:
1580 try:
1581 bundlespec = bundlecaches.parsebundlespec(
1581 bundlespec = bundlecaches.parsebundlespec(
1582 repo, bundletype, strict=False
1582 repo, bundletype, strict=False
1583 )
1583 )
1584 except error.UnsupportedBundleSpecification as e:
1584 except error.UnsupportedBundleSpecification as e:
1585 raise error.InputError(
1585 raise error.InputError(
1586 pycompat.bytestr(e),
1586 pycompat.bytestr(e),
1587 hint=_(b"see 'hg help bundlespec' for supported values for --type"),
1587 hint=_(b"see 'hg help bundlespec' for supported values for --type"),
1588 )
1588 )
1589 cgversion = bundlespec.params[b"cg.version"]
1589 cgversion = bundlespec.params[b"cg.version"]
1590
1590
1591 # Packed bundles are a pseudo bundle format for now.
1591 # Packed bundles are a pseudo bundle format for now.
1592 if cgversion == b's1':
1592 if cgversion == b's1':
1593 raise error.InputError(
1593 raise error.InputError(
1594 _(b'packed bundles cannot be produced by "hg bundle"'),
1594 _(b'packed bundles cannot be produced by "hg bundle"'),
1595 hint=_(b"use 'hg debugcreatestreamclonebundle'"),
1595 hint=_(b"use 'hg debugcreatestreamclonebundle'"),
1596 )
1596 )
1597
1597
1598 if opts.get(b'all'):
1598 if opts.get(b'all'):
1599 if dests:
1599 if dests:
1600 raise error.InputError(
1600 raise error.InputError(
1601 _(b"--all is incompatible with specifying destinations")
1601 _(b"--all is incompatible with specifying destinations")
1602 )
1602 )
1603 if opts.get(b'base'):
1603 if opts.get(b'base'):
1604 ui.warn(_(b"ignoring --base because --all was specified\n"))
1604 ui.warn(_(b"ignoring --base because --all was specified\n"))
1605 if opts.get(b'exact'):
1605 if opts.get(b'exact'):
1606 ui.warn(_(b"ignoring --exact because --all was specified\n"))
1606 ui.warn(_(b"ignoring --exact because --all was specified\n"))
1607 base = [nullrev]
1607 base = [nullrev]
1608 elif opts.get(b'exact'):
1608 elif opts.get(b'exact'):
1609 if dests:
1609 if dests:
1610 raise error.InputError(
1610 raise error.InputError(
1611 _(b"--exact is incompatible with specifying destinations")
1611 _(b"--exact is incompatible with specifying destinations")
1612 )
1612 )
1613 if opts.get(b'base'):
1613 if opts.get(b'base'):
1614 ui.warn(_(b"ignoring --base because --exact was specified\n"))
1614 ui.warn(_(b"ignoring --base because --exact was specified\n"))
1615 base = repo.revs(b'parents(%ld) - %ld', revs, revs)
1615 base = repo.revs(b'parents(%ld) - %ld', revs, revs)
1616 if not base:
1616 if not base:
1617 base = [nullrev]
1617 base = [nullrev]
1618 else:
1618 else:
1619 base = logcmdutil.revrange(repo, opts.get(b'base'))
1619 base = logcmdutil.revrange(repo, opts.get(b'base'))
1620 if cgversion not in changegroup.supportedoutgoingversions(repo):
1620 if cgversion not in changegroup.supportedoutgoingversions(repo):
1621 raise error.Abort(
1621 raise error.Abort(
1622 _(b"repository does not support bundle version %s") % cgversion
1622 _(b"repository does not support bundle version %s") % cgversion
1623 )
1623 )
1624
1624
1625 if base:
1625 if base:
1626 if dests:
1626 if dests:
1627 raise error.InputError(
1627 raise error.InputError(
1628 _(b"--base is incompatible with specifying destinations")
1628 _(b"--base is incompatible with specifying destinations")
1629 )
1629 )
1630 cl = repo.changelog
1630 cl = repo.changelog
1631 common = [cl.node(rev) for rev in base]
1631 common = [cl.node(rev) for rev in base]
1632 heads = [cl.node(r) for r in revs] if revs else None
1632 heads = [cl.node(r) for r in revs] if revs else None
1633 outgoing = discovery.outgoing(repo, common, heads)
1633 outgoing = discovery.outgoing(repo, common, heads)
1634 missing = outgoing.missing
1634 missing = outgoing.missing
1635 excluded = outgoing.excluded
1635 excluded = outgoing.excluded
1636 else:
1636 else:
1637 missing = set()
1637 missing = set()
1638 excluded = set()
1638 excluded = set()
1639 for path in urlutil.get_push_paths(repo, ui, dests):
1639 for path in urlutil.get_push_paths(repo, ui, dests):
1640 other = hg.peer(repo, opts, path)
1640 other = hg.peer(repo, opts, path)
1641 if revs is not None:
1641 if revs is not None:
1642 hex_revs = [repo[r].hex() for r in revs]
1642 hex_revs = [repo[r].hex() for r in revs]
1643 else:
1643 else:
1644 hex_revs = None
1644 hex_revs = None
1645 branches = (path.branch, [])
1645 branches = (path.branch, [])
1646 head_revs, checkout = hg.addbranchrevs(
1646 head_revs, checkout = hg.addbranchrevs(
1647 repo, repo, branches, hex_revs
1647 repo, repo, branches, hex_revs
1648 )
1648 )
1649 heads = (
1649 heads = (
1650 head_revs
1650 head_revs
1651 and pycompat.maplist(repo.lookup, head_revs)
1651 and pycompat.maplist(repo.lookup, head_revs)
1652 or head_revs
1652 or head_revs
1653 )
1653 )
1654 outgoing = discovery.findcommonoutgoing(
1654 outgoing = discovery.findcommonoutgoing(
1655 repo,
1655 repo,
1656 other,
1656 other,
1657 onlyheads=heads,
1657 onlyheads=heads,
1658 force=opts.get(b'force'),
1658 force=opts.get(b'force'),
1659 portable=True,
1659 portable=True,
1660 )
1660 )
1661 missing.update(outgoing.missing)
1661 missing.update(outgoing.missing)
1662 excluded.update(outgoing.excluded)
1662 excluded.update(outgoing.excluded)
1663
1663
1664 if not missing:
1664 if not missing:
1665 scmutil.nochangesfound(ui, repo, not base and excluded)
1665 scmutil.nochangesfound(ui, repo, not base and excluded)
1666 return 1
1666 return 1
1667
1667
1668 if heads:
1668 if heads:
1669 outgoing = discovery.outgoing(
1669 outgoing = discovery.outgoing(
1670 repo, missingroots=missing, ancestorsof=heads
1670 repo, missingroots=missing, ancestorsof=heads
1671 )
1671 )
1672 else:
1672 else:
1673 outgoing = discovery.outgoing(repo, missingroots=missing)
1673 outgoing = discovery.outgoing(repo, missingroots=missing)
1674 outgoing.excluded = sorted(excluded)
1674 outgoing.excluded = sorted(excluded)
1675
1675
1676 if cgversion == b'01': # bundle1
1676 if cgversion == b'01': # bundle1
1677 bversion = b'HG10' + bundlespec.wirecompression
1677 bversion = b'HG10' + bundlespec.wirecompression
1678 bcompression = None
1678 bcompression = None
1679 elif cgversion in (b'02', b'03'):
1679 elif cgversion in (b'02', b'03'):
1680 bversion = b'HG20'
1680 bversion = b'HG20'
1681 bcompression = bundlespec.wirecompression
1681 bcompression = bundlespec.wirecompression
1682 else:
1682 else:
1683 raise error.ProgrammingError(
1683 raise error.ProgrammingError(
1684 b'bundle: unexpected changegroup version %s' % cgversion
1684 b'bundle: unexpected changegroup version %s' % cgversion
1685 )
1685 )
1686
1686
1687 # TODO compression options should be derived from bundlespec parsing.
1687 # TODO compression options should be derived from bundlespec parsing.
1688 # This is a temporary hack to allow adjusting bundle compression
1688 # This is a temporary hack to allow adjusting bundle compression
1689 # level without a) formalizing the bundlespec changes to declare it
1689 # level without a) formalizing the bundlespec changes to declare it
1690 # b) introducing a command flag.
1690 # b) introducing a command flag.
1691 compopts = {}
1691 compopts = {}
1692 complevel = ui.configint(
1692 complevel = ui.configint(
1693 b'experimental', b'bundlecomplevel.' + bundlespec.compression
1693 b'experimental', b'bundlecomplevel.' + bundlespec.compression
1694 )
1694 )
1695 if complevel is None:
1695 if complevel is None:
1696 complevel = ui.configint(b'experimental', b'bundlecomplevel')
1696 complevel = ui.configint(b'experimental', b'bundlecomplevel')
1697 if complevel is not None:
1697 if complevel is not None:
1698 compopts[b'level'] = complevel
1698 compopts[b'level'] = complevel
1699
1699
1700 compthreads = ui.configint(
1700 compthreads = ui.configint(
1701 b'experimental', b'bundlecompthreads.' + bundlespec.compression
1701 b'experimental', b'bundlecompthreads.' + bundlespec.compression
1702 )
1702 )
1703 if compthreads is None:
1703 if compthreads is None:
1704 compthreads = ui.configint(b'experimental', b'bundlecompthreads')
1704 compthreads = ui.configint(b'experimental', b'bundlecompthreads')
1705 if compthreads is not None:
1705 if compthreads is not None:
1706 compopts[b'threads'] = compthreads
1706 compopts[b'threads'] = compthreads
1707
1707
1708 # Bundling of obsmarker and phases is optional as not all clients
1708 # Bundling of obsmarker and phases is optional as not all clients
1709 # support the necessary features.
1709 # support the necessary features.
1710 cfg = ui.configbool
1710 cfg = ui.configbool
1711 obsolescence_cfg = cfg(b'experimental', b'evolution.bundle-obsmarker')
1711 obsolescence_cfg = cfg(b'experimental', b'evolution.bundle-obsmarker')
1712 bundlespec.set_param(b'obsolescence', obsolescence_cfg, overwrite=False)
1712 bundlespec.set_param(b'obsolescence', obsolescence_cfg, overwrite=False)
1713 obs_mand_cfg = cfg(b'experimental', b'evolution.bundle-obsmarker:mandatory')
1713 obs_mand_cfg = cfg(b'experimental', b'evolution.bundle-obsmarker:mandatory')
1714 bundlespec.set_param(
1714 bundlespec.set_param(
1715 b'obsolescence-mandatory', obs_mand_cfg, overwrite=False
1715 b'obsolescence-mandatory', obs_mand_cfg, overwrite=False
1716 )
1716 )
1717 phases_cfg = cfg(b'experimental', b'bundle-phases')
1717 phases_cfg = cfg(b'experimental', b'bundle-phases')
1718 bundlespec.set_param(b'phases', phases_cfg, overwrite=False)
1718 bundlespec.set_param(b'phases', phases_cfg, overwrite=False)
1719
1719
1720 bundle2.writenewbundle(
1720 bundle2.writenewbundle(
1721 ui,
1721 ui,
1722 repo,
1722 repo,
1723 b'bundle',
1723 b'bundle',
1724 fname,
1724 fname,
1725 bversion,
1725 bversion,
1726 outgoing,
1726 outgoing,
1727 bundlespec.params,
1727 bundlespec.params,
1728 compression=bcompression,
1728 compression=bcompression,
1729 compopts=compopts,
1729 compopts=compopts,
1730 )
1730 )
1731
1731
1732
1732
@command(
    b'cat',
    [
        (
            b'o',
            b'output',
            b'',
            _(b'print output to file with formatted name'),
            _(b'FORMAT'),
        ),
        (b'r', b'rev', b'', _(b'print the given revision'), _(b'REV')),
        (b'', b'decode', None, _(b'apply any matching decode filter')),
    ]
    + walkopts
    + formatteropts,
    _(b'[OPTION]... FILE...'),
    helpcategory=command.CATEGORY_FILE_CONTENTS,
    inferrepo=True,
    intents={INTENT_READONLY},
)
def cat(ui, repo, file1, *pats, **opts):
    """output the current or given revision of files

    Print the specified files as they were at the given revision. If
    no revision is given, the parent of the working directory is used.

    Output may be to a file, in which case the name of the file is
    given using a template string. See :hg:`help templates`. In addition
    to the common template keywords, the following formatting rules are
    supported:

    :``%%``: literal "%" character
    :``%s``: basename of file being printed
    :``%d``: dirname of file being printed, or '.' if in repository root
    :``%p``: root-relative path name of file being printed
    :``%H``: changeset hash (40 hexadecimal digits)
    :``%R``: changeset revision number
    :``%h``: short-form changeset hash (12 hexadecimal digits)
    :``%r``: zero-padded changeset revision number
    :``%b``: basename of the exporting repository
    :``\\``: literal "\\" character

    .. container:: verbose

      Template:

      The following keywords are supported in addition to the common template
      keywords and functions. See also :hg:`help templates`.

      :data: String. File content.
      :path: String. Repository-absolute path of the file.

    Returns 0 on success.
    """
    # Command options arrive as str-keyed kwargs; normalize to bytes keys,
    # which is what the rest of the Mercurial API expects.
    opts = pycompat.byteskwargs(opts)
    rev = opts.get(b'rev')
    if rev:
        # Let hash-like revision arguments address hidden changesets
        # without emitting a warning.
        repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
    ctx = logcmdutil.revsingle(repo, rev)
    matcher = scmutil.match(ctx, (file1,) + pats, opts)

    # --output is consumed here (popped) so it is not forwarded twice
    # through the trailing **opts below.
    fntemplate = opts.pop(b'output', b'')
    if cmdutil.isstdiofilename(fntemplate):
        # '-' means stdout; treat it the same as no template at all.
        fntemplate = b''

    if not fntemplate:
        # Writing to stdout: page the output and use a real formatter.
        ui.pager(b'cat')
        fm = ui.formatter(b'cat', opts)
    else:
        # Writing to per-file destinations: formatter output is unwanted.
        fm = formatter.nullformatter(ui, b'cat', opts)
    with fm:
        return cmdutil.cat(
            ui,
            repo,
            ctx,
            matcher,
            fm,
            fntemplate,
            b'',
            **pycompat.strkwargs(opts)
        )
1806
1806
1807
1807
@command(
    b'clone',
    [
        (
            b'U',
            b'noupdate',
            None,
            _(
                b'the clone will include an empty working '
                b'directory (only a repository)'
            ),
        ),
        (
            b'u',
            b'updaterev',
            b'',
            _(b'revision, tag, or branch to check out'),
            _(b'REV'),
        ),
        (
            b'r',
            b'rev',
            [],
            _(
                b'do not clone everything, but include this changeset'
                b' and its ancestors'
            ),
            _(b'REV'),
        ),
        (
            b'b',
            b'branch',
            [],
            _(
                b'do not clone everything, but include this branch\'s'
                b' changesets and their ancestors'
            ),
            _(b'BRANCH'),
        ),
        (b'', b'pull', None, _(b'use pull protocol to copy metadata')),
        (b'', b'uncompressed', None, _(b'an alias to --stream (DEPRECATED)')),
        (b'', b'stream', None, _(b'clone with minimal data processing')),
    ]
    + remoteopts,
    _(b'[OPTION]... SOURCE [DEST]'),
    helpcategory=command.CATEGORY_REPO_CREATION,
    helpbasic=True,
    norepo=True,
)
def clone(ui, source, dest=None, **opts):
    """make a copy of an existing repository

    Create a copy of an existing repository in a new directory.

    If no destination directory name is specified, it defaults to the
    basename of the source.

    The location of the source is added to the new repository's
    ``.hg/hgrc`` file, as the default to be used for future pulls.

    Only local paths and ``ssh://`` URLs are supported as
    destinations. For ``ssh://`` destinations, no working directory or
    ``.hg/hgrc`` will be created on the remote side.

    If the source repository has a bookmark called '@' set, that
    revision will be checked out in the new repository by default.

    To check out a particular version, use -u/--update, or
    -U/--noupdate to create a clone with no working directory.

    To pull only a subset of changesets, specify one or more revisions
    identifiers with -r/--rev or branches with -b/--branch. The
    resulting clone will contain only the specified changesets and
    their ancestors. These options (or 'clone src#rev dest') imply
    --pull, even for local source repositories.

    In normal clone mode, the remote normalizes repository data into a common
    exchange format and the receiving end translates this data into its local
    storage format. --stream activates a different clone mode that essentially
    copies repository files from the remote with minimal data processing. This
    significantly reduces the CPU cost of a clone both remotely and locally.
    However, it often increases the transferred data size by 30-40%. This can
    result in substantially faster clones where I/O throughput is plentiful,
    especially for larger repositories. A side-effect of --stream clones is
    that storage settings and requirements on the remote are applied locally:
    a modern client may inherit legacy or inefficient storage used by the
    remote or a legacy Mercurial client may not be able to clone from a
    modern Mercurial remote.

    .. note::

       Specifying a tag will include the tagged changeset but not the
       changeset containing the tag.

    .. container:: verbose

      For efficiency, hardlinks are used for cloning whenever the
      source and destination are on the same filesystem (note this
      applies only to the repository data, not to the working
      directory). Some filesystems, such as AFS, implement hardlinking
      incorrectly, but do not report errors. In these cases, use the
      --pull option to avoid hardlinking.

      Mercurial will update the working directory to the first applicable
      revision from this list:

      a) null if -U or the source repository has no changesets
      b) if -u . and the source repository is local, the first parent of
         the source repository's working directory
      c) the changeset specified with -u (if a branch name, this means the
         latest head of that branch)
      d) the changeset specified with -r
      e) the tipmost head specified with -b
      f) the tipmost head specified with the url#branch source syntax
      g) the revision marked with the '@' bookmark, if present
      h) the tipmost head of the default branch
      i) tip

      When cloning from servers that support it, Mercurial may fetch
      pre-generated data from a server-advertised URL or inline from the
      same stream. When this is done, hooks operating on incoming changesets
      and changegroups may fire more than once, once for each pre-generated
      bundle and as well as for any additional remaining data. In addition,
      if an error occurs, the repository may be rolled back to a partial
      clone. This behavior may change in future releases.
      See :hg:`help -e clonebundles` for more.

      Examples:

      - clone a remote repository to a new directory named hg/::

          hg clone https://www.mercurial-scm.org/repo/hg/

      - create a lightweight local clone::

          hg clone project/ project-feature/

      - clone from an absolute path on an ssh server (note double-slash)::

          hg clone ssh://user@server//home/projects/alpha/

      - do a streaming clone while checking out a specified version::

          hg clone --stream http://server/repo -u 1.5

      - create a repository without changesets after a particular revision::

          hg clone -r 04e544 experimental/ good/

      - clone (and track) a particular named branch::

          hg clone https://www.mercurial-scm.org/repo/hg/#stable

    See :hg:`help urls` for details on specifying URLs.

    Returns 0 on success.
    """
    # Normalize kwargs to bytes keys and reject the contradictory
    # -U/--noupdate + -u/--updaterev combination up front.
    opts = pycompat.byteskwargs(opts)
    cmdutil.check_at_most_one_arg(opts, b'noupdate', b'updaterev')

    # Narrowing patterns; --include/--exclude can come from narrow or
    # sparse. hg.clone() distinguishes None (no narrowing at all) from an
    # empty set (narrow requested, no patterns yet), so switch to empty
    # sets as soon as --narrow is given.
    store_include = None
    store_exclude = None
    if opts.get(b'narrow'):
        store_include = set()
        store_exclude = set()

    if opts.get(b'include'):
        store_include = narrowspec.parsepatterns(opts.get(b'include'))
    if opts.get(b'exclude'):
        store_exclude = narrowspec.parsepatterns(opts.get(b'exclude'))

    peer_or_none = hg.clone(
        ui,
        opts,
        source,
        dest,
        pull=opts.get(b'pull'),
        stream=opts.get(b'stream') or opts.get(b'uncompressed'),
        revs=opts.get(b'rev'),
        update=opts.get(b'updaterev') or not opts.get(b'noupdate'),
        branch=opts.get(b'branch'),
        shareopts=opts.get(b'shareopts'),
        storeincludepats=store_include,
        storeexcludepats=store_exclude,
        depth=opts.get(b'depth') or None,
    )

    # hg.clone() returns None on failure; map that to exit status 1.
    return peer_or_none is None
1999
1999
2000
2000
@command(
    b'commit|ci',
    [
        (
            b'A',
            b'addremove',
            None,
            _(b'mark new/missing files as added/removed before committing'),
        ),
        (b'', b'close-branch', None, _(b'mark a branch head as closed')),
        (b'', b'amend', None, _(b'amend the parent of the working directory')),
        (b's', b'secret', None, _(b'use the secret phase for committing')),
        (b'', b'draft', None, _(b'use the draft phase for committing')),
        (b'e', b'edit', None, _(b'invoke editor on commit messages')),
        (
            b'',
            b'force-close-branch',
            None,
            _(b'forcibly close branch from a non-head changeset (ADVANCED)'),
        ),
        (b'i', b'interactive', None, _(b'use interactive mode')),
    ]
    + walkopts
    + commitopts
    + commitopts2
    + subrepoopts,
    _(b'[OPTION]... [FILE]...'),
    helpcategory=command.CATEGORY_COMMITTING,
    helpbasic=True,
    inferrepo=True,
)
def commit(ui, repo, *pats, **opts):
    """commit the specified files or all outstanding changes

    Commit changes to the given files into the repository. Unlike a
    centralized SCM, this operation is a local operation. See
    :hg:`push` for a way to actively distribute your changes.

    If a list of files is omitted, all changes reported by :hg:`status`
    will be committed.

    If you are committing the result of a merge, do not provide any
    filenames or -I/-X filters.

    If no commit message is specified, Mercurial starts your
    configured editor where you can enter a message. In case your
    commit fails, you will find a backup of your message in
    ``.hg/last-message.txt``.

    The --close-branch flag can be used to mark the current branch
    head closed. When all heads of a branch are closed, the branch
    will be considered closed and no longer listed.

    The --amend flag can be used to amend the parent of the
    working directory with a new commit that contains the changes
    in the parent in addition to those currently reported by :hg:`status`,
    if there are any. The old commit is stored in a backup bundle in
    ``.hg/strip-backup`` (see :hg:`help bundle` and :hg:`help unbundle`
    on how to restore it).

    Message, user and date are taken from the amended commit unless
    specified. When a message isn't specified on the command line,
    the editor will open with the message of the amended commit.

    It is not possible to amend public changesets (see :hg:`help phases`)
    or changesets that have children.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    Returns 0 on success, 1 if nothing changed.

    .. container:: verbose

      Examples:

      - commit all files ending in .py::

          hg commit --include "set:**.py"

      - commit all non-binary files::

          hg commit --exclude "set:binary()"

      - amend the current commit and set the date to now::

          hg commit --amend --date now
    """
    # --draft and --secret request conflicting phases for the new commit.
    cmdutil.check_at_most_one_arg(opts, 'draft', 'secret')
    # --subrepos cannot be combined with --amend.
    cmdutil.check_incompatible_arguments(opts, 'subrepos', ['amend'])
    # Hold both the working-copy lock and the store lock for the whole
    # commit; the real work happens in _docommit().
    with repo.wlock():
        with repo.lock():
            return _docommit(ui, repo, *pats, **opts)
2092
2092
2093
2093
2094 def _docommit(ui, repo, *pats, **opts):
2094 def _docommit(ui, repo, *pats, **opts):
2095 if opts.get('interactive'):
2095 if opts.get('interactive'):
2096 opts.pop('interactive')
2096 opts.pop('interactive')
2097 ret = cmdutil.dorecord(
2097 ret = cmdutil.dorecord(
2098 ui, repo, commit, None, False, cmdutil.recordfilter, *pats, **opts
2098 ui, repo, commit, None, False, cmdutil.recordfilter, *pats, **opts
2099 )
2099 )
2100 # ret can be 0 (no changes to record) or the value returned by
2100 # ret can be 0 (no changes to record) or the value returned by
2101 # commit(), 1 if nothing changed or None on success.
2101 # commit(), 1 if nothing changed or None on success.
2102 return 1 if ret == 0 else ret
2102 return 1 if ret == 0 else ret
2103
2103
2104 if opts.get('subrepos'):
2104 if opts.get('subrepos'):
2105 # Let --subrepos on the command line override config setting.
2105 # Let --subrepos on the command line override config setting.
2106 ui.setconfig(b'ui', b'commitsubrepos', True, b'commit')
2106 ui.setconfig(b'ui', b'commitsubrepos', True, b'commit')
2107
2107
2108 cmdutil.checkunfinished(repo, commit=True)
2108 cmdutil.checkunfinished(repo, commit=True)
2109
2109
2110 branch = repo[None].branch()
2110 branch = repo[None].branch()
2111 bheads = repo.branchheads(branch)
2111 bheads = repo.branchheads(branch)
2112 tip = repo.changelog.tip()
2112 tip = repo.changelog.tip()
2113
2113
2114 extra = {}
2114 extra = {}
2115 if opts.get('close_branch') or opts.get('force_close_branch'):
2115 if opts.get('close_branch') or opts.get('force_close_branch'):
2116 extra[b'close'] = b'1'
2116 extra[b'close'] = b'1'
2117
2117
2118 if repo[b'.'].closesbranch():
2118 if repo[b'.'].closesbranch():
2119 # Not ideal, but let us do an extra status early to prevent early
2119 # Not ideal, but let us do an extra status early to prevent early
2120 # bail out.
2120 # bail out.
2121 matcher = scmutil.match(
2121 matcher = scmutil.match(
2122 repo[None], pats, pycompat.byteskwargs(opts)
2122 repo[None], pats, pycompat.byteskwargs(opts)
2123 )
2123 )
2124 s = repo.status(match=matcher)
2124 s = repo.status(match=matcher)
2125 if s.modified or s.added or s.removed:
2125 if s.modified or s.added or s.removed:
2126 bheads = repo.branchheads(branch, closed=True)
2126 bheads = repo.branchheads(branch, closed=True)
2127 else:
2127 else:
2128 msg = _(b'current revision is already a branch closing head')
2128 msg = _(b'current revision is already a branch closing head')
2129 raise error.InputError(msg)
2129 raise error.InputError(msg)
2130
2130
2131 if not bheads:
2131 if not bheads:
2132 raise error.InputError(
2132 raise error.InputError(
2133 _(b'branch "%s" has no heads to close') % branch
2133 _(b'branch "%s" has no heads to close') % branch
2134 )
2134 )
2135 elif (
2135 elif (
2136 branch == repo[b'.'].branch()
2136 branch == repo[b'.'].branch()
2137 and repo[b'.'].node() not in bheads
2137 and repo[b'.'].node() not in bheads
2138 and not opts.get('force_close_branch')
2138 and not opts.get('force_close_branch')
2139 ):
2139 ):
2140 hint = _(
2140 hint = _(
2141 b'use --force-close-branch to close branch from a non-head'
2141 b'use --force-close-branch to close branch from a non-head'
2142 b' changeset'
2142 b' changeset'
2143 )
2143 )
2144 raise error.InputError(_(b'can only close branch heads'), hint=hint)
2144 raise error.InputError(_(b'can only close branch heads'), hint=hint)
2145 elif opts.get('amend'):
2145 elif opts.get('amend'):
2146 if (
2146 if (
2147 repo[b'.'].p1().branch() != branch
2147 repo[b'.'].p1().branch() != branch
2148 and repo[b'.'].p2().branch() != branch
2148 and repo[b'.'].p2().branch() != branch
2149 ):
2149 ):
2150 raise error.InputError(_(b'can only close branch heads'))
2150 raise error.InputError(_(b'can only close branch heads'))
2151
2151
2152 if opts.get('amend'):
2152 if opts.get('amend'):
2153 if ui.configbool(b'ui', b'commitsubrepos'):
2153 if ui.configbool(b'ui', b'commitsubrepos'):
2154 raise error.InputError(
2154 raise error.InputError(
2155 _(b'cannot amend with ui.commitsubrepos enabled')
2155 _(b'cannot amend with ui.commitsubrepos enabled')
2156 )
2156 )
2157
2157
2158 old = repo[b'.']
2158 old = repo[b'.']
2159 rewriteutil.precheck(repo, [old.rev()], b'amend')
2159 rewriteutil.precheck(repo, [old.rev()], b'amend')
2160
2160
2161 # Currently histedit gets confused if an amend happens while histedit
2161 # Currently histedit gets confused if an amend happens while histedit
2162 # is in progress. Since we have a checkunfinished command, we are
2162 # is in progress. Since we have a checkunfinished command, we are
2163 # temporarily honoring it.
2163 # temporarily honoring it.
2164 #
2164 #
2165 # Note: eventually this guard will be removed. Please do not expect
2165 # Note: eventually this guard will be removed. Please do not expect
2166 # this behavior to remain.
2166 # this behavior to remain.
2167 if not obsolete.isenabled(repo, obsolete.createmarkersopt):
2167 if not obsolete.isenabled(repo, obsolete.createmarkersopt):
2168 cmdutil.checkunfinished(repo)
2168 cmdutil.checkunfinished(repo)
2169
2169
2170 node = cmdutil.amend(ui, repo, old, extra, pats, opts)
2170 node = cmdutil.amend(ui, repo, old, extra, pats, opts)
2171 opts = pycompat.byteskwargs(opts)
2171 opts = pycompat.byteskwargs(opts)
2172 if node == old.node():
2172 if node == old.node():
2173 ui.status(_(b"nothing changed\n"))
2173 ui.status(_(b"nothing changed\n"))
2174 return 1
2174 return 1
2175 else:
2175 else:
2176
2176
2177 def commitfunc(ui, repo, message, match, opts):
2177 def commitfunc(ui, repo, message, match, opts):
2178 overrides = {}
2178 overrides = {}
2179 if opts.get(b'secret'):
2179 if opts.get(b'secret'):
2180 overrides[(b'phases', b'new-commit')] = b'secret'
2180 overrides[(b'phases', b'new-commit')] = b'secret'
2181 elif opts.get(b'draft'):
2181 elif opts.get(b'draft'):
2182 overrides[(b'phases', b'new-commit')] = b'draft'
2182 overrides[(b'phases', b'new-commit')] = b'draft'
2183
2183
2184 baseui = repo.baseui
2184 baseui = repo.baseui
2185 with baseui.configoverride(overrides, b'commit'):
2185 with baseui.configoverride(overrides, b'commit'):
2186 with ui.configoverride(overrides, b'commit'):
2186 with ui.configoverride(overrides, b'commit'):
2187 editform = cmdutil.mergeeditform(
2187 editform = cmdutil.mergeeditform(
2188 repo[None], b'commit.normal'
2188 repo[None], b'commit.normal'
2189 )
2189 )
2190 editor = cmdutil.getcommiteditor(
2190 editor = cmdutil.getcommiteditor(
2191 editform=editform, **pycompat.strkwargs(opts)
2191 editform=editform, **pycompat.strkwargs(opts)
2192 )
2192 )
2193 return repo.commit(
2193 return repo.commit(
2194 message,
2194 message,
2195 opts.get(b'user'),
2195 opts.get(b'user'),
2196 opts.get(b'date'),
2196 opts.get(b'date'),
2197 match,
2197 match,
2198 editor=editor,
2198 editor=editor,
2199 extra=extra,
2199 extra=extra,
2200 )
2200 )
2201
2201
2202 opts = pycompat.byteskwargs(opts)
2202 opts = pycompat.byteskwargs(opts)
2203 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
2203 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
2204
2204
2205 if not node:
2205 if not node:
2206 stat = cmdutil.postcommitstatus(repo, pats, opts)
2206 stat = cmdutil.postcommitstatus(repo, pats, opts)
2207 if stat.deleted:
2207 if stat.deleted:
2208 ui.status(
2208 ui.status(
2209 _(
2209 _(
2210 b"nothing changed (%d missing files, see "
2210 b"nothing changed (%d missing files, see "
2211 b"'hg status')\n"
2211 b"'hg status')\n"
2212 )
2212 )
2213 % len(stat.deleted)
2213 % len(stat.deleted)
2214 )
2214 )
2215 else:
2215 else:
2216 ui.status(_(b"nothing changed\n"))
2216 ui.status(_(b"nothing changed\n"))
2217 return 1
2217 return 1
2218
2218
2219 cmdutil.commitstatus(repo, node, branch, bheads, tip, opts)
2219 cmdutil.commitstatus(repo, node, branch, bheads, tip, opts)
2220
2220
2221 if not ui.quiet and ui.configbool(b'commands', b'commit.post-status'):
2221 if not ui.quiet and ui.configbool(b'commands', b'commit.post-status'):
2222 status(
2222 status(
2223 ui,
2223 ui,
2224 repo,
2224 repo,
2225 modified=True,
2225 modified=True,
2226 added=True,
2226 added=True,
2227 removed=True,
2227 removed=True,
2228 deleted=True,
2228 deleted=True,
2229 unknown=True,
2229 unknown=True,
2230 subrepos=opts.get(b'subrepos'),
2230 subrepos=opts.get(b'subrepos'),
2231 )
2231 )
2232
2232
2233
2233
2234 @command(
2234 @command(
2235 b'config|showconfig|debugconfig',
2235 b'config|showconfig|debugconfig',
2236 [
2236 [
2237 (b'u', b'untrusted', None, _(b'show untrusted configuration options')),
2237 (b'u', b'untrusted', None, _(b'show untrusted configuration options')),
2238 # This is experimental because we need
2238 # This is experimental because we need
2239 # * reasonable behavior around aliases,
2239 # * reasonable behavior around aliases,
2240 # * decide if we display [debug] [experimental] and [devel] section par
2240 # * decide if we display [debug] [experimental] and [devel] section par
2241 # default
2241 # default
2242 # * some way to display "generic" config entry (the one matching
2242 # * some way to display "generic" config entry (the one matching
2243 # regexp,
2243 # regexp,
2244 # * proper display of the different value type
2244 # * proper display of the different value type
2245 # * a better way to handle <DYNAMIC> values (and variable types),
2245 # * a better way to handle <DYNAMIC> values (and variable types),
2246 # * maybe some type information ?
2246 # * maybe some type information ?
2247 (
2247 (
2248 b'',
2248 b'',
2249 b'exp-all-known',
2249 b'exp-all-known',
2250 None,
2250 None,
2251 _(b'show all known config option (EXPERIMENTAL)'),
2251 _(b'show all known config option (EXPERIMENTAL)'),
2252 ),
2252 ),
2253 (b'e', b'edit', None, _(b'edit user config')),
2253 (b'e', b'edit', None, _(b'edit user config')),
2254 (b'l', b'local', None, _(b'edit repository config')),
2254 (b'l', b'local', None, _(b'edit repository config')),
2255 (b'', b'source', None, _(b'show source of configuration value')),
2255 (b'', b'source', None, _(b'show source of configuration value')),
2256 (
2256 (
2257 b'',
2257 b'',
2258 b'shared',
2258 b'shared',
2259 None,
2259 None,
2260 _(b'edit shared source repository config (EXPERIMENTAL)'),
2260 _(b'edit shared source repository config (EXPERIMENTAL)'),
2261 ),
2261 ),
2262 (b'', b'non-shared', None, _(b'edit non shared config (EXPERIMENTAL)')),
2262 (b'', b'non-shared', None, _(b'edit non shared config (EXPERIMENTAL)')),
2263 (b'g', b'global', None, _(b'edit global config')),
2263 (b'g', b'global', None, _(b'edit global config')),
2264 ]
2264 ]
2265 + formatteropts,
2265 + formatteropts,
2266 _(b'[-u] [NAME]...'),
2266 _(b'[-u] [NAME]...'),
2267 helpcategory=command.CATEGORY_HELP,
2267 helpcategory=command.CATEGORY_HELP,
2268 optionalrepo=True,
2268 optionalrepo=True,
2269 intents={INTENT_READONLY},
2269 intents={INTENT_READONLY},
2270 )
2270 )
2271 def config(ui, repo, *values, **opts):
2271 def config(ui, repo, *values, **opts):
2272 """show combined config settings from all hgrc files
2272 """show combined config settings from all hgrc files
2273
2273
2274 With no arguments, print names and values of all config items.
2274 With no arguments, print names and values of all config items.
2275
2275
2276 With one argument of the form section.name, print just the value
2276 With one argument of the form section.name, print just the value
2277 of that config item.
2277 of that config item.
2278
2278
2279 With multiple arguments, print names and values of all config
2279 With multiple arguments, print names and values of all config
2280 items with matching section names or section.names.
2280 items with matching section names or section.names.
2281
2281
2282 With --edit, start an editor on the user-level config file. With
2282 With --edit, start an editor on the user-level config file. With
2283 --global, edit the system-wide config file. With --local, edit the
2283 --global, edit the system-wide config file. With --local, edit the
2284 repository-level config file.
2284 repository-level config file.
2285
2285
2286 With --source, the source (filename and line number) is printed
2286 With --source, the source (filename and line number) is printed
2287 for each config item.
2287 for each config item.
2288
2288
2289 See :hg:`help config` for more information about config files.
2289 See :hg:`help config` for more information about config files.
2290
2290
2291 .. container:: verbose
2291 .. container:: verbose
2292
2292
2293 --non-shared flag is used to edit `.hg/hgrc-not-shared` config file.
2293 --non-shared flag is used to edit `.hg/hgrc-not-shared` config file.
2294 This file is not shared across shares when in share-safe mode.
2294 This file is not shared across shares when in share-safe mode.
2295
2295
2296 Template:
2296 Template:
2297
2297
2298 The following keywords are supported. See also :hg:`help templates`.
2298 The following keywords are supported. See also :hg:`help templates`.
2299
2299
2300 :name: String. Config name.
2300 :name: String. Config name.
2301 :source: String. Filename and line number where the item is defined.
2301 :source: String. Filename and line number where the item is defined.
2302 :value: String. Config value.
2302 :value: String. Config value.
2303
2303
2304 The --shared flag can be used to edit the config file of shared source
2304 The --shared flag can be used to edit the config file of shared source
2305 repository. It only works when you have shared using the experimental
2305 repository. It only works when you have shared using the experimental
2306 share safe feature.
2306 share safe feature.
2307
2307
2308 Returns 0 on success, 1 if NAME does not exist.
2308 Returns 0 on success, 1 if NAME does not exist.
2309
2309
2310 """
2310 """
2311
2311
2312 opts = pycompat.byteskwargs(opts)
2312 opts = pycompat.byteskwargs(opts)
2313 editopts = (b'edit', b'local', b'global', b'shared', b'non_shared')
2313 editopts = (b'edit', b'local', b'global', b'shared', b'non_shared')
2314 if any(opts.get(o) for o in editopts):
2314 if any(opts.get(o) for o in editopts):
2315 cmdutil.check_at_most_one_arg(opts, *editopts[1:])
2315 cmdutil.check_at_most_one_arg(opts, *editopts[1:])
2316 if opts.get(b'local'):
2316 if opts.get(b'local'):
2317 if not repo:
2317 if not repo:
2318 raise error.InputError(
2318 raise error.InputError(
2319 _(b"can't use --local outside a repository")
2319 _(b"can't use --local outside a repository")
2320 )
2320 )
2321 paths = [repo.vfs.join(b'hgrc')]
2321 paths = [repo.vfs.join(b'hgrc')]
2322 elif opts.get(b'global'):
2322 elif opts.get(b'global'):
2323 paths = rcutil.systemrcpath()
2323 paths = rcutil.systemrcpath()
2324 elif opts.get(b'shared'):
2324 elif opts.get(b'shared'):
2325 if not repo.shared():
2325 if not repo.shared():
2326 raise error.InputError(
2326 raise error.InputError(
2327 _(b"repository is not shared; can't use --shared")
2327 _(b"repository is not shared; can't use --shared")
2328 )
2328 )
2329 if requirements.SHARESAFE_REQUIREMENT not in repo.requirements:
2329 if requirements.SHARESAFE_REQUIREMENT not in repo.requirements:
2330 raise error.InputError(
2330 raise error.InputError(
2331 _(
2331 _(
2332 b"share safe feature not enabled; "
2332 b"share safe feature not enabled; "
2333 b"unable to edit shared source repository config"
2333 b"unable to edit shared source repository config"
2334 )
2334 )
2335 )
2335 )
2336 paths = [vfsmod.vfs(repo.sharedpath).join(b'hgrc')]
2336 paths = [vfsmod.vfs(repo.sharedpath).join(b'hgrc')]
2337 elif opts.get(b'non_shared'):
2337 elif opts.get(b'non_shared'):
2338 paths = [repo.vfs.join(b'hgrc-not-shared')]
2338 paths = [repo.vfs.join(b'hgrc-not-shared')]
2339 else:
2339 else:
2340 paths = rcutil.userrcpath()
2340 paths = rcutil.userrcpath()
2341
2341
2342 for f in paths:
2342 for f in paths:
2343 if os.path.exists(f):
2343 if os.path.exists(f):
2344 break
2344 break
2345 else:
2345 else:
2346 if opts.get(b'global'):
2346 if opts.get(b'global'):
2347 samplehgrc = uimod.samplehgrcs[b'global']
2347 samplehgrc = uimod.samplehgrcs[b'global']
2348 elif opts.get(b'local'):
2348 elif opts.get(b'local'):
2349 samplehgrc = uimod.samplehgrcs[b'local']
2349 samplehgrc = uimod.samplehgrcs[b'local']
2350 else:
2350 else:
2351 samplehgrc = uimod.samplehgrcs[b'user']
2351 samplehgrc = uimod.samplehgrcs[b'user']
2352
2352
2353 f = paths[0]
2353 f = paths[0]
2354 fp = open(f, b"wb")
2354 fp = open(f, b"wb")
2355 fp.write(util.tonativeeol(samplehgrc))
2355 fp.write(util.tonativeeol(samplehgrc))
2356 fp.close()
2356 fp.close()
2357
2357
2358 editor = ui.geteditor()
2358 editor = ui.geteditor()
2359 ui.system(
2359 ui.system(
2360 b"%s \"%s\"" % (editor, f),
2360 b"%s \"%s\"" % (editor, f),
2361 onerr=error.InputError,
2361 onerr=error.InputError,
2362 errprefix=_(b"edit failed"),
2362 errprefix=_(b"edit failed"),
2363 blockedtag=b'config_edit',
2363 blockedtag=b'config_edit',
2364 )
2364 )
2365 return
2365 return
2366 ui.pager(b'config')
2366 ui.pager(b'config')
2367 fm = ui.formatter(b'config', opts)
2367 fm = ui.formatter(b'config', opts)
2368 for t, f in rcutil.rccomponents():
2368 for t, f in rcutil.rccomponents():
2369 if t == b'path':
2369 if t == b'path':
2370 ui.debug(b'read config from: %s\n' % f)
2370 ui.debug(b'read config from: %s\n' % f)
2371 elif t == b'resource':
2371 elif t == b'resource':
2372 ui.debug(b'read config from: resource:%s.%s\n' % (f[0], f[1]))
2372 ui.debug(b'read config from: resource:%s.%s\n' % (f[0], f[1]))
2373 elif t == b'items':
2373 elif t == b'items':
2374 # Don't print anything for 'items'.
2374 # Don't print anything for 'items'.
2375 pass
2375 pass
2376 else:
2376 else:
2377 raise error.ProgrammingError(b'unknown rctype: %s' % t)
2377 raise error.ProgrammingError(b'unknown rctype: %s' % t)
2378 untrusted = bool(opts.get(b'untrusted'))
2378 untrusted = bool(opts.get(b'untrusted'))
2379
2379
2380 selsections = selentries = []
2380 selsections = selentries = []
2381 if values:
2381 if values:
2382 selsections = [v for v in values if b'.' not in v]
2382 selsections = [v for v in values if b'.' not in v]
2383 selentries = [v for v in values if b'.' in v]
2383 selentries = [v for v in values if b'.' in v]
2384 uniquesel = len(selentries) == 1 and not selsections
2384 uniquesel = len(selentries) == 1 and not selsections
2385 selsections = set(selsections)
2385 selsections = set(selsections)
2386 selentries = set(selentries)
2386 selentries = set(selentries)
2387
2387
2388 matched = False
2388 matched = False
2389 all_known = opts[b'exp_all_known']
2389 all_known = opts[b'exp_all_known']
2390 show_source = ui.debugflag or opts.get(b'source')
2390 show_source = ui.debugflag or opts.get(b'source')
2391 entries = ui.walkconfig(untrusted=untrusted, all_known=all_known)
2391 entries = ui.walkconfig(untrusted=untrusted, all_known=all_known)
2392 for section, name, value in entries:
2392 for section, name, value in entries:
2393 source = ui.configsource(section, name, untrusted)
2393 source = ui.configsource(section, name, untrusted)
2394 value = pycompat.bytestr(value)
2394 value = pycompat.bytestr(value)
2395 defaultvalue = ui.configdefault(section, name)
2395 defaultvalue = ui.configdefault(section, name)
2396 if fm.isplain():
2396 if fm.isplain():
2397 source = source or b'none'
2397 source = source or b'none'
2398 value = value.replace(b'\n', b'\\n')
2398 value = value.replace(b'\n', b'\\n')
2399 entryname = section + b'.' + name
2399 entryname = section + b'.' + name
2400 if values and not (section in selsections or entryname in selentries):
2400 if values and not (section in selsections or entryname in selentries):
2401 continue
2401 continue
2402 fm.startitem()
2402 fm.startitem()
2403 fm.condwrite(show_source, b'source', b'%s: ', source)
2403 fm.condwrite(show_source, b'source', b'%s: ', source)
2404 if uniquesel:
2404 if uniquesel:
2405 fm.data(name=entryname)
2405 fm.data(name=entryname)
2406 fm.write(b'value', b'%s\n', value)
2406 fm.write(b'value', b'%s\n', value)
2407 else:
2407 else:
2408 fm.write(b'name value', b'%s=%s\n', entryname, value)
2408 fm.write(b'name value', b'%s=%s\n', entryname, value)
2409 if formatter.isprintable(defaultvalue):
2409 if formatter.isprintable(defaultvalue):
2410 fm.data(defaultvalue=defaultvalue)
2410 fm.data(defaultvalue=defaultvalue)
2411 elif isinstance(defaultvalue, list) and all(
2411 elif isinstance(defaultvalue, list) and all(
2412 formatter.isprintable(e) for e in defaultvalue
2412 formatter.isprintable(e) for e in defaultvalue
2413 ):
2413 ):
2414 fm.data(defaultvalue=fm.formatlist(defaultvalue, name=b'value'))
2414 fm.data(defaultvalue=fm.formatlist(defaultvalue, name=b'value'))
2415 # TODO: no idea how to process unsupported defaultvalue types
2415 # TODO: no idea how to process unsupported defaultvalue types
2416 matched = True
2416 matched = True
2417 fm.end()
2417 fm.end()
2418 if matched:
2418 if matched:
2419 return 0
2419 return 0
2420 return 1
2420 return 1
2421
2421
2422
2422
2423 @command(
2423 @command(
2424 b'continue',
2424 b'continue',
2425 dryrunopts,
2425 dryrunopts,
2426 helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
2426 helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
2427 helpbasic=True,
2427 helpbasic=True,
2428 )
2428 )
2429 def continuecmd(ui, repo, **opts):
2429 def continuecmd(ui, repo, **opts):
2430 """resumes an interrupted operation (EXPERIMENTAL)
2430 """resumes an interrupted operation (EXPERIMENTAL)
2431
2431
2432 Finishes a multistep operation like graft, histedit, rebase, merge,
2432 Finishes a multistep operation like graft, histedit, rebase, merge,
2433 and unshelve if they are in an interrupted state.
2433 and unshelve if they are in an interrupted state.
2434
2434
2435 use --dry-run/-n to dry run the command.
2435 use --dry-run/-n to dry run the command.
2436 """
2436 """
2437 dryrun = opts.get('dry_run')
2437 dryrun = opts.get('dry_run')
2438 contstate = cmdutil.getunfinishedstate(repo)
2438 contstate = cmdutil.getunfinishedstate(repo)
2439 if not contstate:
2439 if not contstate:
2440 raise error.StateError(_(b'no operation in progress'))
2440 raise error.StateError(_(b'no operation in progress'))
2441 if not contstate.continuefunc:
2441 if not contstate.continuefunc:
2442 raise error.StateError(
2442 raise error.StateError(
2443 (
2443 (
2444 _(b"%s in progress but does not support 'hg continue'")
2444 _(b"%s in progress but does not support 'hg continue'")
2445 % (contstate._opname)
2445 % (contstate._opname)
2446 ),
2446 ),
2447 hint=contstate.continuemsg(),
2447 hint=contstate.continuemsg(),
2448 )
2448 )
2449 if dryrun:
2449 if dryrun:
2450 ui.status(_(b'%s in progress, will be resumed\n') % (contstate._opname))
2450 ui.status(_(b'%s in progress, will be resumed\n') % (contstate._opname))
2451 return
2451 return
2452 return contstate.continuefunc(ui, repo)
2452 return contstate.continuefunc(ui, repo)
2453
2453
2454
2454
2455 @command(
2455 @command(
2456 b'copy|cp',
2456 b'copy|cp',
2457 [
2457 [
2458 (b'', b'forget', None, _(b'unmark a destination file as copied')),
2458 (b'', b'forget', None, _(b'unmark a destination file as copied')),
2459 (b'A', b'after', None, _(b'record a copy that has already occurred')),
2459 (b'A', b'after', None, _(b'record a copy that has already occurred')),
2460 (
2460 (
2461 b'',
2461 b'',
2462 b'at-rev',
2462 b'at-rev',
2463 b'',
2463 b'',
2464 _(b'(un)mark copies in the given revision (EXPERIMENTAL)'),
2464 _(b'(un)mark copies in the given revision (EXPERIMENTAL)'),
2465 _(b'REV'),
2465 _(b'REV'),
2466 ),
2466 ),
2467 (
2467 (
2468 b'f',
2468 b'f',
2469 b'force',
2469 b'force',
2470 None,
2470 None,
2471 _(b'forcibly copy over an existing managed file'),
2471 _(b'forcibly copy over an existing managed file'),
2472 ),
2472 ),
2473 ]
2473 ]
2474 + walkopts
2474 + walkopts
2475 + dryrunopts,
2475 + dryrunopts,
2476 _(b'[OPTION]... (SOURCE... DEST | --forget DEST...)'),
2476 _(b'[OPTION]... (SOURCE... DEST | --forget DEST...)'),
2477 helpcategory=command.CATEGORY_FILE_CONTENTS,
2477 helpcategory=command.CATEGORY_FILE_CONTENTS,
2478 )
2478 )
2479 def copy(ui, repo, *pats, **opts):
2479 def copy(ui, repo, *pats, **opts):
2480 """mark files as copied for the next commit
2480 """mark files as copied for the next commit
2481
2481
2482 Mark dest as having copies of source files. If dest is a
2482 Mark dest as having copies of source files. If dest is a
2483 directory, copies are put in that directory. If dest is a file,
2483 directory, copies are put in that directory. If dest is a file,
2484 the source must be a single file.
2484 the source must be a single file.
2485
2485
2486 By default, this command copies the contents of files as they
2486 By default, this command copies the contents of files as they
2487 exist in the working directory. If invoked with -A/--after, the
2487 exist in the working directory. If invoked with -A/--after, the
2488 operation is recorded, but no copying is performed.
2488 operation is recorded, but no copying is performed.
2489
2489
2490 To undo marking a destination file as copied, use --forget. With that
2490 To undo marking a destination file as copied, use --forget. With that
2491 option, all given (positional) arguments are unmarked as copies. The
2491 option, all given (positional) arguments are unmarked as copies. The
2492 destination file(s) will be left in place (still tracked). Note that
2492 destination file(s) will be left in place (still tracked). Note that
2493 :hg:`copy --forget` behaves the same way as :hg:`rename --forget`.
2493 :hg:`copy --forget` behaves the same way as :hg:`rename --forget`.
2494
2494
2495 This command takes effect with the next commit by default.
2495 This command takes effect with the next commit by default.
2496
2496
2497 Returns 0 on success, 1 if errors are encountered.
2497 Returns 0 on success, 1 if errors are encountered.
2498 """
2498 """
2499 opts = pycompat.byteskwargs(opts)
2499 opts = pycompat.byteskwargs(opts)
2500
2500
2501 context = repo.dirstate.changing_files
2501 context = lambda repo: repo.dirstate.changing_files(repo)
2502 rev = opts.get(b'at_rev')
2502 rev = opts.get(b'at_rev')
2503 ctx = None
2503 ctx = None
2504 if rev:
2504 if rev:
2505 ctx = logcmdutil.revsingle(repo, rev)
2505 ctx = logcmdutil.revsingle(repo, rev)
2506 if ctx.rev() is not None:
2506 if ctx.rev() is not None:
2507
2507
2508 def context(repo):
2508 def context(repo):
2509 return util.nullcontextmanager()
2509 return util.nullcontextmanager()
2510
2510
2511 opts[b'at_rev'] = ctx.rev()
2511 opts[b'at_rev'] = ctx.rev()
2512 with repo.wlock(), context(repo):
2512 with repo.wlock(), context(repo):
2513 return cmdutil.copy(ui, repo, pats, opts)
2513 return cmdutil.copy(ui, repo, pats, opts)
2514
2514
2515
2515
2516 @command(
2516 @command(
2517 b'debugcommands',
2517 b'debugcommands',
2518 [],
2518 [],
2519 _(b'[COMMAND]'),
2519 _(b'[COMMAND]'),
2520 helpcategory=command.CATEGORY_HELP,
2520 helpcategory=command.CATEGORY_HELP,
2521 norepo=True,
2521 norepo=True,
2522 )
2522 )
2523 def debugcommands(ui, cmd=b'', *args):
2523 def debugcommands(ui, cmd=b'', *args):
2524 """list all available commands and options"""
2524 """list all available commands and options"""
2525 for cmd, vals in sorted(table.items()):
2525 for cmd, vals in sorted(table.items()):
2526 cmd = cmd.split(b'|')[0]
2526 cmd = cmd.split(b'|')[0]
2527 opts = b', '.join([i[1] for i in vals[1]])
2527 opts = b', '.join([i[1] for i in vals[1]])
2528 ui.write(b'%s: %s\n' % (cmd, opts))
2528 ui.write(b'%s: %s\n' % (cmd, opts))
2529
2529
2530
2530
2531 @command(
2531 @command(
2532 b'debugcomplete',
2532 b'debugcomplete',
2533 [(b'o', b'options', None, _(b'show the command options'))],
2533 [(b'o', b'options', None, _(b'show the command options'))],
2534 _(b'[-o] CMD'),
2534 _(b'[-o] CMD'),
2535 helpcategory=command.CATEGORY_HELP,
2535 helpcategory=command.CATEGORY_HELP,
2536 norepo=True,
2536 norepo=True,
2537 )
2537 )
2538 def debugcomplete(ui, cmd=b'', **opts):
2538 def debugcomplete(ui, cmd=b'', **opts):
2539 """returns the completion list associated with the given command"""
2539 """returns the completion list associated with the given command"""
2540
2540
2541 if opts.get('options'):
2541 if opts.get('options'):
2542 options = []
2542 options = []
2543 otables = [globalopts]
2543 otables = [globalopts]
2544 if cmd:
2544 if cmd:
2545 aliases, entry = cmdutil.findcmd(cmd, table, False)
2545 aliases, entry = cmdutil.findcmd(cmd, table, False)
2546 otables.append(entry[1])
2546 otables.append(entry[1])
2547 for t in otables:
2547 for t in otables:
2548 for o in t:
2548 for o in t:
2549 if b"(DEPRECATED)" in o[3]:
2549 if b"(DEPRECATED)" in o[3]:
2550 continue
2550 continue
2551 if o[0]:
2551 if o[0]:
2552 options.append(b'-%s' % o[0])
2552 options.append(b'-%s' % o[0])
2553 options.append(b'--%s' % o[1])
2553 options.append(b'--%s' % o[1])
2554 ui.write(b"%s\n" % b"\n".join(options))
2554 ui.write(b"%s\n" % b"\n".join(options))
2555 return
2555 return
2556
2556
2557 cmdlist, unused_allcmds = cmdutil.findpossible(cmd, table)
2557 cmdlist, unused_allcmds = cmdutil.findpossible(cmd, table)
2558 if ui.verbose:
2558 if ui.verbose:
2559 cmdlist = [b' '.join(c[0]) for c in cmdlist.values()]
2559 cmdlist = [b' '.join(c[0]) for c in cmdlist.values()]
2560 ui.write(b"%s\n" % b"\n".join(sorted(cmdlist)))
2560 ui.write(b"%s\n" % b"\n".join(sorted(cmdlist)))
2561
2561
2562
2562
2563 @command(
2563 @command(
2564 b'diff',
2564 b'diff',
2565 [
2565 [
2566 (b'r', b'rev', [], _(b'revision (DEPRECATED)'), _(b'REV')),
2566 (b'r', b'rev', [], _(b'revision (DEPRECATED)'), _(b'REV')),
2567 (b'', b'from', b'', _(b'revision to diff from'), _(b'REV1')),
2567 (b'', b'from', b'', _(b'revision to diff from'), _(b'REV1')),
2568 (b'', b'to', b'', _(b'revision to diff to'), _(b'REV2')),
2568 (b'', b'to', b'', _(b'revision to diff to'), _(b'REV2')),
2569 (b'c', b'change', b'', _(b'change made by revision'), _(b'REV')),
2569 (b'c', b'change', b'', _(b'change made by revision'), _(b'REV')),
2570 ]
2570 ]
2571 + diffopts
2571 + diffopts
2572 + diffopts2
2572 + diffopts2
2573 + walkopts
2573 + walkopts
2574 + subrepoopts,
2574 + subrepoopts,
2575 _(b'[OPTION]... ([-c REV] | [--from REV1] [--to REV2]) [FILE]...'),
2575 _(b'[OPTION]... ([-c REV] | [--from REV1] [--to REV2]) [FILE]...'),
2576 helpcategory=command.CATEGORY_FILE_CONTENTS,
2576 helpcategory=command.CATEGORY_FILE_CONTENTS,
2577 helpbasic=True,
2577 helpbasic=True,
2578 inferrepo=True,
2578 inferrepo=True,
2579 intents={INTENT_READONLY},
2579 intents={INTENT_READONLY},
2580 )
2580 )
2581 def diff(ui, repo, *pats, **opts):
2581 def diff(ui, repo, *pats, **opts):
2582 """diff repository (or selected files)
2582 """diff repository (or selected files)
2583
2583
2584 Show differences between revisions for the specified files.
2584 Show differences between revisions for the specified files.
2585
2585
2586 Differences between files are shown using the unified diff format.
2586 Differences between files are shown using the unified diff format.
2587
2587
2588 .. note::
2588 .. note::
2589
2589
2590 :hg:`diff` may generate unexpected results for merges, as it will
2590 :hg:`diff` may generate unexpected results for merges, as it will
2591 default to comparing against the working directory's first
2591 default to comparing against the working directory's first
2592 parent changeset if no revisions are specified. To diff against the
2592 parent changeset if no revisions are specified. To diff against the
2593 conflict regions, you can use `--config diff.merge=yes`.
2593 conflict regions, you can use `--config diff.merge=yes`.
2594
2594
2595 By default, the working directory files are compared to its first parent. To
2595 By default, the working directory files are compared to its first parent. To
2596 see the differences from another revision, use --from. To see the difference
2596 see the differences from another revision, use --from. To see the difference
2597 to another revision, use --to. For example, :hg:`diff --from .^` will show
2597 to another revision, use --to. For example, :hg:`diff --from .^` will show
2598 the differences from the working copy's grandparent to the working copy,
2598 the differences from the working copy's grandparent to the working copy,
2599 :hg:`diff --to .` will show the diff from the working copy to its parent
2599 :hg:`diff --to .` will show the diff from the working copy to its parent
2600 (i.e. the reverse of the default), and :hg:`diff --from 1.0 --to 1.2` will
2600 (i.e. the reverse of the default), and :hg:`diff --from 1.0 --to 1.2` will
2601 show the diff between those two revisions.
2601 show the diff between those two revisions.
2602
2602
2603 Alternatively you can specify -c/--change with a revision to see the changes
2603 Alternatively you can specify -c/--change with a revision to see the changes
2604 in that changeset relative to its first parent (i.e. :hg:`diff -c 42` is
2604 in that changeset relative to its first parent (i.e. :hg:`diff -c 42` is
2605 equivalent to :hg:`diff --from 42^ --to 42`)
2605 equivalent to :hg:`diff --from 42^ --to 42`)
2606
2606
2607 Without the -a/--text option, diff will avoid generating diffs of
2607 Without the -a/--text option, diff will avoid generating diffs of
2608 files it detects as binary. With -a, diff will generate a diff
2608 files it detects as binary. With -a, diff will generate a diff
2609 anyway, probably with undesirable results.
2609 anyway, probably with undesirable results.
2610
2610
2611 Use the -g/--git option to generate diffs in the git extended diff
2611 Use the -g/--git option to generate diffs in the git extended diff
2612 format. For more information, read :hg:`help diffs`.
2612 format. For more information, read :hg:`help diffs`.
2613
2613
2614 .. container:: verbose
2614 .. container:: verbose
2615
2615
2616 Examples:
2616 Examples:
2617
2617
2618 - compare a file in the current working directory to its parent::
2618 - compare a file in the current working directory to its parent::
2619
2619
2620 hg diff foo.c
2620 hg diff foo.c
2621
2621
2622 - compare two historical versions of a directory, with rename info::
2622 - compare two historical versions of a directory, with rename info::
2623
2623
2624 hg diff --git --from 1.0 --to 1.2 lib/
2624 hg diff --git --from 1.0 --to 1.2 lib/
2625
2625
2626 - get change stats relative to the last change on some date::
2626 - get change stats relative to the last change on some date::
2627
2627
2628 hg diff --stat --from "date('may 2')"
2628 hg diff --stat --from "date('may 2')"
2629
2629
2630 - diff all newly-added files that contain a keyword::
2630 - diff all newly-added files that contain a keyword::
2631
2631
2632 hg diff "set:added() and grep(GNU)"
2632 hg diff "set:added() and grep(GNU)"
2633
2633
2634 - compare a revision and its parents::
2634 - compare a revision and its parents::
2635
2635
2636 hg diff -c 9353 # compare against first parent
2636 hg diff -c 9353 # compare against first parent
2637 hg diff --from 9353^ --to 9353 # same using revset syntax
2637 hg diff --from 9353^ --to 9353 # same using revset syntax
2638 hg diff --from 9353^2 --to 9353 # compare against the second parent
2638 hg diff --from 9353^2 --to 9353 # compare against the second parent
2639
2639
2640 Returns 0 on success.
2640 Returns 0 on success.
2641 """
2641 """
2642
2642
2643 cmdutil.check_at_most_one_arg(opts, 'rev', 'change')
2643 cmdutil.check_at_most_one_arg(opts, 'rev', 'change')
2644 opts = pycompat.byteskwargs(opts)
2644 opts = pycompat.byteskwargs(opts)
2645 revs = opts.get(b'rev')
2645 revs = opts.get(b'rev')
2646 change = opts.get(b'change')
2646 change = opts.get(b'change')
2647 from_rev = opts.get(b'from')
2647 from_rev = opts.get(b'from')
2648 to_rev = opts.get(b'to')
2648 to_rev = opts.get(b'to')
2649 stat = opts.get(b'stat')
2649 stat = opts.get(b'stat')
2650 reverse = opts.get(b'reverse')
2650 reverse = opts.get(b'reverse')
2651
2651
2652 cmdutil.check_incompatible_arguments(opts, b'from', [b'rev', b'change'])
2652 cmdutil.check_incompatible_arguments(opts, b'from', [b'rev', b'change'])
2653 cmdutil.check_incompatible_arguments(opts, b'to', [b'rev', b'change'])
2653 cmdutil.check_incompatible_arguments(opts, b'to', [b'rev', b'change'])
2654 if change:
2654 if change:
2655 repo = scmutil.unhidehashlikerevs(repo, [change], b'nowarn')
2655 repo = scmutil.unhidehashlikerevs(repo, [change], b'nowarn')
2656 ctx2 = logcmdutil.revsingle(repo, change, None)
2656 ctx2 = logcmdutil.revsingle(repo, change, None)
2657 ctx1 = logcmdutil.diff_parent(ctx2)
2657 ctx1 = logcmdutil.diff_parent(ctx2)
2658 elif from_rev or to_rev:
2658 elif from_rev or to_rev:
2659 repo = scmutil.unhidehashlikerevs(
2659 repo = scmutil.unhidehashlikerevs(
2660 repo, [from_rev] + [to_rev], b'nowarn'
2660 repo, [from_rev] + [to_rev], b'nowarn'
2661 )
2661 )
2662 ctx1 = logcmdutil.revsingle(repo, from_rev, None)
2662 ctx1 = logcmdutil.revsingle(repo, from_rev, None)
2663 ctx2 = logcmdutil.revsingle(repo, to_rev, None)
2663 ctx2 = logcmdutil.revsingle(repo, to_rev, None)
2664 else:
2664 else:
2665 repo = scmutil.unhidehashlikerevs(repo, revs, b'nowarn')
2665 repo = scmutil.unhidehashlikerevs(repo, revs, b'nowarn')
2666 ctx1, ctx2 = logcmdutil.revpair(repo, revs)
2666 ctx1, ctx2 = logcmdutil.revpair(repo, revs)
2667
2667
2668 if reverse:
2668 if reverse:
2669 ctxleft = ctx2
2669 ctxleft = ctx2
2670 ctxright = ctx1
2670 ctxright = ctx1
2671 else:
2671 else:
2672 ctxleft = ctx1
2672 ctxleft = ctx1
2673 ctxright = ctx2
2673 ctxright = ctx2
2674
2674
2675 diffopts = patch.diffallopts(ui, opts)
2675 diffopts = patch.diffallopts(ui, opts)
2676 m = scmutil.match(ctx2, pats, opts)
2676 m = scmutil.match(ctx2, pats, opts)
2677 m = repo.narrowmatch(m)
2677 m = repo.narrowmatch(m)
2678 ui.pager(b'diff')
2678 ui.pager(b'diff')
2679 logcmdutil.diffordiffstat(
2679 logcmdutil.diffordiffstat(
2680 ui,
2680 ui,
2681 repo,
2681 repo,
2682 diffopts,
2682 diffopts,
2683 ctxleft,
2683 ctxleft,
2684 ctxright,
2684 ctxright,
2685 m,
2685 m,
2686 stat=stat,
2686 stat=stat,
2687 listsubrepos=opts.get(b'subrepos'),
2687 listsubrepos=opts.get(b'subrepos'),
2688 root=opts.get(b'root'),
2688 root=opts.get(b'root'),
2689 )
2689 )
2690
2690
2691
2691
@command(
    b'export',
    [
        (
            b'B',
            b'bookmark',
            b'',
            _(b'export changes only reachable by given bookmark'),
            _(b'BOOKMARK'),
        ),
        (
            b'o',
            b'output',
            b'',
            _(b'print output to file with formatted name'),
            _(b'FORMAT'),
        ),
        (b'', b'switch-parent', None, _(b'diff against the second parent')),
        (b'r', b'rev', [], _(b'revisions to export'), _(b'REV')),
    ]
    + diffopts
    + formatteropts,
    _(b'[OPTION]... [-o OUTFILESPEC] [-r] [REV]...'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
    helpbasic=True,
    intents={INTENT_READONLY},
)
def export(ui, repo, *changesets, **opts):
    """dump the header and diffs for one or more changesets

    Print the changeset header and diffs for one or more revisions.
    If no revision is given, the parent of the working directory is used.

    The information shown in the changeset header is: author, date,
    branch name (if non-default), changeset hash, parent(s) and commit
    comment.

    .. note::

       :hg:`export` may generate unexpected diff output for merge
       changesets, as it will compare the merge changeset against its
       first parent only.

    Output may be to a file, in which case the name of the file is
    given using a template string. See :hg:`help templates`. In addition
    to the common template keywords, the following formatting rules are
    supported:

    :``%%``: literal "%" character
    :``%H``: changeset hash (40 hexadecimal digits)
    :``%N``: number of patches being generated
    :``%R``: changeset revision number
    :``%b``: basename of the exporting repository
    :``%h``: short-form changeset hash (12 hexadecimal digits)
    :``%m``: first line of the commit message (only alphanumeric characters)
    :``%n``: zero-padded sequence number, starting at 1
    :``%r``: zero-padded changeset revision number
    :``\\``: literal "\\" character

    Without the -a/--text option, export will avoid generating diffs
    of files it detects as binary. With -a, export will generate a
    diff anyway, probably with undesirable results.

    With -B/--bookmark changesets reachable by the given bookmark are
    selected.

    Use the -g/--git option to generate diffs in the git extended diff
    format. See :hg:`help diffs` for more information.

    With the --switch-parent option, the diff will be against the
    second parent. It can be useful to review a merge.

    .. container:: verbose

      Template:

      The following keywords are supported in addition to the common template
      keywords and functions. See also :hg:`help templates`.

      :diff: String. Diff content.
      :parents: List of strings. Parent nodes of the changeset.

      Examples:

      - use export and import to transplant a bugfix to the current
        branch::

          hg export -r 9353 | hg import -

      - export all the changesets between two revisions to a file with
        rename information::

          hg export --git -r 123:150 > changes.txt

      - split outgoing changes into a series of patches with
        descriptive names::

          hg export -r "outgoing()" -o "%n-%m.patch"

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)
    # --rev and --bookmark are mutually exclusive ways of selecting revisions
    cmdutil.check_at_most_one_arg(opts, b'rev', b'bookmark')

    bookmark = opts.get(b'bookmark')
    changesets += tuple(opts.get(b'rev', []))

    if bookmark:
        # select everything reachable only through the bookmark
        if bookmark not in repo._bookmarks:
            raise error.InputError(_(b"bookmark '%s' not found") % bookmark)
        revs = scmutil.bookmarkrevs(repo, bookmark)
    else:
        # default to the working directory parent when nothing was given
        specs = changesets or [b'.']
        repo = scmutil.unhidehashlikerevs(repo, specs, b'nowarn')
        revs = logcmdutil.revrange(repo, specs)

    if not revs:
        raise error.InputError(_(b"export requires at least one changeset"))
    note = (
        _(b'exporting patches:\n')
        if len(revs) > 1
        else _(b'exporting patch:\n')
    )
    ui.note(note)

    fntemplate = opts.get(b'output')
    if cmdutil.isstdiofilename(fntemplate):
        fntemplate = b''

    if not fntemplate:
        # writing to stdout: go through the pager and a real formatter
        ui.pager(b'export')
        fm = ui.formatter(b'export', opts)
    else:
        # per-revision output files get their own streams inside
        # cmdutil.export, so a null formatter is enough here
        fm = formatter.nullformatter(ui, b'export', opts)
    with fm:
        cmdutil.export(
            repo,
            revs,
            fm,
            fntemplate=fntemplate,
            switch_parent=opts.get(b'switch_parent'),
            opts=patch.diffallopts(ui, opts),
        )
2836
2836
2837
2837
@command(
    b'files',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'search the repository as it is in REV'),
            _(b'REV'),
        ),
        (
            b'0',
            b'print0',
            None,
            _(b'end filenames with NUL, for use with xargs'),
        ),
    ]
    + walkopts
    + formatteropts
    + subrepoopts,
    _(b'[OPTION]... [FILE]...'),
    helpcategory=command.CATEGORY_WORKING_DIRECTORY,
    intents={INTENT_READONLY},
)
def files(ui, repo, *pats, **opts):
    """list tracked files

    Print files under Mercurial control in the working directory or
    specified revision for given files (excluding removed files).
    Files can be specified as filenames or filesets.

    If no files are given to match, this command prints the names
    of all files under Mercurial control.

    .. container:: verbose

      Template:

      The following keywords are supported in addition to the common template
      keywords and functions. See also :hg:`help templates`.

      :flags: String. Character denoting file's symlink and executable bits.
      :path: String. Repository-absolute path of the file.
      :size: Integer. Size of the file in bytes.

      Examples:

      - list all files under the current directory::

          hg files .

      - shows sizes and flags for current revision::

          hg files -vr .

      - list all files named README::

          hg files -I "**/README"

      - list all binary files::

          hg files "set:binary()"

      - find files containing a regular expression::

          hg files "set:grep('bob')"

      - search tracked file contents with xargs and grep::

          hg files -0 | xargs -0 grep foo

    See :hg:`help patterns` and :hg:`help filesets` for more information
    on specifying file patterns.

    Returns 0 if a match is found, 1 otherwise.

    """
    opts = pycompat.byteskwargs(opts)
    rev = opts.get(b'rev')
    if rev:
        # allow hash-like revision specs to address hidden changesets
        repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
    ctx = logcmdutil.revsingle(repo, rev, None)

    # -0/--print0 terminates entries with NUL instead of newline
    sep = b'\0' if opts.get(b'print0') else b'\n'
    fmt = b'%s' + sep

    matcher = scmutil.match(ctx, pats, opts)
    ui.pager(b'files')
    uipathfn = scmutil.getuipathfn(ctx.repo(), legacyrelativevalue=True)
    with ui.formatter(b'files', opts) as fm:
        return cmdutil.files(
            ui, ctx, matcher, uipathfn, fm, fmt, opts.get(b'subrepos')
        )
2934
2934
2935
2935
@command(
    b'forget',
    [
        (b'i', b'interactive', None, _(b'use interactive mode')),
    ]
    + walkopts
    + dryrunopts,
    _(b'[OPTION]... FILE...'),
    helpcategory=command.CATEGORY_WORKING_DIRECTORY,
    helpbasic=True,
    inferrepo=True,
)
def forget(ui, repo, *pats, **opts):
    """forget the specified files on the next commit

    Mark the specified files so they will no longer be tracked
    after the next commit.

    This only removes files from the current branch, not from the
    entire project history, and it does not delete them from the
    working directory.

    To delete the file from the working directory, see :hg:`remove`.

    To undo a forget before the next commit, see :hg:`add`.

    .. container:: verbose

      Examples:

      - forget newly-added binary files::

          hg forget "set:added() and binary()"

      - forget files that would be excluded by .hgignore::

          hg forget "set:hgignore()"

    Returns 0 on success.
    """

    opts = pycompat.byteskwargs(opts)
    if not pats:
        raise error.InputError(_(b'no files specified'))

    # forgetting mutates the dirstate, so take the working-copy lock and
    # open a dirstate file-change transaction for the duration
    with repo.wlock(), repo.dirstate.changing_files(repo):
        m = scmutil.match(repo[None], pats, opts)
        dryrun, interactive = opts.get(b'dry_run'), opts.get(b'interactive')
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        # cmdutil.forget returns (rejected, forgot); only the rejected
        # list matters for the exit code
        rejected = cmdutil.forget(
            ui,
            repo,
            m,
            prefix=b"",
            uipathfn=uipathfn,
            explicitonly=False,
            dryrun=dryrun,
            interactive=interactive,
        )[0]
    # exit 1 if any file could not be forgotten, 0 otherwise; the explicit
    # conditional replaces the fragile ``rejected and 1 or 0`` and/or trick
    # (which silently misbehaves whenever the "true" value is falsy)
    return 1 if rejected else 0
2996
2996
2997
2997
@command(
    b'graft',
    [
        (b'r', b'rev', [], _(b'revisions to graft'), _(b'REV')),
        (
            b'',
            b'base',
            b'',
            _(b'base revision when doing the graft merge (ADVANCED)'),
            _(b'REV'),
        ),
        (b'c', b'continue', False, _(b'resume interrupted graft')),
        (b'', b'stop', False, _(b'stop interrupted graft')),
        (b'', b'abort', False, _(b'abort interrupted graft')),
        (b'e', b'edit', False, _(b'invoke editor on commit messages')),
        (b'', b'log', None, _(b'append graft info to log message')),
        (
            b'',
            b'no-commit',
            None,
            _(b"don't commit, just apply the changes in working directory"),
        ),
        (b'f', b'force', False, _(b'force graft')),
        (
            b'D',
            b'currentdate',
            False,
            _(b'record the current date as commit date'),
        ),
        (
            b'U',
            b'currentuser',
            False,
            _(b'record the current user as committer'),
        ),
    ]
    + commitopts2
    + mergetoolopts
    + dryrunopts,
    _(b'[OPTION]... [-r REV]... REV...'),
    helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
)
def graft(ui, repo, *revs, **opts):
    """copy changes from other branches onto the current branch

    This command uses Mercurial's merge logic to copy individual
    changes from other branches without merging branches in the
    history graph. This is sometimes known as 'backporting' or
    'cherry-picking'. By default, graft will copy user, date, and
    description from the source changesets.

    Changesets that are ancestors of the current revision, that have
    already been grafted, or that are merges will be skipped.

    If --log is specified, log messages will have a comment appended
    of the form::

      (grafted from CHANGESETHASH)

    If --force is specified, revisions will be grafted even if they
    are already ancestors of, or have been grafted to, the destination.
    This is useful when the revisions have since been backed out.

    If a graft merge results in conflicts, the graft process is
    interrupted so that the current merge can be manually resolved.
    Once all conflicts are addressed, the graft process can be
    continued with the -c/--continue option.

    The -c/--continue option reapplies all the earlier options.

    .. container:: verbose

      The --base option exposes more of how graft internally uses merge with a
      custom base revision. --base can be used to specify another ancestor than
      the first and only parent.

      The command::

        hg graft -r 345 --base 234

      is thus pretty much the same as::

        hg diff --from 234 --to 345 | hg import

      but using merge to resolve conflicts and track moved files.

      The result of a merge can thus be backported as a single commit by
      specifying one of the merge parents as base, and thus effectively
      grafting the changes from the other side.

      It is also possible to collapse multiple changesets and clean up history
      by specifying another ancestor as base, much like rebase --collapse
      --keep.

      The commit message can be tweaked after the fact using commit --amend .

      For using non-ancestors as the base to backout changes, see the backout
      command and the hidden --parent option.

    .. container:: verbose

      Examples:

      - copy a single change to the stable branch and edit its description::

          hg update stable
          hg graft --edit 9393

      - graft a range of changesets with one exception, updating dates::

          hg graft -D "2085::2093 and not 2091"

      - continue a graft after resolving conflicts::

          hg graft -c

      - show the source of a grafted changeset::

          hg log --debug -r .

      - show revisions sorted by date::

          hg log -r "sort(all(), date)"

      - backport the result of a merge as a single commit::

          hg graft -r 123 --base 123^

      - land a feature branch as one changeset::

          hg up -cr default
          hg graft -r featureX --base "ancestor('featureX', 'default')"

    See :hg:`help revisions` for more about specifying revisions.

    Returns 0 on successful completion, 1 if there are unresolved files.
    """
    # grafting rewrites the working copy, so the whole operation —
    # including --continue/--stop/--abort handling in _dograft — runs
    # under the working-copy lock
    wlock = repo.wlock()
    with wlock:
        return _dograft(ui, repo, *revs, **opts)
3137
3137
3138
3138
def _dograft(ui, repo, *revs, **opts):
    """Implementation of :hg:`graft`; caller holds the repo wlock.

    Dispatches the interrupt-control flags (--stop / --abort / --continue),
    filters out revisions that cannot or need not be grafted (merges,
    ancestors of the working parent, already-grafted changesets), then
    copies each remaining revision onto the working directory via a merge
    and — unless --no-commit — a commit.

    Returns 0 on success, 1 when the merge left unresolved conflicts
    (state is saved for --continue), and -1 when every requested revision
    was skipped.
    """
    # Revisions may arrive both positionally and via --rev; mixing the two
    # loses the user's intended ordering, so warn about it.
    if revs and opts.get('rev'):
        ui.warn(
            _(
                b'warning: inconsistent use of --rev might give unexpected '
                b'revision ordering!\n'
            )
        )

    revs = list(revs)
    revs.extend(opts.get('rev'))
    # a dict of data to be stored in state file
    statedata = {}
    # list of new nodes created by ongoing graft
    statedata[b'newnodes'] = []

    cmdutil.resolve_commit_options(ui, opts)

    editor = cmdutil.getcommiteditor(editform=b'graft', **opts)

    # --abort, --stop and --continue are mutually exclusive.
    cmdutil.check_at_most_one_arg(opts, 'abort', 'stop', 'continue')

    cont = False
    if opts.get('no_commit'):
        cmdutil.check_incompatible_arguments(
            opts,
            'no_commit',
            ['edit', 'currentuser', 'currentdate', 'log'],
        )

    # Handle to the on-disk 'graftstate' file used across interruptions.
    graftstate = statemod.cmdstate(repo, b'graftstate')

    if opts.get('stop'):
        cmdutil.check_incompatible_arguments(
            opts,
            'stop',
            [
                'edit',
                'log',
                'user',
                'date',
                'currentdate',
                'currentuser',
                'rev',
            ],
        )
        return _stopgraft(ui, repo, graftstate)
    elif opts.get('abort'):
        cmdutil.check_incompatible_arguments(
            opts,
            'abort',
            [
                'edit',
                'log',
                'user',
                'date',
                'currentdate',
                'currentuser',
                'rev',
            ],
        )
        return cmdutil.abortgraft(ui, repo, graftstate)
    elif opts.get('continue'):
        cont = True
        if revs:
            raise error.InputError(_(b"can't specify --continue and revisions"))
        # read in unfinished revisions
        if graftstate.exists():
            # Restore the options that were in effect when the graft was
            # interrupted, so --continue behaves like the original run.
            statedata = cmdutil.readgraftstate(repo, graftstate)
            if statedata.get(b'date'):
                opts['date'] = statedata[b'date']
            if statedata.get(b'user'):
                opts['user'] = statedata[b'user']
            if statedata.get(b'log'):
                opts['log'] = True
            if statedata.get(b'no_commit'):
                opts['no_commit'] = statedata.get(b'no_commit')
            if statedata.get(b'base'):
                opts['base'] = statedata.get(b'base')
            nodes = statedata[b'nodes']
            revs = [repo[node].rev() for node in nodes]
        else:
            cmdutil.wrongtooltocontinue(repo, _(b'graft'))
    else:
        # Fresh run: require revisions and a clean, uninterrupted repo.
        if not revs:
            raise error.InputError(_(b'no revisions specified'))
        cmdutil.checkunfinished(repo)
        cmdutil.bailifchanged(repo)
        revs = logcmdutil.revrange(repo, revs)

    skipped = set()
    basectx = None
    if opts.get('base'):
        basectx = logcmdutil.revsingle(repo, opts['base'], None)
    if basectx is None:
        # check for merges
        for rev in repo.revs(b'%ld and merge()', revs):
            ui.warn(_(b'skipping ungraftable merge revision %d\n') % rev)
            skipped.add(rev)
    revs = [r for r in revs if r not in skipped]
    if not revs:
        return -1
    if basectx is not None and len(revs) != 1:
        raise error.InputError(_(b'only one revision allowed with --base '))

    # Don't check in the --continue case, in effect retaining --force across
    # --continues. That's because without --force, any revisions we decided to
    # skip would have been filtered out here, so they wouldn't have made their
    # way to the graftstate. With --force, any revisions we would have otherwise
    # skipped would not have been filtered out, and if they hadn't been applied
    # already, they'd have been in the graftstate.
    if not (cont or opts.get('force')) and basectx is None:
        # check for ancestors of dest branch
        ancestors = repo.revs(b'%ld & (::.)', revs)
        for rev in ancestors:
            ui.warn(_(b'skipping ancestor revision %d:%s\n') % (rev, repo[rev]))

        revs = [r for r in revs if r not in ancestors]

        if not revs:
            return -1

    # analyze revs for earlier grafts
    # 'ids' maps both the hex node of each rev and its recorded graft
    # 'source' (if any) back to the rev number, so either identity can be
    # matched against changesets already present on the destination.
    ids = {}
    for ctx in repo.set(b"%ld", revs):
        ids[ctx.hex()] = ctx.rev()
        n = ctx.extra().get(b'source')
        if n:
            ids[n] = ctx.rev()

    # check ancestors for earlier grafts
    ui.debug(b'scanning for duplicate grafts\n')

    # The only changesets we can be sure doesn't contain grafts of any
    # revs, are the ones that are common ancestors of *all* revs:
    for rev in repo.revs(b'only(%d,ancestor(%ld))', repo[b'.'].rev(), revs):
        ctx = repo[rev]
        n = ctx.extra().get(b'source')
        if n in ids:
            # 'ctx' records a graft source matching one of our revs (or a
            # rev's own source); drop the duplicate from the work list.
            try:
                r = repo[n].rev()
            except error.RepoLookupError:
                # Source node is not present in this repo.
                r = None
            if r in revs:
                ui.warn(
                    _(
                        b'skipping revision %d:%s '
                        b'(already grafted to %d:%s)\n'
                    )
                    % (r, repo[r], rev, ctx)
                )
                revs.remove(r)
            elif ids[n] in revs:
                if r is None:
                    ui.warn(
                        _(
                            b'skipping already grafted revision %d:%s '
                            b'(%d:%s also has unknown origin %s)\n'
                        )
                        % (ids[n], repo[ids[n]], rev, ctx, n[:12])
                    )
                else:
                    ui.warn(
                        _(
                            b'skipping already grafted revision %d:%s '
                            b'(%d:%s also has origin %d:%s)\n'
                        )
                        % (ids[n], repo[ids[n]], rev, ctx, r, n[:12])
                    )
                revs.remove(ids[n])
        elif ctx.hex() in ids:
            # 'ctx' itself is the source of one of the requested revs.
            r = ids[ctx.hex()]
            if r in revs:
                ui.warn(
                    _(
                        b'skipping already grafted revision %d:%s '
                        b'(was grafted from %d:%s)\n'
                    )
                    % (r, repo[r], rev, ctx)
                )
                revs.remove(r)
    if not revs:
        return -1

    # Persist options that must survive an interruption.
    if opts.get('no_commit'):
        statedata[b'no_commit'] = True
    if opts.get('base'):
        statedata[b'base'] = opts['base']
    for pos, ctx in enumerate(repo.set(b"%ld", revs)):
        desc = b'%d:%s "%s"' % (
            ctx.rev(),
            ctx,
            ctx.description().split(b'\n', 1)[0],
        )
        names = repo.nodetags(ctx.node()) + repo.nodebookmarks(ctx.node())
        if names:
            desc += b' (%s)' % b' '.join(names)
        ui.status(_(b'grafting %s\n') % desc)
        if opts.get('dry_run'):
            continue

        # Record provenance: keep the original 'source' when re-grafting a
        # graft, remembering this rev as the intermediate source.
        source = ctx.extra().get(b'source')
        extra = {}
        if source:
            extra[b'source'] = source
            extra[b'intermediate-source'] = ctx.hex()
        else:
            extra[b'source'] = ctx.hex()
        user = ctx.user()
        if opts.get('user'):
            user = opts['user']
            statedata[b'user'] = user
        date = ctx.date()
        if opts.get('date'):
            date = opts['date']
            statedata[b'date'] = date
        message = ctx.description()
        if opts.get('log'):
            message += b'\n(grafted from %s)' % ctx.hex()
            statedata[b'log'] = True

        # we don't merge the first commit when continuing
        if not cont:
            # perform the graft merge with p1(rev) as 'ancestor'
            overrides = {(b'ui', b'forcemerge'): opts.get('tool', b'')}
            base = ctx.p1() if basectx is None else basectx
            with ui.configoverride(overrides, b'graft'):
                stats = mergemod.graft(
                    repo, ctx, base, [b'local', b'graft', b'parent of graft']
                )
            # report any conflicts
            if stats.unresolvedcount > 0:
                # write out state for --continue
                nodes = [repo[rev].hex() for rev in revs[pos:]]
                statedata[b'nodes'] = nodes
                stateversion = 1
                graftstate.save(stateversion, statedata)
                ui.error(_(b"abort: unresolved conflicts, can't continue\n"))
                ui.error(_(b"(use 'hg resolve' and 'hg graft --continue')\n"))
                return 1
        else:
            # The first --continue'd revision is already merged on disk;
            # subsequent ones go through the merge path again.
            cont = False

        # commit if --no-commit is false
        if not opts.get('no_commit'):
            node = repo.commit(
                text=message, user=user, date=date, extra=extra, editor=editor
            )
            if node is None:
                ui.warn(
                    _(b'note: graft of %d:%s created no changes to commit\n')
                    % (ctx.rev(), ctx)
                )
            # checking that newnodes exist because old state files won't have it
            elif statedata.get(b'newnodes') is not None:
                nn = statedata[b'newnodes']
                assert isinstance(nn, list)  # list of bytes
                nn.append(node)

    # remove state when we complete successfully
    if not opts.get('dry_run'):
        graftstate.delete()

    return 0
3403
3403
3404
3404
def _stopgraft(ui, repo, graftstate):
    """Stop an interrupted graft, keeping any already-grafted changesets.

    Resets the working directory to its current parent, deletes the
    on-disk graft state, and reports where the working directory now is.
    Raises StateError when no interrupted graft exists. Returns 0.
    """
    if not graftstate.exists():
        raise error.StateError(_(b"no interrupted graft found"))

    parent = repo[b'.']
    # Throw away the half-merged working-directory changes ...
    mergemod.clean_update(parent)
    # ... and forget the interrupted state so --continue is no longer valid.
    graftstate.delete()

    ui.status(_(b"stopped the interrupted graft\n"))
    ui.status(_(b"working directory is now at %s\n") % parent.hex()[:12])
    return 0
3415
3415
3416
3416
# Register graft as an "unfinished state" command: other commands can then
# detect an interrupted graft via the 'graftstate' file, and the hint below
# tells the user how to resume or stop it.
statemod.addunfinished(
    b'graft',
    fname=b'graftstate',
    clearable=True,
    stopflag=True,
    continueflag=True,
    abortfunc=cmdutil.hgabortgraft,
    cmdhint=_(b"use 'hg graft --continue' or 'hg graft --stop' to stop"),
)
3426
3426
3427
3427
@command(
    b'grep',
    [
        (b'0', b'print0', None, _(b'end fields with NUL')),
        (b'', b'all', None, _(b'an alias to --diff (DEPRECATED)')),
        (
            b'',
            b'diff',
            None,
            _(
                b'search revision differences for when the pattern was added '
                b'or removed'
            ),
        ),
        (b'a', b'text', None, _(b'treat all files as text')),
        (
            b'f',
            b'follow',
            None,
            _(
                b'follow changeset history,'
                b' or file history across copies and renames'
            ),
        ),
        (b'i', b'ignore-case', None, _(b'ignore case when matching')),
        (
            b'l',
            b'files-with-matches',
            None,
            _(b'print only filenames and revisions that match'),
        ),
        (b'n', b'line-number', None, _(b'print matching line numbers')),
        (
            b'r',
            b'rev',
            [],
            _(b'search files changed within revision range'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            None,
            _(
                b'include all files in the changeset while grepping (DEPRECATED)'
            ),
        ),
        (b'u', b'user', None, _(b'list the author (long with -v)')),
        (b'd', b'date', None, _(b'list the date (short with -q)')),
    ]
    + formatteropts
    + walkopts,
    _(b'[--diff] [OPTION]... PATTERN [FILE]...'),
    helpcategory=command.CATEGORY_FILE_CONTENTS,
    inferrepo=True,
    intents={INTENT_READONLY},
)
def grep(ui, repo, pattern, *pats, **opts):
    """search for a pattern in specified files

    Search the working directory or revision history for a regular
    expression in the specified files for the entire repository.

    By default, grep searches the repository files in the working
    directory and prints the files where it finds a match. To specify
    historical revisions instead of the working directory, use the
    --rev flag.

    To search instead historical revision differences that contains a
    change in match status ("-" for a match that becomes a non-match,
    or "+" for a non-match that becomes a match), use the --diff flag.

    PATTERN can be any Python (roughly Perl-compatible) regular
    expression.

    If no FILEs are specified and the --rev flag isn't supplied, all
    files in the working directory are searched. When using the --rev
    flag and specifying FILEs, use the --follow argument to also
    follow the specified FILEs across renames and copies.

    .. container:: verbose

      Template:

      The following keywords are supported in addition to the common template
      keywords and functions. See also :hg:`help templates`.

      :change: String. Character denoting insertion ``+`` or removal ``-``.
               Available if ``--diff`` is specified.
      :lineno: Integer. Line number of the match.
      :path: String. Repository-absolute path of the file.
      :texts: List of text chunks.

      And each entry of ``{texts}`` provides the following sub-keywords.

      :matched: Boolean. True if the chunk matches the specified pattern.
      :text: String. Chunk content.

      See :hg:`help templates.operators` for the list expansion syntax.

    Returns 0 if a match is found, 1 otherwise.

    """
    cmdutil.check_incompatible_arguments(opts, 'all_files', ['all', 'diff'])
    opts = pycompat.byteskwargs(opts)
    diff = opts.get(b'all') or opts.get(b'diff')
    follow = opts.get(b'follow')
    # --all-files defaults to on unless --diff/--all was requested.
    if opts.get(b'all_files') is None and not diff:
        opts[b'all_files'] = True
    # "plain" grep: no revisions, no history following -- search the
    # working directory only.
    plaingrep = (
        opts.get(b'all_files')
        and not opts.get(b'rev')
        and not opts.get(b'follow')
    )
    all_files = opts.get(b'all_files')
    if plaingrep:
        # Working-directory search is expressed as the wdir() pseudo-rev.
        opts[b'rev'] = [b'wdir()']

    reflags = re.M
    if opts.get(b'ignore_case'):
        reflags |= re.I
    try:
        regexp = util.re.compile(pattern, reflags)
    except re.error as inst:
        ui.warn(
            _(b"grep: invalid match pattern: %s\n")
            % stringutil.forcebytestr(inst)
        )
        return 1
    # --print0 switches both the field separator and line terminator to NUL.
    sep, eol = b':', b'\n'
    if opts.get(b'print0'):
        sep = eol = b'\0'

    searcher = grepmod.grepsearcher(
        ui, repo, regexp, all_files=all_files, diff=diff, follow=follow
    )

    getfile = searcher._getfile

    uipathfn = scmutil.getuipathfn(repo)

    def display(fm, fn, ctx, pstates, states):
        """Write the matches for one (file, revision) pair to formatter 'fm'.

        Returns True when at least one line was emitted.
        """
        rev = scmutil.intrev(ctx)
        if fm.isplain():
            formatuser = ui.shortuser
        else:
            formatuser = pycompat.bytestr
        if ui.quiet:
            datefmt = b'%Y-%m-%d'
        else:
            datefmt = b'%a %b %d %H:%M:%S %Y %1%2'
        found = False

        # Binary-ness is checked at most once per call thanks to cachefunc.
        @util.cachefunc
        def binary():
            flog = getfile(fn)
            try:
                return stringutil.binary(flog.read(ctx.filenode(fn)))
            except error.WdirUnsupported:
                # Filelog access does not work for wdir(); read the
                # working-directory file context instead.
                return ctx[fn].isbinary()

        fieldnamemap = {b'linenumber': b'lineno'}
        if diff:
            # --diff mode shows lines whose match status changed between
            # parent and child states.
            iter = grepmod.difflinestates(pstates, states)
        else:
            iter = [(b'', l) for l in states]
        for change, l in iter:
            fm.startitem()
            fm.context(ctx=ctx)
            fm.data(node=fm.hexfunc(scmutil.binnode(ctx)), path=fn)
            fm.plain(uipathfn(fn), label=b'grep.filename')

            # Each column: (name, format, value, display-condition, extra
            # label). Conditions decide which optional columns are printed.
            cols = [
                (b'rev', b'%d', rev, not plaingrep, b''),
                (
                    b'linenumber',
                    b'%d',
                    l.linenum,
                    opts.get(b'line_number'),
                    b'',
                ),
            ]
            if diff:
                cols.append(
                    (
                        b'change',
                        b'%s',
                        change,
                        True,
                        b'grep.inserted '
                        if change == b'+'
                        else b'grep.deleted ',
                    )
                )
            cols.extend(
                [
                    (
                        b'user',
                        b'%s',
                        formatuser(ctx.user()),
                        opts.get(b'user'),
                        b'',
                    ),
                    (
                        b'date',
                        b'%s',
                        fm.formatdate(ctx.date(), datefmt),
                        opts.get(b'date'),
                        b'',
                    ),
                ]
            )
            for name, fmt, data, cond, extra_label in cols:
                if cond:
                    fm.plain(sep, label=b'grep.sep')
                field = fieldnamemap.get(name, name)
                label = extra_label + (b'grep.%s' % name)
                fm.condwrite(cond, field, fmt, data, label=label)
            if not opts.get(b'files_with_matches'):
                fm.plain(sep, label=b'grep.sep')
                if not opts.get(b'text') and binary():
                    fm.plain(_(b" Binary file matches"))
                else:
                    displaymatches(fm.nested(b'texts', tmpl=b'{text}'), l)
            fm.plain(eol)
            found = True
            if opts.get(b'files_with_matches'):
                # -l/--files-with-matches: one line per file is enough.
                break
        return found

    def displaymatches(fm, l):
        """Emit line 'l' as alternating non-matching / matching chunks."""
        p = 0
        for s, e in l.findpos(regexp):
            if p < s:
                fm.startitem()
                fm.write(b'text', b'%s', l.line[p:s])
                fm.data(matched=False)
            fm.startitem()
            fm.write(b'text', b'%s', l.line[s:e], label=b'grep.match')
            fm.data(matched=True)
            p = e
        if p < len(l.line):
            fm.startitem()
            fm.write(b'text', b'%s', l.line[p:])
            fm.data(matched=False)
        fm.end()

    found = False

    wopts = logcmdutil.walkopts(
        pats=pats,
        opts=opts,
        revspec=opts[b'rev'],
        include_pats=opts[b'include'],
        exclude_pats=opts[b'exclude'],
        follow=follow,
        force_changelog_traversal=all_files,
        filter_revisions_by_pats=not all_files,
    )
    revs, makefilematcher = logcmdutil.makewalker(repo, wopts)

    ui.pager(b'grep')
    fm = ui.formatter(b'grep', opts)
    for fn, ctx, pstates, states in searcher.searchfiles(revs, makefilematcher):
        r = display(fm, fn, ctx, pstates, states)
        found = found or r
        if r and not diff and not all_files:
            # Once a file matched, later revisions of it need not be
            # searched again (unless diff/all-files semantics require it).
            searcher.skipfile(fn, ctx.rev())
    fm.end()

    # Shell convention: exit status 0 when something was found.
    return not found
3699
3699
3700
3700
@command(
    b'heads',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'show only heads which are descendants of STARTREV'),
            _(b'STARTREV'),
        ),
        (b't', b'topo', False, _(b'show topological heads only')),
        (
            b'a',
            b'active',
            False,
            _(b'show active branchheads only (DEPRECATED)'),
        ),
        (b'c', b'closed', False, _(b'show normal and closed branch heads')),
    ]
    + templateopts,
    _(b'[-ct] [-r STARTREV] [REV]...'),
    helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
    intents={INTENT_READONLY},
)
def heads(ui, repo, *branchrevs, **opts):
    """show branch heads

    With no arguments, show all open branch heads in the repository.
    Branch heads are changesets that have no descendants on the
    same branch. They are where development generally takes place and
    are the usual targets for update and merge operations.

    If one or more REVs are given, only open branch heads on the
    branches associated with the specified changesets are shown. This
    means that you can use :hg:`heads .` to see the heads on the
    currently checked-out branch.

    If -c/--closed is specified, also show branch heads marked closed
    (see :hg:`commit --close-branch`).

    If STARTREV is specified, only those heads that are descendants of
    STARTREV will be displayed.

    If -t/--topo is specified, named branch mechanics will be ignored and only
    topological heads (changesets with no children) will be shown.

    Returns 0 if matching heads are found, 1 if not.
    """

    opts = pycompat.byteskwargs(opts)

    # Resolve -r/--rev (if given) to the node all heads must descend from.
    startnode = None
    rev = opts.get(b'rev')
    if rev:
        repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
        startnode = logcmdutil.revsingle(repo, rev, None).node()

    if opts.get(b'topo'):
        # Topological heads: changesets with no children, branch-agnostic.
        heads = [repo[n] for n in repo.heads(startnode)]
    else:
        # Named-branch heads, gathered across every branch in the branchmap.
        closed = opts.get(b'closed')
        headnodes = []
        for branch in repo.branchmap():
            headnodes.extend(repo.branchheads(branch, startnode, closed))
        heads = [repo[n] for n in headnodes]

    if branchrevs:
        # Keep only heads on branches named by the positional REV arguments.
        wanted = {
            repo[r].branch() for r in logcmdutil.revrange(repo, branchrevs)
        }
        heads = [h for h in heads if h.branch() in wanted]

    if opts.get(b'active') and branchrevs:
        # DEPRECATED -a/--active: additionally restrict to DAG heads.
        dagheads = repo.heads(startnode)
        heads = [h for h in heads if h.node() in dagheads]

    if branchrevs:
        # Warn about requested branches that contributed no heads at all.
        haveheads = {h.branch() for h in heads}
        missing = wanted - haveheads
        if missing:
            headless = b', '.join(missing)
            msg = _(b'no open branch heads found on branches %s')
            if opts.get(b'rev'):
                msg += _(b' (started at %s)') % opts[b'rev']
            ui.warn((msg + b'\n') % headless)

    if not heads:
        return 1

    # Show newest heads first.
    ui.pager(b'heads')
    heads = sorted(heads, key=lambda x: -(x.rev()))
    displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
    for ctx in heads:
        displayer.show(ctx)
    displayer.close()
3793
3793
3794
3794
@command(
    b'help',
    [
        (b'e', b'extension', None, _(b'show only help for extensions')),
        (b'c', b'command', None, _(b'show only help for commands')),
        (b'k', b'keyword', None, _(b'show topics matching keyword')),
        (
            b's',
            b'system',
            [],
            _(b'show help for specific platform(s)'),
            _(b'PLATFORM'),
        ),
    ],
    _(b'[-eck] [-s PLATFORM] [TOPIC]'),
    helpcategory=command.CATEGORY_HELP,
    norepo=True,
    intents={INTENT_READONLY},
)
def help_(ui, name=None, **opts):
    """show help for a given topic or a help overview

    With no arguments, print a list of commands with short help messages.

    Given a topic, extension, or command name, print help for that
    topic.

    Returns 0 if successful.
    """

    # -s/--system selects which platform-specific help sections to keep;
    # default to sections matching the platform we are running on.
    keep = opts.get('system') or []
    if not keep:
        platform = pycompat.sysplatform
        if platform.startswith(b'win'):
            keep.append(b'windows')
        elif platform == b'OpenVMS':
            keep.append(b'vms')
        elif platform == b'plan9':
            keep.append(b'plan9')
        else:
            keep.append(b'unix')
            keep.append(platform.lower())
    if ui.verbose:
        keep.append(b'verbose')

    # The help machinery introspects this very module for command tables.
    commands = sys.modules[__name__]
    formatted = help.formattedhelp(ui, commands, name, keep=keep, **opts)
    ui.pager(b'help')
    ui.write(formatted)
3843
3843
3844
3844
@command(
    b'identify|id',
    [
        (b'r', b'rev', b'', _(b'identify the specified revision'), _(b'REV')),
        (b'n', b'num', None, _(b'show local revision number')),
        (b'i', b'id', None, _(b'show global revision id')),
        (b'b', b'branch', None, _(b'show branch')),
        (b't', b'tags', None, _(b'show tags')),
        (b'B', b'bookmarks', None, _(b'show bookmarks')),
    ]
    + remoteopts
    + formatteropts,
    _(b'[-nibtB] [-r REV] [SOURCE]'),
    helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
    optionalrepo=True,
    intents={INTENT_READONLY},
)
def identify(
    ui,
    repo,
    source=None,
    rev=None,
    num=None,
    id=None,  # NOTE: shadows the `id` builtin; kept for CLI option mapping
    branch=None,
    tags=None,
    bookmarks=None,
    **opts
):
    """identify the working directory or specified revision

    Print a summary identifying the repository state at REV using one or
    two parent hash identifiers, followed by a "+" if the working
    directory has uncommitted changes, the branch name (if not default),
    a list of tags, and a list of bookmarks.

    When REV is not given, print a summary of the current state of the
    repository including the working directory. Specify -r. to get information
    of the working directory parent without scanning uncommitted changes.

    Specifying a path to a repository root or Mercurial bundle will
    cause lookup to operate on that repository/bundle.

    .. container:: verbose

      Template:

      The following keywords are supported in addition to the common template
      keywords and functions. See also :hg:`help templates`.

      :dirty: String. Character ``+`` denoting if the working directory has
        uncommitted changes.
      :id: String. One or two nodes, optionally followed by ``+``.
      :parents: List of strings. Parent nodes of the changeset.

      Examples:

      - generate a build identifier for the working directory::

          hg id --id > build-id.dat

      - find the revision corresponding to a tag::

          hg id -n -r 1.3

      - check the most recent revision of a remote repository::

          hg id -r tip https://www.mercurial-scm.org/repo/hg/

    See :hg:`log` for generating more information about specific revisions,
    including full hash identifiers.

    Returns 0 if successful.
    """

    opts = pycompat.byteskwargs(opts)
    # optionalrepo=True means `repo` may be None; require either a local
    # repository or an explicit SOURCE to identify.
    if not repo and not source:
        raise error.InputError(
            _(b"there is no Mercurial repository here (.hg not found)")
        )

    # With no selector flags at all, show the default summary.
    default = not (num or id or branch or tags or bookmarks)
    output = []
    revs = []

    peer = None
    try:
        if source:
            # SOURCE given: identify a (possibly remote) repository/bundle
            # through a peer instead of the local working copy.
            path = urlutil.get_unique_pull_path_obj(b'identify', ui, source)
            # only pass ui when no repo
            peer = hg.peer(repo or ui, opts, path)
            repo = peer.local()
            branches = (path.branch, [])
            revs, checkout = hg.addbranchrevs(repo, peer, branches, None)

        fm = ui.formatter(b'identify', opts)
        fm.startitem()

        if not repo:
            # Remote-only peer (no local view): only node ids and bookmarks
            # can be queried over the wire.
            if num or branch or tags:
                raise error.InputError(
                    _(b"can't query remote revision number, branch, or tags")
                )
            if not rev and revs:
                rev = revs[0]
            if not rev:
                rev = b"tip"

            remoterev = peer.lookup(rev)
            hexrev = fm.hexfunc(remoterev)
            if default or id:
                output = [hexrev]
                fm.data(id=hexrev)

            # Cached so the (potentially slow) listkeys round trips happen
            # at most once even if bookmarks are needed twice below.
            @util.cachefunc
            def getbms():
                bms = []

                if b'bookmarks' in peer.listkeys(b'namespaces'):
                    hexremoterev = hex(remoterev)
                    bms = [
                        bm
                        for bm, bmr in peer.listkeys(b'bookmarks').items()
                        if bmr == hexremoterev
                    ]

                return sorted(bms)

            if fm.isplain():
                if bookmarks:
                    output.extend(getbms())
                elif default and not ui.quiet:
                    # multiple bookmarks for a single parent separated by '/'
                    bm = b'/'.join(getbms())
                    if bm:
                        output.append(bm)
            else:
                # Structured (template/JSON) output path.
                fm.data(node=hex(remoterev))
                if bookmarks or b'bookmarks' in fm.datahint():
                    fm.data(bookmarks=fm.formatlist(getbms(), name=b'bookmark'))
        else:
            # Local repository path: identify REV, or the working directory
            # state when no REV was given.
            if rev:
                repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
            ctx = logcmdutil.revsingle(repo, rev, None)

            if ctx.rev() is None:
                # Working-directory context: report parent hashes plus a
                # trailing '+' when there are uncommitted changes.
                ctx = repo[None]
                parents = ctx.parents()
                taglist = []
                for p in parents:
                    taglist.extend(p.tags())

                dirty = b""
                if ctx.dirty(missing=True, merge=False, branch=False):
                    dirty = b'+'
                fm.data(dirty=dirty)

                hexoutput = [fm.hexfunc(p.node()) for p in parents]
                if default or id:
                    output = [b"%s%s" % (b'+'.join(hexoutput), dirty)]
                    fm.data(id=b"%s%s" % (b'+'.join(hexoutput), dirty))

                if num:
                    numoutput = [b"%d" % p.rev() for p in parents]
                    output.append(b"%s%s" % (b'+'.join(numoutput), dirty))

                fm.data(
                    parents=fm.formatlist(
                        [fm.hexfunc(p.node()) for p in parents], name=b'node'
                    )
                )
            else:
                # A concrete changeset was requested.
                hexoutput = fm.hexfunc(ctx.node())
                if default or id:
                    output = [hexoutput]
                    fm.data(id=hexoutput)

                if num:
                    output.append(pycompat.bytestr(ctx.rev()))
                taglist = ctx.tags()

            if default and not ui.quiet:
                # Default summary: branch name (unless 'default'), tags,
                # then bookmarks.
                b = ctx.branch()
                if b != b'default':
                    output.append(b"(%s)" % b)

                # multiple tags for a single parent separated by '/'
                t = b'/'.join(taglist)
                if t:
                    output.append(t)

                # multiple bookmarks for a single parent separated by '/'
                bm = b'/'.join(ctx.bookmarks())
                if bm:
                    output.append(bm)
            else:
                # Explicit selector flags: emit only what was asked for.
                if branch:
                    output.append(ctx.branch())

                if tags:
                    output.extend(taglist)

                if bookmarks:
                    output.extend(ctx.bookmarks())

            # Always populate the structured fields for templated output.
            fm.data(node=ctx.hex())
            fm.data(branch=ctx.branch())
            fm.data(tags=fm.formatlist(taglist, name=b'tag', sep=b':'))
            fm.data(bookmarks=fm.formatlist(ctx.bookmarks(), name=b'bookmark'))
            fm.context(ctx=ctx)

        fm.plain(b"%s\n" % b' '.join(output))
        fm.end()
    finally:
        # Make sure the peer connection is released even on error.
        if peer:
            peer.close()
4061
4061
4062
4062
4063 @command(
4063 @command(
4064 b'import|patch',
4064 b'import|patch',
4065 [
4065 [
4066 (
4066 (
4067 b'p',
4067 b'p',
4068 b'strip',
4068 b'strip',
4069 1,
4069 1,
4070 _(
4070 _(
4071 b'directory strip option for patch. This has the same '
4071 b'directory strip option for patch. This has the same '
4072 b'meaning as the corresponding patch option'
4072 b'meaning as the corresponding patch option'
4073 ),
4073 ),
4074 _(b'NUM'),
4074 _(b'NUM'),
4075 ),
4075 ),
4076 (b'b', b'base', b'', _(b'base path (DEPRECATED)'), _(b'PATH')),
4076 (b'b', b'base', b'', _(b'base path (DEPRECATED)'), _(b'PATH')),
4077 (b'', b'secret', None, _(b'use the secret phase for committing')),
4077 (b'', b'secret', None, _(b'use the secret phase for committing')),
4078 (b'e', b'edit', False, _(b'invoke editor on commit messages')),
4078 (b'e', b'edit', False, _(b'invoke editor on commit messages')),
4079 (
4079 (
4080 b'f',
4080 b'f',
4081 b'force',
4081 b'force',
4082 None,
4082 None,
4083 _(b'skip check for outstanding uncommitted changes (DEPRECATED)'),
4083 _(b'skip check for outstanding uncommitted changes (DEPRECATED)'),
4084 ),
4084 ),
4085 (
4085 (
4086 b'',
4086 b'',
4087 b'no-commit',
4087 b'no-commit',
4088 None,
4088 None,
4089 _(b"don't commit, just update the working directory"),
4089 _(b"don't commit, just update the working directory"),
4090 ),
4090 ),
4091 (
4091 (
4092 b'',
4092 b'',
4093 b'bypass',
4093 b'bypass',
4094 None,
4094 None,
4095 _(b"apply patch without touching the working directory"),
4095 _(b"apply patch without touching the working directory"),
4096 ),
4096 ),
4097 (b'', b'partial', None, _(b'commit even if some hunks fail')),
4097 (b'', b'partial', None, _(b'commit even if some hunks fail')),
4098 (b'', b'exact', None, _(b'abort if patch would apply lossily')),
4098 (b'', b'exact', None, _(b'abort if patch would apply lossily')),
4099 (b'', b'prefix', b'', _(b'apply patch to subdirectory'), _(b'DIR')),
4099 (b'', b'prefix', b'', _(b'apply patch to subdirectory'), _(b'DIR')),
4100 (
4100 (
4101 b'',
4101 b'',
4102 b'import-branch',
4102 b'import-branch',
4103 None,
4103 None,
4104 _(b'use any branch information in patch (implied by --exact)'),
4104 _(b'use any branch information in patch (implied by --exact)'),
4105 ),
4105 ),
4106 ]
4106 ]
4107 + commitopts
4107 + commitopts
4108 + commitopts2
4108 + commitopts2
4109 + similarityopts,
4109 + similarityopts,
4110 _(b'[OPTION]... PATCH...'),
4110 _(b'[OPTION]... PATCH...'),
4111 helpcategory=command.CATEGORY_IMPORT_EXPORT,
4111 helpcategory=command.CATEGORY_IMPORT_EXPORT,
4112 )
4112 )
4113 def import_(ui, repo, patch1=None, *patches, **opts):
4113 def import_(ui, repo, patch1=None, *patches, **opts):
4114 """import an ordered set of patches
4114 """import an ordered set of patches
4115
4115
4116 Import a list of patches and commit them individually (unless
4116 Import a list of patches and commit them individually (unless
4117 --no-commit is specified).
4117 --no-commit is specified).
4118
4118
4119 To read a patch from standard input (stdin), use "-" as the patch
4119 To read a patch from standard input (stdin), use "-" as the patch
4120 name. If a URL is specified, the patch will be downloaded from
4120 name. If a URL is specified, the patch will be downloaded from
4121 there.
4121 there.
4122
4122
4123 Import first applies changes to the working directory (unless
4123 Import first applies changes to the working directory (unless
4124 --bypass is specified), import will abort if there are outstanding
4124 --bypass is specified), import will abort if there are outstanding
4125 changes.
4125 changes.
4126
4126
4127 Use --bypass to apply and commit patches directly to the
4127 Use --bypass to apply and commit patches directly to the
4128 repository, without affecting the working directory. Without
4128 repository, without affecting the working directory. Without
4129 --exact, patches will be applied on top of the working directory
4129 --exact, patches will be applied on top of the working directory
4130 parent revision.
4130 parent revision.
4131
4131
4132 You can import a patch straight from a mail message. Even patches
4132 You can import a patch straight from a mail message. Even patches
4133 as attachments work (to use the body part, it must have type
4133 as attachments work (to use the body part, it must have type
4134 text/plain or text/x-patch). From and Subject headers of email
4134 text/plain or text/x-patch). From and Subject headers of email
4135 message are used as default committer and commit message. All
4135 message are used as default committer and commit message. All
4136 text/plain body parts before first diff are added to the commit
4136 text/plain body parts before first diff are added to the commit
4137 message.
4137 message.
4138
4138
4139 If the imported patch was generated by :hg:`export`, user and
4139 If the imported patch was generated by :hg:`export`, user and
4140 description from patch override values from message headers and
4140 description from patch override values from message headers and
4141 body. Values given on command line with -m/--message and -u/--user
4141 body. Values given on command line with -m/--message and -u/--user
4142 override these.
4142 override these.
4143
4143
4144 If --exact is specified, import will set the working directory to
4144 If --exact is specified, import will set the working directory to
4145 the parent of each patch before applying it, and will abort if the
4145 the parent of each patch before applying it, and will abort if the
4146 resulting changeset has a different ID than the one recorded in
4146 resulting changeset has a different ID than the one recorded in
4147 the patch. This will guard against various ways that portable
4147 the patch. This will guard against various ways that portable
4148 patch formats and mail systems might fail to transfer Mercurial
4148 patch formats and mail systems might fail to transfer Mercurial
4149 data or metadata. See :hg:`bundle` for lossless transmission.
4149 data or metadata. See :hg:`bundle` for lossless transmission.
4150
4150
4151 Use --partial to ensure a changeset will be created from the patch
4151 Use --partial to ensure a changeset will be created from the patch
4152 even if some hunks fail to apply. Hunks that fail to apply will be
4152 even if some hunks fail to apply. Hunks that fail to apply will be
4153 written to a <target-file>.rej file. Conflicts can then be resolved
4153 written to a <target-file>.rej file. Conflicts can then be resolved
4154 by hand before :hg:`commit --amend` is run to update the created
4154 by hand before :hg:`commit --amend` is run to update the created
4155 changeset. This flag exists to let people import patches that
4155 changeset. This flag exists to let people import patches that
4156 partially apply without losing the associated metadata (author,
4156 partially apply without losing the associated metadata (author,
4157 date, description, ...).
4157 date, description, ...).
4158
4158
4159 .. note::
4159 .. note::
4160
4160
4161 When no hunks apply cleanly, :hg:`import --partial` will create
4161 When no hunks apply cleanly, :hg:`import --partial` will create
4162 an empty changeset, importing only the patch metadata.
4162 an empty changeset, importing only the patch metadata.
4163
4163
4164 With -s/--similarity, hg will attempt to discover renames and
4164 With -s/--similarity, hg will attempt to discover renames and
4165 copies in the patch in the same way as :hg:`addremove`.
4165 copies in the patch in the same way as :hg:`addremove`.
4166
4166
4167 It is possible to use external patch programs to perform the patch
4167 It is possible to use external patch programs to perform the patch
4168 by setting the ``ui.patch`` configuration option. For the default
4168 by setting the ``ui.patch`` configuration option. For the default
4169 internal tool, the fuzz can also be configured via ``patch.fuzz``.
4169 internal tool, the fuzz can also be configured via ``patch.fuzz``.
4170 See :hg:`help config` for more information about configuration
4170 See :hg:`help config` for more information about configuration
4171 files and how to use these options.
4171 files and how to use these options.
4172
4172
4173 See :hg:`help dates` for a list of formats valid for -d/--date.
4173 See :hg:`help dates` for a list of formats valid for -d/--date.
4174
4174
4175 .. container:: verbose
4175 .. container:: verbose
4176
4176
4177 Examples:
4177 Examples:
4178
4178
4179 - import a traditional patch from a website and detect renames::
4179 - import a traditional patch from a website and detect renames::
4180
4180
4181 hg import -s 80 http://example.com/bugfix.patch
4181 hg import -s 80 http://example.com/bugfix.patch
4182
4182
4183 - import a changeset from an hgweb server::
4183 - import a changeset from an hgweb server::
4184
4184
4185 hg import https://www.mercurial-scm.org/repo/hg/rev/5ca8c111e9aa
4185 hg import https://www.mercurial-scm.org/repo/hg/rev/5ca8c111e9aa
4186
4186
4187 - import all the patches in an Unix-style mbox::
4187 - import all the patches in an Unix-style mbox::
4188
4188
4189 hg import incoming-patches.mbox
4189 hg import incoming-patches.mbox
4190
4190
4191 - import patches from stdin::
4191 - import patches from stdin::
4192
4192
4193 hg import -
4193 hg import -
4194
4194
4195 - attempt to exactly restore an exported changeset (not always
4195 - attempt to exactly restore an exported changeset (not always
4196 possible)::
4196 possible)::
4197
4197
4198 hg import --exact proposed-fix.patch
4198 hg import --exact proposed-fix.patch
4199
4199
4200 - use an external tool to apply a patch which is too fuzzy for
4200 - use an external tool to apply a patch which is too fuzzy for
4201 the default internal tool.
4201 the default internal tool.
4202
4202
4203 hg import --config ui.patch="patch --merge" fuzzy.patch
4203 hg import --config ui.patch="patch --merge" fuzzy.patch
4204
4204
4205 - change the default fuzzing from 2 to a less strict 7
4205 - change the default fuzzing from 2 to a less strict 7
4206
4206
4207 hg import --config ui.fuzz=7 fuzz.patch
4207 hg import --config ui.fuzz=7 fuzz.patch
4208
4208
4209 Returns 0 on success, 1 on partial success (see --partial).
4209 Returns 0 on success, 1 on partial success (see --partial).
4210 """
4210 """
4211
4211
4212 cmdutil.check_incompatible_arguments(
4212 cmdutil.check_incompatible_arguments(
4213 opts, 'no_commit', ['bypass', 'secret']
4213 opts, 'no_commit', ['bypass', 'secret']
4214 )
4214 )
4215 cmdutil.check_incompatible_arguments(opts, 'exact', ['edit', 'prefix'])
4215 cmdutil.check_incompatible_arguments(opts, 'exact', ['edit', 'prefix'])
4216 opts = pycompat.byteskwargs(opts)
4216 opts = pycompat.byteskwargs(opts)
4217 if not patch1:
4217 if not patch1:
4218 raise error.InputError(_(b'need at least one patch to import'))
4218 raise error.InputError(_(b'need at least one patch to import'))
4219
4219
4220 patches = (patch1,) + patches
4220 patches = (patch1,) + patches
4221
4221
4222 date = opts.get(b'date')
4222 date = opts.get(b'date')
4223 if date:
4223 if date:
4224 opts[b'date'] = dateutil.parsedate(date)
4224 opts[b'date'] = dateutil.parsedate(date)
4225
4225
4226 exact = opts.get(b'exact')
4226 exact = opts.get(b'exact')
4227 update = not opts.get(b'bypass')
4227 update = not opts.get(b'bypass')
4228 try:
4228 try:
4229 sim = float(opts.get(b'similarity') or 0)
4229 sim = float(opts.get(b'similarity') or 0)
4230 except ValueError:
4230 except ValueError:
4231 raise error.InputError(_(b'similarity must be a number'))
4231 raise error.InputError(_(b'similarity must be a number'))
4232 if sim < 0 or sim > 100:
4232 if sim < 0 or sim > 100:
4233 raise error.InputError(_(b'similarity must be between 0 and 100'))
4233 raise error.InputError(_(b'similarity must be between 0 and 100'))
4234 if sim and not update:
4234 if sim and not update:
4235 raise error.InputError(_(b'cannot use --similarity with --bypass'))
4235 raise error.InputError(_(b'cannot use --similarity with --bypass'))
4236
4236
4237 base = opts[b"base"]
4237 base = opts[b"base"]
4238 msgs = []
4238 msgs = []
4239 ret = 0
4239 ret = 0
4240
4240
4241 with repo.wlock():
4241 with repo.wlock():
4242 if update:
4242 if update:
4243 cmdutil.checkunfinished(repo)
4243 cmdutil.checkunfinished(repo)
4244 if exact or not opts.get(b'force'):
4244 if exact or not opts.get(b'force'):
4245 cmdutil.bailifchanged(repo)
4245 cmdutil.bailifchanged(repo)
4246
4246
4247 if not opts.get(b'no_commit'):
4247 if not opts.get(b'no_commit'):
4248 lock = repo.lock
4248 lock = repo.lock
4249 tr = lambda: repo.transaction(b'import')
4249 tr = lambda: repo.transaction(b'import')
4250 else:
4250 else:
4251 lock = util.nullcontextmanager
4251 lock = util.nullcontextmanager
4252 tr = util.nullcontextmanager
4252 tr = util.nullcontextmanager
4253 with lock(), tr():
4253 with lock(), tr():
4254 parents = repo[None].parents()
4254 parents = repo[None].parents()
4255 for patchurl in patches:
4255 for patchurl in patches:
4256 if patchurl == b'-':
4256 if patchurl == b'-':
4257 ui.status(_(b'applying patch from stdin\n'))
4257 ui.status(_(b'applying patch from stdin\n'))
4258 patchfile = ui.fin
4258 patchfile = ui.fin
4259 patchurl = b'stdin' # for error message
4259 patchurl = b'stdin' # for error message
4260 else:
4260 else:
4261 patchurl = os.path.join(base, patchurl)
4261 patchurl = os.path.join(base, patchurl)
4262 ui.status(_(b'applying %s\n') % patchurl)
4262 ui.status(_(b'applying %s\n') % patchurl)
4263 patchfile = hg.openpath(ui, patchurl, sendaccept=False)
4263 patchfile = hg.openpath(ui, patchurl, sendaccept=False)
4264
4264
4265 haspatch = False
4265 haspatch = False
4266 for hunk in patch.split(patchfile):
4266 for hunk in patch.split(patchfile):
4267 with patch.extract(ui, hunk) as patchdata:
4267 with patch.extract(ui, hunk) as patchdata:
4268 msg, node, rej = cmdutil.tryimportone(
4268 msg, node, rej = cmdutil.tryimportone(
4269 ui, repo, patchdata, parents, opts, msgs, hg.clean
4269 ui, repo, patchdata, parents, opts, msgs, hg.clean
4270 )
4270 )
4271 if msg:
4271 if msg:
4272 haspatch = True
4272 haspatch = True
4273 ui.note(msg + b'\n')
4273 ui.note(msg + b'\n')
4274 if update or exact:
4274 if update or exact:
4275 parents = repo[None].parents()
4275 parents = repo[None].parents()
4276 else:
4276 else:
4277 parents = [repo[node]]
4277 parents = [repo[node]]
4278 if rej:
4278 if rej:
4279 ui.write_err(_(b"patch applied partially\n"))
4279 ui.write_err(_(b"patch applied partially\n"))
4280 ui.write_err(
4280 ui.write_err(
4281 _(
4281 _(
4282 b"(fix the .rej files and run "
4282 b"(fix the .rej files and run "
4283 b"`hg commit --amend`)\n"
4283 b"`hg commit --amend`)\n"
4284 )
4284 )
4285 )
4285 )
4286 ret = 1
4286 ret = 1
4287 break
4287 break
4288
4288
4289 if not haspatch:
4289 if not haspatch:
4290 raise error.InputError(_(b'%s: no diffs found') % patchurl)
4290 raise error.InputError(_(b'%s: no diffs found') % patchurl)
4291
4291
4292 if msgs:
4292 if msgs:
4293 repo.savecommitmessage(b'\n* * *\n'.join(msgs))
4293 repo.savecommitmessage(b'\n* * *\n'.join(msgs))
4294 return ret
4294 return ret
4295
4295
4296
4296
@command(
    b'incoming|in',
    [
        (
            b'f',
            b'force',
            None,
            _(b'run even if remote repository is unrelated'),
        ),
        (b'n', b'newest-first', None, _(b'show newest record first')),
        (b'', b'bundle', b'', _(b'file to store the bundles into'), _(b'FILE')),
        (
            b'r',
            b'rev',
            [],
            _(b'a remote changeset intended to be added'),
            _(b'REV'),
        ),
        (b'B', b'bookmarks', False, _(b"compare bookmarks")),
        (
            b'b',
            b'branch',
            [],
            _(b'a specific branch you would like to pull'),
            _(b'BRANCH'),
        ),
    ]
    + logopts
    + remoteopts
    + subrepoopts,
    _(b'[-p] [-n] [-M] [-f] [-r REV]... [--bundle FILENAME] [SOURCE]'),
    helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT,
)
def incoming(ui, repo, source=b"default", **opts):
    """show new changesets found in source

    Show new changesets found in the specified path/URL or the default
    pull location. These are the changesets that would have been pulled
    by :hg:`pull` at the time you issued this command.

    See pull for valid source format details.

    .. container:: verbose

        With -B/--bookmarks, the result of bookmark comparison between
        local and remote repositories is displayed. With -v/--verbose,
        status is also displayed for each bookmark like below::

          BM1 01234567890a added
          BM2 1234567890ab advanced
          BM3 234567890abc diverged
          BM4 34567890abcd changed

        The action taken locally when pulling depends on the
        status of each bookmark:

        :``added``: pull will create it
        :``advanced``: pull will update it
        :``diverged``: pull will create a divergent bookmark
        :``changed``: result depends on remote changesets

        From the point of view of pulling behavior, bookmark
        existing only in the remote repository are treated as ``added``,
        even if it is in fact locally deleted.

    .. container:: verbose

        For remote repository, using --bundle avoids downloading the
        changesets twice if the incoming is followed by a pull.

        Examples:

        - show incoming changes with patches and full description::

            hg incoming -vp

        - show incoming changes excluding merges, store a bundle::

            hg in -vpM --bundle incoming.hg
            hg pull incoming.hg

        - briefly list changes inside a bundle::

            hg in changes.hg -T "{desc|firstline}\\n"

    Returns 0 if there are incoming changes, 1 otherwise.
    """
    # CLI options arrive as str-keyed kwargs; the rest of the code works
    # with bytes keys, so convert once up front.
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'graph'):
        logcmdutil.checkunsupportedgraphflags([], opts)

        # Callback handed to hg._incoming(): renders the incoming changesets
        # as an ASCII DAG instead of a flat list.
        def display(other, chlist, displayer):
            revdag = logcmdutil.graphrevs(other, chlist, opts)
            logcmdutil.displaygraph(
                ui, repo, revdag, displayer, graphmod.asciiedges
            )

        hg._incoming(display, lambda: 1, ui, repo, source, opts, buffered=True)
        # Graph mode always reports success here; the non-graph paths below
        # are skipped entirely.
        return 0

    # --subrepos cannot be combined with --bundle.
    cmdutil.check_incompatible_arguments(opts, b'subrepos', [b'bundle'])

    if opts.get(b'bookmarks'):
        srcs = urlutil.get_pull_paths(repo, ui, [source])
        for path in srcs:
            # XXX the "branches" options are not used. Should it be used?
            other = hg.peer(repo, opts, path)
            try:
                if b'bookmarks' not in other.listkeys(b'namespaces'):
                    ui.warn(_(b"remote doesn't support bookmarks\n"))
                    return 0
                ui.pager(b'incoming')
                ui.status(
                    _(b'comparing with %s\n') % urlutil.hidepassword(path.loc)
                )
                # NOTE(review): returns after the first path in srcs, so any
                # additional pull paths are never compared — confirm intended.
                return bookmarks.incoming(
                    ui, repo, other, mode=path.bookmarks_mode
                )
            finally:
                # Release the peer connection even on the early returns above.
                other.close()

    return hg.incoming(ui, repo, source, opts)
4419
4419
4420
4420
@command(
    b'init',
    remoteopts,
    _(b'[-e CMD] [--remotecmd CMD] [DEST]'),
    helpcategory=command.CATEGORY_REPO_CREATION,
    helpbasic=True,
    norepo=True,
)
def init(ui, dest=b".", **opts):
    """create a new repository in the given directory

    Initialize a new repository in the given directory. If the given
    directory does not exist, it will be created.

    If no directory is given, the current directory is used.

    It is possible to specify an ``ssh://`` URL as the destination.
    See :hg:`help urls` for more information.

    Returns 0 on success.
    """
    # Options come in str-keyed; downstream peer machinery expects bytes keys.
    byte_opts = pycompat.byteskwargs(opts)
    target = urlutil.get_clone_path_obj(ui, dest)
    # Opening a peer with create=True performs the actual repository
    # initialization; the peer handle itself is only needed transiently,
    # so close it immediately afterwards.
    created = hg.peer(ui, byte_opts, target, create=True)
    created.close()
4446
4446
4447
4447
@command(
    b'locate',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'search the repository as it is in REV'),
            _(b'REV'),
        ),
        (
            b'0',
            b'print0',
            None,
            _(b'end filenames with NUL, for use with xargs'),
        ),
        (
            b'f',
            b'fullpath',
            None,
            _(b'print complete paths from the filesystem root'),
        ),
    ]
    + walkopts,
    _(b'[OPTION]... [PATTERN]...'),
    helpcategory=command.CATEGORY_WORKING_DIRECTORY,
)
def locate(ui, repo, *pats, **opts):
    """locate files matching specific patterns (DEPRECATED)

    Print files under Mercurial control in the working directory whose
    names match the given patterns.

    By default, this command searches all directories in the working
    directory. To search just the current directory and its
    subdirectories, use "--include .".

    If no patterns are given to match, this command prints the names
    of all files under Mercurial control in the working directory.

    If you want to feed the output of this command into the "xargs"
    command, use the -0 option to both this command and "xargs". This
    will avoid the problem of "xargs" treating single filenames that
    contain whitespace as multiple filenames.

    See :hg:`help files` for a more versatile command.

    Returns 0 if a match is found, 1 otherwise.
    """
    opts = pycompat.byteskwargs(opts)
    # NUL terminator for xargs-style consumers, newline otherwise.
    terminator = b'\0' if opts.get(b'print0') else b'\n'
    ctx = logcmdutil.revsingle(repo, opts.get(b'rev'), None)

    # badfn swallows match errors so unmatched patterns are not reported.
    matcher = scmutil.match(
        ctx, pats, opts, default=b'relglob', badfn=lambda x, y: False
    )

    ui.pager(b'locate')
    if ctx.rev() is None:
        # When run on the working copy, "locate" includes removed files, so
        # we get the list of files from the dirstate.
        names = sorted(repo.dirstate.matches(matcher))
    else:
        names = ctx.matches(matcher)
    uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=bool(pats))
    want_full = opts.get(b'fullpath')

    exit_code = 1
    for name in names:
        line = repo.wjoin(name) if want_full else uipathfn(name)
        ui.write(line, terminator)
        # Any printed match flips the exit status to success.
        exit_code = 0

    return exit_code
4525
4525
4526
4526
@command(
    b'log|history',
    [
        (
            b'f',
            b'follow',
            None,
            _(
                b'follow changeset history, or file history across copies and renames'
            ),
        ),
        (
            b'',
            b'follow-first',
            None,
            _(b'only follow the first parent of merge changesets (DEPRECATED)'),
        ),
        (
            b'd',
            b'date',
            b'',
            _(b'show revisions matching date spec'),
            _(b'DATE'),
        ),
        (b'C', b'copies', None, _(b'show copied files')),
        (
            b'k',
            b'keyword',
            [],
            _(b'do case-insensitive search for a given text'),
            _(b'TEXT'),
        ),
        (
            b'r',
            b'rev',
            [],
            _(b'revisions to select or follow from'),
            _(b'REV'),
        ),
        (
            b'L',
            b'line-range',
            [],
            _(b'follow line range of specified file (EXPERIMENTAL)'),
            _(b'FILE,RANGE'),
        ),
        (
            b'',
            b'removed',
            None,
            _(b'include revisions where files were removed'),
        ),
        (
            b'm',
            b'only-merges',
            None,
            _(b'show only merges (DEPRECATED) (use -r "merge()" instead)'),
        ),
        (b'u', b'user', [], _(b'revisions committed by user'), _(b'USER')),
        (
            b'',
            b'only-branch',
            [],
            _(
                b'show only changesets within the given named branch (DEPRECATED)'
            ),
            _(b'BRANCH'),
        ),
        (
            b'b',
            b'branch',
            [],
            _(b'show changesets within the given named branch'),
            _(b'BRANCH'),
        ),
        (
            b'B',
            b'bookmark',
            [],
            _(b"show changesets within the given bookmark"),
            _(b'BOOKMARK'),
        ),
        (
            b'P',
            b'prune',
            [],
            _(b'do not display revision or any of its ancestors'),
            _(b'REV'),
        ),
    ]
    + logopts
    + walkopts,
    _(b'[OPTION]... [FILE]'),
    helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
    helpbasic=True,
    inferrepo=True,
    intents={INTENT_READONLY},
)
def log(ui, repo, *pats, **opts):
    """show revision history of entire repository or files

    Print the revision history of the specified files or the entire
    project.

    If no revision range is specified, the default is ``tip:0`` unless
    --follow is set.

    File history is shown without following rename or copy history of
    files. Use -f/--follow with a filename to follow history across
    renames and copies. --follow without a filename will only show
    ancestors of the starting revisions. The starting revisions can be
    specified by -r/--rev, which default to the working directory parent.

    By default this command prints revision number and changeset id,
    tags, non-trivial parents, user, date and time, and a summary for
    each commit. When the -v/--verbose switch is used, the list of
    changed files and full commit message are shown.

    With --graph the revisions are shown as an ASCII art DAG with the most
    recent changeset at the top.
    'o' is a changeset, '@' is a working directory parent, '%' is a changeset
    involved in an unresolved merge conflict, '_' closes a branch,
    'x' is obsolete, '*' is unstable, and '+' represents a fork where the
    changeset from the lines below is a parent of the 'o' merge on the same
    line.
    Paths in the DAG are represented with '|', '/' and so forth. ':' in place
    of a '|' indicates one or more revisions in a path are omitted.

    .. container:: verbose

       Use -L/--line-range FILE,M:N options to follow the history of lines
       from M to N in FILE. With -p/--patch only diff hunks affecting
       specified line range will be shown. This option requires --follow;
       it can be specified multiple times. Currently, this option is not
       compatible with --graph. This option is experimental.

    .. note::

       :hg:`log --patch` may generate unexpected diff output for merge
       changesets, as it will only compare the merge changeset against
       its first parent. Also, only files different from BOTH parents
       will appear in files:.

    .. note::

       For performance reasons, :hg:`log FILE` may omit duplicate changes
       made on branches and will not show removals or mode changes. To
       see all such changes, use the --removed switch.

    .. container:: verbose

       .. note::

          The history resulting from -L/--line-range options depends on diff
          options; for instance if white-spaces are ignored, respective changes
          with only white-spaces in specified line range will not be listed.

    .. container:: verbose

       Some examples:

       - changesets with full descriptions and file lists::

           hg log -v

       - changesets ancestral to the working directory::

           hg log -f

       - last 10 commits on the current branch::

           hg log -l 10 -b .

       - changesets showing all modifications of a file, including removals::

           hg log --removed file.c

       - all changesets that touch a directory, with diffs, excluding merges::

           hg log -Mp lib/

       - all revision numbers that match a keyword::

           hg log -k bug --template "{rev}\\n"

       - the full hash identifier of the working directory parent::

           hg log -r . --template "{node}\\n"

       - list available log templates::

           hg log -T list

       - check if a given changeset is included in a tagged release::

           hg log -r "a21ccf and ancestor(1.9)"

       - find all changesets by some user in a date range::

           hg log -k alice -d "may 2008 to jul 2008"

       - summary of all changesets after the last tag::

           hg log -r "last(tagged())::" --template "{desc|firstline}\\n"

       - changesets touching lines 13 to 23 for file.c::

           hg log -L file.c,13:23

       - changesets touching lines 13 to 23 for file.c and lines 2 to 6 of
         main.c with patch::

           hg log -L file.c,13:23 -L main.c,2:6 -p

    See :hg:`help dates` for a list of formats valid for -d/--date.

    See :hg:`help revisions` for more about specifying and ordering
    revisions.

    See :hg:`help templates` for more about pre-packaged styles and
    specifying custom templates. The default template used by the log
    command can be customized via the ``command-templates.log`` configuration
    setting.

    Returns 0 on success.

    """
    # Convert str-keyed CLI kwargs to the bytes-keyed dict used below.
    opts = pycompat.byteskwargs(opts)
    linerange = opts.get(b'line_range')

    # -L/--line-range only makes sense when following file history.
    if linerange and not opts.get(b'follow'):
        raise error.InputError(_(b'--line-range requires --follow'))

    if linerange and pats:
        # TODO: take pats as patterns with no line-range filter
        raise error.InputError(
            _(b'FILE arguments are not compatible with --line-range option')
        )

    # Resolve before parsing walk options so hash-like -r arguments can
    # address otherwise-hidden changesets (b'nowarn' selects the quiet mode).
    repo = scmutil.unhidehashlikerevs(repo, opts.get(b'rev'), b'nowarn')
    walk_opts = logcmdutil.parseopts(ui, pats, opts)
    revs, differ = logcmdutil.getrevs(repo, walk_opts)
    if linerange:
        # TODO: should follow file history from logcmdutil._initialrevs(),
        # then filter the result by logcmdutil._makerevset() and --limit
        revs, differ = logcmdutil.getlinerangerevs(repo, revs, opts)

    # -C/--copies: build a copy-lookup function bounded by the highest
    # selected revision (endrev stays None when no revisions matched).
    getcopies = None
    if opts.get(b'copies'):
        endrev = None
        if revs:
            endrev = revs.max() + 1
        getcopies = scmutil.getcopiesfn(repo, endrev=endrev)

    # Start the pager before any output is produced.
    ui.pager(b'log')
    displayer = logcmdutil.changesetdisplayer(
        ui, repo, opts, differ, buffered=True
    )
    if opts.get(b'graph'):
        displayfn = logcmdutil.displaygraphrevs
    else:
        displayfn = logcmdutil.displayrevs
    displayfn(ui, repo, revs, displayer, getcopies)
4790
4790
4791
4791
@command(
    b'manifest',
    [
        (b'r', b'rev', b'', _(b'revision to display'), _(b'REV')),
        (b'', b'all', False, _(b"list files from all revisions")),
    ]
    + formatteropts,
    _(b'[-r REV]'),
    helpcategory=command.CATEGORY_MAINTENANCE,
    intents={INTENT_READONLY},
)
def manifest(ui, repo, node=None, rev=None, **opts):
    """output the current or given revision of the project manifest

    Print a list of version controlled files for the given revision.
    If no revision is given, the first parent of the working directory
    is used, or the null revision if no revision is checked out.

    With -v, print file permissions, symlink and executable bits.
    With --debug, print file revision hashes.

    If option --all is specified, the list of all files from all revisions
    is printed. This includes deleted and renamed files.

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)
    fm = ui.formatter(b'manifest', opts)

    if opts.get(b'all'):
        # --all lists every path ever touched; it is incompatible with
        # selecting a single revision.
        if rev or node:
            raise error.InputError(_(b"can't specify a revision with --all"))

        res = set()
        for rev in repo:
            ctx = repo[rev]
            res |= set(ctx.files())

        ui.pager(b'manifest')
        for f in sorted(res):
            fm.startitem()
            fm.write(b"path", b'%s\n', f)
        fm.end()
        return

    if rev and node:
        raise error.InputError(_(b"please specify just one revision"))

    if not node:
        node = rev

    # flag -> display character / permission bits used by -v output
    char = {b'l': b'@', b'x': b'*', b'': b'', b't': b'd'}
    mode = {b'l': b'644', b'x': b'755', b'': b'644', b't': b'755'}
    if node:
        repo = scmutil.unhidehashlikerevs(repo, [node], b'nowarn')
    ctx = logcmdutil.revsingle(repo, node)
    mf = ctx.manifest()
    ui.pager(b'manifest')
    for f in ctx:
        fm.startitem()
        fm.context(ctx=ctx)
        fl = ctx[f].flags()
        fm.condwrite(ui.debugflag, b'hash', b'%s ', hex(mf[f]))
        fm.condwrite(ui.verbose, b'mode type', b'%s %1s ', mode[fl], char[fl])
        fm.write(b'path', b'%s\n', f)
    fm.end()
4858
4858
4859
4859
@command(
    b'merge',
    [
        (
            b'f',
            b'force',
            None,
            _(b'force a merge including outstanding changes (DEPRECATED)'),
        ),
        (b'r', b'rev', b'', _(b'revision to merge'), _(b'REV')),
        (
            b'P',
            b'preview',
            None,
            _(b'review revisions to merge (no merge is performed)'),
        ),
        (b'', b'abort', None, _(b'abort the ongoing merge')),
    ]
    + mergetoolopts,
    _(b'[-P] [[-r] REV]'),
    helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
    helpbasic=True,
)
def merge(ui, repo, node=None, **opts):
    """merge another revision into working directory

    The current working directory is updated with all changes made in
    the requested revision since the last common predecessor revision.

    Files that changed between either parent are marked as changed for
    the next commit and a commit must be performed before any further
    updates to the repository are allowed. The next commit will have
    two parents.

    ``--tool`` can be used to specify the merge tool used for file
    merges. It overrides the HGMERGE environment variable and your
    configuration files. See :hg:`help merge-tools` for options.

    If no revision is specified, the working directory's parent is a
    head revision, and the current branch contains exactly one other
    head, the other head is merged with by default. Otherwise, an
    explicit revision with which to merge must be provided.

    See :hg:`help resolve` for information on handling file conflicts.

    To undo an uncommitted merge, use :hg:`merge --abort` which
    will check out a clean copy of the original merge parent, losing
    all changes.

    Returns 0 on success, 1 if there are unresolved files.
    """

    opts = pycompat.byteskwargs(opts)
    abort = opts.get(b'abort')
    # --abort only makes sense while a merge is in progress (p2 set)
    if abort and repo.dirstate.p2() == repo.nullid:
        cmdutil.wrongtooltocontinue(repo, _(b'merge'))
    cmdutil.check_incompatible_arguments(opts, b'abort', [b'rev', b'preview'])
    if abort:
        state = cmdutil.getunfinishedstate(repo)
        if state and state._opname != b'merge':
            raise error.StateError(
                _(b'cannot abort merge with %s in progress') % (state._opname),
                hint=state.hint(),
            )
        if node:
            raise error.InputError(_(b"cannot specify a node with --abort"))
        return hg.abortmerge(repo.ui, repo)

    if opts.get(b'rev') and node:
        raise error.InputError(_(b"please specify just one revision"))
    if not node:
        node = opts.get(b'rev')

    if node:
        ctx = logcmdutil.revsingle(repo, node)
    else:
        if ui.configbool(b'commands', b'merge.require-rev'):
            raise error.InputError(
                _(
                    b'configuration requires specifying revision to merge '
                    b'with'
                )
            )
        ctx = repo[destutil.destmerge(repo)]

    if ctx.node() is None:
        raise error.InputError(
            _(b'merging with the working copy has no effect')
        )

    if opts.get(b'preview'):
        # find nodes that are ancestors of p2 but not of p1
        p1 = repo[b'.'].node()
        p2 = ctx.node()
        nodes = repo.changelog.findmissing(common=[p1], heads=[p2])

        displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
        for node in nodes:
            displayer.show(repo[node])
        displayer.close()
        return 0

    # ui.forcemerge is an internal variable, do not document
    overrides = {(b'ui', b'forcemerge'): opts.get(b'tool', b'')}
    with ui.configoverride(overrides, b'merge'):
        force = opts.get(b'force')
        labels = [b'working copy', b'merge rev', b'common ancestor']
        return hg.merge(ctx, force=force, labels=labels)
4968
4968
4969
4969
# Register 'merge' as an unfinished-state operation so other commands can
# detect an in-progress merge, refuse to run, and point the user at the
# continue/abort commands.
statemod.addunfinished(
    b'merge',
    fname=None,
    clearable=True,
    allowcommit=True,
    cmdmsg=_(b'outstanding uncommitted merge'),
    abortfunc=hg.abortmerge,
    statushint=_(
        b'To continue: hg commit\nTo abort: hg merge --abort'
    ),
    cmdhint=_(b"use 'hg commit' or 'hg merge --abort'"),
)
4982
4982
4983
4983
@command(
    b'outgoing|out',
    [
        (
            b'f',
            b'force',
            None,
            _(b'run even when the destination is unrelated'),
        ),
        (
            b'r',
            b'rev',
            [],
            _(b'a changeset intended to be included in the destination'),
            _(b'REV'),
        ),
        (b'n', b'newest-first', None, _(b'show newest record first')),
        (b'B', b'bookmarks', False, _(b'compare bookmarks')),
        (
            b'b',
            b'branch',
            [],
            _(b'a specific branch you would like to push'),
            _(b'BRANCH'),
        ),
    ]
    + logopts
    + remoteopts
    + subrepoopts,
    _(b'[-M] [-p] [-n] [-f] [-r REV]... [DEST]...'),
    helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT,
)
def outgoing(ui, repo, *dests, **opts):
    """show changesets not found in the destination

    Show changesets not found in the specified destination repository
    or the default push location. These are the changesets that would
    be pushed if a push was requested.

    See pull for details of valid destination formats.

    .. container:: verbose

      With -B/--bookmarks, the result of bookmark comparison between
      local and remote repositories is displayed. With -v/--verbose,
      status is also displayed for each bookmark like below::

        BM1               01234567890a added
        BM2                            deleted
        BM3               234567890abc advanced
        BM4               34567890abcd diverged
        BM5               4567890abcde changed

      The action taken when pushing depends on the
      status of each bookmark:

      :``added``: push with ``-B`` will create it
      :``deleted``: push with ``-B`` will delete it
      :``advanced``: push will update it
      :``diverged``: push with ``-B`` will update it
      :``changed``: push with ``-B`` will update it

      From the point of view of pushing behavior, bookmarks
      existing only in the remote repository are treated as
      ``deleted``, even if it is in fact added remotely.

    Returns 0 if there are outgoing changes, 1 otherwise.
    """
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'bookmarks'):
        # bookmark comparison mode: talk to the first push path only and
        # return its result directly.
        for path in urlutil.get_push_paths(repo, ui, dests):
            other = hg.peer(repo, opts, path)
            try:
                if b'bookmarks' not in other.listkeys(b'namespaces'):
                    ui.warn(_(b"remote doesn't support bookmarks\n"))
                    return 0
                ui.status(
                    _(b'comparing with %s\n') % urlutil.hidepassword(path.loc)
                )
                ui.pager(b'outgoing')
                return bookmarks.outgoing(ui, repo, other)
            finally:
                # always release the peer connection, even on early return
                other.close()

    return hg.outgoing(ui, repo, dests, opts)
5069
5069
5070
5070
@command(
    b'parents',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'show parents of the specified revision'),
            _(b'REV'),
        ),
    ]
    + templateopts,
    _(b'[-r REV] [FILE]'),
    helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
    inferrepo=True,
)
def parents(ui, repo, file_=None, **opts):
    """show the parents of the working directory or revision (DEPRECATED)

    Print the working directory's parent revisions. If a revision is
    given via -r/--rev, the parent of that revision will be printed.
    If a file argument is given, the revision in which the file was
    last changed (before the working directory revision or the
    argument to --rev if given) is printed.

    This command is equivalent to::

        hg log -r "p1()+p2()" or
        hg log -r "p1(REV)+p2(REV)" or
        hg log -r "max(::p1() and file(FILE))+max(::p2() and file(FILE))" or
        hg log -r "max(::p1(REV) and file(FILE))+max(::p2(REV) and file(FILE))"

    See :hg:`summary` and :hg:`help revsets` for related information.

    Returns 0 on success.
    """

    opts = pycompat.byteskwargs(opts)
    rev = opts.get(b'rev')
    if rev:
        repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
    ctx = logcmdutil.revsingle(repo, rev, None)

    if file_:
        m = scmutil.match(ctx, (file_,), opts)
        if m.anypats() or len(m.files()) != 1:
            raise error.InputError(_(b'can only specify an explicit filename'))
        file_ = m.files()[0]
        filenodes = []
        for cp in ctx.parents():
            if not cp:
                continue
            try:
                filenodes.append(cp.filenode(file_))
            except error.LookupError:
                # the file does not exist in this parent; skip it
                pass
        if not filenodes:
            raise error.InputError(_(b"'%s' not found in manifest") % file_)
        p = []
        for fn in filenodes:
            fctx = repo.filectx(file_, fileid=fn)
            p.append(fctx.node())
    else:
        p = [cp.node() for cp in ctx.parents()]

    displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
    for n in p:
        # skip the null parent of root changesets
        if n != repo.nullid:
            displayer.show(repo[n])
    displayer.close()
5141
5141
5142
5142
@command(
    b'paths',
    formatteropts,
    _(b'[NAME]'),
    helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT,
    optionalrepo=True,
    intents={INTENT_READONLY},
)
def paths(ui, repo, search=None, **opts):
    """show aliases for remote repositories

    Show definition of symbolic path name NAME. If no name is given,
    show definition of all available names.

    Option -q/--quiet suppresses all output when searching for NAME
    and shows only the path names when listing all definitions.

    Path names are defined in the [paths] section of your
    configuration file and in ``/etc/mercurial/hgrc``. If run inside a
    repository, ``.hg/hgrc`` is used, too.

    The path names ``default`` and ``default-push`` have a special
    meaning.  When performing a push or pull operation, they are used
    as fallbacks if no location is specified on the command-line.
    When ``default-push`` is set, it will be used for push and
    ``default`` will be used for pull; otherwise ``default`` is used
    as the fallback for both.  When cloning a repository, the clone
    source is written as ``default`` in ``.hg/hgrc``.

    .. note::

       ``default`` and ``default-push`` apply to all inbound (e.g.
       :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email`
       and :hg:`bundle`) operations.

    See :hg:`help urls` for more information.

    .. container:: verbose

      Template:

      The following keywords are supported. See also :hg:`help templates`.

      :name:    String. Symbolic name of the path alias.
      :pushurl: String. URL for push operations.
      :url:     String. URL or directory path for the other operations.

    Returns 0 on success.
    """

    opts = pycompat.byteskwargs(opts)

    pathitems = urlutil.list_paths(ui, search)
    ui.pager(b'paths')

    fm = ui.formatter(b'paths', opts)
    if fm.isplain():
        # plain output hides passwords; structured output (json/template)
        # keeps the raw URL
        hidepassword = urlutil.hidepassword
    else:
        hidepassword = bytes
    if ui.quiet:
        namefmt = b'%s\n'
    else:
        namefmt = b'%s = '
    showsubopts = not search and not ui.quiet

    for name, path in pathitems:
        fm.startitem()
        fm.condwrite(not search, b'name', namefmt, name)
        fm.condwrite(not ui.quiet, b'url', b'%s\n', hidepassword(path.rawloc))
        for subopt, value in sorted(path.suboptions.items()):
            assert subopt not in (b'name', b'url')
            if showsubopts:
                fm.plain(b'%s:%s = ' % (name, subopt))
            if isinstance(value, bool):
                # render booleans the way hgrc spells them
                if value:
                    value = b'yes'
                else:
                    value = b'no'
            fm.condwrite(showsubopts, subopt, b'%s\n', value)

    fm.end()

    if search and not pathitems:
        if not ui.quiet:
            ui.warn(_(b"not found!\n"))
        return 1
    else:
        return 0
5232
5232
5233
5233
@command(
    b'phase',
    [
        (b'p', b'public', False, _(b'set changeset phase to public')),
        (b'd', b'draft', False, _(b'set changeset phase to draft')),
        (b's', b'secret', False, _(b'set changeset phase to secret')),
        (b'f', b'force', False, _(b'allow to move boundary backward')),
        (b'r', b'rev', [], _(b'target revision'), _(b'REV')),
    ],
    _(b'[-p|-d|-s] [-f] [-r] [REV...]'),
    helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
)
def phase(ui, repo, *revs, **opts):
    """set or show the current phase name

    With no argument, show the phase name of the current revision(s).

    With one of -p/--public, -d/--draft or -s/--secret, change the
    phase value of the specified revisions.

    Unless -f/--force is specified, :hg:`phase` won't move changesets from a
    lower phase to a higher phase. Phases are ordered as follows::

        public < draft < secret

    Returns 0 on success, 1 if some phases could not be changed.

    (For more information about the phases concept, see :hg:`help phases`.)
    """
    opts = pycompat.byteskwargs(opts)
    # search for a unique phase argument
    targetphase = None
    for idx, name in enumerate(phases.cmdphasenames):
        if opts[name]:
            if targetphase is not None:
                raise error.InputError(_(b'only one phase can be specified'))
            targetphase = idx

    # look for specified revision
    revs = list(revs)
    revs.extend(opts[b'rev'])
    if revs:
        revs = logcmdutil.revrange(repo, revs)
    else:
        # display both parents as the second parent phase can influence
        # the phase of a merge commit
        revs = [c.rev() for c in repo[None].parents()]

    ret = 0
    if targetphase is None:
        # display
        for r in revs:
            ctx = repo[r]
            ui.write(b'%i: %s\n' % (ctx.rev(), ctx.phasestr()))
    else:
        with repo.lock(), repo.transaction(b"phase") as tr:
            # set phase
            if not revs:
                raise error.InputError(_(b'empty revision set'))
            nodes = [repo[r].node() for r in revs]
            # moving revision from public to draft may hide them
            # We have to check result on an unfiltered repository
            unfi = repo.unfiltered()
            getphase = unfi._phasecache.phase
            olddata = [getphase(unfi, r) for r in unfi]
            phases.advanceboundary(repo, tr, targetphase, nodes)
            if opts[b'force']:
                phases.retractboundary(repo, tr, targetphase, nodes)
            getphase = unfi._phasecache.phase
            newdata = [getphase(unfi, r) for r in unfi]
            changes = sum(newdata[r] != olddata[r] for r in unfi)
            cl = unfi.changelog
            rejected = [n for n in nodes if newdata[cl.rev(n)] < targetphase]
            if rejected:
                ui.warn(
                    _(
                        b'cannot move %i changesets to a higher '
                        b'phase, use --force\n'
                    )
                    % len(rejected)
                )
                ret = 1
            if changes:
                msg = _(b'phase changed for %i changesets\n') % changes
                if ret:
                    ui.status(msg)
                else:
                    ui.note(msg)
            else:
                ui.warn(_(b'no phases changed\n'))
    return ret
5324 return ret
5325
5325
5326
5326
def postincoming(ui, repo, modheads, optupdate, checkout, brev):
    """Finish up after changesets were added via pull/unbundle.

    Arguments:

    :modheads: how many heads the pull/unbundle changed (may be None)
    :optupdate: whether the working directory should be updated
    :checkout: revision to update to (or None for the default destination)
    :brev: a name, possibly a bookmark, to activate after the update

    Returns True if the update raised any conflict, False otherwise.
    """
    if modheads == 0:
        # nothing was added; nothing to report or update
        return False

    if optupdate:
        try:
            return hg.updatetotally(ui, repo, checkout, brev)
        except error.UpdateAbort as inst:
            # re-raise with a pull-specific "not updating" message
            msg = _(b"not updating: %s") % stringutil.forcebytestr(inst)
            raise error.UpdateAbort(msg, hint=inst.hint)

    if modheads is not None and modheads > 1:
        # multiple heads arrived: suggest the appropriate next step
        nbheads = len(repo.branchheads())
        if nbheads == modheads:
            msg = _(b"(run 'hg heads' to see heads, 'hg merge' to merge)\n")
        elif nbheads > 1:
            msg = _(b"(run 'hg heads .' to see heads, 'hg merge' to merge)\n")
        else:
            msg = _(b"(run 'hg heads' to see heads)\n")
        ui.status(msg)
    elif not ui.configbool(b'commands', b'update.requiredest'):
        ui.status(_(b"(run 'hg update' to get a working copy)\n"))
    return False
5363
5363
5364
5364
@command(
    b'pull',
    [
        (
            b'u',
            b'update',
            None,
            _(b'update to new branch head if new descendants were pulled'),
        ),
        (
            b'f',
            b'force',
            None,
            _(b'run even when remote repository is unrelated'),
        ),
        (
            b'',
            b'confirm',
            None,
            _(b'confirm pull before applying changes'),
        ),
        (
            b'r',
            b'rev',
            [],
            _(b'a remote changeset intended to be added'),
            _(b'REV'),
        ),
        (b'B', b'bookmark', [], _(b"bookmark to pull"), _(b'BOOKMARK')),
        (
            b'b',
            b'branch',
            [],
            _(b'a specific branch you would like to pull'),
            _(b'BRANCH'),
        ),
    ]
    + remoteopts,
    _(b'[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]...'),
    helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT,
    helpbasic=True,
)
def pull(ui, repo, *sources, **opts):
    """pull changes from the specified source

    Pull changes from a remote repository to a local one.

    This finds all changes from the repository at the specified path
    or URL and adds them to a local repository (the current one unless
    -R is specified). By default, this does not update the copy of the
    project in the working directory.

    When cloning from servers that support it, Mercurial may fetch
    pre-generated data. When this is done, hooks operating on incoming
    changesets and changegroups may fire more than once, once for each
    pre-generated bundle and as well as for any additional remaining
    data. See :hg:`help -e clonebundles` for more.

    Use :hg:`incoming` if you want to see what would have been added
    by a pull at the time you issued this command. If you then decide
    to add those changes to the repository, you should use :hg:`pull
    -r X` where ``X`` is the last changeset listed by :hg:`incoming`.

    If SOURCE is omitted, the 'default' path will be used.
    See :hg:`help urls` for more information.

    If multiple sources are specified, they will be pulled sequentially
    as if the command was run multiple times. If --update is specified,
    the command will stop at the first failed update.

    Specifying bookmark as ``.`` is equivalent to specifying the active
    bookmark's name.

    Returns 0 on success, 1 if an update had unresolved files.
    """

    opts = pycompat.byteskwargs(opts)
    if ui.configbool(b'commands', b'update.requiredest') and opts.get(
        b'update'
    ):
        msg = _(b'update destination required by configuration')
        hint = _(b'use hg pull followed by hg update DEST')
        raise error.InputError(msg, hint=hint)

    # pre-initialize so the checks after the loop cannot hit an unbound
    # name even if no pull path is yielded
    update_conflict = None
    for path in urlutil.get_pull_paths(repo, ui, sources):
        ui.status(_(b'pulling from %s\n') % urlutil.hidepassword(path.loc))
        ui.flush()
        other = hg.peer(repo, opts, path)
        update_conflict = None
        try:
            branches = (path.branch, opts.get(b'branch', []))
            revs, checkout = hg.addbranchrevs(
                repo, other, branches, opts.get(b'rev')
            )

            pullopargs = {}

            nodes = None
            if opts.get(b'bookmark') or revs:
                # The list of bookmark used here is the same used to actually update
                # the bookmark names, to avoid the race from issue 4689 and we do
                # all lookup and bookmark queries in one go so they see the same
                # version of the server state (issue 4700).
                nodes = []
                fnodes = []
                revs = revs or []
                if revs and not other.capable(b'lookup'):
                    err = _(
                        b"other repository doesn't support revision lookup, "
                        b"so a rev cannot be specified."
                    )
                    raise error.Abort(err)
                with other.commandexecutor() as e:
                    fremotebookmarks = e.callcommand(
                        b'listkeys', {b'namespace': b'bookmarks'}
                    )
                    for r in revs:
                        fnodes.append(e.callcommand(b'lookup', {b'key': r}))
                remotebookmarks = fremotebookmarks.result()
                remotebookmarks = bookmarks.unhexlifybookmarks(remotebookmarks)
                pullopargs[b'remotebookmarks'] = remotebookmarks
                for b in opts.get(b'bookmark', []):
                    b = repo._bookmarks.expandname(b)
                    if b not in remotebookmarks:
                        raise error.InputError(
                            _(b'remote bookmark %s not found!') % b
                        )
                    nodes.append(remotebookmarks[b])
                for i, rev in enumerate(revs):
                    node = fnodes[i].result()
                    nodes.append(node)
                    if rev == checkout:
                        checkout = node

            # only take the working-directory lock when we may update it
            wlock = util.nullcontextmanager()
            if opts.get(b'update'):
                wlock = repo.wlock()
            with wlock:
                pullopargs.update(opts.get(b'opargs', {}))
                modheads = exchange.pull(
                    repo,
                    other,
                    path=path,
                    heads=nodes,
                    force=opts.get(b'force'),
                    bookmarks=opts.get(b'bookmark', ()),
                    opargs=pullopargs,
                    confirm=opts.get(b'confirm'),
                ).cgresult

                # brev is a name, which might be a bookmark to be activated at
                # the end of the update. In other words, it is an explicit
                # destination of the update
                brev = None

                if checkout:
                    checkout = repo.unfiltered().changelog.rev(checkout)

                # order below depends on implementation of
                # hg.addbranchrevs(). opts['bookmark'] is ignored,
                # because 'checkout' is determined without it.
                if opts.get(b'rev'):
                    brev = opts[b'rev'][0]
                elif opts.get(b'branch'):
                    brev = opts[b'branch'][0]
                else:
                    brev = path.branch

                # XXX path: we are losing the `path` object here. Keeping it
                # would be valuable. For example as a "variant" as we do
                # for pushes.
                repo._subtoppath = path.loc
                try:
                    update_conflict = postincoming(
                        ui, repo, modheads, opts.get(b'update'), checkout, brev
                    )
                except error.FilteredRepoLookupError as exc:
                    msg = _(b'cannot update to target: %s') % exc.args[0]
                    exc.args = (msg,) + exc.args[1:]
                    raise
                finally:
                    del repo._subtoppath

        finally:
            other.close()
        # skip the remaining pull source if they are some conflict.
        if update_conflict:
            break
    if update_conflict:
        return 1
    else:
        return 0
5557
5557
5558
5558
@command(
    b'purge|clean',
    [
        (b'a', b'abort-on-err', None, _(b'abort if an error occurs')),
        (b'', b'all', None, _(b'purge ignored files too')),
        (b'i', b'ignored', None, _(b'purge only ignored files')),
        (b'', b'dirs', None, _(b'purge empty directories')),
        (b'', b'files', None, _(b'purge files')),
        (b'p', b'print', None, _(b'print filenames instead of deleting them')),
        (
            b'0',
            b'print0',
            None,
            _(
                b'end filenames with NUL, for use with xargs'
                b' (implies -p/--print)'
            ),
        ),
        (b'', b'confirm', None, _(b'ask before permanently deleting files')),
    ]
    + cmdutil.walkopts,
    _(b'hg purge [OPTION]... [DIR]...'),
    helpcategory=command.CATEGORY_WORKING_DIRECTORY,
)
def purge(ui, repo, *dirs, **opts):
    """removes files not tracked by Mercurial

    Delete files not known to Mercurial. This is useful to test local
    and uncommitted changes in an otherwise-clean source tree.

    This means that purge will delete the following by default:

    - Unknown files: files marked with "?" by :hg:`status`
    - Empty directories: in fact Mercurial ignores directories unless
      they contain files under source control management

    But it will leave untouched:

    - Modified and unmodified tracked files
    - Ignored files (unless -i or --all is specified)
    - New files added to the repository (with :hg:`add`)

    The --files and --dirs options can be used to direct purge to delete
    only files, only directories, or both. If neither option is given,
    both will be deleted.

    If directories are given on the command line, only files in these
    directories are considered.

    Be careful with purge, as you could irreversibly delete some files
    you forgot to add to the repository. If you only want to print the
    list of files that this program would delete, use the --print
    option.
    """
    opts = pycompat.byteskwargs(opts)
    cmdutil.check_at_most_one_arg(opts, b'all', b'ignored')

    # --print (and --print0, which implies it) turns purge into a dry run
    dryrun = bool(opts.get(b'print'))
    eol = b'\n'
    if opts.get(b'print0'):
        dryrun = True
        eol = b'\0'

    if opts.get(b'all', False):
        ignored = unknown = True
    else:
        ignored = opts.get(b'ignored', False)
        unknown = not ignored

    confirm = opts.get(b'confirm')
    if confirm is None:
        # no explicit --confirm: default depends on whether the legacy
        # purge extension is enabled (which never asked for confirmation)
        try:
            extensions.find(b'purge')
        except KeyError:
            confirm = True
        else:
            confirm = False

    removefiles = opts.get(b'files')
    removedirs = opts.get(b'dirs')
    if not removefiles and not removedirs:
        # neither --files nor --dirs given: purge both
        removefiles = removedirs = True

    match = scmutil.match(repo[None], dirs, opts)

    paths = mergemod.purge(
        repo,
        match,
        unknown=unknown,
        ignored=ignored,
        removeemptydirs=removedirs,
        removefiles=removefiles,
        abortonerror=opts.get(b'abort_on_err'),
        noop=dryrun,
        confirm=confirm,
    )

    for p in paths:
        if dryrun:
            ui.write(b'%s%s' % (p, eol))
5659
5659
5660
5660
5661 @command(
5661 @command(
5662 b'push',
5662 b'push',
5663 [
5663 [
5664 (b'f', b'force', None, _(b'force push')),
5664 (b'f', b'force', None, _(b'force push')),
5665 (
5665 (
5666 b'r',
5666 b'r',
5667 b'rev',
5667 b'rev',
5668 [],
5668 [],
5669 _(b'a changeset intended to be included in the destination'),
5669 _(b'a changeset intended to be included in the destination'),
5670 _(b'REV'),
5670 _(b'REV'),
5671 ),
5671 ),
5672 (b'B', b'bookmark', [], _(b"bookmark to push"), _(b'BOOKMARK')),
5672 (b'B', b'bookmark', [], _(b"bookmark to push"), _(b'BOOKMARK')),
5673 (b'', b'all-bookmarks', None, _(b"push all bookmarks (EXPERIMENTAL)")),
5673 (b'', b'all-bookmarks', None, _(b"push all bookmarks (EXPERIMENTAL)")),
5674 (
5674 (
5675 b'b',
5675 b'b',
5676 b'branch',
5676 b'branch',
5677 [],
5677 [],
5678 _(b'a specific branch you would like to push'),
5678 _(b'a specific branch you would like to push'),
5679 _(b'BRANCH'),
5679 _(b'BRANCH'),
5680 ),
5680 ),
5681 (b'', b'new-branch', False, _(b'allow pushing a new branch')),
5681 (b'', b'new-branch', False, _(b'allow pushing a new branch')),
5682 (
5682 (
5683 b'',
5683 b'',
5684 b'pushvars',
5684 b'pushvars',
5685 [],
5685 [],
5686 _(b'variables that can be sent to server (ADVANCED)'),
5686 _(b'variables that can be sent to server (ADVANCED)'),
5687 ),
5687 ),
5688 (
5688 (
5689 b'',
5689 b'',
5690 b'publish',
5690 b'publish',
5691 False,
5691 False,
5692 _(b'push the changeset as public (EXPERIMENTAL)'),
5692 _(b'push the changeset as public (EXPERIMENTAL)'),
5693 ),
5693 ),
5694 ]
5694 ]
5695 + remoteopts,
5695 + remoteopts,
5696 _(b'[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]...'),
5696 _(b'[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]...'),
5697 helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT,
5697 helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT,
5698 helpbasic=True,
5698 helpbasic=True,
5699 )
5699 )
5700 def push(ui, repo, *dests, **opts):
5700 def push(ui, repo, *dests, **opts):
5701 """push changes to the specified destination
5701 """push changes to the specified destination
5702
5702
5703 Push changesets from the local repository to the specified
5703 Push changesets from the local repository to the specified
5704 destination.
5704 destination.
5705
5705
5706 This operation is symmetrical to pull: it is identical to a pull
5706 This operation is symmetrical to pull: it is identical to a pull
5707 in the destination repository from the current one.
5707 in the destination repository from the current one.
5708
5708
5709 By default, push will not allow creation of new heads at the
5709 By default, push will not allow creation of new heads at the
5710 destination, since multiple heads would make it unclear which head
5710 destination, since multiple heads would make it unclear which head
5711 to use. In this situation, it is recommended to pull and merge
5711 to use. In this situation, it is recommended to pull and merge
5712 before pushing.
5712 before pushing.
5713
5713
5714 Use --new-branch if you want to allow push to create a new named
5714 Use --new-branch if you want to allow push to create a new named
5715 branch that is not present at the destination. This allows you to
5715 branch that is not present at the destination. This allows you to
5716 only create a new branch without forcing other changes.
5716 only create a new branch without forcing other changes.
5717
5717
5718 .. note::
5718 .. note::
5719
5719
5720 Extra care should be taken with the -f/--force option,
5720 Extra care should be taken with the -f/--force option,
5721 which will push all new heads on all branches, an action which will
5721 which will push all new heads on all branches, an action which will
5722 almost always cause confusion for collaborators.
5722 almost always cause confusion for collaborators.
5723
5723
5724 If -r/--rev is used, the specified revision and all its ancestors
5724 If -r/--rev is used, the specified revision and all its ancestors
5725 will be pushed to the remote repository.
5725 will be pushed to the remote repository.
5726
5726
5727 If -B/--bookmark is used, the specified bookmarked revision, its
5727 If -B/--bookmark is used, the specified bookmarked revision, its
5728 ancestors, and the bookmark will be pushed to the remote
5728 ancestors, and the bookmark will be pushed to the remote
5729 repository. Specifying ``.`` is equivalent to specifying the active
5729 repository. Specifying ``.`` is equivalent to specifying the active
5730 bookmark's name. Use the --all-bookmarks option for pushing all
5730 bookmark's name. Use the --all-bookmarks option for pushing all
5731 current bookmarks.
5731 current bookmarks.
5732
5732
5733 Please see :hg:`help urls` for important details about ``ssh://``
5733 Please see :hg:`help urls` for important details about ``ssh://``
5734 URLs. If DESTINATION is omitted, a default path will be used.
5734 URLs. If DESTINATION is omitted, a default path will be used.
5735
5735
5736 When passed multiple destinations, push will process them one after the
5736 When passed multiple destinations, push will process them one after the
5737 other, but stop should an error occur.
5737 other, but stop should an error occur.
5738
5738
5739 .. container:: verbose
5739 .. container:: verbose
5740
5740
5741 The --pushvars option sends strings to the server that become
5741 The --pushvars option sends strings to the server that become
5742 environment variables prepended with ``HG_USERVAR_``. For example,
5742 environment variables prepended with ``HG_USERVAR_``. For example,
5743 ``--pushvars ENABLE_FEATURE=true``, provides the server side hooks with
5743 ``--pushvars ENABLE_FEATURE=true``, provides the server side hooks with
5744 ``HG_USERVAR_ENABLE_FEATURE=true`` as part of their environment.
5744 ``HG_USERVAR_ENABLE_FEATURE=true`` as part of their environment.
5745
5745
5746 pushvars can provide for user-overridable hooks as well as set debug
5746 pushvars can provide for user-overridable hooks as well as set debug
5747 levels. One example is having a hook that blocks commits containing
5747 levels. One example is having a hook that blocks commits containing
5748 conflict markers, but enables the user to override the hook if the file
5748 conflict markers, but enables the user to override the hook if the file
5749 is using conflict markers for testing purposes or the file format has
5749 is using conflict markers for testing purposes or the file format has
5750 strings that look like conflict markers.
5750 strings that look like conflict markers.
5751
5751
5752 By default, servers will ignore `--pushvars`. To enable it add the
5752 By default, servers will ignore `--pushvars`. To enable it add the
5753 following to your configuration file::
5753 following to your configuration file::
5754
5754
5755 [push]
5755 [push]
5756 pushvars.server = true
5756 pushvars.server = true
5757
5757
5758 Returns 0 if push was successful, 1 if nothing to push.
5758 Returns 0 if push was successful, 1 if nothing to push.
5759 """
5759 """
5760
5760
5761 opts = pycompat.byteskwargs(opts)
5761 opts = pycompat.byteskwargs(opts)
5762
5762
5763 if opts.get(b'all_bookmarks'):
5763 if opts.get(b'all_bookmarks'):
5764 cmdutil.check_incompatible_arguments(
5764 cmdutil.check_incompatible_arguments(
5765 opts,
5765 opts,
5766 b'all_bookmarks',
5766 b'all_bookmarks',
5767 [b'bookmark', b'rev'],
5767 [b'bookmark', b'rev'],
5768 )
5768 )
5769 opts[b'bookmark'] = list(repo._bookmarks)
5769 opts[b'bookmark'] = list(repo._bookmarks)
5770
5770
5771 if opts.get(b'bookmark'):
5771 if opts.get(b'bookmark'):
5772 ui.setconfig(b'bookmarks', b'pushing', opts[b'bookmark'], b'push')
5772 ui.setconfig(b'bookmarks', b'pushing', opts[b'bookmark'], b'push')
5773 for b in opts[b'bookmark']:
5773 for b in opts[b'bookmark']:
5774 # translate -B options to -r so changesets get pushed
5774 # translate -B options to -r so changesets get pushed
5775 b = repo._bookmarks.expandname(b)
5775 b = repo._bookmarks.expandname(b)
5776 if b in repo._bookmarks:
5776 if b in repo._bookmarks:
5777 opts.setdefault(b'rev', []).append(b)
5777 opts.setdefault(b'rev', []).append(b)
5778 else:
5778 else:
5779 # if we try to push a deleted bookmark, translate it to null
5779 # if we try to push a deleted bookmark, translate it to null
5780 # this lets simultaneous -r, -b options continue working
5780 # this lets simultaneous -r, -b options continue working
5781 opts.setdefault(b'rev', []).append(b"null")
5781 opts.setdefault(b'rev', []).append(b"null")
5782
5782
5783 some_pushed = False
5783 some_pushed = False
5784 result = 0
5784 result = 0
5785 for path in urlutil.get_push_paths(repo, ui, dests):
5785 for path in urlutil.get_push_paths(repo, ui, dests):
5786 dest = path.loc
5786 dest = path.loc
5787 branches = (path.branch, opts.get(b'branch') or [])
5787 branches = (path.branch, opts.get(b'branch') or [])
5788 ui.status(_(b'pushing to %s\n') % urlutil.hidepassword(dest))
5788 ui.status(_(b'pushing to %s\n') % urlutil.hidepassword(dest))
5789 revs, checkout = hg.addbranchrevs(
5789 revs, checkout = hg.addbranchrevs(
5790 repo, repo, branches, opts.get(b'rev')
5790 repo, repo, branches, opts.get(b'rev')
5791 )
5791 )
5792 other = hg.peer(repo, opts, dest)
5792 other = hg.peer(repo, opts, dest)
5793
5793
5794 try:
5794 try:
5795 if revs:
5795 if revs:
5796 revs = [repo[r].node() for r in logcmdutil.revrange(repo, revs)]
5796 revs = [repo[r].node() for r in logcmdutil.revrange(repo, revs)]
5797 if not revs:
5797 if not revs:
5798 raise error.InputError(
5798 raise error.InputError(
5799 _(b"specified revisions evaluate to an empty set"),
5799 _(b"specified revisions evaluate to an empty set"),
5800 hint=_(b"use different revision arguments"),
5800 hint=_(b"use different revision arguments"),
5801 )
5801 )
5802 elif path.pushrev:
5802 elif path.pushrev:
5803 # It doesn't make any sense to specify ancestor revisions. So limit
5803 # It doesn't make any sense to specify ancestor revisions. So limit
5804 # to DAG heads to make discovery simpler.
5804 # to DAG heads to make discovery simpler.
5805 expr = revsetlang.formatspec(b'heads(%r)', path.pushrev)
5805 expr = revsetlang.formatspec(b'heads(%r)', path.pushrev)
5806 revs = scmutil.revrange(repo, [expr])
5806 revs = scmutil.revrange(repo, [expr])
5807 revs = [repo[rev].node() for rev in revs]
5807 revs = [repo[rev].node() for rev in revs]
5808 if not revs:
5808 if not revs:
5809 raise error.InputError(
5809 raise error.InputError(
5810 _(
5810 _(
5811 b'default push revset for path evaluates to an empty set'
5811 b'default push revset for path evaluates to an empty set'
5812 )
5812 )
5813 )
5813 )
5814 elif ui.configbool(b'commands', b'push.require-revs'):
5814 elif ui.configbool(b'commands', b'push.require-revs'):
5815 raise error.InputError(
5815 raise error.InputError(
5816 _(b'no revisions specified to push'),
5816 _(b'no revisions specified to push'),
5817 hint=_(b'did you mean "hg push -r ."?'),
5817 hint=_(b'did you mean "hg push -r ."?'),
5818 )
5818 )
5819
5819
5820 repo._subtoppath = dest
5820 repo._subtoppath = dest
5821 try:
5821 try:
5822 # push subrepos depth-first for coherent ordering
5822 # push subrepos depth-first for coherent ordering
5823 c = repo[b'.']
5823 c = repo[b'.']
5824 subs = c.substate # only repos that are committed
5824 subs = c.substate # only repos that are committed
5825 for s in sorted(subs):
5825 for s in sorted(subs):
5826 sub_result = c.sub(s).push(opts)
5826 sub_result = c.sub(s).push(opts)
5827 if sub_result == 0:
5827 if sub_result == 0:
5828 return 1
5828 return 1
5829 finally:
5829 finally:
5830 del repo._subtoppath
5830 del repo._subtoppath
5831
5831
5832 opargs = dict(
5832 opargs = dict(
5833 opts.get(b'opargs', {})
5833 opts.get(b'opargs', {})
5834 ) # copy opargs since we may mutate it
5834 ) # copy opargs since we may mutate it
5835 opargs.setdefault(b'pushvars', []).extend(opts.get(b'pushvars', []))
5835 opargs.setdefault(b'pushvars', []).extend(opts.get(b'pushvars', []))
5836
5836
5837 pushop = exchange.push(
5837 pushop = exchange.push(
5838 repo,
5838 repo,
5839 other,
5839 other,
5840 opts.get(b'force'),
5840 opts.get(b'force'),
5841 revs=revs,
5841 revs=revs,
5842 newbranch=opts.get(b'new_branch'),
5842 newbranch=opts.get(b'new_branch'),
5843 bookmarks=opts.get(b'bookmark', ()),
5843 bookmarks=opts.get(b'bookmark', ()),
5844 publish=opts.get(b'publish'),
5844 publish=opts.get(b'publish'),
5845 opargs=opargs,
5845 opargs=opargs,
5846 )
5846 )
5847
5847
5848 if pushop.cgresult == 0:
5848 if pushop.cgresult == 0:
5849 result = 1
5849 result = 1
5850 elif pushop.cgresult is not None:
5850 elif pushop.cgresult is not None:
5851 some_pushed = True
5851 some_pushed = True
5852
5852
5853 if pushop.bkresult is not None:
5853 if pushop.bkresult is not None:
5854 if pushop.bkresult == 2:
5854 if pushop.bkresult == 2:
5855 result = 2
5855 result = 2
5856 elif not result and pushop.bkresult:
5856 elif not result and pushop.bkresult:
5857 result = 2
5857 result = 2
5858
5858
5859 if result:
5859 if result:
5860 break
5860 break
5861
5861
5862 finally:
5862 finally:
5863 other.close()
5863 other.close()
5864 if result == 0 and not some_pushed:
5864 if result == 0 and not some_pushed:
5865 result = 1
5865 result = 1
5866 return result
5866 return result
5867
5867
5868
5868
@command(
    b'recover',
    [
        (b'', b'verify', False, b"run `hg verify` after successful recover"),
    ],
    helpcategory=command.CATEGORY_MAINTENANCE,
)
def recover(ui, repo, **opts):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an
    interrupted operation. It should only be necessary when Mercurial
    suggests it.

    Returns 0 if successful, 1 if nothing to recover or verify fails.
    """
    # repo.recover() returns a falsy value when there was no interrupted
    # transaction to roll back; report that as exit status 1.
    if not repo.recover():
        return 1
    if opts['verify']:
        # the recover succeeded; the verify result becomes our exit status
        return hg.verify(repo)
    # verification was not requested: succeed, but remind the user that
    # it can still be run manually
    ui.warn(
        _(
            b"(verify step skipped, run `hg verify` to check your "
            b"repository content)\n"
        )
    )
    return 0
5899
5899
5900
5900
@command(
    b'remove|rm',
    [
        (b'A', b'after', None, _(b'record delete for missing files')),
        (b'f', b'force', None, _(b'forget added files, delete modified files')),
    ]
    + subrepoopts
    + walkopts
    + dryrunopts,
    _(b'[OPTION]... FILE...'),
    helpcategory=command.CATEGORY_WORKING_DIRECTORY,
    helpbasic=True,
    inferrepo=True,
)
def remove(ui, repo, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the current branch.

    This command schedules the files to be removed at the next commit.
    To undo a remove before that, see :hg:`revert`. To undo added
    files, see :hg:`forget`.

    .. container:: verbose

      -A/--after can be used to remove only files that have already
      been deleted, -f/--force can be used to force deletion, and -Af
      can be used to remove files from the next revision without
      deleting them from the working directory.

      The following table details the behavior of remove for different
      file states (columns) and option combinations (rows). The file
      states are Added [A], Clean [C], Modified [M] and Missing [!]
      (as reported by :hg:`status`). The actions are Warn, Remove
      (from branch) and Delete (from disk):

      ========= == == == ==
      opt/state A  C  M  !
      ========= == == == ==
      none      W  RD W  R
      -f        R  RD RD R
      -A        W  W  W  R
      -Af       R  R  R  R
      ========= == == == ==

      .. note::

         :hg:`remove` never deletes files in Added [A] state from the
         working directory, not even if ``--force`` is specified.

    Returns 0 on success, 1 if any warnings encountered.
    """

    opts = pycompat.byteskwargs(opts)
    after = opts.get(b'after')
    force = opts.get(b'force')
    dryrun = opts.get(b'dry_run')
    # without --after, at least one pattern is mandatory
    if not (pats or after):
        raise error.InputError(_(b'no files specified'))

    # the dirstate is mutated (files dropped), so take the wlock and open
    # a changing_files transaction around the whole operation
    with repo.wlock(), repo.dirstate.changing_files(repo):
        matcher = scmutil.match(repo[None], pats, opts)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        return cmdutil.remove(
            ui,
            repo,
            matcher,
            b"",
            uipathfn,
            after,
            force,
            opts.get(b'subrepos'),
            dryrun=dryrun,
        )
5967
5967
5968
5968
@command(
    b'rename|move|mv',
    [
        (b'', b'forget', None, _(b'unmark a destination file as renamed')),
        (b'A', b'after', None, _(b'record a rename that has already occurred')),
        (
            b'',
            b'at-rev',
            b'',
            _(b'(un)mark renames in the given revision (EXPERIMENTAL)'),
            _(b'REV'),
        ),
        (
            b'f',
            b'force',
            None,
            _(b'forcibly move over an existing managed file'),
        ),
    ]
    + walkopts
    + dryrunopts,
    _(b'[OPTION]... SOURCE... DEST'),
    helpcategory=command.CATEGORY_WORKING_DIRECTORY,
)
def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion. If dest
    is a directory, copies are put in that directory. If dest is a
    file, there can only be one source.

    By default, this command copies the contents of files as they
    exist in the working directory. If invoked with -A/--after, the
    operation is recorded, but no copying is performed.

    To undo marking a destination file as renamed, use --forget. With that
    option, all given (positional) arguments are unmarked as renames. The
    destination file(s) will be left in place (still tracked). The source
    file(s) will not be restored. Note that :hg:`rename --forget` behaves
    the same way as :hg:`copy --forget`.

    This command takes effect with the next commit by default.

    Returns 0 on success, 1 if errors are encountered.
    """
    opts = pycompat.byteskwargs(opts)
    # Defer the `repo.dirstate` attribute lookup until we hold the wlock:
    # eagerly binding `repo.dirstate.changing_files` here would capture the
    # current dirstate instance, which may be invalidated/replaced before the
    # lock is taken below, leading to a traceback.
    context = lambda repo: repo.dirstate.changing_files(repo)
    rev = opts.get(b'at_rev')
    ctx = None
    if rev:
        ctx = logcmdutil.revsingle(repo, rev)
        if ctx.rev() is not None:
            # operating on a historical revision does not touch the
            # dirstate, so no changing_files transaction is needed
            def context(repo):
                return util.nullcontextmanager()

            opts[b'at_rev'] = ctx.rev()
    with repo.wlock(), context(repo):
        return cmdutil.copy(ui, repo, pats, opts, rename=True)
6028
6028
6029
6029
6030 @command(
6030 @command(
6031 b'resolve',
6031 b'resolve',
6032 [
6032 [
6033 (b'a', b'all', None, _(b'select all unresolved files')),
6033 (b'a', b'all', None, _(b'select all unresolved files')),
6034 (b'l', b'list', None, _(b'list state of files needing merge')),
6034 (b'l', b'list', None, _(b'list state of files needing merge')),
6035 (b'm', b'mark', None, _(b'mark files as resolved')),
6035 (b'm', b'mark', None, _(b'mark files as resolved')),
6036 (b'u', b'unmark', None, _(b'mark files as unresolved')),
6036 (b'u', b'unmark', None, _(b'mark files as unresolved')),
6037 (b'n', b'no-status', None, _(b'hide status prefix')),
6037 (b'n', b'no-status', None, _(b'hide status prefix')),
6038 (b'', b're-merge', None, _(b're-merge files')),
6038 (b'', b're-merge', None, _(b're-merge files')),
6039 ]
6039 ]
6040 + mergetoolopts
6040 + mergetoolopts
6041 + walkopts
6041 + walkopts
6042 + formatteropts,
6042 + formatteropts,
6043 _(b'[OPTION]... [FILE]...'),
6043 _(b'[OPTION]... [FILE]...'),
6044 helpcategory=command.CATEGORY_WORKING_DIRECTORY,
6044 helpcategory=command.CATEGORY_WORKING_DIRECTORY,
6045 inferrepo=True,
6045 inferrepo=True,
6046 )
6046 )
6047 def resolve(ui, repo, *pats, **opts):
6047 def resolve(ui, repo, *pats, **opts):
6048 """redo merges or set/view the merge status of files
6048 """redo merges or set/view the merge status of files
6049
6049
6050 Merges with unresolved conflicts are often the result of
6050 Merges with unresolved conflicts are often the result of
6051 non-interactive merging using the ``internal:merge`` configuration
6051 non-interactive merging using the ``internal:merge`` configuration
6052 setting, or a command-line merge tool like ``diff3``. The resolve
6052 setting, or a command-line merge tool like ``diff3``. The resolve
6053 command is used to manage the files involved in a merge, after
6053 command is used to manage the files involved in a merge, after
6054 :hg:`merge` has been run, and before :hg:`commit` is run (i.e. the
6054 :hg:`merge` has been run, and before :hg:`commit` is run (i.e. the
6055 working directory must have two parents). See :hg:`help
6055 working directory must have two parents). See :hg:`help
6056 merge-tools` for information on configuring merge tools.
6056 merge-tools` for information on configuring merge tools.
6057
6057
6058 The resolve command can be used in the following ways:
6058 The resolve command can be used in the following ways:
6059
6059
6060 - :hg:`resolve [--re-merge] [--tool TOOL] FILE...`: attempt to re-merge
6060 - :hg:`resolve [--re-merge] [--tool TOOL] FILE...`: attempt to re-merge
6061 the specified files, discarding any previous merge attempts. Re-merging
6061 the specified files, discarding any previous merge attempts. Re-merging
6062 is not performed for files already marked as resolved. Use ``--all/-a``
6062 is not performed for files already marked as resolved. Use ``--all/-a``
6063 to select all unresolved files. ``--tool`` can be used to specify
6063 to select all unresolved files. ``--tool`` can be used to specify
6064 the merge tool used for the given files. It overrides the HGMERGE
6064 the merge tool used for the given files. It overrides the HGMERGE
6065 environment variable and your configuration files. Previous file
6065 environment variable and your configuration files. Previous file
6066 contents are saved with a ``.orig`` suffix.
6066 contents are saved with a ``.orig`` suffix.
6067
6067
6068 - :hg:`resolve -m [FILE]`: mark a file as having been resolved
6068 - :hg:`resolve -m [FILE]`: mark a file as having been resolved
6069 (e.g. after having manually fixed-up the files). The default is
6069 (e.g. after having manually fixed-up the files). The default is
6070 to mark all unresolved files.
6070 to mark all unresolved files.
6071
6071
6072 - :hg:`resolve -u [FILE]...`: mark a file as unresolved. The
6072 - :hg:`resolve -u [FILE]...`: mark a file as unresolved. The
6073 default is to mark all resolved files.
6073 default is to mark all resolved files.
6074
6074
6075 - :hg:`resolve -l`: list files which had or still have conflicts.
6075 - :hg:`resolve -l`: list files which had or still have conflicts.
6076 In the printed list, ``U`` = unresolved and ``R`` = resolved.
6076 In the printed list, ``U`` = unresolved and ``R`` = resolved.
6077 You can use ``set:unresolved()`` or ``set:resolved()`` to filter
6077 You can use ``set:unresolved()`` or ``set:resolved()`` to filter
6078 the list. See :hg:`help filesets` for details.
6078 the list. See :hg:`help filesets` for details.
6079
6079
6080 .. note::
6080 .. note::
6081
6081
6082 Mercurial will not let you commit files with unresolved merge
6082 Mercurial will not let you commit files with unresolved merge
6083 conflicts. You must use :hg:`resolve -m ...` before you can
6083 conflicts. You must use :hg:`resolve -m ...` before you can
6084 commit after a conflicting merge.
6084 commit after a conflicting merge.
6085
6085
6086 .. container:: verbose
6086 .. container:: verbose
6087
6087
6088 Template:
6088 Template:
6089
6089
6090 The following keywords are supported in addition to the common template
6090 The following keywords are supported in addition to the common template
6091 keywords and functions. See also :hg:`help templates`.
6091 keywords and functions. See also :hg:`help templates`.
6092
6092
6093 :mergestatus: String. Character denoting merge conflicts, ``U`` or ``R``.
6093 :mergestatus: String. Character denoting merge conflicts, ``U`` or ``R``.
6094 :path: String. Repository-absolute path of the file.
6094 :path: String. Repository-absolute path of the file.
6095
6095
6096 Returns 0 on success, 1 if any files fail a resolve attempt.
6096 Returns 0 on success, 1 if any files fail a resolve attempt.
6097 """
6097 """
6098
6098
6099 opts = pycompat.byteskwargs(opts)
6099 opts = pycompat.byteskwargs(opts)
6100 confirm = ui.configbool(b'commands', b'resolve.confirm')
6100 confirm = ui.configbool(b'commands', b'resolve.confirm')
6101 flaglist = b'all mark unmark list no_status re_merge'.split()
6101 flaglist = b'all mark unmark list no_status re_merge'.split()
6102 all, mark, unmark, show, nostatus, remerge = [opts.get(o) for o in flaglist]
6102 all, mark, unmark, show, nostatus, remerge = [opts.get(o) for o in flaglist]
6103
6103
6104 actioncount = len(list(filter(None, [show, mark, unmark, remerge])))
6104 actioncount = len(list(filter(None, [show, mark, unmark, remerge])))
6105 if actioncount > 1:
6105 if actioncount > 1:
6106 raise error.InputError(_(b"too many actions specified"))
6106 raise error.InputError(_(b"too many actions specified"))
6107 elif actioncount == 0 and ui.configbool(
6107 elif actioncount == 0 and ui.configbool(
6108 b'commands', b'resolve.explicit-re-merge'
6108 b'commands', b'resolve.explicit-re-merge'
6109 ):
6109 ):
6110 hint = _(b'use --mark, --unmark, --list or --re-merge')
6110 hint = _(b'use --mark, --unmark, --list or --re-merge')
6111 raise error.InputError(_(b'no action specified'), hint=hint)
6111 raise error.InputError(_(b'no action specified'), hint=hint)
6112 if pats and all:
6112 if pats and all:
6113 raise error.InputError(_(b"can't specify --all and patterns"))
6113 raise error.InputError(_(b"can't specify --all and patterns"))
6114 if not (all or pats or show or mark or unmark):
6114 if not (all or pats or show or mark or unmark):
6115 raise error.InputError(
6115 raise error.InputError(
6116 _(b'no files or directories specified'),
6116 _(b'no files or directories specified'),
6117 hint=b'use --all to re-merge all unresolved files',
6117 hint=b'use --all to re-merge all unresolved files',
6118 )
6118 )
6119
6119
6120 if confirm:
6120 if confirm:
6121 if all:
6121 if all:
6122 if ui.promptchoice(
6122 if ui.promptchoice(
6123 _(b're-merge all unresolved files (yn)?$$ &Yes $$ &No')
6123 _(b're-merge all unresolved files (yn)?$$ &Yes $$ &No')
6124 ):
6124 ):
6125 raise error.CanceledError(_(b'user quit'))
6125 raise error.CanceledError(_(b'user quit'))
6126 if mark and not pats:
6126 if mark and not pats:
6127 if ui.promptchoice(
6127 if ui.promptchoice(
6128 _(
6128 _(
6129 b'mark all unresolved files as resolved (yn)?'
6129 b'mark all unresolved files as resolved (yn)?'
6130 b'$$ &Yes $$ &No'
6130 b'$$ &Yes $$ &No'
6131 )
6131 )
6132 ):
6132 ):
6133 raise error.CanceledError(_(b'user quit'))
6133 raise error.CanceledError(_(b'user quit'))
6134 if unmark and not pats:
6134 if unmark and not pats:
6135 if ui.promptchoice(
6135 if ui.promptchoice(
6136 _(
6136 _(
6137 b'mark all resolved files as unresolved (yn)?'
6137 b'mark all resolved files as unresolved (yn)?'
6138 b'$$ &Yes $$ &No'
6138 b'$$ &Yes $$ &No'
6139 )
6139 )
6140 ):
6140 ):
6141 raise error.CanceledError(_(b'user quit'))
6141 raise error.CanceledError(_(b'user quit'))
6142
6142
6143 uipathfn = scmutil.getuipathfn(repo)
6143 uipathfn = scmutil.getuipathfn(repo)
6144
6144
6145 if show:
6145 if show:
6146 ui.pager(b'resolve')
6146 ui.pager(b'resolve')
6147 fm = ui.formatter(b'resolve', opts)
6147 fm = ui.formatter(b'resolve', opts)
6148 ms = mergestatemod.mergestate.read(repo)
6148 ms = mergestatemod.mergestate.read(repo)
6149 wctx = repo[None]
6149 wctx = repo[None]
6150 m = scmutil.match(wctx, pats, opts)
6150 m = scmutil.match(wctx, pats, opts)
6151
6151
6152 # Labels and keys based on merge state. Unresolved path conflicts show
6152 # Labels and keys based on merge state. Unresolved path conflicts show
6153 # as 'P'. Resolved path conflicts show as 'R', the same as normal
6153 # as 'P'. Resolved path conflicts show as 'R', the same as normal
6154 # resolved conflicts.
6154 # resolved conflicts.
6155 mergestateinfo = {
6155 mergestateinfo = {
6156 mergestatemod.MERGE_RECORD_UNRESOLVED: (
6156 mergestatemod.MERGE_RECORD_UNRESOLVED: (
6157 b'resolve.unresolved',
6157 b'resolve.unresolved',
6158 b'U',
6158 b'U',
6159 ),
6159 ),
6160 mergestatemod.MERGE_RECORD_RESOLVED: (b'resolve.resolved', b'R'),
6160 mergestatemod.MERGE_RECORD_RESOLVED: (b'resolve.resolved', b'R'),
6161 mergestatemod.MERGE_RECORD_UNRESOLVED_PATH: (
6161 mergestatemod.MERGE_RECORD_UNRESOLVED_PATH: (
6162 b'resolve.unresolved',
6162 b'resolve.unresolved',
6163 b'P',
6163 b'P',
6164 ),
6164 ),
6165 mergestatemod.MERGE_RECORD_RESOLVED_PATH: (
6165 mergestatemod.MERGE_RECORD_RESOLVED_PATH: (
6166 b'resolve.resolved',
6166 b'resolve.resolved',
6167 b'R',
6167 b'R',
6168 ),
6168 ),
6169 }
6169 }
6170
6170
6171 for f in ms:
6171 for f in ms:
6172 if not m(f):
6172 if not m(f):
6173 continue
6173 continue
6174
6174
6175 label, key = mergestateinfo[ms[f]]
6175 label, key = mergestateinfo[ms[f]]
6176 fm.startitem()
6176 fm.startitem()
6177 fm.context(ctx=wctx)
6177 fm.context(ctx=wctx)
6178 fm.condwrite(not nostatus, b'mergestatus', b'%s ', key, label=label)
6178 fm.condwrite(not nostatus, b'mergestatus', b'%s ', key, label=label)
6179 fm.data(path=f)
6179 fm.data(path=f)
6180 fm.plain(b'%s\n' % uipathfn(f), label=label)
6180 fm.plain(b'%s\n' % uipathfn(f), label=label)
6181 fm.end()
6181 fm.end()
6182 return 0
6182 return 0
6183
6183
6184 with repo.wlock():
6184 with repo.wlock():
6185 ms = mergestatemod.mergestate.read(repo)
6185 ms = mergestatemod.mergestate.read(repo)
6186
6186
6187 if not (ms.active() or repo.dirstate.p2() != repo.nullid):
6187 if not (ms.active() or repo.dirstate.p2() != repo.nullid):
6188 raise error.StateError(
6188 raise error.StateError(
6189 _(b'resolve command not applicable when not merging')
6189 _(b'resolve command not applicable when not merging')
6190 )
6190 )
6191
6191
6192 wctx = repo[None]
6192 wctx = repo[None]
6193 m = scmutil.match(wctx, pats, opts)
6193 m = scmutil.match(wctx, pats, opts)
6194 ret = 0
6194 ret = 0
6195 didwork = False
6195 didwork = False
6196
6196
6197 hasconflictmarkers = []
6197 hasconflictmarkers = []
6198 if mark:
6198 if mark:
6199 markcheck = ui.config(b'commands', b'resolve.mark-check')
6199 markcheck = ui.config(b'commands', b'resolve.mark-check')
6200 if markcheck not in [b'warn', b'abort']:
6200 if markcheck not in [b'warn', b'abort']:
6201 # Treat all invalid / unrecognized values as 'none'.
6201 # Treat all invalid / unrecognized values as 'none'.
6202 markcheck = False
6202 markcheck = False
6203 for f in ms:
6203 for f in ms:
6204 if not m(f):
6204 if not m(f):
6205 continue
6205 continue
6206
6206
6207 didwork = True
6207 didwork = True
6208
6208
6209 # path conflicts must be resolved manually
6209 # path conflicts must be resolved manually
6210 if ms[f] in (
6210 if ms[f] in (
6211 mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
6211 mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
6212 mergestatemod.MERGE_RECORD_RESOLVED_PATH,
6212 mergestatemod.MERGE_RECORD_RESOLVED_PATH,
6213 ):
6213 ):
6214 if mark:
6214 if mark:
6215 ms.mark(f, mergestatemod.MERGE_RECORD_RESOLVED_PATH)
6215 ms.mark(f, mergestatemod.MERGE_RECORD_RESOLVED_PATH)
6216 elif unmark:
6216 elif unmark:
6217 ms.mark(f, mergestatemod.MERGE_RECORD_UNRESOLVED_PATH)
6217 ms.mark(f, mergestatemod.MERGE_RECORD_UNRESOLVED_PATH)
6218 elif ms[f] == mergestatemod.MERGE_RECORD_UNRESOLVED_PATH:
6218 elif ms[f] == mergestatemod.MERGE_RECORD_UNRESOLVED_PATH:
6219 ui.warn(
6219 ui.warn(
6220 _(b'%s: path conflict must be resolved manually\n')
6220 _(b'%s: path conflict must be resolved manually\n')
6221 % uipathfn(f)
6221 % uipathfn(f)
6222 )
6222 )
6223 continue
6223 continue
6224
6224
6225 if mark:
6225 if mark:
6226 if markcheck:
6226 if markcheck:
6227 fdata = repo.wvfs.tryread(f)
6227 fdata = repo.wvfs.tryread(f)
6228 if (
6228 if (
6229 filemerge.hasconflictmarkers(fdata)
6229 filemerge.hasconflictmarkers(fdata)
6230 and ms[f] != mergestatemod.MERGE_RECORD_RESOLVED
6230 and ms[f] != mergestatemod.MERGE_RECORD_RESOLVED
6231 ):
6231 ):
6232 hasconflictmarkers.append(f)
6232 hasconflictmarkers.append(f)
6233 ms.mark(f, mergestatemod.MERGE_RECORD_RESOLVED)
6233 ms.mark(f, mergestatemod.MERGE_RECORD_RESOLVED)
6234 elif unmark:
6234 elif unmark:
6235 ms.mark(f, mergestatemod.MERGE_RECORD_UNRESOLVED)
6235 ms.mark(f, mergestatemod.MERGE_RECORD_UNRESOLVED)
6236 else:
6236 else:
6237 # backup pre-resolve (merge uses .orig for its own purposes)
6237 # backup pre-resolve (merge uses .orig for its own purposes)
6238 a = repo.wjoin(f)
6238 a = repo.wjoin(f)
6239 try:
6239 try:
6240 util.copyfile(a, a + b".resolve")
6240 util.copyfile(a, a + b".resolve")
6241 except FileNotFoundError:
6241 except FileNotFoundError:
6242 pass
6242 pass
6243
6243
6244 try:
6244 try:
6245 # preresolve file
6245 # preresolve file
6246 overrides = {(b'ui', b'forcemerge'): opts.get(b'tool', b'')}
6246 overrides = {(b'ui', b'forcemerge'): opts.get(b'tool', b'')}
6247 with ui.configoverride(overrides, b'resolve'):
6247 with ui.configoverride(overrides, b'resolve'):
6248 r = ms.resolve(f, wctx)
6248 r = ms.resolve(f, wctx)
6249 if r:
6249 if r:
6250 ret = 1
6250 ret = 1
6251 finally:
6251 finally:
6252 ms.commit()
6252 ms.commit()
6253
6253
6254 # replace filemerge's .orig file with our resolve file
6254 # replace filemerge's .orig file with our resolve file
6255 try:
6255 try:
6256 util.rename(
6256 util.rename(
6257 a + b".resolve", scmutil.backuppath(ui, repo, f)
6257 a + b".resolve", scmutil.backuppath(ui, repo, f)
6258 )
6258 )
6259 except FileNotFoundError:
6259 except FileNotFoundError:
6260 pass
6260 pass
6261
6261
6262 if hasconflictmarkers:
6262 if hasconflictmarkers:
6263 ui.warn(
6263 ui.warn(
6264 _(
6264 _(
6265 b'warning: the following files still have conflict '
6265 b'warning: the following files still have conflict '
6266 b'markers:\n'
6266 b'markers:\n'
6267 )
6267 )
6268 + b''.join(
6268 + b''.join(
6269 b' ' + uipathfn(f) + b'\n' for f in hasconflictmarkers
6269 b' ' + uipathfn(f) + b'\n' for f in hasconflictmarkers
6270 )
6270 )
6271 )
6271 )
6272 if markcheck == b'abort' and not all and not pats:
6272 if markcheck == b'abort' and not all and not pats:
6273 raise error.StateError(
6273 raise error.StateError(
6274 _(b'conflict markers detected'),
6274 _(b'conflict markers detected'),
6275 hint=_(b'use --all to mark anyway'),
6275 hint=_(b'use --all to mark anyway'),
6276 )
6276 )
6277
6277
6278 ms.commit()
6278 ms.commit()
6279 branchmerge = repo.dirstate.p2() != repo.nullid
6279 branchmerge = repo.dirstate.p2() != repo.nullid
6280 # resolve is not doing a parent change here, however, `record updates`
6280 # resolve is not doing a parent change here, however, `record updates`
6281 # will call some dirstate API that at intended for parent changes call.
6281 # will call some dirstate API that at intended for parent changes call.
6282 # Ideally we would not need this and could implement a lighter version
6282 # Ideally we would not need this and could implement a lighter version
6283 # of the recordupdateslogic that will not have to deal with the part
6283 # of the recordupdateslogic that will not have to deal with the part
6284 # related to parent changes. However this would requires that:
6284 # related to parent changes. However this would requires that:
6285 # - we are sure we passed around enough information at update/merge
6285 # - we are sure we passed around enough information at update/merge
6286 # time to no longer needs it at `hg resolve time`
6286 # time to no longer needs it at `hg resolve time`
6287 # - we are sure we store that information well enough to be able to reuse it
6287 # - we are sure we store that information well enough to be able to reuse it
6288 # - we are the necessary logic to reuse it right.
6288 # - we are the necessary logic to reuse it right.
6289 #
6289 #
6290 # All this should eventually happens, but in the mean time, we use this
6290 # All this should eventually happens, but in the mean time, we use this
6291 # context manager slightly out of the context it should be.
6291 # context manager slightly out of the context it should be.
6292 with repo.dirstate.changing_parents(repo):
6292 with repo.dirstate.changing_parents(repo):
6293 mergestatemod.recordupdates(repo, ms.actions(), branchmerge, None)
6293 mergestatemod.recordupdates(repo, ms.actions(), branchmerge, None)
6294
6294
6295 if not didwork and pats:
6295 if not didwork and pats:
6296 hint = None
6296 hint = None
6297 if not any([p for p in pats if p.find(b':') >= 0]):
6297 if not any([p for p in pats if p.find(b':') >= 0]):
6298 pats = [b'path:%s' % p for p in pats]
6298 pats = [b'path:%s' % p for p in pats]
6299 m = scmutil.match(wctx, pats, opts)
6299 m = scmutil.match(wctx, pats, opts)
6300 for f in ms:
6300 for f in ms:
6301 if not m(f):
6301 if not m(f):
6302 continue
6302 continue
6303
6303
6304 def flag(o):
6304 def flag(o):
6305 if o == b're_merge':
6305 if o == b're_merge':
6306 return b'--re-merge '
6306 return b'--re-merge '
6307 return b'-%s ' % o[0:1]
6307 return b'-%s ' % o[0:1]
6308
6308
6309 flags = b''.join([flag(o) for o in flaglist if opts.get(o)])
6309 flags = b''.join([flag(o) for o in flaglist if opts.get(o)])
6310 hint = _(b"(try: hg resolve %s%s)\n") % (
6310 hint = _(b"(try: hg resolve %s%s)\n") % (
6311 flags,
6311 flags,
6312 b' '.join(pats),
6312 b' '.join(pats),
6313 )
6313 )
6314 break
6314 break
6315 ui.warn(_(b"arguments do not match paths that need resolving\n"))
6315 ui.warn(_(b"arguments do not match paths that need resolving\n"))
6316 if hint:
6316 if hint:
6317 ui.warn(hint)
6317 ui.warn(hint)
6318
6318
6319 unresolvedf = ms.unresolvedcount()
6319 unresolvedf = ms.unresolvedcount()
6320 if not unresolvedf:
6320 if not unresolvedf:
6321 ui.status(_(b'(no more unresolved files)\n'))
6321 ui.status(_(b'(no more unresolved files)\n'))
6322 cmdutil.checkafterresolved(repo)
6322 cmdutil.checkafterresolved(repo)
6323
6323
6324 return ret
6324 return ret
6325
6325
6326
6326
@command(
    b'revert',
    [
        (b'a', b'all', None, _(b'revert all changes when no arguments given')),
        (b'd', b'date', b'', _(b'tipmost revision matching date'), _(b'DATE')),
        (b'r', b'rev', b'', _(b'revert to the specified revision'), _(b'REV')),
        (b'C', b'no-backup', None, _(b'do not save backup copies of files')),
        (b'i', b'interactive', None, _(b'interactively select the changes')),
    ]
    + walkopts
    + dryrunopts,
    _(b'[OPTION]... [-r REV] [NAME]...'),
    helpcategory=command.CATEGORY_WORKING_DIRECTORY,
)
def revert(ui, repo, *pats, **opts):
    """restore files to their checkout state

    .. note::

       To check out earlier revisions, you should use :hg:`update REV`.
       To cancel an uncommitted merge (and lose your changes),
       use :hg:`merge --abort`.

    With no revision specified, revert the specified files or directories
    to the contents they had in the parent of the working directory.
    This restores the contents of files to an unmodified
    state and unschedules adds, removes, copies, and renames. If the
    working directory has two parents, you must explicitly specify a
    revision.

    Using the -r/--rev or -d/--date options, revert the given files or
    directories to their states as of a specific revision. Because
    revert does not change the working directory parents, this will
    cause these files to appear modified. This can be helpful to "back
    out" some or all of an earlier change. See :hg:`backout` for a
    related method.

    Modified files are saved with a .orig suffix before reverting.
    To disable these backups, use --no-backup. It is possible to store
    the backup files in a custom directory relative to the root of the
    repository by setting the ``ui.origbackuppath`` configuration
    option.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    See :hg:`help backout` for a way to reverse the effect of an
    earlier changeset.

    Returns 0 on success.
    """

    opts = pycompat.byteskwargs(opts)
    # --date is resolved into an equivalent --rev; the two options are
    # mutually exclusive.
    if opts.get(b"date"):
        cmdutil.check_incompatible_arguments(opts, b'date', [b'rev'])
        opts[b"rev"] = cmdutil.finddate(ui, repo, opts[b"date"])

    parent, p2 = repo.dirstate.parents()
    if not opts.get(b'rev') and p2 != repo.nullid:
        # revert after merge is a trap for new users (issue2915)
        raise error.InputError(
            _(b'uncommitted merge with no revision specified'),
            hint=_(b"use 'hg update' or see 'hg help revert'"),
        )

    rev = opts.get(b'rev')
    if rev:
        # allow reverting to an obsolete/hidden revision named explicitly
        repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
    ctx = logcmdutil.revsingle(repo, rev)

    # Without any file selection (patterns, include/exclude, --all or
    # --interactive), refuse to run and compute the most helpful hint.
    if not (
        pats
        or opts.get(b'include')
        or opts.get(b'exclude')
        or opts.get(b'all')
        or opts.get(b'interactive')
    ):
        msg = _(b"no files or directories specified")
        if p2 != repo.nullid:
            # uncommitted merge: the only sensible bulk actions are
            # --all or aborting the merge entirely
            hint = _(
                b"uncommitted merge, use --all to discard all changes,"
                b" or 'hg update -C .' to abort the merge"
            )
            raise error.InputError(msg, hint=hint)
        dirty = any(repo.status())
        node = ctx.node()
        if node != parent:
            # reverting to a revision other than the working directory
            # parent: suggest 'hg update' as the likely intended command
            if dirty:
                hint = (
                    _(
                        b"uncommitted changes, use --all to discard all"
                        b" changes, or 'hg update %d' to update"
                    )
                    % ctx.rev()
                )
            else:
                hint = (
                    _(
                        b"use --all to revert all files,"
                        b" or 'hg update %d' to update"
                    )
                    % ctx.rev()
                )
        elif dirty:
            hint = _(b"uncommitted changes, use --all to discard all changes")
        else:
            hint = _(b"use --all to revert all files")
        raise error.InputError(msg, hint=hint)

    # all validation done; the actual work lives in cmdutil.revert()
    return cmdutil.revert(ui, repo, ctx, *pats, **pycompat.strkwargs(opts))
6436
6436
6437
6437
@command(
    b'rollback',
    dryrunopts + [(b'f', b'force', False, _(b'ignore safety measures'))],
    helpcategory=command.CATEGORY_MAINTENANCE,
)
def rollback(ui, repo, **opts):
    """roll back the last transaction (DANGEROUS) (DEPRECATED)

    Please use :hg:`commit --amend` instead of rollback to correct
    mistakes in the last commit.

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback. It will also
    restore the dirstate at the time of the last transaction, losing
    any dirstate changes since that time. This command does not alter
    the working directory.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository.

    .. container:: verbose

      For example, the following commands are transactional, and their
      effects can be rolled back:

      - commit
      - import
      - pull
      - push (with this repository as the destination)
      - unbundle

      To avoid permanent data loss, rollback will refuse to rollback a
      commit transaction if it isn't checked out. Use --force to
      override this protection.

      The rollback command can be entirely disabled by setting the
      ``ui.rollback`` configuration setting to false. If you're here
      because you want to use rollback and it's disabled, you can
      re-enable the command by setting ``ui.rollback`` to true.

    This command is not intended for use on public repositories. Once
    changes are visible for pull by other users, rolling a transaction
    back locally is ineffective (someone else may already have pulled
    the changes). Furthermore, a race is possible with readers of the
    repository; for example an in-progress pull from the repository
    may fail if a rollback is performed.

    Returns 0 on success, 1 if no rollback data is available.
    """
    # Rollback may only run when the (default-off escape hatch)
    # ``ui.rollback`` config knob is enabled; everything else is
    # delegated to the repository object.
    if ui.configbool(b'ui', b'rollback'):
        return repo.rollback(
            dryrun=opts.get('dry_run'), force=opts.get('force')
        )
    raise error.Abort(
        _(b'rollback is disabled because it is unsafe'),
        hint=b'see `hg help -v rollback` for information',
    )
6494
6494
6495
6495
@command(
    b'root',
    [] + formatteropts,
    intents={INTENT_READONLY},
    helpcategory=command.CATEGORY_WORKING_DIRECTORY,
)
def root(ui, repo, **opts):
    """print the root (top) of the current working directory

    Print the root directory of the current repository.

    .. container:: verbose

      Template:

      The following keywords are supported in addition to the common template
      keywords and functions. See also :hg:`help templates`.

      :hgpath: String. Path to the .hg directory.
      :storepath: String. Path to the directory holding versioned data.

    Returns 0 on success.
    """
    byteopts = pycompat.byteskwargs(opts)
    # Emit a single formatter item: the repo root as the main output,
    # with the .hg path and store path available as template data.
    with ui.formatter(b'root', byteopts) as fmt:
        fmt.startitem()
        fmt.write(b'reporoot', b'%s\n', repo.root)
        fmt.data(hgpath=repo.path, storepath=repo.spath)
6524
6524
6525
6525
@command(
    b'serve',
    [
        (
            b'A',
            b'accesslog',
            b'',
            _(b'name of access log file to write to'),
            _(b'FILE'),
        ),
        (b'd', b'daemon', None, _(b'run server in background')),
        (b'', b'daemon-postexec', [], _(b'used internally by daemon mode')),
        (
            b'E',
            b'errorlog',
            b'',
            _(b'name of error log file to write to'),
            _(b'FILE'),
        ),
        # use string type, then we can check if something was passed
        (
            b'p',
            b'port',
            b'',
            _(b'port to listen on (default: 8000)'),
            _(b'PORT'),
        ),
        (
            b'a',
            b'address',
            b'',
            _(b'address to listen on (default: all interfaces)'),
            _(b'ADDR'),
        ),
        (
            b'',
            b'prefix',
            b'',
            _(b'prefix path to serve from (default: server root)'),
            _(b'PREFIX'),
        ),
        (
            b'n',
            b'name',
            b'',
            _(b'name to show in web pages (default: working directory)'),
            _(b'NAME'),
        ),
        (
            b'',
            b'web-conf',
            b'',
            _(b"name of the hgweb config file (see 'hg help hgweb')"),
            _(b'FILE'),
        ),
        (
            b'',
            b'webdir-conf',
            b'',
            _(b'name of the hgweb config file (DEPRECATED)'),
            _(b'FILE'),
        ),
        (
            b'',
            b'pid-file',
            b'',
            _(b'name of file to write process ID to'),
            _(b'FILE'),
        ),
        (b'', b'stdio', None, _(b'for remote clients (ADVANCED)')),
        (
            b'',
            b'cmdserver',
            b'',
            _(b'for remote clients (ADVANCED)'),
            _(b'MODE'),
        ),
        (b't', b'templates', b'', _(b'web templates to use'), _(b'TEMPLATE')),
        (b'', b'style', b'', _(b'template style to use'), _(b'STYLE')),
        (b'6', b'ipv6', None, _(b'use IPv6 in addition to IPv4')),
        (b'', b'certificate', b'', _(b'SSL certificate file'), _(b'FILE')),
        (b'', b'print-url', None, _(b'start and print only the URL')),
    ]
    + subrepoopts,
    _(b'[OPTION]...'),
    helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT,
    helpbasic=True,
    optionalrepo=True,
)
def serve(ui, repo, **opts):
    """start stand-alone webserver

    Start a local HTTP repository browser and pull server. You can use
    this for ad-hoc sharing and browsing of repositories. It is
    recommended to use a real web server to serve a repository for
    longer periods of time.

    Please note that the server does not implement access control.
    This means that, by default, anybody can read from the server and
    nobody can write to it by default. Set the ``web.allow-push``
    option to ``*`` to allow everybody to push to the server. You
    should use a real web server if you need to authenticate users.

    By default, the server logs accesses to stdout and errors to
    stderr. Use the -A/--accesslog and -E/--errorlog options to log to
    files.

    To have the server choose a free port number to listen on, specify
    a port number of 0; in this case, the server will print the port
    number it uses.

    Returns 0 on success.
    """

    # --stdio and --cmdserver are alternative (non-HTTP) transports and
    # cannot be combined.
    cmdutil.check_incompatible_arguments(opts, 'stdio', ['cmdserver'])
    opts = pycompat.byteskwargs(opts)
    if opts[b"print_url"] and ui.verbose:
        # --print-url promises a single line of output, which --verbose
        # would break
        raise error.InputError(_(b"cannot use --print-url with --verbose"))

    if opts[b"stdio"]:
        # speak the wire protocol over stdin/stdout (used by ssh clients);
        # this mode requires an actual local repository
        if repo is None:
            raise error.RepoError(
                _(b"there is no Mercurial repository here (.hg not found)")
            )
        s = wireprotoserver.sshserver(ui, repo)
        s.serve_forever()
        return

    # normal case: build the HTTP (or command-server) service and run it,
    # honoring daemon-related options via server.runservice()
    service = server.createservice(ui, repo, opts)
    return server.runservice(opts, initfn=service.init, runfn=service.run)
6656
6656
6657
6657
@command(
    b'shelve',
    [
        (
            b'A',
            b'addremove',
            None,
            _(b'mark new/missing files as added/removed before shelving'),
        ),
        (b'u', b'unknown', None, _(b'store unknown files in the shelve')),
        (b'', b'cleanup', None, _(b'delete all shelved changes')),
        (
            b'',
            b'date',
            b'',
            _(b'shelve with the specified commit date'),
            _(b'DATE'),
        ),
        (b'd', b'delete', None, _(b'delete the named shelved change(s)')),
        (b'e', b'edit', False, _(b'invoke editor on commit messages')),
        (
            b'k',
            b'keep',
            False,
            _(b'shelve, but keep changes in the working directory'),
        ),
        (b'l', b'list', None, _(b'list current shelves')),
        (b'm', b'message', b'', _(b'use text as shelve message'), _(b'TEXT')),
        (
            b'n',
            b'name',
            b'',
            _(b'use the given name for the shelved commit'),
            _(b'NAME'),
        ),
        (
            b'p',
            b'patch',
            None,
            _(
                b'output patches for changes (provide the names of the shelved '
                b'changes as positional arguments)'
            ),
        ),
        (b'i', b'interactive', None, _(b'interactive mode')),
        (
            b'',
            b'stat',
            None,
            _(
                b'output diffstat-style summary of changes (provide the names of '
                b'the shelved changes as positional arguments)'
            ),
        ),
    ]
    + cmdutil.walkopts,
    _(b'hg shelve [OPTION]... [FILE]...'),
    helpcategory=command.CATEGORY_WORKING_DIRECTORY,
)
def shelve(ui, repo, *pats, **opts):
    """save and set aside changes from the working directory

    Shelving takes files that "hg status" reports as not clean, saves
    the modifications to a bundle (a shelved change), and reverts the
    files so that their state in the working directory becomes clean.

    To restore these changes to the working directory, using "hg
    unshelve"; this will work even if you switch to a different
    commit.

    When no files are specified, "hg shelve" saves all not-clean
    files. If specific files or directories are named, only changes to
    those files are shelved.

    In bare shelve (when no files are specified, without interactive,
    include and exclude option), shelving remembers information if the
    working directory was on newly created branch, in other words working
    directory was on different branch than its first parent. In this
    situation unshelving restores branch information to the working directory.

    Each shelved change has a name that makes it easier to find later.
    The name of a shelved change defaults to being based on the active
    bookmark, or if there is no active bookmark, the current named
    branch. To specify a different name, use ``--name``.

    To see a list of existing shelved changes, use the ``--list``
    option. For each shelved change, this will print its name, age,
    and description; use ``--patch`` or ``--stat`` for more details.

    To delete specific shelved changes, use ``--delete``. To delete
    all shelved changes, use ``--cleanup``.
    """
    opts = pycompat.byteskwargs(opts)
    # Each entry maps an option name to the set of shelve "actions"
    # (create/cleanup/delete/list/patch/stat) that option may be
    # combined with.  checkopt() below uses this table to reject
    # incompatible option mixes.
    allowables = [
        (b'addremove', {b'create'}),  # 'create' is pseudo action
        (b'unknown', {b'create'}),
        (b'cleanup', {b'cleanup'}),
        # ('date', {'create'}), # ignored for passing '--date "0 0"' in tests
        (b'delete', {b'delete'}),
        (b'edit', {b'create'}),
        (b'keep', {b'create'}),
        (b'list', {b'list'}),
        (b'message', {b'create'}),
        (b'name', {b'create'}),
        (b'patch', {b'patch', b'list'}),
        (b'stat', {b'stat', b'list'}),
    ]

    def checkopt(opt):
        # Return True when `opt` was given, after verifying that no
        # other given option belongs to an action group that excludes
        # `opt` (otherwise raise InputError).
        if opts.get(opt):
            for i, allowable in allowables:
                if opts[i] and opt not in allowable:
                    raise error.InputError(
                        _(
                            b"options '--%s' and '--%s' may not be "
                            b"used together"
                        )
                        % (opt, i)
                    )
            return True

    # Dispatch to the requested shelve sub-action; with no action
    # option given, fall through to creating a new shelve.
    if checkopt(b'cleanup'):
        if pats:
            raise error.InputError(
                _(b"cannot specify names when using '--cleanup'")
            )
        return shelvemod.cleanupcmd(ui, repo)
    elif checkopt(b'delete'):
        return shelvemod.deletecmd(ui, repo, pats)
    elif checkopt(b'list'):
        return shelvemod.listcmd(ui, repo, pats, opts)
    elif checkopt(b'patch') or checkopt(b'stat'):
        return shelvemod.patchcmds(ui, repo, pats, opts)
    else:
        return shelvemod.createcmd(ui, repo, pats, opts)
6793
6793
6794
6794
# Sentinel meaning "--terse was not passed on the command line"; lets `status`
# distinguish an explicit empty value from an absent flag (see the
# `terse is _NOTTERSE` check in `status`).
_NOTTERSE = b'nothing'
6796
6796
6797
6797
@command(
    b'status|st',
    [
        (b'A', b'all', None, _(b'show status of all files')),
        (b'm', b'modified', None, _(b'show only modified files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
        (b'd', b'deleted', None, _(b'show only missing files')),
        (b'c', b'clean', None, _(b'show only files without changes')),
        (b'u', b'unknown', None, _(b'show only unknown (not tracked) files')),
        (b'i', b'ignored', None, _(b'show only ignored files')),
        (b'n', b'no-status', None, _(b'hide status prefix')),
        (b't', b'terse', _NOTTERSE, _(b'show the terse output (EXPERIMENTAL)')),
        (
            b'C',
            b'copies',
            None,
            _(b'show source of copied files (DEFAULT: ui.statuscopies)'),
        ),
        (
            b'0',
            b'print0',
            None,
            _(b'end filenames with NUL, for use with xargs'),
        ),
        (b'', b'rev', [], _(b'show difference from revision'), _(b'REV')),
        (
            b'',
            b'change',
            b'',
            _(b'list the changed files of a revision'),
            _(b'REV'),
        ),
    ]
    + walkopts
    + subrepoopts
    + formatteropts,
    _(b'[OPTION]... [FILE]...'),
    helpcategory=command.CATEGORY_WORKING_DIRECTORY,
    helpbasic=True,
    inferrepo=True,
    intents={INTENT_READONLY},
)
def status(ui, repo, *pats, **opts):
    """show changed files in the working directory

    Show status of files in the repository. If names are given, only
    files that match are shown. Files that are clean or ignored or
    the source of a copy/move operation, are not listed unless
    -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
    Unless options described with "show only ..." are given, the
    options -mardu are used.

    Option -q/--quiet hides untracked (unknown and ignored) files
    unless explicitly requested with -u/--unknown or -i/--ignored.

    .. note::

       :hg:`status` may appear to disagree with diff if permissions have
       changed or a merge has occurred. The standard diff format does
       not report permission changes and diff only reports changes
       relative to one merge parent.

    If one revision is given, it is used as the base revision.
    If two revisions are given, the differences between them are
    shown. The --change option can also be used as a shortcut to list
    the changed files of a revision from its first parent.

    The codes used to show the status of files are::

      M = modified
      A = added
      R = removed
      C = clean
      ! = missing (deleted by non-hg command, but still tracked)
      ? = not tracked
      I = ignored
        = origin of the previous file (with --copies)

    .. container:: verbose

      The -t/--terse option abbreviates the output by showing only the directory
      name if all the files in it share the same status. The option takes an
      argument indicating the statuses to abbreviate: 'm' for 'modified', 'a'
      for 'added', 'r' for 'removed', 'd' for 'deleted', 'u' for 'unknown', 'i'
      for 'ignored' and 'c' for clean.

      It abbreviates only those statuses which are passed. Note that clean and
      ignored files are not displayed with '--terse ic' unless the -c/--clean
      and -i/--ignored options are also used.

      The -v/--verbose option shows information when the repository is in an
      unfinished merge, shelve, rebase state etc. You can have this behavior
      turned on by default by enabling the ``commands.status.verbose`` option.

      You can skip displaying some of these states by setting
      ``commands.status.skipstates`` to one or more of: 'bisect', 'graft',
      'histedit', 'merge', 'rebase', or 'unshelve'.

      Template:

      The following keywords are supported in addition to the common template
      keywords and functions. See also :hg:`help templates`.

      :path: String. Repository-absolute path of the file.
      :source: String. Repository-absolute path of the file originated from.
        Available if ``--copies`` is specified.
      :status: String. Character denoting file's status.

      Examples:

      - show changes in the working directory relative to a
        changeset::

          hg status --rev 9353

      - show changes in the working directory relative to the
        current directory (see :hg:`help patterns` for more information)::

          hg status re:

      - show all changes including copies in an existing changeset::

          hg status --copies --change 9353

      - get a NUL separated list of added files, suitable for xargs::

          hg status -an0

      - show more information about the repository status, abbreviating
        added, removed, modified, deleted, and untracked paths::

          hg status -v -t mardu

    Returns 0 on success.

    """

    cmdutil.check_at_most_one_arg(opts, 'rev', 'change')
    opts = pycompat.byteskwargs(opts)
    revs = opts.get(b'rev', [])
    change = opts.get(b'change', b'')
    terse = opts.get(b'terse', _NOTTERSE)
    # --terse falls back to the `commands.status.terse` config value only when
    # the flag was absent (sentinel check) AND no --rev was given; --rev forces
    # non-terse output because the two are incompatible (checked just below).
    if terse is _NOTTERSE:
        if revs:
            terse = b''
        else:
            terse = ui.config(b'commands', b'status.terse')

    if revs and terse:
        msg = _(b'cannot use --terse with --rev')
        raise error.InputError(msg)
    elif change:
        # --change REV compares REV against its first parent.
        repo = scmutil.unhidehashlikerevs(repo, [change], b'nowarn')
        ctx2 = logcmdutil.revsingle(repo, change, None)
        ctx1 = ctx2.p1()
    else:
        # Zero, one or two --rev values; revpair picks the comparison pair
        # (defaulting to parent-vs-working-directory when revs is empty).
        repo = scmutil.unhidehashlikerevs(repo, revs, b'nowarn')
        ctx1, ctx2 = logcmdutil.revpair(repo, revs)

    # An explicit `commands.status.relative` config overrides the legacy
    # behavior of printing paths relative to cwd only when patterns were given.
    forcerelativevalue = None
    if ui.hasconfig(b'commands', b'status.relative'):
        forcerelativevalue = ui.configbool(b'commands', b'status.relative')
    uipathfn = scmutil.getuipathfn(
        repo,
        legacyrelativevalue=bool(pats),
        forcerelativevalue=forcerelativevalue,
    )

    if opts.get(b'print0'):
        end = b'\0'
    else:
        end = b'\n'
    # Ordering of `states` must match the b'MAR!?IC' status characters zipped
    # with it below.
    states = b'modified added removed deleted unknown ignored clean'.split()
    show = [k for k in states if opts.get(k)]
    if opts.get(b'all'):
        show += ui.quiet and (states[:4] + [b'clean']) or states

    if not show:
        # Default selection: -mard when quiet, -mardu otherwise.
        if ui.quiet:
            show = states[:4]
        else:
            show = states[:5]

    m = scmutil.match(ctx2, pats, opts)
    if terse:
        # we need to compute clean and unknown to terse
        stat = repo.status(
            ctx1.node(),
            ctx2.node(),
            m,
            b'ignored' in show or b'i' in terse,
            clean=True,
            unknown=True,
            listsubrepos=opts.get(b'subrepos'),
        )

        stat = cmdutil.tersedir(stat, terse)
    else:
        stat = repo.status(
            ctx1.node(),
            ctx2.node(),
            m,
            b'ignored' in show,
            b'clean' in show,
            b'unknown' in show,
            opts.get(b'subrepos'),
        )

    # (state name, single-char status code, list of files in that state)
    changestates = zip(
        states,
        pycompat.iterbytestr(b'MAR!?IC'),
        [getattr(stat, s.decode('utf8')) for s in states],
    )

    # Copy/rename sources are looked up when enabled by -C/--copies, the
    # ui.statuscopies config, or --all — but never with --no-status, since
    # the source line would be indistinguishable from a file entry.
    copy = {}
    show_copies = ui.configbool(b'ui', b'statuscopies')
    if opts.get(b'copies') is not None:
        show_copies = opts.get(b'copies')
    show_copies = (show_copies or opts.get(b'all')) and not opts.get(
        b'no_status'
    )
    if show_copies:
        copy = copies.pathcopies(ctx1, ctx2, m)

    # Extra "unfinished state" info (merge/rebase/etc.) is only gathered for
    # verbose human-readable output: skipped under HGPLAIN and with --print0.
    morestatus = None
    if (
        (ui.verbose or ui.configbool(b'commands', b'status.verbose'))
        and not ui.plain()
        and not opts.get(b'print0')
    ):
        morestatus = cmdutil.readmorestatus(repo)

    ui.pager(b'status')
    fm = ui.formatter(b'status', opts)
    fmt = b'%s' + end
    showchar = not opts.get(b'no_status')

    for state, char, files in changestates:
        if state in show:
            label = b'status.' + state
            for f in files:
                fm.startitem()
                fm.context(ctx=ctx2)
                fm.data(itemtype=b'file', path=f)
                fm.condwrite(showchar, b'status', b'%s ', char, label=label)
                fm.plain(fmt % uipathfn(f), label=label)
                if f in copy:
                    # Emit the copy/rename source on its own (indented) line.
                    fm.data(source=copy[f])
                    fm.plain(
                        (b' %s' + end) % uipathfn(copy[f]),
                        label=b'status.copied',
                    )
                if morestatus:
                    morestatus.formatfile(f, fm)

    if morestatus:
        morestatus.formatfooter(fm)
    fm.end()
7057
7057
7058
7058
7059 @command(
7059 @command(
7060 b'summary|sum',
7060 b'summary|sum',
7061 [(b'', b'remote', None, _(b'check for push and pull'))],
7061 [(b'', b'remote', None, _(b'check for push and pull'))],
7062 b'[--remote]',
7062 b'[--remote]',
7063 helpcategory=command.CATEGORY_WORKING_DIRECTORY,
7063 helpcategory=command.CATEGORY_WORKING_DIRECTORY,
7064 helpbasic=True,
7064 helpbasic=True,
7065 intents={INTENT_READONLY},
7065 intents={INTENT_READONLY},
7066 )
7066 )
7067 def summary(ui, repo, **opts):
7067 def summary(ui, repo, **opts):
7068 """summarize working directory state
7068 """summarize working directory state
7069
7069
7070 This generates a brief summary of the working directory state,
7070 This generates a brief summary of the working directory state,
7071 including parents, branch, commit status, phase and available updates.
7071 including parents, branch, commit status, phase and available updates.
7072
7072
7073 With the --remote option, this will check the default paths for
7073 With the --remote option, this will check the default paths for
7074 incoming and outgoing changes. This can be time-consuming.
7074 incoming and outgoing changes. This can be time-consuming.
7075
7075
7076 Returns 0 on success.
7076 Returns 0 on success.
7077 """
7077 """
7078
7078
7079 opts = pycompat.byteskwargs(opts)
7079 opts = pycompat.byteskwargs(opts)
7080 ui.pager(b'summary')
7080 ui.pager(b'summary')
7081 ctx = repo[None]
7081 ctx = repo[None]
7082 parents = ctx.parents()
7082 parents = ctx.parents()
7083 pnode = parents[0].node()
7083 pnode = parents[0].node()
7084 marks = []
7084 marks = []
7085
7085
7086 try:
7086 try:
7087 ms = mergestatemod.mergestate.read(repo)
7087 ms = mergestatemod.mergestate.read(repo)
7088 except error.UnsupportedMergeRecords as e:
7088 except error.UnsupportedMergeRecords as e:
7089 s = b' '.join(e.recordtypes)
7089 s = b' '.join(e.recordtypes)
7090 ui.warn(
7090 ui.warn(
7091 _(b'warning: merge state has unsupported record types: %s\n') % s
7091 _(b'warning: merge state has unsupported record types: %s\n') % s
7092 )
7092 )
7093 unresolved = []
7093 unresolved = []
7094 else:
7094 else:
7095 unresolved = list(ms.unresolved())
7095 unresolved = list(ms.unresolved())
7096
7096
7097 for p in parents:
7097 for p in parents:
7098 # label with log.changeset (instead of log.parent) since this
7098 # label with log.changeset (instead of log.parent) since this
7099 # shows a working directory parent *changeset*:
7099 # shows a working directory parent *changeset*:
7100 # i18n: column positioning for "hg summary"
7100 # i18n: column positioning for "hg summary"
7101 ui.write(
7101 ui.write(
7102 _(b'parent: %d:%s ') % (p.rev(), p),
7102 _(b'parent: %d:%s ') % (p.rev(), p),
7103 label=logcmdutil.changesetlabels(p),
7103 label=logcmdutil.changesetlabels(p),
7104 )
7104 )
7105 ui.write(b' '.join(p.tags()), label=b'log.tag')
7105 ui.write(b' '.join(p.tags()), label=b'log.tag')
7106 if p.bookmarks():
7106 if p.bookmarks():
7107 marks.extend(p.bookmarks())
7107 marks.extend(p.bookmarks())
7108 if p.rev() == -1:
7108 if p.rev() == -1:
7109 if not len(repo):
7109 if not len(repo):
7110 ui.write(_(b' (empty repository)'))
7110 ui.write(_(b' (empty repository)'))
7111 else:
7111 else:
7112 ui.write(_(b' (no revision checked out)'))
7112 ui.write(_(b' (no revision checked out)'))
7113 if p.obsolete():
7113 if p.obsolete():
7114 ui.write(_(b' (obsolete)'))
7114 ui.write(_(b' (obsolete)'))
7115 if p.isunstable():
7115 if p.isunstable():
7116 instabilities = (
7116 instabilities = (
7117 ui.label(instability, b'trouble.%s' % instability)
7117 ui.label(instability, b'trouble.%s' % instability)
7118 for instability in p.instabilities()
7118 for instability in p.instabilities()
7119 )
7119 )
7120 ui.write(b' (' + b', '.join(instabilities) + b')')
7120 ui.write(b' (' + b', '.join(instabilities) + b')')
7121 ui.write(b'\n')
7121 ui.write(b'\n')
7122 if p.description():
7122 if p.description():
7123 ui.status(
7123 ui.status(
7124 b' ' + p.description().splitlines()[0].strip() + b'\n',
7124 b' ' + p.description().splitlines()[0].strip() + b'\n',
7125 label=b'log.summary',
7125 label=b'log.summary',
7126 )
7126 )
7127
7127
7128 branch = ctx.branch()
7128 branch = ctx.branch()
7129 bheads = repo.branchheads(branch)
7129 bheads = repo.branchheads(branch)
7130 # i18n: column positioning for "hg summary"
7130 # i18n: column positioning for "hg summary"
7131 m = _(b'branch: %s\n') % branch
7131 m = _(b'branch: %s\n') % branch
7132 if branch != b'default':
7132 if branch != b'default':
7133 ui.write(m, label=b'log.branch')
7133 ui.write(m, label=b'log.branch')
7134 else:
7134 else:
7135 ui.status(m, label=b'log.branch')
7135 ui.status(m, label=b'log.branch')
7136
7136
7137 if marks:
7137 if marks:
7138 active = repo._activebookmark
7138 active = repo._activebookmark
7139 # i18n: column positioning for "hg summary"
7139 # i18n: column positioning for "hg summary"
7140 ui.write(_(b'bookmarks:'), label=b'log.bookmark')
7140 ui.write(_(b'bookmarks:'), label=b'log.bookmark')
7141 if active is not None:
7141 if active is not None:
7142 if active in marks:
7142 if active in marks:
7143 ui.write(b' *' + active, label=bookmarks.activebookmarklabel)
7143 ui.write(b' *' + active, label=bookmarks.activebookmarklabel)
7144 marks.remove(active)
7144 marks.remove(active)
7145 else:
7145 else:
7146 ui.write(b' [%s]' % active, label=bookmarks.activebookmarklabel)
7146 ui.write(b' [%s]' % active, label=bookmarks.activebookmarklabel)
7147 for m in marks:
7147 for m in marks:
7148 ui.write(b' ' + m, label=b'log.bookmark')
7148 ui.write(b' ' + m, label=b'log.bookmark')
7149 ui.write(b'\n', label=b'log.bookmark')
7149 ui.write(b'\n', label=b'log.bookmark')
7150
7150
7151 status = repo.status(unknown=True)
7151 status = repo.status(unknown=True)
7152
7152
7153 c = repo.dirstate.copies()
7153 c = repo.dirstate.copies()
7154 copied, renamed = [], []
7154 copied, renamed = [], []
7155 for d, s in c.items():
7155 for d, s in c.items():
7156 if s in status.removed:
7156 if s in status.removed:
7157 status.removed.remove(s)
7157 status.removed.remove(s)
7158 renamed.append(d)
7158 renamed.append(d)
7159 else:
7159 else:
7160 copied.append(d)
7160 copied.append(d)
7161 if d in status.added:
7161 if d in status.added:
7162 status.added.remove(d)
7162 status.added.remove(d)
7163
7163
7164 subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
7164 subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
7165
7165
7166 labels = [
7166 labels = [
7167 (ui.label(_(b'%d modified'), b'status.modified'), status.modified),
7167 (ui.label(_(b'%d modified'), b'status.modified'), status.modified),
7168 (ui.label(_(b'%d added'), b'status.added'), status.added),
7168 (ui.label(_(b'%d added'), b'status.added'), status.added),
7169 (ui.label(_(b'%d removed'), b'status.removed'), status.removed),
7169 (ui.label(_(b'%d removed'), b'status.removed'), status.removed),
7170 (ui.label(_(b'%d renamed'), b'status.copied'), renamed),
7170 (ui.label(_(b'%d renamed'), b'status.copied'), renamed),
7171 (ui.label(_(b'%d copied'), b'status.copied'), copied),
7171 (ui.label(_(b'%d copied'), b'status.copied'), copied),
7172 (ui.label(_(b'%d deleted'), b'status.deleted'), status.deleted),
7172 (ui.label(_(b'%d deleted'), b'status.deleted'), status.deleted),
7173 (ui.label(_(b'%d unknown'), b'status.unknown'), status.unknown),
7173 (ui.label(_(b'%d unknown'), b'status.unknown'), status.unknown),
7174 (ui.label(_(b'%d unresolved'), b'resolve.unresolved'), unresolved),
7174 (ui.label(_(b'%d unresolved'), b'resolve.unresolved'), unresolved),
7175 (ui.label(_(b'%d subrepos'), b'status.modified'), subs),
7175 (ui.label(_(b'%d subrepos'), b'status.modified'), subs),
7176 ]
7176 ]
7177 t = []
7177 t = []
7178 for l, s in labels:
7178 for l, s in labels:
7179 if s:
7179 if s:
7180 t.append(l % len(s))
7180 t.append(l % len(s))
7181
7181
7182 t = b', '.join(t)
7182 t = b', '.join(t)
7183 cleanworkdir = False
7183 cleanworkdir = False
7184
7184
7185 if repo.vfs.exists(b'graftstate'):
7185 if repo.vfs.exists(b'graftstate'):
7186 t += _(b' (graft in progress)')
7186 t += _(b' (graft in progress)')
7187 if repo.vfs.exists(b'updatestate'):
7187 if repo.vfs.exists(b'updatestate'):
7188 t += _(b' (interrupted update)')
7188 t += _(b' (interrupted update)')
7189 elif len(parents) > 1:
7189 elif len(parents) > 1:
7190 t += _(b' (merge)')
7190 t += _(b' (merge)')
7191 elif branch != parents[0].branch():
7191 elif branch != parents[0].branch():
7192 t += _(b' (new branch)')
7192 t += _(b' (new branch)')
7193 elif parents[0].closesbranch() and pnode in repo.branchheads(
7193 elif parents[0].closesbranch() and pnode in repo.branchheads(
7194 branch, closed=True
7194 branch, closed=True
7195 ):
7195 ):
7196 t += _(b' (head closed)')
7196 t += _(b' (head closed)')
7197 elif not (
7197 elif not (
7198 status.modified
7198 status.modified
7199 or status.added
7199 or status.added
7200 or status.removed
7200 or status.removed
7201 or renamed
7201 or renamed
7202 or copied
7202 or copied
7203 or subs
7203 or subs
7204 ):
7204 ):
7205 t += _(b' (clean)')
7205 t += _(b' (clean)')
7206 cleanworkdir = True
7206 cleanworkdir = True
7207 elif pnode not in bheads:
7207 elif pnode not in bheads:
7208 t += _(b' (new branch head)')
7208 t += _(b' (new branch head)')
7209
7209
7210 if parents:
7210 if parents:
7211 pendingphase = max(p.phase() for p in parents)
7211 pendingphase = max(p.phase() for p in parents)
7212 else:
7212 else:
7213 pendingphase = phases.public
7213 pendingphase = phases.public
7214
7214
7215 if pendingphase > phases.newcommitphase(ui):
7215 if pendingphase > phases.newcommitphase(ui):
7216 t += b' (%s)' % phases.phasenames[pendingphase]
7216 t += b' (%s)' % phases.phasenames[pendingphase]
7217
7217
7218 if cleanworkdir:
7218 if cleanworkdir:
7219 # i18n: column positioning for "hg summary"
7219 # i18n: column positioning for "hg summary"
7220 ui.status(_(b'commit: %s\n') % t.strip())
7220 ui.status(_(b'commit: %s\n') % t.strip())
7221 else:
7221 else:
7222 # i18n: column positioning for "hg summary"
7222 # i18n: column positioning for "hg summary"
7223 ui.write(_(b'commit: %s\n') % t.strip())
7223 ui.write(_(b'commit: %s\n') % t.strip())
7224
7224
7225 # all ancestors of branch heads - all ancestors of parent = new csets
7225 # all ancestors of branch heads - all ancestors of parent = new csets
7226 new = len(
7226 new = len(
7227 repo.changelog.findmissing([pctx.node() for pctx in parents], bheads)
7227 repo.changelog.findmissing([pctx.node() for pctx in parents], bheads)
7228 )
7228 )
7229
7229
7230 if new == 0:
7230 if new == 0:
7231 # i18n: column positioning for "hg summary"
7231 # i18n: column positioning for "hg summary"
7232 ui.status(_(b'update: (current)\n'))
7232 ui.status(_(b'update: (current)\n'))
7233 elif pnode not in bheads:
7233 elif pnode not in bheads:
7234 # i18n: column positioning for "hg summary"
7234 # i18n: column positioning for "hg summary"
7235 ui.write(_(b'update: %d new changesets (update)\n') % new)
7235 ui.write(_(b'update: %d new changesets (update)\n') % new)
7236 else:
7236 else:
7237 # i18n: column positioning for "hg summary"
7237 # i18n: column positioning for "hg summary"
7238 ui.write(
7238 ui.write(
7239 _(b'update: %d new changesets, %d branch heads (merge)\n')
7239 _(b'update: %d new changesets, %d branch heads (merge)\n')
7240 % (new, len(bheads))
7240 % (new, len(bheads))
7241 )
7241 )
7242
7242
7243 t = []
7243 t = []
7244 draft = len(repo.revs(b'draft()'))
7244 draft = len(repo.revs(b'draft()'))
7245 if draft:
7245 if draft:
7246 t.append(_(b'%d draft') % draft)
7246 t.append(_(b'%d draft') % draft)
7247 secret = len(repo.revs(b'secret()'))
7247 secret = len(repo.revs(b'secret()'))
7248 if secret:
7248 if secret:
7249 t.append(_(b'%d secret') % secret)
7249 t.append(_(b'%d secret') % secret)
7250
7250
7251 if draft or secret:
7251 if draft or secret:
7252 ui.status(_(b'phases: %s\n') % b', '.join(t))
7252 ui.status(_(b'phases: %s\n') % b', '.join(t))
7253
7253
7254 if obsolete.isenabled(repo, obsolete.createmarkersopt):
7254 if obsolete.isenabled(repo, obsolete.createmarkersopt):
7255 for trouble in (b"orphan", b"contentdivergent", b"phasedivergent"):
7255 for trouble in (b"orphan", b"contentdivergent", b"phasedivergent"):
7256 numtrouble = len(repo.revs(trouble + b"()"))
7256 numtrouble = len(repo.revs(trouble + b"()"))
7257 # We write all the possibilities to ease translation
7257 # We write all the possibilities to ease translation
7258 troublemsg = {
7258 troublemsg = {
7259 b"orphan": _(b"orphan: %d changesets"),
7259 b"orphan": _(b"orphan: %d changesets"),
7260 b"contentdivergent": _(b"content-divergent: %d changesets"),
7260 b"contentdivergent": _(b"content-divergent: %d changesets"),
7261 b"phasedivergent": _(b"phase-divergent: %d changesets"),
7261 b"phasedivergent": _(b"phase-divergent: %d changesets"),
7262 }
7262 }
7263 if numtrouble > 0:
7263 if numtrouble > 0:
7264 ui.status(troublemsg[trouble] % numtrouble + b"\n")
7264 ui.status(troublemsg[trouble] % numtrouble + b"\n")
7265
7265
7266 cmdutil.summaryhooks(ui, repo)
7266 cmdutil.summaryhooks(ui, repo)
7267
7267
7268 if opts.get(b'remote'):
7268 if opts.get(b'remote'):
7269 needsincoming, needsoutgoing = True, True
7269 needsincoming, needsoutgoing = True, True
7270 else:
7270 else:
7271 needsincoming, needsoutgoing = False, False
7271 needsincoming, needsoutgoing = False, False
7272 for i, o in cmdutil.summaryremotehooks(ui, repo, opts, None):
7272 for i, o in cmdutil.summaryremotehooks(ui, repo, opts, None):
7273 if i:
7273 if i:
7274 needsincoming = True
7274 needsincoming = True
7275 if o:
7275 if o:
7276 needsoutgoing = True
7276 needsoutgoing = True
7277 if not needsincoming and not needsoutgoing:
7277 if not needsincoming and not needsoutgoing:
7278 return
7278 return
7279
7279
7280 def getincoming():
7280 def getincoming():
7281 # XXX We should actually skip this if no default is specified, instead
7281 # XXX We should actually skip this if no default is specified, instead
7282 # of passing "default" which will resolve as "./default/" if no default
7282 # of passing "default" which will resolve as "./default/" if no default
7283 # path is defined.
7283 # path is defined.
7284 path = urlutil.get_unique_pull_path_obj(b'summary', ui, b'default')
7284 path = urlutil.get_unique_pull_path_obj(b'summary', ui, b'default')
7285 sbranch = path.branch
7285 sbranch = path.branch
7286 try:
7286 try:
7287 other = hg.peer(repo, {}, path)
7287 other = hg.peer(repo, {}, path)
7288 except error.RepoError:
7288 except error.RepoError:
7289 if opts.get(b'remote'):
7289 if opts.get(b'remote'):
7290 raise
7290 raise
7291 return path.loc, sbranch, None, None, None
7291 return path.loc, sbranch, None, None, None
7292 branches = (path.branch, [])
7292 branches = (path.branch, [])
7293 revs, checkout = hg.addbranchrevs(repo, other, branches, None)
7293 revs, checkout = hg.addbranchrevs(repo, other, branches, None)
7294 if revs:
7294 if revs:
7295 revs = [other.lookup(rev) for rev in revs]
7295 revs = [other.lookup(rev) for rev in revs]
7296 ui.debug(b'comparing with %s\n' % urlutil.hidepassword(path.loc))
7296 ui.debug(b'comparing with %s\n' % urlutil.hidepassword(path.loc))
7297 with repo.ui.silent():
7297 with repo.ui.silent():
7298 commoninc = discovery.findcommonincoming(repo, other, heads=revs)
7298 commoninc = discovery.findcommonincoming(repo, other, heads=revs)
7299 return path.loc, sbranch, other, commoninc, commoninc[1]
7299 return path.loc, sbranch, other, commoninc, commoninc[1]
7300
7300
7301 if needsincoming:
7301 if needsincoming:
7302 source, sbranch, sother, commoninc, incoming = getincoming()
7302 source, sbranch, sother, commoninc, incoming = getincoming()
7303 else:
7303 else:
7304 source = sbranch = sother = commoninc = incoming = None
7304 source = sbranch = sother = commoninc = incoming = None
7305
7305
7306 def getoutgoing():
7306 def getoutgoing():
7307 # XXX We should actually skip this if no default is specified, instead
7307 # XXX We should actually skip this if no default is specified, instead
7308 # of passing "default" which will resolve as "./default/" if no default
7308 # of passing "default" which will resolve as "./default/" if no default
7309 # path is defined.
7309 # path is defined.
7310 d = None
7310 d = None
7311 if b'default-push' in ui.paths:
7311 if b'default-push' in ui.paths:
7312 d = b'default-push'
7312 d = b'default-push'
7313 elif b'default' in ui.paths:
7313 elif b'default' in ui.paths:
7314 d = b'default'
7314 d = b'default'
7315 path = None
7315 path = None
7316 if d is not None:
7316 if d is not None:
7317 path = urlutil.get_unique_push_path(b'summary', repo, ui, d)
7317 path = urlutil.get_unique_push_path(b'summary', repo, ui, d)
7318 dest = path.loc
7318 dest = path.loc
7319 dbranch = path.branch
7319 dbranch = path.branch
7320 else:
7320 else:
7321 dest = b'default'
7321 dest = b'default'
7322 dbranch = None
7322 dbranch = None
7323 revs, checkout = hg.addbranchrevs(repo, repo, (dbranch, []), None)
7323 revs, checkout = hg.addbranchrevs(repo, repo, (dbranch, []), None)
7324 if source != dest:
7324 if source != dest:
7325 try:
7325 try:
7326 dother = hg.peer(repo, {}, path if path is not None else dest)
7326 dother = hg.peer(repo, {}, path if path is not None else dest)
7327 except error.RepoError:
7327 except error.RepoError:
7328 if opts.get(b'remote'):
7328 if opts.get(b'remote'):
7329 raise
7329 raise
7330 return dest, dbranch, None, None
7330 return dest, dbranch, None, None
7331 ui.debug(b'comparing with %s\n' % urlutil.hidepassword(dest))
7331 ui.debug(b'comparing with %s\n' % urlutil.hidepassword(dest))
7332 elif sother is None:
7332 elif sother is None:
7333 # there is no explicit destination peer, but source one is invalid
7333 # there is no explicit destination peer, but source one is invalid
7334 return dest, dbranch, None, None
7334 return dest, dbranch, None, None
7335 else:
7335 else:
7336 dother = sother
7336 dother = sother
7337 if source != dest or (sbranch is not None and sbranch != dbranch):
7337 if source != dest or (sbranch is not None and sbranch != dbranch):
7338 common = None
7338 common = None
7339 else:
7339 else:
7340 common = commoninc
7340 common = commoninc
7341 if revs:
7341 if revs:
7342 revs = [repo.lookup(rev) for rev in revs]
7342 revs = [repo.lookup(rev) for rev in revs]
7343 with repo.ui.silent():
7343 with repo.ui.silent():
7344 outgoing = discovery.findcommonoutgoing(
7344 outgoing = discovery.findcommonoutgoing(
7345 repo, dother, onlyheads=revs, commoninc=common
7345 repo, dother, onlyheads=revs, commoninc=common
7346 )
7346 )
7347 return dest, dbranch, dother, outgoing
7347 return dest, dbranch, dother, outgoing
7348
7348
7349 if needsoutgoing:
7349 if needsoutgoing:
7350 dest, dbranch, dother, outgoing = getoutgoing()
7350 dest, dbranch, dother, outgoing = getoutgoing()
7351 else:
7351 else:
7352 dest = dbranch = dother = outgoing = None
7352 dest = dbranch = dother = outgoing = None
7353
7353
7354 if opts.get(b'remote'):
7354 if opts.get(b'remote'):
7355 # Help pytype. --remote sets both `needsincoming` and `needsoutgoing`.
7355 # Help pytype. --remote sets both `needsincoming` and `needsoutgoing`.
7356 # The former always sets `sother` (or raises an exception if it can't);
7356 # The former always sets `sother` (or raises an exception if it can't);
7357 # the latter always sets `outgoing`.
7357 # the latter always sets `outgoing`.
7358 assert sother is not None
7358 assert sother is not None
7359 assert outgoing is not None
7359 assert outgoing is not None
7360
7360
7361 t = []
7361 t = []
7362 if incoming:
7362 if incoming:
7363 t.append(_(b'1 or more incoming'))
7363 t.append(_(b'1 or more incoming'))
7364 o = outgoing.missing
7364 o = outgoing.missing
7365 if o:
7365 if o:
7366 t.append(_(b'%d outgoing') % len(o))
7366 t.append(_(b'%d outgoing') % len(o))
7367 other = dother or sother
7367 other = dother or sother
7368 if b'bookmarks' in other.listkeys(b'namespaces'):
7368 if b'bookmarks' in other.listkeys(b'namespaces'):
7369 counts = bookmarks.summary(repo, other)
7369 counts = bookmarks.summary(repo, other)
7370 if counts[0] > 0:
7370 if counts[0] > 0:
7371 t.append(_(b'%d incoming bookmarks') % counts[0])
7371 t.append(_(b'%d incoming bookmarks') % counts[0])
7372 if counts[1] > 0:
7372 if counts[1] > 0:
7373 t.append(_(b'%d outgoing bookmarks') % counts[1])
7373 t.append(_(b'%d outgoing bookmarks') % counts[1])
7374
7374
7375 if t:
7375 if t:
7376 # i18n: column positioning for "hg summary"
7376 # i18n: column positioning for "hg summary"
7377 ui.write(_(b'remote: %s\n') % (b', '.join(t)))
7377 ui.write(_(b'remote: %s\n') % (b', '.join(t)))
7378 else:
7378 else:
7379 # i18n: column positioning for "hg summary"
7379 # i18n: column positioning for "hg summary"
7380 ui.status(_(b'remote: (synced)\n'))
7380 ui.status(_(b'remote: (synced)\n'))
7381
7381
7382 cmdutil.summaryremotehooks(
7382 cmdutil.summaryremotehooks(
7383 ui,
7383 ui,
7384 repo,
7384 repo,
7385 opts,
7385 opts,
7386 (
7386 (
7387 (source, sbranch, sother, commoninc),
7387 (source, sbranch, sother, commoninc),
7388 (dest, dbranch, dother, outgoing),
7388 (dest, dbranch, dother, outgoing),
7389 ),
7389 ),
7390 )
7390 )
7391
7391
7392
7392
@command(
    b'tag',
    [
        (b'f', b'force', None, _(b'force tag')),
        (b'l', b'local', None, _(b'make the tag local')),
        (b'r', b'rev', b'', _(b'revision to tag'), _(b'REV')),
        (b'', b'remove', None, _(b'remove a tag')),
        # -l/--local is already there, commitopts cannot be used
        (b'e', b'edit', None, _(b'invoke editor on commit messages')),
        (b'm', b'message', b'', _(b'use text as commit message'), _(b'TEXT')),
    ]
    + commitopts2,
    _(b'[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...'),
    helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
)
def tag(ui, repo, name1, *names, **opts):
    """add one or more tags for the current or given revision

    Name a particular revision using <name>.

    Tags are used to name particular revisions of the repository and are
    very useful to compare different revisions, to go back to significant
    earlier versions or to mark branch points as releases, etc. Changing
    an existing tag is normally disallowed; use -f/--force to override.

    If no revision is given, the parent of the working directory is
    used.

    To facilitate version control, distribution, and merging of tags,
    they are stored as a file named ".hgtags" which is managed similarly
    to other project files and can be hand-edited if necessary. This
    also means that tagging creates a new commit. The file
    ".hg/localtags" is used for local tags (not shared among
    repositories).

    Tag commits are usually made at the head of a branch. If the parent
    of the working directory is not a branch head, :hg:`tag` aborts; use
    -f/--force to force the tag commit to be based on a non-head
    changeset.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    Since tag names have priority over branch names during revision
    lookup, using an existing branch name as a tag name is discouraged.

    Returns 0 on success.
    """
    # --remove and --rev are mutually exclusive: a removal always targets
    # the null revision (see `rev_ = b'null'` below).
    cmdutil.check_incompatible_arguments(opts, 'remove', ['rev'])
    opts = pycompat.byteskwargs(opts)
    # Tagging may write both the working copy (.hgtags) and create a
    # commit, so take the wlock and the store lock for the whole operation.
    with repo.wlock(), repo.lock():
        # default target: the working directory parent
        rev_ = b"."
        names = [t.strip() for t in (name1,) + names]
        if len(names) != len(set(names)):
            raise error.InputError(_(b'tag names must be unique'))
        for n in names:
            scmutil.checknewlabel(repo, n, b'tag')
            # after strip(), a whitespace-only name becomes empty
            if not n:
                raise error.InputError(
                    _(b'tag names cannot consist entirely of whitespace')
                )
        if opts.get(b'rev'):
            rev_ = opts[b'rev']
        message = opts.get(b'message')
        if opts.get(b'remove'):
            # --local selects which tag namespace the removal must match
            if opts.get(b'local'):
                expectedtype = b'local'
            else:
                expectedtype = b'global'

            for n in names:
                if repo.tagtype(n) == b'global':
                    alltags = tagsmod.findglobaltags(ui, repo)
                    # a global tag already pointing at nullid was already
                    # removed by a previous `hg tag --remove`
                    if alltags[n][0] == repo.nullid:
                        raise error.InputError(
                            _(b"tag '%s' is already removed") % n
                        )
                if not repo.tagtype(n):
                    raise error.InputError(_(b"tag '%s' does not exist") % n)
                if repo.tagtype(n) != expectedtype:
                    if expectedtype == b'global':
                        raise error.InputError(
                            _(b"tag '%s' is not a global tag") % n
                        )
                    else:
                        raise error.InputError(
                            _(b"tag '%s' is not a local tag") % n
                        )
            # removal is implemented as re-tagging to the null revision
            rev_ = b'null'
            if not message:
                # we don't translate commit messages
                message = b'Removed tag %s' % b', '.join(names)
        elif not opts.get(b'force'):
            for n in names:
                if n in repo.tags():
                    raise error.InputError(
                        _(b"tag '%s' already exists (use -f to force)") % n
                    )
        if not opts.get(b'local'):
            # a global tag creates a commit, which is refused on top of an
            # uncommitted merge or (without --force) a non-head parent
            p1, p2 = repo.dirstate.parents()
            if p2 != repo.nullid:
                raise error.StateError(_(b'uncommitted merge'))
            bheads = repo.branchheads()
            if not opts.get(b'force') and bheads and p1 not in bheads:
                raise error.InputError(
                    _(
                        b'working directory is not at a branch head '
                        b'(use -f to force)'
                    )
                )
        node = logcmdutil.revsingle(repo, rev_).node()

        # don't allow tagging the null rev or the working directory
        if node is None:
            raise error.InputError(_(b"cannot tag working directory"))
        elif not opts.get(b'remove') and node == nullid:
            raise error.InputError(_(b"cannot tag null revision"))

        if not message:
            # we don't translate commit messages
            message = b'Added tag %s for changeset %s' % (
                b', '.join(names),
                short(node),
            )

        date = opts.get(b'date')
        if date:
            date = dateutil.parsedate(date)

        # pick the commit-editor form so hooks/templates can distinguish
        # tag additions from tag removals
        if opts.get(b'remove'):
            editform = b'tag.remove'
        else:
            editform = b'tag.add'
        editor = cmdutil.getcommiteditor(
            editform=editform, **pycompat.strkwargs(opts)
        )

        tagsmod.tag(
            repo,
            names,
            node,
            message,
            opts.get(b'local'),
            opts.get(b'user'),
            date,
            editor=editor,
        )
7539
7539
7540
7540
@command(
    b'tags',
    formatteropts,
    b'',
    helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
    intents={INTENT_READONLY},
)
def tags(ui, repo, **opts):
    """list repository tags

    This lists both regular and local tags. When the -v/--verbose
    switch is used, a third column "local" is printed for local tags.
    When the -q/--quiet switch is used, only the tag name is printed.

    .. container:: verbose

      Template:

      The following keywords are supported in addition to the common template
      keywords and functions such as ``{tag}``. See also
      :hg:`help templates`.

      :type: String. ``local`` for local tags.

    Returns 0 on success.
    """

    opts = pycompat.byteskwargs(opts)
    ui.pager(b'tags')
    fm = ui.formatter(b'tags', opts)
    tohex = fm.hexfunc

    # walk tags from tip-most to oldest
    for tagname, tagnode in reversed(repo.tagslist()):
        tagtype = repo.tagtype(tagname)
        if tagtype and tagtype != b'global':
            # e.g. local tags get their own label for colorization
            label = b'tags.' + tagtype
        else:
            tagtype = b''
            label = b'tags.normal'

        fm.startitem()
        fm.context(repo=repo)
        fm.write(b'tag', b'%s', tagname, label=label)
        # pad the name to a 30-column field before the rev:node column
        padding = b" " * (30 - encoding.colwidth(tagname))
        fm.condwrite(
            not ui.quiet,
            b'rev node',
            padding + b' %5d:%s',
            repo.changelog.rev(tagnode),
            tohex(tagnode),
            label=label,
        )
        # the "type" column only shows up in verbose mode for non-global tags
        fm.condwrite(
            ui.verbose and tagtype, b'type', b' %s', tagtype, label=label
        )
        fm.plain(b'\n')
    fm.end()
7599
7599
7600
7600
@command(
    b'tip',
    [
        (b'p', b'patch', None, _(b'show patch')),
        (b'g', b'git', None, _(b'use git extended diff format')),
    ]
    + templateopts,
    _(b'[-p] [-g]'),
    helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
)
def tip(ui, repo, **opts):
    """show the tip revision (DEPRECATED)

    The tip revision (usually just called the tip) is the changeset
    most recently added to the repository (and therefore the most
    recently changed head).

    If you have just made a commit, that commit will be the tip. If
    you have just pulled changes from another repository, the tip of
    that repository becomes the current tip. The "tip" tag is special
    and cannot be renamed or assigned to a different changeset.

    This command is deprecated, please use :hg:`heads` instead.

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)
    # render the single changeset named by the special "tip" tag with the
    # standard log displayer, honoring --patch/--git/template options
    disp = logcmdutil.changesetdisplayer(ui, repo, opts)
    disp.show(repo[b'tip'])
    disp.close()
7631
7631
7632
7632
@command(
    b'unbundle',
    [
        (
            b'u',
            b'update',
            None,
            _(b'update to new branch head if changesets were unbundled'),
        )
    ],
    _(b'[-u] FILE...'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def unbundle(ui, repo, fname1, *fnames, **opts):
    """apply one or more bundle files

    Apply one or more bundle files generated by :hg:`bundle`.

    Returns 0 on success, 1 if an update has unresolved files.
    """
    # at least one file is required; fold the mandatory first name back
    # into a single tuple
    fnames = (fname1,) + fnames

    # all bundles are applied under one store lock, each in its own
    # transaction
    with repo.lock():
        for fname in fnames:
            f = hg.openpath(ui, fname)
            gen = exchange.readbundle(ui, f, fname)
            # stream-clone ("packed") bundles bypass the changegroup
            # machinery and cannot be applied here
            if isinstance(gen, streamclone.streamcloneapplier):
                raise error.InputError(
                    _(
                        b'packed bundles cannot be applied with '
                        b'"hg unbundle"'
                    ),
                    hint=_(b'use "hg debugapplystreamclonebundle"'),
                )
            url = b'bundle:' + fname
            try:
                txnname = b'unbundle'
                # bundle1 only: embed the (password-scrubbed) source URL in
                # the transaction name
                if not isinstance(gen, bundle2.unbundle20):
                    txnname = b'unbundle\n%s' % urlutil.hidepassword(url)
                with repo.transaction(txnname) as tr:
                    op = bundle2.applybundle(
                        repo, gen, tr, source=b'unbundle', url=url
                    )
            except error.BundleUnknownFeatureError as exc:
                raise error.Abort(
                    _(b'%s: unknown bundle feature, %s') % (fname, exc),
                    hint=_(
                        b"see https://mercurial-scm.org/"
                        b"wiki/BundleFeature for more "
                        b"information"
                    ),
                )
            # modheads from the last applied bundle drives the post-pull
            # reporting/update below
            modheads = bundle2.combinechangegroupresults(op)

    # postincoming returns a truthy value when the requested update left
    # unresolved files; map that to exit code 1
    if postincoming(ui, repo, modheads, opts.get('update'), None, None):
        return 1
    else:
        return 0
7691
7691
7692
7692
@command(
    b'unshelve',
    [
        (b'a', b'abort', None, _(b'abort an incomplete unshelve operation')),
        (
            b'c',
            b'continue',
            None,
            _(b'continue an incomplete unshelve operation'),
        ),
        (b'i', b'interactive', None, _(b'use interactive mode (EXPERIMENTAL)')),
        (b'k', b'keep', None, _(b'keep shelve after unshelving')),
        (
            b'n',
            b'name',
            b'',
            _(b'restore shelved change with given name'),
            _(b'NAME'),
        ),
        (b't', b'tool', b'', _(b'specify merge tool')),
        (
            b'',
            b'date',
            b'',
            _(b'set date for temporary commits (DEPRECATED)'),
            _(b'DATE'),
        ),
    ],
    _(b'hg unshelve [OPTION]... [[-n] SHELVED]'),
    helpcategory=command.CATEGORY_WORKING_DIRECTORY,
)
def unshelve(ui, repo, *shelved, **opts):
    """restore a shelved change to the working directory

    This command accepts an optional name of a shelved change to
    restore. If none is given, the most recent shelved change is used.

    If a shelved change is applied successfully, the bundle that
    contains the shelved changes is moved to a backup location
    (.hg/shelve-backup).

    Since you can restore a shelved change on top of an arbitrary
    commit, it is possible that unshelving will result in a conflict
    between your changes and the commits you are unshelving onto. If
    this occurs, you must resolve the conflict, then use
    ``--continue`` to complete the unshelve operation. (The bundle
    will not be moved until you successfully complete the unshelve.)

    (Alternatively, you can use ``--abort`` to abandon an unshelve
    that causes a conflict. This reverts the unshelved changes, and
    leaves the bundle in place.)

    If bare shelved change (without interactive, include and exclude
    option) was done on newly created branch it would restore branch
    information to the working directory.

    After a successful unshelve, the shelved changes are stored in a
    backup directory. Only the N most recent backups are kept. N
    defaults to 10 but can be overridden using the ``shelve.maxbackups``
    configuration option.

    .. container:: verbose

       Timestamp in seconds is used to decide order of backups. More
       than ``maxbackups`` backups are kept, if same timestamp
       prevents from deciding exact order of them, for safety.

       Selected changes can be unshelved with ``--interactive`` flag.
       The working directory is updated with the selected changes, and
       only the unselected changes remain shelved.
       Note: The whole shelve is applied to working directory first before
       running interactively. So, this will bring up all the conflicts between
       working directory and the shelve, irrespective of which changes will be
       unshelved.
    """
    # unshelving rewrites the working directory, so hold the wlock for the
    # entire delegated operation
    wlock = repo.wlock()
    try:
        return shelvemod.unshelvecmd(ui, repo, *shelved, **opts)
    finally:
        wlock.release()
7770
7770
7771
7771
# Register unshelve as a resumable/abortable "unfinished" operation: while
# the state file 'shelvedstate' exists, other commands report that an
# unshelve is in progress, and `hg continue` / `hg abort` dispatch to the
# shelve module's continue/abort handlers.
statemod.addunfinished(
    b'unshelve',
    fname=b'shelvedstate',
    continueflag=True,
    abortfunc=shelvemod.hgabortunshelve,
    continuefunc=shelvemod.hgcontinueunshelve,
    cmdmsg=_(b'unshelve already in progress'),
)
7780
7780
7781
7781
7782 @command(
7782 @command(
7783 b'update|up|checkout|co',
7783 b'update|up|checkout|co',
7784 [
7784 [
7785 (b'C', b'clean', None, _(b'discard uncommitted changes (no backup)')),
7785 (b'C', b'clean', None, _(b'discard uncommitted changes (no backup)')),
7786 (b'c', b'check', None, _(b'require clean working directory')),
7786 (b'c', b'check', None, _(b'require clean working directory')),
7787 (b'm', b'merge', None, _(b'merge uncommitted changes')),
7787 (b'm', b'merge', None, _(b'merge uncommitted changes')),
7788 (b'd', b'date', b'', _(b'tipmost revision matching date'), _(b'DATE')),
7788 (b'd', b'date', b'', _(b'tipmost revision matching date'), _(b'DATE')),
7789 (b'r', b'rev', b'', _(b'revision'), _(b'REV')),
7789 (b'r', b'rev', b'', _(b'revision'), _(b'REV')),
7790 ]
7790 ]
7791 + mergetoolopts,
7791 + mergetoolopts,
7792 _(b'[-C|-c|-m] [-d DATE] [[-r] REV]'),
7792 _(b'[-C|-c|-m] [-d DATE] [[-r] REV]'),
7793 helpcategory=command.CATEGORY_WORKING_DIRECTORY,
7793 helpcategory=command.CATEGORY_WORKING_DIRECTORY,
7794 helpbasic=True,
7794 helpbasic=True,
7795 )
7795 )
7796 def update(ui, repo, node=None, **opts):
7796 def update(ui, repo, node=None, **opts):
7797 """update working directory (or switch revisions)
7797 """update working directory (or switch revisions)
7798
7798
7799 Update the repository's working directory to the specified
7799 Update the repository's working directory to the specified
7800 changeset. If no changeset is specified, update to the tip of the
7800 changeset. If no changeset is specified, update to the tip of the
7801 current named branch and move the active bookmark (see :hg:`help
7801 current named branch and move the active bookmark (see :hg:`help
7802 bookmarks`).
7802 bookmarks`).
7803
7803
7804 Update sets the working directory's parent revision to the specified
7804 Update sets the working directory's parent revision to the specified
7805 changeset (see :hg:`help parents`).
7805 changeset (see :hg:`help parents`).
7806
7806
7807 If the changeset is not a descendant or ancestor of the working
7807 If the changeset is not a descendant or ancestor of the working
7808 directory's parent and there are uncommitted changes, the update is
7808 directory's parent and there are uncommitted changes, the update is
7809 aborted. With the -c/--check option, the working directory is checked
7809 aborted. With the -c/--check option, the working directory is checked
7810 for uncommitted changes; if none are found, the working directory is
7810 for uncommitted changes; if none are found, the working directory is
7811 updated to the specified changeset.
7811 updated to the specified changeset.
7812
7812
7813 .. container:: verbose
7813 .. container:: verbose
7814
7814
7815 The -C/--clean, -c/--check, and -m/--merge options control what
7815 The -C/--clean, -c/--check, and -m/--merge options control what
7816 happens if the working directory contains uncommitted changes.
7816 happens if the working directory contains uncommitted changes.
7817 At most of one of them can be specified.
7817 At most of one of them can be specified.
7818
7818
7819 1. If no option is specified, and if
7819 1. If no option is specified, and if
7820 the requested changeset is an ancestor or descendant of
7820 the requested changeset is an ancestor or descendant of
7821 the working directory's parent, the uncommitted changes
7821 the working directory's parent, the uncommitted changes
7822 are merged into the requested changeset and the merged
7822 are merged into the requested changeset and the merged
7823 result is left uncommitted. If the requested changeset is
7823 result is left uncommitted. If the requested changeset is
7824 not an ancestor or descendant (that is, it is on another
7824 not an ancestor or descendant (that is, it is on another
7825 branch), the update is aborted and the uncommitted changes
7825 branch), the update is aborted and the uncommitted changes
7826 are preserved.
7826 are preserved.
7827
7827
7828 2. With the -m/--merge option, the update is allowed even if the
7828 2. With the -m/--merge option, the update is allowed even if the
7829 requested changeset is not an ancestor or descendant of
7829 requested changeset is not an ancestor or descendant of
7830 the working directory's parent.
7830 the working directory's parent.
7831
7831
7832 3. With the -c/--check option, the update is aborted and the
7832 3. With the -c/--check option, the update is aborted and the
7833 uncommitted changes are preserved.
7833 uncommitted changes are preserved.
7834
7834
7835 4. With the -C/--clean option, uncommitted changes are discarded and
7835 4. With the -C/--clean option, uncommitted changes are discarded and
7836 the working directory is updated to the requested changeset.
7836 the working directory is updated to the requested changeset.
7837
7837
7838 To cancel an uncommitted merge (and lose your changes), use
7838 To cancel an uncommitted merge (and lose your changes), use
7839 :hg:`merge --abort`.
7839 :hg:`merge --abort`.
7840
7840
7841 Use null as the changeset to remove the working directory (like
7841 Use null as the changeset to remove the working directory (like
7842 :hg:`clone -U`).
7842 :hg:`clone -U`).
7843
7843
7844 If you want to revert just one file to an older revision, use
7844 If you want to revert just one file to an older revision, use
7845 :hg:`revert [-r REV] NAME`.
7845 :hg:`revert [-r REV] NAME`.
7846
7846
7847 See :hg:`help dates` for a list of formats valid for -d/--date.
7847 See :hg:`help dates` for a list of formats valid for -d/--date.
7848
7848
7849 Returns 0 on success, 1 if there are unresolved files.
7849 Returns 0 on success, 1 if there are unresolved files.
7850 """
7850 """
7851 cmdutil.check_at_most_one_arg(opts, 'clean', 'check', 'merge')
7851 cmdutil.check_at_most_one_arg(opts, 'clean', 'check', 'merge')
7852 rev = opts.get('rev')
7852 rev = opts.get('rev')
7853 date = opts.get('date')
7853 date = opts.get('date')
7854 clean = opts.get('clean')
7854 clean = opts.get('clean')
7855 check = opts.get('check')
7855 check = opts.get('check')
7856 merge = opts.get('merge')
7856 merge = opts.get('merge')
7857 if rev and node:
7857 if rev and node:
7858 raise error.InputError(_(b"please specify just one revision"))
7858 raise error.InputError(_(b"please specify just one revision"))
7859
7859
7860 if ui.configbool(b'commands', b'update.requiredest'):
7860 if ui.configbool(b'commands', b'update.requiredest'):
7861 if not node and not rev and not date:
7861 if not node and not rev and not date:
7862 raise error.InputError(
7862 raise error.InputError(
7863 _(b'you must specify a destination'),
7863 _(b'you must specify a destination'),
7864 hint=_(b'for example: hg update ".::"'),
7864 hint=_(b'for example: hg update ".::"'),
7865 )
7865 )
7866
7866
7867 if rev is None or rev == b'':
7867 if rev is None or rev == b'':
7868 rev = node
7868 rev = node
7869
7869
7870 if date and rev is not None:
7870 if date and rev is not None:
7871 raise error.InputError(_(b"you can't specify a revision and a date"))
7871 raise error.InputError(_(b"you can't specify a revision and a date"))
7872
7872
7873 updatecheck = None
7873 updatecheck = None
7874 if check or merge is not None and not merge:
7874 if check or merge is not None and not merge:
7875 updatecheck = b'abort'
7875 updatecheck = b'abort'
7876 elif merge or check is not None and not check:
7876 elif merge or check is not None and not check:
7877 updatecheck = b'none'
7877 updatecheck = b'none'
7878
7878
7879 with repo.wlock():
7879 with repo.wlock():
7880 cmdutil.clearunfinished(repo)
7880 cmdutil.clearunfinished(repo)
7881 if date:
7881 if date:
7882 rev = cmdutil.finddate(ui, repo, date)
7882 rev = cmdutil.finddate(ui, repo, date)
7883
7883
7884 # if we defined a bookmark, we have to remember the original name
7884 # if we defined a bookmark, we have to remember the original name
7885 brev = rev
7885 brev = rev
7886 if rev:
7886 if rev:
7887 repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
7887 repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
7888 ctx = logcmdutil.revsingle(repo, rev, default=None)
7888 ctx = logcmdutil.revsingle(repo, rev, default=None)
7889 rev = ctx.rev()
7889 rev = ctx.rev()
7890 hidden = ctx.hidden()
7890 hidden = ctx.hidden()
7891 overrides = {(b'ui', b'forcemerge'): opts.get('tool', b'')}
7891 overrides = {(b'ui', b'forcemerge'): opts.get('tool', b'')}
7892 with ui.configoverride(overrides, b'update'):
7892 with ui.configoverride(overrides, b'update'):
7893 ret = hg.updatetotally(
7893 ret = hg.updatetotally(
7894 ui, repo, rev, brev, clean=clean, updatecheck=updatecheck
7894 ui, repo, rev, brev, clean=clean, updatecheck=updatecheck
7895 )
7895 )
7896 if hidden:
7896 if hidden:
7897 ctxstr = ctx.hex()[:12]
7897 ctxstr = ctx.hex()[:12]
7898 ui.warn(_(b"updated to hidden changeset %s\n") % ctxstr)
7898 ui.warn(_(b"updated to hidden changeset %s\n") % ctxstr)
7899
7899
7900 if ctx.obsolete():
7900 if ctx.obsolete():
7901 obsfatemsg = obsutil._getfilteredreason(repo, ctxstr, ctx)
7901 obsfatemsg = obsutil._getfilteredreason(repo, ctxstr, ctx)
7902 ui.warn(b"(%s)\n" % obsfatemsg)
7902 ui.warn(b"(%s)\n" % obsfatemsg)
7903 return ret
7903 return ret
7904
7904
7905
7905
7906 @command(
7906 @command(
7907 b'verify',
7907 b'verify',
7908 [(b'', b'full', False, b'perform more checks (EXPERIMENTAL)')],
7908 [(b'', b'full', False, b'perform more checks (EXPERIMENTAL)')],
7909 helpcategory=command.CATEGORY_MAINTENANCE,
7909 helpcategory=command.CATEGORY_MAINTENANCE,
7910 )
7910 )
7911 def verify(ui, repo, **opts):
7911 def verify(ui, repo, **opts):
7912 """verify the integrity of the repository
7912 """verify the integrity of the repository
7913
7913
7914 Verify the integrity of the current repository.
7914 Verify the integrity of the current repository.
7915
7915
7916 This will perform an extensive check of the repository's
7916 This will perform an extensive check of the repository's
7917 integrity, validating the hashes and checksums of each entry in
7917 integrity, validating the hashes and checksums of each entry in
7918 the changelog, manifest, and tracked files, as well as the
7918 the changelog, manifest, and tracked files, as well as the
7919 integrity of their crosslinks and indices.
7919 integrity of their crosslinks and indices.
7920
7920
7921 Please see https://mercurial-scm.org/wiki/RepositoryCorruption
7921 Please see https://mercurial-scm.org/wiki/RepositoryCorruption
7922 for more information about recovery from corruption of the
7922 for more information about recovery from corruption of the
7923 repository.
7923 repository.
7924
7924
7925 Returns 0 on success, 1 if errors are encountered.
7925 Returns 0 on success, 1 if errors are encountered.
7926 """
7926 """
7927 opts = pycompat.byteskwargs(opts)
7927 opts = pycompat.byteskwargs(opts)
7928
7928
7929 level = None
7929 level = None
7930 if opts[b'full']:
7930 if opts[b'full']:
7931 level = verifymod.VERIFY_FULL
7931 level = verifymod.VERIFY_FULL
7932 return hg.verify(repo, level)
7932 return hg.verify(repo, level)
7933
7933
7934
7934
7935 @command(
7935 @command(
7936 b'version',
7936 b'version',
7937 [] + formatteropts,
7937 [] + formatteropts,
7938 helpcategory=command.CATEGORY_HELP,
7938 helpcategory=command.CATEGORY_HELP,
7939 norepo=True,
7939 norepo=True,
7940 intents={INTENT_READONLY},
7940 intents={INTENT_READONLY},
7941 )
7941 )
7942 def version_(ui, **opts):
7942 def version_(ui, **opts):
7943 """output version and copyright information
7943 """output version and copyright information
7944
7944
7945 .. container:: verbose
7945 .. container:: verbose
7946
7946
7947 Template:
7947 Template:
7948
7948
7949 The following keywords are supported. See also :hg:`help templates`.
7949 The following keywords are supported. See also :hg:`help templates`.
7950
7950
7951 :extensions: List of extensions.
7951 :extensions: List of extensions.
7952 :ver: String. Version number.
7952 :ver: String. Version number.
7953
7953
7954 And each entry of ``{extensions}`` provides the following sub-keywords
7954 And each entry of ``{extensions}`` provides the following sub-keywords
7955 in addition to ``{ver}``.
7955 in addition to ``{ver}``.
7956
7956
7957 :bundled: Boolean. True if included in the release.
7957 :bundled: Boolean. True if included in the release.
7958 :name: String. Extension name.
7958 :name: String. Extension name.
7959 """
7959 """
7960 opts = pycompat.byteskwargs(opts)
7960 opts = pycompat.byteskwargs(opts)
7961 if ui.verbose:
7961 if ui.verbose:
7962 ui.pager(b'version')
7962 ui.pager(b'version')
7963 fm = ui.formatter(b"version", opts)
7963 fm = ui.formatter(b"version", opts)
7964 fm.startitem()
7964 fm.startitem()
7965 fm.write(
7965 fm.write(
7966 b"ver", _(b"Mercurial Distributed SCM (version %s)\n"), util.version()
7966 b"ver", _(b"Mercurial Distributed SCM (version %s)\n"), util.version()
7967 )
7967 )
7968 license = _(
7968 license = _(
7969 b"(see https://mercurial-scm.org for more information)\n"
7969 b"(see https://mercurial-scm.org for more information)\n"
7970 b"\nCopyright (C) 2005-2023 Olivia Mackall and others\n"
7970 b"\nCopyright (C) 2005-2023 Olivia Mackall and others\n"
7971 b"This is free software; see the source for copying conditions. "
7971 b"This is free software; see the source for copying conditions. "
7972 b"There is NO\nwarranty; "
7972 b"There is NO\nwarranty; "
7973 b"not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
7973 b"not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
7974 )
7974 )
7975 if not ui.quiet:
7975 if not ui.quiet:
7976 fm.plain(license)
7976 fm.plain(license)
7977
7977
7978 if ui.verbose:
7978 if ui.verbose:
7979 fm.plain(_(b"\nEnabled extensions:\n\n"))
7979 fm.plain(_(b"\nEnabled extensions:\n\n"))
7980 # format names and versions into columns
7980 # format names and versions into columns
7981 names = []
7981 names = []
7982 vers = []
7982 vers = []
7983 isinternals = []
7983 isinternals = []
7984 for name, module in sorted(extensions.extensions()):
7984 for name, module in sorted(extensions.extensions()):
7985 names.append(name)
7985 names.append(name)
7986 vers.append(extensions.moduleversion(module) or None)
7986 vers.append(extensions.moduleversion(module) or None)
7987 isinternals.append(extensions.ismoduleinternal(module))
7987 isinternals.append(extensions.ismoduleinternal(module))
7988 fn = fm.nested(b"extensions", tmpl=b'{name}\n')
7988 fn = fm.nested(b"extensions", tmpl=b'{name}\n')
7989 if names:
7989 if names:
7990 namefmt = b" %%-%ds " % max(len(n) for n in names)
7990 namefmt = b" %%-%ds " % max(len(n) for n in names)
7991 places = [_(b"external"), _(b"internal")]
7991 places = [_(b"external"), _(b"internal")]
7992 for n, v, p in zip(names, vers, isinternals):
7992 for n, v, p in zip(names, vers, isinternals):
7993 fn.startitem()
7993 fn.startitem()
7994 fn.condwrite(ui.verbose, b"name", namefmt, n)
7994 fn.condwrite(ui.verbose, b"name", namefmt, n)
7995 if ui.verbose:
7995 if ui.verbose:
7996 fn.plain(b"%s " % places[p])
7996 fn.plain(b"%s " % places[p])
7997 fn.data(bundled=p)
7997 fn.data(bundled=p)
7998 fn.condwrite(ui.verbose and v, b"ver", b"%s", v)
7998 fn.condwrite(ui.verbose and v, b"ver", b"%s", v)
7999 if ui.verbose:
7999 if ui.verbose:
8000 fn.plain(b"\n")
8000 fn.plain(b"\n")
8001 fn.end()
8001 fn.end()
8002 fm.end()
8002 fm.end()
8003
8003
8004
8004
8005 def loadcmdtable(ui, name, cmdtable):
8005 def loadcmdtable(ui, name, cmdtable):
8006 """Load command functions from specified cmdtable"""
8006 """Load command functions from specified cmdtable"""
8007 overrides = [cmd for cmd in cmdtable if cmd in table]
8007 overrides = [cmd for cmd in cmdtable if cmd in table]
8008 if overrides:
8008 if overrides:
8009 ui.warn(
8009 ui.warn(
8010 _(b"extension '%s' overrides commands: %s\n")
8010 _(b"extension '%s' overrides commands: %s\n")
8011 % (name, b" ".join(overrides))
8011 % (name, b" ".join(overrides))
8012 )
8012 )
8013 table.update(cmdtable)
8013 table.update(cmdtable)
@@ -1,1802 +1,1808 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
9 import collections
9 import collections
10 import contextlib
10 import contextlib
11 import os
11 import os
12 import stat
12 import stat
13 import uuid
13 import uuid
14
14
15 from .i18n import _
15 from .i18n import _
16 from .pycompat import delattr
16 from .pycompat import delattr
17
17
18 from hgdemandimport import tracing
18 from hgdemandimport import tracing
19
19
20 from . import (
20 from . import (
21 dirstatemap,
21 dirstatemap,
22 encoding,
22 encoding,
23 error,
23 error,
24 match as matchmod,
24 match as matchmod,
25 node,
25 node,
26 pathutil,
26 pathutil,
27 policy,
27 policy,
28 pycompat,
28 pycompat,
29 scmutil,
29 scmutil,
30 txnutil,
30 txnutil,
31 util,
31 util,
32 )
32 )
33
33
34 from .dirstateutils import (
34 from .dirstateutils import (
35 timestamp,
35 timestamp,
36 )
36 )
37
37
38 from .interfaces import (
38 from .interfaces import (
39 dirstate as intdirstate,
39 dirstate as intdirstate,
40 util as interfaceutil,
40 util as interfaceutil,
41 )
41 )
42
42
43 parsers = policy.importmod('parsers')
43 parsers = policy.importmod('parsers')
44 rustmod = policy.importrust('dirstate')
44 rustmod = policy.importrust('dirstate')
45
45
46 # use to detect lack of a parameter
46 # use to detect lack of a parameter
47 SENTINEL = object()
47 SENTINEL = object()
48
48
49 HAS_FAST_DIRSTATE_V2 = rustmod is not None
49 HAS_FAST_DIRSTATE_V2 = rustmod is not None
50
50
51 propertycache = util.propertycache
51 propertycache = util.propertycache
52 filecache = scmutil.filecache
52 filecache = scmutil.filecache
53 _rangemask = dirstatemap.rangemask
53 _rangemask = dirstatemap.rangemask
54
54
55 DirstateItem = dirstatemap.DirstateItem
55 DirstateItem = dirstatemap.DirstateItem
56
56
57
57
58 class repocache(filecache):
58 class repocache(filecache):
59 """filecache for files in .hg/"""
59 """filecache for files in .hg/"""
60
60
61 def join(self, obj, fname):
61 def join(self, obj, fname):
62 return obj._opener.join(fname)
62 return obj._opener.join(fname)
63
63
64
64
65 class rootcache(filecache):
65 class rootcache(filecache):
66 """filecache for files in the repository root"""
66 """filecache for files in the repository root"""
67
67
68 def join(self, obj, fname):
68 def join(self, obj, fname):
69 return obj._join(fname)
69 return obj._join(fname)
70
70
71
71
72 def check_invalidated(func):
72 def check_invalidated(func):
73 """check that the func is called with a non-invalidated dirstate
73 """check that the func is called with a non-invalidated dirstate
74
74
75 The dirstate is in an "invalidated state" after an error occured during its
75 The dirstate is in an "invalidated state" after an error occured during its
76 modification and remains so until we exited the top level scope that framed
76 modification and remains so until we exited the top level scope that framed
77 such change.
77 such change.
78 """
78 """
79
79
80 def wrap(self, *args, **kwargs):
80 def wrap(self, *args, **kwargs):
81 if self._invalidated_context:
81 if self._invalidated_context:
82 msg = 'calling `%s` after the dirstate was invalidated'
82 msg = 'calling `%s` after the dirstate was invalidated'
83 msg %= func.__name__
83 msg %= func.__name__
84 raise error.ProgrammingError(msg)
84 raise error.ProgrammingError(msg)
85 return func(self, *args, **kwargs)
85 return func(self, *args, **kwargs)
86
86
87 return wrap
87 return wrap
88
88
89
89
90 def requires_changing_parents(func):
90 def requires_changing_parents(func):
91 def wrap(self, *args, **kwargs):
91 def wrap(self, *args, **kwargs):
92 if not self.is_changing_parents:
92 if not self.is_changing_parents:
93 msg = 'calling `%s` outside of a changing_parents context'
93 msg = 'calling `%s` outside of a changing_parents context'
94 msg %= func.__name__
94 msg %= func.__name__
95 raise error.ProgrammingError(msg)
95 raise error.ProgrammingError(msg)
96 return func(self, *args, **kwargs)
96 return func(self, *args, **kwargs)
97
97
98 return check_invalidated(wrap)
98 return check_invalidated(wrap)
99
99
100
100
101 def requires_changing_files(func):
101 def requires_changing_files(func):
102 def wrap(self, *args, **kwargs):
102 def wrap(self, *args, **kwargs):
103 if not self.is_changing_files:
103 if not self.is_changing_files:
104 msg = 'calling `%s` outside of a `changing_files`'
104 msg = 'calling `%s` outside of a `changing_files`'
105 msg %= func.__name__
105 msg %= func.__name__
106 raise error.ProgrammingError(msg)
106 raise error.ProgrammingError(msg)
107 return func(self, *args, **kwargs)
107 return func(self, *args, **kwargs)
108
108
109 return check_invalidated(wrap)
109 return check_invalidated(wrap)
110
110
111
111
112 def requires_changing_any(func):
112 def requires_changing_any(func):
113 def wrap(self, *args, **kwargs):
113 def wrap(self, *args, **kwargs):
114 if not self.is_changing_any:
114 if not self.is_changing_any:
115 msg = 'calling `%s` outside of a changing context'
115 msg = 'calling `%s` outside of a changing context'
116 msg %= func.__name__
116 msg %= func.__name__
117 raise error.ProgrammingError(msg)
117 raise error.ProgrammingError(msg)
118 return func(self, *args, **kwargs)
118 return func(self, *args, **kwargs)
119
119
120 return check_invalidated(wrap)
120 return check_invalidated(wrap)
121
121
122
122
123 def requires_changing_files_or_status(func):
123 def requires_changing_files_or_status(func):
124 def wrap(self, *args, **kwargs):
124 def wrap(self, *args, **kwargs):
125 if not (self.is_changing_files or self._running_status > 0):
125 if not (self.is_changing_files or self._running_status > 0):
126 msg = (
126 msg = (
127 'calling `%s` outside of a changing_files '
127 'calling `%s` outside of a changing_files '
128 'or running_status context'
128 'or running_status context'
129 )
129 )
130 msg %= func.__name__
130 msg %= func.__name__
131 raise error.ProgrammingError(msg)
131 raise error.ProgrammingError(msg)
132 return func(self, *args, **kwargs)
132 return func(self, *args, **kwargs)
133
133
134 return check_invalidated(wrap)
134 return check_invalidated(wrap)
135
135
136
136
137 CHANGE_TYPE_PARENTS = "parents"
137 CHANGE_TYPE_PARENTS = "parents"
138 CHANGE_TYPE_FILES = "files"
138 CHANGE_TYPE_FILES = "files"
139
139
140
140
141 @interfaceutil.implementer(intdirstate.idirstate)
141 @interfaceutil.implementer(intdirstate.idirstate)
142 class dirstate:
142 class dirstate:
143
143
144 # used by largefile to avoid overwritting transaction callback
144 # used by largefile to avoid overwritting transaction callback
145 _tr_key_suffix = b''
145 _tr_key_suffix = b''
146
146
147 def __init__(
147 def __init__(
148 self,
148 self,
149 opener,
149 opener,
150 ui,
150 ui,
151 root,
151 root,
152 validate,
152 validate,
153 sparsematchfn,
153 sparsematchfn,
154 nodeconstants,
154 nodeconstants,
155 use_dirstate_v2,
155 use_dirstate_v2,
156 use_tracked_hint=False,
156 use_tracked_hint=False,
157 ):
157 ):
158 """Create a new dirstate object.
158 """Create a new dirstate object.
159
159
160 opener is an open()-like callable that can be used to open the
160 opener is an open()-like callable that can be used to open the
161 dirstate file; root is the root of the directory tracked by
161 dirstate file; root is the root of the directory tracked by
162 the dirstate.
162 the dirstate.
163 """
163 """
164 self._use_dirstate_v2 = use_dirstate_v2
164 self._use_dirstate_v2 = use_dirstate_v2
165 self._use_tracked_hint = use_tracked_hint
165 self._use_tracked_hint = use_tracked_hint
166 self._nodeconstants = nodeconstants
166 self._nodeconstants = nodeconstants
167 self._opener = opener
167 self._opener = opener
168 self._validate = validate
168 self._validate = validate
169 self._root = root
169 self._root = root
170 # Either build a sparse-matcher or None if sparse is disabled
170 # Either build a sparse-matcher or None if sparse is disabled
171 self._sparsematchfn = sparsematchfn
171 self._sparsematchfn = sparsematchfn
172 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
172 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
173 # UNC path pointing to root share (issue4557)
173 # UNC path pointing to root share (issue4557)
174 self._rootdir = pathutil.normasprefix(root)
174 self._rootdir = pathutil.normasprefix(root)
175 # True is any internal state may be different
175 # True is any internal state may be different
176 self._dirty = False
176 self._dirty = False
177 # True if the set of tracked file may be different
177 # True if the set of tracked file may be different
178 self._dirty_tracked_set = False
178 self._dirty_tracked_set = False
179 self._ui = ui
179 self._ui = ui
180 self._filecache = {}
180 self._filecache = {}
181 # nesting level of `changing_parents` context
181 # nesting level of `changing_parents` context
182 self._changing_level = 0
182 self._changing_level = 0
183 # the change currently underway
183 # the change currently underway
184 self._change_type = None
184 self._change_type = None
185 # number of open _running_status context
185 # number of open _running_status context
186 self._running_status = 0
186 self._running_status = 0
187 # True if the current dirstate changing operations have been
187 # True if the current dirstate changing operations have been
188 # invalidated (used to make sure all nested contexts have been exited)
188 # invalidated (used to make sure all nested contexts have been exited)
189 self._invalidated_context = False
189 self._invalidated_context = False
190 self._attached_to_a_transaction = False
190 self._attached_to_a_transaction = False
191 self._filename = b'dirstate'
191 self._filename = b'dirstate'
192 self._filename_th = b'dirstate-tracked-hint'
192 self._filename_th = b'dirstate-tracked-hint'
193 self._pendingfilename = b'%s.pending' % self._filename
193 self._pendingfilename = b'%s.pending' % self._filename
194 self._plchangecallbacks = {}
194 self._plchangecallbacks = {}
195 self._origpl = None
195 self._origpl = None
196 self._mapcls = dirstatemap.dirstatemap
196 self._mapcls = dirstatemap.dirstatemap
197 # Access and cache cwd early, so we don't access it for the first time
197 # Access and cache cwd early, so we don't access it for the first time
198 # after a working-copy update caused it to not exist (accessing it then
198 # after a working-copy update caused it to not exist (accessing it then
199 # raises an exception).
199 # raises an exception).
200 self._cwd
200 self._cwd
201
201
202 def refresh(self):
202 def refresh(self):
203 # XXX if this happens, you likely did not enter the `changing_xxx`
204 # using `repo.dirstate`, so a later `repo.dirstate` accesss might call
205 # `refresh`.
206 if self.is_changing_any:
207 msg = "refreshing the dirstate in the middle of a change"
208 raise error.ProgrammingError(msg)
203 if '_branch' in vars(self):
209 if '_branch' in vars(self):
204 del self._branch
210 del self._branch
205 if '_map' in vars(self) and self._map.may_need_refresh():
211 if '_map' in vars(self) and self._map.may_need_refresh():
206 self.invalidate()
212 self.invalidate()
207
213
208 def prefetch_parents(self):
214 def prefetch_parents(self):
209 """make sure the parents are loaded
215 """make sure the parents are loaded
210
216
211 Used to avoid a race condition.
217 Used to avoid a race condition.
212 """
218 """
213 self._pl
219 self._pl
214
220
215 @contextlib.contextmanager
221 @contextlib.contextmanager
216 @check_invalidated
222 @check_invalidated
217 def running_status(self, repo):
223 def running_status(self, repo):
218 """Wrap a status operation
224 """Wrap a status operation
219
225
220 This context is not mutally exclusive with the `changing_*` context. It
226 This context is not mutally exclusive with the `changing_*` context. It
221 also do not warrant for the `wlock` to be taken.
227 also do not warrant for the `wlock` to be taken.
222
228
223 If the wlock is taken, this context will behave in a simple way, and
229 If the wlock is taken, this context will behave in a simple way, and
224 ensure the data are scheduled for write when leaving the top level
230 ensure the data are scheduled for write when leaving the top level
225 context.
231 context.
226
232
227 If the lock is not taken, it will only warrant that the data are either
233 If the lock is not taken, it will only warrant that the data are either
228 committed (written) and rolled back (invalidated) when exiting the top
234 committed (written) and rolled back (invalidated) when exiting the top
229 level context. The write/invalidate action must be performed by the
235 level context. The write/invalidate action must be performed by the
230 wrapped code.
236 wrapped code.
231
237
232
238
233 The expected logic is:
239 The expected logic is:
234
240
235 A: read the dirstate
241 A: read the dirstate
236 B: run status
242 B: run status
237 This might make the dirstate dirty by updating cache,
243 This might make the dirstate dirty by updating cache,
238 especially in Rust.
244 especially in Rust.
239 C: do more "post status fixup if relevant
245 C: do more "post status fixup if relevant
240 D: try to take the w-lock (this will invalidate the changes if they were raced)
246 D: try to take the w-lock (this will invalidate the changes if they were raced)
241 E0: if dirstate changed on disk β†’ discard change (done by dirstate internal)
247 E0: if dirstate changed on disk β†’ discard change (done by dirstate internal)
242 E1: elif lock was acquired β†’ write the changes
248 E1: elif lock was acquired β†’ write the changes
243 E2: else β†’ discard the changes
249 E2: else β†’ discard the changes
244 """
250 """
245 has_lock = repo.currentwlock() is not None
251 has_lock = repo.currentwlock() is not None
246 is_changing = self.is_changing_any
252 is_changing = self.is_changing_any
247 tr = repo.currenttransaction()
253 tr = repo.currenttransaction()
248 has_tr = tr is not None
254 has_tr = tr is not None
249 nested = bool(self._running_status)
255 nested = bool(self._running_status)
250
256
251 first_and_alone = not (is_changing or has_tr or nested)
257 first_and_alone = not (is_changing or has_tr or nested)
252
258
253 # enforce no change happened outside of a proper context.
259 # enforce no change happened outside of a proper context.
254 if first_and_alone and self._dirty:
260 if first_and_alone and self._dirty:
255 has_tr = repo.currenttransaction() is not None
261 has_tr = repo.currenttransaction() is not None
256 if not has_tr and self._changing_level == 0 and self._dirty:
262 if not has_tr and self._changing_level == 0 and self._dirty:
257 msg = "entering a status context, but dirstate is already dirty"
263 msg = "entering a status context, but dirstate is already dirty"
258 raise error.ProgrammingError(msg)
264 raise error.ProgrammingError(msg)
259
265
260 should_write = has_lock and not (nested or is_changing)
266 should_write = has_lock and not (nested or is_changing)
261
267
262 self._running_status += 1
268 self._running_status += 1
263 try:
269 try:
264 yield
270 yield
265 except Exception:
271 except Exception:
266 self.invalidate()
272 self.invalidate()
267 raise
273 raise
268 finally:
274 finally:
269 self._running_status -= 1
275 self._running_status -= 1
270 if self._invalidated_context:
276 if self._invalidated_context:
271 should_write = False
277 should_write = False
272 self.invalidate()
278 self.invalidate()
273
279
274 if should_write:
280 if should_write:
275 assert repo.currenttransaction() is tr
281 assert repo.currenttransaction() is tr
276 self.write(tr)
282 self.write(tr)
277 elif not has_lock:
283 elif not has_lock:
278 if self._dirty:
284 if self._dirty:
279 msg = b'dirstate dirty while exiting an isolated status context'
285 msg = b'dirstate dirty while exiting an isolated status context'
280 repo.ui.develwarn(msg)
286 repo.ui.develwarn(msg)
281 self.invalidate()
287 self.invalidate()
282
288
@contextlib.contextmanager
@check_invalidated
def _changing(self, repo, change_type):
    """Open a dirstate-changing context of kind `change_type`.

    The wlock must be held. Contexts may nest, but only for the same
    change type. On normal exit of the outermost level, the dirstate is
    written out (through the current transaction when one exists).
    """
    if repo.currentwlock() is None:
        msg = b"trying to change the dirstate without holding the wlock"
        raise error.ProgrammingError(msg)

    has_tr = repo.currenttransaction() is not None
    if not has_tr and self._changing_level == 0 and self._dirty:
        msg = b"entering a changing context, but dirstate is already dirty"
        repo.ui.develwarn(msg)

    assert self._changing_level >= 0
    # different types of change are mutually exclusive
    if self._change_type is None:
        assert self._changing_level == 0
        self._change_type = change_type
    elif self._change_type != change_type:
        msg = (
            'trying to open "%s" dirstate-changing context while a "%s" is'
            ' already open'
        )
        msg %= (change_type, self._change_type)
        raise error.ProgrammingError(msg)
    should_write = False
    self._changing_level += 1
    try:
        yield
    except:  # re-raises
        self.invalidate()  # this will set `_invalidated_context`
        raise
    finally:
        assert self._changing_level > 0
        self._changing_level -= 1
        # If the dirstate is being invalidated, call invalidate again.
        # This will throw away anything added by a upper context and
        # reset the `_invalidated_context` flag when relevant
        if self._changing_level <= 0:
            self._change_type = None
            assert self._changing_level == 0
        if self._invalidated_context:
            # make sure we invalidate anything an upper context might
            # have changed.
            self.invalidate()
        else:
            should_write = self._changing_level <= 0
            tr = repo.currenttransaction()
            if has_tr != (tr is not None):
                if has_tr:
                    m = "transaction vanished while changing dirstate"
                else:
                    m = "transaction appeared while changing dirstate"
                raise error.ProgrammingError(m)
            if should_write:
                self.write(tr)
338
344
@contextlib.contextmanager
def changing_parents(self, repo):
    """Context manager for a change of the dirstate parents."""
    with self._changing(repo, CHANGE_TYPE_PARENTS) as c:
        yield c
343
349
@contextlib.contextmanager
def changing_files(self, repo):
    """Context manager for a change of the tracked-file set."""
    with self._changing(repo, CHANGE_TYPE_FILES) as c:
        yield c
348
354
# here to help migration to the new code
def parentchange(self):
    """Removed legacy API; always raises.

    Kept only so callers of the pre-6.4 API get a clear error pointing
    at the `changing_parents(repo)` replacement.
    """
    msg = (
        "Mercurial 6.4 and later requires call to "
        "`dirstate.changing_parents(repo)`"
    )
    raise error.ProgrammingError(msg)
356
362
@property
def is_changing_any(self):
    """Returns true if the dirstate is in the middle of a set of changes.

    This returns True for any kind of change.
    """
    return self._changing_level > 0
364
370
def pendingparentchange(self):
    """Deprecated alias delegating to `is_changing_parent`."""
    return self.is_changing_parent()
367
373
def is_changing_parent(self):
    """Returns true if the dirstate is in the middle of a set of changes
    that modify the dirstate parent.

    Deprecated; use the `is_changing_parents` property instead.
    """
    self._ui.deprecwarn(b"dirstate.is_changing_parents", b"6.5")
    return self.is_changing_parents
374
380
@property
def is_changing_parents(self):
    """Returns true if the dirstate is in the middle of a set of changes
    that modify the dirstate parent.
    """
    changing = self._changing_level > 0
    return changing and self._change_type == CHANGE_TYPE_PARENTS
383
389
@property
def is_changing_files(self):
    """Returns true if the dirstate is in the middle of a set of changes
    that modify the files tracked or their sources.
    """
    changing = self._changing_level > 0
    return changing and self._change_type == CHANGE_TYPE_FILES
392
398
@propertycache
def _map(self):
    """Return the dirstate contents (see documentation for dirstatemap)."""
    dirstate_map = self._mapcls(
        self._ui,
        self._opener,
        self._root,
        self._nodeconstants,
        self._use_dirstate_v2,
    )
    return dirstate_map
403
409
@property
def _sparsematcher(self):
    """The matcher for the sparse checkout.

    The working directory may not include every file from a manifest. The
    matcher obtained by this property will match a path if it is to be
    included in the working directory.

    When sparse is disabled, return None.
    """
    matcher_factory = self._sparsematchfn
    if matcher_factory is None:
        return None
    # TODO there is potential to cache this property. For now, the matcher
    # is resolved on every access. (But the called function does use a
    # cache to keep the lookup fast.)
    return matcher_factory()
420
426
@repocache(b'branch')
def _branch(self):
    """Read the current branch name from the 'branch' file.

    Pending transaction data is honored (via txnutil); a missing or
    empty file means the default branch.
    """
    f = None
    data = b''
    try:
        f, mode = txnutil.trypending(self._root, self._opener, b'branch')
        data = f.read().strip()
    except FileNotFoundError:
        pass
    finally:
        if f is not None:
            f.close()
    return data or b"default"
436
442
@property
def _pl(self):
    """The raw parent pair as stored in the dirstate map."""
    return self._map.parents()
440
446
def hasdir(self, d):
    """Return whether `d` is a directory containing tracked files."""
    return self._map.hastrackeddir(d)
443
449
@rootcache(b'.hgignore')
def _ignore(self):
    """Build the matcher for ignored files from every ignore file."""
    ignore_files = self._ignorefiles()
    if not ignore_files:
        return matchmod.never()
    patterns = [b'include:%s' % path for path in ignore_files]
    return matchmod.match(self._root, b'', [], patterns, warn=self._ui.warn)
452
458
@propertycache
def _slash(self):
    """Whether displayed paths should use '/' despite the native separator."""
    wants_slash = self._ui.configbool(b'ui', b'slash')
    return wants_slash and pycompat.ossep != b'/'
456
462
@propertycache
def _checklink(self):
    """Whether the repository filesystem supports symlinks."""
    return util.checklink(self._root)
460
466
@propertycache
def _checkexec(self):
    """Whether the repository filesystem tracks the executable bit."""
    return bool(util.checkexec(self._root))
464
470
@propertycache
def _checkcase(self):
    """Whether the repository filesystem is case-insensitive."""
    return not util.fscasesensitive(self._join(b'.hg'))
468
474
def _join(self, f):
    """Join `f` to the repository root.

    Much faster than os.path.join(); safe because `f` is always a
    relative path.
    """
    return self._rootdir + f
473
479
def flagfunc(self, buildfallback):
    """build a callable that returns flags associated with a filename

    The information is extracted from three possible layers:
    1. the file system if it supports the information
    2. the "fallback" information stored in the dirstate if any
    3. a more expensive mechanism inferring the flags from the parents.
    """

    # one-element list used as a lazy cache for buildfallback()
    fallback_box = []

    def get_flags(path):
        entry = None
        fallback_value = None
        try:
            st = os.lstat(self._join(path))
        except OSError:
            return b''

        if self._checklink:
            if util.statislink(st):
                return b'l'
        else:
            entry = self.get_entry(path)
            if entry.has_fallback_symlink:
                if entry.fallback_symlink:
                    return b'l'
            else:
                if not fallback_box:
                    fallback_box.append(buildfallback())
                fallback_value = fallback_box[0](path)
                if b'l' in fallback_value:
                    return b'l'

        if self._checkexec:
            if util.statisexec(st):
                return b'x'
        else:
            if entry is None:
                entry = self.get_entry(path)
            if entry.has_fallback_exec:
                if entry.fallback_exec:
                    return b'x'
            else:
                if fallback_value is None:
                    if not fallback_box:
                        fallback_box.append(buildfallback())
                    fallback_value = fallback_box[0](path)
                if b'x' in fallback_value:
                    return b'x'
        return b''

    return get_flags
528
534
@propertycache
def _cwd(self):
    """The working directory, honoring the `ui.forcecwd` override."""
    # internal config: ui.forcecwd
    forcecwd = self._ui.config(b'ui', b'forcecwd')
    return forcecwd or encoding.getcwd()
536
542
def getcwd(self):
    """Return the path from which a canonical path is calculated.

    This path should be used to resolve file patterns or to convert
    canonical paths back to file paths for display. It shouldn't be
    used to get real file paths. Use vfs functions instead.
    """
    cwd = self._cwd
    if cwd == self._root:
        return b''
    # self._root ends with a path separator if self._root is '/' or 'C:\'
    rootsep = self._root
    if not util.endswithsep(rootsep):
        rootsep += pycompat.ossep
    if cwd.startswith(rootsep):
        return cwd[len(rootsep) :]
    # we're outside the repo. return an absolute path.
    return cwd
556
562
def pathto(self, f, cwd=None):
    """Return `f` as a path relative to `cwd` (default: `self.getcwd()`)."""
    if cwd is None:
        cwd = self.getcwd()
    path = util.pathto(self._root, cwd, f)
    return util.pconvert(path) if self._slash else path
564
570
def get_entry(self, path):
    """return a DirstateItem for the associated path"""
    entry = self._map.get(path)
    # an empty item stands in for paths absent from the map
    return DirstateItem() if entry is None else entry
571
577
def __contains__(self, key):
    """Whether `key` has an entry in the dirstate map."""
    return key in self._map
574
580
def __iter__(self):
    """Iterate over tracked paths in sorted order."""
    return iter(sorted(self._map))
577
583
def items(self):
    """Iterate over (filename, DirstateItem) pairs."""
    return self._map.items()

# historical alias kept for compatibility
iteritems = items
582
588
def parents(self):
    """Return both parents, run through `_validate`."""
    validate = self._validate
    return [validate(p) for p in self._pl]
585
591
def p1(self):
    """Return the validated first parent."""
    return self._validate(self._pl[0])
588
594
def p2(self):
    """Return the validated second parent."""
    return self._validate(self._pl[1])
591
597
@property
def in_merge(self):
    """True if a merge is in progress"""
    return self._pl[1] != self._nodeconstants.nullid
596
602
def branch(self):
    """Return the current branch name, converted to the local encoding."""
    return encoding.tolocal(self._branch)
599
605
@requires_changing_parents
def setparents(self, p1, p2=None):
    """Set dirstate parents to p1 and p2.

    When moving from two parents to one, "merged" entries are
    adjusted to normal and previous copy records discarded and
    returned by the call.

    See localrepo.setparents()
    """
    if p2 is None:
        p2 = self._nodeconstants.nullid
    if self._changing_level == 0:
        raise ValueError(
            b"cannot set dirstate parent outside of "
            b"dirstate.changing_parents context manager"
        )

    self._dirty = True
    oldp2 = self._pl[1]
    if self._origpl is None:
        # remember the original parents for a later rollback/report
        self._origpl = self._pl
    nullid = self._nodeconstants.nullid
    # True if we need to fold p2 related state back to a linear case
    fold_p2 = oldp2 != nullid and p2 == nullid
    return self._map.setparents(p1, p2, fold_p2=fold_p2)
626
632
def setbranch(self, branch, transaction=SENTINEL):
    """Record `branch` as the current branch.

    When a transaction is provided the `branch` file write is delegated
    to a transaction file generator; otherwise it is written right away.
    Calling without a `transaction` argument is deprecated.
    """
    self.__class__._branch.set(self, encoding.fromlocal(branch))
    if transaction is SENTINEL:
        msg = b"setbranch needs a `transaction` argument"
        self._ui.deprecwarn(msg, b'6.5')
        transaction = None
    if transaction is not None:
        self._setup_tr_abort(transaction)
        transaction.addfilegenerator(
            b'dirstate-3-branch%s' % self._tr_key_suffix,
            (b'branch',),
            self._write_branch,
            location=b'plain',
            post_finalize=True,
        )
        return

    vfs = self._opener
    with vfs(b'branch', b'w', atomictemp=True, checkambig=True) as f:
        self._write_branch(f)
    # make sure filecache has the correct stat info for _branch after
    # replacing the underlying file
    #
    # XXX do we actually need this,
    # refreshing the attribute is quite cheap
    ce = self._filecache[b'_branch']
    if ce:
        ce.refresh()
655
661
def _write_branch(self, file_obj):
    """Write the in-memory branch name (plus newline) to `file_obj`."""
    file_obj.write(self._branch + b'\n')
658
664
def invalidate(self):
    """Causes the next access to reread the dirstate.

    This is different from localrepo.invalidatedirstate() because it always
    rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
    check whether the dirstate has changed before rereading it."""

    for cached_attr in ("_map", "_branch", "_ignore"):
        if cached_attr in self.__dict__:
            delattr(self, cached_attr)
    self._dirty = False
    self._dirty_tracked_set = False
    # remember that an enclosing context saw an invalidation
    self._invalidated_context = bool(
        self._changing_level > 0
        or self._attached_to_a_transaction
        or self._running_status
    )
    self._origpl = None
677
683
@requires_changing_any
def copy(self, source, dest):
    """Mark dest as a copy of source. Unmark dest if source is None."""
    if source == dest:
        return
    self._dirty = True
    if source is None:
        self._map.copymap.pop(dest, None)
    else:
        self._check_sparse(source)
        self._map.copymap[dest] = source
689
695
def copied(self, file):
    """Return the recorded copy source of `file`, or None."""
    return self._map.copymap.get(file, None)
692
698
def copies(self):
    """Return the full copy map (destination -> source)."""
    return self._map.copymap
695
701
@requires_changing_files
def set_tracked(self, filename, reset_copy=False):
    """a "public" method for generic code to mark a file as tracked

    This function is to be called outside of "update/merge" case. For
    example by a command like `hg add X`.

    if reset_copy is set, any existing copy information will be dropped.

    return True the file was previously untracked, False otherwise.
    """
    self._dirty = True
    entry = self._map.get(filename)
    if entry is None or not entry.tracked:
        # newly tracked: check for case-folding/path conflicts first
        self._check_new_tracked_filename(filename)
    pre_tracked = self._map.set_tracked(filename)
    if reset_copy:
        self._map.copymap.pop(filename, None)
    if pre_tracked:
        self._dirty_tracked_set = True
    return pre_tracked
717
723
@requires_changing_files
def set_untracked(self, filename):
    """a "public" method for generic code to mark a file as untracked

    This function is to be called outside of "update/merge" case. For
    example by a command like `hg remove X`.

    return True the file was previously tracked, False otherwise.
    """
    was_tracked = self._map.set_untracked(filename)
    if was_tracked:
        self._dirty = True
        self._dirty_tracked_set = True
    return was_tracked
732
738
@requires_changing_files_or_status
def set_clean(self, filename, parentfiledata):
    """record that the current state of the file on disk is known to be clean"""
    self._dirty = True
    if not self._map[filename].tracked:
        self._check_new_tracked_filename(filename)
    mode, size, mtime = parentfiledata
    self._map.set_clean(filename, mode, size, mtime)
741
747
@requires_changing_files_or_status
def set_possibly_dirty(self, filename):
    """record that the current state of the file on disk is unknown"""
    self._dirty = True
    self._map.set_possibly_dirty(filename)
747
753
@requires_changing_parents
def update_file_p1(
    self,
    filename,
    p1_tracked,
):
    """Set a file as tracked in the parent (or not)

    This is to be called when adjusting the dirstate to a new parent
    after a history rewriting operation.

    It should not be called during a merge (p2 != nullid) and only within
    a `with dirstate.changing_parents(repo):` context.
    """
    if self.in_merge:
        msg = b'update_file_reference should not be called when merging'
        raise error.ProgrammingError(msg)
    entry = self._map.get(filename)
    wc_tracked = False if entry is None else entry.tracked
    if not (p1_tracked or wc_tracked):
        # the file is no longer relevant to anyone
        if self._map.get(filename) is not None:
            self._map.reset_state(filename)
            self._dirty = True
    elif (not p1_tracked) and wc_tracked:
        if entry is not None and entry.added:
            return  # avoid dropping copy information (maybe?)

    self._map.reset_state(
        filename,
        wc_tracked,
        p1_tracked,
        # the underlying reference might have changed, we will have to
        # check it.
        has_meaningful_mtime=False,
    )
787
793
@requires_changing_parents
def update_file(
    self,
    filename,
    wc_tracked,
    p1_tracked,
    p2_info=False,
    possibly_dirty=False,
    parentfiledata=None,
):
    """update the information about a file in the dirstate

    This is to be called when the dirstate's parents change, to keep track
    of what is the file situation in regards to the working copy and its parent.

    This function must be called within a `dirstate.changing_parents` context.

    note: the API is at an early stage and we might need to adjust it
    depending of what information ends up being relevant and useful to
    other processing.
    """
    self._update_file(
        filename=filename,
        wc_tracked=wc_tracked,
        p1_tracked=p1_tracked,
        p2_info=p2_info,
        possibly_dirty=possibly_dirty,
        parentfiledata=parentfiledata,
    )
817
823
def hacky_extension_update_file(self, *args, **kwargs):
    """NEVER USE THIS, YOU DO NOT NEED IT

    This function is a variant of "update_file" to be called by a small set
    of extensions, it also adjust the internal state of file, but can be
    called outside an `changing_parents` context.

    A very small number of extension meddle with the working copy content
    in a way that requires to adjust the dirstate accordingly. At the time
    this command is written they are :
    - keyword,
    - largefile,
    PLEASE DO NOT GROW THIS LIST ANY FURTHER.

    This function could probably be replaced by more semantic one (like
    "adjust expected size" or "always revalidate file content", etc)
    however at the time where this is writen, this is too much of a detour
    to be considered.
    """
    in_change_context = self._changing_level > 0 or self._running_status > 0
    if not in_change_context:
        msg = "requires a changes context"
        raise error.ProgrammingError(msg)
    self._update_file(*args, **kwargs)
844
850
845 def _update_file(
851 def _update_file(
846 self,
852 self,
847 filename,
853 filename,
848 wc_tracked,
854 wc_tracked,
849 p1_tracked,
855 p1_tracked,
850 p2_info=False,
856 p2_info=False,
851 possibly_dirty=False,
857 possibly_dirty=False,
852 parentfiledata=None,
858 parentfiledata=None,
853 ):
859 ):
854
860
855 # note: I do not think we need to double check name clash here since we
861 # note: I do not think we need to double check name clash here since we
856 # are in a update/merge case that should already have taken care of
862 # are in a update/merge case that should already have taken care of
857 # this. The test agrees
863 # this. The test agrees
858
864
859 self._dirty = True
865 self._dirty = True
860 old_entry = self._map.get(filename)
866 old_entry = self._map.get(filename)
861 if old_entry is None:
867 if old_entry is None:
862 prev_tracked = False
868 prev_tracked = False
863 else:
869 else:
864 prev_tracked = old_entry.tracked
870 prev_tracked = old_entry.tracked
865 if prev_tracked != wc_tracked:
871 if prev_tracked != wc_tracked:
866 self._dirty_tracked_set = True
872 self._dirty_tracked_set = True
867
873
868 self._map.reset_state(
874 self._map.reset_state(
869 filename,
875 filename,
870 wc_tracked,
876 wc_tracked,
871 p1_tracked,
877 p1_tracked,
872 p2_info=p2_info,
878 p2_info=p2_info,
873 has_meaningful_mtime=not possibly_dirty,
879 has_meaningful_mtime=not possibly_dirty,
874 parentfiledata=parentfiledata,
880 parentfiledata=parentfiledata,
875 )
881 )
876
882
877 def _check_new_tracked_filename(self, filename):
883 def _check_new_tracked_filename(self, filename):
878 scmutil.checkfilename(filename)
884 scmutil.checkfilename(filename)
879 if self._map.hastrackeddir(filename):
885 if self._map.hastrackeddir(filename):
880 msg = _(b'directory %r already in dirstate')
886 msg = _(b'directory %r already in dirstate')
881 msg %= pycompat.bytestr(filename)
887 msg %= pycompat.bytestr(filename)
882 raise error.Abort(msg)
888 raise error.Abort(msg)
883 # shadows
889 # shadows
884 for d in pathutil.finddirs(filename):
890 for d in pathutil.finddirs(filename):
885 if self._map.hastrackeddir(d):
891 if self._map.hastrackeddir(d):
886 break
892 break
887 entry = self._map.get(d)
893 entry = self._map.get(d)
888 if entry is not None and not entry.removed:
894 if entry is not None and not entry.removed:
889 msg = _(b'file %r in dirstate clashes with %r')
895 msg = _(b'file %r in dirstate clashes with %r')
890 msg %= (pycompat.bytestr(d), pycompat.bytestr(filename))
896 msg %= (pycompat.bytestr(d), pycompat.bytestr(filename))
891 raise error.Abort(msg)
897 raise error.Abort(msg)
892 self._check_sparse(filename)
898 self._check_sparse(filename)
893
899
894 def _check_sparse(self, filename):
900 def _check_sparse(self, filename):
895 """Check that a filename is inside the sparse profile"""
901 """Check that a filename is inside the sparse profile"""
896 sparsematch = self._sparsematcher
902 sparsematch = self._sparsematcher
897 if sparsematch is not None and not sparsematch.always():
903 if sparsematch is not None and not sparsematch.always():
898 if not sparsematch(filename):
904 if not sparsematch(filename):
899 msg = _(b"cannot add '%s' - it is outside the sparse checkout")
905 msg = _(b"cannot add '%s' - it is outside the sparse checkout")
900 hint = _(
906 hint = _(
901 b'include file with `hg debugsparse --include <pattern>` or use '
907 b'include file with `hg debugsparse --include <pattern>` or use '
902 b'`hg add -s <file>` to include file directory while adding'
908 b'`hg add -s <file>` to include file directory while adding'
903 )
909 )
904 raise error.Abort(msg % filename, hint=hint)
910 raise error.Abort(msg % filename, hint=hint)
905
911
906 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
912 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
907 if exists is None:
913 if exists is None:
908 exists = os.path.lexists(os.path.join(self._root, path))
914 exists = os.path.lexists(os.path.join(self._root, path))
909 if not exists:
915 if not exists:
910 # Maybe a path component exists
916 # Maybe a path component exists
911 if not ignoremissing and b'/' in path:
917 if not ignoremissing and b'/' in path:
912 d, f = path.rsplit(b'/', 1)
918 d, f = path.rsplit(b'/', 1)
913 d = self._normalize(d, False, ignoremissing, None)
919 d = self._normalize(d, False, ignoremissing, None)
914 folded = d + b"/" + f
920 folded = d + b"/" + f
915 else:
921 else:
916 # No path components, preserve original case
922 # No path components, preserve original case
917 folded = path
923 folded = path
918 else:
924 else:
919 # recursively normalize leading directory components
925 # recursively normalize leading directory components
920 # against dirstate
926 # against dirstate
921 if b'/' in normed:
927 if b'/' in normed:
922 d, f = normed.rsplit(b'/', 1)
928 d, f = normed.rsplit(b'/', 1)
923 d = self._normalize(d, False, ignoremissing, True)
929 d = self._normalize(d, False, ignoremissing, True)
924 r = self._root + b"/" + d
930 r = self._root + b"/" + d
925 folded = d + b"/" + util.fspath(f, r)
931 folded = d + b"/" + util.fspath(f, r)
926 else:
932 else:
927 folded = util.fspath(normed, self._root)
933 folded = util.fspath(normed, self._root)
928 storemap[normed] = folded
934 storemap[normed] = folded
929
935
930 return folded
936 return folded
931
937
932 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
938 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
933 normed = util.normcase(path)
939 normed = util.normcase(path)
934 folded = self._map.filefoldmap.get(normed, None)
940 folded = self._map.filefoldmap.get(normed, None)
935 if folded is None:
941 if folded is None:
936 if isknown:
942 if isknown:
937 folded = path
943 folded = path
938 else:
944 else:
939 folded = self._discoverpath(
945 folded = self._discoverpath(
940 path, normed, ignoremissing, exists, self._map.filefoldmap
946 path, normed, ignoremissing, exists, self._map.filefoldmap
941 )
947 )
942 return folded
948 return folded
943
949
944 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
950 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
945 normed = util.normcase(path)
951 normed = util.normcase(path)
946 folded = self._map.filefoldmap.get(normed, None)
952 folded = self._map.filefoldmap.get(normed, None)
947 if folded is None:
953 if folded is None:
948 folded = self._map.dirfoldmap.get(normed, None)
954 folded = self._map.dirfoldmap.get(normed, None)
949 if folded is None:
955 if folded is None:
950 if isknown:
956 if isknown:
951 folded = path
957 folded = path
952 else:
958 else:
953 # store discovered result in dirfoldmap so that future
959 # store discovered result in dirfoldmap so that future
954 # normalizefile calls don't start matching directories
960 # normalizefile calls don't start matching directories
955 folded = self._discoverpath(
961 folded = self._discoverpath(
956 path, normed, ignoremissing, exists, self._map.dirfoldmap
962 path, normed, ignoremissing, exists, self._map.dirfoldmap
957 )
963 )
958 return folded
964 return folded
959
965
960 def normalize(self, path, isknown=False, ignoremissing=False):
966 def normalize(self, path, isknown=False, ignoremissing=False):
961 """
967 """
962 normalize the case of a pathname when on a casefolding filesystem
968 normalize the case of a pathname when on a casefolding filesystem
963
969
964 isknown specifies whether the filename came from walking the
970 isknown specifies whether the filename came from walking the
965 disk, to avoid extra filesystem access.
971 disk, to avoid extra filesystem access.
966
972
967 If ignoremissing is True, missing path are returned
973 If ignoremissing is True, missing path are returned
968 unchanged. Otherwise, we try harder to normalize possibly
974 unchanged. Otherwise, we try harder to normalize possibly
969 existing path components.
975 existing path components.
970
976
971 The normalized case is determined based on the following precedence:
977 The normalized case is determined based on the following precedence:
972
978
973 - version of name already stored in the dirstate
979 - version of name already stored in the dirstate
974 - version of name stored on disk
980 - version of name stored on disk
975 - version provided via command arguments
981 - version provided via command arguments
976 """
982 """
977
983
978 if self._checkcase:
984 if self._checkcase:
979 return self._normalize(path, isknown, ignoremissing)
985 return self._normalize(path, isknown, ignoremissing)
980 return path
986 return path
981
987
982 # XXX this method is barely used, as a result:
988 # XXX this method is barely used, as a result:
983 # - its semantic is unclear
989 # - its semantic is unclear
984 # - do we really needs it ?
990 # - do we really needs it ?
    @requires_changing_parents
    def clear(self):
        """Drop every entry from the dirstate map and mark the dirstate dirty.

        Must be called within a `changing_parents` context (enforced by the
        decorator).
        """
        self._map.clear()
        self._dirty = True
989
995
    @requires_changing_parents
    def rebuild(self, parent, allfiles, changedfiles=None):
        """Rebuild dirstate entries so the working-copy parent is ``parent``.

        ``allfiles`` lists every file that should end up tracked.
        ``changedfiles``, when given, restricts the work to those files;
        everything else is assumed unchanged. Must run in a
        `changing_parents` context (enforced by the decorator).
        """
        matcher = self._sparsematcher
        if matcher is not None and not matcher.always():
            # should not add non-matching files
            allfiles = [f for f in allfiles if matcher(f)]
            if changedfiles:
                changedfiles = [f for f in changedfiles if matcher(f)]

            if changedfiles is not None:
                # these files will be deleted from the dirstate when they are
                # not found to be in allfiles
                dirstatefilestoremove = {f for f in self if not matcher(f)}
                changedfiles = dirstatefilestoremove.union(changedfiles)

        if changedfiles is None:
            # Rebuild entire dirstate
            to_lookup = allfiles
            to_drop = []
            self.clear()
        elif len(changedfiles) < 10:
            # Avoid turning allfiles into a set, which can be expensive if it's
            # large.
            to_lookup = []
            to_drop = []
            for f in changedfiles:
                if f in allfiles:
                    to_lookup.append(f)
                else:
                    to_drop.append(f)
        else:
            changedfilesset = set(changedfiles)
            to_lookup = changedfilesset & set(allfiles)
            to_drop = changedfilesset - to_lookup

        if self._origpl is None:
            # remember the pre-rebuild parents so parent-change callbacks can
            # fire on the next write
            self._origpl = self._pl
        self._map.setparents(parent, self._nodeconstants.nullid)

        for f in to_lookup:
            if self.in_merge:
                # NOTE(review): merges go through set_tracked rather than a
                # plain reset_state — presumably to preserve merge-related
                # entry state; confirm before changing.
                self.set_tracked(f)
            else:
                self._map.reset_state(
                    f,
                    wc_tracked=True,
                    p1_tracked=True,
                )
        for f in to_drop:
            self._map.reset_state(f)

        self._dirty = True
1042
1048
1043 def _setup_tr_abort(self, tr):
1049 def _setup_tr_abort(self, tr):
1044 """make sure we invalidate the current change on abort"""
1050 """make sure we invalidate the current change on abort"""
1045 if tr is None:
1051 if tr is None:
1046 return
1052 return
1047
1053
1048 def on_abort(tr):
1054 def on_abort(tr):
1049 self._attached_to_a_transaction = False
1055 self._attached_to_a_transaction = False
1050 self.invalidate()
1056 self.invalidate()
1051
1057
1052 tr.addabort(
1058 tr.addabort(
1053 b'dirstate-invalidate%s' % self._tr_key_suffix,
1059 b'dirstate-invalidate%s' % self._tr_key_suffix,
1054 on_abort,
1060 on_abort,
1055 )
1061 )
1056
1062
1057 def write(self, tr):
1063 def write(self, tr):
1058 if not self._dirty:
1064 if not self._dirty:
1059 return
1065 return
1060 # make sure we don't request a write of invalidated content
1066 # make sure we don't request a write of invalidated content
1061 # XXX move before the dirty check once `unlock` stop calling `write`
1067 # XXX move before the dirty check once `unlock` stop calling `write`
1062 assert not self._invalidated_context
1068 assert not self._invalidated_context
1063
1069
1064 write_key = self._use_tracked_hint and self._dirty_tracked_set
1070 write_key = self._use_tracked_hint and self._dirty_tracked_set
1065 if tr:
1071 if tr:
1066
1072
1067 self._setup_tr_abort(tr)
1073 self._setup_tr_abort(tr)
1068 self._attached_to_a_transaction = True
1074 self._attached_to_a_transaction = True
1069
1075
1070 def on_success(f):
1076 def on_success(f):
1071 self._attached_to_a_transaction = False
1077 self._attached_to_a_transaction = False
1072 self._writedirstate(tr, f),
1078 self._writedirstate(tr, f),
1073
1079
1074 # delay writing in-memory changes out
1080 # delay writing in-memory changes out
1075 tr.addfilegenerator(
1081 tr.addfilegenerator(
1076 b'dirstate-1-main%s' % self._tr_key_suffix,
1082 b'dirstate-1-main%s' % self._tr_key_suffix,
1077 (self._filename,),
1083 (self._filename,),
1078 on_success,
1084 on_success,
1079 location=b'plain',
1085 location=b'plain',
1080 post_finalize=True,
1086 post_finalize=True,
1081 )
1087 )
1082 if write_key:
1088 if write_key:
1083 tr.addfilegenerator(
1089 tr.addfilegenerator(
1084 b'dirstate-2-key-post%s' % self._tr_key_suffix,
1090 b'dirstate-2-key-post%s' % self._tr_key_suffix,
1085 (self._filename_th,),
1091 (self._filename_th,),
1086 lambda f: self._write_tracked_hint(tr, f),
1092 lambda f: self._write_tracked_hint(tr, f),
1087 location=b'plain',
1093 location=b'plain',
1088 post_finalize=True,
1094 post_finalize=True,
1089 )
1095 )
1090 return
1096 return
1091
1097
1092 file = lambda f: self._opener(f, b"w", atomictemp=True, checkambig=True)
1098 file = lambda f: self._opener(f, b"w", atomictemp=True, checkambig=True)
1093 with file(self._filename) as f:
1099 with file(self._filename) as f:
1094 self._writedirstate(tr, f)
1100 self._writedirstate(tr, f)
1095 if write_key:
1101 if write_key:
1096 # we update the key-file after writing to make sure reader have a
1102 # we update the key-file after writing to make sure reader have a
1097 # key that match the newly written content
1103 # key that match the newly written content
1098 with file(self._filename_th) as f:
1104 with file(self._filename_th) as f:
1099 self._write_tracked_hint(tr, f)
1105 self._write_tracked_hint(tr, f)
1100
1106
    def delete_tracked_hint(self):
        """remove the tracked_hint file

        To be used by format downgrades operation"""
        self._opener.unlink(self._filename_th)
        # only disable the feature once the file is actually gone
        self._use_tracked_hint = False
1107
1113
    def addparentchangecallback(self, category, callback):
        """add a callback to be called when the wd parents are changed

        Callback will be called with the following arguments:
            dirstate, (oldp1, oldp2), (newp1, newp2)

        Category is a unique identifier to allow overwriting an old callback
        with a newer callback.
        """
        # plain dict assignment: a later registration under the same
        # category silently replaces the earlier callback
        self._plchangecallbacks[category] = callback
1118
1124
1119 def _writedirstate(self, tr, st):
1125 def _writedirstate(self, tr, st):
1120 # make sure we don't write invalidated content
1126 # make sure we don't write invalidated content
1121 assert not self._invalidated_context
1127 assert not self._invalidated_context
1122 # notify callbacks about parents change
1128 # notify callbacks about parents change
1123 if self._origpl is not None and self._origpl != self._pl:
1129 if self._origpl is not None and self._origpl != self._pl:
1124 for c, callback in sorted(self._plchangecallbacks.items()):
1130 for c, callback in sorted(self._plchangecallbacks.items()):
1125 callback(self, self._origpl, self._pl)
1131 callback(self, self._origpl, self._pl)
1126 self._origpl = None
1132 self._origpl = None
1127 self._map.write(tr, st)
1133 self._map.write(tr, st)
1128 self._dirty = False
1134 self._dirty = False
1129 self._dirty_tracked_set = False
1135 self._dirty_tracked_set = False
1130
1136
1131 def _write_tracked_hint(self, tr, f):
1137 def _write_tracked_hint(self, tr, f):
1132 key = node.hex(uuid.uuid4().bytes)
1138 key = node.hex(uuid.uuid4().bytes)
1133 f.write(b"1\n%s\n" % key) # 1 is the format version
1139 f.write(b"1\n%s\n" % key) # 1 is the format version
1134
1140
1135 def _dirignore(self, f):
1141 def _dirignore(self, f):
1136 if self._ignore(f):
1142 if self._ignore(f):
1137 return True
1143 return True
1138 for p in pathutil.finddirs(f):
1144 for p in pathutil.finddirs(f):
1139 if self._ignore(p):
1145 if self._ignore(p):
1140 return True
1146 return True
1141 return False
1147 return False
1142
1148
1143 def _ignorefiles(self):
1149 def _ignorefiles(self):
1144 files = []
1150 files = []
1145 if os.path.exists(self._join(b'.hgignore')):
1151 if os.path.exists(self._join(b'.hgignore')):
1146 files.append(self._join(b'.hgignore'))
1152 files.append(self._join(b'.hgignore'))
1147 for name, path in self._ui.configitems(b"ui"):
1153 for name, path in self._ui.configitems(b"ui"):
1148 if name == b'ignore' or name.startswith(b'ignore.'):
1154 if name == b'ignore' or name.startswith(b'ignore.'):
1149 # we need to use os.path.join here rather than self._join
1155 # we need to use os.path.join here rather than self._join
1150 # because path is arbitrary and user-specified
1156 # because path is arbitrary and user-specified
1151 files.append(os.path.join(self._rootdir, util.expandpath(path)))
1157 files.append(os.path.join(self._rootdir, util.expandpath(path)))
1152 return files
1158 return files
1153
1159
    def _ignorefileandline(self, f):
        """Return ``(patternfile, lineno, line)`` for the first ignore
        pattern matching ``f``, following ``subinclude`` references
        breadth-first.

        Returns ``(None, -1, b"")`` when no pattern matches.
        """
        files = collections.deque(self._ignorefiles())
        visited = set()
        while files:
            i = files.popleft()
            patterns = matchmod.readpatternfile(
                i, self._ui.warn, sourceinfo=True
            )
            for pattern, lineno, line in patterns:
                kind, p = matchmod._patsplit(pattern, b'glob')
                if kind == b"subinclude":
                    # queue the referenced pattern file unless already seen
                    if p not in visited:
                        files.append(p)
                    continue
                m = matchmod.match(
                    self._root, b'', [], [pattern], warn=self._ui.warn
                )
                if m(f):
                    return (i, lineno, line)
            visited.add(i)
        return (None, -1, b"")
1175
1181
    def _walkexplicit(self, match, subrepos):
        """Get stat data about the files explicitly specified by match.

        Return a triple (results, dirsfound, dirsnotfound).
        - results is a mapping from filename to stat result. It also contains
          listings mapping subrepos and .hg to None.
        - dirsfound is a list of files found to be directories.
        - dirsnotfound is a list of files that the dirstate thinks are
          directories and that were not found."""

        def badtype(mode):
            # human-readable description for a file type we cannot track
            kind = _(b'unknown')
            if stat.S_ISCHR(mode):
                kind = _(b'character device')
            elif stat.S_ISBLK(mode):
                kind = _(b'block device')
            elif stat.S_ISFIFO(mode):
                kind = _(b'fifo')
            elif stat.S_ISSOCK(mode):
                kind = _(b'socket')
            elif stat.S_ISDIR(mode):
                kind = _(b'directory')
            return _(b'unsupported file type (type is %s)') % kind

        # bind frequently-used callables to locals for the loops below
        badfn = match.bad
        dmap = self._map
        lstat = os.lstat
        getkind = stat.S_IFMT
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join
        dirsfound = []
        foundadd = dirsfound.append
        dirsnotfound = []
        notfoundadd = dirsnotfound.append

        if not match.isexact() and self._checkcase:
            normalize = self._normalize
        else:
            normalize = None

        # drop any requested file that lives inside a subrepo; the two
        # sorted lists are merged in a single pass
        files = sorted(match.files())
        subrepos.sort()
        i, j = 0, 0
        while i < len(files) and j < len(subrepos):
            subpath = subrepos[j] + b"/"
            if files[i] < subpath:
                i += 1
                continue
            while i < len(files) and files[i].startswith(subpath):
                del files[i]
            j += 1

        if not files or b'' in files:
            files = [b'']
            # constructing the foldmap is expensive, so don't do it for the
            # common case where files is ['']
            normalize = None
        results = dict.fromkeys(subrepos)
        results[b'.hg'] = None

        for ff in files:
            if normalize:
                nf = normalize(ff, False, True)
            else:
                nf = ff
            if nf in results:
                continue

            try:
                st = lstat(join(nf))
                kind = getkind(st.st_mode)
                if kind == dirkind:
                    if nf in dmap:
                        # file replaced by dir on disk but still in dirstate
                        results[nf] = None
                    foundadd((nf, ff))
                elif kind == regkind or kind == lnkkind:
                    results[nf] = st
                else:
                    badfn(ff, badtype(kind))
                    if nf in dmap:
                        results[nf] = None
            except (OSError) as inst:
                # nf not found on disk - it is dirstate only
                if nf in dmap:  # does it exactly match a missing file?
                    results[nf] = None
                else:  # does it match a missing directory?
                    if self._map.hasdir(nf):
                        notfoundadd(nf)
                    else:
                        badfn(ff, encoding.strtolocal(inst.strerror))

        # match.files() may contain explicitly-specified paths that shouldn't
        # be taken; drop them from the list of files found. dirsfound/notfound
        # aren't filtered here because they will be tested later.
        if match.anypats():
            for f in list(results):
                if f == b'.hg' or f in subrepos:
                    # keep sentinel to disable further out-of-repo walks
                    continue
                if not match(f):
                    del results[f]

        # Case insensitive filesystems cannot rely on lstat() failing to detect
        # a case-only rename. Prune the stat object for any file that does not
        # match the case in the filesystem, if there are multiple files that
        # normalize to the same path.
        if match.isexact() and self._checkcase:
            normed = {}

            # group the stat'ed results by their case-normalized form
            for f, st in results.items():
                if st is None:
                    continue

                nc = util.normcase(f)
                paths = normed.get(nc)

                if paths is None:
                    paths = set()
                    normed[nc] = paths

                paths.add(f)

            for norm, paths in normed.items():
                if len(paths) > 1:
                    for path in paths:
                        folded = self._discoverpath(
                            path, norm, True, None, self._map.dirfoldmap
                        )
                        if path != folded:
                            results[path] = None

        return results, dirsfound, dirsnotfound
1311
1317
1312 def walk(self, match, subrepos, unknown, ignored, full=True):
1318 def walk(self, match, subrepos, unknown, ignored, full=True):
1313 """
1319 """
1314 Walk recursively through the directory tree, finding all files
1320 Walk recursively through the directory tree, finding all files
1315 matched by match.
1321 matched by match.
1316
1322
1317 If full is False, maybe skip some known-clean files.
1323 If full is False, maybe skip some known-clean files.
1318
1324
1319 Return a dict mapping filename to stat-like object (either
1325 Return a dict mapping filename to stat-like object (either
1320 mercurial.osutil.stat instance or return value of os.stat()).
1326 mercurial.osutil.stat instance or return value of os.stat()).
1321
1327
1322 """
1328 """
1323 # full is a flag that extensions that hook into walk can use -- this
1329 # full is a flag that extensions that hook into walk can use -- this
1324 # implementation doesn't use it at all. This satisfies the contract
1330 # implementation doesn't use it at all. This satisfies the contract
1325 # because we only guarantee a "maybe".
1331 # because we only guarantee a "maybe".
1326
1332
1327 if ignored:
1333 if ignored:
1328 ignore = util.never
1334 ignore = util.never
1329 dirignore = util.never
1335 dirignore = util.never
1330 elif unknown:
1336 elif unknown:
1331 ignore = self._ignore
1337 ignore = self._ignore
1332 dirignore = self._dirignore
1338 dirignore = self._dirignore
1333 else:
1339 else:
1334 # if not unknown and not ignored, drop dir recursion and step 2
1340 # if not unknown and not ignored, drop dir recursion and step 2
1335 ignore = util.always
1341 ignore = util.always
1336 dirignore = util.always
1342 dirignore = util.always
1337
1343
1338 if self._sparsematchfn is not None:
1344 if self._sparsematchfn is not None:
1339 em = matchmod.exact(match.files())
1345 em = matchmod.exact(match.files())
1340 sm = matchmod.unionmatcher([self._sparsematcher, em])
1346 sm = matchmod.unionmatcher([self._sparsematcher, em])
1341 match = matchmod.intersectmatchers(match, sm)
1347 match = matchmod.intersectmatchers(match, sm)
1342
1348
1343 matchfn = match.matchfn
1349 matchfn = match.matchfn
1344 matchalways = match.always()
1350 matchalways = match.always()
1345 matchtdir = match.traversedir
1351 matchtdir = match.traversedir
1346 dmap = self._map
1352 dmap = self._map
1347 listdir = util.listdir
1353 listdir = util.listdir
1348 lstat = os.lstat
1354 lstat = os.lstat
1349 dirkind = stat.S_IFDIR
1355 dirkind = stat.S_IFDIR
1350 regkind = stat.S_IFREG
1356 regkind = stat.S_IFREG
1351 lnkkind = stat.S_IFLNK
1357 lnkkind = stat.S_IFLNK
1352 join = self._join
1358 join = self._join
1353
1359
1354 exact = skipstep3 = False
1360 exact = skipstep3 = False
1355 if match.isexact(): # match.exact
1361 if match.isexact(): # match.exact
1356 exact = True
1362 exact = True
1357 dirignore = util.always # skip step 2
1363 dirignore = util.always # skip step 2
1358 elif match.prefix(): # match.match, no patterns
1364 elif match.prefix(): # match.match, no patterns
1359 skipstep3 = True
1365 skipstep3 = True
1360
1366
1361 if not exact and self._checkcase:
1367 if not exact and self._checkcase:
1362 normalize = self._normalize
1368 normalize = self._normalize
1363 normalizefile = self._normalizefile
1369 normalizefile = self._normalizefile
1364 skipstep3 = False
1370 skipstep3 = False
1365 else:
1371 else:
1366 normalize = self._normalize
1372 normalize = self._normalize
1367 normalizefile = None
1373 normalizefile = None
1368
1374
1369 # step 1: find all explicit files
1375 # step 1: find all explicit files
1370 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1376 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1371 if matchtdir:
1377 if matchtdir:
1372 for d in work:
1378 for d in work:
1373 matchtdir(d[0])
1379 matchtdir(d[0])
1374 for d in dirsnotfound:
1380 for d in dirsnotfound:
1375 matchtdir(d)
1381 matchtdir(d)
1376
1382
1377 skipstep3 = skipstep3 and not (work or dirsnotfound)
1383 skipstep3 = skipstep3 and not (work or dirsnotfound)
1378 work = [d for d in work if not dirignore(d[0])]
1384 work = [d for d in work if not dirignore(d[0])]
1379
1385
1380 # step 2: visit subdirectories
1386 # step 2: visit subdirectories
1381 def traverse(work, alreadynormed):
1387 def traverse(work, alreadynormed):
1382 wadd = work.append
1388 wadd = work.append
1383 while work:
1389 while work:
1384 tracing.counter('dirstate.walk work', len(work))
1390 tracing.counter('dirstate.walk work', len(work))
1385 nd = work.pop()
1391 nd = work.pop()
1386 visitentries = match.visitchildrenset(nd)
1392 visitentries = match.visitchildrenset(nd)
1387 if not visitentries:
1393 if not visitentries:
1388 continue
1394 continue
1389 if visitentries == b'this' or visitentries == b'all':
1395 if visitentries == b'this' or visitentries == b'all':
1390 visitentries = None
1396 visitentries = None
1391 skip = None
1397 skip = None
1392 if nd != b'':
1398 if nd != b'':
1393 skip = b'.hg'
1399 skip = b'.hg'
1394 try:
1400 try:
1395 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1401 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1396 entries = listdir(join(nd), stat=True, skip=skip)
1402 entries = listdir(join(nd), stat=True, skip=skip)
1397 except (PermissionError, FileNotFoundError) as inst:
1403 except (PermissionError, FileNotFoundError) as inst:
1398 match.bad(
1404 match.bad(
1399 self.pathto(nd), encoding.strtolocal(inst.strerror)
1405 self.pathto(nd), encoding.strtolocal(inst.strerror)
1400 )
1406 )
1401 continue
1407 continue
1402 for f, kind, st in entries:
1408 for f, kind, st in entries:
1403 # Some matchers may return files in the visitentries set,
1409 # Some matchers may return files in the visitentries set,
1404 # instead of 'this', if the matcher explicitly mentions them
1410 # instead of 'this', if the matcher explicitly mentions them
1405 # and is not an exactmatcher. This is acceptable; we do not
1411 # and is not an exactmatcher. This is acceptable; we do not
1406 # make any hard assumptions about file-or-directory below
1412 # make any hard assumptions about file-or-directory below
1407 # based on the presence of `f` in visitentries. If
1413 # based on the presence of `f` in visitentries. If
1408 # visitchildrenset returned a set, we can always skip the
1414 # visitchildrenset returned a set, we can always skip the
1409 # entries *not* in the set it provided regardless of whether
1415 # entries *not* in the set it provided regardless of whether
1410 # they're actually a file or a directory.
1416 # they're actually a file or a directory.
1411 if visitentries and f not in visitentries:
1417 if visitentries and f not in visitentries:
1412 continue
1418 continue
1413 if normalizefile:
1419 if normalizefile:
1414 # even though f might be a directory, we're only
1420 # even though f might be a directory, we're only
1415 # interested in comparing it to files currently in the
1421 # interested in comparing it to files currently in the
1416 # dmap -- therefore normalizefile is enough
1422 # dmap -- therefore normalizefile is enough
1417 nf = normalizefile(
1423 nf = normalizefile(
1418 nd and (nd + b"/" + f) or f, True, True
1424 nd and (nd + b"/" + f) or f, True, True
1419 )
1425 )
1420 else:
1426 else:
1421 nf = nd and (nd + b"/" + f) or f
1427 nf = nd and (nd + b"/" + f) or f
1422 if nf not in results:
1428 if nf not in results:
1423 if kind == dirkind:
1429 if kind == dirkind:
1424 if not ignore(nf):
1430 if not ignore(nf):
1425 if matchtdir:
1431 if matchtdir:
1426 matchtdir(nf)
1432 matchtdir(nf)
1427 wadd(nf)
1433 wadd(nf)
1428 if nf in dmap and (matchalways or matchfn(nf)):
1434 if nf in dmap and (matchalways or matchfn(nf)):
1429 results[nf] = None
1435 results[nf] = None
1430 elif kind == regkind or kind == lnkkind:
1436 elif kind == regkind or kind == lnkkind:
1431 if nf in dmap:
1437 if nf in dmap:
1432 if matchalways or matchfn(nf):
1438 if matchalways or matchfn(nf):
1433 results[nf] = st
1439 results[nf] = st
1434 elif (matchalways or matchfn(nf)) and not ignore(
1440 elif (matchalways or matchfn(nf)) and not ignore(
1435 nf
1441 nf
1436 ):
1442 ):
1437 # unknown file -- normalize if necessary
1443 # unknown file -- normalize if necessary
1438 if not alreadynormed:
1444 if not alreadynormed:
1439 nf = normalize(nf, False, True)
1445 nf = normalize(nf, False, True)
1440 results[nf] = st
1446 results[nf] = st
1441 elif nf in dmap and (matchalways or matchfn(nf)):
1447 elif nf in dmap and (matchalways or matchfn(nf)):
1442 results[nf] = None
1448 results[nf] = None
1443
1449
1444 for nd, d in work:
1450 for nd, d in work:
1445 # alreadynormed means that processwork doesn't have to do any
1451 # alreadynormed means that processwork doesn't have to do any
1446 # expensive directory normalization
1452 # expensive directory normalization
1447 alreadynormed = not normalize or nd == d
1453 alreadynormed = not normalize or nd == d
1448 traverse([d], alreadynormed)
1454 traverse([d], alreadynormed)
1449
1455
1450 for s in subrepos:
1456 for s in subrepos:
1451 del results[s]
1457 del results[s]
1452 del results[b'.hg']
1458 del results[b'.hg']
1453
1459
1454 # step 3: visit remaining files from dmap
1460 # step 3: visit remaining files from dmap
1455 if not skipstep3 and not exact:
1461 if not skipstep3 and not exact:
1456 # If a dmap file is not in results yet, it was either
1462 # If a dmap file is not in results yet, it was either
1457 # a) not matching matchfn b) ignored, c) missing, or d) under a
1463 # a) not matching matchfn b) ignored, c) missing, or d) under a
1458 # symlink directory.
1464 # symlink directory.
1459 if not results and matchalways:
1465 if not results and matchalways:
1460 visit = [f for f in dmap]
1466 visit = [f for f in dmap]
1461 else:
1467 else:
1462 visit = [f for f in dmap if f not in results and matchfn(f)]
1468 visit = [f for f in dmap if f not in results and matchfn(f)]
1463 visit.sort()
1469 visit.sort()
1464
1470
1465 if unknown:
1471 if unknown:
1466 # unknown == True means we walked all dirs under the roots
1472 # unknown == True means we walked all dirs under the roots
1467 # that wasn't ignored, and everything that matched was stat'ed
1473 # that wasn't ignored, and everything that matched was stat'ed
1468 # and is already in results.
1474 # and is already in results.
1469 # The rest must thus be ignored or under a symlink.
1475 # The rest must thus be ignored or under a symlink.
1470 audit_path = pathutil.pathauditor(self._root, cached=True)
1476 audit_path = pathutil.pathauditor(self._root, cached=True)
1471
1477
1472 for nf in iter(visit):
1478 for nf in iter(visit):
1473 # If a stat for the same file was already added with a
1479 # If a stat for the same file was already added with a
1474 # different case, don't add one for this, since that would
1480 # different case, don't add one for this, since that would
1475 # make it appear as if the file exists under both names
1481 # make it appear as if the file exists under both names
1476 # on disk.
1482 # on disk.
1477 if (
1483 if (
1478 normalizefile
1484 normalizefile
1479 and normalizefile(nf, True, True) in results
1485 and normalizefile(nf, True, True) in results
1480 ):
1486 ):
1481 results[nf] = None
1487 results[nf] = None
1482 # Report ignored items in the dmap as long as they are not
1488 # Report ignored items in the dmap as long as they are not
1483 # under a symlink directory.
1489 # under a symlink directory.
1484 elif audit_path.check(nf):
1490 elif audit_path.check(nf):
1485 try:
1491 try:
1486 results[nf] = lstat(join(nf))
1492 results[nf] = lstat(join(nf))
1487 # file was just ignored, no links, and exists
1493 # file was just ignored, no links, and exists
1488 except OSError:
1494 except OSError:
1489 # file doesn't exist
1495 # file doesn't exist
1490 results[nf] = None
1496 results[nf] = None
1491 else:
1497 else:
1492 # It's either missing or under a symlink directory
1498 # It's either missing or under a symlink directory
1493 # which we in this case report as missing
1499 # which we in this case report as missing
1494 results[nf] = None
1500 results[nf] = None
1495 else:
1501 else:
1496 # We may not have walked the full directory tree above,
1502 # We may not have walked the full directory tree above,
1497 # so stat and check everything we missed.
1503 # so stat and check everything we missed.
1498 iv = iter(visit)
1504 iv = iter(visit)
1499 for st in util.statfiles([join(i) for i in visit]):
1505 for st in util.statfiles([join(i) for i in visit]):
1500 results[next(iv)] = st
1506 results[next(iv)] = st
1501 return results
1507 return results
1502
1508
def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
    """Compute working-copy status through the Rust fast path.

    Delegates the walk/classification work to ``rustmod.status`` and
    converts its raw result into ``(lookup, scmutil.status)``, where
    ``lookup`` is the list of files whose state could not be decided
    without reading their content.

    May raise ``rustmod.FallbackError`` (from ``rustmod.status``), in
    which case the caller falls back to the pure-Python implementation.
    """
    if self._sparsematchfn is not None:
        # Narrow the matcher to the sparse checkout, but keep files that
        # were explicitly requested even if outside the sparse profile.
        em = matchmod.exact(matcher.files())
        sm = matchmod.unionmatcher([self._sparsematcher, em])
        matcher = matchmod.intersectmatchers(matcher, sm)
    # Force Rayon (Rust parallelism library) to respect the number of
    # workers. This is a temporary workaround until Rust code knows
    # how to read the config file.
    numcpus = self._ui.configint(b"worker", b"numcpus")
    if numcpus is not None:
        encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)

    workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
    if not workers_enabled:
        # A single Rayon thread effectively disables parallelism.
        encoding.environ[b"RAYON_NUM_THREADS"] = b"1"

    (
        lookup,
        modified,
        added,
        removed,
        deleted,
        clean,
        ignored,
        unknown,
        warnings,
        bad,
        traversed,
        dirty,
    ) = rustmod.status(
        self._map._map,
        matcher,
        self._rootdir,
        self._ignorefiles(),
        self._checkexec,
        bool(list_clean),
        bool(list_ignored),
        bool(list_unknown),
        bool(matcher.traversedir),
    )

    # The Rust side may have fixed up dirstate entries; remember that the
    # in-memory dirstate now needs to be written out.
    self._dirty |= dirty

    if matcher.traversedir:
        # Replay the directory-traversal callbacks the Rust walk recorded.
        for dir in traversed:
            matcher.traversedir(dir)

    if self._ui.warn:
        for item in warnings:
            if isinstance(item, tuple):
                # (file_path, syntax) pair: an ignore file had a bad line.
                file_path, syntax = item
                msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
                    file_path,
                    syntax,
                )
                self._ui.warn(msg)
            else:
                # Bare path: an ignore/pattern file could not be read.
                msg = _(b"skipping unreadable pattern file '%s': %s\n")
                self._ui.warn(
                    msg
                    % (
                        pathutil.canonpath(
                            self._rootdir, self._rootdir, item
                        ),
                        b"No such file or directory",
                    )
                )

    for fn, message in bad:
        matcher.bad(fn, encoding.strtolocal(message))

    status = scmutil.status(
        modified=modified,
        added=added,
        removed=removed,
        deleted=deleted,
        unknown=unknown,
        ignored=ignored,
        clean=clean,
    )
    return (lookup, status)
1584
1590
def status(self, match, subrepos, ignored, clean, unknown):
    """Determine the status of the working copy relative to the
    dirstate and return a pair of (unsure, status), where status is of type
    scmutil.status and:

      unsure:
        files that might have been modified since the dirstate was
        written, but need to be read to be sure (size is the same
        but mtime differs)
      status.modified:
        files that have definitely been modified since the dirstate
        was written (different size or mode)
      status.clean:
        files that have definitely not been modified since the
        dirstate was written

    Raises error.ProgrammingError when called outside of a
    ``running_status`` context.
    """
    if not self._running_status:
        msg = "Calling `status` outside a `running_status` context"
        raise error.ProgrammingError(msg)
    # Keep the boolean flags under distinct names; the original parameter
    # names are reused below as the result accumulator lists.
    listignored, listclean, listunknown = ignored, clean, unknown
    lookup, modified, added, unknown, ignored = [], [], [], [], []
    removed, deleted, clean = [], [], []

    dmap = self._map
    dmap.preload()

    use_rust = True

    # Matcher classes the Rust implementation knows how to handle.
    allowed_matchers = (
        matchmod.alwaysmatcher,
        matchmod.differencematcher,
        matchmod.exactmatcher,
        matchmod.includematcher,
        matchmod.intersectionmatcher,
        matchmod.nevermatcher,
        matchmod.unionmatcher,
    )

    if rustmod is None:
        use_rust = False
    elif self._checkcase:
        # Case-insensitive filesystems are not handled yet
        use_rust = False
    elif subrepos:
        use_rust = False
    elif not isinstance(match, allowed_matchers):
        # Some matchers have yet to be implemented
        use_rust = False

    # Get the time from the filesystem so we can disambiguate files that
    # appear modified in the present or future.
    try:
        mtime_boundary = timestamp.get_fs_now(self._opener)
    except OSError:
        # In largefiles or readonly context
        mtime_boundary = None

    if use_rust:
        try:
            res = self._rust_status(
                match, listclean, listignored, listunknown
            )
            return res + (mtime_boundary,)
        except rustmod.FallbackError:
            # Rust could not handle this case; fall through to the
            # pure-Python implementation below.
            pass

    def noop(f):
        # Sink used instead of list.append when a category is not wanted.
        pass

    # Bind the hot-loop helpers to locals: these are called once per file
    # in the working directory walk.
    dcontains = dmap.__contains__
    dget = dmap.__getitem__
    ladd = lookup.append  # aka "unsure"
    madd = modified.append
    aadd = added.append
    uadd = unknown.append if listunknown else noop
    iadd = ignored.append if listignored else noop
    radd = removed.append
    dadd = deleted.append
    cadd = clean.append if listclean else noop
    mexact = match.exact
    dirignore = self._dirignore
    checkexec = self._checkexec
    checklink = self._checklink
    copymap = self._map.copymap

    # We need to do full walks when either
    # - we're listing all clean files, or
    # - match.traversedir does something, because match.traversedir should
    #   be called for every dir in the working dir
    full = listclean or match.traversedir is not None
    for fn, st in self.walk(
        match, subrepos, listunknown, listignored, full=full
    ).items():
        if not dcontains(fn):
            # Not tracked by the dirstate: either ignored or unknown.
            if (listignored or mexact(fn)) and dirignore(fn):
                if listignored:
                    iadd(fn)
            else:
                uadd(fn)
            continue

        t = dget(fn)
        mode = t.mode
        size = t.size

        if not st and t.tracked:
            # Tracked but no stat result: gone from disk.
            dadd(fn)
        elif t.p2_info:
            # Entry carries second-parent information (merge in progress).
            madd(fn)
        elif t.added:
            aadd(fn)
        elif t.removed:
            radd(fn)
        elif t.tracked:
            if not checklink and t.has_fallback_symlink:
                # If the file system does not support symlink, the mode
                # might not be correctly stored in the dirstate, so do not
                # trust it.
                ladd(fn)
            elif not checkexec and t.has_fallback_exec:
                # If the file system does not support exec bits, the mode
                # might not be correctly stored in the dirstate, so do not
                # trust it.
                ladd(fn)
            elif (
                size >= 0
                and (
                    (size != st.st_size and size != st.st_size & _rangemask)
                    or ((mode ^ st.st_mode) & 0o100 and checkexec)
                )
                or fn in copymap
            ):
                if stat.S_ISLNK(st.st_mode) and size != st.st_size:
                    # issue6456: Size returned may be longer due to
                    # encryption on EXT-4 fscrypt, undecided.
                    ladd(fn)
                else:
                    madd(fn)
            elif not t.mtime_likely_equal_to(timestamp.mtime_of(st)):
                # There might be a change in the future if for example the
                # internal clock is off, but this is a case where the issues
                # the user would face would be a lot worse and there is
                # nothing we can really do.
                ladd(fn)
            elif listclean:
                cadd(fn)
    status = scmutil.status(
        modified, added, removed, deleted, unknown, ignored, clean
    )
    return (lookup, status, mtime_boundary)
1735
1741
def matches(self, match):
    """Return every file in the dirstate (in whatever state) selected by
    the given matcher."""
    if rustmod is not None:
        mapping = self._map._map
    else:
        mapping = self._map

    if match.always():
        # Everything matches: hand back the key view directly.
        return mapping.keys()

    explicit = match.files()
    if match.isexact():
        # fast path -- filter the other way around, since typically the
        # explicit file list is much smaller than the dirstate map
        return [f for f in explicit if f in mapping]

    if match.prefix() and all(name in mapping for name in explicit):
        # fast path -- all the pattern roots are known to be files, so the
        # match set is exactly the explicit list
        return list(explicit)

    # slow path: test every tracked file against the matcher
    return [f for f in mapping if match(f)]
1756
1762
def _actualfilename(self, tr):
    """Return the dirstate filename to use: the pending file while a
    transaction ``tr`` is active, the regular file otherwise."""
    return self._pendingfilename if tr else self._filename
1762
1768
def all_file_names(self):
    """Return a tuple with every filename currently used by this dirstate.

    This is only used to do `hg rollback` related backup in the transaction
    """
    names = (b'branch',)
    if self._opener.exists(self._filename):
        names += (self._filename,)
        if self._use_dirstate_v2:
            # dirstate-v2 keeps its bulk data in a separate file
            names += (self._map.docket.data_filename(),)
    return names
1774
1780
def verify(self, m1, m2, p1, narrow_matcher=None):
    """
    check the dirstate contents against the parent manifest and yield errors
    """
    err_missing_from_p1 = _(
        b"%s marked as tracked in p1 (%s) but not in manifest1\n"
    )
    err_unexpected_in_p1 = _(b"%s marked as added, but also in manifest1\n")
    err_missing_from_ps = _(
        b"%s marked as modified, but not in either manifest\n"
    )
    err_missing_from_ds = _(
        b"%s in manifest1, but not marked as tracked in p1 (%s)\n"
    )
    # First direction: every dirstate entry must agree with the manifests.
    for filename, entry in self.items():
        if entry.p1_tracked:
            if entry.modified and filename not in m1 and filename not in m2:
                yield err_missing_from_ps % filename
            elif filename not in m1:
                yield err_missing_from_p1 % (filename, node.short(p1))
        if entry.added and filename in m1:
            yield err_unexpected_in_p1 % filename
    # Second direction: every p1 manifest file must be tracked in p1
    # according to the dirstate (restricted to the narrow spec, if any).
    for filename in m1:
        if narrow_matcher is not None and not narrow_matcher(filename):
            continue
        if not self.get_entry(filename).p1_tracked:
            yield err_missing_from_ds % (filename, node.short(p1))
@@ -1,3983 +1,3984 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 # coding: utf-8
2 # coding: utf-8
3 #
3 #
4 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9
9
10 import functools
10 import functools
11 import os
11 import os
12 import random
12 import random
13 import re
13 import re
14 import sys
14 import sys
15 import time
15 import time
16 import weakref
16 import weakref
17
17
18 from concurrent import futures
18 from concurrent import futures
19 from typing import (
19 from typing import (
20 Optional,
20 Optional,
21 )
21 )
22
22
23 from .i18n import _
23 from .i18n import _
24 from .node import (
24 from .node import (
25 bin,
25 bin,
26 hex,
26 hex,
27 nullrev,
27 nullrev,
28 sha1nodeconstants,
28 sha1nodeconstants,
29 short,
29 short,
30 )
30 )
31 from .pycompat import (
31 from .pycompat import (
32 delattr,
32 delattr,
33 getattr,
33 getattr,
34 )
34 )
35 from . import (
35 from . import (
36 bookmarks,
36 bookmarks,
37 branchmap,
37 branchmap,
38 bundle2,
38 bundle2,
39 bundlecaches,
39 bundlecaches,
40 changegroup,
40 changegroup,
41 color,
41 color,
42 commit,
42 commit,
43 context,
43 context,
44 dirstate,
44 dirstate,
45 discovery,
45 discovery,
46 encoding,
46 encoding,
47 error,
47 error,
48 exchange,
48 exchange,
49 extensions,
49 extensions,
50 filelog,
50 filelog,
51 hook,
51 hook,
52 lock as lockmod,
52 lock as lockmod,
53 match as matchmod,
53 match as matchmod,
54 mergestate as mergestatemod,
54 mergestate as mergestatemod,
55 mergeutil,
55 mergeutil,
56 namespaces,
56 namespaces,
57 narrowspec,
57 narrowspec,
58 obsolete,
58 obsolete,
59 pathutil,
59 pathutil,
60 phases,
60 phases,
61 pushkey,
61 pushkey,
62 pycompat,
62 pycompat,
63 rcutil,
63 rcutil,
64 repoview,
64 repoview,
65 requirements as requirementsmod,
65 requirements as requirementsmod,
66 revlog,
66 revlog,
67 revset,
67 revset,
68 revsetlang,
68 revsetlang,
69 scmutil,
69 scmutil,
70 sparse,
70 sparse,
71 store as storemod,
71 store as storemod,
72 subrepoutil,
72 subrepoutil,
73 tags as tagsmod,
73 tags as tagsmod,
74 transaction,
74 transaction,
75 txnutil,
75 txnutil,
76 util,
76 util,
77 vfs as vfsmod,
77 vfs as vfsmod,
78 wireprototypes,
78 wireprototypes,
79 )
79 )
80
80
81 from .interfaces import (
81 from .interfaces import (
82 repository,
82 repository,
83 util as interfaceutil,
83 util as interfaceutil,
84 )
84 )
85
85
86 from .utils import (
86 from .utils import (
87 hashutil,
87 hashutil,
88 procutil,
88 procutil,
89 stringutil,
89 stringutil,
90 urlutil,
90 urlutil,
91 )
91 )
92
92
93 from .revlogutils import (
93 from .revlogutils import (
94 concurrency_checker as revlogchecker,
94 concurrency_checker as revlogchecker,
95 constants as revlogconst,
95 constants as revlogconst,
96 sidedata as sidedatamod,
96 sidedata as sidedatamod,
97 )
97 )
98
98
99 release = lockmod.release
99 release = lockmod.release
100 urlerr = util.urlerr
100 urlerr = util.urlerr
101 urlreq = util.urlreq
101 urlreq = util.urlreq
102
102
# Journal/transaction file names whose dirstate-related backups must be
# skipped: the dirstate itself, the narrowspec dirstate variants, and the
# current branch file.
RE_SKIP_DIRSTATE_ROLLBACK = re.compile(
    b"^((dirstate|narrowspec.dirstate).*|branch$)"
)
# set of (path, vfs-location) tuples. vfs-location is:
# - 'plain' for vfs relative paths
# - '' for svfs relative paths
_cachedfiles = set()
class _basefilecache(scmutil.filecache):
    """All filecache usage on repo are done for logic that should be unfiltered"""

    def __get__(self, repo, type=None):
        # Class-level access returns the descriptor itself.
        if repo is None:
            return self
        # proxy to unfiltered __dict__ since filtered repo has no entry
        unfi = repo.unfiltered()
        try:
            return unfi.__dict__[self.sname]
        except KeyError:
            pass
        # Not cached yet: let scmutil.filecache compute and store the value
        # on the unfiltered repo.
        return super(_basefilecache, self).__get__(unfi, type)

    def set(self, repo, value):
        # Always store on the unfiltered repo so every filtered view shares
        # the same cached entry.
        return super(_basefilecache, self).set(repo.unfiltered(), value)
129
129
130
130
class repofilecache(_basefilecache):
    """filecache for files living in .hg but outside of .hg/store"""

    def __init__(self, *paths):
        super(repofilecache, self).__init__(*paths)
        # register every tracked path as a working-dir ('plain') vfs entry
        _cachedfiles.update((p, b'plain') for p in paths)

    def join(self, obj, fname):
        # resolve relative to the repository's main vfs (.hg/)
        return obj.vfs.join(fname)
141
141
142
142
class storecache(_basefilecache):
    """filecache for files located inside .hg/store"""

    def __init__(self, *paths):
        super(storecache, self).__init__(*paths)
        # store-relative entries use an empty location tag
        _cachedfiles.update((p, b'') for p in paths)

    def join(self, obj, fname):
        # resolve relative to the store vfs
        return obj.sjoin(fname)
153
153
154
154
class changelogcache(storecache):
    """filecache dedicated to the changelog"""

    def __init__(self):
        super(changelogcache, self).__init__()
        # the changelog index and its optional persistent-nodemap file
        _cachedfiles.update(
            [(b'00changelog.i', b''), (b'00changelog.n', b'')]
        )

    def tracked_paths(self, obj):
        # the ".n" file only exists when persistent-nodemap is enabled
        paths = [self.join(obj, b'00changelog.i')]
        opts = obj.store.opener.options
        if opts.get(b'persistent-nodemap', False):
            paths.append(self.join(obj, b'00changelog.n'))
        return paths
168
168
169
169
class manifestlogcache(storecache):
    """filecache dedicated to the manifestlog"""

    def __init__(self):
        super(manifestlogcache, self).__init__()
        # the manifest index and its optional persistent-nodemap file
        _cachedfiles.update(
            [(b'00manifest.i', b''), (b'00manifest.n', b'')]
        )

    def tracked_paths(self, obj):
        # the ".n" file only exists when persistent-nodemap is enabled
        paths = [self.join(obj, b'00manifest.i')]
        opts = obj.store.opener.options
        if opts.get(b'persistent-nodemap', False):
            paths.append(self.join(obj, b'00manifest.n'))
        return paths
183
183
184
184
class mixedrepostorecache(_basefilecache):
    """filecache for a mix files in .hg/store and outside"""

    def __init__(self, *pathsandlocations):
        # scmutil.filecache only uses the path for passing back into our
        # join(), so we can safely pass a list of paths and locations
        super(mixedrepostorecache, self).__init__(*pathsandlocations)
        _cachedfiles.update(pathsandlocations)

    def join(self, obj, fnameandlocation):
        fname, location = fnameandlocation
        # 'plain' entries live under .hg/, '' entries under .hg/store/
        if location == b'plain':
            return obj.vfs.join(fname)
        if location == b'':
            return obj.sjoin(fname)
        raise error.ProgrammingError(b'unexpected location: %s' % location)
204
204
205
205
def isfilecached(repo, name):
    """check if a repo has already cached "name" filecache-ed property

    This returns (cachedobj-or-None, iscached) tuple.
    """
    entry = repo.unfiltered()._filecache.get(name)
    if entry:
        return entry.obj, True
    return None, False
215
215
216
216
class unfilteredpropertycache(util.propertycache):
    """propertycache that apply to unfiltered repo only"""

    def __get__(self, repo, type=None):
        unfi = repo.unfiltered()
        if unfi is repo:
            # already the unfiltered repo: let the base propertycache
            # compute and memoize the value on it
            return super(unfilteredpropertycache, self).__get__(unfi)
        # filtered view: delegate to the unfiltered repo so the value is
        # computed and cached there exactly once
        return getattr(unfi, self.name)
225
225
226
226
class filteredpropertycache(util.propertycache):
    """propertycache that must take filtering in account"""

    def cachevalue(self, obj, value):
        # bypass any __setattr__ override so the cached value lands
        # directly in the instance dictionary of the (possibly filtered)
        # repo object
        object.__setattr__(obj, self.name, value)
232
232
233
233
def hasunfilteredcache(repo, name):
    """check if a repo has an unfilteredpropertycache value for <name>"""
    unfi = repo.unfiltered()
    return name in vars(unfi)
237
237
238
238
def unfilteredmethod(orig):
    """decorate method that always need to be run on unfiltered version"""

    @functools.wraps(orig)
    def wrapper(repo, *args, **kwargs):
        unfi = repo.unfiltered()
        return orig(unfi, *args, **kwargs)

    return wrapper
247
247
248
248
# Wire-protocol capabilities advertised by a modern local peer.
moderncaps = {
    b'lookup',
    b'branchmap',
    b'pushkey',
    b'known',
    b'getbundle',
    b'unbundle',
}
# Legacy peers additionally support the pre-getbundle 'changegroupsubset'.
legacycaps = moderncaps.union({b'changegroupsubset'})
258
258
259
259
@interfaceutil.implementer(repository.ipeercommandexecutor)
class localcommandexecutor:
    """Command executor that invokes methods directly on a local peer.

    Commands run synchronously; results are wrapped in already-resolved
    futures to satisfy the executor interface.
    """

    def __init__(self, peer):
        self._peer = peer
        self._sent = False
        self._closed = False

    def __enter__(self):
        return self

    def __exit__(self, exctype, excvalue, exctb):
        self.close()

    def callcommand(self, command, args):
        if self._sent:
            raise error.ProgrammingError(
                b'callcommand() cannot be used after sendcommands()'
            )
        if self._closed:
            raise error.ProgrammingError(
                b'callcommand() cannot be used after close()'
            )

        # We don't need to support anything fancy. Just call the named
        # method on the peer and return a resolved future.
        method = getattr(self._peer, pycompat.sysstr(command))

        fut = futures.Future()
        try:
            res = method(**pycompat.strkwargs(args))
        except Exception:
            pycompat.future_set_exception_info(fut, sys.exc_info()[1:])
        else:
            fut.set_result(res)
        return fut

    def sendcommands(self):
        self._sent = True

    def close(self):
        self._closed = True
304
304
305
305
@interfaceutil.implementer(repository.ipeercommands)
class localpeer(repository.peer):
    '''peer for a local repo; reflects only the most recent API'''

    def __init__(self, repo, caps=None, path=None):
        super(localpeer, self).__init__(repo.ui, path=path)

        if caps is None:
            caps = moderncaps.copy()
        # the peer exposes the 'served' filtered view of the repository
        self._repo = repo.filtered(b'served')

        if repo._wanted_sidedata:
            formatted = bundle2.format_remote_wanted_sidedata(repo)
            caps.add(b'exp-wanted-sidedata=' + formatted)

        self._caps = repo._restrictcapabilities(caps)

    # Begin of _basepeer interface.

    def url(self):
        return self._repo.url()

    def local(self):
        return self._repo

    def canpush(self):
        return True

    def close(self):
        self._repo.close()

    # End of _basepeer interface.

    # Begin of _basewirecommands interface.

    def branchmap(self):
        return self._repo.branchmap()

    def capabilities(self):
        return self._caps

    def clonebundles(self):
        return self._repo.tryread(bundlecaches.CB_MANIFEST_FILE)

    def debugwireargs(self, one, two, three=None, four=None, five=None):
        """Used to test argument passing over the wire"""
        return b"%s %s %s %s %s" % (
            one,
            two,
            pycompat.bytestr(three),
            pycompat.bytestr(four),
            pycompat.bytestr(five),
        )

    def getbundle(
        self,
        source,
        heads=None,
        common=None,
        bundlecaps=None,
        remote_sidedata=None,
        **kwargs
    ):
        # exchange.getbundlechunks returns (info, chunks); only the chunk
        # iterator is needed here
        chunks = exchange.getbundlechunks(
            self._repo,
            source,
            heads=heads,
            common=common,
            bundlecaps=bundlecaps,
            remote_sidedata=remote_sidedata,
            **kwargs
        )[1]
        cb = util.chunkbuffer(chunks)

        if exchange.bundle2requested(bundlecaps):
            # When requesting a bundle2, getbundle returns a stream to make the
            # wire level function happier. We need to build a proper object
            # from it in local peer.
            return bundle2.getunbundler(self.ui, cb)
        else:
            return changegroup.getunbundler(b'01', cb, None)

    def heads(self):
        return self._repo.heads()

    def known(self, nodes):
        return self._repo.known(nodes)

    def listkeys(self, namespace):
        return self._repo.listkeys(namespace)

    def lookup(self, key):
        return self._repo.lookup(key)

    def pushkey(self, namespace, key, old, new):
        return self._repo.pushkey(namespace, key, old, new)

    def stream_out(self):
        raise error.Abort(_(b'cannot perform stream clone against local peer'))

    def unbundle(self, bundle, heads, url):
        """apply a bundle on a repo

        This function handles the repo locking itself."""
        try:
            try:
                bundle = exchange.readbundle(self.ui, bundle, None)
                ret = exchange.unbundle(self._repo, bundle, heads, b'push', url)
                if util.safehasattr(ret, b'getchunks'):
                    # This is a bundle20 object, turn it into an unbundler.
                    # This little dance should be dropped eventually when the
                    # API is finally improved.
                    stream = util.chunkbuffer(ret.getchunks())
                    ret = bundle2.getunbundler(self.ui, stream)
                return ret
            except Exception as exc:
                # If the exception contains output salvaged from a bundle2
                # reply, we need to make sure it is printed before continuing
                # to fail. So we build a bundle2 with such output and consume
                # it directly.
                #
                # This is not very elegant but allows a "simple" solution for
                # issue4594
                output = getattr(exc, '_bundle2salvagedoutput', ())
                if output:
                    bundler = bundle2.bundle20(self._repo.ui)
                    for out in output:
                        bundler.addpart(out)
                    stream = util.chunkbuffer(bundler.getchunks())
                    b = bundle2.getunbundler(self.ui, stream)
                    bundle2.processbundle(self._repo, b)
                raise
        except error.PushRaced as exc:
            raise error.ResponseError(
                _(b'push failed:'), stringutil.forcebytestr(exc)
            )

    # End of _basewirecommands interface.

    # Begin of peer interface.

    def commandexecutor(self):
        return localcommandexecutor(self)

    # End of peer interface.
451
451
452
452
@interfaceutil.implementer(repository.ipeerlegacycommands)
class locallegacypeer(localpeer):
    """peer extension which implements legacy methods too; used for tests with
    restricted capabilities"""

    def __init__(self, repo, path=None):
        super(locallegacypeer, self).__init__(repo, caps=legacycaps, path=path)

    # Begin of baselegacywirecommands interface.

    def between(self, pairs):
        return self._repo.between(pairs)

    def branches(self, nodes):
        return self._repo.branches(nodes)

    def changegroup(self, nodes, source):
        # changegroup covering everything from `nodes` up to the current
        # repository heads
        outgoing = discovery.outgoing(
            self._repo, missingroots=nodes, ancestorsof=self._repo.heads()
        )
        return changegroup.makechangegroup(self._repo, outgoing, b'01', source)

    def changegroupsubset(self, bases, heads, source):
        # like changegroup(), but the caller bounds the set with explicit
        # bases and heads
        outgoing = discovery.outgoing(
            self._repo, missingroots=bases, ancestorsof=heads
        )
        return changegroup.makechangegroup(self._repo, outgoing, b'01', source)

    # End of baselegacywirecommands interface.
482
482
483
483
# Functions receiving (ui, features) that extensions can register to impact
# the ability to load repositories with custom requirements. Only
# functions defined in loaded extensions are called.
#
# The function receives a set of requirement strings that the repository
# is capable of opening. Functions will typically add elements to the
# set to reflect that the extension knows how to handle that requirements.
featuresetupfuncs = set()
492
492
493
493
def _getsharedvfs(hgvfs, requirements):
    """returns the vfs object pointing to root of shared source
    repo for a shared repository

    hgvfs is vfs pointing at .hg/ of current repo (shared one)
    requirements is a set of requirements of current repo (shared one)
    """
    # The ``shared`` or ``relshared`` requirements indicate the
    # store lives in the path contained in the ``.hg/sharedpath`` file.
    # This is an absolute path for ``shared`` and relative to
    # ``.hg/`` for ``relshared``.
    sharedpath = hgvfs.read(b'sharedpath').rstrip(b'\n')
    if requirementsmod.RELATIVE_SHARED_REQUIREMENT in requirements:
        sharedpath = util.normpath(hgvfs.join(sharedpath))

    sharedvfs = vfsmod.vfs(sharedpath, realpath=True)
    if not sharedvfs.exists():
        msg = _(b'.hg/sharedpath points to nonexistent directory %s')
        raise error.RepoError(msg % sharedvfs.base)
    return sharedvfs
517
517
518
518
519 def _readrequires(vfs, allowmissing):
519 def _readrequires(vfs, allowmissing):
520 """reads the require file present at root of this vfs
520 """reads the require file present at root of this vfs
521 and return a set of requirements
521 and return a set of requirements
522
522
523 If allowmissing is True, we suppress FileNotFoundError if raised"""
523 If allowmissing is True, we suppress FileNotFoundError if raised"""
524 # requires file contains a newline-delimited list of
524 # requires file contains a newline-delimited list of
525 # features/capabilities the opener (us) must have in order to use
525 # features/capabilities the opener (us) must have in order to use
526 # the repository. This file was introduced in Mercurial 0.9.2,
526 # the repository. This file was introduced in Mercurial 0.9.2,
527 # which means very old repositories may not have one. We assume
527 # which means very old repositories may not have one. We assume
528 # a missing file translates to no requirements.
528 # a missing file translates to no requirements.
529 read = vfs.tryread if allowmissing else vfs.read
529 read = vfs.tryread if allowmissing else vfs.read
530 return set(read(b'requires').splitlines())
530 return set(read(b'requires').splitlines())
531
531
532
532
533 def makelocalrepository(baseui, path: bytes, intents=None):
533 def makelocalrepository(baseui, path: bytes, intents=None):
534 """Create a local repository object.
534 """Create a local repository object.
535
535
536 Given arguments needed to construct a local repository, this function
536 Given arguments needed to construct a local repository, this function
537 performs various early repository loading functionality (such as
537 performs various early repository loading functionality (such as
538 reading the ``.hg/requires`` and ``.hg/hgrc`` files), validates that
538 reading the ``.hg/requires`` and ``.hg/hgrc`` files), validates that
539 the repository can be opened, derives a type suitable for representing
539 the repository can be opened, derives a type suitable for representing
540 that repository, and returns an instance of it.
540 that repository, and returns an instance of it.
541
541
542 The returned object conforms to the ``repository.completelocalrepository``
542 The returned object conforms to the ``repository.completelocalrepository``
543 interface.
543 interface.
544
544
545 The repository type is derived by calling a series of factory functions
545 The repository type is derived by calling a series of factory functions
546 for each aspect/interface of the final repository. These are defined by
546 for each aspect/interface of the final repository. These are defined by
547 ``REPO_INTERFACES``.
547 ``REPO_INTERFACES``.
548
548
549 Each factory function is called to produce a type implementing a specific
549 Each factory function is called to produce a type implementing a specific
550 interface. The cumulative list of returned types will be combined into a
550 interface. The cumulative list of returned types will be combined into a
551 new type and that type will be instantiated to represent the local
551 new type and that type will be instantiated to represent the local
552 repository.
552 repository.
553
553
554 The factory functions each receive various state that may be consulted
554 The factory functions each receive various state that may be consulted
555 as part of deriving a type.
555 as part of deriving a type.
556
556
557 Extensions should wrap these factory functions to customize repository type
557 Extensions should wrap these factory functions to customize repository type
558 creation. Note that an extension's wrapped function may be called even if
558 creation. Note that an extension's wrapped function may be called even if
559 that extension is not loaded for the repo being constructed. Extensions
559 that extension is not loaded for the repo being constructed. Extensions
560 should check if their ``__name__`` appears in the
560 should check if their ``__name__`` appears in the
561 ``extensionmodulenames`` set passed to the factory function and no-op if
561 ``extensionmodulenames`` set passed to the factory function and no-op if
562 not.
562 not.
563 """
563 """
564 ui = baseui.copy()
564 ui = baseui.copy()
565 # Prevent copying repo configuration.
565 # Prevent copying repo configuration.
566 ui.copy = baseui.copy
566 ui.copy = baseui.copy
567
567
568 # Working directory VFS rooted at repository root.
568 # Working directory VFS rooted at repository root.
569 wdirvfs = vfsmod.vfs(path, expandpath=True, realpath=True)
569 wdirvfs = vfsmod.vfs(path, expandpath=True, realpath=True)
570
570
571 # Main VFS for .hg/ directory.
571 # Main VFS for .hg/ directory.
572 hgpath = wdirvfs.join(b'.hg')
572 hgpath = wdirvfs.join(b'.hg')
573 hgvfs = vfsmod.vfs(hgpath, cacheaudited=True)
573 hgvfs = vfsmod.vfs(hgpath, cacheaudited=True)
574 # Whether this repository is shared one or not
574 # Whether this repository is shared one or not
575 shared = False
575 shared = False
576 # If this repository is shared, vfs pointing to shared repo
576 # If this repository is shared, vfs pointing to shared repo
577 sharedvfs = None
577 sharedvfs = None
578
578
579 # The .hg/ path should exist and should be a directory. All other
579 # The .hg/ path should exist and should be a directory. All other
580 # cases are errors.
580 # cases are errors.
581 if not hgvfs.isdir():
581 if not hgvfs.isdir():
582 try:
582 try:
583 hgvfs.stat()
583 hgvfs.stat()
584 except FileNotFoundError:
584 except FileNotFoundError:
585 pass
585 pass
586 except ValueError as e:
586 except ValueError as e:
587 # Can be raised on Python 3.8 when path is invalid.
587 # Can be raised on Python 3.8 when path is invalid.
588 raise error.Abort(
588 raise error.Abort(
589 _(b'invalid path %s: %s') % (path, stringutil.forcebytestr(e))
589 _(b'invalid path %s: %s') % (path, stringutil.forcebytestr(e))
590 )
590 )
591
591
592 raise error.RepoError(_(b'repository %s not found') % path)
592 raise error.RepoError(_(b'repository %s not found') % path)
593
593
594 requirements = _readrequires(hgvfs, True)
594 requirements = _readrequires(hgvfs, True)
595 shared = (
595 shared = (
596 requirementsmod.SHARED_REQUIREMENT in requirements
596 requirementsmod.SHARED_REQUIREMENT in requirements
597 or requirementsmod.RELATIVE_SHARED_REQUIREMENT in requirements
597 or requirementsmod.RELATIVE_SHARED_REQUIREMENT in requirements
598 )
598 )
599 storevfs = None
599 storevfs = None
600 if shared:
600 if shared:
601 # This is a shared repo
601 # This is a shared repo
602 sharedvfs = _getsharedvfs(hgvfs, requirements)
602 sharedvfs = _getsharedvfs(hgvfs, requirements)
603 storevfs = vfsmod.vfs(sharedvfs.join(b'store'))
603 storevfs = vfsmod.vfs(sharedvfs.join(b'store'))
604 else:
604 else:
605 storevfs = vfsmod.vfs(hgvfs.join(b'store'))
605 storevfs = vfsmod.vfs(hgvfs.join(b'store'))
606
606
607 # if .hg/requires contains the sharesafe requirement, it means
607 # if .hg/requires contains the sharesafe requirement, it means
608 # there exists a `.hg/store/requires` too and we should read it
608 # there exists a `.hg/store/requires` too and we should read it
609 # NOTE: presence of SHARESAFE_REQUIREMENT imply that store requirement
609 # NOTE: presence of SHARESAFE_REQUIREMENT imply that store requirement
610 # is present. We never write SHARESAFE_REQUIREMENT for a repo if store
610 # is present. We never write SHARESAFE_REQUIREMENT for a repo if store
611 # is not present, refer checkrequirementscompat() for that
611 # is not present, refer checkrequirementscompat() for that
612 #
612 #
613 # However, if SHARESAFE_REQUIREMENT is not present, it means that the
613 # However, if SHARESAFE_REQUIREMENT is not present, it means that the
614 # repository was shared the old way. We check the share source .hg/requires
614 # repository was shared the old way. We check the share source .hg/requires
615 # for SHARESAFE_REQUIREMENT to detect whether the current repository needs
615 # for SHARESAFE_REQUIREMENT to detect whether the current repository needs
616 # to be reshared
616 # to be reshared
617 hint = _(b"see `hg help config.format.use-share-safe` for more information")
617 hint = _(b"see `hg help config.format.use-share-safe` for more information")
618 if requirementsmod.SHARESAFE_REQUIREMENT in requirements:
618 if requirementsmod.SHARESAFE_REQUIREMENT in requirements:
619 if (
619 if (
620 shared
620 shared
621 and requirementsmod.SHARESAFE_REQUIREMENT
621 and requirementsmod.SHARESAFE_REQUIREMENT
622 not in _readrequires(sharedvfs, True)
622 not in _readrequires(sharedvfs, True)
623 ):
623 ):
624 mismatch_warn = ui.configbool(
624 mismatch_warn = ui.configbool(
625 b'share', b'safe-mismatch.source-not-safe.warn'
625 b'share', b'safe-mismatch.source-not-safe.warn'
626 )
626 )
627 mismatch_config = ui.config(
627 mismatch_config = ui.config(
628 b'share', b'safe-mismatch.source-not-safe'
628 b'share', b'safe-mismatch.source-not-safe'
629 )
629 )
630 mismatch_verbose_upgrade = ui.configbool(
630 mismatch_verbose_upgrade = ui.configbool(
631 b'share', b'safe-mismatch.source-not-safe:verbose-upgrade'
631 b'share', b'safe-mismatch.source-not-safe:verbose-upgrade'
632 )
632 )
633 if mismatch_config in (
633 if mismatch_config in (
634 b'downgrade-allow',
634 b'downgrade-allow',
635 b'allow',
635 b'allow',
636 b'downgrade-abort',
636 b'downgrade-abort',
637 ):
637 ):
638 # prevent cyclic import localrepo -> upgrade -> localrepo
638 # prevent cyclic import localrepo -> upgrade -> localrepo
639 from . import upgrade
639 from . import upgrade
640
640
641 upgrade.downgrade_share_to_non_safe(
641 upgrade.downgrade_share_to_non_safe(
642 ui,
642 ui,
643 hgvfs,
643 hgvfs,
644 sharedvfs,
644 sharedvfs,
645 requirements,
645 requirements,
646 mismatch_config,
646 mismatch_config,
647 mismatch_warn,
647 mismatch_warn,
648 mismatch_verbose_upgrade,
648 mismatch_verbose_upgrade,
649 )
649 )
650 elif mismatch_config == b'abort':
650 elif mismatch_config == b'abort':
651 raise error.Abort(
651 raise error.Abort(
652 _(b"share source does not support share-safe requirement"),
652 _(b"share source does not support share-safe requirement"),
653 hint=hint,
653 hint=hint,
654 )
654 )
655 else:
655 else:
656 raise error.Abort(
656 raise error.Abort(
657 _(
657 _(
658 b"share-safe mismatch with source.\nUnrecognized"
658 b"share-safe mismatch with source.\nUnrecognized"
659 b" value '%s' of `share.safe-mismatch.source-not-safe`"
659 b" value '%s' of `share.safe-mismatch.source-not-safe`"
660 b" set."
660 b" set."
661 )
661 )
662 % mismatch_config,
662 % mismatch_config,
663 hint=hint,
663 hint=hint,
664 )
664 )
665 else:
665 else:
666 requirements |= _readrequires(storevfs, False)
666 requirements |= _readrequires(storevfs, False)
667 elif shared:
667 elif shared:
668 sourcerequires = _readrequires(sharedvfs, False)
668 sourcerequires = _readrequires(sharedvfs, False)
669 if requirementsmod.SHARESAFE_REQUIREMENT in sourcerequires:
669 if requirementsmod.SHARESAFE_REQUIREMENT in sourcerequires:
670 mismatch_config = ui.config(b'share', b'safe-mismatch.source-safe')
670 mismatch_config = ui.config(b'share', b'safe-mismatch.source-safe')
671 mismatch_warn = ui.configbool(
671 mismatch_warn = ui.configbool(
672 b'share', b'safe-mismatch.source-safe.warn'
672 b'share', b'safe-mismatch.source-safe.warn'
673 )
673 )
674 mismatch_verbose_upgrade = ui.configbool(
674 mismatch_verbose_upgrade = ui.configbool(
675 b'share', b'safe-mismatch.source-safe:verbose-upgrade'
675 b'share', b'safe-mismatch.source-safe:verbose-upgrade'
676 )
676 )
677 if mismatch_config in (
677 if mismatch_config in (
678 b'upgrade-allow',
678 b'upgrade-allow',
679 b'allow',
679 b'allow',
680 b'upgrade-abort',
680 b'upgrade-abort',
681 ):
681 ):
682 # prevent cyclic import localrepo -> upgrade -> localrepo
682 # prevent cyclic import localrepo -> upgrade -> localrepo
683 from . import upgrade
683 from . import upgrade
684
684
685 upgrade.upgrade_share_to_safe(
685 upgrade.upgrade_share_to_safe(
686 ui,
686 ui,
687 hgvfs,
687 hgvfs,
688 storevfs,
688 storevfs,
689 requirements,
689 requirements,
690 mismatch_config,
690 mismatch_config,
691 mismatch_warn,
691 mismatch_warn,
692 mismatch_verbose_upgrade,
692 mismatch_verbose_upgrade,
693 )
693 )
694 elif mismatch_config == b'abort':
694 elif mismatch_config == b'abort':
695 raise error.Abort(
695 raise error.Abort(
696 _(
696 _(
697 b'version mismatch: source uses share-safe'
697 b'version mismatch: source uses share-safe'
698 b' functionality while the current share does not'
698 b' functionality while the current share does not'
699 ),
699 ),
700 hint=hint,
700 hint=hint,
701 )
701 )
702 else:
702 else:
703 raise error.Abort(
703 raise error.Abort(
704 _(
704 _(
705 b"share-safe mismatch with source.\nUnrecognized"
705 b"share-safe mismatch with source.\nUnrecognized"
706 b" value '%s' of `share.safe-mismatch.source-safe` set."
706 b" value '%s' of `share.safe-mismatch.source-safe` set."
707 )
707 )
708 % mismatch_config,
708 % mismatch_config,
709 hint=hint,
709 hint=hint,
710 )
710 )
711
711
712 # The .hg/hgrc file may load extensions or contain config options
712 # The .hg/hgrc file may load extensions or contain config options
713 # that influence repository construction. Attempt to load it and
713 # that influence repository construction. Attempt to load it and
714 # process any new extensions that it may have pulled in.
714 # process any new extensions that it may have pulled in.
715 if loadhgrc(ui, wdirvfs, hgvfs, requirements, sharedvfs):
715 if loadhgrc(ui, wdirvfs, hgvfs, requirements, sharedvfs):
716 afterhgrcload(ui, wdirvfs, hgvfs, requirements)
716 afterhgrcload(ui, wdirvfs, hgvfs, requirements)
717 extensions.loadall(ui)
717 extensions.loadall(ui)
718 extensions.populateui(ui)
718 extensions.populateui(ui)
719
719
720 # Set of module names of extensions loaded for this repository.
720 # Set of module names of extensions loaded for this repository.
721 extensionmodulenames = {m.__name__ for n, m in extensions.extensions(ui)}
721 extensionmodulenames = {m.__name__ for n, m in extensions.extensions(ui)}
722
722
723 supportedrequirements = gathersupportedrequirements(ui)
723 supportedrequirements = gathersupportedrequirements(ui)
724
724
725 # We first validate the requirements are known.
725 # We first validate the requirements are known.
726 ensurerequirementsrecognized(requirements, supportedrequirements)
726 ensurerequirementsrecognized(requirements, supportedrequirements)
727
727
728 # Then we validate that the known set is reasonable to use together.
728 # Then we validate that the known set is reasonable to use together.
729 ensurerequirementscompatible(ui, requirements)
729 ensurerequirementscompatible(ui, requirements)
730
730
731 # TODO there are unhandled edge cases related to opening repositories with
731 # TODO there are unhandled edge cases related to opening repositories with
732 # shared storage. If storage is shared, we should also test for requirements
732 # shared storage. If storage is shared, we should also test for requirements
733 # compatibility in the pointed-to repo. This entails loading the .hg/hgrc in
733 # compatibility in the pointed-to repo. This entails loading the .hg/hgrc in
734 # that repo, as that repo may load extensions needed to open it. This is a
734 # that repo, as that repo may load extensions needed to open it. This is a
735 # bit complicated because we don't want the other hgrc to overwrite settings
735 # bit complicated because we don't want the other hgrc to overwrite settings
736 # in this hgrc.
736 # in this hgrc.
737 #
737 #
738 # This bug is somewhat mitigated by the fact that we copy the .hg/requires
738 # This bug is somewhat mitigated by the fact that we copy the .hg/requires
739 # file when sharing repos. But if a requirement is added after the share is
739 # file when sharing repos. But if a requirement is added after the share is
740 # performed, thereby introducing a new requirement for the opener, we may
740 # performed, thereby introducing a new requirement for the opener, we may
741 # will not see that and could encounter a run-time error interacting with
741 # will not see that and could encounter a run-time error interacting with
742 # that shared store since it has an unknown-to-us requirement.
742 # that shared store since it has an unknown-to-us requirement.
743
743
744 # At this point, we know we should be capable of opening the repository.
744 # At this point, we know we should be capable of opening the repository.
745 # Now get on with doing that.
745 # Now get on with doing that.
746
746
747 features = set()
747 features = set()
748
748
749 # The "store" part of the repository holds versioned data. How it is
749 # The "store" part of the repository holds versioned data. How it is
750 # accessed is determined by various requirements. If `shared` or
750 # accessed is determined by various requirements. If `shared` or
751 # `relshared` requirements are present, this indicates current repository
751 # `relshared` requirements are present, this indicates current repository
752 # is a share and store exists in path mentioned in `.hg/sharedpath`
752 # is a share and store exists in path mentioned in `.hg/sharedpath`
753 if shared:
753 if shared:
754 storebasepath = sharedvfs.base
754 storebasepath = sharedvfs.base
755 cachepath = sharedvfs.join(b'cache')
755 cachepath = sharedvfs.join(b'cache')
756 features.add(repository.REPO_FEATURE_SHARED_STORAGE)
756 features.add(repository.REPO_FEATURE_SHARED_STORAGE)
757 else:
757 else:
758 storebasepath = hgvfs.base
758 storebasepath = hgvfs.base
759 cachepath = hgvfs.join(b'cache')
759 cachepath = hgvfs.join(b'cache')
760 wcachepath = hgvfs.join(b'wcache')
760 wcachepath = hgvfs.join(b'wcache')
761
761
762 # The store has changed over time and the exact layout is dictated by
762 # The store has changed over time and the exact layout is dictated by
763 # requirements. The store interface abstracts differences across all
763 # requirements. The store interface abstracts differences across all
764 # of them.
764 # of them.
765 store = makestore(
765 store = makestore(
766 requirements,
766 requirements,
767 storebasepath,
767 storebasepath,
768 lambda base: vfsmod.vfs(base, cacheaudited=True),
768 lambda base: vfsmod.vfs(base, cacheaudited=True),
769 )
769 )
770 hgvfs.createmode = store.createmode
770 hgvfs.createmode = store.createmode
771
771
772 storevfs = store.vfs
772 storevfs = store.vfs
773 storevfs.options = resolvestorevfsoptions(ui, requirements, features)
773 storevfs.options = resolvestorevfsoptions(ui, requirements, features)
774
774
775 if (
775 if (
776 requirementsmod.REVLOGV2_REQUIREMENT in requirements
776 requirementsmod.REVLOGV2_REQUIREMENT in requirements
777 or requirementsmod.CHANGELOGV2_REQUIREMENT in requirements
777 or requirementsmod.CHANGELOGV2_REQUIREMENT in requirements
778 ):
778 ):
779 features.add(repository.REPO_FEATURE_SIDE_DATA)
779 features.add(repository.REPO_FEATURE_SIDE_DATA)
780 # the revlogv2 docket introduced race condition that we need to fix
780 # the revlogv2 docket introduced race condition that we need to fix
781 features.discard(repository.REPO_FEATURE_STREAM_CLONE)
781 features.discard(repository.REPO_FEATURE_STREAM_CLONE)
782
782
783 # The cache vfs is used to manage cache files.
783 # The cache vfs is used to manage cache files.
784 cachevfs = vfsmod.vfs(cachepath, cacheaudited=True)
784 cachevfs = vfsmod.vfs(cachepath, cacheaudited=True)
785 cachevfs.createmode = store.createmode
785 cachevfs.createmode = store.createmode
786 # The cache vfs is used to manage cache files related to the working copy
786 # The cache vfs is used to manage cache files related to the working copy
787 wcachevfs = vfsmod.vfs(wcachepath, cacheaudited=True)
787 wcachevfs = vfsmod.vfs(wcachepath, cacheaudited=True)
788 wcachevfs.createmode = store.createmode
788 wcachevfs.createmode = store.createmode
789
789
790 # Now resolve the type for the repository object. We do this by repeatedly
790 # Now resolve the type for the repository object. We do this by repeatedly
791 # calling a factory function to produces types for specific aspects of the
791 # calling a factory function to produces types for specific aspects of the
792 # repo's operation. The aggregate returned types are used as base classes
792 # repo's operation. The aggregate returned types are used as base classes
793 # for a dynamically-derived type, which will represent our new repository.
793 # for a dynamically-derived type, which will represent our new repository.
794
794
795 bases = []
795 bases = []
796 extrastate = {}
796 extrastate = {}
797
797
798 for iface, fn in REPO_INTERFACES:
798 for iface, fn in REPO_INTERFACES:
799 # We pass all potentially useful state to give extensions tons of
799 # We pass all potentially useful state to give extensions tons of
800 # flexibility.
800 # flexibility.
801 typ = fn()(
801 typ = fn()(
802 ui=ui,
802 ui=ui,
803 intents=intents,
803 intents=intents,
804 requirements=requirements,
804 requirements=requirements,
805 features=features,
805 features=features,
806 wdirvfs=wdirvfs,
806 wdirvfs=wdirvfs,
807 hgvfs=hgvfs,
807 hgvfs=hgvfs,
808 store=store,
808 store=store,
809 storevfs=storevfs,
809 storevfs=storevfs,
810 storeoptions=storevfs.options,
810 storeoptions=storevfs.options,
811 cachevfs=cachevfs,
811 cachevfs=cachevfs,
812 wcachevfs=wcachevfs,
812 wcachevfs=wcachevfs,
813 extensionmodulenames=extensionmodulenames,
813 extensionmodulenames=extensionmodulenames,
814 extrastate=extrastate,
814 extrastate=extrastate,
815 baseclasses=bases,
815 baseclasses=bases,
816 )
816 )
817
817
818 if not isinstance(typ, type):
818 if not isinstance(typ, type):
819 raise error.ProgrammingError(
819 raise error.ProgrammingError(
820 b'unable to construct type for %s' % iface
820 b'unable to construct type for %s' % iface
821 )
821 )
822
822
823 bases.append(typ)
823 bases.append(typ)
824
824
825 # type() allows you to use characters in type names that wouldn't be
825 # type() allows you to use characters in type names that wouldn't be
826 # recognized as Python symbols in source code. We abuse that to add
826 # recognized as Python symbols in source code. We abuse that to add
827 # rich information about our constructed repo.
827 # rich information about our constructed repo.
828 name = pycompat.sysstr(
828 name = pycompat.sysstr(
829 b'derivedrepo:%s<%s>' % (wdirvfs.base, b','.join(sorted(requirements)))
829 b'derivedrepo:%s<%s>' % (wdirvfs.base, b','.join(sorted(requirements)))
830 )
830 )
831
831
832 cls = type(name, tuple(bases), {})
832 cls = type(name, tuple(bases), {})
833
833
834 return cls(
834 return cls(
835 baseui=baseui,
835 baseui=baseui,
836 ui=ui,
836 ui=ui,
837 origroot=path,
837 origroot=path,
838 wdirvfs=wdirvfs,
838 wdirvfs=wdirvfs,
839 hgvfs=hgvfs,
839 hgvfs=hgvfs,
840 requirements=requirements,
840 requirements=requirements,
841 supportedrequirements=supportedrequirements,
841 supportedrequirements=supportedrequirements,
842 sharedpath=storebasepath,
842 sharedpath=storebasepath,
843 store=store,
843 store=store,
844 cachevfs=cachevfs,
844 cachevfs=cachevfs,
845 wcachevfs=wcachevfs,
845 wcachevfs=wcachevfs,
846 features=features,
846 features=features,
847 intents=intents,
847 intents=intents,
848 )
848 )
849
849
850
850
def loadhgrc(
    ui,
    wdirvfs: vfsmod.vfs,
    hgvfs: vfsmod.vfs,
    requirements,
    sharedvfs: Optional[vfsmod.vfs] = None,
):
    """Load hgrc files/content into a ui instance.

    This is called during repository opening to load any additional
    config files or settings relevant to the current repository.

    Returns a bool indicating whether any additional configs were loaded.

    Extensions should monkeypatch this function to modify how per-repo
    configs are loaded. For example, an extension may wish to pull in
    configs from alternate files or sources.

    sharedvfs is vfs object pointing to source repo if the current one is a
    shared one
    """
    if not rcutil.use_repo_hgrc():
        return False

    # Candidate config files, in read order. The share-safe source hgrc (if
    # applicable) is read first so that the local repo's own hgrc can
    # override its settings.
    candidates = []
    if requirementsmod.SHARESAFE_REQUIREMENT in requirements and sharedvfs:
        candidates.append((sharedvfs.join(b'hgrc'), sharedvfs.base))
    candidates.append((hgvfs.join(b'hgrc'), wdirvfs.base))
    candidates.append((hgvfs.join(b'hgrc-not-shared'), wdirvfs.base))

    loaded_any = False
    for configpath, root in candidates:
        try:
            ui.readconfig(configpath, root=root)
            loaded_any = True
        except IOError:
            # a missing config file is not an error
            pass

    return loaded_any
897
897
898
898
def afterhgrcload(ui, wdirvfs, hgvfs, requirements):
    """Perform additional actions after .hg/hgrc is loaded.

    This function is called during repository loading immediately after
    the .hg/hgrc file is loaded and before per-repo extensions are loaded.

    The function can be used to validate configs, automatically add
    options (including extensions) based on requirements, etc.
    """

    # Requirements that, when present, cause the listed extensions to be
    # enabled automatically.
    autoextensions = {
        b'git': [b'git'],
        b'largefiles': [b'largefiles'],
        b'lfs': [b'lfs'],
    }

    for req, extnames in sorted(autoextensions.items()):
        if req not in requirements:
            continue

        for extname in extnames:
            # don't clobber an explicit user setting for this extension
            if ui.hasconfig(b'extensions', extname):
                continue
            ui.setconfig(b'extensions', extname, b'', source=b'autoload')
924
924
925
925
def gathersupportedrequirements(ui):
    """Determine the complete set of recognized requirements."""
    # Everything this implementation of localrepository understands.
    recognized = set(localrepository._basesupported)

    # Run ``featuresetupfuncs`` entries registered by extensions that are
    # actually enabled for this ui instance; they may extend the set.
    enabledmodules = {m.__name__ for n, m in extensions.extensions(ui)}

    for setupfn in featuresetupfuncs:
        if setupfn.__module__ in enabledmodules:
            setupfn(ui, recognized)

    # Derive requirements from the registered compression engines.
    for enginename in util.compengines:
        engine = util.compengines[enginename]
        if not (engine.available() and engine.revlogheader()):
            continue
        recognized.add(b'exp-compression-%s' % enginename)
        if engine.name() == b'zstd':
            recognized.add(requirementsmod.REVLOG_COMPRESSION_ZSTD)

    return recognized
948
948
949
949
def ensurerequirementsrecognized(requirements, supported):
    """Validate that a set of local requirements is recognized.

    Receives a set of requirements. Raises an ``error.RepoError`` if there
    exists any requirement in that set that currently loaded code doesn't
    recognize.

    Returns a set of supported requirements.
    """
    unknown = set()

    for req in requirements:
        if req in supported:
            continue

        # an empty or non-alphanumeric-leading entry means the file itself
        # is damaged, not merely that we lack a feature
        if not req or not req[0:1].isalnum():
            raise error.RequirementError(_(b'.hg/requires file is corrupt'))

        unknown.add(req)

    if unknown:
        raise error.RequirementError(
            _(b'repository requires features unknown to this Mercurial: %s')
            % b' '.join(sorted(unknown)),
            hint=_(
                b'see https://mercurial-scm.org/wiki/MissingRequirement '
                b'for more information'
            ),
        )
979
979
980
980
def ensurerequirementscompatible(ui, requirements):
    """Validates that a set of recognized requirements is mutually compatible.

    Some requirements may not be compatible with others or require
    config options that aren't enabled. This function is called during
    repository opening to ensure that the set of requirements needed
    to open a repository is sane and compatible with config options.

    Extensions can monkeypatch this function to perform additional
    checking.

    ``error.RepoError`` should be raised on failure.
    """
    # sparse repos can only be opened when the sparse extension is loaded
    if requirementsmod.SPARSE_REQUIREMENT not in requirements:
        return
    if sparse.enabled:
        return
    raise error.RepoError(
        _(
            b'repository is using sparse feature but '
            b'sparse is not enabled; enable the '
            b'"sparse" extensions to access'
        )
    )
1005
1005
1006
1006
def makestore(requirements, path, vfstype):
    """Construct a storage object for a repository."""
    # no "store" requirement: flat, unencoded legacy layout
    if requirementsmod.STORE_REQUIREMENT not in requirements:
        return storemod.basicstore(path, vfstype)

    # store without fncache: filename-encoded layout
    if requirementsmod.FNCACHE_REQUIREMENT not in requirements:
        return storemod.encodedstore(path, vfstype)

    # modern layout: fncache, optionally with dotencode
    dotencode = requirementsmod.DOTENCODE_REQUIREMENT in requirements
    return storemod.fncachestore(path, vfstype, dotencode)
1017
1017
1018
1018
def resolvestorevfsoptions(ui, requirements, features):
    """Resolve the options to pass to the store vfs opener.

    The returned dict is used to influence behavior of the storage layer.
    """
    opts = {}

    if requirementsmod.TREEMANIFEST_REQUIREMENT in requirements:
        opts[b'treemanifest'] = True

    # experimental config: format.manifestcachesize
    cachesize = ui.configint(b'format', b'manifestcachesize')
    if cachesize is not None:
        opts[b'manifestcachesize'] = cachesize

    # In the absence of another requirement superseding a revlog-related
    # requirement, we have to assume the repo is using revlog version 0.
    # This revlog format is super old and we don't bother trying to parse
    # opener options for it because those options wouldn't do anything
    # meaningful on such old repos.
    has_modern_revlog = (
        requirementsmod.REVLOGV1_REQUIREMENT in requirements
        or requirementsmod.REVLOGV2_REQUIREMENT in requirements
    )
    if has_modern_revlog:
        opts.update(resolverevlogstorevfsoptions(ui, requirements, features))
    else:
        # explicitly mark repo as using revlogv0
        opts[b'revlogv0'] = True

    if requirementsmod.COPIESSDC_REQUIREMENT in requirements:
        opts[b'copies-storage'] = b'changeset-sidedata'
    elif ui.config(b'experimental', b'copies.write-to') in (
        b'changeset-only',
        b'compatibility',
    ):
        opts[b'copies-storage'] = b'extra'

    return opts
1056
1056
1057
1057
1058 def resolverevlogstorevfsoptions(ui, requirements, features):
1058 def resolverevlogstorevfsoptions(ui, requirements, features):
1059 """Resolve opener options specific to revlogs."""
1059 """Resolve opener options specific to revlogs."""
1060
1060
1061 options = {}
1061 options = {}
1062 options[b'flagprocessors'] = {}
1062 options[b'flagprocessors'] = {}
1063
1063
1064 if requirementsmod.REVLOGV1_REQUIREMENT in requirements:
1064 if requirementsmod.REVLOGV1_REQUIREMENT in requirements:
1065 options[b'revlogv1'] = True
1065 options[b'revlogv1'] = True
1066 if requirementsmod.REVLOGV2_REQUIREMENT in requirements:
1066 if requirementsmod.REVLOGV2_REQUIREMENT in requirements:
1067 options[b'revlogv2'] = True
1067 options[b'revlogv2'] = True
1068 if requirementsmod.CHANGELOGV2_REQUIREMENT in requirements:
1068 if requirementsmod.CHANGELOGV2_REQUIREMENT in requirements:
1069 options[b'changelogv2'] = True
1069 options[b'changelogv2'] = True
1070 cmp_rank = ui.configbool(b'experimental', b'changelog-v2.compute-rank')
1070 cmp_rank = ui.configbool(b'experimental', b'changelog-v2.compute-rank')
1071 options[b'changelogv2.compute-rank'] = cmp_rank
1071 options[b'changelogv2.compute-rank'] = cmp_rank
1072
1072
1073 if requirementsmod.GENERALDELTA_REQUIREMENT in requirements:
1073 if requirementsmod.GENERALDELTA_REQUIREMENT in requirements:
1074 options[b'generaldelta'] = True
1074 options[b'generaldelta'] = True
1075
1075
1076 # experimental config: format.chunkcachesize
1076 # experimental config: format.chunkcachesize
1077 chunkcachesize = ui.configint(b'format', b'chunkcachesize')
1077 chunkcachesize = ui.configint(b'format', b'chunkcachesize')
1078 if chunkcachesize is not None:
1078 if chunkcachesize is not None:
1079 options[b'chunkcachesize'] = chunkcachesize
1079 options[b'chunkcachesize'] = chunkcachesize
1080
1080
1081 deltabothparents = ui.configbool(
1081 deltabothparents = ui.configbool(
1082 b'storage', b'revlog.optimize-delta-parent-choice'
1082 b'storage', b'revlog.optimize-delta-parent-choice'
1083 )
1083 )
1084 options[b'deltabothparents'] = deltabothparents
1084 options[b'deltabothparents'] = deltabothparents
1085 dps_cgds = ui.configint(
1085 dps_cgds = ui.configint(
1086 b'storage',
1086 b'storage',
1087 b'revlog.delta-parent-search.candidate-group-chunk-size',
1087 b'revlog.delta-parent-search.candidate-group-chunk-size',
1088 )
1088 )
1089 options[b'delta-parent-search.candidate-group-chunk-size'] = dps_cgds
1089 options[b'delta-parent-search.candidate-group-chunk-size'] = dps_cgds
1090 options[b'debug-delta'] = ui.configbool(b'debug', b'revlog.debug-delta')
1090 options[b'debug-delta'] = ui.configbool(b'debug', b'revlog.debug-delta')
1091
1091
1092 issue6528 = ui.configbool(b'storage', b'revlog.issue6528.fix-incoming')
1092 issue6528 = ui.configbool(b'storage', b'revlog.issue6528.fix-incoming')
1093 options[b'issue6528.fix-incoming'] = issue6528
1093 options[b'issue6528.fix-incoming'] = issue6528
1094
1094
1095 lazydelta = ui.configbool(b'storage', b'revlog.reuse-external-delta')
1095 lazydelta = ui.configbool(b'storage', b'revlog.reuse-external-delta')
1096 lazydeltabase = False
1096 lazydeltabase = False
1097 if lazydelta:
1097 if lazydelta:
1098 lazydeltabase = ui.configbool(
1098 lazydeltabase = ui.configbool(
1099 b'storage', b'revlog.reuse-external-delta-parent'
1099 b'storage', b'revlog.reuse-external-delta-parent'
1100 )
1100 )
1101 if lazydeltabase is None:
1101 if lazydeltabase is None:
1102 lazydeltabase = not scmutil.gddeltaconfig(ui)
1102 lazydeltabase = not scmutil.gddeltaconfig(ui)
1103 options[b'lazydelta'] = lazydelta
1103 options[b'lazydelta'] = lazydelta
1104 options[b'lazydeltabase'] = lazydeltabase
1104 options[b'lazydeltabase'] = lazydeltabase
1105
1105
1106 chainspan = ui.configbytes(b'experimental', b'maxdeltachainspan')
1106 chainspan = ui.configbytes(b'experimental', b'maxdeltachainspan')
1107 if 0 <= chainspan:
1107 if 0 <= chainspan:
1108 options[b'maxdeltachainspan'] = chainspan
1108 options[b'maxdeltachainspan'] = chainspan
1109
1109
1110 mmapindexthreshold = ui.configbytes(b'experimental', b'mmapindexthreshold')
1110 mmapindexthreshold = ui.configbytes(b'experimental', b'mmapindexthreshold')
1111 if mmapindexthreshold is not None:
1111 if mmapindexthreshold is not None:
1112 options[b'mmapindexthreshold'] = mmapindexthreshold
1112 options[b'mmapindexthreshold'] = mmapindexthreshold
1113
1113
1114 withsparseread = ui.configbool(b'experimental', b'sparse-read')
1114 withsparseread = ui.configbool(b'experimental', b'sparse-read')
1115 srdensitythres = float(
1115 srdensitythres = float(
1116 ui.config(b'experimental', b'sparse-read.density-threshold')
1116 ui.config(b'experimental', b'sparse-read.density-threshold')
1117 )
1117 )
1118 srmingapsize = ui.configbytes(b'experimental', b'sparse-read.min-gap-size')
1118 srmingapsize = ui.configbytes(b'experimental', b'sparse-read.min-gap-size')
1119 options[b'with-sparse-read'] = withsparseread
1119 options[b'with-sparse-read'] = withsparseread
1120 options[b'sparse-read-density-threshold'] = srdensitythres
1120 options[b'sparse-read-density-threshold'] = srdensitythres
1121 options[b'sparse-read-min-gap-size'] = srmingapsize
1121 options[b'sparse-read-min-gap-size'] = srmingapsize
1122
1122
1123 sparserevlog = requirementsmod.SPARSEREVLOG_REQUIREMENT in requirements
1123 sparserevlog = requirementsmod.SPARSEREVLOG_REQUIREMENT in requirements
1124 options[b'sparse-revlog'] = sparserevlog
1124 options[b'sparse-revlog'] = sparserevlog
1125 if sparserevlog:
1125 if sparserevlog:
1126 options[b'generaldelta'] = True
1126 options[b'generaldelta'] = True
1127
1127
1128 maxchainlen = None
1128 maxchainlen = None
1129 if sparserevlog:
1129 if sparserevlog:
1130 maxchainlen = revlogconst.SPARSE_REVLOG_MAX_CHAIN_LENGTH
1130 maxchainlen = revlogconst.SPARSE_REVLOG_MAX_CHAIN_LENGTH
1131 # experimental config: format.maxchainlen
1131 # experimental config: format.maxchainlen
1132 maxchainlen = ui.configint(b'format', b'maxchainlen', maxchainlen)
1132 maxchainlen = ui.configint(b'format', b'maxchainlen', maxchainlen)
1133 if maxchainlen is not None:
1133 if maxchainlen is not None:
1134 options[b'maxchainlen'] = maxchainlen
1134 options[b'maxchainlen'] = maxchainlen
1135
1135
1136 for r in requirements:
1136 for r in requirements:
1137 # we allow multiple compression engine requirement to co-exist because
1137 # we allow multiple compression engine requirement to co-exist because
1138 # strickly speaking, revlog seems to support mixed compression style.
1138 # strickly speaking, revlog seems to support mixed compression style.
1139 #
1139 #
1140 # The compression used for new entries will be "the last one"
1140 # The compression used for new entries will be "the last one"
1141 prefix = r.startswith
1141 prefix = r.startswith
1142 if prefix(b'revlog-compression-') or prefix(b'exp-compression-'):
1142 if prefix(b'revlog-compression-') or prefix(b'exp-compression-'):
1143 options[b'compengine'] = r.split(b'-', 2)[2]
1143 options[b'compengine'] = r.split(b'-', 2)[2]
1144
1144
1145 options[b'zlib.level'] = ui.configint(b'storage', b'revlog.zlib.level')
1145 options[b'zlib.level'] = ui.configint(b'storage', b'revlog.zlib.level')
1146 if options[b'zlib.level'] is not None:
1146 if options[b'zlib.level'] is not None:
1147 if not (0 <= options[b'zlib.level'] <= 9):
1147 if not (0 <= options[b'zlib.level'] <= 9):
1148 msg = _(b'invalid value for `storage.revlog.zlib.level` config: %d')
1148 msg = _(b'invalid value for `storage.revlog.zlib.level` config: %d')
1149 raise error.Abort(msg % options[b'zlib.level'])
1149 raise error.Abort(msg % options[b'zlib.level'])
1150 options[b'zstd.level'] = ui.configint(b'storage', b'revlog.zstd.level')
1150 options[b'zstd.level'] = ui.configint(b'storage', b'revlog.zstd.level')
1151 if options[b'zstd.level'] is not None:
1151 if options[b'zstd.level'] is not None:
1152 if not (0 <= options[b'zstd.level'] <= 22):
1152 if not (0 <= options[b'zstd.level'] <= 22):
1153 msg = _(b'invalid value for `storage.revlog.zstd.level` config: %d')
1153 msg = _(b'invalid value for `storage.revlog.zstd.level` config: %d')
1154 raise error.Abort(msg % options[b'zstd.level'])
1154 raise error.Abort(msg % options[b'zstd.level'])
1155
1155
1156 if requirementsmod.NARROW_REQUIREMENT in requirements:
1156 if requirementsmod.NARROW_REQUIREMENT in requirements:
1157 options[b'enableellipsis'] = True
1157 options[b'enableellipsis'] = True
1158
1158
1159 if ui.configbool(b'experimental', b'rust.index'):
1159 if ui.configbool(b'experimental', b'rust.index'):
1160 options[b'rust.index'] = True
1160 options[b'rust.index'] = True
1161 if requirementsmod.NODEMAP_REQUIREMENT in requirements:
1161 if requirementsmod.NODEMAP_REQUIREMENT in requirements:
1162 slow_path = ui.config(
1162 slow_path = ui.config(
1163 b'storage', b'revlog.persistent-nodemap.slow-path'
1163 b'storage', b'revlog.persistent-nodemap.slow-path'
1164 )
1164 )
1165 if slow_path not in (b'allow', b'warn', b'abort'):
1165 if slow_path not in (b'allow', b'warn', b'abort'):
1166 default = ui.config_default(
1166 default = ui.config_default(
1167 b'storage', b'revlog.persistent-nodemap.slow-path'
1167 b'storage', b'revlog.persistent-nodemap.slow-path'
1168 )
1168 )
1169 msg = _(
1169 msg = _(
1170 b'unknown value for config '
1170 b'unknown value for config '
1171 b'"storage.revlog.persistent-nodemap.slow-path": "%s"\n'
1171 b'"storage.revlog.persistent-nodemap.slow-path": "%s"\n'
1172 )
1172 )
1173 ui.warn(msg % slow_path)
1173 ui.warn(msg % slow_path)
1174 if not ui.quiet:
1174 if not ui.quiet:
1175 ui.warn(_(b'falling back to default value: %s\n') % default)
1175 ui.warn(_(b'falling back to default value: %s\n') % default)
1176 slow_path = default
1176 slow_path = default
1177
1177
1178 msg = _(
1178 msg = _(
1179 b"accessing `persistent-nodemap` repository without associated "
1179 b"accessing `persistent-nodemap` repository without associated "
1180 b"fast implementation."
1180 b"fast implementation."
1181 )
1181 )
1182 hint = _(
1182 hint = _(
1183 b"check `hg help config.format.use-persistent-nodemap` "
1183 b"check `hg help config.format.use-persistent-nodemap` "
1184 b"for details"
1184 b"for details"
1185 )
1185 )
1186 if not revlog.HAS_FAST_PERSISTENT_NODEMAP:
1186 if not revlog.HAS_FAST_PERSISTENT_NODEMAP:
1187 if slow_path == b'warn':
1187 if slow_path == b'warn':
1188 msg = b"warning: " + msg + b'\n'
1188 msg = b"warning: " + msg + b'\n'
1189 ui.warn(msg)
1189 ui.warn(msg)
1190 if not ui.quiet:
1190 if not ui.quiet:
1191 hint = b'(' + hint + b')\n'
1191 hint = b'(' + hint + b')\n'
1192 ui.warn(hint)
1192 ui.warn(hint)
1193 if slow_path == b'abort':
1193 if slow_path == b'abort':
1194 raise error.Abort(msg, hint=hint)
1194 raise error.Abort(msg, hint=hint)
1195 options[b'persistent-nodemap'] = True
1195 options[b'persistent-nodemap'] = True
1196 if requirementsmod.DIRSTATE_V2_REQUIREMENT in requirements:
1196 if requirementsmod.DIRSTATE_V2_REQUIREMENT in requirements:
1197 slow_path = ui.config(b'storage', b'dirstate-v2.slow-path')
1197 slow_path = ui.config(b'storage', b'dirstate-v2.slow-path')
1198 if slow_path not in (b'allow', b'warn', b'abort'):
1198 if slow_path not in (b'allow', b'warn', b'abort'):
1199 default = ui.config_default(b'storage', b'dirstate-v2.slow-path')
1199 default = ui.config_default(b'storage', b'dirstate-v2.slow-path')
1200 msg = _(b'unknown value for config "dirstate-v2.slow-path": "%s"\n')
1200 msg = _(b'unknown value for config "dirstate-v2.slow-path": "%s"\n')
1201 ui.warn(msg % slow_path)
1201 ui.warn(msg % slow_path)
1202 if not ui.quiet:
1202 if not ui.quiet:
1203 ui.warn(_(b'falling back to default value: %s\n') % default)
1203 ui.warn(_(b'falling back to default value: %s\n') % default)
1204 slow_path = default
1204 slow_path = default
1205
1205
1206 msg = _(
1206 msg = _(
1207 b"accessing `dirstate-v2` repository without associated "
1207 b"accessing `dirstate-v2` repository without associated "
1208 b"fast implementation."
1208 b"fast implementation."
1209 )
1209 )
1210 hint = _(
1210 hint = _(
1211 b"check `hg help config.format.use-dirstate-v2` " b"for details"
1211 b"check `hg help config.format.use-dirstate-v2` " b"for details"
1212 )
1212 )
1213 if not dirstate.HAS_FAST_DIRSTATE_V2:
1213 if not dirstate.HAS_FAST_DIRSTATE_V2:
1214 if slow_path == b'warn':
1214 if slow_path == b'warn':
1215 msg = b"warning: " + msg + b'\n'
1215 msg = b"warning: " + msg + b'\n'
1216 ui.warn(msg)
1216 ui.warn(msg)
1217 if not ui.quiet:
1217 if not ui.quiet:
1218 hint = b'(' + hint + b')\n'
1218 hint = b'(' + hint + b')\n'
1219 ui.warn(hint)
1219 ui.warn(hint)
1220 if slow_path == b'abort':
1220 if slow_path == b'abort':
1221 raise error.Abort(msg, hint=hint)
1221 raise error.Abort(msg, hint=hint)
1222 if ui.configbool(b'storage', b'revlog.persistent-nodemap.mmap'):
1222 if ui.configbool(b'storage', b'revlog.persistent-nodemap.mmap'):
1223 options[b'persistent-nodemap.mmap'] = True
1223 options[b'persistent-nodemap.mmap'] = True
1224 if ui.configbool(b'devel', b'persistent-nodemap'):
1224 if ui.configbool(b'devel', b'persistent-nodemap'):
1225 options[b'devel-force-nodemap'] = True
1225 options[b'devel-force-nodemap'] = True
1226
1226
1227 return options
1227 return options
1228
1228
1229
1229
def makemain(**kwargs):
    """Produce a type conforming to ``ilocalrepositorymain``.

    Keyword arguments are accepted for interface compatibility and are
    currently ignored.
    """
    return localrepository
1233
1233
1234
1234
@interfaceutil.implementer(repository.ilocalrepositoryfilestorage)
class revlogfilestorage:
    """File storage when using revlogs."""

    def file(self, path):
        """Return the filelog for tracked file ``path``.

        A single leading ``/`` is stripped so absolute-looking paths map to
        the same storage as their relative form.
        """
        if path.startswith(b'/'):
            path = path[1:]
        return filelog.filelog(self.svfs, path)
1244
1244
1245
1245
@interfaceutil.implementer(repository.ilocalrepositoryfilestorage)
class revlognarrowfilestorage:
    """File storage when using revlogs and narrow files."""

    def file(self, path):
        """Return the narrow-aware filelog for tracked file ``path``.

        A single leading ``/`` is stripped so absolute-looking paths map to
        the same storage as their relative form.
        """
        if path.startswith(b'/'):
            path = path[1:]
        return filelog.narrowfilelog(self.svfs, path, self._storenarrowmatch)
1255
1255
1256
1256
def makefilestorage(requirements, features, **kwargs):
    """Produce a type conforming to ``ilocalrepositoryfilestorage``.

    Registers the revlog-related repository features as a side effect and
    picks the narrow-aware storage class when the narrow requirement is
    present.
    """
    features.add(repository.REPO_FEATURE_REVLOG_FILE_STORAGE)
    features.add(repository.REPO_FEATURE_STREAM_CLONE)

    if requirementsmod.NARROW_REQUIREMENT in requirements:
        return revlognarrowfilestorage
    return revlogfilestorage
1266
1266
1267
1267
# Repository interfaces and the factory functions producing them. They are
# invoked in order by ``makelocalrepository()`` to iteratively derive the
# final type of a local repository instance. Each factory is wrapped in a
# lambda so we do not hold a direct reference and the module-level
# functions remain wrappable by extensions.
REPO_INTERFACES = [
    (repository.ilocalrepositorymain, lambda: makemain),
    (repository.ilocalrepositoryfilestorage, lambda: makefilestorage),
]
1277
1277
1278
1278
1279 @interfaceutil.implementer(repository.ilocalrepositorymain)
1279 @interfaceutil.implementer(repository.ilocalrepositorymain)
1280 class localrepository:
1280 class localrepository:
1281 """Main class for representing local repositories.
1281 """Main class for representing local repositories.
1282
1282
1283 All local repositories are instances of this class.
1283 All local repositories are instances of this class.
1284
1284
1285 Constructed on its own, instances of this class are not usable as
1285 Constructed on its own, instances of this class are not usable as
1286 repository objects. To obtain a usable repository object, call
1286 repository objects. To obtain a usable repository object, call
1287 ``hg.repository()``, ``localrepo.instance()``, or
1287 ``hg.repository()``, ``localrepo.instance()``, or
1288 ``localrepo.makelocalrepository()``. The latter is the lowest-level.
1288 ``localrepo.makelocalrepository()``. The latter is the lowest-level.
1289 ``instance()`` adds support for creating new repositories.
1289 ``instance()`` adds support for creating new repositories.
1290 ``hg.repository()`` adds more extension integration, including calling
1290 ``hg.repository()`` adds more extension integration, including calling
1291 ``reposetup()``. Generally speaking, ``hg.repository()`` should be
1291 ``reposetup()``. Generally speaking, ``hg.repository()`` should be
1292 used.
1292 used.
1293 """
1293 """
1294
1294
1295 _basesupported = {
1295 _basesupported = {
1296 requirementsmod.ARCHIVED_PHASE_REQUIREMENT,
1296 requirementsmod.ARCHIVED_PHASE_REQUIREMENT,
1297 requirementsmod.BOOKMARKS_IN_STORE_REQUIREMENT,
1297 requirementsmod.BOOKMARKS_IN_STORE_REQUIREMENT,
1298 requirementsmod.CHANGELOGV2_REQUIREMENT,
1298 requirementsmod.CHANGELOGV2_REQUIREMENT,
1299 requirementsmod.COPIESSDC_REQUIREMENT,
1299 requirementsmod.COPIESSDC_REQUIREMENT,
1300 requirementsmod.DIRSTATE_TRACKED_HINT_V1,
1300 requirementsmod.DIRSTATE_TRACKED_HINT_V1,
1301 requirementsmod.DIRSTATE_V2_REQUIREMENT,
1301 requirementsmod.DIRSTATE_V2_REQUIREMENT,
1302 requirementsmod.DOTENCODE_REQUIREMENT,
1302 requirementsmod.DOTENCODE_REQUIREMENT,
1303 requirementsmod.FNCACHE_REQUIREMENT,
1303 requirementsmod.FNCACHE_REQUIREMENT,
1304 requirementsmod.GENERALDELTA_REQUIREMENT,
1304 requirementsmod.GENERALDELTA_REQUIREMENT,
1305 requirementsmod.INTERNAL_PHASE_REQUIREMENT,
1305 requirementsmod.INTERNAL_PHASE_REQUIREMENT,
1306 requirementsmod.NODEMAP_REQUIREMENT,
1306 requirementsmod.NODEMAP_REQUIREMENT,
1307 requirementsmod.RELATIVE_SHARED_REQUIREMENT,
1307 requirementsmod.RELATIVE_SHARED_REQUIREMENT,
1308 requirementsmod.REVLOGV1_REQUIREMENT,
1308 requirementsmod.REVLOGV1_REQUIREMENT,
1309 requirementsmod.REVLOGV2_REQUIREMENT,
1309 requirementsmod.REVLOGV2_REQUIREMENT,
1310 requirementsmod.SHARED_REQUIREMENT,
1310 requirementsmod.SHARED_REQUIREMENT,
1311 requirementsmod.SHARESAFE_REQUIREMENT,
1311 requirementsmod.SHARESAFE_REQUIREMENT,
1312 requirementsmod.SPARSE_REQUIREMENT,
1312 requirementsmod.SPARSE_REQUIREMENT,
1313 requirementsmod.SPARSEREVLOG_REQUIREMENT,
1313 requirementsmod.SPARSEREVLOG_REQUIREMENT,
1314 requirementsmod.STORE_REQUIREMENT,
1314 requirementsmod.STORE_REQUIREMENT,
1315 requirementsmod.TREEMANIFEST_REQUIREMENT,
1315 requirementsmod.TREEMANIFEST_REQUIREMENT,
1316 }
1316 }
1317
1317
1318 # list of prefix for file which can be written without 'wlock'
1318 # list of prefix for file which can be written without 'wlock'
1319 # Extensions should extend this list when needed
1319 # Extensions should extend this list when needed
1320 _wlockfreeprefix = {
1320 _wlockfreeprefix = {
1321 # We migh consider requiring 'wlock' for the next
1321 # We migh consider requiring 'wlock' for the next
1322 # two, but pretty much all the existing code assume
1322 # two, but pretty much all the existing code assume
1323 # wlock is not needed so we keep them excluded for
1323 # wlock is not needed so we keep them excluded for
1324 # now.
1324 # now.
1325 b'hgrc',
1325 b'hgrc',
1326 b'requires',
1326 b'requires',
1327 # XXX cache is a complicatged business someone
1327 # XXX cache is a complicatged business someone
1328 # should investigate this in depth at some point
1328 # should investigate this in depth at some point
1329 b'cache/',
1329 b'cache/',
1330 # XXX bisect was still a bit too messy at the time
1330 # XXX bisect was still a bit too messy at the time
1331 # this changeset was introduced. Someone should fix
1331 # this changeset was introduced. Someone should fix
1332 # the remainig bit and drop this line
1332 # the remainig bit and drop this line
1333 b'bisect.state',
1333 b'bisect.state',
1334 }
1334 }
1335
1335
1336 def __init__(
1336 def __init__(
1337 self,
1337 self,
1338 baseui,
1338 baseui,
1339 ui,
1339 ui,
1340 origroot: bytes,
1340 origroot: bytes,
1341 wdirvfs: vfsmod.vfs,
1341 wdirvfs: vfsmod.vfs,
1342 hgvfs: vfsmod.vfs,
1342 hgvfs: vfsmod.vfs,
1343 requirements,
1343 requirements,
1344 supportedrequirements,
1344 supportedrequirements,
1345 sharedpath: bytes,
1345 sharedpath: bytes,
1346 store,
1346 store,
1347 cachevfs: vfsmod.vfs,
1347 cachevfs: vfsmod.vfs,
1348 wcachevfs: vfsmod.vfs,
1348 wcachevfs: vfsmod.vfs,
1349 features,
1349 features,
1350 intents=None,
1350 intents=None,
1351 ):
1351 ):
1352 """Create a new local repository instance.
1352 """Create a new local repository instance.
1353
1353
1354 Most callers should use ``hg.repository()``, ``localrepo.instance()``,
1354 Most callers should use ``hg.repository()``, ``localrepo.instance()``,
1355 or ``localrepo.makelocalrepository()`` for obtaining a new repository
1355 or ``localrepo.makelocalrepository()`` for obtaining a new repository
1356 object.
1356 object.
1357
1357
1358 Arguments:
1358 Arguments:
1359
1359
1360 baseui
1360 baseui
1361 ``ui.ui`` instance that ``ui`` argument was based off of.
1361 ``ui.ui`` instance that ``ui`` argument was based off of.
1362
1362
1363 ui
1363 ui
1364 ``ui.ui`` instance for use by the repository.
1364 ``ui.ui`` instance for use by the repository.
1365
1365
1366 origroot
1366 origroot
1367 ``bytes`` path to working directory root of this repository.
1367 ``bytes`` path to working directory root of this repository.
1368
1368
1369 wdirvfs
1369 wdirvfs
1370 ``vfs.vfs`` rooted at the working directory.
1370 ``vfs.vfs`` rooted at the working directory.
1371
1371
1372 hgvfs
1372 hgvfs
1373 ``vfs.vfs`` rooted at .hg/
1373 ``vfs.vfs`` rooted at .hg/
1374
1374
1375 requirements
1375 requirements
1376 ``set`` of bytestrings representing repository opening requirements.
1376 ``set`` of bytestrings representing repository opening requirements.
1377
1377
1378 supportedrequirements
1378 supportedrequirements
1379 ``set`` of bytestrings representing repository requirements that we
1379 ``set`` of bytestrings representing repository requirements that we
1380 know how to open. May be a supetset of ``requirements``.
1380 know how to open. May be a supetset of ``requirements``.
1381
1381
1382 sharedpath
1382 sharedpath
1383 ``bytes`` Defining path to storage base directory. Points to a
1383 ``bytes`` Defining path to storage base directory. Points to a
1384 ``.hg/`` directory somewhere.
1384 ``.hg/`` directory somewhere.
1385
1385
1386 store
1386 store
1387 ``store.basicstore`` (or derived) instance providing access to
1387 ``store.basicstore`` (or derived) instance providing access to
1388 versioned storage.
1388 versioned storage.
1389
1389
1390 cachevfs
1390 cachevfs
1391 ``vfs.vfs`` used for cache files.
1391 ``vfs.vfs`` used for cache files.
1392
1392
1393 wcachevfs
1393 wcachevfs
1394 ``vfs.vfs`` used for cache files related to the working copy.
1394 ``vfs.vfs`` used for cache files related to the working copy.
1395
1395
1396 features
1396 features
1397 ``set`` of bytestrings defining features/capabilities of this
1397 ``set`` of bytestrings defining features/capabilities of this
1398 instance.
1398 instance.
1399
1399
1400 intents
1400 intents
1401 ``set`` of system strings indicating what this repo will be used
1401 ``set`` of system strings indicating what this repo will be used
1402 for.
1402 for.
1403 """
1403 """
1404 self.baseui = baseui
1404 self.baseui = baseui
1405 self.ui = ui
1405 self.ui = ui
1406 self.origroot = origroot
1406 self.origroot = origroot
1407 # vfs rooted at working directory.
1407 # vfs rooted at working directory.
1408 self.wvfs = wdirvfs
1408 self.wvfs = wdirvfs
1409 self.root = wdirvfs.base
1409 self.root = wdirvfs.base
1410 # vfs rooted at .hg/. Used to access most non-store paths.
1410 # vfs rooted at .hg/. Used to access most non-store paths.
1411 self.vfs = hgvfs
1411 self.vfs = hgvfs
1412 self.path = hgvfs.base
1412 self.path = hgvfs.base
1413 self.requirements = requirements
1413 self.requirements = requirements
1414 self.nodeconstants = sha1nodeconstants
1414 self.nodeconstants = sha1nodeconstants
1415 self.nullid = self.nodeconstants.nullid
1415 self.nullid = self.nodeconstants.nullid
1416 self.supported = supportedrequirements
1416 self.supported = supportedrequirements
1417 self.sharedpath = sharedpath
1417 self.sharedpath = sharedpath
1418 self.store = store
1418 self.store = store
1419 self.cachevfs = cachevfs
1419 self.cachevfs = cachevfs
1420 self.wcachevfs = wcachevfs
1420 self.wcachevfs = wcachevfs
1421 self.features = features
1421 self.features = features
1422
1422
1423 self.filtername = None
1423 self.filtername = None
1424
1424
1425 if self.ui.configbool(b'devel', b'all-warnings') or self.ui.configbool(
1425 if self.ui.configbool(b'devel', b'all-warnings') or self.ui.configbool(
1426 b'devel', b'check-locks'
1426 b'devel', b'check-locks'
1427 ):
1427 ):
1428 self.vfs.audit = self._getvfsward(self.vfs.audit)
1428 self.vfs.audit = self._getvfsward(self.vfs.audit)
1429 # A list of callback to shape the phase if no data were found.
1429 # A list of callback to shape the phase if no data were found.
1430 # Callback are in the form: func(repo, roots) --> processed root.
1430 # Callback are in the form: func(repo, roots) --> processed root.
1431 # This list it to be filled by extension during repo setup
1431 # This list it to be filled by extension during repo setup
1432 self._phasedefaults = []
1432 self._phasedefaults = []
1433
1433
1434 color.setup(self.ui)
1434 color.setup(self.ui)
1435
1435
1436 self.spath = self.store.path
1436 self.spath = self.store.path
1437 self.svfs = self.store.vfs
1437 self.svfs = self.store.vfs
1438 self.sjoin = self.store.join
1438 self.sjoin = self.store.join
1439 if self.ui.configbool(b'devel', b'all-warnings') or self.ui.configbool(
1439 if self.ui.configbool(b'devel', b'all-warnings') or self.ui.configbool(
1440 b'devel', b'check-locks'
1440 b'devel', b'check-locks'
1441 ):
1441 ):
1442 if util.safehasattr(self.svfs, b'vfs'): # this is filtervfs
1442 if util.safehasattr(self.svfs, b'vfs'): # this is filtervfs
1443 self.svfs.vfs.audit = self._getsvfsward(self.svfs.vfs.audit)
1443 self.svfs.vfs.audit = self._getsvfsward(self.svfs.vfs.audit)
1444 else: # standard vfs
1444 else: # standard vfs
1445 self.svfs.audit = self._getsvfsward(self.svfs.audit)
1445 self.svfs.audit = self._getsvfsward(self.svfs.audit)
1446
1446
1447 self._dirstatevalidatewarned = False
1447 self._dirstatevalidatewarned = False
1448
1448
1449 self._branchcaches = branchmap.BranchMapCache()
1449 self._branchcaches = branchmap.BranchMapCache()
1450 self._revbranchcache = None
1450 self._revbranchcache = None
1451 self._filterpats = {}
1451 self._filterpats = {}
1452 self._datafilters = {}
1452 self._datafilters = {}
1453 self._transref = self._lockref = self._wlockref = None
1453 self._transref = self._lockref = self._wlockref = None
1454
1454
1455 # A cache for various files under .hg/ that tracks file changes,
1455 # A cache for various files under .hg/ that tracks file changes,
1456 # (used by the filecache decorator)
1456 # (used by the filecache decorator)
1457 #
1457 #
1458 # Maps a property name to its util.filecacheentry
1458 # Maps a property name to its util.filecacheentry
1459 self._filecache = {}
1459 self._filecache = {}
1460
1460
1461 # hold sets of revision to be filtered
1461 # hold sets of revision to be filtered
1462 # should be cleared when something might have changed the filter value:
1462 # should be cleared when something might have changed the filter value:
1463 # - new changesets,
1463 # - new changesets,
1464 # - phase change,
1464 # - phase change,
1465 # - new obsolescence marker,
1465 # - new obsolescence marker,
1466 # - working directory parent change,
1466 # - working directory parent change,
1467 # - bookmark changes
1467 # - bookmark changes
1468 self.filteredrevcache = {}
1468 self.filteredrevcache = {}
1469
1469
1470 self._dirstate = None
1470 self._dirstate = None
1471 # post-dirstate-status hooks
1471 # post-dirstate-status hooks
1472 self._postdsstatus = []
1472 self._postdsstatus = []
1473
1473
1474 self._pending_narrow_pats = None
1474 self._pending_narrow_pats = None
1475 self._pending_narrow_pats_dirstate = None
1475 self._pending_narrow_pats_dirstate = None
1476
1476
1477 # generic mapping between names and nodes
1477 # generic mapping between names and nodes
1478 self.names = namespaces.namespaces()
1478 self.names = namespaces.namespaces()
1479
1479
1480 # Key to signature value.
1480 # Key to signature value.
1481 self._sparsesignaturecache = {}
1481 self._sparsesignaturecache = {}
1482 # Signature to cached matcher instance.
1482 # Signature to cached matcher instance.
1483 self._sparsematchercache = {}
1483 self._sparsematchercache = {}
1484
1484
1485 self._extrafilterid = repoview.extrafilter(ui)
1485 self._extrafilterid = repoview.extrafilter(ui)
1486
1486
1487 self.filecopiesmode = None
1487 self.filecopiesmode = None
1488 if requirementsmod.COPIESSDC_REQUIREMENT in self.requirements:
1488 if requirementsmod.COPIESSDC_REQUIREMENT in self.requirements:
1489 self.filecopiesmode = b'changeset-sidedata'
1489 self.filecopiesmode = b'changeset-sidedata'
1490
1490
1491 self._wanted_sidedata = set()
1491 self._wanted_sidedata = set()
1492 self._sidedata_computers = {}
1492 self._sidedata_computers = {}
1493 sidedatamod.set_sidedata_spec_for_repo(self)
1493 sidedatamod.set_sidedata_spec_for_repo(self)
1494
1494
1495 def _getvfsward(self, origfunc):
1495 def _getvfsward(self, origfunc):
1496 """build a ward for self.vfs"""
1496 """build a ward for self.vfs"""
1497 rref = weakref.ref(self)
1497 rref = weakref.ref(self)
1498
1498
1499 def checkvfs(path, mode=None):
1499 def checkvfs(path, mode=None):
1500 ret = origfunc(path, mode=mode)
1500 ret = origfunc(path, mode=mode)
1501 repo = rref()
1501 repo = rref()
1502 if (
1502 if (
1503 repo is None
1503 repo is None
1504 or not util.safehasattr(repo, b'_wlockref')
1504 or not util.safehasattr(repo, b'_wlockref')
1505 or not util.safehasattr(repo, b'_lockref')
1505 or not util.safehasattr(repo, b'_lockref')
1506 ):
1506 ):
1507 return
1507 return
1508 if mode in (None, b'r', b'rb'):
1508 if mode in (None, b'r', b'rb'):
1509 return
1509 return
1510 if path.startswith(repo.path):
1510 if path.startswith(repo.path):
1511 # truncate name relative to the repository (.hg)
1511 # truncate name relative to the repository (.hg)
1512 path = path[len(repo.path) + 1 :]
1512 path = path[len(repo.path) + 1 :]
1513 if path.startswith(b'cache/'):
1513 if path.startswith(b'cache/'):
1514 msg = b'accessing cache with vfs instead of cachevfs: "%s"'
1514 msg = b'accessing cache with vfs instead of cachevfs: "%s"'
1515 repo.ui.develwarn(msg % path, stacklevel=3, config=b"cache-vfs")
1515 repo.ui.develwarn(msg % path, stacklevel=3, config=b"cache-vfs")
1516 # path prefixes covered by 'lock'
1516 # path prefixes covered by 'lock'
1517 vfs_path_prefixes = (
1517 vfs_path_prefixes = (
1518 b'journal.',
1518 b'journal.',
1519 b'undo.',
1519 b'undo.',
1520 b'strip-backup/',
1520 b'strip-backup/',
1521 b'cache/',
1521 b'cache/',
1522 )
1522 )
1523 if any(path.startswith(prefix) for prefix in vfs_path_prefixes):
1523 if any(path.startswith(prefix) for prefix in vfs_path_prefixes):
1524 if repo._currentlock(repo._lockref) is None:
1524 if repo._currentlock(repo._lockref) is None:
1525 repo.ui.develwarn(
1525 repo.ui.develwarn(
1526 b'write with no lock: "%s"' % path,
1526 b'write with no lock: "%s"' % path,
1527 stacklevel=3,
1527 stacklevel=3,
1528 config=b'check-locks',
1528 config=b'check-locks',
1529 )
1529 )
1530 elif repo._currentlock(repo._wlockref) is None:
1530 elif repo._currentlock(repo._wlockref) is None:
1531 # rest of vfs files are covered by 'wlock'
1531 # rest of vfs files are covered by 'wlock'
1532 #
1532 #
1533 # exclude special files
1533 # exclude special files
1534 for prefix in self._wlockfreeprefix:
1534 for prefix in self._wlockfreeprefix:
1535 if path.startswith(prefix):
1535 if path.startswith(prefix):
1536 return
1536 return
1537 repo.ui.develwarn(
1537 repo.ui.develwarn(
1538 b'write with no wlock: "%s"' % path,
1538 b'write with no wlock: "%s"' % path,
1539 stacklevel=3,
1539 stacklevel=3,
1540 config=b'check-locks',
1540 config=b'check-locks',
1541 )
1541 )
1542 return ret
1542 return ret
1543
1543
1544 return checkvfs
1544 return checkvfs
1545
1545
def _getsvfsward(self, origfunc):
    """Build a ward wrapping ``self.svfs`` open calls.

    The returned callable behaves exactly like ``origfunc`` but emits a
    devel warning when a store file is opened for writing while the repo
    lock is not held.
    """
    # weak reference so the ward does not keep the repo alive
    repo_ref = weakref.ref(self)

    def checksvfs(path, mode=None):
        result = origfunc(path, mode=mode)
        repo = repo_ref()
        # repo may already be collected, or not fully initialized yet
        if repo is None or not util.safehasattr(repo, b'_lockref'):
            return
        # read-only access needs no lock
        if mode in (None, b'r', b'rb'):
            return
        if path.startswith(repo.sharedpath):
            # truncate name relative to the repository (.hg)
            path = path[len(repo.sharedpath) + 1 :]
        if repo._currentlock(repo._lockref) is None:
            repo.ui.develwarn(
                b'write with no lock: "%s"' % path, stacklevel=4
            )
        return result

    return checksvfs
1567
1567
@property
def vfs_map(self):
    """Map transaction location markers to the vfs handling them.

    The empty marker and b'store' both resolve to the store vfs;
    b'plain' resolves to the working-directory ``.hg`` vfs.
    """
    mapping = {b'': self.svfs, b'store': self.svfs}
    mapping[b'plain'] = self.vfs
    return mapping
1575
1575
def close(self):
    """Flush pending in-memory caches to disk before the repo goes away."""
    self._writecaches()
1578
1578
def _writecaches(self):
    """Persist the rev-branch cache if it has been loaded."""
    rbc = self._revbranchcache
    if rbc:
        rbc.write()
1582
1582
def _restrictcapabilities(self, caps):
    """Adjust the advertised capability set according to configuration.

    Adds the bundle2 capability blob when bundle2 advertisement is
    enabled, and the narrow capability when narrow is enabled.
    """
    if self.ui.configbool(b'experimental', b'bundle2-advertise'):
        # copy so we never mutate the caller's capability set
        caps = set(caps)
        capsblob = bundle2.encodecaps(
            bundle2.getrepocaps(self, role=b'client')
        )
        caps.add(b'bundle2=' + urlreq.quote(capsblob))
    if self.ui.configbool(b'experimental', b'narrow'):
        caps.add(wireprototypes.NARROWCAP)
    return caps
1593
1593
# Don't cache auditor/nofsauditor, or you'll end up with a reference cycle:
# self -> auditor -> self._checknested -> self
1596
1596
@property
def auditor(self):
    """A path auditor backed by the real filesystem.

    Only used by context.workingctx.match in order to detect files in
    subrepos.
    """
    return pathutil.pathauditor(self.root, callback=self._checknested)
1602
1602
@property
def nofsauditor(self):
    """A path auditor that never touches the filesystem.

    Only used by context.basectx.match in order to detect files in
    subrepos.
    """
    return pathutil.pathauditor(
        self.root, callback=self._checknested, realfs=False, cached=True
    )
1610
1610
def _checknested(self, path):
    """Determine if path is a legal nested repository."""
    if not path.startswith(self.root):
        return False
    subpath = path[len(self.root) + 1 :]
    normsubpath = util.pconvert(subpath)

    # XXX: Checking against the current working copy is wrong in
    # the sense that it can reject things like
    #
    #   $ hg cat -r 10 sub/x.txt
    #
    # if sub/ is no longer a subrepository in the working copy
    # parent revision.
    #
    # However, it can of course also allow things that would have
    # been rejected before, such as the above cat command if sub/
    # is a subrepository now, but was a normal directory before.
    # The old path auditor would have rejected by mistake since it
    # panics when it sees sub/.hg/.
    #
    # All in all, checking against the working copy seems sensible
    # since we want to prevent access to nested repositories on
    # the filesystem *now*.
    ctx = self[None]
    parts = util.splitpath(subpath)
    while parts:
        prefix = b'/'.join(parts)
        if prefix not in ctx.substate:
            # not a subrepo boundary; try the parent directory
            parts.pop()
            continue
        if prefix == normsubpath:
            return True
        # path lies inside a subrepo: delegate the check to it
        sub = ctx.sub(prefix)
        return sub.checknested(subpath[len(prefix) + 1 :])
    return False
1648
1648
def peer(self, path=None):
    """Return a local peer for this repository.

    Deliberately not cached: caching would create a reference cycle.
    """
    return localpeer(self, path=path)
1651
1651
def unfiltered(self):
    """Return the unfiltered version of the repository.

    This base implementation returns the repo itself; repoview
    subclasses override it.
    """
    return self
1657
1657
def filtered(self, name, visibilityexceptions=None):
    """Return a filtered version of a repository.

    The `name` parameter is the identifier of the requested view. This
    will return a repoview object set "exactly" to the specified view.

    This function does not apply recursive filtering to a repository.
    For example calling `repo.filtered("served")` will return a
    repoview using the "served" view, regardless of the initial view
    used by `repo`. In other words, there is always only one level of
    `repoview` "filtering".
    """
    if self._extrafilterid is not None and b'%' not in name:
        name = name + b'%' + self._extrafilterid

    cls = repoview.newtype(self.unfiltered().__class__)
    return cls(self, name, visibilityexceptions)
1675
1675
@mixedrepostorecache(
    (b'bookmarks', b'plain'),
    (b'bookmarks.current', b'plain'),
    (b'bookmarks', b''),
    (b'00changelog.i', b''),
)
def _bookmarks(self):
    """The bookmark store, kept consistent with the changelog.

    Since the multiple files involved in the transaction cannot be
    written atomically (with the current repository format), there is a
    race condition here:

    1) changelog content A is read
    2) outside transaction updates changelog to content B
    3) outside transaction updates bookmark file referring to content B
    4) bookmarks file content is read and filtered against changelog-A

    When this happens, bookmarks against nodes missing from A are
    dropped.

    Having this happen during read is not great, but it becomes worse
    when it happens during write, because the bookmarks to the
    "unknown" nodes will be dropped for good. However, writes happen
    within locks. This locking makes it possible to have a race-free
    consistent read. For this purpose, data read from disk before
    locking is "invalidated" right after the locks are taken. These
    invalidations are "light": the `filecache` mechanism keeps the data
    in memory and will reuse it if the underlying files did not change.
    Not parsing the same data multiple times helps performance.

    Unfortunately, in the case described above, the files tracked by
    the bookmarks file cache might not have changed, but the in-memory
    content is still "wrong" because we used an older changelog content
    to process the on-disk data. So after locking, the changelog would
    be refreshed but `_bookmarks` would be preserved. Adding
    `00changelog.i` to the list of tracked files is not enough, because
    at the time we build the content for `_bookmarks` in (4), the
    changelog file has already diverged from the content used for
    loading `changelog` in (1).

    To prevent the issue, we force the changelog to be explicitly
    reloaded while computing `_bookmarks`. The data race can still
    happen without the lock (with a narrower window), but it would no
    longer go undetected during the lock-time refresh.

    The new schedule is as follows:

    1) filecache logic detects that `_bookmarks` needs to be computed
    2) cachestat for `bookmarks` and `changelog` are captured (for book)
    3) we force the `changelog` filecache to be tested
    4) cachestat for `changelog` is captured (for changelog)
    5) `_bookmarks` is computed and cached

    Step (3) ensures we have a changelog at least as recent as the
    cache stat computed in (1). As a result, at locking time:
    * if the changelog did not change since (1) -> we can reuse the data
    * otherwise -> the bookmarks get refreshed.
    """
    self._refreshchangelog()
    return bookmarks.bmstore(self)
1733
1733
def _refreshchangelog(self):
    """Make sure the in-memory changelog matches the on-disk one.

    The cached changelog is dropped (to be lazily reloaded) only when
    one is loaded and no transaction is in progress.
    """
    have_changelog = 'changelog' in vars(self)
    if have_changelog and self.currenttransaction() is None:
        del self.changelog
1738
1738
@property
def _activebookmark(self):
    """The currently active bookmark, or None."""
    return self._bookmarks.active
1742
1742
# _phasesets depends on the changelog. What we need is to call
# _phasecache.invalidate() if '00changelog.i' was changed, but that
# can't be easily expressed in the filecache mechanism.
@storecache(b'phaseroots', b'00changelog.i')
def _phasecache(self):
    """The phase cache for this repository."""
    return phases.phasecache(self, self._phasedefaults)
1749
1749
@storecache(b'obsstore')
def obsstore(self):
    """The obsolescence-marker store for this repository."""
    return obsolete.makestore(self.ui, self)
1753
1753
@changelogcache()
def changelog(repo):
    """The changelog revlog for this repository."""
    # load dirstate before changelog to avoid race, see issue6303
    repo.dirstate.prefetch_parents()
    return repo.store.changelog(
        txnutil.mayhavepending(repo.root),
        concurrencychecker=revlogchecker.get_checker(repo.ui, b'changelog'),
    )
1762
1762
@manifestlogcache()
def manifestlog(self):
    """The manifest log, restricted to the store narrow match."""
    return self.store.manifestlog(self, self._storenarrowmatch)
1766
1766
@unfilteredpropertycache
def dirstate(self):
    """The dirstate, created lazily and refreshed on later accesses."""
    if self._dirstate is None:
        # first access: build the dirstate object
        self._dirstate = self._makedirstate()
    else:
        # subsequent accesses: make sure it reflects on-disk state
        self._dirstate.refresh()
    return self._dirstate
1774
1774
def _makedirstate(self):
    """Extension point for wrapping the dirstate per-repo."""
    sparsematchfn = None
    if sparse.use_sparse(self):
        sparsematchfn = lambda: sparse.matcher(self)

    # derive dirstate format options from the repo requirements
    requirements = self.requirements
    use_dirstate_v2 = (
        requirementsmod.DIRSTATE_V2_REQUIREMENT in requirements
    )
    use_tracked_hint = (
        requirementsmod.DIRSTATE_TRACKED_HINT_V1 in requirements
    )

    return dirstate.dirstate(
        self.vfs,
        self.ui,
        self.root,
        self._dirstatevalidate,
        sparsematchfn,
        self.nodeconstants,
        use_dirstate_v2,
        use_tracked_hint=use_tracked_hint,
    )
1795
1795
def _dirstatevalidate(self, node):
    """Return ``node`` if it exists in the changelog, else ``nullid``.

    An unknown working parent is warned about once per repo instance.
    """
    try:
        self.changelog.rev(node)
    except error.LookupError:
        if not self._dirstatevalidatewarned:
            self._dirstatevalidatewarned = True
            self.ui.warn(
                _(b"warning: ignoring unknown working parent %s!\n")
                % short(node)
            )
        return self.nullid
    return node
1808
1808
@storecache(narrowspec.FILENAME)
def narrowpats(self):
    """matcher patterns for this repository's narrowspec

    A tuple of (includes, excludes).
    """
    # the narrow management should probably move into its own object
    pats = self._pending_narrow_pats
    if pats is None:
        # no in-flight narrowspec change: read it from the store
        pats = narrowspec.load(self)
    return pats
1820
1820
@storecache(narrowspec.FILENAME)
def _storenarrowmatch(self):
    """Narrow matcher for the store, without working-copy validation."""
    if requirementsmod.NARROW_REQUIREMENT not in self.requirements:
        return matchmod.always()
    include, exclude = self.narrowpats
    return narrowspec.match(self.root, include=include, exclude=exclude)
1827
1827
@storecache(narrowspec.FILENAME)
def _narrowmatch(self):
    """Narrow matcher, validating the working copy narrowspec first."""
    if requirementsmod.NARROW_REQUIREMENT not in self.requirements:
        return matchmod.always()
    # make sure the working copy agrees with the stored narrowspec
    narrowspec.checkworkingcopynarrowspec(self)
    include, exclude = self.narrowpats
    return narrowspec.match(self.root, include=include, exclude=exclude)
1835
1835
def narrowmatch(self, match=None, includeexact=False):
    """matcher corresponding to the repo's narrowspec

    If `match` is given, then that will be intersected with the narrow
    matcher.

    If `includeexact` is True, then any exact matches from `match` will
    be included even if they're outside the narrowspec.
    """
    if not match:
        return self._narrowmatch
    if includeexact and not self._narrowmatch.always():
        # do not exclude explicitly-specified paths so that they can
        # be warned later on
        exact = matchmod.exact(match.files())
        widened = matchmod.unionmatcher([self._narrowmatch, exact])
        return matchmod.intersectmatchers(match, widened)
    return matchmod.intersectmatchers(match, self._narrowmatch)
1854
1854
def setnarrowpats(self, newincludes, newexcludes):
    """Persist a new narrowspec and drop all cached state."""
    narrowspec.save(self, newincludes, newexcludes)
    self.invalidate(clearfilecache=True)
1858
1858
@unfilteredpropertycache
def _quick_access_changeid_null(self):
    """Fast-path lookup table mapping the null aliases to (rev, node)."""
    null_pair = (nullrev, self.nodeconstants.nullid)
    return {
        b'null': null_pair,
        nullrev: null_pair,
        self.nullid: (nullrev, self.nullid),
    }
1866
1866
@unfilteredpropertycache
def _quick_access_changeid_wc(self):
    """Fast-path lookup table including the working copy parents.

    Extends the null table with the dirstate parents (and their
    parents), for filters where the working copy is guaranteed visible.
    """
    quick = self._quick_access_changeid_null.copy()
    cl = self.unfiltered().changelog
    for node in self.dirstate.parents():
        if node == self.nullid:
            continue
        rev = cl.index.get_rev(node)
        if rev is None:
            # unknown working copy parent case:
            #
            # skip the fast path and let higher code deal with it
            continue
        pair = (rev, node)
        quick[rev] = pair
        quick[node] = pair
        # also add the parents of the parents
        for parent_rev in cl.parentrevs(rev):
            if parent_rev == nullrev:
                continue
            parent_node = cl.node(parent_rev)
            parent_pair = (parent_rev, parent_node)
            quick[parent_rev] = parent_pair
            quick[parent_node] = parent_pair
    p1node = self.dirstate.p1()
    if p1node != self.nullid:
        quick[b'.'] = quick[p1node]
    return quick
1897
1897
@unfilteredmethod
def _quick_access_changeid_invalidate(self):
    """Drop the cached working-copy fast-path table, if present."""
    if '_quick_access_changeid_wc' in vars(self):
        del self.__dict__['_quick_access_changeid_wc']
1902
1902
@property
def _quick_access_changeid(self):
    """A helper dictionary for ``__getitem__`` calls.

    This contains a list of symbols we can recognise right away without
    further processing.
    """
    if self.filtername in repoview.filter_has_wc:
        # working copy is visible: the richer table is safe to use
        return self._quick_access_changeid_wc
    return self._quick_access_changeid_null
1913
1913
def __getitem__(self, changeid):
    """Return the context for ``changeid``.

    ``changeid`` may be None (working copy), an existing context, a
    slice of revisions, an integer revision, a binary or hex node, or
    one of the recognised symbols (b'.', b'tip', b'null', ...).
    """
    # dealing with special cases
    if changeid is None:
        return context.workingctx(self)
    if isinstance(changeid, context.basectx):
        return changeid

    # dealing with multiple revisions
    if isinstance(changeid, slice):
        # wdirrev isn't contiguous so the slice shouldn't include it
        return [
            self[i]
            for i in range(*changeid.indices(len(self)))
            if i not in self.changelog.filteredrevs
        ]

    # dealing with some special values
    quick_access = self._quick_access_changeid.get(changeid)
    if quick_access is not None:
        rev, node = quick_access
        return context.changectx(self, rev, node, maybe_filtered=False)
    if changeid == b'tip':
        node = self.changelog.tip()
        rev = self.changelog.rev(node)
        return context.changectx(self, rev, node)

    # dealing with arbitrary values
    try:
        if isinstance(changeid, int):
            node = self.changelog.node(changeid)
            rev = changeid
        elif changeid == b'.':
            # this is a hack to delay/avoid loading obsmarkers
            # when we know that '.' won't be hidden
            node = self.dirstate.p1()
            rev = self.unfiltered().changelog.rev(node)
        elif len(changeid) == self.nodeconstants.nodelen:
            try:
                node = changeid
                rev = self.changelog.rev(changeid)
            except error.FilteredLookupError:
                changeid = hex(changeid)  # for the error message
                raise
            except LookupError:
                # check if it might have come from damaged dirstate
                #
                # XXX we could avoid the unfiltered if we had a
                # recognizable exception for filtered changeset access
                if (
                    self.local()
                    and changeid in self.unfiltered().dirstate.parents()
                ):
                    msg = _(b"working directory has unknown parent '%s'!")
                    raise error.Abort(msg % short(changeid))
                changeid = hex(changeid)  # for the error message
                raise

        elif len(changeid) == 2 * self.nodeconstants.nodelen:
            node = bin(changeid)
            rev = self.changelog.rev(node)
        else:
            raise error.ProgrammingError(
                b"unsupported changeid '%s' of type %s"
                % (changeid, pycompat.bytestr(type(changeid)))
            )

        return context.changectx(self, rev, node)

    except (error.FilteredIndexError, error.FilteredLookupError):
        raise error.FilteredRepoLookupError(
            _(b"filtered revision '%s'") % pycompat.bytestr(changeid)
        )
    except (IndexError, LookupError):
        raise error.RepoLookupError(
            _(b"unknown revision '%s'") % pycompat.bytestr(changeid)
        )
    except error.WdirUnsupported:
        return context.workingctx(self)
1992
1992
def __contains__(self, changeid):
    """True if the given changeid exists"""
    try:
        self[changeid]
    except error.RepoLookupError:
        return False
    return True
2000
2000
def __nonzero__(self):
    """A repository object is always truthy, even when empty."""
    return True

# Python 3 truthiness protocol
__bool__ = __nonzero__
2005
2005
def __len__(self):
    """Number of revisions in the repository."""
    # no need to pay the cost of repoview.changelog
    return len(self.unfiltered().changelog)
2010
2010
def __iter__(self):
    """Iterate over the (possibly filtered) revision numbers."""
    return iter(self.changelog)
2013
2013
def revs(self, expr: bytes, *args):
    """Find revisions matching a revset.

    The revset is specified as a string ``expr`` that may contain
    %-formatting to escape certain types. See ``revsetlang.formatspec``.

    Revset aliases from the configuration are not expanded. To expand
    user aliases, consider calling ``scmutil.revrange()`` or
    ``repo.anyrevs([expr], user=True)``.

    Returns a smartset.abstractsmartset, which is a list-like interface
    that contains integer revisions.
    """
    spec_tree = revsetlang.spectree(expr, *args)
    matcher = revset.makematcher(spec_tree)
    return matcher(self)
2029
2029
2030 def set(self, expr: bytes, *args):
2030 def set(self, expr: bytes, *args):
2031 """Find revisions matching a revset and emit changectx instances.
2031 """Find revisions matching a revset and emit changectx instances.
2032
2032
2033 This is a convenience wrapper around ``revs()`` that iterates the
2033 This is a convenience wrapper around ``revs()`` that iterates the
2034 result and is a generator of changectx instances.
2034 result and is a generator of changectx instances.
2035
2035
2036 Revset aliases from the configuration are not expanded. To expand
2036 Revset aliases from the configuration are not expanded. To expand
2037 user aliases, consider calling ``scmutil.revrange()``.
2037 user aliases, consider calling ``scmutil.revrange()``.
2038 """
2038 """
2039 for r in self.revs(expr, *args):
2039 for r in self.revs(expr, *args):
2040 yield self[r]
2040 yield self[r]
2041
2041
    def anyrevs(self, specs: bytes, user=False, localalias=None):
        """Find revisions matching one of the given revsets.

        Revset aliases from the configuration are not expanded by default. To
        expand user aliases, specify ``user=True``. To provide some local
        definitions overriding user aliases, set ``localalias`` to
        ``{name: definitionstring}``.
        """
        # fast paths for the two most common single-spec queries; they avoid
        # a full revset parse
        if specs == [b'null']:
            return revset.baseset([nullrev])
        if specs == [b'.']:
            quick_data = self._quick_access_changeid.get(b'.')
            if quick_data is not None:
                return revset.baseset([quick_data[0]])
        if user:
            m = revset.matchany(
                self.ui,
                specs,
                lookup=revset.lookupfn(self),
                localalias=localalias,
            )
        else:
            # passing ui=None means user revset aliases are not expanded
            m = revset.matchany(None, specs, localalias=localalias)
        return m(self)
2066
2066
2067 def url(self) -> bytes:
2067 def url(self) -> bytes:
2068 return b'file:' + self.root
2068 return b'file:' + self.root
2069
2069
    def hook(self, name, throw=False, **args):
        """Call a hook, passing this repo instance.

        This a convenience method to aid invoking hooks. Extensions likely
        won't call this unless they have registered a custom hook or are
        replacing code that is expected to call a hook.
        """
        # delegate to the hook module; extra keyword args become hook
        # environment arguments
        return hook.hook(self.ui, self, name, throw, **args)
2078
2078
    @filteredpropertycache
    def _tagscache(self):
        """Returns a tagscache object that contains various tags related
        caches."""

        # This simplifies its cache management by having one decorated
        # function (this one) and the rest simply fetch things from it.
        class tagscache:
            def __init__(self):
                # These two define the set of tags for this repository. tags
                # maps tag name to node; tagtypes maps tag name to 'global' or
                # 'local'. (Global tags are defined by .hgtags across all
                # heads, and local tags are defined in .hg/localtags.)
                # They constitute the in-memory cache of tags.
                self.tags = self.tagtypes = None

                # lazily filled in by tagslist() and nodetags() respectively
                self.nodetagscache = self.tagslist = None

        cache = tagscache()
        # tags/tagtypes are computed eagerly; the other two stay None until
        # first use
        cache.tags, cache.tagtypes = self._findtags()

        return cache
2101
2101
    def tags(self):
        '''return a mapping of tag to node'''
        t = {}
        if self.changelog.filteredrevs:
            # with filtered revisions the cached tags may point at hidden
            # changesets, so recompute instead of trusting _tagscache
            tags, tt = self._findtags()
        else:
            tags = self._tagscache.tags
        rev = self.changelog.rev
        for k, v in tags.items():
            try:
                # ignore tags to unknown nodes
                rev(v)
                t[k] = v
            except (error.LookupError, ValueError):
                pass
        return t
2118
2118
    def _findtags(self):
        """Do the hard work of finding tags. Return a pair of dicts
        (tags, tagtypes) where tags maps tag name to node, and tagtypes
        maps tag name to a string like \'global\' or \'local\'.
        Subclasses or extensions are free to add their own tags, but
        should be aware that the returned dicts will be retained for the
        duration of the localrepo object."""

        # XXX what tagtype should subclasses/extensions use? Currently
        # mq and bookmarks add tags, but do not set the tagtype at all.
        # Should each extension invent its own tag type? Should there
        # be one tagtype for all such "virtual" tags? Or is the status
        # quo fine?

        # map tag name to (node, hist)
        alltags = tagsmod.findglobaltags(self.ui, self)
        # map tag name to tag type
        tagtypes = {tag: b'global' for tag in alltags}

        # merge local tags in; alltags and tagtypes are mutated in place
        tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)

        # Build the return dicts. Have to re-encode tag names because
        # the tags module always uses UTF-8 (in order not to lose info
        # writing to the cache), but the rest of Mercurial wants them in
        # local encoding.
        tags = {}
        for name, (node, hist) in alltags.items():
            # tags pointing at the null node are skipped (presumably
            # deleted tags -- TODO confirm against tagsmod)
            if node != self.nullid:
                tags[encoding.tolocal(name)] = node
        # the 'tip' pseudo-tag is always present and points at
        # changelog.tip()
        tags[b'tip'] = self.changelog.tip()
        tagtypes = {
            encoding.tolocal(name): value for (name, value) in tagtypes.items()
        }
        return (tags, tagtypes)
2153
2153
2154 def tagtype(self, tagname):
2154 def tagtype(self, tagname):
2155 """
2155 """
2156 return the type of the given tag. result can be:
2156 return the type of the given tag. result can be:
2157
2157
2158 'local' : a local tag
2158 'local' : a local tag
2159 'global' : a global tag
2159 'global' : a global tag
2160 None : tag does not exist
2160 None : tag does not exist
2161 """
2161 """
2162
2162
2163 return self._tagscache.tagtypes.get(tagname)
2163 return self._tagscache.tagtypes.get(tagname)
2164
2164
2165 def tagslist(self):
2165 def tagslist(self):
2166 '''return a list of tags ordered by revision'''
2166 '''return a list of tags ordered by revision'''
2167 if not self._tagscache.tagslist:
2167 if not self._tagscache.tagslist:
2168 l = []
2168 l = []
2169 for t, n in self.tags().items():
2169 for t, n in self.tags().items():
2170 l.append((self.changelog.rev(n), t, n))
2170 l.append((self.changelog.rev(n), t, n))
2171 self._tagscache.tagslist = [(t, n) for r, t, n in sorted(l)]
2171 self._tagscache.tagslist = [(t, n) for r, t, n in sorted(l)]
2172
2172
2173 return self._tagscache.tagslist
2173 return self._tagscache.tagslist
2174
2174
2175 def nodetags(self, node):
2175 def nodetags(self, node):
2176 '''return the tags associated with a node'''
2176 '''return the tags associated with a node'''
2177 if not self._tagscache.nodetagscache:
2177 if not self._tagscache.nodetagscache:
2178 nodetagscache = {}
2178 nodetagscache = {}
2179 for t, n in self._tagscache.tags.items():
2179 for t, n in self._tagscache.tags.items():
2180 nodetagscache.setdefault(n, []).append(t)
2180 nodetagscache.setdefault(n, []).append(t)
2181 for tags in nodetagscache.values():
2181 for tags in nodetagscache.values():
2182 tags.sort()
2182 tags.sort()
2183 self._tagscache.nodetagscache = nodetagscache
2183 self._tagscache.nodetagscache = nodetagscache
2184 return self._tagscache.nodetagscache.get(node, [])
2184 return self._tagscache.nodetagscache.get(node, [])
2185
2185
2186 def nodebookmarks(self, node):
2186 def nodebookmarks(self, node):
2187 """return the list of bookmarks pointing to the specified node"""
2187 """return the list of bookmarks pointing to the specified node"""
2188 return self._bookmarks.names(node)
2188 return self._bookmarks.names(node)
2189
2189
2190 def branchmap(self):
2190 def branchmap(self):
2191 """returns a dictionary {branch: [branchheads]} with branchheads
2191 """returns a dictionary {branch: [branchheads]} with branchheads
2192 ordered by increasing revision number"""
2192 ordered by increasing revision number"""
2193 return self._branchcaches[self]
2193 return self._branchcaches[self]
2194
2194
2195 @unfilteredmethod
2195 @unfilteredmethod
2196 def revbranchcache(self):
2196 def revbranchcache(self):
2197 if not self._revbranchcache:
2197 if not self._revbranchcache:
2198 self._revbranchcache = branchmap.revbranchcache(self.unfiltered())
2198 self._revbranchcache = branchmap.revbranchcache(self.unfiltered())
2199 return self._revbranchcache
2199 return self._revbranchcache
2200
2200
2201 def register_changeset(self, rev, changelogrevision):
2201 def register_changeset(self, rev, changelogrevision):
2202 self.revbranchcache().setdata(rev, changelogrevision)
2202 self.revbranchcache().setdata(rev, changelogrevision)
2203
2203
2204 def branchtip(self, branch, ignoremissing=False):
2204 def branchtip(self, branch, ignoremissing=False):
2205 """return the tip node for a given branch
2205 """return the tip node for a given branch
2206
2206
2207 If ignoremissing is True, then this method will not raise an error.
2207 If ignoremissing is True, then this method will not raise an error.
2208 This is helpful for callers that only expect None for a missing branch
2208 This is helpful for callers that only expect None for a missing branch
2209 (e.g. namespace).
2209 (e.g. namespace).
2210
2210
2211 """
2211 """
2212 try:
2212 try:
2213 return self.branchmap().branchtip(branch)
2213 return self.branchmap().branchtip(branch)
2214 except KeyError:
2214 except KeyError:
2215 if not ignoremissing:
2215 if not ignoremissing:
2216 raise error.RepoLookupError(_(b"unknown branch '%s'") % branch)
2216 raise error.RepoLookupError(_(b"unknown branch '%s'") % branch)
2217 else:
2217 else:
2218 pass
2218 pass
2219
2219
2220 def lookup(self, key):
2220 def lookup(self, key):
2221 node = scmutil.revsymbol(self, key).node()
2221 node = scmutil.revsymbol(self, key).node()
2222 if node is None:
2222 if node is None:
2223 raise error.RepoLookupError(_(b"unknown revision '%s'") % key)
2223 raise error.RepoLookupError(_(b"unknown revision '%s'") % key)
2224 return node
2224 return node
2225
2225
2226 def lookupbranch(self, key):
2226 def lookupbranch(self, key):
2227 if self.branchmap().hasbranch(key):
2227 if self.branchmap().hasbranch(key):
2228 return key
2228 return key
2229
2229
2230 return scmutil.revsymbol(self, key).branch()
2230 return scmutil.revsymbol(self, key).branch()
2231
2231
2232 def known(self, nodes):
2232 def known(self, nodes):
2233 cl = self.changelog
2233 cl = self.changelog
2234 get_rev = cl.index.get_rev
2234 get_rev = cl.index.get_rev
2235 filtered = cl.filteredrevs
2235 filtered = cl.filteredrevs
2236 result = []
2236 result = []
2237 for n in nodes:
2237 for n in nodes:
2238 r = get_rev(n)
2238 r = get_rev(n)
2239 resp = not (r is None or r in filtered)
2239 resp = not (r is None or r in filtered)
2240 result.append(resp)
2240 result.append(resp)
2241 return result
2241 return result
2242
2242
    def local(self):
        # returning self (truthy) marks this repository as local;
        # statichttprepo overrides this (see the note in cancopy)
        return self
2245
2245
2246 def publishing(self):
2246 def publishing(self):
2247 # it's safe (and desirable) to trust the publish flag unconditionally
2247 # it's safe (and desirable) to trust the publish flag unconditionally
2248 # so that we don't finalize changes shared between users via ssh or nfs
2248 # so that we don't finalize changes shared between users via ssh or nfs
2249 return self.ui.configbool(b'phases', b'publish', untrusted=True)
2249 return self.ui.configbool(b'phases', b'publish', untrusted=True)
2250
2250
    def cancopy(self):
        """Return True if this repository may safely be copied as files."""
        # so statichttprepo's override of local() works
        if not self.local():
            return False
        # non-publishing repositories are always safe to copy
        if not self.publishing():
            return True
        # if publishing we can't copy if there is filtered content
        return not self.filtered(b'visible').changelog.filteredrevs
2259
2259
2260 def shared(self):
2260 def shared(self):
2261 '''the type of shared repository (None if not shared)'''
2261 '''the type of shared repository (None if not shared)'''
2262 if self.sharedpath != self.path:
2262 if self.sharedpath != self.path:
2263 return b'store'
2263 return b'store'
2264 return None
2264 return None
2265
2265
2266 def wjoin(self, f: bytes, *insidef: bytes) -> bytes:
2266 def wjoin(self, f: bytes, *insidef: bytes) -> bytes:
2267 return self.vfs.reljoin(self.root, f, *insidef)
2267 return self.vfs.reljoin(self.root, f, *insidef)
2268
2268
    def setparents(self, p1, p2=None):
        """Set the working directory parents to ``p1`` and ``p2``.

        ``p2`` defaults to the null node (single-parent working copy).
        """
        if p2 is None:
            p2 = self.nullid
        self[None].setparents(p1, p2)
        # parents changed: drop the cached quick-access changeid lookups
        # (e.g. for b'.')
        self._quick_access_changeid_invalidate()
2274
2274
    def filectx(self, path: bytes, changeid=None, fileid=None, changectx=None):
        """changeid must be a changeset revision, if specified.
        fileid can be a file revision or node."""
        # thin wrapper constructing a file context bound to this repository
        return context.filectx(
            self, path, changeid, fileid, changectx=changectx
        )
2281
2281
2282 def getcwd(self) -> bytes:
2282 def getcwd(self) -> bytes:
2283 return self.dirstate.getcwd()
2283 return self.dirstate.getcwd()
2284
2284
2285 def pathto(self, f: bytes, cwd: Optional[bytes] = None) -> bytes:
2285 def pathto(self, f: bytes, cwd: Optional[bytes] = None) -> bytes:
2286 return self.dirstate.pathto(f, cwd)
2286 return self.dirstate.pathto(f, cwd)
2287
2287
    def _loadfilter(self, filter):
        """Load (and cache) the filter patterns for config section
        ``filter`` (b'encode' or b'decode', see the callers below).

        Returns a list of ``(matcher, filterfn, params)`` tuples.
        """
        if filter not in self._filterpats:
            l = []
            for pat, cmd in self.ui.configitems(filter):
                # entries set to b'!' are skipped entirely
                if cmd == b'!':
                    continue
                mf = matchmod.match(self.root, b'', [pat])
                fn = None
                params = cmd
                for name, filterfn in self._datafilters.items():
                    if cmd.startswith(name):
                        # a registered data filter matched; the rest of the
                        # command string becomes its parameters
                        fn = filterfn
                        params = cmd[len(name) :].lstrip()
                        break
                if not fn:
                    # no registered filter: pipe the data through ``cmd``
                    # as an external command
                    fn = lambda s, c, **kwargs: procutil.filter(s, c)
                    fn.__name__ = 'commandfilter'
                # Wrap old filters not supporting keyword arguments
                if not pycompat.getargspec(fn)[2]:
                    oldfn = fn
                    fn = lambda s, c, oldfn=oldfn, **kwargs: oldfn(s, c)
                    fn.__name__ = 'compat-' + oldfn.__name__
                l.append((mf, fn, params))
            self._filterpats[filter] = l
        return self._filterpats[filter]
2313
2313
    def _filter(self, filterpats, filename, data):
        """Run ``data`` through the first filter in ``filterpats`` whose
        pattern matches ``filename``; return the (possibly transformed)
        data."""
        for mf, fn, cmd in filterpats:
            if mf(filename):
                self.ui.debug(
                    b"filtering %s through %s\n"
                    % (filename, cmd or pycompat.sysbytes(fn.__name__))
                )
                data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
                # only the first matching filter is applied
                break

        return data
2325
2325
    @unfilteredpropertycache
    def _encodefilterpats(self):
        # cached [encode] filter patterns, applied when reading from the
        # working directory (see wread)
        return self._loadfilter(b'encode')
2329
2329
    @unfilteredpropertycache
    def _decodefilterpats(self):
        # cached [decode] filter patterns, applied when writing to the
        # working directory (see wwrite/wwritedata)
        return self._loadfilter(b'decode')
2333
2333
    def adddatafilter(self, name, filter):
        # register a named data filter; matched by command prefix in
        # _loadfilter
        self._datafilters[name] = filter
2336
2336
2337 def wread(self, filename: bytes) -> bytes:
2337 def wread(self, filename: bytes) -> bytes:
2338 if self.wvfs.islink(filename):
2338 if self.wvfs.islink(filename):
2339 data = self.wvfs.readlink(filename)
2339 data = self.wvfs.readlink(filename)
2340 else:
2340 else:
2341 data = self.wvfs.read(filename)
2341 data = self.wvfs.read(filename)
2342 return self._filter(self._encodefilterpats, filename, data)
2342 return self._filter(self._encodefilterpats, filename, data)
2343
2343
    def wwrite(
        self,
        filename: bytes,
        data: bytes,
        flags: bytes,
        backgroundclose=False,
        **kwargs
    ) -> int:
        """write ``data`` into ``filename`` in the working directory

        ``flags`` may contain b'l' (write as symlink) and/or b'x'
        (set the executable bit).

        This returns length of written (maybe decoded) data.
        """
        # apply [decode] filters before the data reaches the working dir
        data = self._filter(self._decodefilterpats, filename, data)
        if b'l' in flags:
            # symlink: the filtered data is the link target
            self.wvfs.symlink(data, filename)
        else:
            self.wvfs.write(
                filename, data, backgroundclose=backgroundclose, **kwargs
            )
            # always normalize the exec bit, even when b'x' is absent
            if b'x' in flags:
                self.wvfs.setflags(filename, False, True)
            else:
                self.wvfs.setflags(filename, False, False)
        return len(data)
2368
2368
2369 def wwritedata(self, filename: bytes, data: bytes) -> bytes:
2369 def wwritedata(self, filename: bytes, data: bytes) -> bytes:
2370 return self._filter(self._decodefilterpats, filename, data)
2370 return self._filter(self._decodefilterpats, filename, data)
2371
2371
2372 def currenttransaction(self):
2372 def currenttransaction(self):
2373 """return the current transaction or None if non exists"""
2373 """return the current transaction or None if non exists"""
2374 if self._transref:
2374 if self._transref:
2375 tr = self._transref()
2375 tr = self._transref()
2376 else:
2376 else:
2377 tr = None
2377 tr = None
2378
2378
2379 if tr and tr.running():
2379 if tr and tr.running():
2380 return tr
2380 return tr
2381 return None
2381 return None
2382
2382
2383 def transaction(self, desc, report=None):
2383 def transaction(self, desc, report=None):
2384 if self.ui.configbool(b'devel', b'all-warnings') or self.ui.configbool(
2384 if self.ui.configbool(b'devel', b'all-warnings') or self.ui.configbool(
2385 b'devel', b'check-locks'
2385 b'devel', b'check-locks'
2386 ):
2386 ):
2387 if self._currentlock(self._lockref) is None:
2387 if self._currentlock(self._lockref) is None:
2388 raise error.ProgrammingError(b'transaction requires locking')
2388 raise error.ProgrammingError(b'transaction requires locking')
2389 tr = self.currenttransaction()
2389 tr = self.currenttransaction()
2390 if tr is not None:
2390 if tr is not None:
2391 return tr.nest(name=desc)
2391 return tr.nest(name=desc)
2392
2392
2393 # abort here if the journal already exists
2393 # abort here if the journal already exists
2394 if self.svfs.exists(b"journal"):
2394 if self.svfs.exists(b"journal"):
2395 raise error.RepoError(
2395 raise error.RepoError(
2396 _(b"abandoned transaction found"),
2396 _(b"abandoned transaction found"),
2397 hint=_(b"run 'hg recover' to clean up transaction"),
2397 hint=_(b"run 'hg recover' to clean up transaction"),
2398 )
2398 )
2399
2399
2400 # At that point your dirstate should be clean:
2400 # At that point your dirstate should be clean:
2401 #
2401 #
2402 # - If you don't have the wlock, why would you still have a dirty
2402 # - If you don't have the wlock, why would you still have a dirty
2403 # dirstate ?
2403 # dirstate ?
2404 #
2404 #
2405 # - If you hold the wlock, you should not be opening a transaction in
2405 # - If you hold the wlock, you should not be opening a transaction in
2406 # the middle of a `distate.changing_*` block. The transaction needs to
2406 # the middle of a `distate.changing_*` block. The transaction needs to
2407 # be open before that and wrap the change-context.
2407 # be open before that and wrap the change-context.
2408 #
2408 #
2409 # - If you are not within a `dirstate.changing_*` context, why is our
2409 # - If you are not within a `dirstate.changing_*` context, why is our
2410 # dirstate dirty?
2410 # dirstate dirty?
2411 if self.dirstate._dirty:
2411 if self.dirstate._dirty:
2412 m = "cannot open a transaction with a dirty dirstate"
2412 m = "cannot open a transaction with a dirty dirstate"
2413 raise error.ProgrammingError(m)
2413 raise error.ProgrammingError(m)
2414
2414
2415 idbase = b"%.40f#%f" % (random.random(), time.time())
2415 idbase = b"%.40f#%f" % (random.random(), time.time())
2416 ha = hex(hashutil.sha1(idbase).digest())
2416 ha = hex(hashutil.sha1(idbase).digest())
2417 txnid = b'TXN:' + ha
2417 txnid = b'TXN:' + ha
2418 self.hook(b'pretxnopen', throw=True, txnname=desc, txnid=txnid)
2418 self.hook(b'pretxnopen', throw=True, txnname=desc, txnid=txnid)
2419
2419
2420 self._writejournal(desc)
2420 self._writejournal(desc)
2421 if report:
2421 if report:
2422 rp = report
2422 rp = report
2423 else:
2423 else:
2424 rp = self.ui.warn
2424 rp = self.ui.warn
2425 vfsmap = self.vfs_map
2425 vfsmap = self.vfs_map
2426 # we must avoid cyclic reference between repo and transaction.
2426 # we must avoid cyclic reference between repo and transaction.
2427 reporef = weakref.ref(self)
2427 reporef = weakref.ref(self)
2428 # Code to track tag movement
2428 # Code to track tag movement
2429 #
2429 #
2430 # Since tags are all handled as file content, it is actually quite hard
2430 # Since tags are all handled as file content, it is actually quite hard
2431 # to track these movement from a code perspective. So we fallback to a
2431 # to track these movement from a code perspective. So we fallback to a
2432 # tracking at the repository level. One could envision to track changes
2432 # tracking at the repository level. One could envision to track changes
2433 # to the '.hgtags' file through changegroup apply but that fails to
2433 # to the '.hgtags' file through changegroup apply but that fails to
2434 # cope with case where transaction expose new heads without changegroup
2434 # cope with case where transaction expose new heads without changegroup
2435 # being involved (eg: phase movement).
2435 # being involved (eg: phase movement).
2436 #
2436 #
2437 # For now, We gate the feature behind a flag since this likely comes
2437 # For now, We gate the feature behind a flag since this likely comes
2438 # with performance impacts. The current code run more often than needed
2438 # with performance impacts. The current code run more often than needed
2439 # and do not use caches as much as it could. The current focus is on
2439 # and do not use caches as much as it could. The current focus is on
2440 # the behavior of the feature so we disable it by default. The flag
2440 # the behavior of the feature so we disable it by default. The flag
2441 # will be removed when we are happy with the performance impact.
2441 # will be removed when we are happy with the performance impact.
2442 #
2442 #
2443 # Once this feature is no longer experimental move the following
2443 # Once this feature is no longer experimental move the following
2444 # documentation to the appropriate help section:
2444 # documentation to the appropriate help section:
2445 #
2445 #
2446 # The ``HG_TAG_MOVED`` variable will be set if the transaction touched
2446 # The ``HG_TAG_MOVED`` variable will be set if the transaction touched
2447 # tags (new or changed or deleted tags). In addition the details of
2447 # tags (new or changed or deleted tags). In addition the details of
2448 # these changes are made available in a file at:
2448 # these changes are made available in a file at:
2449 # ``REPOROOT/.hg/changes/tags.changes``.
2449 # ``REPOROOT/.hg/changes/tags.changes``.
2450 # Make sure you check for HG_TAG_MOVED before reading that file as it
2450 # Make sure you check for HG_TAG_MOVED before reading that file as it
2451 # might exist from a previous transaction even if no tag were touched
2451 # might exist from a previous transaction even if no tag were touched
2452 # in this one. Changes are recorded in a line base format::
2452 # in this one. Changes are recorded in a line base format::
2453 #
2453 #
2454 # <action> <hex-node> <tag-name>\n
2454 # <action> <hex-node> <tag-name>\n
2455 #
2455 #
2456 # Actions are defined as follow:
2456 # Actions are defined as follow:
2457 # "-R": tag is removed,
2457 # "-R": tag is removed,
2458 # "+A": tag is added,
2458 # "+A": tag is added,
2459 # "-M": tag is moved (old value),
2459 # "-M": tag is moved (old value),
2460 # "+M": tag is moved (new value),
2460 # "+M": tag is moved (new value),
2461 tracktags = lambda x: None
2461 tracktags = lambda x: None
2462 # experimental config: experimental.hook-track-tags
2462 # experimental config: experimental.hook-track-tags
2463 shouldtracktags = self.ui.configbool(
2463 shouldtracktags = self.ui.configbool(
2464 b'experimental', b'hook-track-tags'
2464 b'experimental', b'hook-track-tags'
2465 )
2465 )
2466 if desc != b'strip' and shouldtracktags:
2466 if desc != b'strip' and shouldtracktags:
2467 oldheads = self.changelog.headrevs()
2467 oldheads = self.changelog.headrevs()
2468
2468
2469 def tracktags(tr2):
2469 def tracktags(tr2):
2470 repo = reporef()
2470 repo = reporef()
2471 assert repo is not None # help pytype
2471 assert repo is not None # help pytype
2472 oldfnodes = tagsmod.fnoderevs(repo.ui, repo, oldheads)
2472 oldfnodes = tagsmod.fnoderevs(repo.ui, repo, oldheads)
2473 newheads = repo.changelog.headrevs()
2473 newheads = repo.changelog.headrevs()
2474 newfnodes = tagsmod.fnoderevs(repo.ui, repo, newheads)
2474 newfnodes = tagsmod.fnoderevs(repo.ui, repo, newheads)
2475 # notes: we compare lists here.
2475 # notes: we compare lists here.
2476 # As we do it only once buiding set would not be cheaper
2476 # As we do it only once buiding set would not be cheaper
2477 changes = tagsmod.difftags(repo.ui, repo, oldfnodes, newfnodes)
2477 changes = tagsmod.difftags(repo.ui, repo, oldfnodes, newfnodes)
2478 if changes:
2478 if changes:
2479 tr2.hookargs[b'tag_moved'] = b'1'
2479 tr2.hookargs[b'tag_moved'] = b'1'
2480 with repo.vfs(
2480 with repo.vfs(
2481 b'changes/tags.changes', b'w', atomictemp=True
2481 b'changes/tags.changes', b'w', atomictemp=True
2482 ) as changesfile:
2482 ) as changesfile:
2483 # note: we do not register the file to the transaction
2483 # note: we do not register the file to the transaction
2484 # because we needs it to still exist on the transaction
2484 # because we needs it to still exist on the transaction
2485 # is close (for txnclose hooks)
2485 # is close (for txnclose hooks)
2486 tagsmod.writediff(changesfile, changes)
2486 tagsmod.writediff(changesfile, changes)
2487
2487
2488 def validate(tr2):
2488 def validate(tr2):
2489 """will run pre-closing hooks"""
2489 """will run pre-closing hooks"""
2490 # XXX the transaction API is a bit lacking here so we take a hacky
2490 # XXX the transaction API is a bit lacking here so we take a hacky
2491 # path for now
2491 # path for now
2492 #
2492 #
2493 # We cannot add this as a "pending" hooks since the 'tr.hookargs'
2493 # We cannot add this as a "pending" hooks since the 'tr.hookargs'
2494 # dict is copied before these run. In addition we needs the data
2494 # dict is copied before these run. In addition we needs the data
2495 # available to in memory hooks too.
2495 # available to in memory hooks too.
2496 #
2496 #
2497 # Moreover, we also need to make sure this runs before txnclose
2497 # Moreover, we also need to make sure this runs before txnclose
2498 # hooks and there is no "pending" mechanism that would execute
2498 # hooks and there is no "pending" mechanism that would execute
2499 # logic only if hooks are about to run.
2499 # logic only if hooks are about to run.
2500 #
2500 #
2501 # Fixing this limitation of the transaction is also needed to track
2501 # Fixing this limitation of the transaction is also needed to track
2502 # other families of changes (bookmarks, phases, obsolescence).
2502 # other families of changes (bookmarks, phases, obsolescence).
2503 #
2503 #
2504 # This will have to be fixed before we remove the experimental
2504 # This will have to be fixed before we remove the experimental
2505 # gating.
2505 # gating.
2506 tracktags(tr2)
2506 tracktags(tr2)
2507 repo = reporef()
2507 repo = reporef()
2508 assert repo is not None # help pytype
2508 assert repo is not None # help pytype
2509
2509
2510 singleheadopt = (b'experimental', b'single-head-per-branch')
2510 singleheadopt = (b'experimental', b'single-head-per-branch')
2511 singlehead = repo.ui.configbool(*singleheadopt)
2511 singlehead = repo.ui.configbool(*singleheadopt)
2512 if singlehead:
2512 if singlehead:
2513 singleheadsub = repo.ui.configsuboptions(*singleheadopt)[1]
2513 singleheadsub = repo.ui.configsuboptions(*singleheadopt)[1]
2514 accountclosed = singleheadsub.get(
2514 accountclosed = singleheadsub.get(
2515 b"account-closed-heads", False
2515 b"account-closed-heads", False
2516 )
2516 )
2517 if singleheadsub.get(b"public-changes-only", False):
2517 if singleheadsub.get(b"public-changes-only", False):
2518 filtername = b"immutable"
2518 filtername = b"immutable"
2519 else:
2519 else:
2520 filtername = b"visible"
2520 filtername = b"visible"
2521 scmutil.enforcesinglehead(
2521 scmutil.enforcesinglehead(
2522 repo, tr2, desc, accountclosed, filtername
2522 repo, tr2, desc, accountclosed, filtername
2523 )
2523 )
2524 if hook.hashook(repo.ui, b'pretxnclose-bookmark'):
2524 if hook.hashook(repo.ui, b'pretxnclose-bookmark'):
2525 for name, (old, new) in sorted(
2525 for name, (old, new) in sorted(
2526 tr.changes[b'bookmarks'].items()
2526 tr.changes[b'bookmarks'].items()
2527 ):
2527 ):
2528 args = tr.hookargs.copy()
2528 args = tr.hookargs.copy()
2529 args.update(bookmarks.preparehookargs(name, old, new))
2529 args.update(bookmarks.preparehookargs(name, old, new))
2530 repo.hook(
2530 repo.hook(
2531 b'pretxnclose-bookmark',
2531 b'pretxnclose-bookmark',
2532 throw=True,
2532 throw=True,
2533 **pycompat.strkwargs(args)
2533 **pycompat.strkwargs(args)
2534 )
2534 )
2535 if hook.hashook(repo.ui, b'pretxnclose-phase'):
2535 if hook.hashook(repo.ui, b'pretxnclose-phase'):
2536 cl = repo.unfiltered().changelog
2536 cl = repo.unfiltered().changelog
2537 for revs, (old, new) in tr.changes[b'phases']:
2537 for revs, (old, new) in tr.changes[b'phases']:
2538 for rev in revs:
2538 for rev in revs:
2539 args = tr.hookargs.copy()
2539 args = tr.hookargs.copy()
2540 node = hex(cl.node(rev))
2540 node = hex(cl.node(rev))
2541 args.update(phases.preparehookargs(node, old, new))
2541 args.update(phases.preparehookargs(node, old, new))
2542 repo.hook(
2542 repo.hook(
2543 b'pretxnclose-phase',
2543 b'pretxnclose-phase',
2544 throw=True,
2544 throw=True,
2545 **pycompat.strkwargs(args)
2545 **pycompat.strkwargs(args)
2546 )
2546 )
2547
2547
2548 repo.hook(
2548 repo.hook(
2549 b'pretxnclose', throw=True, **pycompat.strkwargs(tr.hookargs)
2549 b'pretxnclose', throw=True, **pycompat.strkwargs(tr.hookargs)
2550 )
2550 )
2551
2551
2552 def releasefn(tr, success):
2552 def releasefn(tr, success):
2553 repo = reporef()
2553 repo = reporef()
2554 if repo is None:
2554 if repo is None:
2555 # If the repo has been GC'd (and this release function is being
2555 # If the repo has been GC'd (and this release function is being
2556 # called from transaction.__del__), there's not much we can do,
2556 # called from transaction.__del__), there's not much we can do,
2557 # so just leave the unfinished transaction there and let the
2557 # so just leave the unfinished transaction there and let the
2558 # user run `hg recover`.
2558 # user run `hg recover`.
2559 return
2559 return
2560 if success:
2560 if success:
2561 # this should be explicitly invoked here, because
2561 # this should be explicitly invoked here, because
2562 # in-memory changes aren't written out at closing
2562 # in-memory changes aren't written out at closing
2563 # transaction, if tr.addfilegenerator (via
2563 # transaction, if tr.addfilegenerator (via
2564 # dirstate.write or so) isn't invoked while
2564 # dirstate.write or so) isn't invoked while
2565 # transaction running
2565 # transaction running
2566 repo.dirstate.write(None)
2566 repo.dirstate.write(None)
2567 else:
2567 else:
2568 # discard all changes (including ones already written
2568 # discard all changes (including ones already written
2569 # out) in this transaction
2569 # out) in this transaction
2570 repo.invalidate(clearfilecache=True)
2570 repo.invalidate(clearfilecache=True)
2571
2571
2572 tr = transaction.transaction(
2572 tr = transaction.transaction(
2573 rp,
2573 rp,
2574 self.svfs,
2574 self.svfs,
2575 vfsmap,
2575 vfsmap,
2576 b"journal",
2576 b"journal",
2577 b"undo",
2577 b"undo",
2578 lambda: None,
2578 lambda: None,
2579 self.store.createmode,
2579 self.store.createmode,
2580 validator=validate,
2580 validator=validate,
2581 releasefn=releasefn,
2581 releasefn=releasefn,
2582 checkambigfiles=_cachedfiles,
2582 checkambigfiles=_cachedfiles,
2583 name=desc,
2583 name=desc,
2584 )
2584 )
2585 for vfs_id, path in self._journalfiles():
2585 for vfs_id, path in self._journalfiles():
2586 tr.add_journal(vfs_id, path)
2586 tr.add_journal(vfs_id, path)
2587 tr.changes[b'origrepolen'] = len(self)
2587 tr.changes[b'origrepolen'] = len(self)
2588 tr.changes[b'obsmarkers'] = set()
2588 tr.changes[b'obsmarkers'] = set()
2589 tr.changes[b'phases'] = []
2589 tr.changes[b'phases'] = []
2590 tr.changes[b'bookmarks'] = {}
2590 tr.changes[b'bookmarks'] = {}
2591
2591
2592 tr.hookargs[b'txnid'] = txnid
2592 tr.hookargs[b'txnid'] = txnid
2593 tr.hookargs[b'txnname'] = desc
2593 tr.hookargs[b'txnname'] = desc
2594 tr.hookargs[b'changes'] = tr.changes
2594 tr.hookargs[b'changes'] = tr.changes
2595 # note: writing the fncache only during finalize mean that the file is
2595 # note: writing the fncache only during finalize mean that the file is
2596 # outdated when running hooks. As fncache is used for streaming clone,
2596 # outdated when running hooks. As fncache is used for streaming clone,
2597 # this is not expected to break anything that happen during the hooks.
2597 # this is not expected to break anything that happen during the hooks.
2598 tr.addfinalize(b'flush-fncache', self.store.write)
2598 tr.addfinalize(b'flush-fncache', self.store.write)
2599
2599
2600 def txnclosehook(tr2):
2600 def txnclosehook(tr2):
2601 """To be run if transaction is successful, will schedule a hook run"""
2601 """To be run if transaction is successful, will schedule a hook run"""
2602 # Don't reference tr2 in hook() so we don't hold a reference.
2602 # Don't reference tr2 in hook() so we don't hold a reference.
2603 # This reduces memory consumption when there are multiple
2603 # This reduces memory consumption when there are multiple
2604 # transactions per lock. This can likely go away if issue5045
2604 # transactions per lock. This can likely go away if issue5045
2605 # fixes the function accumulation.
2605 # fixes the function accumulation.
2606 hookargs = tr2.hookargs
2606 hookargs = tr2.hookargs
2607
2607
2608 def hookfunc(unused_success):
2608 def hookfunc(unused_success):
2609 repo = reporef()
2609 repo = reporef()
2610 assert repo is not None # help pytype
2610 assert repo is not None # help pytype
2611
2611
2612 if hook.hashook(repo.ui, b'txnclose-bookmark'):
2612 if hook.hashook(repo.ui, b'txnclose-bookmark'):
2613 bmchanges = sorted(tr.changes[b'bookmarks'].items())
2613 bmchanges = sorted(tr.changes[b'bookmarks'].items())
2614 for name, (old, new) in bmchanges:
2614 for name, (old, new) in bmchanges:
2615 args = tr.hookargs.copy()
2615 args = tr.hookargs.copy()
2616 args.update(bookmarks.preparehookargs(name, old, new))
2616 args.update(bookmarks.preparehookargs(name, old, new))
2617 repo.hook(
2617 repo.hook(
2618 b'txnclose-bookmark',
2618 b'txnclose-bookmark',
2619 throw=False,
2619 throw=False,
2620 **pycompat.strkwargs(args)
2620 **pycompat.strkwargs(args)
2621 )
2621 )
2622
2622
2623 if hook.hashook(repo.ui, b'txnclose-phase'):
2623 if hook.hashook(repo.ui, b'txnclose-phase'):
2624 cl = repo.unfiltered().changelog
2624 cl = repo.unfiltered().changelog
2625 phasemv = sorted(
2625 phasemv = sorted(
2626 tr.changes[b'phases'], key=lambda r: r[0][0]
2626 tr.changes[b'phases'], key=lambda r: r[0][0]
2627 )
2627 )
2628 for revs, (old, new) in phasemv:
2628 for revs, (old, new) in phasemv:
2629 for rev in revs:
2629 for rev in revs:
2630 args = tr.hookargs.copy()
2630 args = tr.hookargs.copy()
2631 node = hex(cl.node(rev))
2631 node = hex(cl.node(rev))
2632 args.update(phases.preparehookargs(node, old, new))
2632 args.update(phases.preparehookargs(node, old, new))
2633 repo.hook(
2633 repo.hook(
2634 b'txnclose-phase',
2634 b'txnclose-phase',
2635 throw=False,
2635 throw=False,
2636 **pycompat.strkwargs(args)
2636 **pycompat.strkwargs(args)
2637 )
2637 )
2638
2638
2639 repo.hook(
2639 repo.hook(
2640 b'txnclose', throw=False, **pycompat.strkwargs(hookargs)
2640 b'txnclose', throw=False, **pycompat.strkwargs(hookargs)
2641 )
2641 )
2642
2642
2643 repo = reporef()
2643 repo = reporef()
2644 assert repo is not None # help pytype
2644 assert repo is not None # help pytype
2645 repo._afterlock(hookfunc)
2645 repo._afterlock(hookfunc)
2646
2646
2647 tr.addfinalize(b'txnclose-hook', txnclosehook)
2647 tr.addfinalize(b'txnclose-hook', txnclosehook)
2648 # Include a leading "-" to make it happen before the transaction summary
2648 # Include a leading "-" to make it happen before the transaction summary
2649 # reports registered via scmutil.registersummarycallback() whose names
2649 # reports registered via scmutil.registersummarycallback() whose names
2650 # are 00-txnreport etc. That way, the caches will be warm when the
2650 # are 00-txnreport etc. That way, the caches will be warm when the
2651 # callbacks run.
2651 # callbacks run.
2652 tr.addpostclose(b'-warm-cache', self._buildcacheupdater(tr))
2652 tr.addpostclose(b'-warm-cache', self._buildcacheupdater(tr))
2653
2653
2654 def txnaborthook(tr2):
2654 def txnaborthook(tr2):
2655 """To be run if transaction is aborted"""
2655 """To be run if transaction is aborted"""
2656 repo = reporef()
2656 repo = reporef()
2657 assert repo is not None # help pytype
2657 assert repo is not None # help pytype
2658 repo.hook(
2658 repo.hook(
2659 b'txnabort', throw=False, **pycompat.strkwargs(tr2.hookargs)
2659 b'txnabort', throw=False, **pycompat.strkwargs(tr2.hookargs)
2660 )
2660 )
2661
2661
2662 tr.addabort(b'txnabort-hook', txnaborthook)
2662 tr.addabort(b'txnabort-hook', txnaborthook)
2663 # avoid eager cache invalidation. in-memory data should be identical
2663 # avoid eager cache invalidation. in-memory data should be identical
2664 # to stored data if transaction has no error.
2664 # to stored data if transaction has no error.
2665 tr.addpostclose(b'refresh-filecachestats', self._refreshfilecachestats)
2665 tr.addpostclose(b'refresh-filecachestats', self._refreshfilecachestats)
2666 self._transref = weakref.ref(tr)
2666 self._transref = weakref.ref(tr)
2667 scmutil.registersummarycallback(self, tr, desc)
2667 scmutil.registersummarycallback(self, tr, desc)
2668 # This only exist to deal with the need of rollback to have viable
2668 # This only exist to deal with the need of rollback to have viable
2669 # parents at the end of the operation. So backup viable parents at the
2669 # parents at the end of the operation. So backup viable parents at the
2670 # time of this operation.
2670 # time of this operation.
2671 #
2671 #
2672 # We only do it when the `wlock` is taken, otherwise other might be
2672 # We only do it when the `wlock` is taken, otherwise other might be
2673 # altering the dirstate under us.
2673 # altering the dirstate under us.
2674 #
2674 #
2675 # This is really not a great way to do this (first, because we cannot
2675 # This is really not a great way to do this (first, because we cannot
2676 # always do it). There are more viable alternative that exists
2676 # always do it). There are more viable alternative that exists
2677 #
2677 #
2678 # - backing only the working copy parent in a dedicated files and doing
2678 # - backing only the working copy parent in a dedicated files and doing
2679 # a clean "keep-update" to them on `hg rollback`.
2679 # a clean "keep-update" to them on `hg rollback`.
2680 #
2680 #
2681 # - slightly changing the behavior an applying a logic similar to "hg
2681 # - slightly changing the behavior an applying a logic similar to "hg
2682 # strip" to pick a working copy destination on `hg rollback`
2682 # strip" to pick a working copy destination on `hg rollback`
2683 if self.currentwlock() is not None:
2683 if self.currentwlock() is not None:
2684 ds = self.dirstate
2684 ds = self.dirstate
2685 if not self.vfs.exists(b'branch'):
2685 if not self.vfs.exists(b'branch'):
2686 # force a file to be written if None exist
2686 # force a file to be written if None exist
2687 ds.setbranch(b'default', None)
2687 ds.setbranch(b'default', None)
2688
2688
2689 def backup_dirstate(tr):
2689 def backup_dirstate(tr):
2690 for f in ds.all_file_names():
2690 for f in ds.all_file_names():
2691 # hardlink backup is okay because `dirstate` is always
2691 # hardlink backup is okay because `dirstate` is always
2692 # atomically written and possible data file are append only
2692 # atomically written and possible data file are append only
2693 # and resistant to trailing data.
2693 # and resistant to trailing data.
2694 tr.addbackup(f, hardlink=True, location=b'plain')
2694 tr.addbackup(f, hardlink=True, location=b'plain')
2695
2695
2696 tr.addvalidator(b'dirstate-backup', backup_dirstate)
2696 tr.addvalidator(b'dirstate-backup', backup_dirstate)
2697 return tr
2697 return tr
2698
2698
2699 def _journalfiles(self):
2699 def _journalfiles(self):
2700 return (
2700 return (
2701 (self.svfs, b'journal'),
2701 (self.svfs, b'journal'),
2702 (self.vfs, b'journal.desc'),
2702 (self.vfs, b'journal.desc'),
2703 )
2703 )
2704
2704
2705 def undofiles(self):
2705 def undofiles(self):
2706 return [(vfs, undoname(x)) for vfs, x in self._journalfiles()]
2706 return [(vfs, undoname(x)) for vfs, x in self._journalfiles()]
2707
2707
2708 @unfilteredmethod
2708 @unfilteredmethod
2709 def _writejournal(self, desc):
2709 def _writejournal(self, desc):
2710 self.vfs.write(b"journal.desc", b"%d\n%s\n" % (len(self), desc))
2710 self.vfs.write(b"journal.desc", b"%d\n%s\n" % (len(self), desc))
2711
2711
2712 def recover(self):
2712 def recover(self):
2713 with self.lock():
2713 with self.lock():
2714 if self.svfs.exists(b"journal"):
2714 if self.svfs.exists(b"journal"):
2715 self.ui.status(_(b"rolling back interrupted transaction\n"))
2715 self.ui.status(_(b"rolling back interrupted transaction\n"))
2716 vfsmap = self.vfs_map
2716 vfsmap = self.vfs_map
2717 transaction.rollback(
2717 transaction.rollback(
2718 self.svfs,
2718 self.svfs,
2719 vfsmap,
2719 vfsmap,
2720 b"journal",
2720 b"journal",
2721 self.ui.warn,
2721 self.ui.warn,
2722 checkambigfiles=_cachedfiles,
2722 checkambigfiles=_cachedfiles,
2723 )
2723 )
2724 self.invalidate()
2724 self.invalidate()
2725 return True
2725 return True
2726 else:
2726 else:
2727 self.ui.warn(_(b"no interrupted transaction available\n"))
2727 self.ui.warn(_(b"no interrupted transaction available\n"))
2728 return False
2728 return False
2729
2729
2730 def rollback(self, dryrun=False, force=False):
2730 def rollback(self, dryrun=False, force=False):
2731 wlock = lock = None
2731 wlock = lock = None
2732 try:
2732 try:
2733 wlock = self.wlock()
2733 wlock = self.wlock()
2734 lock = self.lock()
2734 lock = self.lock()
2735 if self.svfs.exists(b"undo"):
2735 if self.svfs.exists(b"undo"):
2736 return self._rollback(dryrun, force)
2736 return self._rollback(dryrun, force)
2737 else:
2737 else:
2738 self.ui.warn(_(b"no rollback information available\n"))
2738 self.ui.warn(_(b"no rollback information available\n"))
2739 return 1
2739 return 1
2740 finally:
2740 finally:
2741 release(lock, wlock)
2741 release(lock, wlock)
2742
2742
    @unfilteredmethod  # Until we get smarter cache management
    def _rollback(self, dryrun, force):
        """Perform the actual rollback of the last transaction.

        Reads ``undo.desc`` to learn what is being undone, guards against
        losing data when rolling back a commit that is not checked out,
        replays the ``undo`` journal, and finally repairs the dirstate if
        the working-copy parents were stripped by the rollback.

        Returns 0 on success (including dry runs); raises ``error.Abort``
        when rolling back the last commit would lose data and ``force`` is
        not set. Callers must hold both wlock and lock.
        """
        ui = self.ui

        # capture the working-copy parents before the rollback mutates state
        parents = self.dirstate.parents()
        try:
            # undo.desc format: "<old changelog length>\n<desc>[\n<detail>]\n"
            args = self.vfs.read(b'undo.desc').splitlines()
            (oldlen, desc, detail) = (int(args[0]), args[1], None)
            if len(args) >= 3:
                detail = args[2]
            oldtip = oldlen - 1

            if detail and ui.verbose:
                msg = _(
                    b'repository tip rolled back to revision %d'
                    b' (undo %s: %s)\n'
                ) % (oldtip, desc, detail)
            else:
                msg = _(
                    b'repository tip rolled back to revision %d (undo %s)\n'
                ) % (oldtip, desc)
            # will a working-copy parent be stripped by this rollback?
            parentgone = any(self[p].rev() > oldtip for p in parents)
        except IOError:
            # undo.desc is missing or unreadable: we don't know what we are
            # undoing, so conservatively assume the parents may be gone
            msg = _(b'rolling back unknown transaction\n')
            desc = None
            parentgone = True

        # rolling back a commit that isn't the working-copy parent would
        # silently discard it; require --force for that
        if not force and self[b'.'] != self[b'tip'] and desc == b'commit':
            raise error.Abort(
                _(
                    b'rollback of last commit while not checked out '
                    b'may lose data'
                ),
                hint=_(b'use -f to force'),
            )

        ui.status(msg)
        if dryrun:
            return 0

        self.destroying()
        vfsmap = self.vfs_map
        # if the working-copy parents survive the rollback, keep the current
        # dirstate rather than restoring the journaled copy
        skip_journal_pattern = None
        if not parentgone:
            skip_journal_pattern = RE_SKIP_DIRSTATE_ROLLBACK
        transaction.rollback(
            self.svfs,
            vfsmap,
            b'undo',
            ui.warn,
            checkambigfiles=_cachedfiles,
            skip_journal_pattern=skip_journal_pattern,
        )
        # everything cached in memory may now be stale
        self.invalidate()
        self.dirstate.invalidate()

        if parentgone:
            # replace this with some explicit parent update in the future.
            has_node = self.changelog.index.has_node
            if not all(has_node(p) for p in self.dirstate._pl):
                # There was no dirstate to backup initially, we need to drop
                # the existing one.
                with self.dirstate.changing_parents(self):
                    self.dirstate.setparents(self.nullid)
                    self.dirstate.clear()

            parents = tuple([p.rev() for p in self[None].parents()])
            if len(parents) > 1:
                ui.status(
                    _(
                        b'working directory now based on '
                        b'revisions %d and %d\n'
                    )
                    % parents
                )
            else:
                ui.status(
                    _(b'working directory now based on revision %d\n') % parents
                )
            # a half-finished merge no longer makes sense after the rollback
            mergestatemod.mergestate.clean(self)

        # TODO: if we know which new heads may result from this rollback, pass
        # them to destroy(), which will prevent the branchhead cache from being
        # invalidated.
        self.destroyed()
        return 0
2829
2829
2830 def _buildcacheupdater(self, newtransaction):
2830 def _buildcacheupdater(self, newtransaction):
2831 """called during transaction to build the callback updating cache
2831 """called during transaction to build the callback updating cache
2832
2832
2833 Lives on the repository to help extension who might want to augment
2833 Lives on the repository to help extension who might want to augment
2834 this logic. For this purpose, the created transaction is passed to the
2834 this logic. For this purpose, the created transaction is passed to the
2835 method.
2835 method.
2836 """
2836 """
2837 # we must avoid cyclic reference between repo and transaction.
2837 # we must avoid cyclic reference between repo and transaction.
2838 reporef = weakref.ref(self)
2838 reporef = weakref.ref(self)
2839
2839
2840 def updater(tr):
2840 def updater(tr):
2841 repo = reporef()
2841 repo = reporef()
2842 assert repo is not None # help pytype
2842 assert repo is not None # help pytype
2843 repo.updatecaches(tr)
2843 repo.updatecaches(tr)
2844
2844
2845 return updater
2845 return updater
2846
2846
    @unfilteredmethod
    def updatecaches(self, tr=None, full=False, caches=None):
        """warm appropriate caches

        If this function is called after a transaction closed. The transaction
        will be available in the 'tr' argument. This can be used to selectively
        update caches relevant to the changes in that transaction.

        If 'full' is set, make sure all caches the function knows about have
        up-to-date data. Even the ones usually loaded more lazily.

        The `full` argument can take a special "post-clone" value. In this case
        the cache warming is made after a clone and of the slower cache might
        be skipped, namely the `.fnodetags` one. This argument is 5.8 specific
        as we plan for a cleaner way to deal with this for 5.9.
        """
        if tr is not None and tr.hookargs.get(b'source') == b'strip':
            # During strip, many caches are invalid but
            # later call to `destroyed` will refresh them.
            return

        unfi = self.unfiltered()

        if full:
            # deprecated code path: translate the legacy `full` flag into the
            # newer `caches` set
            msg = (
                "`full` argument for `repo.updatecaches` is deprecated\n"
                "(use `caches=repository.CACHE_ALL` instead)"
            )
            self.ui.deprecwarn(msg, b"5.9")
            caches = repository.CACHES_ALL
            if full == b"post-clone":
                caches = repository.CACHES_POST_CLONE
            # NOTE(review): this unconditional assignment overwrites the
            # CACHES_POST_CLONE value chosen just above, making that branch
            # dead code — looks like a bug; confirm intent upstream before
            # changing behavior of this deprecated path.
            caches = repository.CACHES_ALL
        elif caches is None:
            caches = repository.CACHES_DEFAULT

        if repository.CACHE_BRANCHMAP_SERVED in caches:
            # only refresh when new revisions were added (or no tr info)
            if tr is None or tr.changes[b'origrepolen'] < len(self):
                # accessing the 'served' branchmap should refresh all the others,
                self.ui.debug(b'updating the branch cache\n')
                self.filtered(b'served').branchmap()
                self.filtered(b'served.hidden').branchmap()
                # flush all possibly delayed write.
                self._branchcaches.write_delayed(self)

        if repository.CACHE_CHANGELOG_CACHE in caches:
            self.changelog.update_caches(transaction=tr)

        if repository.CACHE_MANIFESTLOG_CACHE in caches:
            self.manifestlog.update_caches(transaction=tr)

        if repository.CACHE_REV_BRANCH in caches:
            # walk every revision so the rev->branch cache is fully populated
            rbc = unfi.revbranchcache()
            for r in unfi.changelog:
                rbc.branchinfo(r)
            rbc.write()

        if repository.CACHE_FULL_MANIFEST in caches:
            # ensure the working copy parents are in the manifestfulltextcache
            for ctx in self[b'.'].parents():
                ctx.manifest() # accessing the manifest is enough

        if repository.CACHE_FILE_NODE_TAGS in caches:
            # accessing fnode cache warms the cache
            tagsmod.fnoderevs(self.ui, unfi, unfi.changelog.revs())

        if repository.CACHE_TAGS_DEFAULT in caches:
            # accessing tags warm the cache
            self.tags()
        if repository.CACHE_TAGS_SERVED in caches:
            self.filtered(b'served').tags()

        if repository.CACHE_BRANCHMAP_ALL in caches:
            # The CACHE_BRANCHMAP_ALL updates lazily-loaded caches immediately,
            # so we're forcing a write to cause these caches to be warmed up
            # even if they haven't explicitly been requested yet (if they've
            # never been used by hg, they won't ever have been written, even if
            # they're a subset of another kind of cache that *has* been used).
            for filt in repoview.filtertable.keys():
                filtered = self.filtered(filt)
                filtered.branchmap().write(filtered)
2928
2928
2929 def invalidatecaches(self):
2929 def invalidatecaches(self):
2930 if '_tagscache' in vars(self):
2930 if '_tagscache' in vars(self):
2931 # can't use delattr on proxy
2931 # can't use delattr on proxy
2932 del self.__dict__['_tagscache']
2932 del self.__dict__['_tagscache']
2933
2933
2934 self._branchcaches.clear()
2934 self._branchcaches.clear()
2935 self.invalidatevolatilesets()
2935 self.invalidatevolatilesets()
2936 self._sparsesignaturecache.clear()
2936 self._sparsesignaturecache.clear()
2937
2937
    def invalidatevolatilesets(self):
        """Drop caches whose content depends on changeset visibility or
        obsolescence and must be recomputed after repository changes."""
        self.filteredrevcache.clear()
        obsolete.clearobscaches(self)
        self._quick_access_changeid_invalidate()
2942
2942
2943 def invalidatedirstate(self):
2943 def invalidatedirstate(self):
2944 """Invalidates the dirstate, causing the next call to dirstate
2944 """Invalidates the dirstate, causing the next call to dirstate
2945 to check if it was modified since the last time it was read,
2945 to check if it was modified since the last time it was read,
2946 rereading it if it has.
2946 rereading it if it has.
2947
2947
2948 This is different to dirstate.invalidate() that it doesn't always
2948 This is different to dirstate.invalidate() that it doesn't always
2949 rereads the dirstate. Use dirstate.invalidate() if you want to
2949 rereads the dirstate. Use dirstate.invalidate() if you want to
2950 explicitly read the dirstate again (i.e. restoring it to a previous
2950 explicitly read the dirstate again (i.e. restoring it to a previous
2951 known good state)."""
2951 known good state)."""
2952 unfi = self.unfiltered()
2952 unfi = self.unfiltered()
2953 if 'dirstate' in unfi.__dict__:
2953 if 'dirstate' in unfi.__dict__:
2954 assert not self.dirstate.is_changing_any
2954 del unfi.__dict__['dirstate']
2955 del unfi.__dict__['dirstate']
2955
2956
2956 def invalidate(self, clearfilecache=False):
2957 def invalidate(self, clearfilecache=False):
2957 """Invalidates both store and non-store parts other than dirstate
2958 """Invalidates both store and non-store parts other than dirstate
2958
2959
2959 If a transaction is running, invalidation of store is omitted,
2960 If a transaction is running, invalidation of store is omitted,
2960 because discarding in-memory changes might cause inconsistency
2961 because discarding in-memory changes might cause inconsistency
2961 (e.g. incomplete fncache causes unintentional failure, but
2962 (e.g. incomplete fncache causes unintentional failure, but
2962 redundant one doesn't).
2963 redundant one doesn't).
2963 """
2964 """
2964 unfiltered = self.unfiltered() # all file caches are stored unfiltered
2965 unfiltered = self.unfiltered() # all file caches are stored unfiltered
2965 for k in list(self._filecache.keys()):
2966 for k in list(self._filecache.keys()):
2966 if (
2967 if (
2967 k == b'changelog'
2968 k == b'changelog'
2968 and self.currenttransaction()
2969 and self.currenttransaction()
2969 and self.changelog._delayed
2970 and self.changelog._delayed
2970 ):
2971 ):
2971 # The changelog object may store unwritten revisions. We don't
2972 # The changelog object may store unwritten revisions. We don't
2972 # want to lose them.
2973 # want to lose them.
2973 # TODO: Solve the problem instead of working around it.
2974 # TODO: Solve the problem instead of working around it.
2974 continue
2975 continue
2975
2976
2976 if clearfilecache:
2977 if clearfilecache:
2977 del self._filecache[k]
2978 del self._filecache[k]
2978 try:
2979 try:
2979 delattr(unfiltered, k)
2980 delattr(unfiltered, k)
2980 except AttributeError:
2981 except AttributeError:
2981 pass
2982 pass
2982 self.invalidatecaches()
2983 self.invalidatecaches()
2983 if not self.currenttransaction():
2984 if not self.currenttransaction():
2984 # TODO: Changing contents of store outside transaction
2985 # TODO: Changing contents of store outside transaction
2985 # causes inconsistency. We should make in-memory store
2986 # causes inconsistency. We should make in-memory store
2986 # changes detectable, and abort if changed.
2987 # changes detectable, and abort if changed.
2987 self.store.invalidatecaches()
2988 self.store.invalidatecaches()
2988
2989
2989 def invalidateall(self):
2990 def invalidateall(self):
2990 """Fully invalidates both store and non-store parts, causing the
2991 """Fully invalidates both store and non-store parts, causing the
2991 subsequent operation to reread any outside changes."""
2992 subsequent operation to reread any outside changes."""
2992 # extension should hook this to invalidate its caches
2993 # extension should hook this to invalidate its caches
2993 self.invalidate()
2994 self.invalidate()
2994 self.invalidatedirstate()
2995 self.invalidatedirstate()
2995
2996
2996 @unfilteredmethod
2997 @unfilteredmethod
2997 def _refreshfilecachestats(self, tr):
2998 def _refreshfilecachestats(self, tr):
2998 """Reload stats of cached files so that they are flagged as valid"""
2999 """Reload stats of cached files so that they are flagged as valid"""
2999 for k, ce in self._filecache.items():
3000 for k, ce in self._filecache.items():
3000 k = pycompat.sysstr(k)
3001 k = pycompat.sysstr(k)
3001 if k == 'dirstate' or k not in self.__dict__:
3002 if k == 'dirstate' or k not in self.__dict__:
3002 continue
3003 continue
3003 ce.refresh()
3004 ce.refresh()
3004
3005
3005 def _lock(
3006 def _lock(
3006 self,
3007 self,
3007 vfs,
3008 vfs,
3008 lockname,
3009 lockname,
3009 wait,
3010 wait,
3010 releasefn,
3011 releasefn,
3011 acquirefn,
3012 acquirefn,
3012 desc,
3013 desc,
3013 ):
3014 ):
3014 timeout = 0
3015 timeout = 0
3015 warntimeout = 0
3016 warntimeout = 0
3016 if wait:
3017 if wait:
3017 timeout = self.ui.configint(b"ui", b"timeout")
3018 timeout = self.ui.configint(b"ui", b"timeout")
3018 warntimeout = self.ui.configint(b"ui", b"timeout.warn")
3019 warntimeout = self.ui.configint(b"ui", b"timeout.warn")
3019 # internal config: ui.signal-safe-lock
3020 # internal config: ui.signal-safe-lock
3020 signalsafe = self.ui.configbool(b'ui', b'signal-safe-lock')
3021 signalsafe = self.ui.configbool(b'ui', b'signal-safe-lock')
3021
3022
3022 l = lockmod.trylock(
3023 l = lockmod.trylock(
3023 self.ui,
3024 self.ui,
3024 vfs,
3025 vfs,
3025 lockname,
3026 lockname,
3026 timeout,
3027 timeout,
3027 warntimeout,
3028 warntimeout,
3028 releasefn=releasefn,
3029 releasefn=releasefn,
3029 acquirefn=acquirefn,
3030 acquirefn=acquirefn,
3030 desc=desc,
3031 desc=desc,
3031 signalsafe=signalsafe,
3032 signalsafe=signalsafe,
3032 )
3033 )
3033 return l
3034 return l
3034
3035
3035 def _afterlock(self, callback):
3036 def _afterlock(self, callback):
3036 """add a callback to be run when the repository is fully unlocked
3037 """add a callback to be run when the repository is fully unlocked
3037
3038
3038 The callback will be executed when the outermost lock is released
3039 The callback will be executed when the outermost lock is released
3039 (with wlock being higher level than 'lock')."""
3040 (with wlock being higher level than 'lock')."""
3040 for ref in (self._wlockref, self._lockref):
3041 for ref in (self._wlockref, self._lockref):
3041 l = ref and ref()
3042 l = ref and ref()
3042 if l and l.held:
3043 if l and l.held:
3043 l.postrelease.append(callback)
3044 l.postrelease.append(callback)
3044 break
3045 break
3045 else: # no lock have been found.
3046 else: # no lock have been found.
3046 callback(True)
3047 callback(True)
3047
3048
3048 def lock(self, wait=True):
3049 def lock(self, wait=True):
3049 """Lock the repository store (.hg/store) and return a weak reference
3050 """Lock the repository store (.hg/store) and return a weak reference
3050 to the lock. Use this before modifying the store (e.g. committing or
3051 to the lock. Use this before modifying the store (e.g. committing or
3051 stripping). If you are opening a transaction, get a lock as well.)
3052 stripping). If you are opening a transaction, get a lock as well.)
3052
3053
3053 If both 'lock' and 'wlock' must be acquired, ensure you always acquires
3054 If both 'lock' and 'wlock' must be acquired, ensure you always acquires
3054 'wlock' first to avoid a dead-lock hazard."""
3055 'wlock' first to avoid a dead-lock hazard."""
3055 l = self._currentlock(self._lockref)
3056 l = self._currentlock(self._lockref)
3056 if l is not None:
3057 if l is not None:
3057 l.lock()
3058 l.lock()
3058 return l
3059 return l
3059
3060
3060 l = self._lock(
3061 l = self._lock(
3061 vfs=self.svfs,
3062 vfs=self.svfs,
3062 lockname=b"lock",
3063 lockname=b"lock",
3063 wait=wait,
3064 wait=wait,
3064 releasefn=None,
3065 releasefn=None,
3065 acquirefn=self.invalidate,
3066 acquirefn=self.invalidate,
3066 desc=_(b'repository %s') % self.origroot,
3067 desc=_(b'repository %s') % self.origroot,
3067 )
3068 )
3068 self._lockref = weakref.ref(l)
3069 self._lockref = weakref.ref(l)
3069 return l
3070 return l
3070
3071
3071 def wlock(self, wait=True):
3072 def wlock(self, wait=True):
3072 """Lock the non-store parts of the repository (everything under
3073 """Lock the non-store parts of the repository (everything under
3073 .hg except .hg/store) and return a weak reference to the lock.
3074 .hg except .hg/store) and return a weak reference to the lock.
3074
3075
3075 Use this before modifying files in .hg.
3076 Use this before modifying files in .hg.
3076
3077
3077 If both 'lock' and 'wlock' must be acquired, ensure you always acquires
3078 If both 'lock' and 'wlock' must be acquired, ensure you always acquires
3078 'wlock' first to avoid a dead-lock hazard."""
3079 'wlock' first to avoid a dead-lock hazard."""
3079 l = self._wlockref() if self._wlockref else None
3080 l = self._wlockref() if self._wlockref else None
3080 if l is not None and l.held:
3081 if l is not None and l.held:
3081 l.lock()
3082 l.lock()
3082 return l
3083 return l
3083
3084
3084 # We do not need to check for non-waiting lock acquisition. Such
3085 # We do not need to check for non-waiting lock acquisition. Such
3085 # acquisition would not cause dead-lock as they would just fail.
3086 # acquisition would not cause dead-lock as they would just fail.
3086 if wait and (
3087 if wait and (
3087 self.ui.configbool(b'devel', b'all-warnings')
3088 self.ui.configbool(b'devel', b'all-warnings')
3088 or self.ui.configbool(b'devel', b'check-locks')
3089 or self.ui.configbool(b'devel', b'check-locks')
3089 ):
3090 ):
3090 if self._currentlock(self._lockref) is not None:
3091 if self._currentlock(self._lockref) is not None:
3091 self.ui.develwarn(b'"wlock" acquired after "lock"')
3092 self.ui.develwarn(b'"wlock" acquired after "lock"')
3092
3093
3093 def unlock():
3094 def unlock():
3094 if self.dirstate.is_changing_any:
3095 if self.dirstate.is_changing_any:
3095 msg = b"wlock release in the middle of a changing parents"
3096 msg = b"wlock release in the middle of a changing parents"
3096 self.ui.develwarn(msg)
3097 self.ui.develwarn(msg)
3097 self.dirstate.invalidate()
3098 self.dirstate.invalidate()
3098 else:
3099 else:
3099 if self.dirstate._dirty:
3100 if self.dirstate._dirty:
3100 msg = b"dirty dirstate on wlock release"
3101 msg = b"dirty dirstate on wlock release"
3101 self.ui.develwarn(msg)
3102 self.ui.develwarn(msg)
3102 self.dirstate.write(None)
3103 self.dirstate.write(None)
3103
3104
3104 unfi = self.unfiltered()
3105 unfi = self.unfiltered()
3105 if 'dirstate' in unfi.__dict__:
3106 if 'dirstate' in unfi.__dict__:
3106 del unfi.__dict__['dirstate']
3107 del unfi.__dict__['dirstate']
3107
3108
3108 l = self._lock(
3109 l = self._lock(
3109 self.vfs,
3110 self.vfs,
3110 b"wlock",
3111 b"wlock",
3111 wait,
3112 wait,
3112 unlock,
3113 unlock,
3113 self.invalidatedirstate,
3114 self.invalidatedirstate,
3114 _(b'working directory of %s') % self.origroot,
3115 _(b'working directory of %s') % self.origroot,
3115 )
3116 )
3116 self._wlockref = weakref.ref(l)
3117 self._wlockref = weakref.ref(l)
3117 return l
3118 return l
3118
3119
3119 def _currentlock(self, lockref):
3120 def _currentlock(self, lockref):
3120 """Returns the lock if it's held, or None if it's not."""
3121 """Returns the lock if it's held, or None if it's not."""
3121 if lockref is None:
3122 if lockref is None:
3122 return None
3123 return None
3123 l = lockref()
3124 l = lockref()
3124 if l is None or not l.held:
3125 if l is None or not l.held:
3125 return None
3126 return None
3126 return l
3127 return l
3127
3128
3128 def currentwlock(self):
3129 def currentwlock(self):
3129 """Returns the wlock if it's held, or None if it's not."""
3130 """Returns the wlock if it's held, or None if it's not."""
3130 return self._currentlock(self._wlockref)
3131 return self._currentlock(self._wlockref)
3131
3132
3132 def checkcommitpatterns(self, wctx, match, status, fail):
3133 def checkcommitpatterns(self, wctx, match, status, fail):
3133 """check for commit arguments that aren't committable"""
3134 """check for commit arguments that aren't committable"""
3134 if match.isexact() or match.prefix():
3135 if match.isexact() or match.prefix():
3135 matched = set(status.modified + status.added + status.removed)
3136 matched = set(status.modified + status.added + status.removed)
3136
3137
3137 for f in match.files():
3138 for f in match.files():
3138 f = self.dirstate.normalize(f)
3139 f = self.dirstate.normalize(f)
3139 if f == b'.' or f in matched or f in wctx.substate:
3140 if f == b'.' or f in matched or f in wctx.substate:
3140 continue
3141 continue
3141 if f in status.deleted:
3142 if f in status.deleted:
3142 fail(f, _(b'file not found!'))
3143 fail(f, _(b'file not found!'))
3143 # Is it a directory that exists or used to exist?
3144 # Is it a directory that exists or used to exist?
3144 if self.wvfs.isdir(f) or wctx.p1().hasdir(f):
3145 if self.wvfs.isdir(f) or wctx.p1().hasdir(f):
3145 d = f + b'/'
3146 d = f + b'/'
3146 for mf in matched:
3147 for mf in matched:
3147 if mf.startswith(d):
3148 if mf.startswith(d):
3148 break
3149 break
3149 else:
3150 else:
3150 fail(f, _(b"no match under directory!"))
3151 fail(f, _(b"no match under directory!"))
3151 elif f not in self.dirstate:
3152 elif f not in self.dirstate:
3152 fail(f, _(b"file not tracked!"))
3153 fail(f, _(b"file not tracked!"))
3153
3154
3154 @unfilteredmethod
3155 @unfilteredmethod
3155 def commit(
3156 def commit(
3156 self,
3157 self,
3157 text=b"",
3158 text=b"",
3158 user=None,
3159 user=None,
3159 date=None,
3160 date=None,
3160 match=None,
3161 match=None,
3161 force=False,
3162 force=False,
3162 editor=None,
3163 editor=None,
3163 extra=None,
3164 extra=None,
3164 ):
3165 ):
3165 """Add a new revision to current repository.
3166 """Add a new revision to current repository.
3166
3167
3167 Revision information is gathered from the working directory,
3168 Revision information is gathered from the working directory,
3168 match can be used to filter the committed files. If editor is
3169 match can be used to filter the committed files. If editor is
3169 supplied, it is called to get a commit message.
3170 supplied, it is called to get a commit message.
3170 """
3171 """
3171 if extra is None:
3172 if extra is None:
3172 extra = {}
3173 extra = {}
3173
3174
3174 def fail(f, msg):
3175 def fail(f, msg):
3175 raise error.InputError(b'%s: %s' % (f, msg))
3176 raise error.InputError(b'%s: %s' % (f, msg))
3176
3177
3177 if not match:
3178 if not match:
3178 match = matchmod.always()
3179 match = matchmod.always()
3179
3180
3180 if not force:
3181 if not force:
3181 match.bad = fail
3182 match.bad = fail
3182
3183
3183 # lock() for recent changelog (see issue4368)
3184 # lock() for recent changelog (see issue4368)
3184 with self.wlock(), self.lock():
3185 with self.wlock(), self.lock():
3185 wctx = self[None]
3186 wctx = self[None]
3186 merge = len(wctx.parents()) > 1
3187 merge = len(wctx.parents()) > 1
3187
3188
3188 if not force and merge and not match.always():
3189 if not force and merge and not match.always():
3189 raise error.Abort(
3190 raise error.Abort(
3190 _(
3191 _(
3191 b'cannot partially commit a merge '
3192 b'cannot partially commit a merge '
3192 b'(do not specify files or patterns)'
3193 b'(do not specify files or patterns)'
3193 )
3194 )
3194 )
3195 )
3195
3196
3196 status = self.status(match=match, clean=force)
3197 status = self.status(match=match, clean=force)
3197 if force:
3198 if force:
3198 status.modified.extend(
3199 status.modified.extend(
3199 status.clean
3200 status.clean
3200 ) # mq may commit clean files
3201 ) # mq may commit clean files
3201
3202
3202 # check subrepos
3203 # check subrepos
3203 subs, commitsubs, newstate = subrepoutil.precommit(
3204 subs, commitsubs, newstate = subrepoutil.precommit(
3204 self.ui, wctx, status, match, force=force
3205 self.ui, wctx, status, match, force=force
3205 )
3206 )
3206
3207
3207 # make sure all explicit patterns are matched
3208 # make sure all explicit patterns are matched
3208 if not force:
3209 if not force:
3209 self.checkcommitpatterns(wctx, match, status, fail)
3210 self.checkcommitpatterns(wctx, match, status, fail)
3210
3211
3211 cctx = context.workingcommitctx(
3212 cctx = context.workingcommitctx(
3212 self, status, text, user, date, extra
3213 self, status, text, user, date, extra
3213 )
3214 )
3214
3215
3215 ms = mergestatemod.mergestate.read(self)
3216 ms = mergestatemod.mergestate.read(self)
3216 mergeutil.checkunresolved(ms)
3217 mergeutil.checkunresolved(ms)
3217
3218
3218 # internal config: ui.allowemptycommit
3219 # internal config: ui.allowemptycommit
3219 if cctx.isempty() and not self.ui.configbool(
3220 if cctx.isempty() and not self.ui.configbool(
3220 b'ui', b'allowemptycommit'
3221 b'ui', b'allowemptycommit'
3221 ):
3222 ):
3222 self.ui.debug(b'nothing to commit, clearing merge state\n')
3223 self.ui.debug(b'nothing to commit, clearing merge state\n')
3223 ms.reset()
3224 ms.reset()
3224 return None
3225 return None
3225
3226
3226 if merge and cctx.deleted():
3227 if merge and cctx.deleted():
3227 raise error.Abort(_(b"cannot commit merge with missing files"))
3228 raise error.Abort(_(b"cannot commit merge with missing files"))
3228
3229
3229 if editor:
3230 if editor:
3230 cctx._text = editor(self, cctx, subs)
3231 cctx._text = editor(self, cctx, subs)
3231 edited = text != cctx._text
3232 edited = text != cctx._text
3232
3233
3233 # Save commit message in case this transaction gets rolled back
3234 # Save commit message in case this transaction gets rolled back
3234 # (e.g. by a pretxncommit hook). Leave the content alone on
3235 # (e.g. by a pretxncommit hook). Leave the content alone on
3235 # the assumption that the user will use the same editor again.
3236 # the assumption that the user will use the same editor again.
3236 msg_path = self.savecommitmessage(cctx._text)
3237 msg_path = self.savecommitmessage(cctx._text)
3237
3238
3238 # commit subs and write new state
3239 # commit subs and write new state
3239 if subs:
3240 if subs:
3240 uipathfn = scmutil.getuipathfn(self)
3241 uipathfn = scmutil.getuipathfn(self)
3241 for s in sorted(commitsubs):
3242 for s in sorted(commitsubs):
3242 sub = wctx.sub(s)
3243 sub = wctx.sub(s)
3243 self.ui.status(
3244 self.ui.status(
3244 _(b'committing subrepository %s\n')
3245 _(b'committing subrepository %s\n')
3245 % uipathfn(subrepoutil.subrelpath(sub))
3246 % uipathfn(subrepoutil.subrelpath(sub))
3246 )
3247 )
3247 sr = sub.commit(cctx._text, user, date)
3248 sr = sub.commit(cctx._text, user, date)
3248 newstate[s] = (newstate[s][0], sr)
3249 newstate[s] = (newstate[s][0], sr)
3249 subrepoutil.writestate(self, newstate)
3250 subrepoutil.writestate(self, newstate)
3250
3251
3251 p1, p2 = self.dirstate.parents()
3252 p1, p2 = self.dirstate.parents()
3252 hookp1, hookp2 = hex(p1), (p2 != self.nullid and hex(p2) or b'')
3253 hookp1, hookp2 = hex(p1), (p2 != self.nullid and hex(p2) or b'')
3253 try:
3254 try:
3254 self.hook(
3255 self.hook(
3255 b"precommit", throw=True, parent1=hookp1, parent2=hookp2
3256 b"precommit", throw=True, parent1=hookp1, parent2=hookp2
3256 )
3257 )
3257 with self.transaction(b'commit'):
3258 with self.transaction(b'commit'):
3258 ret = self.commitctx(cctx, True)
3259 ret = self.commitctx(cctx, True)
3259 # update bookmarks, dirstate and mergestate
3260 # update bookmarks, dirstate and mergestate
3260 bookmarks.update(self, [p1, p2], ret)
3261 bookmarks.update(self, [p1, p2], ret)
3261 cctx.markcommitted(ret)
3262 cctx.markcommitted(ret)
3262 ms.reset()
3263 ms.reset()
3263 except: # re-raises
3264 except: # re-raises
3264 if edited:
3265 if edited:
3265 self.ui.write(
3266 self.ui.write(
3266 _(b'note: commit message saved in %s\n') % msg_path
3267 _(b'note: commit message saved in %s\n') % msg_path
3267 )
3268 )
3268 self.ui.write(
3269 self.ui.write(
3269 _(
3270 _(
3270 b"note: use 'hg commit --logfile "
3271 b"note: use 'hg commit --logfile "
3271 b"%s --edit' to reuse it\n"
3272 b"%s --edit' to reuse it\n"
3272 )
3273 )
3273 % msg_path
3274 % msg_path
3274 )
3275 )
3275 raise
3276 raise
3276
3277
3277 def commithook(unused_success):
3278 def commithook(unused_success):
3278 # hack for command that use a temporary commit (eg: histedit)
3279 # hack for command that use a temporary commit (eg: histedit)
3279 # temporary commit got stripped before hook release
3280 # temporary commit got stripped before hook release
3280 if self.changelog.hasnode(ret):
3281 if self.changelog.hasnode(ret):
3281 self.hook(
3282 self.hook(
3282 b"commit", node=hex(ret), parent1=hookp1, parent2=hookp2
3283 b"commit", node=hex(ret), parent1=hookp1, parent2=hookp2
3283 )
3284 )
3284
3285
3285 self._afterlock(commithook)
3286 self._afterlock(commithook)
3286 return ret
3287 return ret
3287
3288
3288 @unfilteredmethod
3289 @unfilteredmethod
3289 def commitctx(self, ctx, error=False, origctx=None):
3290 def commitctx(self, ctx, error=False, origctx=None):
3290 return commit.commitctx(self, ctx, error=error, origctx=origctx)
3291 return commit.commitctx(self, ctx, error=error, origctx=origctx)
3291
3292
3292 @unfilteredmethod
3293 @unfilteredmethod
3293 def destroying(self):
3294 def destroying(self):
3294 """Inform the repository that nodes are about to be destroyed.
3295 """Inform the repository that nodes are about to be destroyed.
3295 Intended for use by strip and rollback, so there's a common
3296 Intended for use by strip and rollback, so there's a common
3296 place for anything that has to be done before destroying history.
3297 place for anything that has to be done before destroying history.
3297
3298
3298 This is mostly useful for saving state that is in memory and waiting
3299 This is mostly useful for saving state that is in memory and waiting
3299 to be flushed when the current lock is released. Because a call to
3300 to be flushed when the current lock is released. Because a call to
3300 destroyed is imminent, the repo will be invalidated causing those
3301 destroyed is imminent, the repo will be invalidated causing those
3301 changes to stay in memory (waiting for the next unlock), or vanish
3302 changes to stay in memory (waiting for the next unlock), or vanish
3302 completely.
3303 completely.
3303 """
3304 """
3304 # When using the same lock to commit and strip, the phasecache is left
3305 # When using the same lock to commit and strip, the phasecache is left
3305 # dirty after committing. Then when we strip, the repo is invalidated,
3306 # dirty after committing. Then when we strip, the repo is invalidated,
3306 # causing those changes to disappear.
3307 # causing those changes to disappear.
3307 if '_phasecache' in vars(self):
3308 if '_phasecache' in vars(self):
3308 self._phasecache.write()
3309 self._phasecache.write()
3309
3310
3310 @unfilteredmethod
3311 @unfilteredmethod
3311 def destroyed(self):
3312 def destroyed(self):
3312 """Inform the repository that nodes have been destroyed.
3313 """Inform the repository that nodes have been destroyed.
3313 Intended for use by strip and rollback, so there's a common
3314 Intended for use by strip and rollback, so there's a common
3314 place for anything that has to be done after destroying history.
3315 place for anything that has to be done after destroying history.
3315 """
3316 """
3316 # When one tries to:
3317 # When one tries to:
3317 # 1) destroy nodes thus calling this method (e.g. strip)
3318 # 1) destroy nodes thus calling this method (e.g. strip)
3318 # 2) use phasecache somewhere (e.g. commit)
3319 # 2) use phasecache somewhere (e.g. commit)
3319 #
3320 #
3320 # then 2) will fail because the phasecache contains nodes that were
3321 # then 2) will fail because the phasecache contains nodes that were
3321 # removed. We can either remove phasecache from the filecache,
3322 # removed. We can either remove phasecache from the filecache,
3322 # causing it to reload next time it is accessed, or simply filter
3323 # causing it to reload next time it is accessed, or simply filter
3323 # the removed nodes now and write the updated cache.
3324 # the removed nodes now and write the updated cache.
3324 self._phasecache.filterunknown(self)
3325 self._phasecache.filterunknown(self)
3325 self._phasecache.write()
3326 self._phasecache.write()
3326
3327
3327 # refresh all repository caches
3328 # refresh all repository caches
3328 self.updatecaches()
3329 self.updatecaches()
3329
3330
3330 # Ensure the persistent tag cache is updated. Doing it now
3331 # Ensure the persistent tag cache is updated. Doing it now
3331 # means that the tag cache only has to worry about destroyed
3332 # means that the tag cache only has to worry about destroyed
3332 # heads immediately after a strip/rollback. That in turn
3333 # heads immediately after a strip/rollback. That in turn
3333 # guarantees that "cachetip == currenttip" (comparing both rev
3334 # guarantees that "cachetip == currenttip" (comparing both rev
3334 # and node) always means no nodes have been added or destroyed.
3335 # and node) always means no nodes have been added or destroyed.
3335
3336
3336 # XXX this is suboptimal when qrefresh'ing: we strip the current
3337 # XXX this is suboptimal when qrefresh'ing: we strip the current
3337 # head, refresh the tag cache, then immediately add a new head.
3338 # head, refresh the tag cache, then immediately add a new head.
3338 # But I think doing it this way is necessary for the "instant
3339 # But I think doing it this way is necessary for the "instant
3339 # tag cache retrieval" case to work.
3340 # tag cache retrieval" case to work.
3340 self.invalidate()
3341 self.invalidate()
3341
3342
3342 def status(
3343 def status(
3343 self,
3344 self,
3344 node1=b'.',
3345 node1=b'.',
3345 node2=None,
3346 node2=None,
3346 match=None,
3347 match=None,
3347 ignored=False,
3348 ignored=False,
3348 clean=False,
3349 clean=False,
3349 unknown=False,
3350 unknown=False,
3350 listsubrepos=False,
3351 listsubrepos=False,
3351 ):
3352 ):
3352 '''a convenience method that calls node1.status(node2)'''
3353 '''a convenience method that calls node1.status(node2)'''
3353 return self[node1].status(
3354 return self[node1].status(
3354 node2, match, ignored, clean, unknown, listsubrepos
3355 node2, match, ignored, clean, unknown, listsubrepos
3355 )
3356 )
3356
3357
3357 def addpostdsstatus(self, ps):
3358 def addpostdsstatus(self, ps):
3358 """Add a callback to run within the wlock, at the point at which status
3359 """Add a callback to run within the wlock, at the point at which status
3359 fixups happen.
3360 fixups happen.
3360
3361
3361 On status completion, callback(wctx, status) will be called with the
3362 On status completion, callback(wctx, status) will be called with the
3362 wlock held, unless the dirstate has changed from underneath or the wlock
3363 wlock held, unless the dirstate has changed from underneath or the wlock
3363 couldn't be grabbed.
3364 couldn't be grabbed.
3364
3365
3365 Callbacks should not capture and use a cached copy of the dirstate --
3366 Callbacks should not capture and use a cached copy of the dirstate --
3366 it might change in the meanwhile. Instead, they should access the
3367 it might change in the meanwhile. Instead, they should access the
3367 dirstate via wctx.repo().dirstate.
3368 dirstate via wctx.repo().dirstate.
3368
3369
3369 This list is emptied out after each status run -- extensions should
3370 This list is emptied out after each status run -- extensions should
3370 make sure it adds to this list each time dirstate.status is called.
3371 make sure it adds to this list each time dirstate.status is called.
3371 Extensions should also make sure they don't call this for statuses
3372 Extensions should also make sure they don't call this for statuses
3372 that don't involve the dirstate.
3373 that don't involve the dirstate.
3373 """
3374 """
3374
3375
3375 # The list is located here for uniqueness reasons -- it is actually
3376 # The list is located here for uniqueness reasons -- it is actually
3376 # managed by the workingctx, but that isn't unique per-repo.
3377 # managed by the workingctx, but that isn't unique per-repo.
3377 self._postdsstatus.append(ps)
3378 self._postdsstatus.append(ps)
3378
3379
3379 def postdsstatus(self):
3380 def postdsstatus(self):
3380 """Used by workingctx to get the list of post-dirstate-status hooks."""
3381 """Used by workingctx to get the list of post-dirstate-status hooks."""
3381 return self._postdsstatus
3382 return self._postdsstatus
3382
3383
3383 def clearpostdsstatus(self):
3384 def clearpostdsstatus(self):
3384 """Used by workingctx to clear post-dirstate-status hooks."""
3385 """Used by workingctx to clear post-dirstate-status hooks."""
3385 del self._postdsstatus[:]
3386 del self._postdsstatus[:]
3386
3387
3387 def heads(self, start=None):
3388 def heads(self, start=None):
3388 if start is None:
3389 if start is None:
3389 cl = self.changelog
3390 cl = self.changelog
3390 headrevs = reversed(cl.headrevs())
3391 headrevs = reversed(cl.headrevs())
3391 return [cl.node(rev) for rev in headrevs]
3392 return [cl.node(rev) for rev in headrevs]
3392
3393
3393 heads = self.changelog.heads(start)
3394 heads = self.changelog.heads(start)
3394 # sort the output in rev descending order
3395 # sort the output in rev descending order
3395 return sorted(heads, key=self.changelog.rev, reverse=True)
3396 return sorted(heads, key=self.changelog.rev, reverse=True)
3396
3397
3397 def branchheads(self, branch=None, start=None, closed=False):
3398 def branchheads(self, branch=None, start=None, closed=False):
3398 """return a (possibly filtered) list of heads for the given branch
3399 """return a (possibly filtered) list of heads for the given branch
3399
3400
3400 Heads are returned in topological order, from newest to oldest.
3401 Heads are returned in topological order, from newest to oldest.
3401 If branch is None, use the dirstate branch.
3402 If branch is None, use the dirstate branch.
3402 If start is not None, return only heads reachable from start.
3403 If start is not None, return only heads reachable from start.
3403 If closed is True, return heads that are marked as closed as well.
3404 If closed is True, return heads that are marked as closed as well.
3404 """
3405 """
3405 if branch is None:
3406 if branch is None:
3406 branch = self[None].branch()
3407 branch = self[None].branch()
3407 branches = self.branchmap()
3408 branches = self.branchmap()
3408 if not branches.hasbranch(branch):
3409 if not branches.hasbranch(branch):
3409 return []
3410 return []
3410 # the cache returns heads ordered lowest to highest
3411 # the cache returns heads ordered lowest to highest
3411 bheads = list(reversed(branches.branchheads(branch, closed=closed)))
3412 bheads = list(reversed(branches.branchheads(branch, closed=closed)))
3412 if start is not None:
3413 if start is not None:
3413 # filter out the heads that cannot be reached from startrev
3414 # filter out the heads that cannot be reached from startrev
3414 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
3415 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
3415 bheads = [h for h in bheads if h in fbheads]
3416 bheads = [h for h in bheads if h in fbheads]
3416 return bheads
3417 return bheads
3417
3418
3418 def branches(self, nodes):
3419 def branches(self, nodes):
3419 if not nodes:
3420 if not nodes:
3420 nodes = [self.changelog.tip()]
3421 nodes = [self.changelog.tip()]
3421 b = []
3422 b = []
3422 for n in nodes:
3423 for n in nodes:
3423 t = n
3424 t = n
3424 while True:
3425 while True:
3425 p = self.changelog.parents(n)
3426 p = self.changelog.parents(n)
3426 if p[1] != self.nullid or p[0] == self.nullid:
3427 if p[1] != self.nullid or p[0] == self.nullid:
3427 b.append((t, n, p[0], p[1]))
3428 b.append((t, n, p[0], p[1]))
3428 break
3429 break
3429 n = p[0]
3430 n = p[0]
3430 return b
3431 return b
3431
3432
def between(self, pairs):
    """Sample the first-parent chain between each (top, bottom) pair.

    For every pair, walk first parents from ``top`` until ``bottom`` or
    a null node is reached, collecting the nodes seen at exponentially
    spaced steps (1, 2, 4, 8, ...). Returns one list of sampled nodes
    per input pair.
    """
    results = []

    for top, bottom in pairs:
        sampled = []
        node = top
        step = 0
        next_sample = 1

        while node != bottom and node != self.nullid:
            parent = self.changelog.parents(node)[0]
            if step == next_sample:
                sampled.append(node)
                next_sample *= 2
            node = parent
            step += 1

        results.append(sampled)

    return results
3450
3451
def checkpush(self, pushop):
    """Hook point invoked before changesets are pushed.

    The default implementation does nothing. Extensions may override
    this to perform additional validation before a push, and should
    call it when they override the push command.
    """
3456
3457
@unfilteredpropertycache
def prepushoutgoinghooks(self):
    """Hook container invoked before changesets are pushed.

    Each registered hook is called with a pushop exposing repo, remote
    and outgoing information.
    """
    return util.hooks()
3463
3464
def pushkey(self, namespace, key, old, new):
    """Update *key* in pushkey *namespace* from *old* to *new*.

    The ``prepushkey`` hook runs first; if it aborts, the abort message
    (and optional hint) is written to stderr and ``False`` is returned.
    Otherwise the pushkey backend performs the update, the ``pushkey``
    notification hook is scheduled to fire after the lock is released,
    and the backend's result is returned.
    """
    try:
        hookargs = {}
        tr = self.currenttransaction()
        if tr is not None:
            # propagate transaction hook arguments (e.g. txn id) to the hook
            hookargs.update(tr.hookargs)
        hookargs = pycompat.strkwargs(hookargs)
        hookargs.update(namespace=namespace, key=key, old=old, new=new)
        self.hook(b'prepushkey', throw=True, **hookargs)
    except error.HookAbort as exc:
        self.ui.write_err(_(b"pushkey-abort: %s\n") % exc)
        if exc.hint:
            self.ui.write_err(_(b"(%s)\n") % exc.hint)
        return False
    self.ui.debug(b'pushing key for "%s:%s"\n' % (namespace, key))
    ret = pushkey.push(self, namespace, key, old, new)

    def runhook(unused_success):
        # deferred via _afterlock so the notification fires post-release
        self.hook(
            b'pushkey',
            namespace=namespace,
            key=key,
            old=old,
            new=new,
            ret=ret,
        )

    self._afterlock(runhook)
    return ret
3496
3497
def listkeys(self, namespace):
    """Return the pushkey mapping for *namespace*, firing list hooks.

    Runs the ``prelistkeys`` hook (which may abort), queries the pushkey
    backend, then fires the ``listkeys`` notification hook with the
    resulting values.
    """
    self.hook(b'prelistkeys', throw=True, namespace=namespace)
    self.ui.debug(b'listing keys for "%s"\n' % namespace)
    entries = pushkey.list(self, namespace)
    self.hook(b'listkeys', namespace=namespace, values=entries)
    return entries
3503
3504
def debugwireargs(self, one, two, three=None, four=None, five=None):
    '''used to test argument passing over the wire'''
    # optional arguments are stringified so None renders predictably
    parts = (
        one,
        two,
        pycompat.bytestr(three),
        pycompat.bytestr(four),
        pycompat.bytestr(five),
    )
    return b"%s %s %s %s %s" % parts
3513
3514
def savecommitmessage(self, text):
    """Persist *text* to ``.hg/last-message.txt``.

    Returns the saved file's path relative to the repository root, so
    it can be shown to the user after an aborted commit.
    """
    msgfile = self.vfs(b'last-message.txt', b'wb')
    try:
        msgfile.write(text)
    finally:
        msgfile.close()
    return self.pathto(msgfile.name[len(self.root) + 1 :])
3521
3522
def register_wanted_sidedata(self, category):
    """Record that this repository wants sidedata *category*.

    Silently ignored when the repo does not support sidedata at all.
    """
    supports_sidedata = repository.REPO_FEATURE_SIDE_DATA in self.features
    if not supports_sidedata:
        # Only revlogv2 repos can want sidedata.
        return
    self._wanted_sidedata.add(pycompat.bytestr(category))
3527
3528
def register_sidedata_computer(
    self, kind, category, keys, computer, flags, replace=False
):
    """Register *computer* to produce sidedata *category* for *kind* revlogs.

    ``keys``, ``computer`` and ``flags`` are stored per (kind, category).
    Registering an already-registered category requires ``replace=True``;
    conversely ``replace=True`` requires an existing registration. Both
    misuses raise ``ProgrammingError``, as does an unknown revlog kind.
    """
    if kind not in revlogconst.ALL_KINDS:
        msg = _(b"unexpected revlog kind '%s'.")
        raise error.ProgrammingError(msg % kind)
    category = pycompat.bytestr(category)
    existing = self._sidedata_computers.get(kind, [])
    already_registered = category in existing
    if replace and not already_registered:
        msg = _(
            b"cannot replace a sidedata computer that isn't registered "
            b"for category '%s'."
        )
        raise error.ProgrammingError(msg % category)
    if already_registered and not replace:
        msg = _(
            b"cannot register a sidedata computer twice for category '%s'."
        )
        raise error.ProgrammingError(msg % category)
    self._sidedata_computers.setdefault(kind, {})
    self._sidedata_computers[kind][category] = (keys, computer, flags)
3549
3550
3550
3551
def undoname(fn: bytes) -> bytes:
    """Map a journal file path to the matching undo file path.

    The basename must start with ``journal``; only its first occurrence
    is rewritten to ``undo``, preserving any suffix and the directory.
    """
    directory, filename = os.path.split(fn)
    assert filename.startswith(b'journal')
    renamed = filename.replace(b'journal', b'undo', 1)
    return os.path.join(directory, renamed)
3555
3556
3556
3557
def instance(ui, path: bytes, create, intents=None, createopts=None):
    """Open (and optionally create) the local repository at *path*.

    The freshly opened repository may be transparently replaced through
    the automatic-upgrade mechanism before being returned.
    """
    # prevent cyclic import localrepo -> upgrade -> localrepo
    from . import upgrade

    localpath = urlutil.urllocalpath(path)
    if create:
        createrepository(ui, localpath, createopts=createopts)

    def repo_maker():
        # may_auto_upgrade needs to be able to re-open the repository
        return makelocalrepository(ui, localpath, intents=intents)

    repo = repo_maker()
    return upgrade.may_auto_upgrade(repo, repo_maker)
3571
3572
3572
3573
def islocal(path: bytes) -> bool:
    """Report whether *path* names a local repository (always True here)."""
    return True
3575
3576
3576
3577
def defaultcreateopts(ui, createopts=None):
    """Populate the default creation options for a repository.

    A dictionary of explicitly requested creation options can be passed
    in; missing keys are filled from configuration. The input mapping is
    never mutated — a fresh dict is returned.
    """
    opts = dict(createopts or {})

    if b'backend' not in opts:
        # experimental config: storage.new-repo-backend
        opts[b'backend'] = ui.config(b'storage', b'new-repo-backend')

    return opts
3590
3591
3591
3592
def clone_requirements(ui, createopts, srcrepo):
    """clone the requirements of a local repo for a local clone

    The store requirements are copied from ``srcrepo`` unchanged, while
    the working-copy requirements are derived from the current
    configuration.
    """
    if not srcrepo.requirements:
        # a legacy revlog "v0" repository: nothing fancy can be done,
        # just report no requirements at all
        return set()

    createopts = defaultcreateopts(ui, createopts=createopts)
    fresh = newreporequirements(ui, createopts)
    working_dir = requirementsmod.WORKING_DIR_REQUIREMENTS

    # working-copy requirements come from the new-repo defaults...
    target_requirements = {r for r in fresh if r in working_dir}
    # ...while everything store-related is inherited from the source
    target_requirements |= {
        r for r in srcrepo.requirements if r not in working_dir
    }
    return target_requirements
3612
3613
3613
3614
def newreporequirements(ui, createopts):
    """Determine the set of requirements for a new local repository.

    Extensions can wrap this function to specify custom requirements for
    new repositories.

    ``createopts`` must have been populated by ``defaultcreateopts()``
    so that at least the ``backend`` key is present; only the
    ``revlogv1`` backend is supported here.
    """

    if b'backend' not in createopts:
        raise error.ProgrammingError(
            b'backend key not present in createopts; '
            b'was defaultcreateopts() called?'
        )

    if createopts[b'backend'] != b'revlogv1':
        raise error.Abort(
            _(
                b'unable to determine repository requirements for '
                b'storage backend: %s'
            )
            % createopts[b'backend']
        )

    requirements = {requirementsmod.REVLOGV1_REQUIREMENT}
    # fncache depends on store, and dotencode depends on fncache,
    # hence the nesting below.
    if ui.configbool(b'format', b'usestore'):
        requirements.add(requirementsmod.STORE_REQUIREMENT)
        if ui.configbool(b'format', b'usefncache'):
            requirements.add(requirementsmod.FNCACHE_REQUIREMENT)
            if ui.configbool(b'format', b'dotencode'):
                requirements.add(requirementsmod.DOTENCODE_REQUIREMENT)

    # pick the first configured compression engine that is both available
    # and usable in revlogs; the for/else aborts when none qualifies
    compengines = ui.configlist(b'format', b'revlog-compression')
    for compengine in compengines:
        if compengine in util.compengines:
            engine = util.compengines[compengine]
            if engine.available() and engine.revlogheader():
                break
    else:
        raise error.Abort(
            _(
                b'compression engines %s defined by '
                b'format.revlog-compression not available'
            )
            % b', '.join(b'"%s"' % e for e in compengines),
            hint=_(
                b'run "hg debuginstall" to list available '
                b'compression engines'
            ),
        )

    # zlib is the historical default and doesn't need an explicit requirement.
    if compengine == b'zstd':
        requirements.add(b'revlog-compression-zstd')
    elif compengine != b'zlib':
        requirements.add(b'exp-compression-%s' % compengine)

    if scmutil.gdinitconfig(ui):
        requirements.add(requirementsmod.GENERALDELTA_REQUIREMENT)
    if ui.configbool(b'format', b'sparse-revlog'):
        requirements.add(requirementsmod.SPARSEREVLOG_REQUIREMENT)

    # experimental config: format.use-dirstate-v2
    # Keep this logic in sync with `has_dirstate_v2()` in `tests/hghave.py`
    if ui.configbool(b'format', b'use-dirstate-v2'):
        requirements.add(requirementsmod.DIRSTATE_V2_REQUIREMENT)

    # experimental config: format.exp-use-copies-side-data-changeset
    if ui.configbool(b'format', b'exp-use-copies-side-data-changeset'):
        # copies-in-changeset sidedata requires the changelog-v2 format
        requirements.add(requirementsmod.CHANGELOGV2_REQUIREMENT)
        requirements.add(requirementsmod.COPIESSDC_REQUIREMENT)
    if ui.configbool(b'experimental', b'treemanifest'):
        requirements.add(requirementsmod.TREEMANIFEST_REQUIREMENT)

    # the unwieldy config values below act as an extra "are you sure"
    # confirmation before enabling unstable storage formats
    changelogv2 = ui.config(b'format', b'exp-use-changelog-v2')
    if changelogv2 == b'enable-unstable-format-and-corrupt-my-data':
        requirements.add(requirementsmod.CHANGELOGV2_REQUIREMENT)

    revlogv2 = ui.config(b'experimental', b'revlogv2')
    if revlogv2 == b'enable-unstable-format-and-corrupt-my-data':
        # revlogv2 supersedes revlogv1 rather than supplementing it
        requirements.discard(requirementsmod.REVLOGV1_REQUIREMENT)
        requirements.add(requirementsmod.REVLOGV2_REQUIREMENT)
    # experimental config: format.internal-phase
    if ui.configbool(b'format', b'use-internal-phase'):
        requirements.add(requirementsmod.INTERNAL_PHASE_REQUIREMENT)

    # experimental config: format.exp-archived-phase
    if ui.configbool(b'format', b'exp-archived-phase'):
        requirements.add(requirementsmod.ARCHIVED_PHASE_REQUIREMENT)

    if createopts.get(b'narrowfiles'):
        requirements.add(requirementsmod.NARROW_REQUIREMENT)

    if createopts.get(b'lfs'):
        requirements.add(b'lfs')

    if ui.configbool(b'format', b'bookmarks-in-store'):
        requirements.add(requirementsmod.BOOKMARKS_IN_STORE_REQUIREMENT)

    if ui.configbool(b'format', b'use-persistent-nodemap'):
        requirements.add(requirementsmod.NODEMAP_REQUIREMENT)

    # if share-safe is enabled, let's create the new repository with the new
    # requirement
    if ui.configbool(b'format', b'use-share-safe'):
        requirements.add(requirementsmod.SHARESAFE_REQUIREMENT)

    # if we are creating a share-repoΒΉ we have to handle requirement
    # differently.
    #
    # [1] (i.e. reusing the store from another repository, just having a
    # working copy)
    if b'sharedrepo' in createopts:
        source_requirements = set(createopts[b'sharedrepo'].requirements)

        if requirementsmod.SHARESAFE_REQUIREMENT not in source_requirements:
            # share to an old school repository, we have to copy the
            # requirements and hope for the best.
            requirements = source_requirements
        else:
            # We have control on the working copy only, so "copy" the non
            # working copy part over, ignoring previous logic.
            to_drop = set()
            for req in requirements:
                if req in requirementsmod.WORKING_DIR_REQUIREMENTS:
                    continue
                if req in source_requirements:
                    continue
                to_drop.add(req)
            requirements -= to_drop
            requirements |= source_requirements

        if createopts.get(b'sharedrelative'):
            requirements.add(requirementsmod.RELATIVE_SHARED_REQUIREMENT)
        else:
            requirements.add(requirementsmod.SHARED_REQUIREMENT)

    if ui.configbool(b'format', b'use-dirstate-tracked-hint'):
        version = ui.configint(b'format', b'use-dirstate-tracked-hint.version')
        msg = _(b"ignoring unknown tracked key version: %d\n")
        hint = _(
            b"see `hg help config.format.use-dirstate-tracked-hint-version"
        )
        if version != 1:
            # unknown hint version: warn instead of writing a requirement
            # future versions of hg could not read back
            ui.warn(msg % version, hint=hint)
        else:
            requirements.add(requirementsmod.DIRSTATE_TRACKED_HINT_V1)

    return requirements
3761
3762
3762
3763
def checkrequirementscompat(ui, requirements):
    """Checks compatibility of repository requirements enabled and disabled.

    Returns a set of requirements which needs to be dropped because
    dependent requirements are not enabled. Also warns users about it.

    All checks below only apply when the store requirement is absent
    (``format.usestore`` disabled), since the conflicting features all
    depend on having a store.
    """

    dropped = set()

    if requirementsmod.STORE_REQUIREMENT not in requirements:
        if requirementsmod.BOOKMARKS_IN_STORE_REQUIREMENT in requirements:
            # NOTE(review): the 'beacuse' typo below is part of the
            # translated msgid — fixing it would invalidate existing
            # translations, so it is left as-is.
            ui.warn(
                _(
                    b'ignoring enabled \'format.bookmarks-in-store\' config '
                    b'beacuse it is incompatible with disabled '
                    b'\'format.usestore\' config\n'
                )
            )
            dropped.add(requirementsmod.BOOKMARKS_IN_STORE_REQUIREMENT)

        # sharing requires a store to share; this cannot be papered over,
        # so abort instead of dropping the requirement
        if (
            requirementsmod.SHARED_REQUIREMENT in requirements
            or requirementsmod.RELATIVE_SHARED_REQUIREMENT in requirements
        ):
            raise error.Abort(
                _(
                    b"cannot create shared repository as source was created"
                    b" with 'format.usestore' config disabled"
                )
            )

        if requirementsmod.SHARESAFE_REQUIREMENT in requirements:
            # only warn when the user explicitly asked for share-safe;
            # it is dropped either way
            if ui.hasconfig(b'format', b'use-share-safe'):
                msg = _(
                    b"ignoring enabled 'format.use-share-safe' config because "
                    b"it is incompatible with disabled 'format.usestore'"
                    b" config\n"
                )
                ui.warn(msg)
            dropped.add(requirementsmod.SHARESAFE_REQUIREMENT)

    return dropped
3804
3805
3805
3806
def filterknowncreateopts(ui, createopts):
    """Filter a dict of repo creation options down to the unknown ones.

    Receives a dict of repo creation options and returns the subset this
    code does not know how to handle.

    This function is called as part of repository creation. If the
    returned dict contains any items, repository creation will not be
    allowed, as it means there was a request to create a repository with
    options not recognized by loaded code.

    Extensions can wrap this function to filter out creation options
    they know how to handle.
    """
    recognized = frozenset(
        (
            b'backend',
            b'lfs',
            b'narrowfiles',
            b'sharedrepo',
            b'sharedrelative',
            b'shareditems',
            b'shallowfilestore',
        )
    )

    unknown = {}
    for opt, value in createopts.items():
        if opt not in recognized:
            unknown[opt] = value
    return unknown
3831
3832
3832
3833
3833 def createrepository(ui, path: bytes, createopts=None, requirements=None):
3834 def createrepository(ui, path: bytes, createopts=None, requirements=None):
3834 """Create a new repository in a vfs.
3835 """Create a new repository in a vfs.
3835
3836
3836 ``path`` path to the new repo's working directory.
3837 ``path`` path to the new repo's working directory.
3837 ``createopts`` options for the new repository.
3838 ``createopts`` options for the new repository.
3838 ``requirement`` predefined set of requirements.
3839 ``requirement`` predefined set of requirements.
3839 (incompatible with ``createopts``)
3840 (incompatible with ``createopts``)
3840
3841
3841 The following keys for ``createopts`` are recognized:
3842 The following keys for ``createopts`` are recognized:
3842
3843
3843 backend
3844 backend
3844 The storage backend to use.
3845 The storage backend to use.
3845 lfs
3846 lfs
3846 Repository will be created with ``lfs`` requirement. The lfs extension
3847 Repository will be created with ``lfs`` requirement. The lfs extension
3847 will automatically be loaded when the repository is accessed.
3848 will automatically be loaded when the repository is accessed.
3848 narrowfiles
3849 narrowfiles
3849 Set up repository to support narrow file storage.
3850 Set up repository to support narrow file storage.
3850 sharedrepo
3851 sharedrepo
3851 Repository object from which storage should be shared.
3852 Repository object from which storage should be shared.
3852 sharedrelative
3853 sharedrelative
3853 Boolean indicating if the path to the shared repo should be
3854 Boolean indicating if the path to the shared repo should be
3854 stored as relative. By default, the pointer to the "parent" repo
3855 stored as relative. By default, the pointer to the "parent" repo
3855 is stored as an absolute path.
3856 is stored as an absolute path.
3856 shareditems
3857 shareditems
3857 Set of items to share to the new repository (in addition to storage).
3858 Set of items to share to the new repository (in addition to storage).
3858 shallowfilestore
3859 shallowfilestore
3859 Indicates that storage for files should be shallow (not all ancestor
3860 Indicates that storage for files should be shallow (not all ancestor
3860 revisions are known).
3861 revisions are known).
3861 """
3862 """
3862
3863
3863 if requirements is not None:
3864 if requirements is not None:
3864 if createopts is not None:
3865 if createopts is not None:
3865 msg = b'cannot specify both createopts and requirements'
3866 msg = b'cannot specify both createopts and requirements'
3866 raise error.ProgrammingError(msg)
3867 raise error.ProgrammingError(msg)
3867 createopts = {}
3868 createopts = {}
3868 else:
3869 else:
3869 createopts = defaultcreateopts(ui, createopts=createopts)
3870 createopts = defaultcreateopts(ui, createopts=createopts)
3870
3871
3871 unknownopts = filterknowncreateopts(ui, createopts)
3872 unknownopts = filterknowncreateopts(ui, createopts)
3872
3873
3873 if not isinstance(unknownopts, dict):
3874 if not isinstance(unknownopts, dict):
3874 raise error.ProgrammingError(
3875 raise error.ProgrammingError(
3875 b'filterknowncreateopts() did not return a dict'
3876 b'filterknowncreateopts() did not return a dict'
3876 )
3877 )
3877
3878
3878 if unknownopts:
3879 if unknownopts:
3879 raise error.Abort(
3880 raise error.Abort(
3880 _(
3881 _(
3881 b'unable to create repository because of unknown '
3882 b'unable to create repository because of unknown '
3882 b'creation option: %s'
3883 b'creation option: %s'
3883 )
3884 )
3884 % b', '.join(sorted(unknownopts)),
3885 % b', '.join(sorted(unknownopts)),
3885 hint=_(b'is a required extension not loaded?'),
3886 hint=_(b'is a required extension not loaded?'),
3886 )
3887 )
3887
3888
3888 requirements = newreporequirements(ui, createopts=createopts)
3889 requirements = newreporequirements(ui, createopts=createopts)
3889 requirements -= checkrequirementscompat(ui, requirements)
3890 requirements -= checkrequirementscompat(ui, requirements)
3890
3891
3891 wdirvfs = vfsmod.vfs(path, expandpath=True, realpath=True)
3892 wdirvfs = vfsmod.vfs(path, expandpath=True, realpath=True)
3892
3893
3893 hgvfs = vfsmod.vfs(wdirvfs.join(b'.hg'))
3894 hgvfs = vfsmod.vfs(wdirvfs.join(b'.hg'))
3894 if hgvfs.exists():
3895 if hgvfs.exists():
3895 raise error.RepoError(_(b'repository %s already exists') % path)
3896 raise error.RepoError(_(b'repository %s already exists') % path)
3896
3897
3897 if b'sharedrepo' in createopts:
3898 if b'sharedrepo' in createopts:
3898 sharedpath = createopts[b'sharedrepo'].sharedpath
3899 sharedpath = createopts[b'sharedrepo'].sharedpath
3899
3900
3900 if createopts.get(b'sharedrelative'):
3901 if createopts.get(b'sharedrelative'):
3901 try:
3902 try:
3902 sharedpath = os.path.relpath(sharedpath, hgvfs.base)
3903 sharedpath = os.path.relpath(sharedpath, hgvfs.base)
3903 sharedpath = util.pconvert(sharedpath)
3904 sharedpath = util.pconvert(sharedpath)
3904 except (IOError, ValueError) as e:
3905 except (IOError, ValueError) as e:
3905 # ValueError is raised on Windows if the drive letters differ
3906 # ValueError is raised on Windows if the drive letters differ
3906 # on each path.
3907 # on each path.
3907 raise error.Abort(
3908 raise error.Abort(
3908 _(b'cannot calculate relative path'),
3909 _(b'cannot calculate relative path'),
3909 hint=stringutil.forcebytestr(e),
3910 hint=stringutil.forcebytestr(e),
3910 )
3911 )
3911
3912
3912 if not wdirvfs.exists():
3913 if not wdirvfs.exists():
3913 wdirvfs.makedirs()
3914 wdirvfs.makedirs()
3914
3915
3915 hgvfs.makedir(notindexed=True)
3916 hgvfs.makedir(notindexed=True)
3916 if b'sharedrepo' not in createopts:
3917 if b'sharedrepo' not in createopts:
3917 hgvfs.mkdir(b'cache')
3918 hgvfs.mkdir(b'cache')
3918 hgvfs.mkdir(b'wcache')
3919 hgvfs.mkdir(b'wcache')
3919
3920
3920 has_store = requirementsmod.STORE_REQUIREMENT in requirements
3921 has_store = requirementsmod.STORE_REQUIREMENT in requirements
3921 if has_store and b'sharedrepo' not in createopts:
3922 if has_store and b'sharedrepo' not in createopts:
3922 hgvfs.mkdir(b'store')
3923 hgvfs.mkdir(b'store')
3923
3924
3924 # We create an invalid changelog outside the store so very old
3925 # We create an invalid changelog outside the store so very old
3925 # Mercurial versions (which didn't know about the requirements
3926 # Mercurial versions (which didn't know about the requirements
3926 # file) encounter an error on reading the changelog. This
3927 # file) encounter an error on reading the changelog. This
3927 # effectively locks out old clients and prevents them from
3928 # effectively locks out old clients and prevents them from
3928 # mucking with a repo in an unknown format.
3929 # mucking with a repo in an unknown format.
3929 #
3930 #
3930 # The revlog header has version 65535, which won't be recognized by
3931 # The revlog header has version 65535, which won't be recognized by
3931 # such old clients.
3932 # such old clients.
3932 hgvfs.append(
3933 hgvfs.append(
3933 b'00changelog.i',
3934 b'00changelog.i',
3934 b'\0\0\xFF\xFF dummy changelog to prevent using the old repo '
3935 b'\0\0\xFF\xFF dummy changelog to prevent using the old repo '
3935 b'layout',
3936 b'layout',
3936 )
3937 )
3937
3938
3938 # Filter the requirements into working copy and store ones
3939 # Filter the requirements into working copy and store ones
3939 wcreq, storereq = scmutil.filterrequirements(requirements)
3940 wcreq, storereq = scmutil.filterrequirements(requirements)
3940 # write working copy ones
3941 # write working copy ones
3941 scmutil.writerequires(hgvfs, wcreq)
3942 scmutil.writerequires(hgvfs, wcreq)
3942 # If there are store requirements and the current repository
3943 # If there are store requirements and the current repository
3943 # is not a shared one, write stored requirements
3944 # is not a shared one, write stored requirements
3944 # For new shared repository, we don't need to write the store
3945 # For new shared repository, we don't need to write the store
3945 # requirements as they are already present in store requires
3946 # requirements as they are already present in store requires
3946 if storereq and b'sharedrepo' not in createopts:
3947 if storereq and b'sharedrepo' not in createopts:
3947 storevfs = vfsmod.vfs(hgvfs.join(b'store'), cacheaudited=True)
3948 storevfs = vfsmod.vfs(hgvfs.join(b'store'), cacheaudited=True)
3948 scmutil.writerequires(storevfs, storereq)
3949 scmutil.writerequires(storevfs, storereq)
3949
3950
3950 # Write out file telling readers where to find the shared store.
3951 # Write out file telling readers where to find the shared store.
3951 if b'sharedrepo' in createopts:
3952 if b'sharedrepo' in createopts:
3952 hgvfs.write(b'sharedpath', sharedpath)
3953 hgvfs.write(b'sharedpath', sharedpath)
3953
3954
3954 if createopts.get(b'shareditems'):
3955 if createopts.get(b'shareditems'):
3955 shared = b'\n'.join(sorted(createopts[b'shareditems'])) + b'\n'
3956 shared = b'\n'.join(sorted(createopts[b'shareditems'])) + b'\n'
3956 hgvfs.write(b'shared', shared)
3957 hgvfs.write(b'shared', shared)
3957
3958
3958
3959
3959 def poisonrepository(repo):
3960 def poisonrepository(repo):
3960 """Poison a repository instance so it can no longer be used."""
3961 """Poison a repository instance so it can no longer be used."""
3961 # Perform any cleanup on the instance.
3962 # Perform any cleanup on the instance.
3962 repo.close()
3963 repo.close()
3963
3964
3964 # Our strategy is to replace the type of the object with one that
3965 # Our strategy is to replace the type of the object with one that
3965 # has all attribute lookups result in error.
3966 # has all attribute lookups result in error.
3966 #
3967 #
3967 # But we have to allow the close() method because some constructors
3968 # But we have to allow the close() method because some constructors
3968 # of repos call close() on repo references.
3969 # of repos call close() on repo references.
3969 class poisonedrepository:
3970 class poisonedrepository:
3970 def __getattribute__(self, item):
3971 def __getattribute__(self, item):
3971 if item == 'close':
3972 if item == 'close':
3972 return object.__getattribute__(self, item)
3973 return object.__getattribute__(self, item)
3973
3974
3974 raise error.ProgrammingError(
3975 raise error.ProgrammingError(
3975 b'repo instances should not be used after unshare'
3976 b'repo instances should not be used after unshare'
3976 )
3977 )
3977
3978
3978 def close(self):
3979 def close(self):
3979 pass
3980 pass
3980
3981
3981 # We may have a repoview, which intercepts __setattr__. So be sure
3982 # We may have a repoview, which intercepts __setattr__. So be sure
3982 # we operate at the lowest level possible.
3983 # we operate at the lowest level possible.
3983 object.__setattr__(repo, '__class__', poisonedrepository)
3984 object.__setattr__(repo, '__class__', poisonedrepository)
@@ -1,497 +1,520 b''
1 setup
1 setup
2
2
3 $ cat > myextension.py <<EOF
3 $ cat > myextension.py <<EOF
4 > from mercurial import error, registrar
4 > from mercurial import error, registrar
5 > cmdtable = {}
5 > cmdtable = {}
6 > command = registrar.command(cmdtable)
6 > command = registrar.command(cmdtable)
7 > @command(b'crash', [], b'hg crash')
7 > @command(b'crash', [], b'hg crash')
8 > def crash(ui, *args, **kwargs):
8 > def crash(ui, *args, **kwargs):
9 > raise Exception("oops")
9 > raise Exception("oops")
10 > @command(b'abortcmd', [], b'hg abortcmd')
10 > @command(b'abortcmd', [], b'hg abortcmd')
11 > def abort(ui, *args, **kwargs):
11 > def abort(ui, *args, **kwargs):
12 > raise error.Abort(b"oops")
12 > raise error.Abort(b"oops")
13 > EOF
13 > EOF
14 $ abspath=`pwd`/myextension.py
14 $ abspath=`pwd`/myextension.py
15
15
16 $ cat >> $HGRCPATH <<EOF
16 $ cat >> $HGRCPATH <<EOF
17 > [extensions]
17 > [extensions]
18 > blackbox=
18 > blackbox=
19 > mock=$TESTDIR/mockblackbox.py
19 > mock=$TESTDIR/mockblackbox.py
20 > mq=
20 > mq=
21 > myextension=$TESTTMP/myextension.py
21 > myextension=$TESTTMP/myextension.py
22 > [alias]
22 > [alias]
23 > confuse = log --limit 3
23 > confuse = log --limit 3
24 > so-confusing = confuse --style compact
24 > so-confusing = confuse --style compact
25 > EOF
25 > EOF
26
26
27 $ hg init blackboxtest
27 $ hg init blackboxtest
28 $ cd blackboxtest
28 $ cd blackboxtest
29
29
30 command, exit codes, and duration
30 command, exit codes, and duration
31
31
32 $ echo a > a
32 $ echo a > a
33 $ hg add a
33 $ hg add a
34 $ hg blackbox --config blackbox.dirty=True
34 $ hg blackbox --config blackbox.dirty=True
35 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> init blackboxtest exited 0 after * seconds (glob)
35 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> init blackboxtest exited 0 after * seconds (glob)
36 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> add a
36 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> add a
37 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> add a exited 0 after * seconds (glob)
37 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> add a exited 0 after * seconds (glob)
38 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000+ (5000)> blackbox --config *blackbox.dirty=True* (glob)
38 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000+ (5000)> blackbox --config *blackbox.dirty=True* (glob)
39
39
40 failure exit code
40 failure exit code
41 $ rm ./.hg/blackbox.log
41 $ rm ./.hg/blackbox.log
42 $ hg add non-existent
42 $ hg add non-existent
43 non-existent: $ENOENT$
43 non-existent: $ENOENT$
44 [1]
44 [1]
45 $ hg blackbox
45 $ hg blackbox
46 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> add non-existent
46 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> add non-existent
47 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> add non-existent exited 1 after * seconds (glob)
47 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> add non-existent exited 1 after * seconds (glob)
48 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> blackbox
48 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> blackbox
49
49
50 abort exit code
50 abort exit code
51 $ rm ./.hg/blackbox.log
51 $ rm ./.hg/blackbox.log
52 $ hg abortcmd 2> /dev/null
52 $ hg abortcmd 2> /dev/null
53 [255]
53 [255]
54 $ hg blackbox -l 2
54 $ hg blackbox -l 2
55 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> abortcmd exited 255 after * seconds (glob)
55 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> abortcmd exited 255 after * seconds (glob)
56 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> blackbox -l 2
56 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> blackbox -l 2
57
57
58 unhandled exception
58 unhandled exception
59 $ rm ./.hg/blackbox.log
59 $ rm ./.hg/blackbox.log
60 #if chg
60 #if chg
61 (chg exits 255 because it fails to receive an exit code)
61 (chg exits 255 because it fails to receive an exit code)
62 $ hg crash 2>/dev/null
62 $ hg crash 2>/dev/null
63 [255]
63 [255]
64 #else
64 #else
65 (hg exits 1 because Python default exit code for uncaught exception is 1)
65 (hg exits 1 because Python default exit code for uncaught exception is 1)
66 $ hg crash 2>/dev/null
66 $ hg crash 2>/dev/null
67 [1]
67 [1]
68 #endif
68 #endif
69 $ hg blackbox -l 2
69 $ hg blackbox -l 2
70 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> crash exited 1 after * seconds (glob)
70 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> crash exited 1 after * seconds (glob)
71 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> blackbox -l 2
71 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> blackbox -l 2
72
72
73 alias expansion is logged
73 alias expansion is logged
74 $ rm ./.hg/blackbox.log
74 $ rm ./.hg/blackbox.log
75 $ hg confuse
75 $ hg confuse
76 $ hg blackbox
76 $ hg blackbox
77 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> confuse
77 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> confuse
78 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> alias 'confuse' expands to 'log --limit 3'
78 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> alias 'confuse' expands to 'log --limit 3'
79 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> confuse exited 0 after * seconds (glob)
79 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> confuse exited 0 after * seconds (glob)
80 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> blackbox
80 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> blackbox
81
81
82 recursive aliases work correctly
82 recursive aliases work correctly
83 $ rm ./.hg/blackbox.log
83 $ rm ./.hg/blackbox.log
84 $ hg so-confusing
84 $ hg so-confusing
85 $ hg blackbox
85 $ hg blackbox
86 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> so-confusing
86 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> so-confusing
87 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> alias 'so-confusing' expands to 'confuse --style compact'
87 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> alias 'so-confusing' expands to 'confuse --style compact'
88 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> alias 'confuse' expands to 'log --limit 3'
88 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> alias 'confuse' expands to 'log --limit 3'
89 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> so-confusing exited 0 after * seconds (glob)
89 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> so-confusing exited 0 after * seconds (glob)
90 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> blackbox
90 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> blackbox
91
91
92 custom date format
92 custom date format
93 $ rm ./.hg/blackbox.log
93 $ rm ./.hg/blackbox.log
94 $ hg --config blackbox.date-format='%Y-%m-%d @ %H:%M:%S' \
94 $ hg --config blackbox.date-format='%Y-%m-%d @ %H:%M:%S' \
95 > --config devel.default-date='1334347993 0' --traceback status
95 > --config devel.default-date='1334347993 0' --traceback status
96 A a
96 A a
97 $ hg blackbox
97 $ hg blackbox
98 2012-04-13 @ 20:13:13 bob @0000000000000000000000000000000000000000 (5000)> --config *blackbox.date-format=%Y-%m-%d @ %H:%M:%S* --config *devel.default-date=1334347993 0* --traceback status (glob)
98 2012-04-13 @ 20:13:13 bob @0000000000000000000000000000000000000000 (5000)> --config *blackbox.date-format=%Y-%m-%d @ %H:%M:%S* --config *devel.default-date=1334347993 0* --traceback status (glob)
99 2012-04-13 @ 20:13:13 bob @0000000000000000000000000000000000000000 (5000)> --config *blackbox.date-format=%Y-%m-%d @ %H:%M:%S* --config *devel.default-date=1334347993 0* --traceback status exited 0 after * seconds (glob)
99 2012-04-13 @ 20:13:13 bob @0000000000000000000000000000000000000000 (5000)> --config *blackbox.date-format=%Y-%m-%d @ %H:%M:%S* --config *devel.default-date=1334347993 0* --traceback status exited 0 after * seconds (glob)
100 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> blackbox
100 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> blackbox
101
101
102 incoming change tracking
102 incoming change tracking
103
103
104 create two heads to verify that we only see one change in the log later
104 create two heads to verify that we only see one change in the log later
105 $ hg commit -ma
105 $ hg commit -ma
106 $ hg up null
106 $ hg up null
107 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
107 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
108 $ echo b > b
108 $ echo b > b
109 $ hg commit -Amb
109 $ hg commit -Amb
110 adding b
110 adding b
111 created new head
111 created new head
112
112
113 clone, commit, pull
113 clone, commit, pull
114 $ hg clone . ../blackboxtest2
114 $ hg clone . ../blackboxtest2
115 updating to branch default
115 updating to branch default
116 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
116 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
117 $ echo c > c
117 $ echo c > c
118 $ hg commit -Amc
118 $ hg commit -Amc
119 adding c
119 adding c
120 $ cd ../blackboxtest2
120 $ cd ../blackboxtest2
121 $ hg pull
121 $ hg pull
122 pulling from $TESTTMP/blackboxtest
122 pulling from $TESTTMP/blackboxtest
123 searching for changes
123 searching for changes
124 adding changesets
124 adding changesets
125 adding manifests
125 adding manifests
126 adding file changes
126 adding file changes
127 added 1 changesets with 1 changes to 1 files
127 added 1 changesets with 1 changes to 1 files
128 new changesets d02f48003e62
128 new changesets d02f48003e62
129 (run 'hg update' to get a working copy)
129 (run 'hg update' to get a working copy)
130 $ hg blackbox -l 6
130 $ hg blackbox -l 6
131 1970-01-01 00:00:00.000 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> wrote branch cache (served) with 1 labels and 2 nodes
131 1970-01-01 00:00:00.000 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> wrote branch cache (served) with 1 labels and 2 nodes
132 1970-01-01 00:00:00.000 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> updated branch cache (served.hidden) in * seconds (glob)
132 1970-01-01 00:00:00.000 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> updated branch cache (served.hidden) in * seconds (glob)
133 1970-01-01 00:00:00.000 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> wrote branch cache (served.hidden) with 1 labels and 2 nodes
133 1970-01-01 00:00:00.000 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> wrote branch cache (served.hidden) with 1 labels and 2 nodes
134 1970-01-01 00:00:00.000 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> 1 incoming changes - new heads: d02f48003e62
134 1970-01-01 00:00:00.000 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> 1 incoming changes - new heads: d02f48003e62
135 1970-01-01 00:00:00.000 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> pull exited 0 after * seconds (glob)
135 1970-01-01 00:00:00.000 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> pull exited 0 after * seconds (glob)
136 1970-01-01 00:00:00.000 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> blackbox -l 6
136 1970-01-01 00:00:00.000 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> blackbox -l 6
137
137
138 we must not cause a failure if we cannot write to the log
138 we must not cause a failure if we cannot write to the log
139
139
140 $ hg rollback
140 $ hg rollback
141 repository tip rolled back to revision 1 (undo pull)
141 repository tip rolled back to revision 1 (undo pull)
142
142
143 $ mv .hg/blackbox.log .hg/blackbox.log-
143 $ mv .hg/blackbox.log .hg/blackbox.log-
144 $ mkdir .hg/blackbox.log
144 $ mkdir .hg/blackbox.log
145 $ hg --debug incoming
145 $ hg --debug incoming
146 warning: cannot write to blackbox.log: * (glob)
146 warning: cannot write to blackbox.log: * (glob)
147 comparing with $TESTTMP/blackboxtest
147 comparing with $TESTTMP/blackboxtest
148 query 1; heads
148 query 1; heads
149 searching for changes
149 searching for changes
150 all local changesets known remotely
150 all local changesets known remotely
151 changeset: 2:d02f48003e62c24e2659d97d30f2a83abe5d5d51
151 changeset: 2:d02f48003e62c24e2659d97d30f2a83abe5d5d51
152 tag: tip
152 tag: tip
153 phase: draft
153 phase: draft
154 parent: 1:6563da9dcf87b1949716e38ff3e3dfaa3198eb06
154 parent: 1:6563da9dcf87b1949716e38ff3e3dfaa3198eb06
155 parent: -1:0000000000000000000000000000000000000000
155 parent: -1:0000000000000000000000000000000000000000
156 manifest: 2:ab9d46b053ebf45b7996f2922b9893ff4b63d892
156 manifest: 2:ab9d46b053ebf45b7996f2922b9893ff4b63d892
157 user: test
157 user: test
158 date: Thu Jan 01 00:00:00 1970 +0000
158 date: Thu Jan 01 00:00:00 1970 +0000
159 files+: c
159 files+: c
160 extra: branch=default
160 extra: branch=default
161 description:
161 description:
162 c
162 c
163
163
164
164
165 $ hg pull
165 $ hg pull
166 pulling from $TESTTMP/blackboxtest
166 pulling from $TESTTMP/blackboxtest
167 searching for changes
167 searching for changes
168 adding changesets
168 adding changesets
169 adding manifests
169 adding manifests
170 adding file changes
170 adding file changes
171 added 1 changesets with 1 changes to 1 files
171 added 1 changesets with 1 changes to 1 files
172 new changesets d02f48003e62
172 new changesets d02f48003e62
173 (run 'hg update' to get a working copy)
173 (run 'hg update' to get a working copy)
174
174
175 a failure reading from the log is fatal
175 a failure reading from the log is fatal
176
176
177 $ hg blackbox -l 3
177 $ hg blackbox -l 3
178 abort: *$TESTTMP/blackboxtest2/.hg/blackbox.log* (glob)
178 abort: *$TESTTMP/blackboxtest2/.hg/blackbox.log* (glob)
179 [255]
179 [255]
180
180
181 $ rmdir .hg/blackbox.log
181 $ rmdir .hg/blackbox.log
182 $ mv .hg/blackbox.log- .hg/blackbox.log
182 $ mv .hg/blackbox.log- .hg/blackbox.log
183
183
184 backup bundles get logged
184 backup bundles get logged
185
185
186 $ touch d
186 $ touch d
187 $ hg commit -Amd
187 $ hg commit -Amd
188 adding d
188 adding d
189 created new head
189 created new head
190 $ hg strip tip
190 $ hg strip tip
191 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
191 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
192 saved backup bundle to $TESTTMP/blackboxtest2/.hg/strip-backup/*-backup.hg (glob)
192 saved backup bundle to $TESTTMP/blackboxtest2/.hg/strip-backup/*-backup.hg (glob)
193 $ hg blackbox -l 6
193 $ hg blackbox -l 6
194 1970-01-01 00:00:00.000 bob @73f6ee326b27d820b0472f1a825e3a50f3dc489b (5000)> strip tip
194 1970-01-01 00:00:00.000 bob @73f6ee326b27d820b0472f1a825e3a50f3dc489b (5000)> strip tip
195 1970-01-01 00:00:00.000 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> saved backup bundle to $TESTTMP/blackboxtest2/.hg/strip-backup/73f6ee326b27-7612e004-backup.hg
195 1970-01-01 00:00:00.000 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> saved backup bundle to $TESTTMP/blackboxtest2/.hg/strip-backup/73f6ee326b27-7612e004-backup.hg
196 1970-01-01 00:00:00.000 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> updated branch cache (immutable) in * seconds (glob)
196 1970-01-01 00:00:00.000 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> updated branch cache (immutable) in * seconds (glob)
197 1970-01-01 00:00:00.000 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> wrote branch cache (immutable) with 1 labels and 2 nodes
197 1970-01-01 00:00:00.000 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> wrote branch cache (immutable) with 1 labels and 2 nodes
198 1970-01-01 00:00:00.000 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> strip tip exited 0 after * seconds (glob)
198 1970-01-01 00:00:00.000 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> strip tip exited 0 after * seconds (glob)
199 1970-01-01 00:00:00.000 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> blackbox -l 6
199 1970-01-01 00:00:00.000 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> blackbox -l 6
200
200
201 extension and python hooks - use the eol extension for a pythonhook
201 extension and python hooks - use the eol extension for a pythonhook
202
202
203 $ echo '[extensions]' >> .hg/hgrc
203 $ echo '[extensions]' >> .hg/hgrc
204 $ echo 'eol=' >> .hg/hgrc
204 $ echo 'eol=' >> .hg/hgrc
205 $ echo '[hooks]' >> .hg/hgrc
205 $ echo '[hooks]' >> .hg/hgrc
206 $ echo 'update = echo hooked' >> .hg/hgrc
206 $ echo 'update = echo hooked' >> .hg/hgrc
207 $ hg update
207 $ hg update
208 The fsmonitor extension is incompatible with the eol extension and has been disabled. (fsmonitor !)
208 The fsmonitor extension is incompatible with the eol extension and has been disabled. (fsmonitor !)
209 hooked
209 hooked
210 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
210 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
211 updated to "d02f48003e62: c"
211 updated to "d02f48003e62: c"
212 1 other heads for branch "default"
212 1 other heads for branch "default"
213 $ cat >> .hg/hgrc <<EOF
213 $ cat >> .hg/hgrc <<EOF
214 > [extensions]
214 > [extensions]
215 > # disable eol, because it is not needed for subsequent tests
215 > # disable eol, because it is not needed for subsequent tests
216 > # (in addition, keeping it requires extra care for fsmonitor)
216 > # (in addition, keeping it requires extra care for fsmonitor)
217 > eol=!
217 > eol=!
218 > EOF
218 > EOF
219 $ hg blackbox -l 5
219 $ hg blackbox -l 5
220 1970-01-01 00:00:00.000 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> update (no-chg !)
220 1970-01-01 00:00:00.000 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> update (no-chg !)
221 1970-01-01 00:00:00.000 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> pythonhook-preupdate: hgext.eol.preupdate finished in * seconds (glob)
221 1970-01-01 00:00:00.000 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> pythonhook-preupdate: hgext.eol.preupdate finished in * seconds (glob)
222 1970-01-01 00:00:00.000 bob @d02f48003e62c24e2659d97d30f2a83abe5d5d51 (5000)> exthook-update: echo hooked finished in * seconds (glob)
222 1970-01-01 00:00:00.000 bob @d02f48003e62c24e2659d97d30f2a83abe5d5d51 (5000)> exthook-update: echo hooked finished in * seconds (glob)
223 1970-01-01 00:00:00.000 bob @d02f48003e62c24e2659d97d30f2a83abe5d5d51 (5000)> update exited 0 after * seconds (glob)
223 1970-01-01 00:00:00.000 bob @d02f48003e62c24e2659d97d30f2a83abe5d5d51 (5000)> update exited 0 after * seconds (glob)
224 1970-01-01 00:00:00.000 bob @d02f48003e62c24e2659d97d30f2a83abe5d5d51 (5000)> serve --no-profile --cmdserver chgunix --address $TESTTMP.chgsock/server.* --daemon-postexec 'chdir:/' (glob) (chg !)
224 1970-01-01 00:00:00.000 bob @d02f48003e62c24e2659d97d30f2a83abe5d5d51 (5000)> serve --no-profile --cmdserver chgunix --address $TESTTMP.chgsock/server.* --daemon-postexec 'chdir:/' (glob) (chg !)
225 1970-01-01 00:00:00.000 bob @d02f48003e62c24e2659d97d30f2a83abe5d5d51 (5000)> blackbox -l 5
225 1970-01-01 00:00:00.000 bob @d02f48003e62c24e2659d97d30f2a83abe5d5d51 (5000)> blackbox -l 5
226
226
227 log rotation
227 log rotation
228
228
229 $ echo '[blackbox]' >> .hg/hgrc
229 $ echo '[blackbox]' >> .hg/hgrc
230 $ echo 'maxsize = 20 b' >> .hg/hgrc
230 $ echo 'maxsize = 20 b' >> .hg/hgrc
231 $ echo 'maxfiles = 3' >> .hg/hgrc
231 $ echo 'maxfiles = 3' >> .hg/hgrc
232 $ hg status
232 $ hg status
233 $ hg status
233 $ hg status
234 $ hg status
234 $ hg status
235 $ hg tip -q
235 $ hg tip -q
236 2:d02f48003e62
236 2:d02f48003e62
237 $ ls .hg/blackbox.log*
237 $ ls .hg/blackbox.log*
238 .hg/blackbox.log
238 .hg/blackbox.log
239 .hg/blackbox.log.1
239 .hg/blackbox.log.1
240 .hg/blackbox.log.2
240 .hg/blackbox.log.2
241 $ cd ..
241 $ cd ..
242
242
243 $ hg init blackboxtest3
243 $ hg init blackboxtest3
244 $ cd blackboxtest3
244 $ cd blackboxtest3
245 $ hg blackbox
245 $ hg blackbox
246 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> init blackboxtest3 exited 0 after * seconds (glob)
246 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> init blackboxtest3 exited 0 after * seconds (glob)
247 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> blackbox
247 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> blackbox
248 $ mv .hg/blackbox.log .hg/blackbox.log-
248 $ mv .hg/blackbox.log .hg/blackbox.log-
249 $ mkdir .hg/blackbox.log
249 $ mkdir .hg/blackbox.log
250 $ sed -e 's/\(.*test1.*\)/#\1/; s#\(.*commit2.*\)#os.rmdir(".hg/blackbox.log")\
250 $ sed -e 's/\(.*test1.*\)/#\1/; s#\(.*commit2.*\)#os.rmdir(".hg/blackbox.log")\
251 > os.rename(".hg/blackbox.log-", ".hg/blackbox.log")\
251 > os.rename(".hg/blackbox.log-", ".hg/blackbox.log")\
252 > \1#' $TESTDIR/test-dispatch.py > ../test-dispatch.py
252 > \1#' $TESTDIR/test-dispatch.py > ../test-dispatch.py
253 $ "$PYTHON" $TESTDIR/blackbox-readonly-dispatch.py
253 $ "$PYTHON" $TESTDIR/blackbox-readonly-dispatch.py
254 running: --debug add foo
254 running: --debug add foo
255 warning: cannot write to blackbox.log: Is a directory (no-windows !)
255 warning: cannot write to blackbox.log: Is a directory (no-windows !)
256 warning: cannot write to blackbox.log: $TESTTMP/blackboxtest3/.hg/blackbox.log: Access is denied (windows !)
256 warning: cannot write to blackbox.log: $TESTTMP/blackboxtest3/.hg/blackbox.log: Access is denied (windows !)
257 adding foo
257 adding foo
258 result: 0
258 result: 0
259 running: --debug commit -m commit1 -d 2000-01-01 foo
259 running: --debug commit -m commit1 -d 2000-01-01 foo
260 warning: cannot write to blackbox.log: Is a directory (no-windows !)
260 warning: cannot write to blackbox.log: Is a directory (no-windows !)
261 warning: cannot write to blackbox.log: $TESTTMP/blackboxtest3/.hg/blackbox.log: Access is denied (windows !)
261 warning: cannot write to blackbox.log: $TESTTMP/blackboxtest3/.hg/blackbox.log: Access is denied (windows !)
262 committing files:
262 committing files:
263 foo
263 foo
264 committing manifest
264 committing manifest
265 committing changelog
265 committing changelog
266 updating the branch cache
266 updating the branch cache
267 committed changeset 0:0e46349438790c460c5c9f7546bfcd39b267bbd2
267 committed changeset 0:0e46349438790c460c5c9f7546bfcd39b267bbd2
268 result: 0
268 result: 0
269 running: --debug commit -m commit2 -d 2000-01-02 foo
269 running: --debug commit -m commit2 -d 2000-01-02 foo
270 committing files:
270 committing files:
271 foo
271 foo
272 committing manifest
272 committing manifest
273 committing changelog
273 committing changelog
274 updating the branch cache
274 updating the branch cache
275 committed changeset 1:45589e459b2edfbf3dbde7e01f611d2c1e7453d7
275 committed changeset 1:45589e459b2edfbf3dbde7e01f611d2c1e7453d7
276 result: 0
276 result: 0
277 running: --debug log -r 0
277 running: --debug log -r 0
278 changeset: 0:0e46349438790c460c5c9f7546bfcd39b267bbd2
278 changeset: 0:0e46349438790c460c5c9f7546bfcd39b267bbd2
279 phase: draft
279 phase: draft
280 parent: -1:0000000000000000000000000000000000000000
280 parent: -1:0000000000000000000000000000000000000000
281 parent: -1:0000000000000000000000000000000000000000
281 parent: -1:0000000000000000000000000000000000000000
282 manifest: 0:9091aa5df980aea60860a2e39c95182e68d1ddec
282 manifest: 0:9091aa5df980aea60860a2e39c95182e68d1ddec
283 user: test
283 user: test
284 date: Sat Jan 01 00:00:00 2000 +0000
284 date: Sat Jan 01 00:00:00 2000 +0000
285 files+: foo
285 files+: foo
286 extra: branch=default
286 extra: branch=default
287 description:
287 description:
288 commit1
288 commit1
289
289
290
290
291 result: 0
291 result: 0
292 running: --debug log -r tip
292 running: --debug log -r tip
293 changeset: 1:45589e459b2edfbf3dbde7e01f611d2c1e7453d7
293 changeset: 1:45589e459b2edfbf3dbde7e01f611d2c1e7453d7
294 tag: tip
294 tag: tip
295 phase: draft
295 phase: draft
296 parent: 0:0e46349438790c460c5c9f7546bfcd39b267bbd2
296 parent: 0:0e46349438790c460c5c9f7546bfcd39b267bbd2
297 parent: -1:0000000000000000000000000000000000000000
297 parent: -1:0000000000000000000000000000000000000000
298 manifest: 1:895aa9b7886f89dd017a6d62524e1f9180b04df9
298 manifest: 1:895aa9b7886f89dd017a6d62524e1f9180b04df9
299 user: test
299 user: test
300 date: Sun Jan 02 00:00:00 2000 +0000
300 date: Sun Jan 02 00:00:00 2000 +0000
301 files: foo
301 files: foo
302 extra: branch=default
302 extra: branch=default
303 description:
303 description:
304 commit2
304 commit2
305
305
306
306
307 result: 0
307 result: 0
308 $ hg blackbox
308 $ hg blackbox
309 1970-01-01 00:00:00.000 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> updating the branch cache
309 1970-01-01 00:00:00.000 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> updating the branch cache
310 1970-01-01 00:00:00.000 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> updated branch cache (served) in * seconds (glob)
310 1970-01-01 00:00:00.000 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> updated branch cache (served) in * seconds (glob)
311 1970-01-01 00:00:00.000 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> wrote branch cache (served) with 1 labels and 1 nodes
311 1970-01-01 00:00:00.000 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> wrote branch cache (served) with 1 labels and 1 nodes
312 1970-01-01 00:00:00.000 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> --debug commit -m commit2 -d 2000-01-02 foo exited 0 after *.?? seconds (glob)
312 1970-01-01 00:00:00.000 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> --debug commit -m commit2 -d 2000-01-02 foo exited 0 after *.?? seconds (glob)
313 1970-01-01 00:00:00.000 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> --debug log -r 0
313 1970-01-01 00:00:00.000 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> --debug log -r 0
314 1970-01-01 00:00:00.000 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> writing .hg/cache/tags2-visible with 0 tags
314 1970-01-01 00:00:00.000 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> writing .hg/cache/tags2-visible with 0 tags
315 1970-01-01 00:00:00.000 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> --debug log -r 0 exited 0 after *.?? seconds (glob)
315 1970-01-01 00:00:00.000 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> --debug log -r 0 exited 0 after *.?? seconds (glob)
316 1970-01-01 00:00:00.000 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> --debug log -r tip
316 1970-01-01 00:00:00.000 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> --debug log -r tip
317 1970-01-01 00:00:00.000 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> --debug log -r tip exited 0 after *.?? seconds (glob)
317 1970-01-01 00:00:00.000 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> --debug log -r tip exited 0 after *.?? seconds (glob)
318 1970-01-01 00:00:00.000 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> blackbox
318 1970-01-01 00:00:00.000 bob @45589e459b2edfbf3dbde7e01f611d2c1e7453d7 (5000)> blackbox
319
319
320 Skip rotation if the .hg is read-only
320 Skip rotation if the .hg is read-only
321
321
322 #if unix-permissions
322 #if unix-permissions
323 $ chmod -w .hg
323 $ chmod -w .hg
324 $ hg log -r. -T '{rev}\n' --config blackbox.maxsize=1 --debug
324 $ hg log -r. -T '{rev}\n' --config blackbox.maxsize=1 --debug
325 warning: cannot rename '$TESTTMP/blackboxtest3/.hg/blackbox.log.1' to '$TESTTMP/blackboxtest3/.hg/blackbox.log': $EACCES$
325 warning: cannot rename '$TESTTMP/blackboxtest3/.hg/blackbox.log.1' to '$TESTTMP/blackboxtest3/.hg/blackbox.log': $EACCES$
326 warning: cannot write to blackbox.log: $EACCES$
326 warning: cannot write to blackbox.log: $EACCES$
327 1
327 1
328 $ chmod +w .hg
328 $ chmod +w .hg
329 #endif
329 #endif
330
330
331 Test log recursion from dirty status check
331 Test log recursion from dirty status check
332
332
333 $ cat > ../r.py <<EOF
333 $ cat > ../r.py <<EOF
334 > from mercurial import context, error, extensions
334 > from mercurial import context, error, extensions
335 > x=[False]
335 > x=[False]
336 > def status(orig, *args, **opts):
336 > def status(orig, *args, **opts):
337 > args[0].repo().ui.log(b"broken", b"recursion?")
337 > args[0].repo().ui.log(b"broken", b"recursion?")
338 > return orig(*args, **opts)
338 > return orig(*args, **opts)
339 > def reposetup(ui, repo):
339 > def reposetup(ui, repo):
340 > extensions.wrapfunction(context.basectx, 'status', status)
340 > extensions.wrapfunction(context.basectx, 'status', status)
341 > EOF
341 > EOF
342 $ hg id --config extensions.x=../r.py --config blackbox.dirty=True
342 $ hg id --config extensions.x=../r.py --config blackbox.dirty=True
343 45589e459b2e tip
343 45589e459b2e tip
344
344
345 cleanup
345 cleanup
346 $ cd ..
346 $ cd ..
347
347
348 Test missing log directory, which shouldn't be created automatically
348 Test missing log directory, which shouldn't be created automatically
349
349
350 $ cat <<'EOF' > closeremove.py
350 $ cat <<'EOF' > closeremove.py
351 > def reposetup(ui, repo):
351 > def reposetup(ui, repo):
352 > class rmrepo(repo.__class__):
352 > class rmrepo(repo.__class__):
353 > def close(self):
353 > def close(self):
354 > super(rmrepo, self).close()
354 > super(rmrepo, self).close()
355 > self.ui.debug(b'removing %s\n' % self.vfs.base)
355 > self.ui.debug(b'removing %s\n' % self.vfs.base)
356 > self.vfs.rmtree()
356 > self.vfs.rmtree()
357 > repo.__class__ = rmrepo
357 > repo.__class__ = rmrepo
358 > EOF
358 > EOF
359
359
360 $ hg init gone
360 $ hg init gone
361 $ cd gone
361 $ cd gone
362 $ cat <<'EOF' > .hg/hgrc
362 $ cat <<'EOF' > .hg/hgrc
363 > [extensions]
363 > [extensions]
364 > closeremove = ../closeremove.py
364 > closeremove = ../closeremove.py
365 > EOF
365 > EOF
366 $ hg log --debug
366 $ hg log --debug
367 removing $TESTTMP/gone/.hg
367 removing $TESTTMP/gone/.hg
368 warning: cannot write to blackbox.log: $ENOENT$ (no-windows !)
368 warning: cannot write to blackbox.log: $ENOENT$ (no-windows !)
369 warning: cannot write to blackbox.log: $TESTTMP/gone/.hg/blackbox.log: $ENOTDIR$ (windows !)
369 warning: cannot write to blackbox.log: $TESTTMP/gone/.hg/blackbox.log: $ENOTDIR$ (windows !)
370 $ cd ..
370 $ cd ..
371
371
372 blackbox should disable itself if track is empty
372 blackbox should disable itself if track is empty
373
373
374 $ hg --config blackbox.track= init nothing_tracked
374 $ hg --config blackbox.track= init nothing_tracked
375 $ cd nothing_tracked
375 $ cd nothing_tracked
376 $ cat >> .hg/hgrc << EOF
376 $ cat >> .hg/hgrc << EOF
377 > [blackbox]
377 > [blackbox]
378 > track =
378 > track =
379 > EOF
379 > EOF
380 $ hg blackbox
380 $ hg blackbox
381 $ cd $TESTTMP
381 $ cd $TESTTMP
382
382
383 a '*' entry in blackbox.track is interpreted as log everything
383 a '*' entry in blackbox.track is interpreted as log everything
384
384
385 $ hg --config blackbox.track='*' \
385 $ hg --config blackbox.track='*' \
386 > --config blackbox.logsource=True \
386 > --config blackbox.logsource=True \
387 > init track_star
387 > init track_star
388 $ cd track_star
388 $ cd track_star
389 $ cat >> .hg/hgrc << EOF
389 $ cat >> .hg/hgrc << EOF
390 > [blackbox]
390 > [blackbox]
391 > logsource = True
391 > logsource = True
392 > track = *
392 > track = *
393 > EOF
393 > EOF
394 (only look for entries with specific logged sources, otherwise this test is
394 (only look for entries with specific logged sources, otherwise this test is
395 pretty brittle)
395 pretty brittle)
396 $ hg blackbox | egrep '\[command(finish)?\]'
396 $ hg blackbox | egrep '\[command(finish)?\]'
397 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000) [commandfinish]> --config *blackbox.track=* --config *blackbox.logsource=True* init track_star exited 0 after * seconds (glob)
397 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000) [commandfinish]> --config *blackbox.track=* --config *blackbox.logsource=True* init track_star exited 0 after * seconds (glob)
398 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000) [command]> blackbox
398 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000) [command]> blackbox
399 $ cd $TESTTMP
399 $ cd $TESTTMP
400
400
401 #if chg
401 #if chg
402
402
403 when using chg, blackbox.log should get rotated correctly
403 when using chg, blackbox.log should get rotated correctly
404
404
405 $ cat > $TESTTMP/noop.py << EOF
405 $ cat > $TESTTMP/noop.py << EOF
406 > import time
406 > import time
407 > from mercurial import registrar, scmutil
407 > from mercurial import registrar, scmutil
408 > cmdtable = {}
408 > cmdtable = {}
409 > command = registrar.command(cmdtable)
409 > command = registrar.command(cmdtable)
410 > @command(b'noop')
410 > @command(b'noop')
411 > def noop(ui, repo):
411 > def noop(ui, repo):
412 > pass
412 > pass
413 > EOF
413 > EOF
414
414
415 $ hg init blackbox-chg
415 $ hg init blackbox-chg
416 $ cd blackbox-chg
416 $ cd blackbox-chg
417
417
418 $ cat > .hg/hgrc << EOF
418 $ cat > .hg/hgrc << EOF
419 > [blackbox]
419 > [blackbox]
420 > maxsize = 500B
420 > maxsize = 500B
421 > [extensions]
421 > [extensions]
422 > # extension change forces chg to restart
422 > # extension change forces chg to restart
423 > noop=$TESTTMP/noop.py
423 > noop=$TESTTMP/noop.py
424 > EOF
424 > EOF
425
425
426 $ "$PYTHON" -c 'print("a" * 400)' > .hg/blackbox.log
426 $ "$PYTHON" -c 'print("a" * 400)' > .hg/blackbox.log
427 $ chg noop
427 $ chg noop
428 $ chg noop
428 $ chg noop
429 $ chg noop
429 $ chg noop
430 $ chg noop
430 $ chg noop
431 $ chg noop
431 $ chg noop
432
432
433 $ cat > showsize.py << 'EOF'
433 $ cat > showsize.py << 'EOF'
434 > import os
434 > import os
435 > import sys
435 > import sys
436 > limit = 500
436 > limit = 500
437 > for p in sys.argv[1:]:
437 > for p in sys.argv[1:]:
438 > size = os.stat(p).st_size
438 > size = os.stat(p).st_size
439 > if size >= limit:
439 > if size >= limit:
440 > desc = '>='
440 > desc = '>='
441 > else:
441 > else:
442 > desc = '<'
442 > desc = '<'
443 > print('%s: %s %d' % (p, desc, limit))
443 > print('%s: %s %d' % (p, desc, limit))
444 > EOF
444 > EOF
445
445
446 $ "$PYTHON" showsize.py .hg/blackbox*
446 $ "$PYTHON" showsize.py .hg/blackbox*
447 .hg/blackbox.log: < 500
447 .hg/blackbox.log: < 500
448 .hg/blackbox.log.1: >= 500
448 .hg/blackbox.log.1: >= 500
449 .hg/blackbox.log.2: >= 500
449 .hg/blackbox.log.2: >= 500
450
450
451 $ cd ..
451 $ cd ..
452
452
453 With chg, blackbox should not create the log file if the repo is gone
453 With chg, blackbox should not create the log file if the repo is gone
454
454
455 $ hg init repo1
455 $ hg init repo1
456 $ hg --config extensions.a=! -R repo1 log
456 $ hg --config extensions.a=! -R repo1 log
457 $ rm -rf $TESTTMP/repo1
457 $ rm -rf $TESTTMP/repo1
458 $ hg --config extensions.a=! init repo1
458 $ hg --config extensions.a=! init repo1
459
459
460 #endif
460 #endif
461
461
462 blackbox should work if repo.ui.log is not called (issue5518)
462 blackbox should work if repo.ui.log is not called (issue5518)
463
463
464 $ cat > $TESTTMP/raise.py << EOF
464 $ cat > $TESTTMP/raise.py << EOF
465 > from mercurial import registrar, scmutil
465 > from mercurial import registrar, scmutil
466 > cmdtable = {}
466 > cmdtable = {}
467 > command = registrar.command(cmdtable)
467 > command = registrar.command(cmdtable)
468 > @command(b'raise')
468 > @command(b'raise')
469 > def raisecmd(*args):
469 > def raisecmd(*args):
470 > raise RuntimeError('raise')
470 > raise RuntimeError('raise')
471 > EOF
471 > EOF
472
472
473 $ cat >> $HGRCPATH << EOF
473
474 $ hg init $TESTTMP/blackbox-exception-only --config blackbox.track=commandexception
475 $ cat >> $TESTTMP/blackbox-exception-only/.hg/hgrc << EOF
474 > [blackbox]
476 > [blackbox]
475 > track = commandexception
477 > track = commandexception
476 > [extensions]
478 > [extensions]
477 > raise=$TESTTMP/raise.py
479 > raise=$TESTTMP/raise.py
478 > EOF
480 > EOF
481 $ cd $TESTTMP/blackbox-exception-only
479
482
480 $ hg init $TESTTMP/blackbox-exception-only
481 $ cd $TESTTMP/blackbox-exception-only
482
483
483 #if chg
484 #if chg
484 (chg exits 255 because it fails to receive an exit code)
485 (chg exits 255 because it fails to receive an exit code)
485 $ hg raise 2>/dev/null
486 $ hg raise 2>/dev/null
486 [255]
487 [255]
487 #else
488 #else
488 (hg exits 1 because Python default exit code for uncaught exception is 1)
489 (hg exits 1 because Python default exit code for uncaught exception is 1)
489 $ hg raise 2>/dev/null
490 $ hg raise 2>/dev/null
490 [1]
491 [1]
491 #endif
492 #endif
492
493
493 $ head -1 .hg/blackbox.log
494 $ head -1 .hg/blackbox.log
494 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> ** Unknown exception encountered with possibly-broken third-party extension "mock" (version N/A)
495 1970-01-01 00:00:00.000 bob @0000000000000000000000000000000000000000 (5000)> ** Unknown exception encountered with possibly-broken third-party extension "mock" (version N/A)
495 $ tail -2 .hg/blackbox.log
496 $ tail -2 .hg/blackbox.log
496 RuntimeError: raise
497 RuntimeError: raise
497
498
499 $ cd ..
500
501 Check we did not broke `hg mv`
502 ------------------------------
503 (we did in 6.4rc)
504
505 basic setup
506
507 $ hg init blackbox-file-move
508 $ cd blackbox-file-move
509 $ echo foo > foo
510 $ hg add foo
511 $ hg commit -m 'foo'
512
513 copy a file
514
515 $ hg copy foo bar
516
517 move a file
518
519 $ hg mv foo goo
520
General Comments 0
You need to be logged in to leave comments. Login now