##// END OF EJS Templates
cleanup: some Yoda conditions, this patch removes...
Martin von Zweigbergk -
r40065:e2697acd default
parent child Browse files
Show More

The requested changes are too big and content was truncated. Show full diff

@@ -1,325 +1,325
1 1 #!/usr/bin/env python
2 2
3 3 # Measure the performance of a list of revsets against multiple revisions
4 4 # defined by parameter. Checkout one by one and run perfrevset with every
5 5 # revset in the list to benchmark its performance.
6 6 #
7 7 # You should run this from the root of your mercurial repository.
8 8 #
9 9 # call with --help for details
10 10
11 11 from __future__ import absolute_import, print_function
12 12 import math
13 13 import optparse # cannot use argparse, python 2.7 only
14 14 import os
15 15 import re
16 16 import subprocess
17 17 import sys
18 18
# Variants applied to every benchmarked revset: the bare revset ('plain')
# plus wrappers that stress different evaluation orders and laziness
# properties of the revset machinery.
DEFAULTVARIANTS = ['plain', 'min', 'max', 'first', 'last',
                   'reverse', 'reverse+first', 'reverse+last',
                   'sort', 'sort+first', 'sort+last']
22 22
def check_output(*args, **kwargs):
    """Run a command and return its stdout, raising on a non-zero exit.

    Arguments are forwarded to subprocess.Popen; stdout and stderr are
    captured (PIPE) unless the caller overrides them.
    """
    kwargs.setdefault('stderr', subprocess.PIPE)
    kwargs.setdefault('stdout', subprocess.PIPE)
    child = subprocess.Popen(*args, **kwargs)
    out, _err = child.communicate()
    if child.returncode:
        raise subprocess.CalledProcessError(child.returncode, ' '.join(args[0]))
    return out
31 31
def update(rev):
    """update the repo to a revision"""
    try:
        subprocess.check_call(['hg', 'update', '--quiet', '--check', str(rev)])
        # suppress output except for error/warning
        check_output(['make', 'local'], stderr=None)
    except subprocess.CalledProcessError as exc:
        print('update to revision %s failed, aborting' % rev, file=sys.stderr)
        sys.exit(exc.returncode)
41 41
42 42
def hg(cmd, repo=None):
    """run a mercurial command

    <cmd> is the list of command + argument,
    <repo> is an optional repository path to run this command in."""
    argv = ['./hg']
    if repo is not None:
        argv.extend(['-R', repo])
    # always enable the perf extension shipped next to this script
    argv.extend(['--config',
                 'extensions.perf=' + os.path.join(contribdir, 'perf.py')])
    argv.extend(cmd)
    return check_output(argv, stderr=subprocess.STDOUT)
55 55
def perf(revset, target=None, contexts=False):
    """run benchmark for this very revset"""
    cmd = ['perfrevset', revset]
    if contexts:
        cmd.append('--contexts')
    try:
        return parseoutput(hg(cmd, repo=target))
    except subprocess.CalledProcessError as exc:
        print('abort: cannot run revset benchmark: %s' % exc.cmd, file=sys.stderr)
        if getattr(exc, 'output', None) is None:  # no output before 2.7
            print('(no output)', file=sys.stderr)
        else:
            print(exc.output, file=sys.stderr)
        return None
71 71
72 72 outputre = re.compile(r'! wall (\d+.\d+) comb (\d+.\d+) user (\d+.\d+) '
73 73 'sys (\d+.\d+) \(best of (\d+)\)')
74 74
75 75 def parseoutput(output):
76 76 """parse a textual output into a dict
77 77
78 78 We cannot just use json because we want to compare with old
79 79 versions of Mercurial that may not support json output.
80 80 """
81 81 match = outputre.search(output)
82 82 if not match:
83 83 print('abort: invalid output:', file=sys.stderr)
84 84 print(output, file=sys.stderr)
85 85 sys.exit(1)
86 86 return {'comb': float(match.group(2)),
87 87 'count': int(match.group(5)),
88 88 'sys': float(match.group(3)),
89 89 'user': float(match.group(4)),
90 90 'wall': float(match.group(1)),
91 91 }
92 92
def printrevision(rev):
    """print data about a revision"""
    sys.stdout.write("Revision ")
    sys.stdout.flush()
    # let 'hg log' itself finish the line: tags, rev:hash and summary
    template = ('{if(tags, " ({tags})")} '
                '{rev}:{node|short}: {desc|firstline}\n')
    subprocess.check_call(['hg', 'log', '--rev', str(rev),
                           '--template', template])
100 100
def idxwidth(nbidx):
    """return the max width of number used for index

    This is similar to log10(nbidx), but we use custom code here
    because we start with zero and we'd rather not deal with all the
    extra rounding business that log10 would imply.

    nbidx is the number of indexes; indexes themselves start at 0, so the
    widest printed index is nbidx - 1.
    """
    nbidx -= 1  # starts at 0
    if nbidx <= 0:
        # covers nbidx in (0, 1).  The original loop never terminated for
        # negative values: floor division keeps -1 at -1 forever.
        return 1
    return len(str(nbidx))
116 116
def getfactor(main, other, field, sensitivity=0.05):
    """return the relative factor between values for 'field' in main and other

    Return None if the factor is insignificant (less than <sensitivity>
    variation)."""
    if main is None:
        # no baseline to compare against: treat as unchanged
        factor = 1
    else:
        factor = other[field] / main[field]
    lower = 1 - sensitivity
    upper = 1 + sensitivity
    if lower < factor < upper:
        return None
    return factor
129 129
def formatfactor(factor):
    """format a factor into a 4 char string

     22%
    156%
    x2.4
     x23
    x789
    x1e4
    x5x7

    """
    if factor is None:
        return '    '
    elif factor < 2:
        return '%3i%%' % (factor * 100)
    elif factor < 10:
        return 'x%3.1f' % factor
    elif factor < 1000:
        return '%4s' % ('x%i' % factor)
    else:
        # "xMxE" reads as M * 10**E (e.g. 'x5x7' is a 5e7 speedup).
        # The previous code used natural log for the exponent and divided
        # by zero in the reduction loop (ZeroDivisionError on any factor
        # >= 1000); use base-10 throughout instead.
        order = int(math.log10(factor))
        while factor >= 10:
            factor //= 10
        return 'x%ix%i' % (factor, order)
155 155
def formattiming(value):
    """format a value to strictly 8 char, dropping some precision if needed"""
    if value >= 10**7:
        # value is HUGE very unlikely to happen (4+ month run)
        return '%i' % value
    return ('%.6f' % value)[:8]
163 163
# sentinel distinguishing "no reference passed" from "reference is None"
_marker = object()

def printresult(variants, idx, data, maxidx, verbose=False, reference=_marker):
    """print a line of result to stdout"""
    mask = '%%0%ii) %%s' % idxwidth(maxidx)

    cells = []
    for var in variants:
        entry = data[var]
        if entry is None:
            # benchmark failed for this variant: fill its columns
            cells.append('error   ')
            cells.append(' ' * 4)
            continue
        cells.append(formattiming(entry['wall']))
        if reference is not _marker:
            factor = None
            if reference is not None:
                factor = getfactor(reference[var], entry, 'wall')
            cells.append(formatfactor(factor))
        if verbose:
            cells.append(formattiming(entry['comb']))
            cells.append(formattiming(entry['user']))
            cells.append(formattiming(entry['sys']))
            cells.append('%6d' % entry['count'])
    print(mask % (idx, ' '.join(cells)))
187 187
def printheader(variants, maxidx, verbose=False, relative=False):
    """print the column headers matching printresult's layout"""
    columns = [' ' * (idxwidth(maxidx) + 1)]
    for var in variants:
        if not var:
            var = 'iter'
        if len(var) > 8:
            # squeeze long variant names into the 8-char column
            var = var[:3] + '..' + var[-3:]
        columns.append('%-8s' % var)
        if relative:
            columns.append('    ')
        if verbose:
            columns.append('%-8s' % 'comb')
            columns.append('%-8s' % 'user')
            columns.append('%-8s' % 'sys')
            columns.append('%6s' % 'count')
    print(' '.join(columns))
204 204
def getrevs(spec):
    """get the list of rev matched by a revset"""
    try:
        out = check_output(['hg', 'log', '--template={rev}\n', '--rev', spec])
    except subprocess.CalledProcessError as exc:
        print("abort, can't get revision from %s" % spec, file=sys.stderr)
        sys.exit(exc.returncode)
    return [rev for rev in out.split() if rev]
213 213
214 214
def applyvariants(revset, variant):
    """wrap <revset> in each function named by <variant>

    'plain' means no wrapping; otherwise the '+'-separated names are
    applied innermost-first, e.g. 'reverse+first' -> first(reverse(rs)).
    """
    if variant == 'plain':
        return revset
    wrapped = revset
    for name in variant.split('+'):
        wrapped = '%s(%s)' % (name, wrapped)
    return wrapped
221 221
# ---------------------------------------------------------------------------
# Command line driver: parse options, read the revsets to benchmark, then for
# each requested revision check it out, rebuild, and time every revset
# variant.  Results are printed per revision first, then regrouped per revset
# so improvements/regressions between revisions are easy to spot.
# ---------------------------------------------------------------------------

helptext="""This script will run multiple variants of provided revsets using
different revisions in your mercurial repository. After the benchmark are run
summary output is provided. Use it to demonstrate speed improvements or pin
point regressions. Revsets to run are specified in a file (or from stdin), one
revsets per line. Line starting with '#' will be ignored, allowing insertion of
comments."""
parser = optparse.OptionParser(usage="usage: %prog [options] <revs>",
                               description=helptext)
parser.add_option("-f", "--file",
                  help="read revset from FILE (stdin if omitted)",
                  metavar="FILE")
parser.add_option("-R", "--repo",
                  help="run benchmark on REPO", metavar="REPO")

parser.add_option("-v", "--verbose",
                  action='store_true',
                  help="display all timing data (not just best total time)")

parser.add_option("", "--variants",
                  default=','.join(DEFAULTVARIANTS),
                  help="comma separated list of variant to test "
                       "(eg: plain,min,sorted) (plain = no modification)")
parser.add_option('', '--contexts',
                  action='store_true',
                  help='obtain changectx from results instead of integer revs')

(options, args) = parser.parse_args()

# at least one revision (or revset) argument to benchmark against is required
if not args:
    parser.print_help()
    sys.exit(255)

# the directory where both this script and the perf.py extension live.
contribdir = os.path.dirname(__file__)

# revsets to benchmark come from --file, or from stdin when omitted
revsetsfile = sys.stdin
if options.file:
    revsetsfile = open(options.file)

# drop comment lines ('#') and blank lines
revsets = [l.strip() for l in revsetsfile if not l.startswith('#')]
revsets = [l for l in revsets if l]

print("Revsets to benchmark")
print("----------------------------")

for idx, rset in enumerate(revsets):
    print("%i) %s" % (idx, rset))

print("----------------------------")
print()

# expand the command line arguments into the concrete list of revisions
revs = []
for a in args:
    revs.extend(getrevs(a))

variants = options.variants.split(',')

# run the benchmarks: one result list per revision, each holding one
# {variant: timing-dict-or-None} mapping per revset
results = []
for r in revs:
    print("----------------------------")
    printrevision(r)
    print("----------------------------")
    update(r)  # checkout the revision and rebuild ('make local')
    res = []
    results.append(res)
    printheader(variants, len(revsets), verbose=options.verbose)
    for idx, rset in enumerate(revsets):
        varres = {}
        for var in variants:
            varrset = applyvariants(rset, var)
            data = perf(varrset, target=options.repo, contexts=options.contexts)
            varres[var] = data
        res.append(varres)
        printresult(variants, idx, varres, len(revsets),
                    verbose=options.verbose)
        # keep output live while long benchmarks run
        sys.stdout.flush()
    print("----------------------------")


print("""

Result by revset
================
""")

print('Revision:')
for idx, rev in enumerate(revs):
    sys.stdout.write('%i) ' % idx)
    sys.stdout.flush()
    printrevision(rev)

print()
print()

# second pass over the collected data: group by revset and print each
# revision's timings relative to the previous revision (the reference)
for ridx, rset in enumerate(revsets):

    print("revset #%i: %s" % (ridx, rset))
    printheader(variants, len(results), verbose=options.verbose, relative=True)
    ref = None
    for idx, data in enumerate(results):
        printresult(variants, idx, data[ridx], len(results),
                    verbose=options.verbose, reference=ref)
        ref = data[ridx]
    print()
@@ -1,1658 +1,1658
1 1 # histedit.py - interactive history editing for mercurial
2 2 #
3 3 # Copyright 2009 Augie Fackler <raf@durin42.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 """interactive history editing
8 8
9 9 With this extension installed, Mercurial gains one new command: histedit. Usage
10 10 is as follows, assuming the following history::
11 11
12 12 @ 3[tip] 7c2fd3b9020c 2009-04-27 18:04 -0500 durin42
13 13 | Add delta
14 14 |
15 15 o 2 030b686bedc4 2009-04-27 18:04 -0500 durin42
16 16 | Add gamma
17 17 |
18 18 o 1 c561b4e977df 2009-04-27 18:04 -0500 durin42
19 19 | Add beta
20 20 |
21 21 o 0 d8d2fcd0e319 2009-04-27 18:04 -0500 durin42
22 22 Add alpha
23 23
24 24 If you were to run ``hg histedit c561b4e977df``, you would see the following
25 25 file open in your editor::
26 26
27 27 pick c561b4e977df Add beta
28 28 pick 030b686bedc4 Add gamma
29 29 pick 7c2fd3b9020c Add delta
30 30
31 31 # Edit history between c561b4e977df and 7c2fd3b9020c
32 32 #
33 33 # Commits are listed from least to most recent
34 34 #
35 35 # Commands:
36 36 # p, pick = use commit
37 37 # e, edit = use commit, but stop for amending
38 38 # f, fold = use commit, but combine it with the one above
39 39 # r, roll = like fold, but discard this commit's description and date
40 40 # d, drop = remove commit from history
41 41 # m, mess = edit commit message without changing commit content
42 42 # b, base = checkout changeset and apply further changesets from there
43 43 #
44 44
45 45 In this file, lines beginning with ``#`` are ignored. You must specify a rule
46 46 for each revision in your history. For example, if you had meant to add gamma
47 47 before beta, and then wanted to add delta in the same revision as beta, you
48 48 would reorganize the file to look like this::
49 49
50 50 pick 030b686bedc4 Add gamma
51 51 pick c561b4e977df Add beta
52 52 fold 7c2fd3b9020c Add delta
53 53
54 54 # Edit history between c561b4e977df and 7c2fd3b9020c
55 55 #
56 56 # Commits are listed from least to most recent
57 57 #
58 58 # Commands:
59 59 # p, pick = use commit
60 60 # e, edit = use commit, but stop for amending
61 61 # f, fold = use commit, but combine it with the one above
62 62 # r, roll = like fold, but discard this commit's description and date
63 63 # d, drop = remove commit from history
64 64 # m, mess = edit commit message without changing commit content
65 65 # b, base = checkout changeset and apply further changesets from there
66 66 #
67 67
68 68 At which point you close the editor and ``histedit`` starts working. When you
69 69 specify a ``fold`` operation, ``histedit`` will open an editor when it folds
70 70 those revisions together, offering you a chance to clean up the commit message::
71 71
72 72 Add beta
73 73 ***
74 74 Add delta
75 75
76 76 Edit the commit message to your liking, then close the editor. The date used
77 77 for the commit will be the later of the two commits' dates. For this example,
78 78 let's assume that the commit message was changed to ``Add beta and delta.``
79 79 After histedit has run and had a chance to remove any old or temporary
80 80 revisions it needed, the history looks like this::
81 81
82 82 @ 2[tip] 989b4d060121 2009-04-27 18:04 -0500 durin42
83 83 | Add beta and delta.
84 84 |
85 85 o 1 081603921c3f 2009-04-27 18:04 -0500 durin42
86 86 | Add gamma
87 87 |
88 88 o 0 d8d2fcd0e319 2009-04-27 18:04 -0500 durin42
89 89 Add alpha
90 90
91 91 Note that ``histedit`` does *not* remove any revisions (even its own temporary
92 92 ones) until after it has completed all the editing operations, so it will
93 93 probably perform several strip operations when it's done. For the above example,
94 94 it had to run strip twice. Strip can be slow depending on a variety of factors,
95 95 so you might need to be a little patient. You can choose to keep the original
96 96 revisions by passing the ``--keep`` flag.
97 97
98 98 The ``edit`` operation will drop you back to a command prompt,
99 99 allowing you to edit files freely, or even use ``hg record`` to commit
100 100 some changes as a separate commit. When you're done, any remaining
101 101 uncommitted changes will be committed as well. When done, run ``hg
102 102 histedit --continue`` to finish this step. If there are uncommitted
103 103 changes, you'll be prompted for a new commit message, but the default
104 104 commit message will be the original message for the ``edit`` ed
105 105 revision, and the date of the original commit will be preserved.
106 106
107 107 The ``message`` operation will give you a chance to revise a commit
108 108 message without changing the contents. It's a shortcut for doing
109 109 ``edit`` immediately followed by `hg histedit --continue``.
110 110
111 111 If ``histedit`` encounters a conflict when moving a revision (while
112 112 handling ``pick`` or ``fold``), it'll stop in a similar manner to
113 113 ``edit`` with the difference that it won't prompt you for a commit
114 114 message when done. If you decide at this point that you don't like how
115 115 much work it will be to rearrange history, or that you made a mistake,
116 116 you can use ``hg histedit --abort`` to abandon the new changes you
117 117 have made and return to the state before you attempted to edit your
118 118 history.
119 119
120 120 If we clone the histedit-ed example repository above and add four more
121 121 changes, such that we have the following history::
122 122
123 123 @ 6[tip] 038383181893 2009-04-27 18:04 -0500 stefan
124 124 | Add theta
125 125 |
126 126 o 5 140988835471 2009-04-27 18:04 -0500 stefan
127 127 | Add eta
128 128 |
129 129 o 4 122930637314 2009-04-27 18:04 -0500 stefan
130 130 | Add zeta
131 131 |
132 132 o 3 836302820282 2009-04-27 18:04 -0500 stefan
133 133 | Add epsilon
134 134 |
135 135 o 2 989b4d060121 2009-04-27 18:04 -0500 durin42
136 136 | Add beta and delta.
137 137 |
138 138 o 1 081603921c3f 2009-04-27 18:04 -0500 durin42
139 139 | Add gamma
140 140 |
141 141 o 0 d8d2fcd0e319 2009-04-27 18:04 -0500 durin42
142 142 Add alpha
143 143
144 144 If you run ``hg histedit --outgoing`` on the clone then it is the same
145 145 as running ``hg histedit 836302820282``. If you plan to push to a
146 146 repository that Mercurial does not detect to be related to the source
147 147 repo, you can add a ``--force`` option.
148 148
149 149 Config
150 150 ------
151 151
152 152 Histedit rule lines are truncated to 80 characters by default. You
153 153 can customize this behavior by setting a different length in your
154 154 configuration file::
155 155
156 156 [histedit]
157 157 linelen = 120 # truncate rule lines at 120 characters
158 158
159 159 ``hg histedit`` attempts to automatically choose an appropriate base
160 160 revision to use. To change which base revision is used, define a
161 161 revset in your configuration file::
162 162
163 163 [histedit]
164 164 defaultrev = only(.) & draft()
165 165
166 166 By default each edited revision needs to be present in histedit commands.
167 167 To remove revision you need to use ``drop`` operation. You can configure
168 168 the drop to be implicit for missing commits by adding::
169 169
170 170 [histedit]
171 171 dropmissing = True
172 172
173 173 By default, histedit will close the transaction after each action. For
174 174 performance purposes, you can configure histedit to use a single transaction
175 175 across the entire histedit. WARNING: This setting introduces a significant risk
176 176 of losing the work you've done in a histedit if the histedit aborts
177 177 unexpectedly::
178 178
179 179 [histedit]
180 180 singletransaction = True
181 181
182 182 """
183 183
184 184 from __future__ import absolute_import
185 185
186 186 import os
187 187
188 188 from mercurial.i18n import _
189 189 from mercurial import (
190 190 bundle2,
191 191 cmdutil,
192 192 context,
193 193 copies,
194 194 destutil,
195 195 discovery,
196 196 error,
197 197 exchange,
198 198 extensions,
199 199 hg,
200 200 lock,
201 201 merge as mergemod,
202 202 mergeutil,
203 203 node,
204 204 obsolete,
205 205 pycompat,
206 206 registrar,
207 207 repair,
208 208 scmutil,
209 209 state as statemod,
210 210 util,
211 211 )
212 212 from mercurial.utils import (
213 213 stringutil,
214 214 )
215 215
# convenience aliases for helpers re-exported by mercurial.util / lock
pickle = util.pickle
release = lock.release
# command table populated by the @command decorator used below
cmdtable = {}
command = registrar.command(cmdtable)

# declare the config options this extension understands
configtable = {}
configitem = registrar.configitem(configtable)
configitem('experimental', 'histedit.autoverb',
           default=False,
)
configitem('histedit', 'defaultrev',
           default=None,
)
configitem('histedit', 'dropmissing',
           default=False,
)
configitem('histedit', 'linelen',
           default=80,
)
configitem('histedit', 'singletransaction',
           default=False,
)

# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = 'ships-with-hg-core'

# registries of the available histedit actions, filled in by decorators
# elsewhere in this module; the primary/secondary/tertiary split controls
# ordering in the generated editor comment
actiontable = {}
primaryactions = set()
secondaryactions = set()
tertiaryactions = set()
internalactions = set()
250 250
def geteditcomment(ui, first, last):
    """ construct the editor comment
    The comment includes::
     - an intro
     - sorted primary commands
     - sorted short commands
     - sorted long commands
     - additional hints

    Commands are only included once.
    """
    intro = _("""Edit history between %s and %s

Commits are listed from least to most recent

You can reorder changesets by reordering the lines

Commands:
""")
    actions = []
    def addverb(v):
        # render one action as "verb = first line of its help message"
        a = actiontable[v]
        lines = a.message.split("\n")
        if len(a.verbs):
            # list the short verb first: "p, pick = ..."
            v = ', '.join(sorted(a.verbs, key=lambda v: len(v)))
        actions.append(" %s = %s" % (v, lines[0]))
        # NOTE(review): the ' %s' placeholder below is never %-formatted, so
        # continuation lines of a multi-line message come out literally as
        # " %s" -- looks like a latent bug; confirm intent.
        actions.extend([' %s' for l in lines[1:]])

    for v in (
         sorted(primaryactions) +
         sorted(secondaryactions) +
         sorted(tertiaryactions)
        ):
        addverb(v)
    actions.append('')

    hints = []
    if ui.configbool('histedit', 'dropmissing'):
        hints.append("Deleting a changeset from the list "
                     "will DISCARD it from the edited history!")

    lines = (intro % (first, last)).split('\n') + actions + hints

    # every line becomes a '# ' comment; empty lines become a bare '#'
    return ''.join(['# %s\n' % l if l else '#\n' for l in lines])
295 295
class histeditstate(object):
    """In-memory representation of a histedit run, persisted on disk.

    The state lives in the repo's 'histedit-state' file so that an
    interrupted histedit can later be continued or aborted.
    """
    def __init__(self, repo, parentctxnode=None, actions=None, keep=None,
                 topmost=None, replacements=None, lock=None, wlock=None):
        self.repo = repo
        self.actions = actions
        self.keep = keep
        self.topmost = topmost
        self.parentctxnode = parentctxnode
        self.lock = lock
        self.wlock = wlock
        self.backupfile = None
        self.stateobj = statemod.cmdstate(repo, 'histedit-state')
        # use a fresh list per instance; None means "no replacements yet"
        if replacements is None:
            self.replacements = []
        else:
            self.replacements = replacements

    def read(self):
        """Load histedit state from disk and set fields appropriately."""
        if not self.stateobj.exists():
            # no state file: --continue/--abort without an interrupted run
            cmdutil.wrongtooltocontinue(self.repo, _('histedit'))

        data = self._read()

        self.parentctxnode = data['parentctxnode']
        actions = parserules(data['rules'], self)
        self.actions = actions
        self.keep = data['keep']
        self.topmost = data['topmost']
        self.replacements = data['replacements']
        self.backupfile = data['backupfile']

    def _read(self):
        """Read the state file and return its contents as a dict.

        Understands both the current 'v1' plain-text format (see _write)
        and the legacy pickle format, which lacked the backupfile entry.
        """
        fp = self.repo.vfs.read('histedit-state')
        if fp.startswith('v1\n'):
            data = self._load()
            parentctxnode, rules, keep, topmost, replacements, backupfile = data
        else:
            # legacy format: a pickled 5-tuple without backupfile
            data = pickle.loads(fp)
            parentctxnode, rules, keep, topmost, replacements = data
            backupfile = None
        # flatten the (verb, rest) pairs back into "verb rest" lines
        rules = "\n".join(["%s %s" % (verb, rest) for [verb, rest] in rules])

        return {'parentctxnode': parentctxnode, "rules": rules, "keep": keep,
                "topmost": topmost, "replacements": replacements,
                "backupfile": backupfile}

    def write(self, tr=None):
        """Persist the state, through transaction <tr> when one is given."""
        if tr:
            # let the transaction write (and roll back) the file for us
            tr.addfilegenerator('histedit-state', ('histedit-state',),
                                self._write, location='plain')
        else:
            with self.repo.vfs("histedit-state", "w") as f:
                self._write(f)

    def _write(self, fp):
        """Serialize the state to <fp> in the 'v1' plain-text format.

        Layout: version marker, parent node, topmost node, keep flag,
        action count followed by the actions, replacement count followed
        by the replacements, then the backup file name (may be empty).
        """
        fp.write('v1\n')
        fp.write('%s\n' % node.hex(self.parentctxnode))
        fp.write('%s\n' % node.hex(self.topmost))
        fp.write('%s\n' % ('True' if self.keep else 'False'))
        fp.write('%d\n' % len(self.actions))
        for action in self.actions:
            fp.write('%s\n' % action.tostate())
        fp.write('%d\n' % len(self.replacements))
        for replacement in self.replacements:
            # one line per replacement: replaced hash followed by the
            # concatenated hashes of its successors
            fp.write('%s%s\n' % (node.hex(replacement[0]), ''.join(node.hex(r)
                for r in replacement[1])))
        backupfile = self.backupfile
        if not backupfile:
            backupfile = ''
        fp.write('%s\n' % backupfile)

    def _load(self):
        """Parse the 'v1' state file written by _write.

        Returns the tuple (parentctxnode, rules, keep, topmost,
        replacements, backupfile).
        """
        fp = self.repo.vfs('histedit-state', 'r')
        lines = [l[:-1] for l in fp.readlines()]

        index = 0
        lines[index] # version number
        index += 1

        parentctxnode = node.bin(lines[index])
        index += 1

        topmost = node.bin(lines[index])
        index += 1

        keep = lines[index] == 'True'
        index += 1

        # Rules
        rules = []
        rulelen = int(lines[index])
        index += 1
        for i in pycompat.xrange(rulelen):
            # each rule takes two lines: the action verb, then its argument
            ruleaction = lines[index]
            index += 1
            rule = lines[index]
            index += 1
            rules.append((ruleaction, rule))

        # Replacements
        replacements = []
        replacementlen = int(lines[index])
        index += 1
        for i in pycompat.xrange(replacementlen):
            replacement = lines[index]
            # first 40 hex chars are the replaced node; the remainder is the
            # concatenation of its successors, 40 chars each
            original = node.bin(replacement[:40])
            succ = [node.bin(replacement[i:i + 40]) for i in
                    range(40, len(replacement), 40)]
            replacements.append((original, succ))
            index += 1

        backupfile = lines[index]
        index += 1

        fp.close()

        return parentctxnode, rules, keep, topmost, replacements, backupfile

    def clear(self):
        """Remove the on-disk state file if a histedit is in progress."""
        if self.inprogress():
            self.repo.vfs.unlink('histedit-state')

    def inprogress(self):
        """Report whether an interrupted histedit exists on disk."""
        return self.repo.vfs.exists('histedit-state')
421 421
422 422
class histeditaction(object):
    """Base class for histedit commands; binds one rule to one changeset.

    Subclasses provide ``verb`` and customize the run/continue hooks.
    """
    def __init__(self, state, node):
        self.state = state
        self.repo = state.repo
        self.node = node

    @classmethod
    def fromrule(cls, state, rule):
        """Parses the given rule, returning an instance of the histeditaction.
        """
        ruleid = rule.strip().split(' ', 1)[0]
        # ruleid can be anything from rev numbers, hashes, "bookmarks" etc
        # Check for validation of rule ids and get the rulehash
        try:
            rev = node.bin(ruleid)
        except TypeError:
            # not a full binary nodeid: resolve via the revset machinery
            try:
                _ctx = scmutil.revsingle(state.repo, ruleid)
                rulehash = _ctx.hex()
                rev = node.bin(rulehash)
            except error.RepoLookupError:
                raise error.ParseError(_("invalid changeset %s") % ruleid)
        return cls(state, rev)

    def verify(self, prev, expected, seen):
        """ Verifies semantic correctness of the rule"""
        repo = self.repo
        ha = node.hex(self.node)
        # expand a possibly-abbreviated node to the full binary nodeid
        self.node = scmutil.resolvehexnodeidprefix(repo, ha)
        if self.node is None:
            raise error.ParseError(_('unknown changeset %s listed') % ha[:12])
        self._verifynodeconstraints(prev, expected, seen)

    def _verifynodeconstraints(self, prev, expected, seen):
        # by default command need a node in the edited list
        if self.node not in expected:
            raise error.ParseError(_('%s "%s" changeset was not a candidate')
                                   % (self.verb, node.short(self.node)),
                                   hint=_('only use listed changesets'))
        # and only one command per node
        if self.node in seen:
            raise error.ParseError(_('duplicated command for changeset %s') %
                                   node.short(self.node))

    def torule(self):
        """build a histedit rule line for an action

        by default lines are in the form:
        <hash> <rev> <summary>
        """
        ctx = self.repo[self.node]
        summary = _getsummary(ctx)
        line = '%s %s %d %s' % (self.verb, ctx, ctx.rev(), summary)
        # trim to 75 columns by default so it's not stupidly wide in my editor
        # (the 5 more are left for verb)
        maxlen = self.repo.ui.configint('histedit', 'linelen')
        maxlen = max(maxlen, 22) # avoid truncating hash
        return stringutil.ellipsis(line, maxlen)

    def tostate(self):
        """Print an action in format used by histedit state files
        (the first line is a verb, the remainder is the second)
        """
        return "%s\n%s" % (self.verb, node.hex(self.node))

    def run(self):
        """Runs the action. The default behavior is simply apply the action's
        rulectx onto the current parentctx."""
        self.applychange()
        self.continuedirty()
        return self.continueclean()

    def applychange(self):
        """Applies the changes from this action's rulectx onto the current
        parentctx, but does not commit them."""
        repo = self.repo
        rulectx = repo[self.node]
        # buffer ui output; it is only surfaced if a conflict needs fixing
        repo.ui.pushbuffer(error=True, labeled=True)
        hg.update(repo, self.state.parentctxnode, quietempty=True)
        stats = applychanges(repo.ui, repo, rulectx, {})
        repo.dirstate.setbranch(rulectx.branch())
        if stats.unresolvedcount:
            # merge conflicts: show the buffered output and stop so the
            # user can resolve and run --continue
            buf = repo.ui.popbuffer()
            repo.ui.write(buf)
            raise error.InterventionRequired(
                _('Fix up the change (%s %s)') %
                (self.verb, node.short(self.node)),
                hint=_('hg histedit --continue to resume'))
        else:
            repo.ui.popbuffer()

    def continuedirty(self):
        """Continues the action when changes have been applied to the working
        copy. The default behavior is to commit the dirty changes."""
        repo = self.repo
        rulectx = repo[self.node]

        editor = self.commiteditor()
        commit = commitfuncfor(repo, rulectx)

        # reuse the original changeset's metadata for the new commit
        commit(text=rulectx.description(), user=rulectx.user(),
               date=rulectx.date(), extra=rulectx.extra(), editor=editor)

    def commiteditor(self):
        """The editor to be used to edit the commit message."""
        return False

    def continueclean(self):
        """Continues the action when the working copy is clean. The default
        behavior is to accept the current commit as the new version of the
        rulectx."""
        ctx = self.repo['.']
        if ctx.node() == self.state.parentctxnode:
            # nothing was committed: the changeset produced no changes
            self.repo.ui.warn(_('%s: skipping changeset (no changes)\n') %
                              node.short(self.node))
            return ctx, [(self.node, tuple())]
        if ctx.node() == self.node:
            # Nothing changed
            return ctx, []
        return ctx, [(self.node, (ctx.node(),))]
543 543
def commitfuncfor(repo, src):
    """Build a commit function for the replacement of <src>

    This function ensure we apply the same treatment to all changesets.

    - Add a 'histedit_source' entry in extra.

    Note that fold has its own separated logic because its handling is a bit
    different and not easily factored out of the fold method.
    """
    minphase = src.phase()
    def commitfunc(**kwargs):
        # force the new commit's phase to match the source changeset's
        phaseoverride = {('phases', 'new-commit'): minphase}
        with repo.ui.configoverride(phaseoverride, 'histedit'):
            # record where this commit came from, without mutating the
            # caller's extra dict
            newextra = kwargs.get(r'extra', {}).copy()
            newextra['histedit_source'] = src.hex()
            kwargs[r'extra'] = newextra
            return repo.commit(**kwargs)
    return commitfunc
563 563
def applychanges(ui, repo, ctx, opts):
    """Merge changeset from ctx (only) in the current working directory"""
    wcpar = repo.dirstate.parents()[0]
    if ctx.p1().node() == wcpar:
        # edits are "in place" we do not need to make any merge,
        # just applies changes on parent for editing
        cmdutil.revert(ui, repo, ctx, (wcpar, node.nullid), all=True)
        stats = mergemod.updateresult(0, 0, 0, 0)
    else:
        try:
            # ui.forcemerge is an internal variable, do not document
            repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                              'histedit')
            stats = mergemod.graft(repo, ctx, ctx.p1(), ['local', 'histedit'])
        finally:
            # always restore the config, even when graft raises
            repo.ui.setconfig('ui', 'forcemerge', '', 'histedit')
    return stats
581 581
def collapse(repo, firstctx, lastctx, commitopts, skipprompt=False):
    """collapse the set of revisions from first to last as new one.

    Expected commit options are:
    - message
    - date
    - username
    Commit message is edited in all cases.

    This function works in memory."""
    ctxs = list(repo.set('%d::%d', firstctx.rev(), lastctx.rev()))
    if not ctxs:
        return None
    for c in ctxs:
        if not c.mutable():
            raise error.ParseError(
                _("cannot fold into public change %s") % node.short(c.node()))
    base = firstctx.parents()[0]

    # commit a new version of the old changeset, including the update
    # collect all files which might be affected
    files = set()
    for ctx in ctxs:
        files.update(ctx.files())

    # Recompute copies (avoid recording a -> b -> a)
    copied = copies.pathcopies(base, lastctx)

    # prune files which were reverted by the updates
    files = [f for f in files if not cmdutil.samefile(f, lastctx, base)]
    # commit version of these files as defined by head
    headmf = lastctx.manifest()
    def filectxfn(repo, ctx, path):
        # files surviving in the final manifest get lastctx's content;
        # returning None drops the file from the folded commit
        if path in headmf:
            fctx = lastctx[path]
            flags = fctx.flags()
            mctx = context.memfilectx(repo, ctx,
                                      fctx.path(), fctx.data(),
                                      islink='l' in flags,
                                      isexec='x' in flags,
                                      copied=copied.get(path))
            return mctx
        return None

    if commitopts.get('message'):
        message = commitopts['message']
    else:
        message = firstctx.description()
    user = commitopts.get('user')
    date = commitopts.get('date')
    extra = commitopts.get('extra')

    # the folded commit keeps firstctx's parents
    parents = (firstctx.p1().node(), firstctx.p2().node())
    editor = None
    if not skipprompt:
        editor = cmdutil.getcommiteditor(edit=True, editform='histedit.fold')
    new = context.memctx(repo,
                         parents=parents,
                         text=message,
                         files=files,
                         filectxfn=filectxfn,
                         user=user,
                         date=date,
                         extra=extra,
                         editor=editor)
    return repo.commitctx(new)
648 648
def _isdirtywc(repo):
    # missing=True: locally-deleted tracked files count as dirty too
    return repo[None].dirty(missing=True)
651 651
def abortdirty():
    """Abort because the working copy has uncommitted changes."""
    hint = _('amend, commit, or revert them and run histedit '
             '--continue, or abort with histedit --abort')
    raise error.Abort(_('working copy has pending changes'), hint=hint)
656 656
def action(verbs, message, priority=False, internal=False):
    """Class decorator registering a histedit action.

    <verbs> is the list of accepted rule verbs (primary name first) and
    <message> the help text shown in the rule editor. The primary verb is
    filed into exactly one of the priority/internal/secondary/tertiary
    buckets, and every verb is mapped to the class in ``actiontable``.
    """
    def wrap(cls):
        # an action cannot be both priority and internal
        assert not priority or not internal
        verb = verbs[0]
        if priority:
            primaryactions.add(verb)
        elif internal:
            internalactions.add(verb)
        elif len(verbs) > 1:
            secondaryactions.add(verb)
        else:
            tertiaryactions.add(verb)

        cls.verb = verb
        cls.verbs = verbs
        cls.message = message
        for name in verbs:
            actiontable[name] = cls
        return cls
    return wrap
677 677
@action(['pick', 'p'],
        _('use commit'),
        priority=True)
class pick(histeditaction):
    def run(self):
        # fast path: when the changeset already sits on the expected parent
        # it can be reused as-is instead of being recreated
        rulectx = self.repo[self.node]
        if rulectx.parents()[0].node() == self.state.parentctxnode:
            self.repo.ui.debug('node %s unchanged\n' % node.short(self.node))
            return rulectx, []

        return super(pick, self).run()
689 689
@action(['edit', 'e'],
        _('use commit, but stop for amending'),
        priority=True)
class edit(histeditaction):
    def run(self):
        # check out the parent, apply the changeset's changes on top of it,
        # then hand control back to the user so they can amend
        repo = self.repo
        rulectx = repo[self.node]
        hg.update(repo, self.state.parentctxnode, quietempty=True)
        applychanges(repo.ui, repo, rulectx, {})
        raise error.InterventionRequired(
            _('Editing (%s), you may commit or record as needed now.')
            % node.short(self.node),
            hint=_('hg histedit --continue to resume'))

    def commiteditor(self):
        # always fire the editor so the user can adjust the amended message
        return cmdutil.getcommiteditor(edit=True, editform='histedit.edit')
706 706
@action(['fold', 'f'],
        _('use commit, but combine it with the one above'))
class fold(histeditaction):
    def verify(self, prev, expected, seen):
        """ Verifies semantic correctness of the fold rule"""
        super(fold, self).verify(prev, expected, seen)
        repo = self.repo
        if not prev:
            # first rule: we would fold into the edited set's parent
            c = repo[self.node].parents()[0]
        elif prev.verb not in ('pick', 'base'):
            # folding onto another action (e.g. another fold) is checked
            # when that action is verified
            return
        else:
            c = repo[prev.node]
        if not c.mutable():
            raise error.ParseError(
                _("cannot fold into public change %s") % node.short(c.node()))

    def continuedirty(self):
        # commit the pending changes as a temporary revision; it gets
        # folded into its parent by continueclean() afterwards
        repo = self.repo
        rulectx = repo[self.node]

        commit = commitfuncfor(repo, rulectx)
        commit(text='fold-temp-revision %s' % node.short(self.node),
               user=rulectx.user(), date=rulectx.date(),
               extra=rulectx.extra())

    def continueclean(self):
        repo = self.repo
        ctx = repo['.']
        rulectx = repo[self.node]
        parentctxnode = self.state.parentctxnode
        if ctx.node() == parentctxnode:
            # nothing was committed for this rule
            repo.ui.warn(_('%s: empty changeset\n') %
                         node.short(self.node))
            return ctx, [(self.node, (parentctxnode,))]

        parentctx = repo[parentctxnode]
        newcommits = {c.node() for c in repo.set('(%d::. - %d)',
                                                 parentctx.rev(),
                                                 parentctx.rev())}
        if not newcommits:
            repo.ui.warn(_('%s: cannot fold - working copy is not a '
                           'descendant of previous commit %s\n') %
                         (node.short(self.node), node.short(parentctxnode)))
            return ctx, [(self.node, (ctx.node(),))]

        # everything between the previous commit and '.' (exclusive) is
        # intermediate state created while folding
        middlecommits = newcommits.copy()
        middlecommits.discard(ctx.node())

        return self.finishfold(repo.ui, repo, parentctx, rulectx, ctx.node(),
                               middlecommits)

    def skipprompt(self):
        """Returns true if the rule should skip the message editor.

        For example, 'fold' wants to show an editor, but 'rollup'
        doesn't want to.
        """
        return False

    def mergedescs(self):
        """Returns true if the rule should merge messages of multiple changes.

        This exists mainly so that 'rollup' rules can be a subclass of
        'fold'.
        """
        return True

    def firstdate(self):
        """Returns true if the rule should preserve the date of the first
        change.

        This exists mainly so that 'rollup' rules can be a subclass of
        'fold'.
        """
        return False

    def finishfold(self, ui, repo, ctx, oldctx, newnode, internalchanges):
        """Collapse ctx, the intermediate changes and newnode into one commit.

        Returns the new context and the list of node replacements."""
        parent = ctx.parents()[0].node()
        hg.updaterepo(repo, parent, overwrite=False)
        ### prepare new commit data
        commitopts = {}
        commitopts['user'] = ctx.user()
        # commit message
        if not self.mergedescs():
            newmessage = ctx.description()
        else:
            newmessage = '\n***\n'.join(
                [ctx.description()] +
                [repo[r].description() for r in internalchanges] +
                [oldctx.description()]) + '\n'
        commitopts['message'] = newmessage
        # date
        if self.firstdate():
            commitopts['date'] = ctx.date()
        else:
            commitopts['date'] = max(ctx.date(), oldctx.date())
        extra = ctx.extra().copy()
        # histedit_source
        # note: ctx is likely a temporary commit but that the best we can do
        # here. This is sufficient to solve issue3681 anyway.
        extra['histedit_source'] = '%s,%s' % (ctx.hex(), oldctx.hex())
        commitopts['extra'] = extra
        # keep the most public phase of the changesets being folded
        phasemin = max(ctx.phase(), oldctx.phase())
        overrides = {('phases', 'new-commit'): phasemin}
        with repo.ui.configoverride(overrides, 'histedit'):
            n = collapse(repo, ctx, repo[newnode], commitopts,
                         skipprompt=self.skipprompt())
        if n is None:
            return ctx, []
        hg.updaterepo(repo, n, overwrite=False)
        replacements = [(oldctx.node(), (newnode,)),
                        (ctx.node(), (n,)),
                        (newnode, (n,)),
                        ]
        for ich in internalchanges:
            replacements.append((ich, (n,)))
        return repo[n], replacements
826 826
@action(['base', 'b'],
        _('checkout changeset and apply further changesets from there'))
class base(histeditaction):

    def run(self):
        # update the working copy to the rule's node unless already there
        if self.repo['.'].node() != self.node:
            mergemod.update(self.repo, self.node, False, True)
            # branchmerge, force)
        return self.continueclean()

    def continuedirty(self):
        # 'base' never commits on its own; a dirty working copy is an error
        abortdirty()

    def continueclean(self):
        basectx = self.repo['.']
        return basectx, []

    def _verifynodeconstraints(self, prev, expected, seen):
        # base can only be use with a node not in the edited set
        if self.node in expected:
            msg = _('%s "%s" changeset was an edited list candidate')
            raise error.ParseError(
                msg % (self.verb, node.short(self.node)),
                hint=_('base must only use unlisted changesets'))
851 851
@action(['_multifold'],
        _(
    """fold subclass used for when multiple folds happen in a row

    We only want to fire the editor for the folded message once when
    (say) four changes are folded down into a single change. This is
    similar to rollup, but we should preserve both messages so that
    when the last fold operation runs we can show the user all the
    commit messages in their editor.
    """),
        internal=True)
class _multifold(fold):
    def skipprompt(self):
        # suppress the editor; only the last fold in the run shows it
        return True
866 866
@action(["roll", "r"],
        _("like fold, but discard this commit's description and date"))
class rollup(fold):
    """A fold variant that never prompts for a message and keeps the
    first changeset's description and date."""

    def skipprompt(self):
        return True

    def mergedescs(self):
        return False

    def firstdate(self):
        return True
878 878
@action(["drop", "d"],
        _('remove commit from history'))
class drop(histeditaction):
    def run(self):
        """Skip the changeset: keep the current parent and record the
        node as dropped (no successors)."""
        ctx = self.repo[self.state.parentctxnode]
        return ctx, [(self.node, ())]
885 885
@action(["mess", "m"],
        _('edit commit message without changing commit content'),
        priority=True)
class message(histeditaction):
    def commiteditor(self):
        # always fire the editor so the user can reword the message
        return cmdutil.getcommiteditor(edit=True, editform='histedit.mess')
892 892
def findoutgoing(ui, repo, remote=None, force=False, opts=None):
    """utility function to find the first outgoing changeset

    Used by initialization code.

    Returns the root node of the outgoing set when it is unambiguous;
    aborts when there are no outgoing ancestors or several outgoing roots.
    """
    if opts is None:
        opts = {}
    dest = ui.expandpath(remote or 'default-push', remote or 'default')
    dest, branches = hg.parseurl(dest, None)[:2]
    ui.status(_('comparing with %s\n') % util.hidepassword(dest))

    revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
    other = hg.peer(repo, opts, dest)

    if revs:
        revs = [repo.lookup(rev) for rev in revs]

    outgoing = discovery.findcommonoutgoing(repo, other, revs, force=force)
    if not outgoing.missing:
        raise error.Abort(_('no outgoing ancestors'))
    roots = list(repo.revs("roots(%ln)", outgoing.missing))
    if len(roots) > 1:
        msg = _('there are ambiguous outgoing revisions')
        hint = _("see 'hg help histedit' for more detail")
        raise error.Abort(msg, hint=hint)
    return repo[roots[0]].node()
918 918
@command('histedit',
    [('', 'commands', '',
      _('read history edits from the specified file'), _('FILE')),
     ('c', 'continue', False, _('continue an edit already in progress')),
     ('', 'edit-plan', False, _('edit remaining actions list')),
     ('k', 'keep', False,
      _("don't strip old nodes after edit is complete")),
     ('', 'abort', False, _('abort an edit in progress')),
     ('o', 'outgoing', False, _('changesets not found in destination')),
     ('f', 'force', False,
      _('force outgoing even for unrelated repositories')),
     ('r', 'rev', [], _('first revision to be edited'), _('REV'))] +
    cmdutil.formatteropts,
     _("[OPTIONS] ([ANCESTOR] | --outgoing [URL])"))
def histedit(ui, repo, *freeargs, **opts):
    """interactively edit changeset history

    This command lets you edit a linear series of changesets (up to
    and including the working directory, which should be clean).
    You can:

    - `pick` to [re]order a changeset

    - `drop` to omit changeset

    - `mess` to reword the changeset commit message

    - `fold` to combine it with the preceding changeset (using the later date)

    - `roll` like fold, but discarding this commit's description and date

    - `edit` to edit this changeset (preserving date)

    - `base` to checkout changeset and apply further changesets from there

    There are a number of ways to select the root changeset:

    - Specify ANCESTOR directly

    - Use --outgoing -- it will be the first linear changeset not
      included in destination. (See :hg:`help config.paths.default-push`)

    - Otherwise, the value from the "histedit.defaultrev" config option
      is used as a revset to select the base revision when ANCESTOR is not
      specified. The first revision returned by the revset is used. By
      default, this selects the editable history that is unique to the
      ancestry of the working directory.

    .. container:: verbose

       If you use --outgoing, this command will abort if there are ambiguous
       outgoing revisions. For example, if there are multiple branches
       containing outgoing revisions.

       Use "min(outgoing() and ::.)" or similar revset specification
       instead of --outgoing to specify edit target revision exactly in
       such ambiguous situation. See :hg:`help revsets` for detail about
       selecting revisions.

    .. container:: verbose

       Examples:

         - A number of changes have been made.
           Revision 3 is no longer needed.

           Start history editing from revision 3::

             hg histedit -r 3

           An editor opens, containing the list of revisions,
           with specific actions specified::

             pick 5339bf82f0ca 3 Zworgle the foobar
             pick 8ef592ce7cc4 4 Bedazzle the zerlog
             pick 0a9639fcda9d 5 Morgify the cromulancy

           Additional information about the possible actions
           to take appears below the list of revisions.

           To remove revision 3 from the history,
           its action (at the beginning of the relevant line)
           is changed to 'drop'::

             drop 5339bf82f0ca 3 Zworgle the foobar
             pick 8ef592ce7cc4 4 Bedazzle the zerlog
             pick 0a9639fcda9d 5 Morgify the cromulancy

         - A number of changes have been made.
           Revision 2 and 4 need to be swapped.

           Start history editing from revision 2::

             hg histedit -r 2

           An editor opens, containing the list of revisions,
           with specific actions specified::

             pick 252a1af424ad 2 Blorb a morgwazzle
             pick 5339bf82f0ca 3 Zworgle the foobar
             pick 8ef592ce7cc4 4 Bedazzle the zerlog

           To swap revision 2 and 4, its lines are swapped
           in the editor::

             pick 8ef592ce7cc4 4 Bedazzle the zerlog
             pick 5339bf82f0ca 3 Zworgle the foobar
             pick 252a1af424ad 2 Blorb a morgwazzle

    Returns 0 on success, 1 if user intervention is required (not only
    for intentional "edit" command, but also for resolving unexpected
    conflicts).
    """
    state = histeditstate(repo)
    try:
        # hold both locks for the whole session; released in the finally
        state.wlock = repo.wlock()
        state.lock = repo.lock()
        _histedit(ui, repo, state, *freeargs, **opts)
    finally:
        release(state.lock, state.wlock)
1039 1039
# Possible values returned by _getgoal(), naming the histedit operation
# mode requested on the command line.
goalcontinue = 'continue'
goalabort = 'abort'
goaleditplan = 'edit-plan'
goalnew = 'new'
1044 1044
def _getgoal(opts):
    """Map command line options onto the requested histedit goal."""
    # precedence matters: --continue beats --abort beats --edit-plan
    for opt, goal in [('continue', goalcontinue),
                      ('abort', goalabort),
                      ('edit_plan', goaleditplan)]:
        if opts.get(opt):
            return goal
    return goalnew
1053 1053
def _readfile(ui, path):
    """Return the bytes of <path>, reading stdin when path is '-'."""
    if path == '-':
        # reading the rules from the user counts as blocked time
        with ui.timeblockedsection('histedit'):
            return ui.fin.read()
    with open(path, 'rb') as f:
        return f.read()
1061 1061
def _validateargs(ui, repo, state, freeargs, opts, goal, rules, revs):
    """Check option/argument compatibility for the requested goal.

    Mutates ``revs`` in the 'new' goal (appending free arguments and the
    configured default revision). Raises error.Abort on any conflicting
    combination.
    """
    # TODO only abort if we try to histedit mq patches, not just
    # blanket if mq patches are applied somewhere
    mq = getattr(repo, 'mq', None)
    if mq and mq.applied:
        raise error.Abort(_('source has mq patches applied'))

    # basic argument incompatibility processing
    outg = opts.get('outgoing')
    editplan = opts.get('edit_plan')
    abort = opts.get('abort')
    force = opts.get('force')
    if force and not outg:
        raise error.Abort(_('--force only allowed with --outgoing'))
    if goal == 'continue':
        if any((outg, abort, revs, freeargs, rules, editplan)):
            raise error.Abort(_('no arguments allowed with --continue'))
    elif goal == 'abort':
        if any((outg, revs, freeargs, rules, editplan)):
            raise error.Abort(_('no arguments allowed with --abort'))
    elif goal == 'edit-plan':
        if any((outg, revs, freeargs)):
            raise error.Abort(_('only --commands argument allowed with '
                                '--edit-plan'))
    else:
        if state.inprogress():
            raise error.Abort(_('history edit already in progress, try '
                                '--continue or --abort'))
        if outg:
            if revs:
                raise error.Abort(_('no revisions allowed with --outgoing'))
            if len(freeargs) > 1:
                raise error.Abort(
                    _('only one repo argument allowed with --outgoing'))
        else:
            revs.extend(freeargs)
            if not revs:
                # fall back to the configured default ancestor revision
                defaultrev = destutil.desthistedit(ui, repo)
                if defaultrev is not None:
                    revs.append(defaultrev)

            if len(revs) != 1:
                raise error.Abort(
                    _('histedit requires exactly one ancestor revision'))
1106 1106
def _histedit(ui, repo, state, *freeargs, **opts):
    """Top-level dispatcher: validate arguments, then run the chosen goal."""
    opts = pycompat.byteskwargs(opts)
    fm = ui.formatter('histedit', opts)
    fm.startitem()
    goal = _getgoal(opts)
    revs = opts.get('rev', [])
    # experimental config: ui.history-editing-backup
    nobackup = not ui.configbool('ui', 'history-editing-backup')
    rules = opts.get('commands', '')
    state.keep = opts.get('keep', False)

    _validateargs(ui, repo, state, freeargs, opts, goal, rules, revs)

    # rebuild state
    if goal == goalcontinue:
        state.read()
        state = bootstrapcontinue(ui, state, opts)
    elif goal == goaleditplan:
        # edit-plan only rewrites the action list; no processing needed
        _edithisteditplan(ui, repo, state, rules)
        return
    elif goal == goalabort:
        _aborthistedit(ui, repo, state, nobackup=nobackup)
        return
    else:
        # goal == goalnew
        _newhistedit(ui, repo, state, revs, freeargs, opts)

    _continuehistedit(ui, repo, state)
    _finishhistedit(ui, repo, state, fm)
    fm.end()
1137 1137
def _continuehistedit(ui, repo, state):
    """This function runs after either:
    - bootstrapcontinue (if the goal is 'continue')
    - _newhistedit (if the goal is 'new')

    Executes the remaining actions one by one, persisting the state file
    after each so the session can be aborted or resumed at any point.
    """
    # preprocess rules so that we can hide inner folds from the user
    # and only show one editor
    actions = state.actions[:]
    for idx, (action, nextact) in enumerate(
            zip(actions, actions[1:] + [None])):
        if action.verb == 'fold' and nextact and nextact.verb == 'fold':
            state.actions[idx].__class__ = _multifold

    # Force an initial state file write, so the user can run --abort/continue
    # even if there's an exception before the first transaction serialize.
    state.write()

    tr = None
    # Don't use singletransaction by default since it rolls the entire
    # transaction back if an unexpected exception happens (like a
    # pretxncommit hook throws, or the user aborts the commit msg editor).
    if ui.configbool("histedit", "singletransaction"):
        # Don't use a 'with' for the transaction, since actions may close
        # and reopen a transaction. For example, if the action executes an
        # external process it may choose to commit the transaction first.
        tr = repo.transaction('histedit')
    progress = ui.makeprogress(_("editing"), unit=_('changes'),
                               total=len(state.actions))
    with progress, util.acceptintervention(tr):
        while state.actions:
            state.write(tr=tr)
            actobj = state.actions[0]
            progress.increment(item=actobj.torule())
            # (no backslash continuation needed inside the parentheses)
            ui.debug('histedit: processing %s %s\n' % (actobj.verb,
                                                       actobj.torule()))
            parentctx, replacement_ = actobj.run()
            state.parentctxnode = parentctx.node()
            state.replacements.extend(replacement_)
            state.actions.pop(0)

    state.write()
1179 1179
def _finishhistedit(ui, repo, state, fm):
    """This action runs when histedit is finishing its session"""
    hg.updaterepo(repo, state.parentctxnode, overwrite=False)

    mapping, tmpnodes, created, ntm = processreplacement(state)
    if mapping:
        for prec, succs in mapping.iteritems():
            if not succs:
                ui.debug('histedit: %s is dropped\n' % node.short(prec))
            else:
                ui.debug('histedit: %s is replaced by %s\n' % (
                    node.short(prec), node.short(succs[0])))
                if len(succs) > 1:
                    m = 'histedit: %s'
                    for n in succs[1:]:
                        ui.debug(m % node.short(n))

    if not state.keep:
        if mapping:
            movetopmostbookmarks(repo, state.topmost, ntm)
            # TODO update mq state
    else:
        # --keep: old nodes stay; do not strip or obsolete anything
        mapping = {}

    # temporary nodes are always cleaned up, even with --keep
    for n in tmpnodes:
        if n in repo:
            mapping[n] = ()

    # remove entries about unknown nodes
    nodemap = repo.unfiltered().changelog.nodemap
    mapping = {k: v for k, v in mapping.items()
               if k in nodemap and all(n in nodemap for n in v)}
    scmutil.cleanupnodes(repo, mapping, 'histedit')
    hf = fm.hexfunc
    fl = fm.formatlist
    fd = fm.formatdict
    nodechanges = fd({hf(oldn): fl([hf(n) for n in newn], name='node')
                      for oldn, newn in mapping.iteritems()},
                     key="oldnode", value="newnodes")
    fm.data(nodechanges=nodechanges)

    state.clear()
    if os.path.exists(repo.sjoin('undo')):
        os.unlink(repo.sjoin('undo'))
    if repo.vfs.exists('histedit-last-edit.txt'):
        repo.vfs.unlink('histedit-last-edit.txt')
1226 1226
def _aborthistedit(ui, repo, state, nobackup=False):
    """Abort an in-progress histedit session.

    Restores backed-up commits when needed, updates the working copy back
    to the original topmost changeset, and strips temporary nodes. The
    state file is cleared in all cases.
    """
    try:
        state.read()
        __, leafs, tmpnodes, __ = processreplacement(state)
        ui.debug('restore wc to old parent %s\n'
                 % node.short(state.topmost))

        # Recover our old commits if necessary
        if state.topmost not in repo and state.backupfile:
            backupfile = repo.vfs.join(state.backupfile)
            f = hg.openpath(ui, backupfile)
            gen = exchange.readbundle(ui, f, backupfile)
            with repo.transaction('histedit.abort') as tr:
                bundle2.applybundle(repo, gen, tr, source='histedit',
                                    url='bundle:' + backupfile)

            os.remove(backupfile)

        # check whether we should update away
        if repo.unfiltered().revs('parents() and (%n or %ln::)',
                                  state.parentctxnode, leafs | tmpnodes):
            hg.clean(repo, state.topmost, show_stats=True, quietempty=True)
        cleanupnode(ui, repo, tmpnodes, nobackup=nobackup)
        cleanupnode(ui, repo, leafs, nobackup=nobackup)
    except Exception:
        if state.inprogress():
            ui.warn(_('warning: encountered an exception during histedit '
                      '--abort; the repository may not have been completely '
                      'cleaned up\n'))
        raise
    finally:
        state.clear()
1259 1259
def _edithisteditplan(ui, repo, state, rules):
    """Replace the remaining actions of an in-progress histedit.

    <rules> is a file path ('-' for stdin), or empty to fire the editor
    pre-filled with the current remaining actions.
    """
    state.read()
    if not rules:
        comment = geteditcomment(ui,
                                 node.short(state.parentctxnode),
                                 node.short(state.topmost))
        rules = ruleeditor(repo, ui, state.actions, comment)
    else:
        rules = _readfile(ui, rules)
    actions = parserules(rules, state)
    # (no backslash continuation needed inside the brackets)
    ctxs = [repo[act.node]
            for act in state.actions if act.node]
    warnverifyactions(ui, repo, actions, state, ctxs)
    state.actions = actions
    state.write()
1275 1275
def _newhistedit(ui, repo, state, revs, freeargs, opts):
    """Start a fresh histedit session.

    Determines the root changeset, builds the action plan (from
    --commands or the interactive rule editor), and backs up the old
    heads so the session can always be aborted.
    """
    outg = opts.get('outgoing')
    rules = opts.get('commands', '')
    force = opts.get('force')

    cmdutil.checkunfinished(repo)
    cmdutil.bailifchanged(repo)

    topmost, empty = repo.dirstate.parents()
    if outg:
        if freeargs:
            remote = freeargs[0]
        else:
            remote = None
        root = findoutgoing(ui, repo, remote, force, opts)
    else:
        rr = list(repo.set('roots(%ld)', scmutil.revrange(repo, revs)))
        if len(rr) != 1:
            raise error.Abort(_('The specified revisions must have '
                                'exactly one common root'))
        root = rr[0].node()

    revs = between(repo, root, topmost, state.keep)
    if not revs:
        raise error.Abort(_('%s is not an ancestor of working directory') %
                          node.short(root))

    ctxs = [repo[r] for r in revs]
    if not rules:
        # default plan: 'pick' every revision, then let the user edit it
        comment = geteditcomment(ui, node.short(root), node.short(topmost))
        actions = [pick(state, r) for r in revs]
        rules = ruleeditor(repo, ui, actions, comment)
    else:
        rules = _readfile(ui, rules)
    actions = parserules(rules, state)
    warnverifyactions(ui, repo, actions, state, ctxs)

    parentctxnode = repo[root].parents()[0].node()

    state.parentctxnode = parentctxnode
    state.actions = actions
    state.topmost = topmost
    state.replacements = []

    ui.log("histedit", "%d actions to histedit", len(actions),
           histedit_num_actions=len(actions))

    # Create a backup so we can always abort completely.
    backupfile = None
    if not obsolete.isenabled(repo, obsolete.createmarkersopt):
        backupfile = repair.backupbundle(repo, [parentctxnode],
                                        [topmost], root, 'histedit')
    state.backupfile = backupfile
1329 1329
def _getsummary(ctx):
    """Return the first line of ctx's description, or '' when empty."""
    description = ctx.description()
    if not description:
        return ''
    return description.splitlines()[0]
1337 1337
def bootstrapcontinue(ui, state, opts):
    """Re-enter an interrupted histedit session.

    Finishes the interrupted action (committing dirty changes or
    accepting the clean working copy) and returns the updated state.
    """
    repo = state.repo

    # bail out if there are unresolved merge conflicts
    ms = mergemod.mergestate.read(repo)
    mergeutil.checkunresolved(ms)

    if state.actions:
        actobj = state.actions.pop(0)

        if _isdirtywc(repo):
            actobj.continuedirty()
            # continuedirty() must leave the working copy clean
            if _isdirtywc(repo):
                abortdirty()

        parentctx, replacements = actobj.continueclean()

        state.parentctxnode = parentctx.node()
        state.replacements.extend(replacements)

    return state
1358 1358
def between(repo, old, new, keep):
    """select and validate the set of revision to edit

    When keep is false, the specified set can't have children."""
    revs = repo.revs('%n::%n', old, new)
    if revs and not keep:
        # without obsolescence support, edited revisions are stripped, so
        # they must not have descendants outside the edited set
        if (not obsolete.isenabled(repo, obsolete.allowunstableopt) and
            repo.revs('(%ld::) - (%ld)', revs, revs)):
            raise error.Abort(_('can only histedit a changeset together '
                                'with all its descendants'))
        if repo.revs('(%ld) and merge()', revs):
            raise error.Abort(_('cannot edit history that contains merges'))
        root = repo[revs.first()] # list is already sorted by repo.revs()
        if not root.mutable():
            raise error.Abort(_('cannot edit public changeset: %s') % root,
                              hint=_("see 'hg help phases' for details"))
    return pycompat.maplist(repo.changelog.node, revs)
1376 1376
def ruleeditor(repo, ui, actions, editcomment=""):
    """open an editor to edit rules

    rules are in the format [ [act, ctx], ...] like in state.rules
    """
    if repo.ui.configbool("experimental", "histedit.autoverb"):
        # autoverb: a commit summary starting with 'verb!' pre-selects that
        # verb and moves the action next to the commit whose summary matches
        # the remainder of the line
        newact = util.sortdict()
        for act in actions:
            ctx = repo[act.node]
            summary = _getsummary(ctx)
            fword = summary.split(' ', 1)[0].lower()
            added = False

            # if it doesn't end with the special character '!' just skip this
            if fword.endswith('!'):
                fword = fword[:-1]
                if fword in primaryactions | secondaryactions | tertiaryactions:
                    act.verb = fword
                    # get the target summary
                    tsum = summary[len(fword) + 1:].lstrip()
                    # safe but slow: reverse iterate over the actions so we
                    # don't clash on two commits having the same summary
                    for na, l in reversed(list(newact.iteritems())):
                        actx = repo[na.node]
                        asum = _getsummary(actx)
                        if asum == tsum:
                            added = True
                            l.append(act)
                            break

            if not added:
                newact[act] = []

        # copy over and flatten the new list
        actions = []
        for na, l in newact.iteritems():
            actions.append(na)
            actions += l

    rules = '\n'.join([act.torule() for act in actions])
    rules += '\n\n'
    rules += editcomment
    rules = ui.edit(rules, ui.username(), {'prefix': 'histedit'},
                    repopath=repo.path, action='histedit')

    # Save edit rules in .hg/histedit-last-edit.txt in case
    # the user needs to ask for help after something
    # surprising happens.
    with repo.vfs('histedit-last-edit.txt', 'wb') as f:
        f.write(rules)

    return rules
1429 1429
def parserules(rules, state):
    """Parse the histedit rules text and return a list of action objects."""
    actions = []
    for rawline in rules.splitlines():
        line = rawline.strip()
        # blank lines and '#' comment lines carry no rule
        if not line or line.startswith('#'):
            continue
        if ' ' not in line:
            raise error.ParseError(_('malformed line "%s"') % line)
        verb, rest = line.split(' ', 1)
        if verb not in actiontable:
            raise error.ParseError(_('unknown action "%s"') % verb)
        actions.append(actiontable[verb].fromrule(state, rest))
    return actions
1446 1446
def warnverifyactions(ui, repo, actions, state, ctxs):
    """run verifyactions(), pointing the user at the saved rules backup
    before re-raising any ParseError"""
    try:
        verifyactions(actions, state, ctxs)
    except error.ParseError:
        # remind the user their edited plan was preserved on disk
        if repo.vfs.exists('histedit-last-edit.txt'):
            ui.warn(_('warning: histedit rules saved '
                      'to: .hg/histedit-last-edit.txt\n'))
        raise
1455 1455
def verifyactions(actions, state, ctxs):
    """Verify that there exists exactly one action per given changeset and
    other constraints.

    Will abort if there are to many or too few rules, a malformed rule,
    or a rule on a changeset outside of the user-given range.

    May mutate *actions* in place (prepending drop actions when the
    histedit.dropmissing config is set).
    """
    expected = set(c.node() for c in ctxs)
    seen = set()
    prev = None

    # roll/fold combine with the previous changeset; there is none yet
    if actions and actions[0].verb in ['roll', 'fold']:
        raise error.ParseError(_('first changeset cannot use verb "%s"') %
                               actions[0].verb)

    for action in actions:
        action.verify(prev, expected, seen)
        prev = action
        if action.node is not None:
            seen.add(action.node)
    missing = sorted(expected - seen)  # sort to stabilize output

    if state.repo.ui.configbool('histedit', 'dropmissing'):
        if len(actions) == 0:
            raise error.ParseError(_('no rules provided'),
                                   hint=_('use strip extension to remove commits'))

        drops = [drop(state, n) for n in missing]
        # put them in the beginning so they execute immediately and
        # don't show in the edit-plan in the future
        actions[:0] = drops
    elif missing:
        raise error.ParseError(_('missing rules for changeset %s') %
                               node.short(missing[0]),
                               hint=_('use "drop %s" to discard, see also: '
                                      "'hg help -e histedit.config'")
                                    % node.short(missing[0]))
1493 1493
def adjustreplacementsfrommarkers(repo, oldreplacements):
    """Adjust replacements from obsolescence markers

    Replacements structure is originally generated based on
    histedit's state and does not account for changes that are
    not recorded there. This function fixes that by adding
    data read from obsolescence markers"""
    if not obsolete.isenabled(repo, obsolete.createmarkersopt):
        return oldreplacements

    unfi = repo.unfiltered()
    nm = unfi.changelog.nodemap
    obsstore = repo.obsstore
    newreplacements = list(oldreplacements)
    oldsuccs = [r[1] for r in oldreplacements]
    # successors that have already been added to succstocheck once
    seensuccs = set().union(*oldsuccs) # create a set from an iterable of tuples
    succstocheck = list(seensuccs)
    # walk the successor graph, following markers transitively; each node is
    # visited at most once thanks to seensuccs
    while succstocheck:
        n = succstocheck.pop()
        missing = nm.get(n) is None
        markers = obsstore.successors.get(n, ())
        if missing and not markers:
            # dead end, mark it as such
            newreplacements.append((n, ()))
        for marker in markers:
            nsuccs = marker[1]
            newreplacements.append((n, nsuccs))
            for nsucc in nsuccs:
                if nsucc not in seensuccs:
                    seensuccs.add(nsucc)
                    succstocheck.append(nsucc)

    return newreplacements
1528 1528
def processreplacement(state):
    """process the list of replacements to return

    1) the final mapping between original and created nodes
    2) the list of temporary node created by histedit
    3) the list of new commit created by histedit"""
    replacements = adjustreplacementsfrommarkers(state.repo, state.replacements)
    allsuccs = set()
    replaced = set()
    fullmapping = {}
    # initialize basic set
    # fullmapping records all operations recorded in replacement
    for rep in replacements:
        allsuccs.update(rep[1])
        replaced.add(rep[0])
        fullmapping.setdefault(rep[0], set()).update(rep[1])
    new = allsuccs - replaced
    tmpnodes = allsuccs & replaced
    # Reduce content fullmapping into direct relation between original nodes
    # and final node created during history edition
    # Dropped changeset are replaced by an empty list
    toproceed = set(fullmapping)
    final = {}
    # fixed-point iteration: keep substituting until every mapping points
    # only at final (non-intermediate) nodes
    while toproceed:
        for x in list(toproceed):
            succs = fullmapping[x]
            for s in list(succs):
                if s in toproceed:
                    # non final node with unknown closure
                    # We can't process this now
                    break
                elif s in final:
                    # non final node, replace with closure
                    succs.remove(s)
                    succs.update(final[s])
            else:
                final[x] = succs
                toproceed.remove(x)
    # remove tmpnodes from final mapping
    for n in tmpnodes:
        del final[n]
    # we expect all changes involved in final to exist in the repo
    # turn `final` into list (topologically sorted)
    nm = state.repo.changelog.nodemap
    for prec, succs in final.items():
        final[prec] = sorted(succs, key=nm.get)

    # computed topmost element (necessary for bookmark)
    if new:
        newtopmost = sorted(new, key=state.repo.changelog.rev)[-1]
    elif not final:
        # Nothing rewritten at all. we won't need `newtopmost`
        # It is the same as `oldtopmost` and `processreplacement` know it
        newtopmost = None
    else:
        # every body died. The newtopmost is the parent of the root.
        r = state.repo.changelog.rev
        newtopmost = state.repo[sorted(final, key=r)[0]].p1().node()

    return final, tmpnodes, new, newtopmost
1589 1589
def movetopmostbookmarks(repo, oldtopmost, newtopmost):
    """Move bookmark from oldtopmost to newly created topmost

    This is arguably a feature and we may only want that for the active
    bookmark. But the behavior is kept compatible with the old version for now.
    """
    if not (oldtopmost and newtopmost):
        return
    bmarks = repo.nodebookmarks(oldtopmost)
    if not bmarks:
        return
    # rewrite every bookmark on the old topmost inside one transaction
    with repo.lock(), repo.transaction('histedit') as tr:
        repo._bookmarks.applychanges(
            repo, tr, [(bm, newtopmost) for bm in bmarks])
1606 1606
def cleanupnode(ui, repo, nodes, nobackup=False):
    """strip a group of nodes from the repository

    The set of node to strip may contains unknown nodes."""
    with repo.lock():
        # do not let filtering get in the way of the cleanse
        # we should probably get rid of obsolescence marker created during the
        # histedit, but we currently do not have such information.
        repo = repo.unfiltered()
        # Find all nodes that need to be stripped
        # (we use %lr instead of %ln to silently ignore unknown items)
        nm = repo.changelog.nodemap
        nodes = sorted(n for n in nodes if n in nm)
        roots = [c.node() for c in repo.set("roots(%ln)", nodes)]
        if roots:
            # stripping each component's root removes its descendants too
            backup = not nobackup
            repair.strip(ui, repo, roots, backup=backup)
1624 1624
def stripwrapper(orig, ui, repo, nodelist, *args, **kwargs):
    """wrapper around repair.strip refusing to strip nodes that an
    in-progress histedit still refers to"""
    if isinstance(nodelist, str):
        nodelist = [nodelist]
    state = histeditstate(repo)
    if state.inprogress():
        state.read()
        histedit_nodes = {action.node for action
                          in state.actions if action.node}
        common_nodes = histedit_nodes & set(nodelist)
        if common_nodes:
            raise error.Abort(_("histedit in progress, can't strip %s")
                              % ', '.join(node.short(x) for x in common_nodes))
    return orig(ui, repo, nodelist, *args, **kwargs)

# guard strip globally, not just while histedit commands run
extensions.wrapfunction(repair, 'strip', stripwrapper)
1640 1640
def summaryhook(ui, repo):
    """'hg summary' hook: report how many histedit actions remain"""
    state = histeditstate(repo)
    if not state.inprogress():
        return
    state.read()
    if state.actions:
        # i18n: column positioning for "hg summary"
        ui.write(_('hist: %s (histedit --continue)\n') %
                 (ui.label(_('%d remaining'), 'histedit.remaining') %
                  len(state.actions)))
1651 1651
def extsetup(ui):
    # register the summary hook and teach core about the interrupted
    # histedit state file so generic --continue/--abort hints work
    cmdutil.summaryhooks.add('histedit', summaryhook)
    cmdutil.unfinishedstates.append(
        ['histedit-state', False, True, _('histedit in progress'),
         _("use 'hg histedit --continue' or 'hg histedit --abort'")])
    cmdutil.afterresolvedstates.append(
        ['histedit-state', _('hg histedit --continue')])
@@ -1,835 +1,835
1 1 # patchbomb.py - sending Mercurial changesets as patch emails
2 2 #
3 3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 '''command to send changesets as (a series of) patch emails
9 9
10 10 The series is started off with a "[PATCH 0 of N]" introduction, which
11 11 describes the series as a whole.
12 12
13 13 Each patch email has a Subject line of "[PATCH M of N] ...", using the
14 14 first line of the changeset description as the subject text. The
15 15 message contains two or three body parts:
16 16
17 17 - The changeset description.
18 18 - [Optional] The result of running diffstat on the patch.
19 19 - The patch itself, as generated by :hg:`export`.
20 20
21 21 Each message refers to the first in the series using the In-Reply-To
22 22 and References headers, so they will show up as a sequence in threaded
23 23 mail and news readers, and in mail archives.
24 24
25 25 To configure other defaults, add a section like this to your
26 26 configuration file::
27 27
28 28 [email]
29 29 from = My Name <my@email>
30 30 to = recipient1, recipient2, ...
31 31 cc = cc1, cc2, ...
32 32 bcc = bcc1, bcc2, ...
33 33 reply-to = address1, address2, ...
34 34
35 35 Use ``[patchbomb]`` as configuration section name if you need to
36 36 override global ``[email]`` address settings.
37 37
38 38 Then you can use the :hg:`email` command to mail a series of
39 39 changesets as a patchbomb.
40 40
41 41 You can also either configure the method option in the email section
42 42 to be a sendmail compatible mailer or fill out the [smtp] section so
43 43 that the patchbomb extension can automatically send patchbombs
44 44 directly from the commandline. See the [email] and [smtp] sections in
45 45 hgrc(5) for details.
46 46
47 47 By default, :hg:`email` will prompt for a ``To`` or ``CC`` header if
48 48 you do not supply one via configuration or the command line. You can
49 49 override this to never prompt by configuring an empty value::
50 50
51 51 [email]
52 52 cc =
53 53
54 54 You can control the default inclusion of an introduction message with the
55 55 ``patchbomb.intro`` configuration option. The configuration is always
56 56 overwritten by command line flags like --intro and --desc::
57 57
58 58 [patchbomb]
59 59 intro=auto # include introduction message if more than 1 patch (default)
60 60 intro=never # never include an introduction message
61 61 intro=always # always include an introduction message
62 62
63 63 You can specify a template for flags to be added in subject prefixes. Flags
64 64 specified by --flag option are exported as ``{flags}`` keyword::
65 65
66 66 [patchbomb]
67 67 flagtemplate = "{separate(' ',
68 68 ifeq(branch, 'default', '', branch|upper),
69 69 flags)}"
70 70
71 71 You can set patchbomb to always ask for confirmation by setting
72 72 ``patchbomb.confirm`` to true.
73 73 '''
74 74 from __future__ import absolute_import
75 75
76 76 import email.encoders as emailencoders
77 77 import email.generator as emailgen
78 78 import email.mime.base as emimebase
79 79 import email.mime.multipart as emimemultipart
80 80 import email.utils as eutil
81 81 import errno
82 82 import os
83 83 import socket
84 84
85 85 from mercurial.i18n import _
86 86 from mercurial import (
87 87 cmdutil,
88 88 commands,
89 89 encoding,
90 90 error,
91 91 formatter,
92 92 hg,
93 93 mail,
94 94 node as nodemod,
95 95 patch,
96 96 pycompat,
97 97 registrar,
98 98 scmutil,
99 99 templater,
100 100 util,
101 101 )
102 102 from mercurial.utils import dateutil
stringio = util.stringio

cmdtable = {}
command = registrar.command(cmdtable)

configtable = {}
configitem = registrar.configitem(configtable)

# Register every [patchbomb] configuration knob so config validation and
# introspection know about them.
configitem('patchbomb', 'bundletype',
    default=None,
)
configitem('patchbomb', 'bcc',
    default=None,
)
configitem('patchbomb', 'cc',
    default=None,
)
configitem('patchbomb', 'confirm',
    default=False,
)
configitem('patchbomb', 'flagtemplate',
    default=None,
)
configitem('patchbomb', 'from',
    default=None,
)
configitem('patchbomb', 'intro',
    default='auto',
)
configitem('patchbomb', 'publicurl',
    default=None,
)
configitem('patchbomb', 'reply-to',
    default=None,
)
configitem('patchbomb', 'to',
    default=None,
)

# Python 3 needs the bytes-emitting generator to serialize messages.
if pycompat.ispy3:
    _bytesgenerator = emailgen.BytesGenerator
else:
    _bytesgenerator = emailgen.Generator

# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = 'ships-with-hg-core'
152 152
def _addpullheader(seq, ctx):
    """Add a header pointing to a public URL where the changeset is available

    Returns the header text, or None when patchbomb.publicurl is unset.
    """
    repo = ctx.repo()
    # experimental config: patchbomb.publicurl
    # waiting for some logic that check that the changeset are available on the
    # destination before patchbombing anything.
    publicurl = repo.ui.config('patchbomb', 'publicurl')
    if publicurl:
        return ('Available At %s\n'
                '#              hg pull %s -r %s' % (publicurl, publicurl, ctx))
    return None
165 165
def uisetup(ui):
    # register the 'pullurl' export header, produced by _addpullheader
    cmdutil.extraexport.append('pullurl')
    cmdutil.extraexportmap['pullurl'] = _addpullheader
169 169
def reposetup(ui, repo):
    if not repo.local():
        return
    # last-email.txt is a best-effort backup of the intro message; writing
    # it does not require the working-directory lock
    repo._wlockfreeprefix.add('last-email.txt')
174 174
def prompt(ui, prompt, default=None, rest=':'):
    """Ask the user a question via ui.prompt().

    A truthy *default* is shown in brackets after the question and passed
    through as the fallback answer; *rest* is the trailing punctuation.
    """
    if default:
        prompt = '%s [%s]' % (prompt, default)
    return ui.prompt(prompt + rest, default)
179 179
def introwanted(ui, opts, number):
    '''is an introductory message apparently wanted?'''
    # explicit command-line request always wins
    if opts.get('intro') or opts.get('desc'):
        return True
    introconfig = ui.config('patchbomb', 'intro')
    if introconfig == 'always':
        return True
    if introconfig == 'never':
        return False
    if introconfig == 'auto':
        # only bother with an intro when there is more than one patch
        return number > 1
    ui.write_err(_('warning: invalid patchbomb.intro value "%s"\n')
                 % introconfig)
    ui.write_err(_('(should be one of always, never, auto)\n'))
    return number > 1
197 197
def _formatflags(ui, repo, rev, flags):
    """build flag string optionally by template"""
    tmpl = ui.config('patchbomb', 'flagtemplate')
    if not tmpl:
        # no template configured: plain space-joined flags
        return ' '.join(flags)
    # render the configured template with {flags} bound to the flag list
    out = util.stringio()
    opts = {'template': templater.unquotestring(tmpl)}
    with formatter.templateformatter(ui, out, 'patchbombflag', opts) as fm:
        fm.startitem()
        fm.context(ctx=repo[rev])
        fm.write('flags', '%s', fm.formatlist(flags, name='flag'))
    return out.getvalue()
210 210
def _formatprefix(ui, repo, rev, flags, idx, total, numbered):
    """Build the '[PATCH ...]' prefix for a patch subject line."""
    flag = _formatflags(ui, repo, rev, flags)
    if flag:
        flag = ' ' + flag

    if not numbered:
        return '[PATCH%s]' % flag
    # pad the index so all subjects in the series line up
    width = len("%d" % total)
    return '[PATCH %0*d of %d%s]' % (width, idx, total, flag)
222 222
def makepatch(ui, repo, rev, patchlines, opts, _charsets, idx, total, numbered,
              patchname=None):
    """Build one email message for a single patch.

    Returns a (message, subject, diffstat) tuple. Note: may pop leading
    header lines off *patchlines* in place when --plain is set.
    """
    desc = []
    node = None
    body = ''

    # split the export output into description lines and locate the node id
    for line in patchlines:
        if line.startswith('#'):
            if line.startswith('# Node ID'):
                node = line.split()[-1]
            continue
        if line.startswith('diff -r') or line.startswith('diff --git'):
            break
        desc.append(line)

    # we need at least one of the two to name the attachment / headers
    if not patchname and not node:
        raise ValueError

    if opts.get('attach') and not opts.get('body'):
        body = ('\n'.join(desc[1:]).strip() or
                'Patch subject is complete summary.')
        body += '\n\n\n'

    if opts.get('plain'):
        # strip the hg patch header ('# ...' lines plus the blank separator)
        while patchlines and patchlines[0].startswith('# '):
            patchlines.pop(0)
        if patchlines:
            patchlines.pop(0)
        while patchlines and not patchlines[0].strip():
            patchlines.pop(0)

    ds = patch.diffstat(patchlines)
    if opts.get('diffstat'):
        body += ds + '\n\n'

    addattachment = opts.get('attach') or opts.get('inline')
    if not addattachment or opts.get('body'):
        body += '\n'.join(patchlines)

    if addattachment:
        msg = emimemultipart.MIMEMultipart()
        if body:
            msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test')))
        p = mail.mimetextpatch('\n'.join(patchlines), 'x-patch',
                               opts.get('test'))
        binnode = nodemod.bin(node)
        # if node is mq patch, it will have the patch file's name as a tag
        if not patchname:
            patchtags = [t for t in repo.nodetags(binnode)
                         if t.endswith('.patch') or t.endswith('.diff')]
            if patchtags:
                patchname = patchtags[0]
            elif total > 1:
                patchname = cmdutil.makefilename(repo[node], '%b-%n.patch',
                                                 seqno=idx, total=total)
            else:
                patchname = cmdutil.makefilename(repo[node], '%b.patch')
        disposition = r'inline'
        if opts.get('attach'):
            disposition = r'attachment'
        p[r'Content-Disposition'] = (
            disposition + r'; filename=' + encoding.strfromlocal(patchname))
        msg.attach(p)
    else:
        msg = mail.mimetextpatch(body, display=opts.get('test'))

    prefix = _formatprefix(ui, repo, rev, opts.get('flag'), idx, total,
                           numbered)
    subj = desc[0].strip().rstrip('. ')
    if not numbered:
        subj = ' '.join([prefix, opts.get('subject') or subj])
    else:
        subj = ' '.join([prefix, subj])
    msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test'))
    msg['X-Mercurial-Node'] = node
    msg['X-Mercurial-Series-Index'] = '%i' % idx
    msg['X-Mercurial-Series-Total'] = '%i' % total
    return msg, subj, ds
302 302
def _getpatches(repo, revs, **opts):
    """return a list of patches for a list of revisions

    Each patch in the list is itself a list of lines.

    Note: this is a generator; callers typically wrap it in list().
    """
    ui = repo.ui
    prev = repo['.'].rev()
    for r in revs:
        # emailing the checked-out revision with local changes is usually
        # a mistake; warn but continue
        if r == prev and (repo[None].files() or repo[None].deleted()):
            ui.warn(_('warning: working directory has '
                      'uncommitted changes\n'))
        output = stringio()
        cmdutil.exportfile(repo, [r], output,
                           opts=patch.difffeatureopts(ui, opts, git=True))
        yield output.getvalue().split('\n')
def _getbundle(repo, dest, **opts):
    """return a bundle containing changesets missing in "dest"

    The `opts` keyword-arguments are the same as the one accepted by the
    `bundle` command.

    The bundle is a returned as a single in-memory binary blob.
    """
    ui = repo.ui
    tmpdir = pycompat.mkdtemp(prefix='hg-email-bundle-')
    tmpfn = os.path.join(tmpdir, 'bundle')
    btype = ui.config('patchbomb', 'bundletype')
    if btype:
        opts[r'type'] = btype
    try:
        commands.bundle(ui, repo, tmpfn, dest, **opts)
        return util.readfile(tmpfn)
    finally:
        # best-effort cleanup of the temporary bundle file and directory
        try:
            os.unlink(tmpfn)
        except OSError:
            pass
        os.rmdir(tmpdir)
341 341
def _getdescription(repo, defaultbody, sender, **opts):
    """obtain the body of the introduction message and return it

    This is also used for the body of email with an attached bundle.

    The body can be obtained either from the command line option or entered by
    the user through the editor.
    """
    ui = repo.ui
    if opts.get(r'desc'):
        body = open(opts.get(r'desc')).read()
    else:
        ui.write(_('\nWrite the introductory message for the '
                   'patch series.\n\n'))
        body = ui.edit(defaultbody, sender, repopath=repo.path,
                       action='patchbombbody')
        # Save series description in case sendmail fails
        msgfile = repo.vfs('last-email.txt', 'wb')
        msgfile.write(body)
        msgfile.close()
    return body
363 363
def _getbundlemsgs(repo, sender, bundle, **opts):
    """Get the full email for sending a given bundle

    This function returns a list of "email" tuples (subject, content, None).
    The list is always one message long in that case.
    """
    ui = repo.ui
    _charsets = mail._charsets(ui)
    subj = (opts.get(r'subject')
            or prompt(ui, 'Subject:', 'A bundle for your repository'))

    body = _getdescription(repo, '', sender, **opts)
    msg = emimemultipart.MIMEMultipart()
    if body:
        msg.attach(mail.mimeencode(ui, body, _charsets, opts.get(r'test')))
    # attach the raw bundle as a base64-encoded application part
    datapart = emimebase.MIMEBase(r'application', r'x-mercurial-bundle')
    datapart.set_payload(bundle)
    bundlename = '%s.hg' % opts.get(r'bundlename', 'bundle')
    datapart.add_header(r'Content-Disposition', r'attachment',
                        filename=encoding.strfromlocal(bundlename))
    emailencoders.encode_base64(datapart)
    msg.attach(datapart)
    msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get(r'test'))
    return [(msg, subj, None)]
388 388
def _makeintro(repo, sender, revs, patches, **opts):
    """make an introduction email, asking the user for content if needed

    email is returned as (subject, body, cumulative-diffstat)"""
    ui = repo.ui
    _charsets = mail._charsets(ui)

    # use the last revision which is likely to be a bookmarked head
    prefix = _formatprefix(ui, repo, revs.last(), opts.get(r'flag'),
                           0, len(patches), numbered=True)
    subj = (opts.get(r'subject') or
            prompt(ui, '(optional) Subject: ', rest=prefix, default=''))
    if not subj:
        return None         # skip intro if the user doesn't bother

    subj = prefix + ' ' + subj

    body = ''
    if opts.get(r'diffstat'):
        # generate a cumulative diffstat of the whole patch series
        diffstat = patch.diffstat(sum(patches, []))
        body = '\n' + diffstat
    else:
        diffstat = None

    body = _getdescription(repo, body, sender, **opts)
    msg = mail.mimeencode(ui, body, _charsets, opts.get(r'test'))
    msg['Subject'] = mail.headencode(ui, subj, _charsets,
                                     opts.get(r'test'))
    return (msg, subj, diffstat)
419 419
def _getpatchmsgs(repo, sender, revs, patchnames=None, **opts):
    """return a list of emails from a list of patches

    This involves introduction message creation if necessary.

    This function returns a list of "email" tuples (subject, content, None).
    """
    bytesopts = pycompat.byteskwargs(opts)
    ui = repo.ui
    _charsets = mail._charsets(ui)
    patches = list(_getpatches(repo, revs, **opts))
    msgs = []

    ui.write(_('this patch series consists of %d patches.\n\n')
             % len(patches))

    # build the intro message, or skip it if the user declines
    if introwanted(ui, bytesopts, len(patches)):
        msg = _makeintro(repo, sender, revs, patches, **opts)
        if msg:
            msgs.append(msg)

    # are we going to send more than one message?
    numbered = len(msgs) + len(patches) > 1

    # now generate the actual patch messages
    name = None
    assert len(revs) == len(patches)
    for i, (r, p) in enumerate(zip(revs, patches)):
        if patchnames:
            name = patchnames[i]
        msg = makepatch(ui, repo, r, p, bytesopts, _charsets,
                        i + 1, len(patches), numbered, name)
        msgs.append(msg)

    return msgs
456 456
def _getoutgoing(repo, dest, revs):
    '''Return the revisions present locally but not in dest'''
    ui = repo.ui
    url = ui.expandpath(dest or 'default-push', dest or 'default')
    url = hg.parseurl(url)[0]
    ui.status(_('comparing with %s\n') % util.hidepassword(url))

    # drop negative (nonexistent) revisions; default to tip when none remain
    revs = [r for r in revs if r >= 0]
    if not revs:
        revs = [repo.changelog.tiprev()]
    revs = repo.revs('outgoing(%s) and ::%ld', dest or '', revs)
    if not revs:
        ui.status(_("no changes found\n"))
    return revs
471 471
def _msgid(node, timestamp):
    """Build a Message-Id of the form <node.timestamp@hostname>.

    HGHOSTNAME overrides the detected fqdn (used by the test suite).
    """
    hostname = encoding.strtolocal(socket.getfqdn())
    hostname = encoding.environ.get('HGHOSTNAME', hostname)
    return '<%s.%d@%s>' % (node, timestamp, hostname)
476 476
# command-line options shared by the 'email' command (appended to its own
# option list in the @command registration below)
emailopts = [
    ('', 'body', None, _('send patches as inline message text (default)')),
    ('a', 'attach', None, _('send patches as attachments')),
    ('i', 'inline', None, _('send patches as inline attachments')),
    ('', 'bcc', [], _('email addresses of blind carbon copy recipients')),
    ('c', 'cc', [], _('email addresses of copy recipients')),
    ('', 'confirm', None, _('ask for confirmation before sending')),
    ('d', 'diffstat', None, _('add diffstat output to messages')),
    ('', 'date', '', _('use the given date as the sending date')),
    ('', 'desc', '', _('use the given file as the series description')),
    ('f', 'from', '', _('email address of sender')),
    ('n', 'test', None, _('print messages that would be sent')),
    ('m', 'mbox', '', _('write messages to mbox file instead of sending them')),
    ('', 'reply-to', [], _('email addresses replies should be sent to')),
    ('s', 'subject', '', _('subject of first message (intro or single patch)')),
    ('', 'in-reply-to', '', _('message identifier to reply to')),
    ('', 'flag', [], _('flags to add in subject prefixes')),
    ('t', 'to', [], _('email addresses of recipients'))]
495 495
496 496 @command('email',
497 497 [('g', 'git', None, _('use git extended diff format')),
498 498 ('', 'plain', None, _('omit hg patch header')),
499 499 ('o', 'outgoing', None,
500 500 _('send changes not found in the target repository')),
501 501 ('b', 'bundle', None, _('send changes not in target as a binary bundle')),
502 502 ('B', 'bookmark', '', _('send changes only reachable by given bookmark')),
503 503 ('', 'bundlename', 'bundle',
504 504 _('name of the bundle attachment file'), _('NAME')),
505 505 ('r', 'rev', [], _('a revision to send'), _('REV')),
506 506 ('', 'force', None, _('run even when remote repository is unrelated '
507 507 '(with -b/--bundle)')),
508 508 ('', 'base', [], _('a base changeset to specify instead of a destination '
509 509 '(with -b/--bundle)'), _('REV')),
510 510 ('', 'intro', None, _('send an introduction email for a single patch')),
511 511 ] + emailopts + cmdutil.remoteopts,
512 512 _('hg email [OPTION]... [DEST]...'))
513 513 def email(ui, repo, *revs, **opts):
514 514 '''send changesets by email
515 515
516 516 By default, diffs are sent in the format generated by
517 517 :hg:`export`, one per message. The series starts with a "[PATCH 0
518 518 of N]" introduction, which describes the series as a whole.
519 519
520 520 Each patch email has a Subject line of "[PATCH M of N] ...", using
521 521 the first line of the changeset description as the subject text.
522 522 The message contains two or three parts. First, the changeset
523 523 description.
524 524
525 525 With the -d/--diffstat option, if the diffstat program is
526 526 installed, the result of running diffstat on the patch is inserted.
527 527
528 528 Finally, the patch itself, as generated by :hg:`export`.
529 529
530 530 With the -d/--diffstat or --confirm options, you will be presented
531 531 with a final summary of all messages and asked for confirmation before
532 532 the messages are sent.
533 533
534 534 By default the patch is included as text in the email body for
535 535 easy reviewing. Using the -a/--attach option will instead create
536 536 an attachment for the patch. With -i/--inline an inline attachment
537 537 will be created. You can include a patch both as text in the email
538 538 body and as a regular or an inline attachment by combining the
539 539 -a/--attach or -i/--inline with the --body option.
540 540
541 541 With -B/--bookmark changesets reachable by the given bookmark are
542 542 selected.
543 543
544 544 With -o/--outgoing, emails will be generated for patches not found
545 545 in the destination repository (or only those which are ancestors
546 546 of the specified revisions if any are provided)
547 547
548 548 With -b/--bundle, changesets are selected as for --outgoing, but a
549 549 single email containing a binary Mercurial bundle as an attachment
550 550 will be sent. Use the ``patchbomb.bundletype`` config option to
551 551 control the bundle type as with :hg:`bundle --type`.
552 552
553 553 With -m/--mbox, instead of previewing each patchbomb message in a
554 554 pager or sending the messages directly, it will create a UNIX
555 555 mailbox file with the patch emails. This mailbox file can be
556 556 previewed with any mail user agent which supports UNIX mbox
557 557 files.
558 558
559 559 With -n/--test, all steps will run, but mail will not be sent.
560 560 You will be prompted for an email recipient address, a subject and
561 561 an introductory message describing the patches of your patchbomb.
562 562 Then when all is done, patchbomb messages are displayed.
563 563
564 564 In case email sending fails, you will find a backup of your series
565 565 introductory message in ``.hg/last-email.txt``.
566 566
567 567 The default behavior of this command can be customized through
568 568 configuration. (See :hg:`help patchbomb` for details)
569 569
570 570 Examples::
571 571
572 572 hg email -r 3000 # send patch 3000 only
573 573 hg email -r 3000 -r 3001 # send patches 3000 and 3001
574 574 hg email -r 3000:3005 # send patches 3000 through 3005
575 575 hg email 3000 # send patch 3000 (deprecated)
576 576
577 577 hg email -o # send all patches not in default
578 578 hg email -o DEST # send all patches not in DEST
579 579 hg email -o -r 3000 # send all ancestors of 3000 not in default
580 580 hg email -o -r 3000 DEST # send all ancestors of 3000 not in DEST
581 581
582 582 hg email -B feature # send all ancestors of feature bookmark
583 583
584 584 hg email -b # send bundle of all patches not in default
585 585 hg email -b DEST # send bundle of all patches not in DEST
586 586 hg email -b -r 3000 # bundle of all ancestors of 3000 not in default
587 587 hg email -b -r 3000 DEST # bundle of all ancestors of 3000 not in DEST
588 588
589 589 hg email -o -m mbox && # generate an mbox file...
590 590 mutt -R -f mbox # ... and view it with mutt
591 591 hg email -o -m mbox && # generate an mbox file ...
592 592 formail -s sendmail \\ # ... and use formail to send from the mbox
593 593 -bm -t < mbox # ... using sendmail
594 594
595 595 Before using this command, you will need to enable email in your
596 596 hgrc. See the [email] section in hgrc(5) for details.
597 597 '''
598 598 opts = pycompat.byteskwargs(opts)
599 599
600 600 _charsets = mail._charsets(ui)
601 601
602 602 bundle = opts.get('bundle')
603 603 date = opts.get('date')
604 604 mbox = opts.get('mbox')
605 605 outgoing = opts.get('outgoing')
606 606 rev = opts.get('rev')
607 607 bookmark = opts.get('bookmark')
608 608
609 609 if not (opts.get('test') or mbox):
610 610 # really sending
611 611 mail.validateconfig(ui)
612 612
613 613 if not (revs or rev or outgoing or bundle or bookmark):
614 614 raise error.Abort(_('specify at least one changeset with -B, -r or -o'))
615 615
616 616 if outgoing and bundle:
617 617 raise error.Abort(_("--outgoing mode always on with --bundle;"
618 618 " do not re-specify --outgoing"))
619 619 if rev and bookmark:
620 620 raise error.Abort(_("-r and -B are mutually exclusive"))
621 621
622 622 if outgoing or bundle:
623 623 if len(revs) > 1:
624 624 raise error.Abort(_("too many destinations"))
625 625 if revs:
626 626 dest = revs[0]
627 627 else:
628 628 dest = None
629 629 revs = []
630 630
631 631 if rev:
632 632 if revs:
633 633 raise error.Abort(_('use only one form to specify the revision'))
634 634 revs = rev
635 635 elif bookmark:
636 636 if bookmark not in repo._bookmarks:
637 637 raise error.Abort(_("bookmark '%s' not found") % bookmark)
638 638 revs = scmutil.bookmarkrevs(repo, bookmark)
639 639
640 640 revs = scmutil.revrange(repo, revs)
641 641 if outgoing:
642 642 revs = _getoutgoing(repo, dest, revs)
643 643 if bundle:
644 644 opts['revs'] = ["%d" % r for r in revs]
645 645
646 646 # check if revision exist on the public destination
647 647 publicurl = repo.ui.config('patchbomb', 'publicurl')
648 648 if publicurl:
649 649 repo.ui.debug('checking that revision exist in the public repo\n')
650 650 try:
651 651 publicpeer = hg.peer(repo, {}, publicurl)
652 652 except error.RepoError:
653 653 repo.ui.write_err(_('unable to access public repo: %s\n')
654 654 % publicurl)
655 655 raise
656 656 if not publicpeer.capable('known'):
657 657 repo.ui.debug('skipping existence checks: public repo too old\n')
658 658 else:
659 659 out = [repo[r] for r in revs]
660 660 known = publicpeer.known(h.node() for h in out)
661 661 missing = []
662 662 for idx, h in enumerate(out):
663 663 if not known[idx]:
664 664 missing.append(h)
665 665 if missing:
666 if 1 < len(missing):
666 if len(missing) > 1:
667 667 msg = _('public "%s" is missing %s and %i others')
668 668 msg %= (publicurl, missing[0], len(missing) - 1)
669 669 else:
670 670 msg = _('public url %s is missing %s')
671 671 msg %= (publicurl, missing[0])
672 672 missingrevs = [ctx.rev() for ctx in missing]
673 673 revhint = ' '.join('-r %s' % h
674 674 for h in repo.set('heads(%ld)', missingrevs))
675 675 hint = _("use 'hg push %s %s'") % (publicurl, revhint)
676 676 raise error.Abort(msg, hint=hint)
677 677
678 678 # start
679 679 if date:
680 680 start_time = dateutil.parsedate(date)
681 681 else:
682 682 start_time = dateutil.makedate()
683 683
684 684 def genmsgid(id):
685 685 return _msgid(id[:20], int(start_time[0]))
686 686
687 687 # deprecated config: patchbomb.from
688 688 sender = (opts.get('from') or ui.config('email', 'from') or
689 689 ui.config('patchbomb', 'from') or
690 690 prompt(ui, 'From', ui.username()))
691 691
692 692 if bundle:
693 693 stropts = pycompat.strkwargs(opts)
694 694 bundledata = _getbundle(repo, dest, **stropts)
695 695 bundleopts = stropts.copy()
696 696 bundleopts.pop(r'bundle', None) # already processed
697 697 msgs = _getbundlemsgs(repo, sender, bundledata, **bundleopts)
698 698 else:
699 699 msgs = _getpatchmsgs(repo, sender, revs, **pycompat.strkwargs(opts))
700 700
701 701 showaddrs = []
702 702
703 703 def getaddrs(header, ask=False, default=None):
704 704 configkey = header.lower()
705 705 opt = header.replace('-', '_').lower()
706 706 addrs = opts.get(opt)
707 707 if addrs:
708 708 showaddrs.append('%s: %s' % (header, ', '.join(addrs)))
709 709 return mail.addrlistencode(ui, addrs, _charsets, opts.get('test'))
710 710
711 711 # not on the command line: fallback to config and then maybe ask
712 712 addr = (ui.config('email', configkey) or
713 713 ui.config('patchbomb', configkey))
714 714 if not addr:
715 715 specified = (ui.hasconfig('email', configkey) or
716 716 ui.hasconfig('patchbomb', configkey))
717 717 if not specified and ask:
718 718 addr = prompt(ui, header, default=default)
719 719 if addr:
720 720 showaddrs.append('%s: %s' % (header, addr))
721 721 return mail.addrlistencode(ui, [addr], _charsets, opts.get('test'))
722 722 elif default:
723 723 return mail.addrlistencode(
724 724 ui, [default], _charsets, opts.get('test'))
725 725 return []
726 726
727 727 to = getaddrs('To', ask=True)
728 728 if not to:
729 729 # we can get here in non-interactive mode
730 730 raise error.Abort(_('no recipient addresses provided'))
731 731 cc = getaddrs('Cc', ask=True, default='')
732 732 bcc = getaddrs('Bcc')
733 733 replyto = getaddrs('Reply-To')
734 734
735 735 confirm = ui.configbool('patchbomb', 'confirm')
736 736 confirm |= bool(opts.get('diffstat') or opts.get('confirm'))
737 737
738 738 if confirm:
739 739 ui.write(_('\nFinal summary:\n\n'), label='patchbomb.finalsummary')
740 740 ui.write(('From: %s\n' % sender), label='patchbomb.from')
741 741 for addr in showaddrs:
742 742 ui.write('%s\n' % addr, label='patchbomb.to')
743 743 for m, subj, ds in msgs:
744 744 ui.write(('Subject: %s\n' % subj), label='patchbomb.subject')
745 745 if ds:
746 746 ui.write(ds, label='patchbomb.diffstats')
747 747 ui.write('\n')
748 748 if ui.promptchoice(_('are you sure you want to send (yn)?'
749 749 '$$ &Yes $$ &No')):
750 750 raise error.Abort(_('patchbomb canceled'))
751 751
752 752 ui.write('\n')
753 753
754 754 parent = opts.get('in_reply_to') or None
755 755 # angle brackets may be omitted, they're not semantically part of the msg-id
756 756 if parent is not None:
757 757 if not parent.startswith('<'):
758 758 parent = '<' + parent
759 759 if not parent.endswith('>'):
760 760 parent += '>'
761 761
762 762 sender_addr = eutil.parseaddr(encoding.strfromlocal(sender))[1]
763 763 sender = mail.addressencode(ui, sender, _charsets, opts.get('test'))
764 764 sendmail = None
765 765 firstpatch = None
766 766 progress = ui.makeprogress(_('sending'), unit=_('emails'), total=len(msgs))
767 767 for i, (m, subj, ds) in enumerate(msgs):
768 768 try:
769 769 m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
770 770 if not firstpatch:
771 771 firstpatch = m['Message-Id']
772 772 m['X-Mercurial-Series-Id'] = firstpatch
773 773 except TypeError:
774 774 m['Message-Id'] = genmsgid('patchbomb')
775 775 if parent:
776 776 m['In-Reply-To'] = parent
777 777 m['References'] = parent
778 778 if not parent or 'X-Mercurial-Node' not in m:
779 779 parent = m['Message-Id']
780 780
781 781 m['User-Agent'] = 'Mercurial-patchbomb/%s' % util.version()
782 782 m['Date'] = eutil.formatdate(start_time[0], localtime=True)
783 783
784 784 start_time = (start_time[0] + 1, start_time[1])
785 785 m['From'] = sender
786 786 m['To'] = ', '.join(to)
787 787 if cc:
788 788 m['Cc'] = ', '.join(cc)
789 789 if bcc:
790 790 m['Bcc'] = ', '.join(bcc)
791 791 if replyto:
792 792 m['Reply-To'] = ', '.join(replyto)
793 793 # Fix up all headers to be native strings.
794 794 # TODO(durin42): this should probably be cleaned up above in the future.
795 795 if pycompat.ispy3:
796 796 for hdr, val in list(m.items()):
797 797 change = False
798 798 if isinstance(hdr, bytes):
799 799 del m[hdr]
800 800 hdr = pycompat.strurl(hdr)
801 801 change = True
802 802 if isinstance(val, bytes):
803 803 val = pycompat.strurl(val)
804 804 if not change:
805 805 # prevent duplicate headers
806 806 del m[hdr]
807 807 change = True
808 808 if change:
809 809 m[hdr] = val
810 810 if opts.get('test'):
811 811 ui.status(_('displaying '), subj, ' ...\n')
812 812 ui.pager('email')
813 813 generator = _bytesgenerator(ui, mangle_from_=False)
814 814 try:
815 815 generator.flatten(m, 0)
816 816 ui.write('\n')
817 817 except IOError as inst:
818 818 if inst.errno != errno.EPIPE:
819 819 raise
820 820 else:
821 821 if not sendmail:
822 822 sendmail = mail.connect(ui, mbox=mbox)
823 823 ui.status(_('sending '), subj, ' ...\n')
824 824 progress.update(i, item=subj)
825 825 if not mbox:
826 826 # Exim does not remove the Bcc field
827 827 del m['Bcc']
828 828 fp = stringio()
829 829 generator = _bytesgenerator(fp, mangle_from_=False)
830 830 generator.flatten(m, 0)
831 831 alldests = to + bcc + cc
832 832 alldests = [encoding.strfromlocal(d) for d in alldests]
833 833 sendmail(sender_addr, alldests, fp.getvalue())
834 834
835 835 progress.complete()
@@ -1,1150 +1,1150
1 1 # shelve.py - save/restore working directory state
2 2 #
3 3 # Copyright 2013 Facebook, Inc.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 """save and restore changes to the working directory
9 9
10 10 The "hg shelve" command saves changes made to the working directory
11 11 and reverts those changes, resetting the working directory to a clean
12 12 state.
13 13
14 14 Later on, the "hg unshelve" command restores the changes saved by "hg
15 15 shelve". Changes can be restored even after updating to a different
16 16 parent, in which case Mercurial's merge machinery will resolve any
17 17 conflicts if necessary.
18 18
19 19 You can have more than one shelved change outstanding at a time; each
20 20 shelved change has a distinct name. For details, see the help for "hg
21 21 shelve".
22 22 """
23 23 from __future__ import absolute_import
24 24
25 25 import collections
26 26 import errno
27 27 import itertools
28 28 import stat
29 29
30 30 from mercurial.i18n import _
31 31 from mercurial import (
32 32 bookmarks,
33 33 bundle2,
34 34 bundlerepo,
35 35 changegroup,
36 36 cmdutil,
37 37 discovery,
38 38 error,
39 39 exchange,
40 40 hg,
41 41 lock as lockmod,
42 42 mdiff,
43 43 merge,
44 44 narrowspec,
45 45 node as nodemod,
46 46 patch,
47 47 phases,
48 48 pycompat,
49 49 registrar,
50 50 repair,
51 51 scmutil,
52 52 templatefilters,
53 53 util,
54 54 vfs as vfsmod,
55 55 )
56 56
57 57 from . import (
58 58 rebase,
59 59 )
60 60 from mercurial.utils import (
61 61 dateutil,
62 62 stringutil,
63 63 )
64 64
65 65 cmdtable = {}
66 66 command = registrar.command(cmdtable)
67 67 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
68 68 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
69 69 # be specifying the version(s) of Mercurial they are tested with, or
70 70 # leave the attribute unspecified.
71 71 testedwith = 'ships-with-hg-core'
72 72
73 73 configtable = {}
74 74 configitem = registrar.configitem(configtable)
75 75
76 76 configitem('shelve', 'maxbackups',
77 77 default=10,
78 78 )
79 79
80 80 backupdir = 'shelve-backup'
81 81 shelvedir = 'shelved'
82 82 shelvefileextensions = ['hg', 'patch', 'shelve']
83 83 # universal extension is present in all types of shelves
84 84 patchextension = 'patch'
85 85
86 86 # we never need the user, so we use a
87 87 # generic user for all shelve operations
88 88 shelveuser = 'shelve@localhost'
89 89
class shelvedfile(object):
    """Helper for the file storing a single shelve

    Handles common functions on shelve files (.hg/.patch) using
    the vfs layer"""
    def __init__(self, repo, name, filetype=None):
        # filetype is the extension ('hg', 'patch', 'shelve'); when it is
        # omitted, name is used as the filename unchanged
        self.repo = repo
        self.name = name
        self.vfs = vfsmod.vfs(repo.vfs.join(shelvedir))
        self.backupvfs = vfsmod.vfs(repo.vfs.join(backupdir))
        self.ui = self.repo.ui
        if filetype:
            self.fname = name + '.' + filetype
        else:
            self.fname = name

    def exists(self):
        """Return True if this shelve file exists in .hg/shelved."""
        return self.vfs.exists(self.fname)

    def filename(self):
        """Return the full path of this shelve file."""
        return self.vfs.join(self.fname)

    def backupfilename(self):
        """Return an unused path for this file in the backup directory.

        Tries the plain name first, then name-1.ext, name-2.ext, ...
        """
        def gennames(base):
            yield base
            base, ext = base.rsplit('.', 1)
            for i in itertools.count(1):
                yield '%s-%d.%s' % (base, i, ext)

        name = self.backupvfs.join(self.fname)
        for n in gennames(name):
            if not self.backupvfs.exists(n):
                return n

    def movetobackup(self):
        """Move this shelve file into the backup directory."""
        if not self.backupvfs.isdir():
            self.backupvfs.makedir()
        util.rename(self.filename(), self.backupfilename())

    def stat(self):
        """Return the stat result for this shelve file."""
        return self.vfs.stat(self.fname)

    def opener(self, mode='rb'):
        """Open this shelve file and return the file object.

        A missing file is turned into a user-facing Abort instead of
        leaking IOError(ENOENT).
        """
        try:
            return self.vfs(self.fname, mode)
        except IOError as err:
            if err.errno != errno.ENOENT:
                raise
            raise error.Abort(_("shelved change '%s' not found") % self.name)

    def applybundle(self):
        """Apply this shelve's bundle to the repo and return the shelved
        changeset context."""
        fp = self.opener()
        try:
            # shelved commits are created at internal phase when the repo
            # supports it, secret otherwise
            targetphase = phases.internal
            if not phases.supportinternal(self.repo):
                targetphase = phases.secret
            gen = exchange.readbundle(self.repo.ui, fp, self.fname, self.vfs)
            pretip = self.repo['tip']
            tr = self.repo.currenttransaction()
            bundle2.applybundle(self.repo, gen, tr,
                                source='unshelve',
                                url='bundle:' + self.vfs.join(self.fname),
                                targetphase=targetphase)
            shelvectx = self.repo['tip']
            if pretip == shelvectx:
                # tip did not move: the shelved revision duplicated an
                # existing one; look it up via the transaction record
                shelverev = tr.changes['revduplicates'][-1]
                shelvectx = self.repo[shelverev]
            return shelvectx
        finally:
            fp.close()

    def bundlerepo(self):
        """Return a bundle repository overlaying this shelve's bundle."""
        path = self.vfs.join(self.fname)
        return bundlerepo.instance(self.repo.baseui,
                                   'bundle://%s+%s' % (self.repo.root, path))

    def writebundle(self, bases, node):
        """Write a bundle for the shelved commits to this file.

        <bases> are the missing roots, <node> the shelved head.
        """
        cgversion = changegroup.safeversion(self.repo)
        if cgversion == '01':
            # old changegroup format: use a bzip2-compressed HG10 bundle
            btype = 'HG10BZ'
            compression = None
        else:
            btype = 'HG20'
            compression = 'BZ'

        repo = self.repo.unfiltered()

        outgoing = discovery.outgoing(repo, missingroots=bases,
                                      missingheads=[node])
        cg = changegroup.makechangegroup(repo, outgoing, cgversion, 'shelve')

        bundle2.writebundle(self.ui, cg, self.fname, btype, self.vfs,
                            compression=compression)

    def writeinfo(self, info):
        """Persist the metadata dict <info> as a simple key-value file."""
        scmutil.simplekeyvaluefile(self.vfs, self.fname).write(info)

    def readinfo(self):
        """Read back the metadata dict written by writeinfo()."""
        return scmutil.simplekeyvaluefile(self.vfs, self.fname).read()
189 189
class shelvedstate(object):
    """Handle persistence during unshelving operations.

    Handles saving and restoring a shelved state. Ensures that different
    versions of a shelved state are possible and handles them appropriately.
    """
    _version = 2
    _filename = 'shelvedstate'
    _keep = 'keep'
    _nokeep = 'nokeep'
    # colon is essential to differentiate from a real bookmark name
    _noactivebook = ':no-active-bookmark'

    @classmethod
    def _verifyandtransform(cls, d):
        """Some basic shelvestate syntactic verification and transformation"""
        try:
            # convert hex node strings to binary nodes in place
            d['originalwctx'] = nodemod.bin(d['originalwctx'])
            d['pendingctx'] = nodemod.bin(d['pendingctx'])
            d['parents'] = [nodemod.bin(h)
                            for h in d['parents'].split(' ')]
            d['nodestoremove'] = [nodemod.bin(h)
                                  for h in d['nodestoremove'].split(' ')]
        except (ValueError, TypeError, KeyError) as err:
            raise error.CorruptedState(pycompat.bytestr(err))

    @classmethod
    def _getversion(cls, repo):
        """Read version information from shelvestate file"""
        fp = repo.vfs(cls._filename)
        try:
            # the first line of the file is always the format version
            version = int(fp.readline().strip())
        except ValueError as err:
            raise error.CorruptedState(pycompat.bytestr(err))
        finally:
            fp.close()
        return version

    @classmethod
    def _readold(cls, repo):
        """Read the old position-based version of a shelvestate file"""
        # Order is important, because old shelvestate file uses it
        # to detemine values of fields (i.g. name is on the second line,
        # originalwctx is on the third and so forth). Please do not change.
        keys = ['version', 'name', 'originalwctx', 'pendingctx', 'parents',
                'nodestoremove', 'branchtorestore', 'keep', 'activebook']
        # this is executed only seldomly, so it is not a big deal
        # that we open this file twice
        fp = repo.vfs(cls._filename)
        d = {}
        try:
            for key in keys:
                d[key] = fp.readline().strip()
        finally:
            fp.close()
        return d

    @classmethod
    def load(cls, repo):
        """Load the persisted unshelve state and return a shelvedstate.

        Raises error.Abort for a state written by a newer format and
        error.CorruptedState when the file cannot be parsed.
        """
        version = cls._getversion(repo)
        if version < cls._version:
            d = cls._readold(repo)
        elif version == cls._version:
            d = scmutil.simplekeyvaluefile(repo.vfs, cls._filename)\
                       .read(firstlinenonkeyval=True)
        else:
            raise error.Abort(_('this version of shelve is incompatible '
                                'with the version used in this repo'))

        cls._verifyandtransform(d)
        try:
            obj = cls()
            obj.name = d['name']
            obj.wctx = repo[d['originalwctx']]
            obj.pendingctx = repo[d['pendingctx']]
            obj.parents = d['parents']
            obj.nodestoremove = d['nodestoremove']
            obj.branchtorestore = d.get('branchtorestore', '')
            obj.keep = d.get('keep') == cls._keep
            obj.activebookmark = ''
            # the sentinel value means no bookmark was active at shelve time
            if d.get('activebook', '') != cls._noactivebook:
                obj.activebookmark = d.get('activebook', '')
        except (error.RepoLookupError, KeyError) as err:
            raise error.CorruptedState(pycompat.bytestr(err))

        return obj

    @classmethod
    def save(cls, repo, name, originalwctx, pendingctx, nodestoremove,
             branchtorestore, keep=False, activebook=''):
        """Write the in-progress unshelve state to .hg/shelvedstate."""
        info = {
            "name": name,
            "originalwctx": nodemod.hex(originalwctx.node()),
            "pendingctx": nodemod.hex(pendingctx.node()),
            "parents": ' '.join([nodemod.hex(p)
                                 for p in repo.dirstate.parents()]),
            "nodestoremove": ' '.join([nodemod.hex(n)
                                      for n in nodestoremove]),
            "branchtorestore": branchtorestore,
            "keep": cls._keep if keep else cls._nokeep,
            "activebook": activebook or cls._noactivebook
        }
        scmutil.simplekeyvaluefile(repo.vfs, cls._filename)\
               .write(info, firstline=("%d" % cls._version))

    @classmethod
    def clear(cls, repo):
        """Remove the persisted unshelve state, if any."""
        repo.vfs.unlinkpath(cls._filename, ignoremissing=True)
298 298
def cleanupoldbackups(repo):
    """Trim the shelve backup directory to 'shelve.maxbackups' entries.

    Backups are ordered by mtime and the oldest ones beyond the limit are
    deleted, except that backups sharing the cutoff timestamp are kept,
    because mtime alone cannot establish their exact order.
    """
    # NOTE: this SOURCE span contained both the pre- and post-cleanup
    # variant of the maxbackups condition (a diff artifact); only the
    # non-Yoda form is kept here.
    vfs = vfsmod.vfs(repo.vfs.join(backupdir))
    maxbackups = repo.ui.configint('shelve', 'maxbackups')
    hgfiles = [f for f in vfs.listdir()
               if f.endswith('.' + patchextension)]
    hgfiles = sorted([(vfs.stat(f)[stat.ST_MTIME], f) for f in hgfiles])
    if maxbackups > 0 and maxbackups < len(hgfiles):
        bordermtime = hgfiles[-maxbackups][0]
    else:
        bordermtime = None
    for mtime, f in hgfiles[:len(hgfiles) - maxbackups]:
        if mtime == bordermtime:
            # keep it, because timestamp can't decide exact order of backups
            continue
        base = f[:-(1 + len(patchextension))]
        for ext in shelvefileextensions:
            vfs.tryunlink(base + '.' + ext)
316 316
317 317 def _backupactivebookmark(repo):
318 318 activebookmark = repo._activebookmark
319 319 if activebookmark:
320 320 bookmarks.deactivate(repo)
321 321 return activebookmark
322 322
323 323 def _restoreactivebookmark(repo, mark):
324 324 if mark:
325 325 bookmarks.activate(repo, mark)
326 326
def _aborttransaction(repo):
    '''Abort current transaction for shelve/unshelve, but keep dirstate
    '''
    # dirstate and narrowspec are saved to backup files before the abort
    # and restored afterwards, so they survive the transaction rollback
    tr = repo.currenttransaction()
    dirstatebackupname = 'dirstate.shelve'
    narrowspecbackupname = 'narrowspec.shelve'
    repo.dirstate.savebackup(tr, dirstatebackupname)
    narrowspec.savebackup(repo, narrowspecbackupname)
    tr.abort()
    narrowspec.restorebackup(repo, narrowspecbackupname)
    repo.dirstate.restorebackup(None, dirstatebackupname)
338 338
def getshelvename(repo, parent, opts):
    """Decide on the name this shelve is going to have"""
    name = opts.get('name')
    # default label: active bookmark, else branch name, else 'default'
    label = repo._activebookmark or parent.branch() or 'default'
    # slashes aren't allowed in filenames, therefore we rename it
    label = label.replace('/', '_').replace('\\', '_')
    # filenames must not start with '.' as it should not be hidden
    if label.startswith('.'):
        label = label.replace('.', '_', 1)

    if name:
        if shelvedfile(repo, name, patchextension).exists():
            e = _("a shelved change named '%s' already exists") % name
            raise error.Abort(e)
        # ensure we are not creating a subdirectory or a hidden file
        if '/' in name or '\\' in name:
            raise error.Abort(_('shelved change names can not contain slashes'))
        if name.startswith('.'):
            raise error.Abort(_("shelved change names can not start with '.'"))
        return name

    # no explicit name: try the bare label, then label-01, label-02, ...
    # until an unused name is found
    def gennames():
        yield label
        for i in itertools.count(1):
            yield '%s-%02d' % (label, i)

    for candidate in gennames():
        if not shelvedfile(repo, candidate, patchextension).exists():
            return candidate
372 372
def mutableancestors(ctx):
    """Yield the nodes of all mutable ancestors of *ctx* (inclusive).

    Much faster than the revset ancestors(ctx) & draft()."""
    seen = {nodemod.nullrev}
    queue = collections.deque([ctx])
    while queue:
        current = queue.popleft()
        yield current.node()
        for parent in current.parents():
            prev = parent.rev()
            if prev in seen:
                continue
            seen.add(prev)
            # only keep walking through mutable history
            if parent.mutable():
                queue.append(parent)
389 389
def getcommitfunc(extra, interactive, editor=False):
    """Build the commit callback used when creating a shelve.

    Returns interactivecommitfunc when <interactive> is true (the
    signature expected by cmdutil.dorecord), otherwise commitfunc (the
    signature expected by cmdutil.commit). Both commit as the generic
    shelve user with the given <extra> metadata at internal (or, when
    unsupported, secret) phase.
    """
    def commitfunc(ui, repo, message, match, opts):
        # temporarily disable mq's checkapplied guard while committing
        hasmq = util.safehasattr(repo, 'mq')
        if hasmq:
            saved, repo.mq.checkapplied = repo.mq.checkapplied, False

        targetphase = phases.internal
        if not phases.supportinternal(repo):
            targetphase = phases.secret
        overrides = {('phases', 'new-commit'): targetphase}
        try:
            editor_ = False
            if editor:
                editor_ = cmdutil.getcommiteditor(editform='shelve.shelve',
                                                  **pycompat.strkwargs(opts))
            with repo.ui.configoverride(overrides):
                return repo.commit(message, shelveuser, opts.get('date'),
                                   match, editor=editor_, extra=extra)
        finally:
            # restore the mq guard even if the commit raised
            if hasmq:
                repo.mq.checkapplied = saved

    def interactivecommitfunc(ui, repo, *pats, **opts):
        # adapter: translate dorecord's (*pats, **opts) calling convention
        # into a plain commitfunc call
        opts = pycompat.byteskwargs(opts)
        match = scmutil.match(repo['.'], pats, {})
        message = opts['message']
        return commitfunc(ui, repo, message, match, opts)

    return interactivecommitfunc if interactive else commitfunc
419 419
def _nothingtoshelvemessaging(ui, repo, pats, opts):
    """Tell the user that nothing was shelved, mentioning locally missing
    (deleted but tracked) files when there are any."""
    matcher = scmutil.match(repo[None], pats, opts)
    wstatus = repo.status(match=matcher)
    if not wstatus.deleted:
        ui.status(_("nothing changed\n"))
        return
    ui.status(_("nothing changed (%d missing files, see "
                "'hg status')\n") % len(wstatus.deleted))
427 427
def _shelvecreatedcommit(repo, node, name):
    """Write the three files that make up shelve <name> for commit <node>:
    the .shelve metadata file, the .hg bundle and the .patch file."""
    info = {'node': nodemod.hex(node)}
    shelvedfile(repo, name, 'shelve').writeinfo(info)
    # the bundle must contain every mutable ancestor so it can be applied
    # on top of public history later
    bases = list(mutableancestors(repo[node]))
    shelvedfile(repo, name, 'hg').writebundle(bases, node)
    with shelvedfile(repo, name, patchextension).opener('wb') as fp:
        cmdutil.exportfile(repo, [node], fp, opts=mdiff.diffopts(git=True))
435 435
def _includeunknownfiles(repo, pats, opts, extra):
    """Add unknown (untracked) files matching <pats> to the working copy
    and record their NUL-joined names in extra['shelve_unknown']."""
    s = repo.status(match=scmutil.match(repo[None], pats, opts),
                    unknown=True)
    if s.unknown:
        extra['shelve_unknown'] = '\0'.join(s.unknown)
        repo[None].add(s.unknown)
442 442
def _finishshelve(repo):
    """Finish the shelve transaction.

    With internal-phase support the transaction is closed normally,
    carrying the dirstate across the close via a backup; otherwise the
    transaction is aborted through _aborttransaction(), which also
    preserves the dirstate.
    """
    if phases.supportinternal(repo):
        backupname = 'dirstate.shelve'
        tr = repo.currenttransaction()
        repo.dirstate.savebackup(tr, backupname)
        tr.close()
        repo.dirstate.restorebackup(None, backupname)
    else:
        _aborttransaction(repo)
452 452
def createcmd(ui, repo, pats, opts):
    """subcommand that creates a new shelve

    Takes the working-directory lock, refuses to run while another
    multi-step command is unfinished, then delegates to _docreatecmd().
    """
    wlock = repo.wlock()
    with wlock:
        cmdutil.checkunfinished(repo)
        return _docreatecmd(ui, repo, pats, opts)
458 458
def _docreatecmd(ui, repo, pats, opts):
    """Create the shelve: commit the working-copy changes as a hidden
    (internal/secret) changeset, write the shelve files, then update back
    to the parent so the working directory is clean again.

    Returns 1 when there was nothing to shelve.
    """
    wctx = repo[None]
    parents = wctx.parents()
    if len(parents) > 1:
        raise error.Abort(_('cannot shelve while merging'))
    parent = parents[0]
    origbranch = wctx.branch()

    # default shelve description, derived from the parent's first
    # description line
    if parent.node() != nodemod.nullid:
        desc = "changes to: %s" % parent.description().split('\n', 1)[0]
    else:
        desc = '(changes in empty repository)'

    if not opts.get('message'):
        opts['message'] = desc

    lock = tr = activebookmark = None
    try:
        lock = repo.lock()

        # use an uncommitted transaction to generate the bundle to avoid
        # pull races. ensure we don't print the abort message to stderr.
        tr = repo.transaction('commit', report=lambda x: None)

        interactive = opts.get('interactive', False)
        # --addremove takes precedence over --unknown
        includeunknown = (opts.get('unknown', False) and
                          not opts.get('addremove', False))

        name = getshelvename(repo, parent, opts)
        activebookmark = _backupactivebookmark(repo)
        extra = {'internal': 'shelve'}
        if includeunknown:
            _includeunknownfiles(repo, pats, opts, extra)

        if _iswctxonnewbranch(repo) and not _isbareshelve(pats, opts):
            # In non-bare shelve we don't store newly created branch
            # at bundled commit
            repo.dirstate.setbranch(repo['.'].branch())

        commitfunc = getcommitfunc(extra, interactive, editor=True)
        if not interactive:
            node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
        else:
            node = cmdutil.dorecord(ui, repo, commitfunc, None,
                                    False, cmdutil.recordfilter, *pats,
                                    **pycompat.strkwargs(opts))
        if not node:
            _nothingtoshelvemessaging(ui, repo, pats, opts)
            return 1

        _shelvecreatedcommit(repo, node, name)

        if ui.formatted():
            desc = stringutil.ellipsis(desc, ui.termwidth())
        ui.status(_('shelved as %s\n') % name)
        # restore a clean working directory on the original parent
        hg.update(repo, parent.node())
        if origbranch != repo['.'].branch() and not _isbareshelve(pats, opts):
            repo.dirstate.setbranch(origbranch)

        _finishshelve(repo)
    finally:
        # always restore the bookmark and release tr/lock, even on abort
        _restoreactivebookmark(repo, activebookmark)
        lockmod.release(tr, lock)
522 522
523 523 def _isbareshelve(pats, opts):
524 524 return (not pats
525 525 and not opts.get('interactive', False)
526 526 and not opts.get('include', False)
527 527 and not opts.get('exclude', False))
528 528
529 529 def _iswctxonnewbranch(repo):
530 530 return repo[None].branch() != repo['.'].branch()
531 531
def cleanupcmd(ui, repo):
    """subcommand that deletes all shelves"""

    with repo.wlock():
        for (name, _type) in repo.vfs.readdir(shelvedir):
            suffix = name.rsplit('.', 1)[-1]
            # only move files with a known shelve extension; anything else
            # in the directory is left alone
            if suffix in shelvefileextensions:
                shelvedfile(repo, name).movetobackup()
            cleanupoldbackups(repo)
541 541
def deletecmd(ui, repo, pats):
    """subcommand that deletes a specific shelve"""
    if not pats:
        raise error.Abort(_('no shelved changes specified!'))
    with repo.wlock():
        try:
            for name in pats:
                for suffix in shelvefileextensions:
                    shfile = shelvedfile(repo, name, suffix)
                    # patch file is necessary, as it should
                    # be present for any kind of shelve,
                    # but the .hg file is optional as in future we
                    # will add obsolete shelve with does not create a
                    # bundle
                    if shfile.exists() or suffix == patchextension:
                        shfile.movetobackup()
            cleanupoldbackups(repo)
        except OSError as err:
            if err.errno != errno.ENOENT:
                raise
            # ENOENT while moving the mandatory patch file: the named
            # shelve does not exist
            raise error.Abort(_("shelved change '%s' not found") % name)
563 563
def listshelves(repo):
    """return all shelves in repo as list of (time, filename)"""
    try:
        names = repo.vfs.readdir(shelvedir)
    except OSError as err:
        if err.errno != errno.ENOENT:
            raise
        # shelve directory does not exist yet: no shelves
        return []
    info = []
    for (name, _type) in names:
        pfx, sfx = name.rsplit('.', 1)
        # count each shelve once, via its mandatory .patch file
        if not pfx or sfx != patchextension:
            continue
        st = shelvedfile(repo, name).stat()
        info.append((st[stat.ST_MTIME], shelvedfile(repo, pfx).filename()))
    # newest first
    return sorted(info, reverse=True)
580 580
def listcmd(ui, repo, pats, opts):
    """subcommand that displays the list of shelves"""
    pats = set(pats)
    width = 80
    if not ui.plain():
        width = ui.termwidth()
    # only the first (newest) shelve gets the 'shelve.newest' label
    namelabel = 'shelve.newest'
    ui.pager('shelve')
    for mtime, name in listshelves(repo):
        sname = util.split(name)[1]
        if pats and sname not in pats:
            continue
        ui.write(sname, label=namelabel)
        namelabel = 'shelve.name'
        if ui.quiet:
            ui.write('\n')
            continue
        # pad name and age into fixed-width columns, tracking the
        # consumed width so the description can be ellipsized to fit
        ui.write(' ' * (16 - len(sname)))
        used = 16
        date = dateutil.makedate(mtime)
        age = '(%s)' % templatefilters.age(date, abbrev=True)
        ui.write(age, label='shelve.age')
        ui.write(' ' * (12 - len(age)))
        used += 12
        with open(name + '.' + patchextension, 'rb') as fp:
            # skip '#'-prefixed patch header lines; the first other line
            # is the shelve description
            while True:
                line = fp.readline()
                if not line:
                    break
                if not line.startswith('#'):
                    desc = line.rstrip()
                    if ui.formatted():
                        desc = stringutil.ellipsis(desc, width - used)
                    ui.write(desc)
                    break
            ui.write('\n')
            if not (opts['patch'] or opts['stat']):
                continue
            # the remainder of the patch file is the diff itself
            difflines = fp.readlines()
            if opts['patch']:
                for chunk, label in patch.difflabel(iter, difflines):
                    ui.write(chunk, label=label)
            if opts['stat']:
                for chunk, label in patch.diffstatui(difflines, width=width):
                    ui.write(chunk, label=label)
626 626
def patchcmds(ui, repo, pats, opts):
    """subcommand that displays shelves

    With no names given, shows the most recent shelve.  Aborts if a named
    shelf does not exist or if there are no shelves at all.
    """
    # idiom fix: use truthiness instead of len(pats) == 0
    if not pats:
        # default to the most recent shelve
        shelves = listshelves(repo)
        if not shelves:
            raise error.Abort(_("there are no shelves to show"))
        mtime, name = shelves[0]
        sname = util.split(name)[1]
        pats = [sname]

    # validate every requested shelf before printing anything
    for shelfname in pats:
        if not shelvedfile(repo, shelfname, patchextension).exists():
            raise error.Abort(_("cannot find shelf %s") % shelfname)

    listcmd(ui, repo, pats, opts)
642 642
def checkparents(repo, state):
    """Abort if the working directory parents changed since the unshelve
    state was written."""
    if repo.dirstate.parents() != state.parents:
        raise error.Abort(_('working directory parents do not match unshelve '
                            'state'))
648 648
def pathtofiles(repo, files):
    """Map repo-root-relative file names to paths relative to the cwd."""
    cwd = repo.getcwd()
    return [repo.pathto(name, cwd) for name in files]
652 652
def unshelveabort(ui, repo, state, opts):
    """subcommand that abort an in-progress unshelve"""
    with repo.lock():
        try:
            checkparents(repo, state)

            # move the working copy back onto the pre-conflict context
            merge.update(repo, state.pendingctx, False, True)
            if (state.activebookmark
                and state.activebookmark in repo._bookmarks):
                bookmarks.activate(repo, state.activebookmark)

            # clean up the rebase state left behind by the conflicting
            # unshelve, if any
            if repo.vfs.exists('unshelverebasestate'):
                repo.vfs.rename('unshelverebasestate', 'rebasestate')
                rebase.clearstatus(repo)

            mergefiles(ui, repo, state.wctx, state.pendingctx)
            # internal-phase repos don't need stripping; the commits are
            # hidden already
            if not phases.supportinternal(repo):
                repair.strip(ui, repo, state.nodestoremove, backup=False,
                             topic='shelve')
        finally:
            # always drop the state file, even if the abort itself failed
            shelvedstate.clear(repo)
            ui.warn(_("unshelve of '%s' aborted\n") % state.name)
675 675
def mergefiles(ui, repo, wctx, shelvectx):
    """updates to wctx and merges the changes from shelvectx into the
    dirstate."""
    with ui.configoverride({('ui', 'quiet'): True}):
        hg.update(repo, wctx.node())
        # files touched by the shelve and by its parent commit
        files = []
        files.extend(shelvectx.files())
        files.extend(shelvectx.parents()[0].files())

        # revert will overwrite unknown files, so move them out of the way
        for file in repo.status(unknown=True).unknown:
            if file in files:
                util.rename(file, scmutil.origpath(ui, repo, file))
        # swallow revert's output entirely
        ui.pushbuffer(True)
        cmdutil.revert(ui, repo, shelvectx, repo.dirstate.parents(),
                       *pathtofiles(repo, files),
                       **{r'no_backup': True})
        ui.popbuffer()
694 694
def restorebranch(ui, repo, branchtorestore):
    """Switch the dirstate onto *branchtorestore* if it differs from the
    current branch; no-op for an empty name."""
    if not branchtorestore:
        return
    if branchtorestore == repo.dirstate.branch():
        return
    repo.dirstate.setbranch(branchtorestore)
    ui.status(_('marked working directory as branch %s\n')
              % branchtorestore)
700 700
def unshelvecleanup(ui, repo, name, opts):
    """remove related files after an unshelve"""
    if opts.get('keep'):
        return
    # move every per-shelve file (.patch, .hg, ...) into the backup area
    for suffix in shelvefileextensions:
        backing = shelvedfile(repo, name, suffix)
        if backing.exists():
            backing.movetobackup()
    cleanupoldbackups(repo)
709 709
def unshelvecontinue(ui, repo, state, opts):
    """subcommand to continue an in-progress unshelve"""
    # We're finishing off a merge. First parent is our original
    # parent, second is the temporary "fake" commit we're unshelving.
    with repo.lock():
        checkparents(repo, state)
        ms = merge.mergestate.read(repo)
        if list(ms.unresolved()):
            raise error.Abort(
                _("unresolved conflicts, can't continue"),
                hint=_("see 'hg resolve', then 'hg unshelve --continue'"))

        shelvectx = repo[state.parents[1]]
        pendingctx = state.pendingctx

        with repo.dirstate.parentchange():
            repo.setparents(state.pendingctx.node(), nodemod.nullid)
            repo.dirstate.write(repo.currenttransaction())

        # commit the resolved result as internal (or secret, when the repo
        # cannot store internal-phase commits)
        targetphase = phases.internal
        if not phases.supportinternal(repo):
            targetphase = phases.secret
        overrides = {('phases', 'new-commit'): targetphase}
        with repo.ui.configoverride(overrides, 'unshelve'):
            with repo.dirstate.parentchange():
                repo.setparents(state.parents[0], nodemod.nullid)
                newnode = repo.commit(text=shelvectx.description(),
                                      extra=shelvectx.extra(),
                                      user=shelvectx.user(),
                                      date=shelvectx.date())

        if newnode is None:
            # If it ended up being a no-op commit, then the normal
            # merge state clean-up path doesn't happen, so do it
            # here. Fix issue5494
            merge.mergestate.clean(repo)
            shelvectx = state.pendingctx
            msg = _('note: unshelved changes already existed '
                    'in the working copy\n')
            ui.status(msg)
        else:
            # only strip the shelvectx if we produced one
            state.nodestoremove.append(newnode)
            shelvectx = repo[newnode]

        hg.updaterepo(repo, pendingctx.node(), overwrite=False)

        # clear any rebase state saved when the conflict was hit
        if repo.vfs.exists('unshelverebasestate'):
            repo.vfs.rename('unshelverebasestate', 'rebasestate')
            rebase.clearstatus(repo)

        mergefiles(ui, repo, state.wctx, shelvectx)
        restorebranch(ui, repo, state.branchtorestore)

        if not phases.supportinternal(repo):
            repair.strip(ui, repo, state.nodestoremove, backup=False,
                         topic='shelve')
        _restoreactivebookmark(repo, state.activebookmark)
        shelvedstate.clear(repo)
        unshelvecleanup(ui, repo, state.name, opts)
        ui.status(_("unshelve of '%s' complete\n") % state.name)
771 771
def _commitworkingcopychanges(ui, repo, opts, tmpwctx):
    """Temporarily commit working copy changes before moving unshelve commit

    Returns a (tmpwctx, addedbefore) pair: the context to unshelve on top
    of (the temporary commit, or the unchanged *tmpwctx* when the working
    copy is clean) and the frozenset of files already added beforehand.
    """
    # Store pending changes in a commit and remember added in case a shelve
    # contains unknown files that are part of the pending change
    s = repo.status()
    addedbefore = frozenset(s.added)
    if not (s.modified or s.added or s.removed):
        return tmpwctx, addedbefore
    ui.status(_("temporarily committing pending changes "
                "(restore with 'hg unshelve --abort')\n"))
    extra = {'internal': 'shelve'}
    commitfunc = getcommitfunc(extra=extra, interactive=False,
                               editor=False)
    tempopts = {}
    tempopts['message'] = "pending changes temporary commit"
    tempopts['date'] = opts.get('date')
    # commit quietly; the user only sees the status message above
    with ui.configoverride({('ui', 'quiet'): True}):
        node = cmdutil.commit(ui, repo, commitfunc, [], tempopts)
    tmpwctx = repo[node]
    return tmpwctx, addedbefore
792 792
def _unshelverestorecommit(ui, repo, basename):
    """Recreate commit in the repository during the unshelve

    Returns (repo, shelvectx) where *repo* is unfiltered and *shelvectx*
    is the changeset holding the shelved changes.
    """
    repo = repo.unfiltered()
    node = None
    # the .shelve metadata file records the node of a previous unbundle
    if shelvedfile(repo, basename, 'shelve').exists():
        node = shelvedfile(repo, basename, 'shelve').readinfo()['node']
    if node is None or node not in repo:
        with ui.configoverride({('ui', 'quiet'): True}):
            shelvectx = shelvedfile(repo, basename, 'hg').applybundle()
        # We might not strip the unbundled changeset, so we should keep track of
        # the unshelve node in case we need to reuse it (eg: unshelve --keep)
        if node is None:
            info = {'node': nodemod.hex(shelvectx.node())}
            shelvedfile(repo, basename, 'shelve').writeinfo(info)
    else:
        # the previously unbundled changeset is still present; reuse it
        shelvectx = repo[node]

    return repo, shelvectx
811 811
def _rebaserestoredcommit(ui, repo, opts, tr, oldtiprev, basename, pctx,
                          tmpwctx, shelvectx, branchtorestore,
                          activebookmark):
    """Rebase restored commit from its original location to a destination"""
    # If the shelve is not immediately on top of the commit
    # we'll be merging with, rebase it to be on top.
    if tmpwctx.node() == shelvectx.parents()[0].node():
        return shelvectx

    overrides = {
        ('ui', 'forcemerge'): opts.get('tool', ''),
        ('phases', 'new-commit'): phases.secret,
    }
    with repo.ui.configoverride(overrides, 'unshelve'):
        ui.status(_('rebasing shelved changes\n'))
        stats = merge.graft(repo, shelvectx, shelvectx.p1(),
                            labels=['shelve', 'working-copy'],
                            keepconflictparent=True)
        if stats.unresolvedcount:
            tr.close()

            # save enough state for --continue/--abort to find and strip
            # the commits created since the operation started
            nodestoremove = [repo.changelog.node(rev)
                             for rev in pycompat.xrange(oldtiprev, len(repo))]
            shelvedstate.save(repo, basename, pctx, tmpwctx, nodestoremove,
                              branchtorestore, opts.get('keep'), activebookmark)
            raise error.InterventionRequired(
                _("unresolved conflicts (see 'hg resolve', then "
                  "'hg unshelve --continue')"))

        with repo.dirstate.parentchange():
            repo.setparents(tmpwctx.node(), nodemod.nullid)
            newnode = repo.commit(text=shelvectx.description(),
                                  extra=shelvectx.extra(),
                                  user=shelvectx.user(),
                                  date=shelvectx.date())

        if newnode is None:
            # If it ended up being a no-op commit, then the normal
            # merge state clean-up path doesn't happen, so do it
            # here. Fix issue5494
            merge.mergestate.clean(repo)
            shelvectx = tmpwctx
            msg = _('note: unshelved changes already existed '
                    'in the working copy\n')
            ui.status(msg)
        else:
            shelvectx = repo[newnode]
            hg.updaterepo(repo, tmpwctx.node(), False)

    return shelvectx
862 862
863 863 def _forgetunknownfiles(repo, shelvectx, addedbefore):
864 864 # Forget any files that were unknown before the shelve, unknown before
865 865 # unshelve started, but are now added.
866 866 shelveunknown = shelvectx.extra().get('shelve_unknown')
867 867 if not shelveunknown:
868 868 return
869 869 shelveunknown = frozenset(shelveunknown.split('\0'))
870 870 addedafter = frozenset(repo.status().added)
871 871 toforget = (addedafter & shelveunknown) - addedbefore
872 872 repo[None].forget(toforget)
873 873
def _finishunshelve(repo, oldtiprev, tr, activebookmark):
    # finalize a successful unshelve: put the bookmark back and discard
    # the temporary commits created since oldtiprev
    _restoreactivebookmark(repo, activebookmark)
    # The transaction aborting will strip all the commits for us,
    # but it doesn't update the inmemory structures, so addchangegroup
    # hooks still fire and try to operate on the missing commits.
    # Clean up manually to prevent this.
    repo.unfiltered().changelog.strip(oldtiprev, tr)
    _aborttransaction(repo)
882 882
def _checkunshelveuntrackedproblems(ui, repo, shelvectx):
    """Abort when the shelved change touches files that are currently
    missing (deleted) from the working copy."""
    missing = set(repo.status().deleted) & set(shelvectx.files())
    if missing:
        raise error.Abort(_("shelved change touches missing files"),
                          hint=_("run hg status to see which files are "
                                 "missing"))
893 893
@command('unshelve',
         [('a', 'abort', None,
           _('abort an incomplete unshelve operation')),
          ('c', 'continue', None,
           _('continue an incomplete unshelve operation')),
          ('k', 'keep', None,
           _('keep shelve after unshelving')),
          ('n', 'name', '',
           _('restore shelved change with given name'), _('NAME')),
          ('t', 'tool', '', _('specify merge tool')),
          ('', 'date', '',
           _('set date for temporary commits (DEPRECATED)'), _('DATE'))],
         _('hg unshelve [[-n] SHELVED]'))
def unshelve(ui, repo, *shelved, **opts):
    """restore a shelved change to the working directory

    This command accepts an optional name of a shelved change to
    restore. If none is given, the most recent shelved change is used.

    If a shelved change is applied successfully, the bundle that
    contains the shelved changes is moved to a backup location
    (.hg/shelve-backup).

    Since you can restore a shelved change on top of an arbitrary
    commit, it is possible that unshelving will result in a conflict
    between your changes and the commits you are unshelving onto. If
    this occurs, you must resolve the conflict, then use
    ``--continue`` to complete the unshelve operation. (The bundle
    will not be moved until you successfully complete the unshelve.)

    (Alternatively, you can use ``--abort`` to abandon an unshelve
    that causes a conflict. This reverts the unshelved changes, and
    leaves the bundle in place.)

    If bare shelved change(when no files are specified, without interactive,
    include and exclude option) was done on newly created branch it would
    restore branch information to the working directory.

    After a successful unshelve, the shelved changes are stored in a
    backup directory. Only the N most recent backups are kept. N
    defaults to 10 but can be overridden using the ``shelve.maxbackups``
    configuration option.

    .. container:: verbose

       Timestamp in seconds is used to decide order of backups. More
       than ``maxbackups`` backups are kept, if same timestamp
       prevents from deciding exact order of them, for safety.
    """
    # hold the wlock for the entire operation; _dounshelve does the work
    with repo.wlock():
        return _dounshelve(ui, repo, *shelved, **opts)
945 945
def _dounshelve(ui, repo, *shelved, **opts):
    # implementation of the unshelve command; caller holds the wlock
    opts = pycompat.byteskwargs(opts)
    abortf = opts.get('abort')
    continuef = opts.get('continue')
    if not abortf and not continuef:
        cmdutil.checkunfinished(repo)
    shelved = list(shelved)
    if opts.get("name"):
        shelved.append(opts["name"])

    if abortf or continuef:
        if abortf and continuef:
            raise error.Abort(_('cannot use both abort and continue'))
        if shelved:
            raise error.Abort(_('cannot combine abort/continue with '
                                'naming a shelved change'))
        if abortf and opts.get('tool', False):
            ui.warn(_('tool option will be ignored\n'))

        try:
            state = shelvedstate.load(repo)
            if opts.get('keep') is None:
                opts['keep'] = state.keep
        except IOError as err:
            if err.errno != errno.ENOENT:
                raise
            # no state file: nothing is in progress
            cmdutil.wrongtooltocontinue(repo, _('unshelve'))
        except error.CorruptedState as err:
            ui.debug(pycompat.bytestr(err) + '\n')
            if continuef:
                msg = _('corrupted shelved state file')
                hint = _('please run hg unshelve --abort to abort unshelve '
                         'operation')
                raise error.Abort(msg, hint=hint)
            elif abortf:
                msg = _('could not read shelved state file, your working copy '
                        'may be in an unexpected state\nplease update to some '
                        'commit\n')
                ui.warn(msg)
                shelvedstate.clear(repo)
            return

    if abortf:
        return unshelveabort(ui, repo, state, opts)
    elif continuef:
        return unshelvecontinue(ui, repo, state, opts)
    elif len(shelved) > 1:
        raise error.Abort(_('can only unshelve one change at a time'))
    elif not shelved:
        # no name given: pick the most recent shelve
        shelved = listshelves(repo)
        if not shelved:
            raise error.Abort(_('no shelved changes to apply!'))
        basename = util.split(shelved[0][1])[1]
        ui.status(_("unshelving change '%s'\n") % basename)
    else:
        basename = shelved[0]

    if not shelvedfile(repo, basename, patchextension).exists():
        raise error.Abort(_("shelved change '%s' not found") % basename)

    repo = repo.unfiltered()
    lock = tr = None
    try:
        lock = repo.lock()
        tr = repo.transaction('unshelve', report=lambda x: None)
        oldtiprev = len(repo)

        pctx = repo['.']
        tmpwctx = pctx
        # The goal is to have a commit structure like so:
        # ...-> pctx -> tmpwctx -> shelvectx
        # where tmpwctx is an optional commit with the user's pending changes
        # and shelvectx is the unshelved changes. Then we merge it all down
        # to the original pctx.

        activebookmark = _backupactivebookmark(repo)
        tmpwctx, addedbefore = _commitworkingcopychanges(ui, repo, opts,
                                                         tmpwctx)
        repo, shelvectx = _unshelverestorecommit(ui, repo, basename)
        _checkunshelveuntrackedproblems(ui, repo, shelvectx)
        branchtorestore = ''
        if shelvectx.branch() != shelvectx.p1().branch():
            branchtorestore = shelvectx.branch()

        shelvectx = _rebaserestoredcommit(ui, repo, opts, tr, oldtiprev,
                                          basename, pctx, tmpwctx,
                                          shelvectx, branchtorestore,
                                          activebookmark)
        overrides = {('ui', 'forcemerge'): opts.get('tool', '')}
        with ui.configoverride(overrides, 'unshelve'):
            mergefiles(ui, repo, pctx, shelvectx)
        restorebranch(ui, repo, branchtorestore)
        _forgetunknownfiles(repo, shelvectx, addedbefore)

        shelvedstate.clear(repo)
        _finishunshelve(repo, oldtiprev, tr, activebookmark)
        unshelvecleanup(ui, repo, basename, opts)
    finally:
        if tr:
            tr.release()
        lockmod.release(lock)
1047 1047
@command('shelve',
         [('A', 'addremove', None,
           _('mark new/missing files as added/removed before shelving')),
          ('u', 'unknown', None,
           _('store unknown files in the shelve')),
          ('', 'cleanup', None,
           _('delete all shelved changes')),
          ('', 'date', '',
           _('shelve with the specified commit date'), _('DATE')),
          ('d', 'delete', None,
           _('delete the named shelved change(s)')),
          ('e', 'edit', False,
           _('invoke editor on commit messages')),
          ('l', 'list', None,
           _('list current shelves')),
          ('m', 'message', '',
           _('use text as shelve message'), _('TEXT')),
          ('n', 'name', '',
           _('use the given name for the shelved commit'), _('NAME')),
          ('p', 'patch', None,
           _('output patches for changes (provide the names of the shelved '
             'changes as positional arguments)')),
          ('i', 'interactive', None,
           _('interactive mode, only works while creating a shelve')),
          ('', 'stat', None,
           _('output diffstat-style summary of changes (provide the names of '
             'the shelved changes as positional arguments)')
           )] + cmdutil.walkopts,
         _('hg shelve [OPTION]... [FILE]...'))
def shelvecmd(ui, repo, *pats, **opts):
    '''save and set aside changes from the working directory

    Shelving takes files that "hg status" reports as not clean, saves
    the modifications to a bundle (a shelved change), and reverts the
    files so that their state in the working directory becomes clean.

    To restore these changes to the working directory, using "hg
    unshelve"; this will work even if you switch to a different
    commit.

    When no files are specified, "hg shelve" saves all not-clean
    files. If specific files or directories are named, only changes to
    those files are shelved.

    In bare shelve (when no files are specified, without interactive,
    include and exclude option), shelving remembers information if the
    working directory was on newly created branch, in other words working
    directory was on different branch than its first parent. In this
    situation unshelving restores branch information to the working directory.

    Each shelved change has a name that makes it easier to find later.
    The name of a shelved change defaults to being based on the active
    bookmark, or if there is no active bookmark, the current named
    branch. To specify a different name, use ``--name``.

    To see a list of existing shelved changes, use the ``--list``
    option. For each shelved change, this will print its name, age,
    and description; use ``--patch`` or ``--stat`` for more details.

    To delete specific shelved changes, use ``--delete``. To delete
    all shelved changes, use ``--cleanup``.
    '''
    opts = pycompat.byteskwargs(opts)
    # maps each flag to the subcommand "actions" it may be combined with
    allowables = [
        ('addremove', {'create'}), # 'create' is pseudo action
        ('unknown', {'create'}),
        ('cleanup', {'cleanup'}),
        # ('date', {'create'}), # ignored for passing '--date "0 0"' in tests
        ('delete', {'delete'}),
        ('edit', {'create'}),
        ('list', {'list'}),
        ('message', {'create'}),
        ('name', {'create'}),
        ('patch', {'patch', 'list'}),
        ('stat', {'stat', 'list'}),
    ]
    def checkopt(opt):
        # True if *opt* was passed; aborts when it conflicts with
        # another passed option per the allowables table
        if opts.get(opt):
            for i, allowable in allowables:
                if opts[i] and opt not in allowable:
                    raise error.Abort(_("options '--%s' and '--%s' may not be "
                                        "used together") % (opt, i))
            return True
    # dispatch to the subcommand selected by the mode flags
    if checkopt('cleanup'):
        if pats:
            raise error.Abort(_("cannot specify names when using '--cleanup'"))
        return cleanupcmd(ui, repo)
    elif checkopt('delete'):
        return deletecmd(ui, repo, pats)
    elif checkopt('list'):
        return listcmd(ui, repo, pats, opts)
    elif checkopt('patch') or checkopt('stat'):
        return patchcmds(ui, repo, pats, opts)
    else:
        return createcmd(ui, repo, pats, opts)
1143 1143
def extsetup(ui):
    """Register shelve's unfinished-state entries with cmdutil."""
    unfinished = [shelvedstate._filename, False, False,
                  _('unshelve already in progress'),
                  _("use 'hg unshelve --continue' or 'hg unshelve --abort'")]
    cmdutil.unfinishedstates.append(unfinished)
    cmdutil.afterresolvedstates.append(
        [shelvedstate._filename, _('hg unshelve --continue')])
@@ -1,3312 +1,3312
1 1 # cmdutil.py - help for command processing in mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import errno
11 11 import os
12 12 import re
13 13
14 14 from .i18n import _
15 15 from .node import (
16 16 hex,
17 17 nullid,
18 18 nullrev,
19 19 short,
20 20 )
21 21
22 22 from . import (
23 23 bookmarks,
24 24 changelog,
25 25 copies,
26 26 crecord as crecordmod,
27 27 dirstateguard,
28 28 encoding,
29 29 error,
30 30 formatter,
31 31 logcmdutil,
32 32 match as matchmod,
33 33 merge as mergemod,
34 34 mergeutil,
35 35 obsolete,
36 36 patch,
37 37 pathutil,
38 38 phases,
39 39 pycompat,
40 40 revlog,
41 41 rewriteutil,
42 42 scmutil,
43 43 smartset,
44 44 subrepoutil,
45 45 templatekw,
46 46 templater,
47 47 util,
48 48 vfs as vfsmod,
49 49 )
50 50
51 51 from .utils import (
52 52 dateutil,
53 53 stringutil,
54 54 )
55 55
stringio = util.stringio

# templates of common command options
# each tuple is (shortname, longname, default, help[, value placeholder])

dryrunopts = [
    ('n', 'dry-run', None,
     _('do not perform actions, just print output')),
]

confirmopts = [
    ('', 'confirm', None,
     _('ask before applying actions')),
]

remoteopts = [
    ('e', 'ssh', '',
     _('specify ssh command to use'), _('CMD')),
    ('', 'remotecmd', '',
     _('specify hg command to run on the remote side'), _('CMD')),
    ('', 'insecure', None,
     _('do not verify server certificate (ignoring web.cacerts config)')),
]

# -I/-X file matching options shared by most working-copy commands
walkopts = [
    ('I', 'include', [],
     _('include names matching the given patterns'), _('PATTERN')),
    ('X', 'exclude', [],
     _('exclude names matching the given patterns'), _('PATTERN')),
]

commitopts = [
    ('m', 'message', '',
     _('use text as commit message'), _('TEXT')),
    ('l', 'logfile', '',
     _('read commit message from file'), _('FILE')),
]

commitopts2 = [
    ('d', 'date', '',
     _('record the specified date as commit date'), _('DATE')),
    ('u', 'user', '',
     _('record the specified user as committer'), _('USER')),
]

formatteropts = [
    ('T', 'template', '',
     _('display with template'), _('TEMPLATE')),
]

# like formatteropts plus the deprecated --style option
templateopts = [
    ('', 'style', '',
     _('display using template map file (DEPRECATED)'), _('STYLE')),
    ('T', 'template', '',
     _('display with template'), _('TEMPLATE')),
]

logopts = [
    ('p', 'patch', None, _('show patch')),
    ('g', 'git', None, _('use git extended diff format')),
    ('l', 'limit', '',
     _('limit number of changes displayed'), _('NUM')),
    ('M', 'no-merges', None, _('do not show merges')),
    ('', 'stat', None, _('output diffstat-style summary of changes')),
    ('G', 'graph', None, _("show the revision DAG")),
] + templateopts

diffopts = [
    ('a', 'text', None, _('treat all files as text')),
    ('g', 'git', None, _('use git extended diff format')),
    ('', 'binary', None, _('generate binary diffs in git mode (default)')),
    ('', 'nodates', None, _('omit dates from diff headers'))
]

# whitespace-handling options shared by diff-like commands
diffwsopts = [
    ('w', 'ignore-all-space', None,
     _('ignore white space when comparing lines')),
    ('b', 'ignore-space-change', None,
     _('ignore changes in the amount of white space')),
    ('B', 'ignore-blank-lines', None,
     _('ignore changes whose lines are all blank')),
    ('Z', 'ignore-space-at-eol', None,
     _('ignore changes in whitespace at EOL')),
]

diffopts2 = [
    ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
    ('p', 'show-function', None, _('show which function each change is in')),
    ('', 'reverse', None, _('produce a diff that undoes the changes')),
] + diffwsopts + [
    ('U', 'unified', '',
     _('number of lines of context to show'), _('NUM')),
    ('', 'stat', None, _('output diffstat-style summary of changes')),
    ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
]

mergetoolopts = [
    ('t', 'tool', '', _('specify merge tool')),
]

similarityopts = [
    ('s', 'similarity', '',
     _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
]

subrepoopts = [
    ('S', 'subrepos', None,
     _('recurse into subrepositories'))
]

debugrevlogopts = [
    ('c', 'changelog', False, _('open changelog')),
    ('m', 'manifest', False, _('open manifest')),
    ('', 'dir', '', _('open directory manifest')),
]

# special string such that everything below this line will be ignored in the
# editor text
_linebelow = "^HG: ------------------------ >8 ------------------------$"
174 174
def ishunk(x):
    """Return True when *x* is a record/crecord hunk object."""
    return isinstance(x, (crecordmod.uihunk, patch.recordhunk))
178 178
def newandmodified(chunks, originalchunks):
    """Return the set of filenames whose hunks create a new file and
    were not part of *originalchunks*."""
    return set(c.header.filename() for c in chunks
               if ishunk(c) and c.header.isnewfile()
               and c not in originalchunks)
186 186
def parsealiases(cmd):
    """Split a command spec like ``"^commit|ci"`` into its alias names,
    dropping the leading "default command" caret marker."""
    names = cmd.lstrip("^")
    return names.split("|")
189 189
def setupwrapcolorwrite(ui):
    """Wrap ui.write so diff output can be labeled/colorized.

    Returns the original (unwrapped) write method so the caller can
    restore it when done.
    """
    # wrap ui.write so diff output can be labeled/colorized
    def wrapwrite(orig, *args, **kw):
        label = kw.pop(r'label', '')
        for chunk, l in patch.difflabel(lambda: args):
            orig(chunk, label=label + l)

    oldwrite = ui.write
    def wrap(*args, **kwargs):
        return wrapwrite(oldwrite, *args, **kwargs)
    setattr(ui, 'write', wrap)
    return oldwrite
202 202
def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
    """Let the user filter patch hunks, using the curses UI when enabled.

    Falls back to the plain text-mode filter when curses is disabled or
    raises its fallback error.
    """
    try:
        if usecurses:
            if testfile:
                # test mode: drive the chunk selector from a script file
                recordfn = crecordmod.testdecorator(
                    testfile, crecordmod.testchunkselector)
            else:
                recordfn = crecordmod.chunkselector

            return crecordmod.filterpatch(ui, originalhunks, recordfn,
                                          operation)
    except crecordmod.fallbackerror as e:
        ui.warn('%s\n' % e.message)
        ui.warn(_('falling back to text mode\n'))

    return patch.filterpatch(ui, originalhunks, operation)
219 219
def recordfilter(ui, originalhunks, operation=None):
    """ Prompts the user to filter the originalhunks and return a list of
    selected hunks.
    *operation* is used to build ui messages to indicate the user what
    kind of filtering they are doing: reverting, committing, shelving, etc.
    (see patch.filterpatch).
    """
    usecurses = crecordmod.checkcurses(ui)
    testfile = ui.config('experimental', 'crecordtest')
    # wrap ui.write for colorized diffs; must be undone even on error
    oldwrite = setupwrapcolorwrite(ui)
    try:
        newchunks, newopts = filterchunks(ui, originalhunks, usecurses,
                                          testfile, operation)
    finally:
        ui.write = oldwrite
    return newchunks, newopts
236 236
def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
             filterfn, *pats, **opts):
    """Interactively select changes and delegate the commit to *commitfunc*.

    *cmdsuggest* is the command name suggested when the ui is not
    interactive; *backupall* backs up every changed file instead of only
    those touched by the selected hunks; *filterfn* is the hunk selection
    function (e.g. recordfilter).
    """
    opts = pycompat.byteskwargs(opts)
    if not ui.interactive():
        if cmdsuggest:
            msg = _('running non-interactively, use %s instead') % cmdsuggest
        else:
            msg = _('running non-interactively')
        raise error.Abort(msg)

    # make sure username is set before going interactive
    if not opts.get('user'):
        ui.username() # raise exception, username not provided

    def recordfunc(ui, repo, message, match, opts):
        """This is generic record driver.

        Its job is to interactively filter local changes, and
        accordingly prepare working directory into a state in which the
        job can be delegated to a non-interactive commit command such as
        'commit' or 'qrefresh'.

        After the actual job is done by non-interactive command, the
        working directory is restored to its original state.

        In the end we'll record interesting changes, and everything else
        will be left in place, so the user can continue working.
        """

        checkunfinished(repo, commit=True)
        wctx = repo[None]
        merge = len(wctx.parents()) > 1
        if merge:
            raise error.Abort(_('cannot partially commit a merge '
                               '(use "hg commit" instead)'))

        def fail(f, msg):
            raise error.Abort('%s: %s' % (f, msg))

        force = opts.get('force')
        if not force:
            vdirs = []
            match.explicitdir = vdirs.append
            match.bad = fail

        status = repo.status(match=match)
        if not force:
            repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
        # produce a full git-style diff (with function context) so hunk
        # selection has enough information to be meaningful
        diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
        diffopts.nodates = True
        diffopts.git = True
        diffopts.showfunc = True
        originaldiff = patch.diff(repo, changes=status, opts=diffopts)
        originalchunks = patch.parsepatch(originaldiff)

        # 1. filter patch, since we are intending to apply subset of it
        try:
            chunks, newopts = filterfn(ui, originalchunks)
        except error.PatchError as err:
            raise error.Abort(_('error parsing patch: %s') % err)
        opts.update(newopts)

        # We need to keep a backup of files that have been newly added and
        # modified during the recording process because there is a previous
        # version without the edit in the workdir
        newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
        contenders = set()
        for h in chunks:
            try:
                contenders.update(set(h.files()))
            except AttributeError:
                # entries without files() (bare hunks) contribute nothing
                pass

        changed = status.modified + status.added + status.removed
        newfiles = [f for f in changed if f in contenders]
        if not newfiles:
            ui.status(_('no changes to record\n'))
            return 0

        modified = set(status.modified)

        # 2. backup changed files, so we can restore them in the end

        if backupall:
            tobackup = changed
        else:
            tobackup = [f for f in newfiles if f in modified or f in \
                newlyaddedandmodifiedfiles]
        backups = {}
        if tobackup:
            backupdir = repo.vfs.join('record-backups')
            try:
                os.mkdir(backupdir)
            except OSError as err:
                # an existing backup dir from an earlier run is fine
                if err.errno != errno.EEXIST:
                    raise
        try:
            # backup continues
            for f in tobackup:
                fd, tmpname = pycompat.mkstemp(prefix=f.replace('/', '_') + '.',
                                               dir=backupdir)
                os.close(fd)
                ui.debug('backup %r as %r\n' % (f, tmpname))
                util.copyfile(repo.wjoin(f), tmpname, copystat=True)
                backups[f] = tmpname

            fp = stringio()
            for c in chunks:
                fname = c.filename()
                if fname in backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            # 2.5 optionally review / modify patch in text editor
            if opts.get('review', False):
                patchtext = (crecordmod.diffhelptext
                             + crecordmod.patchhelptext
                             + fp.read())
                reviewedpatch = ui.edit(patchtext, "",
                                        action="diff",
                                        repopath=repo.path)
                fp.truncate(0)
                fp.write(reviewedpatch)
                fp.seek(0)

            [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
            # 3a. apply filtered patch to clean repo (clean)
            if backups:
                # Equivalent to hg.revert
                m = scmutil.matchfiles(repo, backups.keys())
                mergemod.update(repo, repo.dirstate.p1(),
                                False, True, matcher=m)

            # 3b. (apply)
            if dopatch:
                try:
                    ui.debug('applying patch\n')
                    ui.debug(fp.getvalue())
                    patch.internalpatch(ui, repo, fp, 1, eolmode=None)
                except error.PatchError as err:
                    raise error.Abort(pycompat.bytestr(err))
            del fp

            # 4. We prepared working directory according to filtered
            # patch. Now is the time to delegate the job to
            # commit/qrefresh or the like!

            # Make all of the pathnames absolute.
            newfiles = [repo.wjoin(nf) for nf in newfiles]
            return commitfunc(ui, repo, *newfiles, **pycompat.strkwargs(opts))
        finally:
            # 5. finally restore backed-up files
            try:
                dirstate = repo.dirstate
                for realname, tmpname in backups.iteritems():
                    ui.debug('restoring %r to %r\n' % (tmpname, realname))

                    if dirstate[realname] == 'n':
                        # without normallookup, restoring timestamp
                        # may cause partially committed files
                        # to be treated as unmodified
                        dirstate.normallookup(realname)

                    # copystat=True here and above are a hack to trick any
                    # editors that have f open that we haven't modified them.
                    #
                    # Also note that this racy as an editor could notice the
                    # file's mtime before we've finished writing it.
                    util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
                    os.unlink(tmpname)
                if tobackup:
                    os.rmdir(backupdir)
            except OSError:
                pass

    def recordinwlock(ui, repo, message, match, opts):
        # run the record driver while holding the working-copy lock
        with repo.wlock():
            return recordfunc(ui, repo, message, match, opts)

    return commit(ui, repo, recordinwlock, pats, opts)
418 418
class dirnode(object):
    """A directory of the working copy, tracked for status tersing.

    path     -- path to this directory, without a trailing '/'
    statuses -- set of one-letter status codes seen anywhere below here
                (direct files and all subdirectories)
    files    -- (name, status) pairs for files directly in this directory
    subdirs  -- child directory name -> its own dirnode
    """

    def __init__(self, dirpath):
        self.path = dirpath
        self.statuses = set([])
        self.files = []
        self.subdirs = {}

    def _addfileindir(self, filename, status):
        """Record *filename* as a direct child of this directory."""
        self.files.append((filename, status))

    def addfile(self, filename, status):
        """Record *filename* somewhere below this directory.

        A name containing a path separator is pushed down into the matching
        subdirectory's dirnode (created on demand); otherwise it is stored
        here.  Either way *status* is folded into this node's status set.
        """
        if '/' in filename:
            childname, remainder = filename.split('/', 1)

            # lazily create the dirnode for the subdirectory
            if childname not in self.subdirs:
                childpath = pathutil.join(self.path, childname)
                self.subdirs[childname] = dirnode(childpath)

            # delegate the rest of the path to the child
            self.subdirs[childname].addfile(remainder, status)
        else:
            self._addfileindir(filename, status)

        if status not in self.statuses:
            self.statuses.add(status)

    def iterfilepaths(self):
        """Yield (status, path) for files directly under this directory."""
        for name, status in self.files:
            yield status, pathutil.join(self.path, name)

    def tersewalk(self, terseargs):
        """Yield (status, path) for this subtree, tersed where possible.

        *terseargs* is the raw string passed by the user to `--terse`.

        When every file below this directory shares one status and that
        status letter appears in terseargs, a single entry
        (status, 'dirpath/') is yielded for the whole subtree.  Otherwise
        the direct files are yielded individually and each subdirectory is
        walked recursively.
        """
        if len(self.statuses) == 1:
            onlyst = self.statuses.pop()

            # terse only when the user asked for this status letter
            if onlyst in terseargs:
                yield onlyst, self.path + '/'
                return

        # direct children of this directory
        for status, fpath in self.iterfilepaths():
            yield status, fpath

        # then everything below
        for child in self.subdirs.values():
            for status, fpath in child.tersewalk(terseargs):
                yield status, fpath
518 518
def tersedir(statuslist, terseargs):
    """Collapse per-file statuses into per-directory entries where allowed.

    statuslist is a scmutil.status() object holding a file list per status;
    terseargs is the raw string the user passed to `--terse`.

    Builds a tree of dirnode objects and returns a list of sorted file
    lists, one per status code in 'marduic' order, where directories whose
    contents all share a requested status appear as a single 'dir/' entry.
    """
    # the order matters here as that is used to produce final list
    allst = ('m', 'a', 'r', 'd', 'u', 'i', 'c')

    # reject unknown status letters up front
    for ch in pycompat.bytestr(terseargs):
        if ch not in allst:
            raise error.Abort(_("'%s' not recognized") % ch)

    # root of the dirnode tree, covering the whole repo
    rootobj = dirnode('')
    pstatus = ('modified', 'added', 'deleted', 'clean', 'unknown',
               'ignored', 'removed')

    tersedict = {}
    for attrname in pstatus:
        statuschar = attrname[0:1]
        tersedict[statuschar] = []
        for f in getattr(statuslist, attrname):
            rootobj.addfile(f, statuschar)

    # the root directory itself is never tersed: take its files as-is
    for st, fpath in rootobj.iterfilepaths():
        tersedict[st].append(fpath)

    # walk every subtree, tersing where possible
    for subdir in rootobj.subdirs.values():
        for st, f in subdir.tersewalk(terseargs):
            tersedict[st].append(f)

    return [sorted(tersedict[st]) for st in allst]
567 567
568 568 def _commentlines(raw):
569 569 '''Surround lineswith a comment char and a new line'''
570 570 lines = raw.splitlines()
571 571 commentedlines = ['# %s' % line for line in lines]
572 572 return '\n'.join(commentedlines) + '\n'
573 573
def _conflictsmsg(repo):
    """Return a commented summary of unresolved merge conflicts.

    Returns None when no merge state is active.
    """
    mergestate = mergemod.mergestate.read(repo)
    if not mergestate.active():
        return

    m = scmutil.match(repo[None])
    unresolvedlist = [f for f in mergestate.unresolved() if m(f)]
    if not unresolvedlist:
        msg = _('No unresolved merge conflicts.')
    else:
        entries = [' %s' % util.pathto(repo.root, encoding.getcwd(), path)
                   for path in unresolvedlist]
        msg = _('''Unresolved merge conflicts:

%s

To mark files as resolved: hg resolve --mark FILE''') % '\n'.join(entries)

    return _commentlines(msg)
594 594
def _helpmessage(continuecmd, abortcmd):
    """Return a commented continue/abort hint for an interrupted command."""
    template = _('To continue: %s\n'
                 'To abort: %s')
    return _commentlines(template % (continuecmd, abortcmd))
599 599
def _rebasemsg():
    """Hint shown while a rebase is interrupted."""
    cmds = ('hg rebase --continue', 'hg rebase --abort')
    return _helpmessage(*cmds)
602 602
def _histeditmsg():
    """Hint shown while a histedit is interrupted."""
    cmds = ('hg histedit --continue', 'hg histedit --abort')
    return _helpmessage(*cmds)
605 605
def _unshelvemsg():
    """Hint shown while an unshelve is interrupted."""
    cmds = ('hg unshelve --continue', 'hg unshelve --abort')
    return _helpmessage(*cmds)
608 608
def _graftmsg():
    """Hint shown while a graft is interrupted."""
    # NOTE(review): the previous comment here ("tweakdefaults requires
    # `update` to have a rev...") referenced an `hg update` command this
    # function no longer emits; dropped as stale.
    cmds = ('hg graft --continue', 'hg graft --abort')
    return _helpmessage(*cmds)
612 612
def _mergemsg():
    """Hint shown while an uncommitted merge is in progress."""
    cmds = ('hg commit', 'hg merge --abort')
    return _helpmessage(*cmds)
616 616
def _bisectmsg():
    """Hint shown while a bisection is in progress."""
    text = _('To mark the changeset good: hg bisect --good\n'
             'To mark the changeset bad: hg bisect --bad\n'
             'To abort: hg bisect --reset\n')
    return _commentlines(text)
622 622
def fileexistspredicate(filename):
    """Return a predicate checking whether *filename* exists in repo.vfs."""
    def predicate(repo):
        return repo.vfs.exists(filename)
    return predicate
625 625
626 626 def _mergepredicate(repo):
627 627 return len(repo[None].parents()) > 1
628 628
# Ordered table of detectable unfinished-operation states; each entry is
# (state name, predicate(repo) -> bool, helpful-message factory).
STATES = (
    # (state, predicate to detect states, helpful message function)
    ('histedit', fileexistspredicate('histedit-state'), _histeditmsg),
    ('bisect', fileexistspredicate('bisect.state'), _bisectmsg),
    ('graft', fileexistspredicate('graftstate'), _graftmsg),
    ('unshelve', fileexistspredicate('shelvedstate'), _unshelvemsg),
    ('rebase', fileexistspredicate('rebasestate'), _rebasemsg),
    # The merge state is part of a list that will be iterated over.
    # They need to be last because some of the other unfinished states may also
    # be in a merge or update state (eg. rebase, histedit, graft, etc).
    # We want those to have priority.
    ('merge', _mergepredicate, _mergemsg),
)
642 642
def _getrepostate(repo):
    """Return (state, predicate, msgfn) for the first active STATES entry.

    Entries listed in the commands.status.skipstates config are ignored.
    Returns None when no unfinished state is detected.
    """
    # experimental config: commands.status.skipstates
    skipped = set(repo.ui.configlist('commands', 'status.skipstates'))
    for state, statedetectionpredicate, msgfn in STATES:
        if state not in skipped and statedetectionpredicate(repo):
            return (state, statedetectionpredicate, msgfn)
651 651
def morestatus(repo, fm):
    """Write extra status output describing any unfinished operation."""
    statetuple = _getrepostate(repo)
    if not statetuple:
        return
    label = 'status.morestatus'
    state, statedetectionpredicate, helpfulmsg = statetuple
    statemsg = _('The repository is in an unfinished *%s* state.') % state
    fm.plain('%s\n' % _commentlines(statemsg), label=label)
    # list unresolved conflicts, if a merge state is active
    conmsg = _conflictsmsg(repo)
    if conmsg:
        fm.plain('%s\n' % conmsg, label=label)
    if helpfulmsg:
        fm.plain('%s\n' % helpfulmsg(), label=label)
665 665
def findpossible(cmd, table, strict=False):
    """Map every command matching *cmd* to its (aliases, table entry) pair.

    Returns (choice, allcmds): choice maps each matched name to
    (aliases, entry); allcmds lists every known command name.  Debug
    commands (or their aliases) are only offered when no normal command
    matches.  Without *strict*, unambiguous prefixes match too.
    """
    choice = {}
    debugchoice = {}

    if cmd in table:
        # short-circuit exact matches, "log" alias beats "^log|history"
        keys = [cmd]
    else:
        keys = table.keys()

    allcmds = []
    for e in keys:
        aliases = parsealiases(e)
        allcmds.extend(aliases)
        found = None
        if cmd in aliases:
            found = cmd
        elif not strict:
            # first alias that has cmd as a prefix wins
            for a in aliases:
                if a.startswith(cmd):
                    found = a
                    break
        if found is None:
            continue
        if aliases[0].startswith("debug") or found.startswith("debug"):
            debugchoice[found] = (aliases, table[e])
        else:
            choice[found] = (aliases, table[e])

    if not choice and debugchoice:
        choice = debugchoice

    return choice, allcmds
703 703
def findcmd(cmd, table, strict=True):
    """Return (aliases, command table entry) for command string.

    Raises AmbiguousCommand when several commands match, UnknownCommand
    when none does.
    """
    choice, allcmds = findpossible(cmd, table, strict)

    if cmd in choice:
        # exact name always wins
        return choice[cmd]

    if len(choice) > 1:
        raise error.AmbiguousCommand(cmd, sorted(choice))

    if choice:
        return list(choice.values())[0]

    raise error.UnknownCommand(cmd, allcmds)
719 719
def changebranch(ui, repo, revs, label):
    """ Change the branch name of given revs to label

    Rewrites each selected changeset (via memctx) with the new branch name,
    then creates obsmarkers, moves bookmarks and updates the working copy.
    Raises error.Abort for empty/non-linear/merge/obsolete selections or
    mid-stack changes.
    """

    with repo.wlock(), repo.lock(), repo.transaction('branches'):
        # abort in case of uncommitted merge or dirty wdir
        bailifchanged(repo)
        revs = scmutil.revrange(repo, revs)
        if not revs:
            raise error.Abort("empty revision set")
        roots = repo.revs('roots(%ld)', revs)
        if len(roots) > 1:
            raise error.Abort(_("cannot change branch of non-linear revisions"))
        rewriteutil.precheck(repo, revs, 'change branch of')

        root = repo[roots.first()]
        # i.e. the parent's branch already differs from the target label,
        # yet the label already names an existing branch
        if not root.p1().branch() == label and label in repo.branchmap():
            raise error.Abort(_("a branch of the same name already exists"))

        if repo.revs('merge() and %ld', revs):
            raise error.Abort(_("cannot change branch of a merge commit"))
        if repo.revs('obsolete() and %ld', revs):
            raise error.Abort(_("cannot change branch of a obsolete changeset"))

        # make sure only topological heads
        if repo.revs('heads(%ld) - head()', revs):
            raise error.Abort(_("cannot change branch in middle of a stack"))

        # maps old node -> tuple of replacement node(s)
        replacements = {}
        # avoid import cycle mercurial.cmdutil -> mercurial.context ->
        # mercurial.subrepo -> mercurial.cmdutil
        from . import context
        for rev in revs:
            ctx = repo[rev]
            oldbranch = ctx.branch()
            # check if ctx has same branch
            if oldbranch == label:
                continue

            def filectxfn(repo, newctx, path):
                # reuse file content from the original changeset;
                # None signals a removed file
                try:
                    return ctx[path]
                except error.ManifestLookupError:
                    return None

            ui.debug("changing branch of '%s' from '%s' to '%s'\n"
                     % (hex(ctx.node()), oldbranch, label))
            extra = ctx.extra()
            extra['branch_change'] = hex(ctx.node())
            # While changing branch of set of linear commits, make sure that
            # we base our commits on new parent rather than old parent which
            # was obsoleted while changing the branch
            p1 = ctx.p1().node()
            p2 = ctx.p2().node()
            if p1 in replacements:
                p1 = replacements[p1][0]
            if p2 in replacements:
                p2 = replacements[p2][0]

            mc = context.memctx(repo, (p1, p2),
                                ctx.description(),
                                ctx.files(),
                                filectxfn,
                                user=ctx.user(),
                                date=ctx.date(),
                                extra=extra,
                                branch=label)

            newnode = repo.commitctx(mc)
            replacements[ctx.node()] = (newnode,)
            ui.debug('new node id is %s\n' % hex(newnode))

        # create obsmarkers and move bookmarks
        scmutil.cleanupnodes(repo, replacements, 'branch-change', fixphase=True)

        # move the working copy too
        wctx = repo[None]
        # in-progress merge is a bit too complex for now.
        if len(wctx.parents()) == 1:
            newid = replacements.get(wctx.p1().node())
            if newid is not None:
                # avoid import cycle mercurial.cmdutil -> mercurial.hg ->
                # mercurial.cmdutil
                from . import hg
                hg.update(repo, newid[0], quietempty=True)

        ui.status(_("changed branch on %d changesets\n") % len(replacements))
806 806
def findrepo(p):
    """Walk upwards from *p* looking for a directory containing '.hg'.

    Returns the repository root, or None once the filesystem root is
    reached without finding one.
    """
    while not os.path.isdir(os.path.join(p, ".hg")):
        parent = os.path.dirname(p)
        if parent == p:
            # reached the filesystem root
            return None
        p = parent

    return p
814 814
def bailifchanged(repo, merge=True, hint=None):
    """Abort unless the working directory is clean.

    'merge' can be set to False to tolerate a pending uncommitted merge
    (as 'update --check' needs).  'hint' is passed through to the Abort
    exception.
    """
    if merge and repo.dirstate.p2() != nullid:
        raise error.Abort(_('outstanding uncommitted merge'), hint=hint)
    # modified, added, removed, deleted
    if any(repo.status()[:4]):
        raise error.Abort(_('uncommitted changes'), hint=hint)
    # subrepos must be clean too
    ctx = repo[None]
    for s in sorted(ctx.substate):
        ctx.sub(s).bailifchanged(hint=hint)
832 832
def logmessage(ui, opts):
    """Extract the commit message from the -m/-l options."""
    message = opts.get('message')
    logfile = opts.get('logfile')

    if message and logfile:
        raise error.Abort(_('options --message and --logfile are mutually '
                            'exclusive'))
    if logfile and not message:
        try:
            if isstdiofilename(logfile):
                # '-' means: read the message from stdin
                message = ui.fin.read()
            else:
                message = '\n'.join(util.readfile(logfile).splitlines())
        except IOError as inst:
            raise error.Abort(_("can't read commit message '%s': %s") %
                              (logfile, encoding.strtolocal(inst.strerror)))
    return message
851 851
def mergeeditform(ctxorbool, baseformname):
    """return appropriate editform name (referencing a committemplate)

    'ctxorbool' is either a ctx to be committed, or a bool indicating whether
    merging is committed.

    This returns baseformname with '.merge' appended if it is a merge,
    otherwise '.normal' is appended.
    """
    # fix: the source contained both the pre- and post-cleanup versions of
    # the elif line ("elif 1 < len(...)" AND "elif len(...) > 1"), leaving a
    # duplicated condition; keep only the non-Yoda form.
    if isinstance(ctxorbool, bool):
        if ctxorbool:
            return baseformname + ".merge"
    elif len(ctxorbool.parents()) > 1:
        return baseformname + ".merge"

    return baseformname + ".normal"
868 868
def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
                    editform='', **opts):
    """Pick the commit message editor matching the '--edit' option.

    'finishdesc' is called with the edited commit message (= 'description'
    of the new changeset) just after editing, before the empty-message
    check; its return value is the text actually stored in history, which
    allows rewriting the description.

    'extramsg' is an extra message shown in the editor instead of the
    'Leave message empty to abort commit' line ('HG: ' prefix and EOL are
    added automatically).

    'editform' is a dot-separated list of names distinguishing the purpose
    of the commit text editing.

    'commitforceeditor' is returned regardless of 'edit' whenever
    'finishdesc' or 'extramsg' is given, because those are specific to
    usage in MQ.
    """
    if edit or finishdesc or extramsg:
        def forcededitor(r, c, s):
            return commitforceeditor(r, c, s, finishdesc=finishdesc,
                                     extramsg=extramsg, editform=editform)
        return forcededitor
    if editform:
        def formeditor(r, c, s):
            return commiteditor(r, c, s, editform=editform)
        return formeditor
    return commiteditor
899 899
def _escapecommandtemplate(tmpl):
    """Escape the literal (non-template) string parts of *tmpl*."""
    parts = []
    for typ, start, end in templater.scantemplate(tmpl, raw=True):
        piece = tmpl[start:end]
        if typ == b'string':
            # only literal text is escaped; template syntax passes through
            piece = stringutil.escapestr(piece)
        parts.append(piece)
    return b''.join(parts)
908 908
def rendercommandtemplate(ui, tmpl, props):
    r"""Expand a literal template 'tmpl' in a way suitable for command line

    '\' in outermost string is not taken as an escape character because it
    is a directory separator on Windows.

    >>> from . import ui as uimod
    >>> ui = uimod.ui()
    >>> rendercommandtemplate(ui, b'c:\\{path}', {b'path': b'foo'})
    'c:\\foo'
    >>> rendercommandtemplate(ui, b'{"c:\\{path}"}', {'path': b'foo'})
    'c:{path}'
    """
    if not tmpl:
        return tmpl
    escaped = _escapecommandtemplate(tmpl)
    return formatter.maketemplater(ui, escaped).renderdefault(props)
926 926
def rendertemplate(ctx, tmpl, props=None):
    """Expand a literal template 'tmpl' byte-string against changeset *ctx*.

    Each props item must be a stringify-able value or a callable returning
    such value, i.e. no bare list nor dict should be passed.
    """
    # 'ctx' is always available to the template; extra props layer on top
    mapping = {'ctx': ctx}
    if props:
        mapping.update(props)
    repo = ctx.repo()
    tres = formatter.templateresources(repo.ui, repo)
    t = formatter.maketemplater(repo.ui, tmpl, defaults=templatekw.keywords,
                                resources=tres)
    return t.renderdefault(mapping)
941 941
def _buildfntemplate(pat, total=None, seqno=None, revwidth=None, pathname=None):
    r"""Convert old-style filename format string to template string

    >>> _buildfntemplate(b'foo-%b-%n.patch', seqno=0)
    'foo-{reporoot|basename}-{seqno}.patch'
    >>> _buildfntemplate(b'%R{tags % "{tag}"}%H')
    '{rev}{tags % "{tag}"}{node}'

    '\' in outermost strings has to be escaped because it is a directory
    separator on Windows:

    >>> _buildfntemplate(b'c:\\tmp\\%R\\%n.patch', seqno=0)
    'c:\\\\tmp\\\\{rev}\\\\{seqno}.patch'
    >>> _buildfntemplate(b'\\\\foo\\bar.patch')
    '\\\\\\\\foo\\\\bar.patch'
    >>> _buildfntemplate(b'\\{tags % "{tag}"}')
    '\\\\{tags % "{tag}"}'

    but inner strings follow the template rules (i.e. '\' is taken as an
    escape character):

    >>> _buildfntemplate(br'{"c:\tmp"}', seqno=0)
    '{"c:\\tmp"}'
    """
    # %-escape character -> equivalent template fragment
    expander = {
        b'H': b'{node}',
        b'R': b'{rev}',
        b'h': b'{node|short}',
        b'm': br'{sub(r"[^\w]", "_", desc|firstline)}',
        b'r': b'{if(revwidth, pad(rev, revwidth, "0", left=True), rev)}',
        b'%': b'%',
        b'b': b'{reporoot|basename}',
    }
    # escapes that only make sense when the corresponding value is supplied
    if total is not None:
        expander[b'N'] = b'{total}'
    if seqno is not None:
        expander[b'n'] = b'{seqno}'
    if total is not None and seqno is not None:
        expander[b'n'] = b'{pad(seqno, total|stringify|count, "0", left=True)}'
    if pathname is not None:
        expander[b's'] = b'{pathname|basename}'
        expander[b'd'] = b'{if(pathname|dirname, pathname|dirname, ".")}'
        expander[b'p'] = b'{pathname}'

    newname = []
    for typ, start, end in templater.scantemplate(pat, raw=True):
        if typ != b'string':
            # template fragments pass through untouched
            newname.append(pat[start:end])
            continue
        i = start
        while i < end:
            n = pat.find(b'%', i, end)
            if n < 0:
                # no more %-escapes: escape the remainder literally
                newname.append(stringutil.escapestr(pat[i:end]))
                break
            newname.append(stringutil.escapestr(pat[i:n]))
            if n + 2 > end:
                # trailing bare '%' with no escape character
                raise error.Abort(_("incomplete format spec in output "
                                    "filename"))
            c = pat[n + 1:n + 2]
            i = n + 2
            try:
                newname.append(expander[c])
            except KeyError:
                raise error.Abort(_("invalid format spec '%%%s' in output "
                                    "filename") % c)
    return ''.join(newname)
1009 1009
def makefilename(ctx, pat, **props):
    """Expand an old-style %-format output filename against *ctx*."""
    if not pat:
        return pat
    # BUG: alias expansion shouldn't be made against template fragments
    # rewritten from %-format strings, but we have no easy way to partially
    # disable the expansion.
    return rendertemplate(ctx, _buildfntemplate(pat, **props),
                          pycompat.byteskwargs(props))
1018 1018
def isstdiofilename(pat):
    """True if the given pat looks like a filename denoting stdin/stdout"""
    if not pat:
        return True
    return pat == '-'
1022 1022
1023 1023 class _unclosablefile(object):
1024 1024 def __init__(self, fp):
1025 1025 self._fp = fp
1026 1026
1027 1027 def close(self):
1028 1028 pass
1029 1029
1030 1030 def __iter__(self):
1031 1031 return iter(self._fp)
1032 1032
1033 1033 def __getattr__(self, attr):
1034 1034 return getattr(self._fp, attr)
1035 1035
1036 1036 def __enter__(self):
1037 1037 return self
1038 1038
1039 1039 def __exit__(self, exc_type, exc_value, exc_tb):
1040 1040 pass
1041 1041
def makefileobj(ctx, pat, mode='wb', **props):
    """Open the output file described by *pat* (or wrap stdio for '-')."""
    if isstdiofilename(pat):
        # '-' (or empty) means the ui's stdin/stdout, wrapped so callers
        # cannot close the shared stream
        repo = ctx.repo()
        if mode not in ('r', 'rb'):
            stream = repo.ui.fout
        else:
            stream = repo.ui.fin
        return _unclosablefile(stream)
    return open(makefilename(ctx, pat, **props), mode)
1054 1054
def openstorage(repo, cmd, file_, opts, returnrevlog=False):
    """opens the changelog, manifest, a filelog or a given revlog

    The storage opened depends on the --changelog/--manifest/--dir options
    in *opts*, falling back to the filelog for *file_* or, without a
    repository, to a revlog opened straight from disk.  With
    returnrevlog=True the low-level revlog is returned instead of the
    storage object.
    """
    cl = opts['changelog']
    mf = opts['manifest']
    dir = opts['dir']
    msg = None
    # validate the mutually exclusive selector options
    if cl and mf:
        msg = _('cannot specify --changelog and --manifest at the same time')
    elif cl and dir:
        msg = _('cannot specify --changelog and --dir at the same time')
    elif cl or mf or dir:
        if file_:
            msg = _('cannot specify filename with --changelog or --manifest')
        elif not repo:
            msg = _('cannot specify --changelog or --manifest or --dir '
                    'without a repository')
    if msg:
        raise error.Abort(msg)

    r = None
    if repo:
        if cl:
            r = repo.unfiltered().changelog
        elif dir:
            if 'treemanifest' not in repo.requirements:
                raise error.Abort(_("--dir can only be used on repos with "
                                   "treemanifest enabled"))
            if not dir.endswith('/'):
                dir = dir + '/'
            dirlog = repo.manifestlog.getstorage(dir)
            if len(dirlog):
                r = dirlog
        elif mf:
            r = repo.manifestlog.getstorage(b'')
        elif file_:
            filelog = repo.file(file_)
            if len(filelog):
                r = filelog

    # Not all storage may be revlogs. If requested, try to return an actual
    # revlog instance.
    if returnrevlog:
        if isinstance(r, revlog.revlog):
            pass
        elif util.safehasattr(r, '_revlog'):
            r = r._revlog
        elif r is not None:
            raise error.Abort(_('%r does not appear to be a revlog') % r)

    if not r:
        if not returnrevlog:
            raise error.Abort(_('cannot give path to non-revlog'))

        if not file_:
            raise error.CommandError(cmd, _('invalid arguments'))
        if not os.path.isfile(file_):
            raise error.Abort(_("revlog '%s' not found") % file_)
        # last resort: open the given '.i' revlog file straight from disk
        r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False),
                          file_[:-2] + ".i")
    return r
1115 1115
def openrevlog(repo, cmd, file_, opts):
    """Obtain a revlog backing storage of an item.

    Like ``openstorage()`` but always returns the low-level revlog rather
    than the main storage object, for debug commands that need to examine
    revlog implementation details.
    """
    return openstorage(repo, cmd, file_, opts, returnrevlog=True)
1127 1127
def copy(ui, repo, pats, opts, rename=False):
    """Copy (or, with rename=True, move) files matching ``pats``.

    The last element of ``pats`` is the destination; the preceding elements
    are the sources. Returns True if any file failed to be copied or
    renamed (callers use this as a command failure status).
    """
    # called with the repo lock held
    #
    # hgsep => pathname that uses "/" to separate directories
    # ossep => pathname that uses os.sep to separate directories
    cwd = repo.getcwd()
    targets = {} # abstarget -> abssrc, used to detect colliding copies
    after = opts.get("after")
    dryrun = opts.get("dry_run")
    wctx = repo[None]

    def walkpat(pat):
        # Expand one source pattern into (abs, rel, exact) tuples, warning
        # about and skipping files in unusable dirstate states.
        srcs = []
        if after:
            badstates = '?'
        else:
            badstates = '?r'
        m = scmutil.match(wctx, [pat], opts, globbed=True)
        for abs in wctx.walk(m):
            state = repo.dirstate[abs]
            rel = m.rel(abs)
            exact = m.exact(abs)
            if state in badstates:
                if exact and state == '?':
                    ui.warn(_('%s: not copying - file is not managed\n') % rel)
                if exact and state == 'r':
                    ui.warn(_('%s: not copying - file has been marked for'
                              ' remove\n') % rel)
                continue
            # abs: hgsep
            # rel: ossep
            srcs.append((abs, rel, exact))
        return srcs

    # abssrc: hgsep
    # relsrc: ossep
    # otarget: ossep
    def copyfile(abssrc, relsrc, otarget, exact):
        # Copy/rename a single file; returns True to report a failure.
        abstarget = pathutil.canonpath(repo.root, cwd, otarget)
        if '/' in abstarget:
            # We cannot normalize abstarget itself, this would prevent
            # case only renames, like a => A.
            abspath, absname = abstarget.rsplit('/', 1)
            abstarget = repo.dirstate.normalize(abspath) + '/' + absname
        reltarget = repo.pathto(abstarget, cwd)
        target = repo.wjoin(abstarget)
        src = repo.wjoin(abssrc)
        state = repo.dirstate[abstarget]

        scmutil.checkportable(ui, abstarget)

        # check for collisions
        prevsrc = targets.get(abstarget)
        if prevsrc is not None:
            ui.warn(_('%s: not overwriting - %s collides with %s\n') %
                    (reltarget, repo.pathto(abssrc, cwd),
                     repo.pathto(prevsrc, cwd)))
            return True # report a failure

        # check for overwrites
        exists = os.path.lexists(target)
        samefile = False
        if exists and abssrc != abstarget:
            if (repo.dirstate.normalize(abssrc) ==
                repo.dirstate.normalize(abstarget)):
                if not rename:
                    ui.warn(_("%s: can't copy - same file\n") % reltarget)
                    return True # report a failure
                exists = False
                samefile = True

        # 'm'/'n' dirstate states mean the target is already committed
        # (see the warning message below)
        if not after and exists or after and state in 'mn':
            if not opts['force']:
                if state in 'mn':
                    msg = _('%s: not overwriting - file already committed\n')
                    if after:
                        flags = '--after --force'
                    else:
                        flags = '--force'
                    if rename:
                        hint = _("('hg rename %s' to replace the file by "
                                 'recording a rename)\n') % flags
                    else:
                        hint = _("('hg copy %s' to replace the file by "
                                 'recording a copy)\n') % flags
                else:
                    msg = _('%s: not overwriting - file exists\n')
                    if rename:
                        hint = _("('hg rename --after' to record the rename)\n")
                    else:
                        hint = _("('hg copy --after' to record the copy)\n")
                ui.warn(msg % reltarget)
                ui.warn(hint)
                return True # report a failure

        if after:
            if not exists:
                if rename:
                    ui.warn(_('%s: not recording move - %s does not exist\n') %
                            (relsrc, reltarget))
                else:
                    ui.warn(_('%s: not recording copy - %s does not exist\n') %
                            (relsrc, reltarget))
                return True # report a failure
        elif not dryrun:
            try:
                if exists:
                    os.unlink(target)
                targetdir = os.path.dirname(target) or '.'
                if not os.path.isdir(targetdir):
                    os.makedirs(targetdir)
                if samefile:
                    # case-only rename: go through a temporary name so the
                    # OS actually changes the case of the file
                    tmp = target + "~hgrename"
                    os.rename(src, tmp)
                    os.rename(tmp, target)
                else:
                    # Preserve stat info on renames, not on copies; this matches
                    # Linux CLI behavior.
                    util.copyfile(src, target, copystat=rename)
                srcexists = True
            except IOError as inst:
                if inst.errno == errno.ENOENT:
                    ui.warn(_('%s: deleted in working directory\n') % relsrc)
                    srcexists = False
                else:
                    ui.warn(_('%s: cannot copy - %s\n') %
                            (relsrc, encoding.strtolocal(inst.strerror)))
                    return True # report a failure

        if ui.verbose or not exact:
            if rename:
                ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
            else:
                ui.status(_('copying %s to %s\n') % (relsrc, reltarget))

        targets[abstarget] = abssrc

        # fix up dirstate
        scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
                             dryrun=dryrun, cwd=cwd)
        if rename and not dryrun:
            if not after and srcexists and not samefile:
                rmdir = repo.ui.configbool('experimental', 'removeemptydirs')
                repo.wvfs.unlinkpath(abssrc, rmdir=rmdir)
            wctx.forget([abssrc])

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathfn(pat, dest, srcs):
        if os.path.isdir(pat):
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            abspfx = util.localpath(abspfx)
            if destdirexists:
                striplen = len(os.path.split(abspfx)[0])
            else:
                striplen = len(abspfx)
            if striplen:
                striplen += len(pycompat.ossep)
            res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
        elif destdirexists:
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            res = lambda p: dest
        return res

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathafterfn(pat, dest, srcs):
        if matchmod.patkind(pat):
            # a mercurial pattern
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            if len(abspfx) < len(srcs[0][0]):
                # A directory. Either the target path contains the last
                # component of the source path or it does not.
                def evalpath(striplen):
                    # count how many stripped sources already exist in dest
                    score = 0
                    for s in srcs:
                        t = os.path.join(dest, util.localpath(s[0])[striplen:])
                        if os.path.lexists(t):
                            score += 1
                    return score

                abspfx = util.localpath(abspfx)
                striplen = len(abspfx)
                if striplen:
                    striplen += len(pycompat.ossep)
                if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
                    score = evalpath(striplen)
                    striplen1 = len(os.path.split(abspfx)[0])
                    if striplen1:
                        striplen1 += len(pycompat.ossep)
                    if evalpath(striplen1) > score:
                        striplen = striplen1
                res = lambda p: os.path.join(dest,
                                             util.localpath(p)[striplen:])
            else:
                # a file
                if destdirexists:
                    res = lambda p: os.path.join(dest,
                                        os.path.basename(util.localpath(p)))
                else:
                    res = lambda p: dest
        return res

    pats = scmutil.expandpats(pats)
    if not pats:
        raise error.Abort(_('no source or destination specified'))
    if len(pats) == 1:
        raise error.Abort(_('no destination specified'))
    dest = pats.pop()
    destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
    if not destdirexists:
        if len(pats) > 1 or matchmod.patkind(pats[0]):
            raise error.Abort(_('with multiple sources, destination must be an '
                                'existing directory'))
        if util.endswithsep(dest):
            raise error.Abort(_('destination %s is not a directory') % dest)

    tfn = targetpathfn
    if after:
        tfn = targetpathafterfn
    copylist = []
    for pat in pats:
        srcs = walkpat(pat)
        if not srcs:
            continue
        copylist.append((tfn(pat, dest, srcs), srcs))
    if not copylist:
        raise error.Abort(_('no files to copy'))

    errors = 0
    for targetpath, srcs in copylist:
        for abssrc, relsrc, exact in srcs:
            if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
                errors += 1

    return errors != 0
1377 1377
## facility to let extension process additional data into an import patch
# list of identifiers to be executed in order
extrapreimport = [] # run before commit
extrapostimport = [] # run after commit
# mapping from identifier to actual import function
#
# 'preimport' are run before the commit is made and are provided the following
# arguments:
# - repo: the localrepository instance,
# - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
# - extra: the future extra dictionary of the changeset, please mutate it,
# - opts: the import options.
# XXX ideally, we would just pass a ctx ready to be computed, that would allow
# mutation of in memory commit and more. Feel free to rework the code to get
# there.
extrapreimportmap = {}
# 'postimport' are run after the commit is made and are provided the following
# argument:
# - ctx: the changectx created by import.
extrapostimportmap = {}
1398 1398
def tryimportone(ui, repo, patchdata, parents, opts, msgs, updatefunc):
    """Utility function used by commands.import to import a single patch

    This function is explicitly defined here to help the evolve extension to
    wrap this part of the import logic.

    The API is currently a bit ugly because it is a simple code translation
    from the import command. Feel free to make it better.

    :patchdata: a dictionary containing parsed patch data (such as from
                ``patch.extract()``)
    :parents: nodes that will be parent of the created commit
    :opts: the full dict of option passed to the import command
    :msgs: list to save commit message to.
           (used in case we need to save it when failing)
    :updatefunc: a function that update a repo to a given node
                 updatefunc(<repo>, <node>)

    Returns a ``(summary message, committed node, rejects)`` tuple, or
    ``(None, None, False)`` when the patch contained no file to apply.
    """
    # avoid cycle context -> subrepo -> cmdutil
    from . import context

    tmpname = patchdata.get('filename')
    message = patchdata.get('message')
    user = opts.get('user') or patchdata.get('user')
    date = opts.get('date') or patchdata.get('date')
    branch = patchdata.get('branch')
    nodeid = patchdata.get('nodeid')
    p1 = patchdata.get('p1')
    p2 = patchdata.get('p2')

    nocommit = opts.get('no_commit')
    importbranch = opts.get('import_branch')
    # --bypass selects the in-memory (memctx) path below, which does not
    # touch the working directory
    update = not opts.get('bypass')
    strip = opts["strip"]
    prefix = opts["prefix"]
    sim = float(opts.get('similarity') or 0)

    if not tmpname:
        return None, None, False

    rejects = False

    cmdline_message = logmessage(ui, opts)
    if cmdline_message:
        # pickup the cmdline msg
        message = cmdline_message
    elif message:
        # pickup the patch msg
        message = message.strip()
    else:
        # launch the editor
        message = None
    ui.debug('message:\n%s\n' % (message or ''))

    if len(parents) == 1:
        parents.append(repo[nullid])
    if opts.get('exact'):
        if not nodeid or not p1:
            raise error.Abort(_('not a Mercurial patch'))
        p1 = repo[p1]
        p2 = repo[p2 or nullid]
    elif p2:
        try:
            p1 = repo[p1]
            p2 = repo[p2]
            # Without any options, consider p2 only if the
            # patch is being applied on top of the recorded
            # first parent.
            if p1 != parents[0]:
                p1 = parents[0]
                p2 = repo[nullid]
        except error.RepoError:
            p1, p2 = parents
        if p2.node() == nullid:
            ui.warn(_("warning: import the patch as a normal revision\n"
                      "(use --exact to import the patch as a merge)\n"))
    else:
        p1, p2 = parents

    n = None
    if update:
        if p1 != parents[0]:
            updatefunc(repo, p1.node())
        if p2 != parents[1]:
            repo.setparents(p1.node(), p2.node())

        if opts.get('exact') or importbranch:
            repo.dirstate.setbranch(branch or 'default')

        partial = opts.get('partial', False)
        files = set()
        try:
            patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
                        files=files, eolmode=None, similarity=sim / 100.0)
        except error.PatchError as e:
            if not partial:
                raise error.Abort(pycompat.bytestr(e))
            if partial:
                # with --partial, hunks may be rejected without aborting
                rejects = True

        files = list(files)
        if nocommit:
            if message:
                msgs.append(message)
        else:
            if opts.get('exact') or p2:
                # If you got here, you either use --force and know what
                # you are doing or used --exact or a merge patch while
                # being updated to its first parent.
                m = None
            else:
                m = scmutil.matchfiles(repo, files or [])
            editform = mergeeditform(repo[None], 'import.normal')
            if opts.get('exact'):
                editor = None
            else:
                editor = getcommiteditor(editform=editform,
                                         **pycompat.strkwargs(opts))
            extra = {}
            for idfunc in extrapreimport:
                extrapreimportmap[idfunc](repo, patchdata, extra, opts)
            overrides = {}
            if partial:
                overrides[('ui', 'allowemptycommit')] = True
            with repo.ui.configoverride(overrides, 'import'):
                n = repo.commit(message, user,
                                date, match=m,
                                editor=editor, extra=extra)
            for idfunc in extrapostimport:
                extrapostimportmap[idfunc](repo[n])
    else:
        if opts.get('exact') or importbranch:
            branch = branch or 'default'
        else:
            branch = p1.branch()
        store = patch.filestore()
        try:
            files = set()
            try:
                patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
                                files, eolmode=None)
            except error.PatchError as e:
                raise error.Abort(stringutil.forcebytestr(e))
            if opts.get('exact'):
                editor = None
            else:
                editor = getcommiteditor(editform='import.bypass')
            memctx = context.memctx(repo, (p1.node(), p2.node()),
                                    message,
                                    files=files,
                                    filectxfn=store,
                                    user=user,
                                    date=date,
                                    branch=branch,
                                    editor=editor)
            n = memctx.commit()
        finally:
            store.close()
    if opts.get('exact') and nocommit:
        # --exact with --no-commit is still useful in that it does merge
        # and branch bits
        ui.warn(_("warning: can't check exact import with --no-commit\n"))
    elif opts.get('exact') and (not n or hex(n) != nodeid):
        raise error.Abort(_('patch is damaged or loses information'))
    msg = _('applied to working directory')
    if n:
        # i18n: refers to a short changeset id
        msg = _('created %s') % short(n)
    return msg, n, rejects
1568 1568
# facility to let extensions include additional data in an exported patch
# list of identifiers to be executed in order
extraexport = []
# mapping from identifier to actual export function
# the function has to return a string to be added to the header, or None
# it is given two arguments (sequencenumber, changectx)
extraexportmap = {}
1576 1576
def _exportsingle(repo, ctx, fm, match, switch_parent, seqno, diffopts):
    """Write one changeset as an "HG changeset patch" to formatter ``fm``.

    Emits the patch header (user, date, branch, node, parent(s), any
    extension-supplied header lines from ``extraexport``), the description,
    and finally the diff against the selected parent.
    """
    node = scmutil.binnode(ctx)
    parents = [p.node() for p in ctx.parents() if p]
    branch = ctx.branch()
    if switch_parent:
        # diff against the second parent instead of the first
        parents.reverse()

    if parents:
        prev = parents[0]
    else:
        prev = nullid

    fm.context(ctx=ctx)
    fm.plain('# HG changeset patch\n')
    fm.write('user', '# User %s\n', ctx.user())
    fm.plain('# Date %d %d\n' % ctx.date())
    fm.write('date', '# %s\n', fm.formatdate(ctx.date()))
    fm.condwrite(branch and branch != 'default',
                 'branch', '# Branch %s\n', branch)
    fm.write('node', '# Node ID %s\n', hex(node))
    fm.plain('# Parent %s\n' % hex(prev))
    if len(parents) > 1:
        fm.plain('# Parent %s\n' % hex(parents[1]))
    fm.data(parents=fm.formatlist(pycompat.maplist(hex, parents), name='node'))

    # TODO: redesign extraexportmap function to support formatter
    for headerid in extraexport:
        header = extraexportmap[headerid](seqno, ctx)
        if header is not None:
            fm.plain('# %s\n' % header)

    fm.write('desc', '%s\n', ctx.description().rstrip())
    fm.plain('\n')

    if fm.isplain():
        chunkiter = patch.diffui(repo, prev, node, match, opts=diffopts)
        for chunk, label in chunkiter:
            fm.plain(chunk, label=label)
    else:
        chunkiter = patch.diff(repo, prev, node, match, opts=diffopts)
        # TODO: make it structured?
        fm.data(diff=b''.join(chunkiter))
1619 1619
def _exportfile(repo, revs, fm, dest, switch_parent, diffopts, match):
    """Write the patches for ``revs`` to a single formatter.

    ``dest`` is only used for a status note; pseudo-destinations (anything
    starting with '<', e.g. '<unnamed>') are not echoed.
    """
    seqno = 0
    for rev in revs:
        seqno += 1
        if not dest.startswith('<'):
            repo.ui.note("%s\n" % dest)
        fm.startitem()
        _exportsingle(repo, repo[rev], fm, match, switch_parent, seqno,
                      diffopts)
1628 1628
def _exportfntemplate(repo, revs, basefm, fntemplate, switch_parent, diffopts,
                      match):
    """Export changesets, grouping them into files named by ``fntemplate``."""
    total = len(revs)
    revwidth = max(len(str(rev)) for rev in revs)
    revsbyfile = util.sortdict() # filename: [(seqno, rev), ...]

    # first pass: compute each revision's destination file
    for seqno, rev in enumerate(revs, 1):
        dest = makefilename(repo[rev], fntemplate,
                            total=total, seqno=seqno, revwidth=revwidth)
        revsbyfile.setdefault(dest, []).append((seqno, rev))

    # second pass: emit all revisions grouped per destination file
    for dest, entries in revsbyfile.items():
        with formatter.maybereopen(basefm, dest) as fm:
            repo.ui.note("%s\n" % dest)
            for seqno, rev in entries:
                fm.startitem()
                _exportsingle(repo, repo[rev], fm, match, switch_parent,
                              seqno, diffopts)
1650 1650
def export(repo, revs, basefm, fntemplate='hg-%h.patch', switch_parent=False,
           opts=None, match=None):
    '''export changesets as hg patches

    Writes "HG Changeset Patch" data for each revision in ``revs``.

    Args:
      repo: the repository from which revisions are exported.
      revs: a list of revision numbers to export.
      basefm: the formatter patches are written to when no file name
        template is given.
      fntemplate: an optional template used to build one output file name
        per revision; when empty, everything goes to ``basefm``.
      switch_parent: when True, show diffs against the second parent when
        it is not nullid; by default diffs are always against p1.
      opts: diff options used to generate the patches.
      match: an optional matcher restricting which file changes are
        exported.

    Returns nothing; the patch data is emitted to the formatter(s) as a
    side effect — either one unique file per revision (fntemplate given)
    or everything to ``basefm``.
    '''
    scmutil.prefetchfiles(repo, revs, match)

    if fntemplate:
        _exportfntemplate(repo, revs, basefm, fntemplate, switch_parent, opts,
                          match)
    else:
        _exportfile(repo, revs, basefm, '<unnamed>', switch_parent, opts,
                    match)
1682 1682
def exportfile(repo, revs, fp, switch_parent=False, opts=None, match=None):
    """Write patches for ``revs`` to the already-open file object ``fp``."""
    scmutil.prefetchfiles(repo, revs, match)

    destname = getattr(fp, 'name', '<unnamed>')
    fm = formatter.formatter(repo.ui, fp, 'export', {})
    with fm:
        _exportfile(repo, revs, fm, destname, switch_parent, opts, match)
1690 1690
def showmarker(fm, marker, index=None):
    """Dump one obsolescence marker through formatter ``fm``.

    Intended for debug commands: writes the optional index, then the
    predecessor, successors, flags, parents, date and metadata of the
    marker, ending with a newline.
    """
    if index is not None:
        fm.write('index', '%i ', index)
    fm.write('prednode', '%s ', hex(marker.prednode()))
    successors = marker.succnodes()
    fm.condwrite(successors, 'succnodes', '%s ',
                 fm.formatlist(map(hex, successors), name='node'))
    fm.write('flag', '%X ', marker.flags())
    parentnodes = marker.parentnodes()
    if parentnodes is not None:
        fm.write('parentnodes', '{%s} ',
                 fm.formatlist(map(hex, parentnodes), name='node', sep=', '))
    fm.write('date', '(%s) ', fm.formatdate(marker.date()))
    # the date is shown separately above, so drop it from the metadata dump
    metadata = marker.metadata().copy()
    metadata.pop('date', None)
    bytesmeta = pycompat.rapply(pycompat.maybebytestr, metadata)
    fm.write('metadata', '{%s}',
             fm.formatdict(bytesmeta, fmt='%r: %r', sep=', '))
    fm.plain('\n')
1712 1712
def finddate(ui, repo, date):
    """Find the tipmost changeset that matches the given date spec.

    Returns the matching revision number as a string, or aborts when no
    revision matches.
    """
    datematch = dateutil.matchdate(date)
    m = scmutil.matchall(repo)
    found = {} # rev -> date tuple of matching changesets

    def prep(ctx, fns):
        when = ctx.date()
        if datematch(when[0]):
            found[ctx.rev()] = when

    # walkchangerevs yields newest-first, so the first hit is the tipmost
    for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
        rev = ctx.rev()
        if rev in found:
            ui.status(_("found revision %s from %s\n") %
                      (rev, dateutil.datestr(found[rev])))
            return '%d' % rev

    raise error.Abort(_("revision matching date not found"))
1733 1733
def increasingwindows(windowsize=8, sizelimit=512):
    """Generate window sizes, doubling until ``sizelimit`` is reached.

    Yields ``windowsize``, then doubles it on each step while it is below
    ``sizelimit``; once at or above the limit, yields the final size
    forever. This never terminates — callers break out of the loop.
    """
    while windowsize < sizelimit:
        yield windowsize
        windowsize *= 2
    while True:
        yield windowsize
1739 1739
def _walkrevs(repo, opts):
    """Resolve which revisions a log-like command should visit.

    Returns a smartset; without an explicit --rev, revisions come out
    newest first.
    """
    # Default --rev value depends on --follow but --follow behavior
    # depends on revisions resolved from --rev...
    follow = opts.get('follow') or opts.get('follow_first')
    if opts.get('rev'):
        revs = scmutil.revrange(repo, opts['rev'])
    elif follow and repo.dirstate.p1() == nullid:
        # --follow from the null revision: nothing to walk
        revs = smartset.baseset()
    elif follow:
        revs = repo.revs('reverse(:.)')
    else:
        revs = smartset.spanset(repo)
        revs.reverse()
    return revs
1754 1754
class FileWalkError(Exception):
    """Raised when a file history cannot be walked using filelogs alone."""
    pass
1757 1757
def walkfilerevs(repo, match, follow, revs, fncache):
    '''Walks the file history for the matched files.

    Returns the changeset revs that are involved in the file history.

    As a side effect, ``fncache`` is populated with a mapping of
    rev -> list of filenames touched at that rev.

    Throws FileWalkError if the file history can't be walked using
    filelogs alone.
    '''
    wanted = set()
    copies = []
    minrev, maxrev = min(revs), max(revs)
    def filerevgen(filelog, last):
        """
        Only files, no patterns. Check the history of each file.

        Examines filelog entries within minrev, maxrev linkrev range
        Returns an iterator yielding (linkrev, parentlinkrevs, copied)
        tuples in backwards order
        """
        cl_count = len(repo)
        revs = []
        for j in pycompat.xrange(0, last + 1):
            linkrev = filelog.linkrev(j)
            if linkrev < minrev:
                continue
            # only yield rev for which we have the changelog, it can
            # happen while doing "hg log" during a pull or commit
            if linkrev >= cl_count:
                break

            parentlinkrevs = []
            for p in filelog.parentrevs(j):
                if p != nullrev:
                    parentlinkrevs.append(filelog.linkrev(p))
            n = filelog.node(j)
            revs.append((linkrev, parentlinkrevs,
                         follow and filelog.renamed(n)))

        return reversed(revs)
    def iterfiles():
        # yield (filename, filenode-or-None) for every file to inspect,
        # including copy sources discovered while walking
        pctx = repo['.']
        for filename in match.files():
            if follow:
                if filename not in pctx:
                    raise error.Abort(_('cannot follow file not in parent '
                                        'revision: "%s"') % filename)
                yield filename, pctx[filename].filenode()
            else:
                yield filename, None
        for filename_node in copies:
            yield filename_node

    for file_, node in iterfiles():
        filelog = repo.file(file_)
        if not len(filelog):
            if node is None:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise error.Abort(
                        _('cannot follow nonexistent file: "%s"') % file_)
                raise FileWalkError("Cannot walk via filelog")
            else:
                continue

        if node is None:
            last = len(filelog) - 1
        else:
            last = filelog.rev(node)

        # keep track of all ancestors of the file
        ancestors = {filelog.linkrev(last)}

        # iterate from latest to oldest revision
        for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
            if not follow:
                if rev > maxrev:
                    continue
            else:
                # Note that last might not be the first interesting
                # rev to us:
                # if the file has been changed after maxrev, we'll
                # have linkrev(last) > maxrev, and we still need
                # to explore the file graph
                if rev not in ancestors:
                    continue
                # XXX insert 1327 fix here
                if flparentlinkrevs:
                    ancestors.update(flparentlinkrevs)

            fncache.setdefault(rev, []).append(file_)
            wanted.add(rev)
            if copied:
                copies.append(copied)

    return wanted
1854 1854
class _followfilter(object):
    """Incrementally decide whether revisions belong to a --follow walk.

    ``match(rev)`` is expected to be called with revisions moving steadily
    away from the start: the first call fixes the starting point, later
    calls track descendants (walking forward) or ancestors (walking
    backward) through ``self.roots``.
    """
    def __init__(self, repo, onlyfirst=False):
        self.repo = repo
        self.startrev = nullrev # set lazily by the first match() call
        self.roots = set()
        self.onlyfirst = onlyfirst

    def match(self, rev):
        def realparents(rev):
            # honor --follow-first by considering only the first parent
            if self.onlyfirst:
                return self.repo.changelog.parentrevs(rev)[0:1]
            else:
                return filter(lambda x: x != nullrev,
                              self.repo.changelog.parentrevs(rev))

        if self.startrev == nullrev:
            self.startrev = rev
            return True

        if rev > self.startrev:
            # forward: all descendants
            if not self.roots:
                self.roots.add(self.startrev)
            for parent in realparents(rev):
                if parent in self.roots:
                    self.roots.add(rev)
                    return True
        else:
            # backwards: all parents
            if not self.roots:
                self.roots.update(realparents(self.startrev))
            if rev in self.roots:
                self.roots.remove(rev)
                self.roots.update(realparents(rev))
                return True

        return False
1892 1892
def walkchangerevs(repo, match, opts, prepare):
    '''Iterate over files and the revs in which they changed.

    Callers most commonly need to iterate backwards over the history
    in which they are interested. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an iterator yielding contexts. Before
    yielding each context, the iterator will first call the prepare
    function on each context in the window in forward order.'''

    allfiles = opts.get('all_files')
    follow = opts.get('follow') or opts.get('follow_first')
    revs = _walkrevs(repo, opts)
    if not revs:
        return []
    wanted = set()
    slowpath = match.anypats() or (not match.always() and opts.get('removed'))
    fncache = {}
    change = repo.__getitem__

    # First step is to fill wanted, the set of revisions that we want to yield.
    # When it does not induce extra cost, we also fill fncache for revisions in
    # wanted: a cache of filenames that were changed (ctx.files()) and that
    # match the file filtering conditions.

    if match.always() or allfiles:
        # No files, no patterns. Display all revs.
        wanted = revs
    elif not slowpath:
        # We only have to read through the filelog to find wanted revisions

        try:
            wanted = walkfilerevs(repo, match, follow, revs, fncache)
        except FileWalkError:
            slowpath = True

            # We decided to fall back to the slowpath because at least one
            # of the paths was not a file. Check to see if at least one of them
            # existed in history, otherwise simply return
            for path in match.files():
                if path == '.' or path in repo.store:
                    break
            else:
                return []

    if slowpath:
        # We have to read the changelog to match filenames against
        # changed files

        if follow:
            raise error.Abort(_('can only follow copies/renames for explicit '
                                'filenames'))

        # The slow path checks files modified in every changeset.
        # This is really slow on large repos, so compute the set lazily.
        class lazywantedset(object):
            def __init__(self):
                self.set = set()
                self.revs = set(revs)

            # No need to worry about locality here because it will be accessed
            # in the same order as the increasing window below.
            def __contains__(self, value):
                if value in self.set:
                    return True
                elif not value in self.revs:
                    return False
                else:
                    self.revs.discard(value)
                    ctx = change(value)
                    matches = [f for f in ctx.files() if match(f)]
                    if matches:
                        fncache[value] = matches
                        self.set.add(value)
                        return True
                    return False

            def discard(self, value):
                self.revs.discard(value)
                self.set.discard(value)

        wanted = lazywantedset()

    # it might be worthwhile to do this in the iterator if the rev range
    # is descending and the prune args are all within that range
    for rev in opts.get('prune', ()):
        rev = repo[rev].rev()
        ff = _followfilter(repo)
        stop = min(revs[0], revs[-1])
        for x in pycompat.xrange(rev, stop - 1, -1):
            if ff.match(x):
                # NOTE(review): relies on 'wanted' supporting subtraction
                # with a list (smartset does) — confirm for the slow path
                wanted = wanted - [x]

    # Now that wanted is correctly initialized, we can iterate over the
    # revision range, yielding only revisions in wanted.
    def iterate():
        if follow and match.always():
            ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
            def want(rev):
                return ff.match(rev) and rev in wanted
        else:
            def want(rev):
                return rev in wanted

        it = iter(revs)
        stopiteration = False
        for windowsize in increasingwindows():
            nrevs = []
            for i in pycompat.xrange(windowsize):
                rev = next(it, None)
                if rev is None:
                    stopiteration = True
                    break
                elif want(rev):
                    nrevs.append(rev)
            # forward pass within the window: let prepare() gather data
            for rev in sorted(nrevs):
                fns = fncache.get(rev)
                ctx = change(rev)
                if not fns:
                    def fns_generator():
                        if allfiles:
                            fiter = iter(ctx)
                        else:
                            fiter = ctx.files()
                        for f in fiter:
                            if match(f):
                                yield f
                    fns = fns_generator()
                prepare(ctx, fns)
            # then yield the window in the caller's requested order
            for rev in nrevs:
                yield change(rev)

            if stopiteration:
                break

    return iterate()
2034 2034
def add(ui, repo, match, prefix, explicitonly, **opts):
    """Add files matched by *match* in the working copy (and subrepos).

    Returns the list of files that could not be added (the "bad" files).
    """
    bad = []

    def badfn(x, y):
        bad.append(x)
        return match.bad(x, y)

    names = []
    wctx = repo[None]

    # Only audit filename case collisions when portability alerts are on.
    caseauditor = None
    abort, warn = scmutil.checkportabilityalert(ui)
    if abort or warn:
        caseauditor = scmutil.casecollisionauditor(ui, abort, repo.dirstate)

    badmatch = matchmod.badmatch(match, badfn)
    dirstate = repo.dirstate
    # We don't want to just call wctx.walk here, since it would return a lot of
    # clean files, which we aren't interested in and takes time.
    walkresults = dirstate.walk(badmatch, subrepos=sorted(wctx.substate),
                                unknown=True, ignored=False, full=False)
    for f in sorted(walkresults):
        exact = match.exact(f)
        if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
            if caseauditor:
                caseauditor(f)
            names.append(f)
            if ui.verbose or not exact:
                ui.status(_('adding %s\n') % match.rel(f),
                          label='addremove.added')

    # Recurse into subrepositories; without --subrepos only explicitly
    # matched files are added there.
    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, match)
            subexplicit = not opts.get(r'subrepos')
            bad.extend(sub.add(ui, submatch, prefix, subexplicit, **opts))
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % os.path.join(prefix, subpath))

    if not opts.get(r'dry_run'):
        rejected = wctx.add(names, prefix)
        bad.extend(f for f in rejected if f in match.files())
    return bad
2078 2078
def addwebdirpath(repo, serverpath, webconf):
    """Register *repo* under *serverpath* in the hgweb config dict *webconf*,
    then let every subrepository ever recorded in .hgsub do the same."""
    webconf[serverpath] = repo.root
    repo.ui.debug('adding %s = %s\n' % (serverpath, repo.root))

    # every revision touching .hgsub may declare subrepositories
    for rev in repo.revs('filelog("path:.hgsub")'):
        ctx = repo[rev]
        for subpath in ctx.substate:
            ctx.sub(subpath).addwebdirpath(serverpath, webconf)
2087 2087
def forget(ui, repo, match, prefix, explicitonly, dryrun, interactive):
    """Stop tracking files matched by *match* (implements ``hg forget``).

    Returns a ``(bad, forgot)`` pair: files that could not be forgotten and
    files actually forgotten. With *interactive*, prompts per file; with
    *dryrun*, reports but does not modify the dirstate. Aborts when both
    *dryrun* and *interactive* are requested.
    """
    if dryrun and interactive:
        raise error.Abort(_("cannot specify both --dry-run and --interactive"))
    join = lambda f: os.path.join(prefix, f)
    bad = []
    # record files the matcher reports as bad while still honoring the
    # original matcher's bad() callback
    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    wctx = repo[None]
    forgot = []

    # candidates are every tracked file the matcher selects
    s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
    forget = sorted(s.modified + s.added + s.deleted + s.clean)
    if explicitonly:
        forget = [f for f in forget if match.exact(f)]

    # recurse into subrepositories, prefixing their results with the subpath
    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, match)
            subbad, subforgot = sub.forget(submatch, prefix, dryrun=dryrun,
                                           interactive=interactive)
            bad.extend([subpath + '/' + f for f in subbad])
            forgot.extend([subpath + '/' + f for f in subforgot])
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not explicitonly:
        # warn about explicitly named files that are not tracked
        for f in match.files():
            if f not in repo.dirstate and not repo.wvfs.isdir(f):
                if f not in forgot:
                    if repo.wvfs.exists(f):
                        # Don't complain if the exact case match wasn't given.
                        # But don't do this until after checking 'forgot', so
                        # that subrepo files aren't normalized, and this op is
                        # purely from data cached by the status walk above.
                        if repo.dirstate.normalize(f) in repo.dirstate:
                            continue
                        ui.warn(_('not removing %s: '
                                  'file is already untracked\n')
                                % match.rel(f))
                    bad.append(f)

    if interactive:
        # prompt per file; iterate over a copy since 'forget' is mutated
        responses = _('[Ynsa?]'
                      '$$ &Yes, forget this file'
                      '$$ &No, skip this file'
                      '$$ &Skip remaining files'
                      '$$ Include &all remaining files'
                      '$$ &? (display help)')
        for filename in forget[:]:
            r = ui.promptchoice(_('forget %s %s') % (filename, responses))
            if r == 4: # ?
                # show help for the choices, then re-prompt
                while r == 4:
                    for c, t in ui.extractchoices(responses)[1]:
                        ui.write('%s - %s\n' % (c, encoding.lower(t)))
                    r = ui.promptchoice(_('forget %s %s') % (filename,
                                                             responses))
            if r == 0: # yes
                continue
            elif r == 1: # no
                forget.remove(filename)
            elif r == 2: # Skip
                # drop this file and every remaining one
                fnindex = forget.index(filename)
                del forget[fnindex:]
                break
            elif r == 3: # All
                break

    for f in forget:
        if ui.verbose or not match.exact(f) or interactive:
            ui.status(_('removing %s\n') % match.rel(f),
                      label='addremove.removed')

    if not dryrun:
        rejected = wctx.forget(forget, prefix)
        bad.extend(f for f in rejected if f in match.files())
        forgot.extend(f for f in forget if f not in rejected)
    return bad, forgot
2166 2166
def files(ui, ctx, m, fm, fmt, subrepos):
    """List the files of *ctx* matched by *m* through formatter *fm*.

    Returns 0 if at least one file was listed, 1 otherwise.
    """
    ret = 1

    # opening a filectx is only needed when size/flags will be shown
    wantfctx = ui.verbose or {'size', 'flags'} & fm.datahint()
    for f in ctx.matches(m):
        fm.startitem()
        fm.context(ctx=ctx)
        if wantfctx:
            fctx = ctx[f]
            fm.write('size flags', '% 10d % 1s ', fctx.size(), fctx.flags())
        fm.data(path=f)
        fm.plain(fmt % m.rel(f))
        ret = 0

    for subpath in sorted(ctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        wanted = subrepos or m.exact(subpath) or any(submatch.files())
        if not wanted:
            continue
        sub = ctx.sub(subpath)
        try:
            recurse = m.exact(subpath) or subrepos
            if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0:
                ret = 0
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % m.abs(subpath))

    return ret
2194 2194
def remove(ui, repo, m, prefix, after, force, subrepos, dryrun, warnings=None):
    """Schedule files matched by *m* for removal (implements ``hg remove``).

    *after* keeps already-deleted files only; *force* removes regardless of
    state; *dryrun* reports without touching anything. *warnings* is an
    optional list shared with recursive subrepo calls: when given, messages
    are accumulated there and printed by the outermost caller. Returns 0 on
    success, 1 if anything was skipped or warned about.
    """
    join = lambda f: os.path.join(prefix, f)
    ret = 0
    s = repo.status(match=m, clean=True)
    modified, added, deleted, clean = s[0], s[1], s[3], s[6]

    wctx = repo[None]

    # only the outermost call (warnings is None) prints the warnings at
    # the end; recursive calls just append to the shared list
    if warnings is None:
        warnings = []
        warn = True
    else:
        warn = False

    subs = sorted(wctx.substate)
    progress = ui.makeprogress(_('searching'), total=len(subs),
                               unit=_('subrepos'))
    for subpath in subs:
        submatch = matchmod.subdirmatcher(subpath, m)
        if subrepos or m.exact(subpath) or any(submatch.files()):
            progress.increment()
            sub = wctx.sub(subpath)
            try:
                if sub.removefiles(submatch, prefix, after, force, subrepos,
                                   dryrun, warnings):
                    ret = 1
            except error.LookupError:
                warnings.append(_("skipping missing subrepository: %s\n")
                               % join(subpath))
    progress.complete()

    # warn about failure to delete explicit files/dirs
    deleteddirs = util.dirs(deleted)
    files = m.files()
    progress = ui.makeprogress(_('deleting'), total=len(files),
                               unit=_('files'))
    for f in files:
        def insubrepo():
            # whether f lives inside one of this repo's subrepositories
            for subpath in wctx.substate:
                if f.startswith(subpath + '/'):
                    return True
            return False

        progress.increment()
        isdir = f in deleteddirs or wctx.hasdir(f)
        # tracked files, directories and subrepo contents are handled above
        if (f in repo.dirstate or isdir or f == '.'
            or insubrepo() or f in subs):
            continue

        if repo.wvfs.exists(f):
            if repo.wvfs.isdir(f):
                warnings.append(_('not removing %s: no tracked files\n')
                                % m.rel(f))
            else:
                warnings.append(_('not removing %s: file is untracked\n')
                                % m.rel(f))
        # missing files will generate a warning elsewhere
        ret = 1
    progress.complete()

    # decide which files actually get removed, warning about the rest
    if force:
        list = modified + deleted + clean + added
    elif after:
        list = deleted
        remaining = modified + added + clean
        progress = ui.makeprogress(_('skipping'), total=len(remaining),
                                   unit=_('files'))
        for f in remaining:
            progress.increment()
            if ui.verbose or (f in files):
                warnings.append(_('not removing %s: file still exists\n')
                                % m.rel(f))
                ret = 1
        progress.complete()
    else:
        list = deleted + clean
        progress = ui.makeprogress(_('skipping'),
                                   total=(len(modified) + len(added)),
                                   unit=_('files'))
        for f in modified:
            progress.increment()
            warnings.append(_('not removing %s: file is modified (use -f'
                              ' to force removal)\n') % m.rel(f))
            ret = 1
        for f in added:
            progress.increment()
            warnings.append(_("not removing %s: file has been marked for add"
                              " (use 'hg forget' to undo add)\n") % m.rel(f))
            ret = 1
        progress.complete()

    list = sorted(list)
    progress = ui.makeprogress(_('deleting'), total=len(list),
                               unit=_('files'))
    for f in list:
        if ui.verbose or not m.exact(f):
            progress.increment()
            ui.status(_('removing %s\n') % m.rel(f),
                      label='addremove.removed')
    progress.complete()

    if not dryrun:
        with repo.wlock():
            if not after:
                for f in list:
                    if f in added:
                        continue # we never unlink added files on remove
                    rmdir = repo.ui.configbool('experimental',
                                               'removeemptydirs')
                    repo.wvfs.unlinkpath(f, ignoremissing=True, rmdir=rmdir)
            repo[None].forget(list)

    if warn:
        for warning in warnings:
            ui.warn(warning)

    return ret
2312 2312
def _updatecatformatter(fm, ctx, matcher, path, decode):
    """Hook for adding data to the formatter used by ``hg cat``.

    Extensions (e.g., lfs) can wrap this to inject keywords/data, but must call
    this method first."""
    raw = ctx[path].data()
    # optionally run the data through the working-directory encode filters
    raw = ctx.repo().wwritedata(path, raw) if decode else raw
    fm.startitem()
    fm.context(ctx=ctx)
    fm.write('data', '%s', raw)
    fm.data(path=path)
2325 2325
def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
    """Write out the contents of files in *ctx* matched by *matcher*.

    Output goes through formatter *basefm*, or into per-file destinations
    derived from *fntemplate* when given. Returns 0 if at least one file was
    written, 1 otherwise.
    """
    err = 1
    opts = pycompat.byteskwargs(opts)

    def write(path):
        # emit one file, reopening the formatter on a templated filename
        # when -o/--output was given
        filename = None
        if fntemplate:
            filename = makefilename(ctx, fntemplate,
                                    pathname=os.path.join(prefix, path))
            # attempt to create the directory if it does not already exist
            try:
                os.makedirs(os.path.dirname(filename))
            except OSError:
                pass
        with formatter.maybereopen(basefm, filename) as fm:
            _updatecatformatter(fm, ctx, matcher, path, opts.get('decode'))

    # Automation often uses hg cat on single files, so special case it
    # for performance to avoid the cost of parsing the manifest.
    if len(matcher.files()) == 1 and not matcher.anypats():
        file = matcher.files()[0]
        mfl = repo.manifestlog
        mfnode = ctx.manifestnode()
        try:
            if mfnode and mfl[mfnode].find(file)[0]:
                scmutil.prefetchfiles(repo, [ctx.rev()], matcher)
                write(file)
                return 0
        except KeyError:
            # file not in this manifest; fall through to the generic walk
            pass

    scmutil.prefetchfiles(repo, [ctx.rev()], matcher)

    for abs in ctx.walk(matcher):
        write(abs)
        err = 0

    # recurse into subrepositories; sub.cat returns 0 on success
    for subpath in sorted(ctx.substate):
        sub = ctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, matcher)

            if not sub.cat(submatch, basefm, fntemplate,
                           os.path.join(prefix, sub._path),
                           **pycompat.strkwargs(opts)):
                err = 0
        except error.RepoLookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % os.path.join(prefix, subpath))

    return err
2377 2377
def commit(ui, repo, commitfunc, pats, opts):
    '''commit the specified files or all outstanding changes'''
    rawdate = opts.get('date')
    if rawdate:
        opts['date'] = dateutil.parsedate(rawdate)
    message = logmessage(ui, opts)
    matcher = scmutil.match(repo[None], pats, opts)

    # extract addremove carefully -- this function can be called from a command
    # that doesn't support addremove
    if opts.get('addremove'):
        # guard the dirstate so a failed addremove/commit is rolled back
        with dirstateguard.dirstateguard(repo, 'commit'):
            if scmutil.addremove(repo, matcher, "", opts) != 0:
                raise error.Abort(
                    _("failed to mark all new/missing files as added/removed"))
            return commitfunc(ui, repo, message, matcher, opts)

    return commitfunc(ui, repo, message, matcher, opts)
2398 2398
def samefile(f, ctx1, ctx2):
    """Report whether *f* is identical (content and flags) in both contexts.

    A file missing from both contexts also counts as "same"; present in only
    one of them counts as different.
    """
    in1 = f in ctx1.manifest()
    in2 = f in ctx2.manifest()
    if not in1:
        # same only when absent from both sides
        return not in2
    if not in2:
        return False
    a = ctx1.filectx(f)
    b = ctx2.filectx(f)
    # cmp() is True when the contents differ
    return not a.cmp(b) and a.flags() == b.flags()
2410 2410
def amend(ui, repo, old, extra, pats, opts):
    """Rewrite changeset *old*, folding in matched working-copy changes.

    Returns the node of the amended changeset, or ``old.node()`` when the
    result would be identical (no changes, same description/user/date/extra).
    Aborts on unresolved merges or a failed addremove.
    """
    # avoid cycle context -> subrepo -> cmdutil
    from . import context

    # amend will reuse the existing user if not specified, but the obsolete
    # marker creation requires that the current user's name is specified.
    if obsolete.isenabled(repo, obsolete.createmarkersopt):
        ui.username() # raise exception if username not set

    ui.note(_('amending changeset %s\n') % old)
    base = old.p1()

    with repo.wlock(), repo.lock(), repo.transaction('amend'):
        # Participating changesets:
        #
        # wctx     o - workingctx that contains changes from working copy
        #          |   to go into amending commit
        #          |
        # old      o - changeset to amend
        #          |
        # base     o - first parent of the changeset to amend
        wctx = repo[None]

        # Copy to avoid mutating input
        extra = extra.copy()
        # Update extra dict from amended commit (e.g. to preserve graft
        # source)
        extra.update(old.extra())

        # Also update it from the wctx
        extra.update(wctx.extra())

        user = opts.get('user') or old.user()
        date = opts.get('date') or old.date()

        # Parse the date to allow comparison between date and old.date()
        date = dateutil.parsedate(date)

        if len(old.parents()) > 1:
            # ctx.files() isn't reliable for merges, so fall back to the
            # slower repo.status() method
            files = set([fn for st in base.status(old)[:3]
                         for fn in st])
        else:
            files = set(old.files())

        # add/remove the files to the working copy if the "addremove" option
        # was specified.
        matcher = scmutil.match(wctx, pats, opts)
        if (opts.get('addremove')
            and scmutil.addremove(repo, matcher, "", opts)):
            raise error.Abort(
                _("failed to mark all new/missing files as added/removed"))

        # Check subrepos. This depends on in-place wctx._status update in
        # subrepo.precommit(). To minimize the risk of this hack, we do
        # nothing if .hgsub does not exist.
        if '.hgsub' in wctx or '.hgsub' in old:
            subs, commitsubs, newsubstate = subrepoutil.precommit(
                ui, wctx, wctx._status, matcher)
            # amend should abort if commitsubrepos is enabled
            assert not commitsubs
            if subs:
                subrepoutil.writestate(repo, newsubstate)

        ms = mergemod.mergestate.read(repo)
        mergeutil.checkunresolved(ms)

        filestoamend = set(f for f in wctx.files() if matcher(f))

        changes = (len(filestoamend) > 0)
        if changes:
            # Recompute copies (avoid recording a -> b -> a)
            copied = copies.pathcopies(base, wctx, matcher)
            # old.p2 is a bound method, so the previous bare "if old.p2:"
            # was always true; call it so copies against the second parent
            # are only computed for actual merge changesets (a null
            # changectx is falsy).
            if old.p2():
                copied.update(copies.pathcopies(old.p2(), wctx, matcher))

            # Prune files which were reverted by the updates: if old
            # introduced file X and the file was renamed in the working
            # copy, then those two files are the same and
            # we can discard X from our list of files. Likewise if X
            # was removed, it's no longer relevant. If X is missing (aka
            # deleted), old X must be preserved.
            files.update(filestoamend)
            files = [f for f in files if (not samefile(f, wctx, base)
                                          or f in wctx.deleted())]

            def filectxfn(repo, ctx_, path):
                try:
                    # If the file being considered is not amongst the files
                    # to be amended, we should return the file context from the
                    # old changeset. This avoids issues when only some files in
                    # the working copy are being amended but there are also
                    # changes to other files from the old changeset.
                    if path not in filestoamend:
                        return old.filectx(path)

                    # Return None for removed files.
                    if path in wctx.removed():
                        return None

                    fctx = wctx[path]
                    flags = fctx.flags()
                    mctx = context.memfilectx(repo, ctx_,
                                              fctx.path(), fctx.data(),
                                              islink='l' in flags,
                                              isexec='x' in flags,
                                              copied=copied.get(path))
                    return mctx
                except KeyError:
                    return None
        else:
            ui.note(_('copying changeset %s to %s\n') % (old, base))

            # Use version of files as in the old cset
            def filectxfn(repo, ctx_, path):
                try:
                    return old.filectx(path)
                except KeyError:
                    return None

        # See if we got a message from -m or -l, if not, open the editor with
        # the message of the changeset to amend.
        message = logmessage(ui, opts)

        editform = mergeeditform(old, 'commit.amend')
        editor = getcommiteditor(editform=editform,
                                 **pycompat.strkwargs(opts))

        if not message:
            editor = getcommiteditor(edit=True, editform=editform)
            message = old.description()

        pureextra = extra.copy()
        extra['amend_source'] = old.hex()

        new = context.memctx(repo,
                             parents=[base.node(), old.p2().node()],
                             text=message,
                             files=files,
                             filectxfn=filectxfn,
                             user=user,
                             date=date,
                             extra=extra,
                             editor=editor)

        newdesc = changelog.stripdesc(new.description())
        if ((not changes)
            and newdesc == old.description()
            and user == old.user()
            and date == old.date()
            and pureextra == old.extra()):
            # nothing changed. continuing here would create a new node
            # anyway because of the amend_source noise.
            #
            # This not what we expect from amend.
            return old.node()

        commitphase = None
        if opts.get('secret'):
            commitphase = phases.secret
        newid = repo.commitctx(new)

        # Reroute the working copy parent to the new changeset
        repo.setparents(newid, nullid)
        mapping = {old.node(): (newid,)}
        obsmetadata = None
        if opts.get('note'):
            obsmetadata = {'note': encoding.fromlocal(opts['note'])}
        backup = ui.configbool('ui', 'history-editing-backup')
        scmutil.cleanupnodes(repo, mapping, 'amend', metadata=obsmetadata,
                             fixphase=True, targetphase=commitphase,
                             backup=backup)

        # Fixing the dirstate because localrepo.commitctx does not update
        # it. This is rather convenient because we did not need to update
        # the dirstate for all the files in the new commit which commitctx
        # could have done if it updated the dirstate. Now, we can
        # selectively update the dirstate only for the amended files.
        dirstate = repo.dirstate

        # Update the state of the files which were added and
        # and modified in the amend to "normal" in the dirstate.
        normalfiles = set(wctx.modified() + wctx.added()) & filestoamend
        for f in normalfiles:
            dirstate.normal(f)

        # Update the state of files which were removed in the amend
        # to "removed" in the dirstate.
        removedfiles = set(wctx.removed()) & filestoamend
        for f in removedfiles:
            dirstate.drop(f)

        return newid
2605 2605
def commiteditor(repo, ctx, subs, editform=''):
    """Return ctx's description, spawning an editor only when it is empty."""
    description = ctx.description()
    if description:
        return description
    return commitforceeditor(repo, ctx, subs, editform=editform,
                             unchangedmessagedetection=True)
2611 2611
def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
                      editform='', unchangedmessagedetection=False):
    """Run the user's editor to obtain a commit message for *ctx*.

    The editor is seeded from the most specific matching '[committemplate]'
    entry for *editform*, or from the built-in commit text. *finishdesc*, if
    given, post-processes the edited text. Aborts on an empty message, or --
    when *unchangedmessagedetection* is set -- when the templated text was
    left untouched. Returns the cleaned message.
    """
    if not extramsg:
        extramsg = _("Leave message empty to abort commit.")

    # look for the most specific [committemplate] entry, e.g. for
    # editform 'commit.amend': changeset.commit.amend, changeset.commit,
    # changeset
    forms = [e for e in editform.split('.') if e]
    forms.insert(0, 'changeset')
    templatetext = None
    while forms:
        ref = '.'.join(forms)
        if repo.ui.config('committemplate', ref):
            templatetext = committext = buildcommittemplate(
                repo, ctx, subs, extramsg, ref)
            break
        forms.pop()
    else:
        committext = buildcommittext(repo, ctx, subs, extramsg)

    # run editor in the repository root
    olddir = encoding.getcwd()
    os.chdir(repo.root)

    # make in-memory changes visible to external process
    tr = repo.currenttransaction()
    repo.dirstate.write(tr)
    pending = tr and tr.writepending() and repo.root

    editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
                              editform=editform, pending=pending,
                              repopath=repo.path, action='commit')
    text = editortext

    # strip away anything below this special string (used for editors that want
    # to display the diff)
    stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
    if stripbelow:
        text = text[:stripbelow.start()]

    # drop the "HG:" helper lines we inserted
    text = re.sub("(?m)^HG:.*(\n|$)", "", text)
    os.chdir(olddir)

    if finishdesc:
        text = finishdesc(text)
    if not text.strip():
        raise error.Abort(_("empty commit message"))
    if unchangedmessagedetection and editortext == templatetext:
        raise error.Abort(_("commit message unchanged"))

    return text
2661 2661
def buildcommittemplate(repo, ctx, subs, extramsg, ref):
    """Render the '[committemplate]' template *ref* for *ctx* and return it."""
    ui = repo.ui
    spec = formatter.templatespec(ref, None, None)
    tmpl = logcmdutil.changesettemplater(ui, repo, spec)
    # make every [committemplate] item available to the template engine
    overrides = ((k, templater.unquotestring(v))
                 for k, v in ui.configitems('committemplate'))
    tmpl.t.cache.update(overrides)

    extramsg = extramsg or '' # ensure that extramsg is string

    ui.pushbuffer()
    tmpl.show(ctx, extramsg=extramsg)
    return ui.popbuffer()
2675 2675
def hgprefix(msg):
    """Prefix every non-empty line of *msg* with "HG: ", dropping empty ones."""
    lines = [line for line in msg.split("\n") if line]
    return "\n".join("HG: %s" % line for line in lines)
2678 2678
def buildcommittext(repo, ctx, subs, extramsg):
    """Assemble the default text shown in the commit-message editor."""
    modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
    edittext = []
    if ctx.description():
        edittext.append(ctx.description())
    edittext.append("")
    edittext.append("") # Empty line between message and comments.
    edittext.append(hgprefix(_("Enter commit message."
                               " Lines beginning with 'HG:' are removed.")))
    edittext.append(hgprefix(extramsg))
    edittext.append("HG: --")
    edittext.append(hgprefix(_("user: %s") % ctx.user()))
    if ctx.p2():
        edittext.append(hgprefix(_("branch merge")))
    if ctx.branch():
        edittext.append(hgprefix(_("branch '%s'") % ctx.branch()))
    if bookmarks.isactivewdirparent(repo):
        edittext.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
    # one "HG:" line per subrepo and per touched file
    for s in subs:
        edittext.append(hgprefix(_("subrepo %s") % s))
    for f in added:
        edittext.append(hgprefix(_("added %s") % f))
    for f in modified:
        edittext.append(hgprefix(_("changed %s") % f))
    for f in removed:
        edittext.append(hgprefix(_("removed %s") % f))
    if not (added or modified or removed):
        edittext.append(hgprefix(_("no files changed")))
    edittext.append("")

    return "\n".join(edittext)
2706 2706
def commitstatus(repo, node, branch, bheads=None, opts=None):
    """Print informational messages after a commit of *node*.

    Reports "created new head" and "reopening closed branch head" when
    appropriate, plus the committed changeset id in verbose/debug mode.
    """
    if opts is None:
        opts = {}
    ctx = repo[node]
    parents = ctx.parents()

    if not opts.get('amend') and bheads and node not in bheads:
        # a parent that was a head of our branch means no new head was made
        headparents = [p for p in parents
                       if p.node() in bheads and p.branch() == branch]
        if not headparents:
            repo.ui.status(_('created new head\n'))
            # The message is not printed for initial roots. For the other
            # changesets, it is printed in the following situations:
            #
            # Par column: for the 2 parents with ...
            #   N: null or no parent
            #   B: parent is on another named branch
            #   C: parent is a regular non head changeset
            #   H: parent was a branch head of the current branch
            # Msg column: whether we print "created new head" message
            # In the following, it is assumed that there already exists some
            # initial branch heads of the current branch, otherwise nothing is
            # printed anyway.
            #
            # Par Msg Comment
            # N N  y  additional topo root
            #
            # B N  y  additional branch root
            # C N  y  additional topo head
            # H N  n  usual case
            #
            # B B  y  weird additional branch root
            # C B  y  branch merge
            # H B  n  merge with named branch
            #
            # C C  y  additional head from merge
            # C H  n  merge with a head
            #
            # H H  n  head merge: head count decreases

    if not opts.get('close_branch'):
        for p in parents:
            if p.closesbranch() and p.branch() == branch:
                repo.ui.status(_('reopening closed branch head %d\n')
                               % p.rev())

    ui = repo.ui
    if ui.debugflag:
        ui.write(_('committed changeset %d:%s\n') % (ctx.rev(), ctx.hex()))
    elif ui.verbose:
        ui.write(_('committed changeset %d:%s\n') % (ctx.rev(), ctx))
2754 2754
def postcommitstatus(repo, pats, opts):
    """Return the working-copy status against the new dirstate parent."""
    match = scmutil.match(repo[None], pats, opts)
    return repo.status(match=match)
2757 2757
2758 2758 def revert(ui, repo, ctx, parents, *pats, **opts):
2759 2759 opts = pycompat.byteskwargs(opts)
2760 2760 parent, p2 = parents
2761 2761 node = ctx.node()
2762 2762
2763 2763 mf = ctx.manifest()
2764 2764 if node == p2:
2765 2765 parent = p2
2766 2766
2767 2767 # need all matching names in dirstate and manifest of target rev,
2768 2768 # so have to walk both. do not print errors if files exist in one
2769 2769 # but not other. in both cases, filesets should be evaluated against
2770 2770 # workingctx to get consistent result (issue4497). this means 'set:**'
2771 2771 # cannot be used to select missing files from target rev.
2772 2772
2773 2773 # `names` is a mapping for all elements in working copy and target revision
2774 2774 # The mapping is in the form:
2775 2775 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
2776 2776 names = {}
2777 2777
2778 2778 with repo.wlock():
2779 2779 ## filling of the `names` mapping
2780 2780 # walk dirstate to fill `names`
2781 2781
2782 2782 interactive = opts.get('interactive', False)
2783 2783 wctx = repo[None]
2784 2784 m = scmutil.match(wctx, pats, opts)
2785 2785
2786 2786 # we'll need this later
2787 2787 targetsubs = sorted(s for s in wctx.substate if m(s))
2788 2788
2789 2789 if not m.always():
2790 2790 matcher = matchmod.badmatch(m, lambda x, y: False)
2791 2791 for abs in wctx.walk(matcher):
2792 2792 names[abs] = m.rel(abs), m.exact(abs)
2793 2793
2794 2794 # walk target manifest to fill `names`
2795 2795
2796 2796 def badfn(path, msg):
2797 2797 if path in names:
2798 2798 return
2799 2799 if path in ctx.substate:
2800 2800 return
2801 2801 path_ = path + '/'
2802 2802 for f in names:
2803 2803 if f.startswith(path_):
2804 2804 return
2805 2805 ui.warn("%s: %s\n" % (m.rel(path), msg))
2806 2806
2807 2807 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
2808 2808 if abs not in names:
2809 2809 names[abs] = m.rel(abs), m.exact(abs)
2810 2810
2811 2811 # Find status of all file in `names`.
2812 2812 m = scmutil.matchfiles(repo, names)
2813 2813
2814 2814 changes = repo.status(node1=node, match=m,
2815 2815 unknown=True, ignored=True, clean=True)
2816 2816 else:
2817 2817 changes = repo.status(node1=node, match=m)
2818 2818 for kind in changes:
2819 2819 for abs in kind:
2820 2820 names[abs] = m.rel(abs), m.exact(abs)
2821 2821
2822 2822 m = scmutil.matchfiles(repo, names)
2823 2823
2824 2824 modified = set(changes.modified)
2825 2825 added = set(changes.added)
2826 2826 removed = set(changes.removed)
2827 2827 _deleted = set(changes.deleted)
2828 2828 unknown = set(changes.unknown)
2829 2829 unknown.update(changes.ignored)
2830 2830 clean = set(changes.clean)
2831 2831 modadded = set()
2832 2832
2833 2833 # We need to account for the state of the file in the dirstate,
2834 2834 # even when we revert against something else than parent. This will
2835 2835 # slightly alter the behavior of revert (doing back up or not, delete
2836 2836 # or just forget etc).
2837 2837 if parent == node:
2838 2838 dsmodified = modified
2839 2839 dsadded = added
2840 2840 dsremoved = removed
2841 2841 # store all local modifications, useful later for rename detection
2842 2842 localchanges = dsmodified | dsadded
2843 2843 modified, added, removed = set(), set(), set()
2844 2844 else:
2845 2845 changes = repo.status(node1=parent, match=m)
2846 2846 dsmodified = set(changes.modified)
2847 2847 dsadded = set(changes.added)
2848 2848 dsremoved = set(changes.removed)
2849 2849 # store all local modifications, useful later for rename detection
2850 2850 localchanges = dsmodified | dsadded
2851 2851
2852 2852 # only take into account for removes between wc and target
2853 2853 clean |= dsremoved - removed
2854 2854 dsremoved &= removed
2855 2855 # distinct between dirstate remove and other
2856 2856 removed -= dsremoved
2857 2857
2858 2858 modadded = added & dsmodified
2859 2859 added -= modadded
2860 2860
2861 2861 # tell newly modified apart.
2862 2862 dsmodified &= modified
2863 2863 dsmodified |= modified & dsadded # dirstate added may need backup
2864 2864 modified -= dsmodified
2865 2865
2866 2866 # We need to wait for some post-processing to update this set
2867 2867 # before making the distinction. The dirstate will be used for
2868 2868 # that purpose.
2869 2869 dsadded = added
2870 2870
2871 2871 # in case of merge, files that are actually added can be reported as
2872 2872 # modified, we need to post process the result
2873 2873 if p2 != nullid:
2874 2874 mergeadd = set(dsmodified)
2875 2875 for path in dsmodified:
2876 2876 if path in mf:
2877 2877 mergeadd.remove(path)
2878 2878 dsadded |= mergeadd
2879 2879 dsmodified -= mergeadd
2880 2880
2881 2881 # if f is a rename, update `names` to also revert the source
2882 2882 cwd = repo.getcwd()
2883 2883 for f in localchanges:
2884 2884 src = repo.dirstate.copied(f)
2885 2885 # XXX should we check for rename down to target node?
2886 2886 if src and src not in names and repo.dirstate[src] == 'r':
2887 2887 dsremoved.add(src)
2888 2888 names[src] = (repo.pathto(src, cwd), True)
2889 2889
2890 2890 # determine the exact nature of the deleted changesets
2891 2891 deladded = set(_deleted)
2892 2892 for path in _deleted:
2893 2893 if path in mf:
2894 2894 deladded.remove(path)
2895 2895 deleted = _deleted - deladded
2896 2896
2897 2897 # distinguish between file to forget and the other
2898 2898 added = set()
2899 2899 for abs in dsadded:
2900 2900 if repo.dirstate[abs] != 'a':
2901 2901 added.add(abs)
2902 2902 dsadded -= added
2903 2903
2904 2904 for abs in deladded:
2905 2905 if repo.dirstate[abs] == 'a':
2906 2906 dsadded.add(abs)
2907 2907 deladded -= dsadded
2908 2908
2909 2909 # For files marked as removed, we check if an unknown file is present at
2910 2910 # the same path. If a such file exists it may need to be backed up.
2911 2911 # Making the distinction at this stage helps have simpler backup
2912 2912 # logic.
2913 2913 removunk = set()
2914 2914 for abs in removed:
2915 2915 target = repo.wjoin(abs)
2916 2916 if os.path.lexists(target):
2917 2917 removunk.add(abs)
2918 2918 removed -= removunk
2919 2919
2920 2920 dsremovunk = set()
2921 2921 for abs in dsremoved:
2922 2922 target = repo.wjoin(abs)
2923 2923 if os.path.lexists(target):
2924 2924 dsremovunk.add(abs)
2925 2925 dsremoved -= dsremovunk
2926 2926
2927 2927 # action to be actually performed by revert
2928 2928 # (<list of file>, message>) tuple
2929 2929 actions = {'revert': ([], _('reverting %s\n')),
2930 2930 'add': ([], _('adding %s\n')),
2931 2931 'remove': ([], _('removing %s\n')),
2932 2932 'drop': ([], _('removing %s\n')),
2933 2933 'forget': ([], _('forgetting %s\n')),
2934 2934 'undelete': ([], _('undeleting %s\n')),
2935 2935 'noop': (None, _('no changes needed to %s\n')),
2936 2936 'unknown': (None, _('file not managed: %s\n')),
2937 2937 }
2938 2938
2939 2939 # "constant" that convey the backup strategy.
2940 2940 # All set to `discard` if `no-backup` is set do avoid checking
2941 2941 # no_backup lower in the code.
2942 2942 # These values are ordered for comparison purposes
2943 2943 backupinteractive = 3 # do backup if interactively modified
2944 2944 backup = 2 # unconditionally do backup
2945 2945 check = 1 # check if the existing file differs from target
2946 2946 discard = 0 # never do backup
2947 2947 if opts.get('no_backup'):
2948 2948 backupinteractive = backup = check = discard
2949 2949 if interactive:
2950 2950 dsmodifiedbackup = backupinteractive
2951 2951 else:
2952 2952 dsmodifiedbackup = backup
2953 2953 tobackup = set()
2954 2954
2955 2955 backupanddel = actions['remove']
2956 2956 if not opts.get('no_backup'):
2957 2957 backupanddel = actions['drop']
2958 2958
2959 2959 disptable = (
2960 2960 # dispatch table:
2961 2961 # file state
2962 2962 # action
2963 2963 # make backup
2964 2964
2965 2965 ## Sets that results that will change file on disk
2966 2966 # Modified compared to target, no local change
2967 2967 (modified, actions['revert'], discard),
2968 2968 # Modified compared to target, but local file is deleted
2969 2969 (deleted, actions['revert'], discard),
2970 2970 # Modified compared to target, local change
2971 2971 (dsmodified, actions['revert'], dsmodifiedbackup),
2972 2972 # Added since target
2973 2973 (added, actions['remove'], discard),
2974 2974 # Added in working directory
2975 2975 (dsadded, actions['forget'], discard),
2976 2976 # Added since target, have local modification
2977 2977 (modadded, backupanddel, backup),
2978 2978 # Added since target but file is missing in working directory
2979 2979 (deladded, actions['drop'], discard),
2980 2980 # Removed since target, before working copy parent
2981 2981 (removed, actions['add'], discard),
2982 2982 # Same as `removed` but an unknown file exists at the same path
2983 2983 (removunk, actions['add'], check),
2984 2984 # Removed since targe, marked as such in working copy parent
2985 2985 (dsremoved, actions['undelete'], discard),
2986 2986 # Same as `dsremoved` but an unknown file exists at the same path
2987 2987 (dsremovunk, actions['undelete'], check),
2988 2988 ## the following sets does not result in any file changes
2989 2989 # File with no modification
2990 2990 (clean, actions['noop'], discard),
2991 2991 # Existing file, not tracked anywhere
2992 2992 (unknown, actions['unknown'], discard),
2993 2993 )
2994 2994
2995 2995 for abs, (rel, exact) in sorted(names.items()):
2996 2996 # target file to be touch on disk (relative to cwd)
2997 2997 target = repo.wjoin(abs)
2998 2998 # search the entry in the dispatch table.
2999 2999 # if the file is in any of these sets, it was touched in the working
3000 3000 # directory parent and we are sure it needs to be reverted.
3001 3001 for table, (xlist, msg), dobackup in disptable:
3002 3002 if abs not in table:
3003 3003 continue
3004 3004 if xlist is not None:
3005 3005 xlist.append(abs)
3006 3006 if dobackup:
3007 3007 # If in interactive mode, don't automatically create
3008 3008 # .orig files (issue4793)
3009 3009 if dobackup == backupinteractive:
3010 3010 tobackup.add(abs)
3011 3011 elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
3012 3012 bakname = scmutil.origpath(ui, repo, rel)
3013 3013 ui.note(_('saving current version of %s as %s\n') %
3014 3014 (rel, bakname))
3015 3015 if not opts.get('dry_run'):
3016 3016 if interactive:
3017 3017 util.copyfile(target, bakname)
3018 3018 else:
3019 3019 util.rename(target, bakname)
3020 3020 if opts.get('dry_run'):
3021 3021 if ui.verbose or not exact:
3022 3022 ui.status(msg % rel)
3023 3023 elif exact:
3024 3024 ui.warn(msg % rel)
3025 3025 break
3026 3026
3027 3027 if not opts.get('dry_run'):
3028 3028 needdata = ('revert', 'add', 'undelete')
3029 3029 oplist = [actions[name][0] for name in needdata]
3030 3030 prefetch = scmutil.prefetchfiles
3031 3031 matchfiles = scmutil.matchfiles
3032 3032 prefetch(repo, [ctx.rev()],
3033 3033 matchfiles(repo,
3034 3034 [f for sublist in oplist for f in sublist]))
3035 3035 _performrevert(repo, parents, ctx, names, actions, interactive,
3036 3036 tobackup)
3037 3037
3038 3038 if targetsubs:
3039 3039 # Revert the subrepos on the revert list
3040 3040 for sub in targetsubs:
3041 3041 try:
3042 3042 wctx.sub(sub).revert(ctx.substate[sub], *pats,
3043 3043 **pycompat.strkwargs(opts))
3044 3044 except KeyError:
3045 3045 raise error.Abort("subrepository '%s' does not exist in %s!"
3046 3046 % (sub, short(ctx.node())))
3047 3047
def _performrevert(repo, parents, ctx, names, actions, interactive=False,
                   tobackup=None):
    """function that actually perform all the actions computed for revert

    This is an independent function to let extension to plug in and react to
    the imminent revert.

    Make sure you have the working directory locked when calling this function.

    ``parents`` is the (parent, p2) pair of the working directory; ``ctx``
    is the revision being reverted to.  ``names`` maps repo-relative paths
    to (relative-path, exact-match?) pairs and ``actions`` maps action
    names to ([files], message-template) pairs, both as computed by
    ``revert()``.  ``tobackup`` is the set of files needing an interactive
    backup (may be mutated here).
    """
    parent, p2 = parents
    node = ctx.node()
    # files the user declined to touch during interactive prompting; they
    # must also be skipped by the later 'revert' handling
    excluded_files = []

    def checkout(f):
        # write f's content from the target revision into the working copy
        fc = ctx[f]
        repo.wwrite(f, fc.data(), fc.flags())

    def doremove(f):
        # delete f on disk (best effort) and mark it removed in the dirstate
        try:
            rmdir = repo.ui.configbool('experimental', 'removeemptydirs')
            repo.wvfs.unlinkpath(f, rmdir=rmdir)
        except OSError:
            # the file may already be gone; still update the dirstate below
            pass
        repo.dirstate.remove(f)

    def prntstatusmsg(action, f):
        # report the action applied to f unless it was explicitly named on
        # the command line (and we are not verbose)
        rel, exact = names[f]
        if repo.ui.verbose or not exact:
            repo.ui.status(actions[action][1] % rel)

    audit_path = pathutil.pathauditor(repo.root, cached=True)
    # 'forget': files added in the working directory that should simply be
    # dropped from tracking (contents left on disk)
    for f in actions['forget'][0]:
        if interactive:
            choice = repo.ui.promptchoice(
                _("forget added file %s (Yn)?$$ &Yes $$ &No") % f)
            if choice == 0:
                prntstatusmsg('forget', f)
                repo.dirstate.drop(f)
            else:
                excluded_files.append(f)
        else:
            prntstatusmsg('forget', f)
            repo.dirstate.drop(f)
    # 'remove': files added since the target revision; delete them
    for f in actions['remove'][0]:
        audit_path(f)
        if interactive:
            choice = repo.ui.promptchoice(
                _("remove added file %s (Yn)?$$ &Yes $$ &No") % f)
            if choice == 0:
                prntstatusmsg('remove', f)
                doremove(f)
            else:
                excluded_files.append(f)
        else:
            prntstatusmsg('remove', f)
            doremove(f)
    # 'drop': files already missing from disk; only the dirstate changes
    for f in actions['drop'][0]:
        audit_path(f)
        prntstatusmsg('drop', f)
        repo.dirstate.remove(f)

    normal = None
    if node == parent:
        # We're reverting to our parent. If possible, we'd like status
        # to report the file as clean. We have to use normallookup for
        # merges to avoid losing information about merged/dirty files.
        if p2 != nullid:
            normal = repo.dirstate.normallookup
        else:
            normal = repo.dirstate.normal

    newlyaddedandmodifiedfiles = set()
    if interactive:
        # Prompt the user for changes to revert
        torevert = [f for f in actions['revert'][0] if f not in excluded_files]
        m = scmutil.matchfiles(repo, torevert)
        diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
        diffopts.nodates = True
        diffopts.git = True
        # reverting to the dirstate parent discards local changes, so the
        # selected hunks must be reversed before application; reverting to
        # another revision applies the forward diff instead
        operation = 'discard'
        reversehunks = True
        if node != parent:
            operation = 'apply'
            reversehunks = False
        if reversehunks:
            diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
        else:
            diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
        originalchunks = patch.parsepatch(diff)

        try:

            chunks, opts = recordfilter(repo.ui, originalchunks,
                                        operation=operation)
            if reversehunks:
                chunks = patch.reversehunks(chunks)

        except error.PatchError as err:
            raise error.Abort(_('error parsing patch: %s') % err)

        newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
        if tobackup is None:
            tobackup = set()
        # Apply changes
        fp = stringio()
        # chunks are serialized per file, but files aren't sorted
        for f in sorted(set(c.header.filename() for c in chunks if ishunk(c))):
            prntstatusmsg('revert', f)
        for c in chunks:
            if ishunk(c):
                abs = c.header.filename()
                # Create a backup file only if this hunk should be backed up
                if c.header.filename() in tobackup:
                    target = repo.wjoin(abs)
                    bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
                    util.copyfile(target, bakname)
                    tobackup.remove(abs)
            c.write(fp)
        dopatch = fp.tell()
        fp.seek(0)
        if dopatch:
            try:
                patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
            except error.PatchError as err:
                raise error.Abort(pycompat.bytestr(err))
        del fp
    else:
        # non-interactive: simply check out the target content
        for f in actions['revert'][0]:
            prntstatusmsg('revert', f)
            checkout(f)
            if normal:
                normal(f)

    for f in actions['add'][0]:
        # Don't checkout modified files, they are already created by the diff
        if f not in newlyaddedandmodifiedfiles:
            prntstatusmsg('add', f)
            checkout(f)
            repo.dirstate.add(f)

    # undeleted files: mark normal only when reverting to the sole parent,
    # otherwise keep normallookup to preserve merge/dirty information
    normal = repo.dirstate.normallookup
    if node == parent and p2 == nullid:
        normal = repo.dirstate.normal
    for f in actions['undelete'][0]:
        prntstatusmsg('undelete', f)
        checkout(f)
        normal(f)

    # restore copy/rename metadata for everything we touched
    copied = copies.pathcopies(repo[parent], ctx)

    for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
        if f in copied:
            repo.dirstate.copy(copied[f], f)
3201 3201
# a list of (ui, repo, otherpeer, opts, missing) functions called by
# commands.outgoing. "missing" is "missing" of the result of
# "findcommonoutgoing()"
outgoinghooks = util.hooks()

# a list of (ui, repo) functions called by commands.summary
summaryhooks = util.hooks()

# a list of (ui, repo, opts, changes) functions called by commands.summary.
#
# functions should return tuple of booleans below, if 'changes' is None:
# (whether-incomings-are-needed, whether-outgoings-are-needed)
#
# otherwise, 'changes' is a tuple of tuples below:
# - (sourceurl, sourcebranch, sourcepeer, incoming)
# - (desturl, destbranch, destpeer, outgoing)
summaryremotehooks = util.hooks()

# A list of state files kept by multistep operations like graft.
# Since graft cannot be aborted, it is considered 'clearable' by update.
# note: bisect is intentionally excluded
# (state file, clearable, allowcommit, error, hint)
# consulted by checkunfinished() and clearunfinished() below
unfinishedstates = [
    ('graftstate', True, False, _('graft in progress'),
     _("use 'hg graft --continue' or 'hg graft --stop' to stop")),
    ('updatestate', True, False, _('last update was interrupted'),
     _("use 'hg update' to get a consistent checkout"))
    ]
3230 3230
def checkunfinished(repo, commit=False):
    '''Look for an unfinished multistep operation, like graft, and abort
    if found. It's probably good to check this right before
    bailifchanged().
    '''
    # Non-clearable states are reported first, so things like rebase take
    # precedence over update.
    for wantclearable in (False, True):
        for statefile, clearable, allowcommit, msg, hint in unfinishedstates:
            if clearable != wantclearable:
                continue
            if commit and allowcommit:
                continue
            if repo.vfs.exists(statefile):
                raise error.Abort(msg, hint=hint)
3249 3249
def clearunfinished(repo):
    '''Check for unfinished operations (as above), and clear the ones
    that are clearable.
    '''
    present = [state for state in unfinishedstates
               if repo.vfs.exists(state[0])]
    # a present non-clearable state (e.g. rebase) is fatal
    for statefile, clearable, allowcommit, msg, hint in present:
        if not clearable:
            raise error.Abort(msg, hint=hint)
    # everything else can simply be removed
    for statefile, clearable, allowcommit, msg, hint in present:
        if clearable:
            util.unlink(repo.vfs.join(statefile))
3260 3260
# (state file, command needed to finish it) pairs consulted by
# howtocontinue() below
afterresolvedstates = [
    ('graftstate',
     _('hg graft --continue')),
    ]
3265 3265
def howtocontinue(repo):
    '''Check for an unfinished operation and return the command to finish
    it.

    afterresolvedstates tuples define a .hg/{file} and the corresponding
    command needed to finish it.

    Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
    a boolean.
    '''
    template = _("continue: %s")
    for statefile, continuecmd in afterresolvedstates:
        if repo.vfs.exists(statefile):
            return template % continuecmd, True
    wctx = repo[None]
    if wctx.dirty(missing=True, merge=False, branch=False):
        return template % _("hg commit"), False
    return None, None
3283 3283
def checkafterresolved(repo):
    '''Inform the user about the next action after completing hg resolve

    If there's a matching afterresolvedstates, howtocontinue will yield
    repo.ui.warn as the reporter.

    Otherwise, it will yield repo.ui.note.
    '''
    msg, warning = howtocontinue(repo)
    if msg is None:
        return
    reporter = repo.ui.warn if warning else repo.ui.note
    reporter("%s\n" % msg)
3298 3298
def wrongtooltocontinue(repo, task):
    '''Raise an abort suggesting how to properly continue if there is an
    active task.

    Uses howtocontinue() to find the active task.

    If there's no task (repo.ui.note for 'hg commit'), it does not offer
    a hint.
    '''
    msg, warning = howtocontinue(repo)
    # only warnings (real unfinished states) are worth a hint
    hint = msg if warning else None
    raise error.Abort(_('no %s in progress') % task, hint=hint)
@@ -1,3379 +1,3379
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import codecs
11 11 import collections
12 12 import difflib
13 13 import errno
14 14 import operator
15 15 import os
16 16 import random
17 17 import re
18 18 import socket
19 19 import ssl
20 20 import stat
21 21 import string
22 22 import subprocess
23 23 import sys
24 24 import time
25 25
26 26 from .i18n import _
27 27 from .node import (
28 28 bin,
29 29 hex,
30 30 nullhex,
31 31 nullid,
32 32 nullrev,
33 33 short,
34 34 )
35 35 from . import (
36 36 bundle2,
37 37 changegroup,
38 38 cmdutil,
39 39 color,
40 40 context,
41 41 dagparser,
42 42 encoding,
43 43 error,
44 44 exchange,
45 45 extensions,
46 46 filemerge,
47 47 filesetlang,
48 48 formatter,
49 49 hg,
50 50 httppeer,
51 51 localrepo,
52 52 lock as lockmod,
53 53 logcmdutil,
54 54 merge as mergemod,
55 55 obsolete,
56 56 obsutil,
57 57 phases,
58 58 policy,
59 59 pvec,
60 60 pycompat,
61 61 registrar,
62 62 repair,
63 63 revlog,
64 64 revset,
65 65 revsetlang,
66 66 scmutil,
67 67 setdiscovery,
68 68 simplemerge,
69 69 sshpeer,
70 70 sslutil,
71 71 streamclone,
72 72 templater,
73 73 treediscovery,
74 74 upgrade,
75 75 url as urlmod,
76 76 util,
77 77 vfs as vfsmod,
78 78 wireprotoframing,
79 79 wireprotoserver,
80 80 wireprotov2peer,
81 81 )
82 82 from .utils import (
83 83 cborutil,
84 84 dateutil,
85 85 procutil,
86 86 stringutil,
87 87 )
88 88
89 89 from .revlogutils import (
90 90 deltas as deltautil
91 91 )
92 92
# convenience alias used throughout this module
release = lockmod.release

# decorator used to register every debug* command below
command = registrar.command()
96 96
@command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs not in (2, 3):
        raise error.Abort(_('either two or three arguments required'))
    if nargs == 3:
        # an explicit index file was given; open it relative to the cwd
        index, rev1, rev2 = args
        r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = r.lookup
    else:
        # no index given: fall back to the current repository's changelog
        if not repo:
            raise error.Abort(_('there is no Mercurial repository here '
                                '(.hg not found)'))
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    anc = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write('%d:%s\n' % (r.rev(anc), hex(anc)))
115 115
@command('debugapplystreamclonebundle', [], 'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file

    Reads the stream clone bundle at ``fname`` and applies it to ``repo``.
    """
    # Use a context manager so the bundle file is always closed; the
    # previous code leaked the handle.  This also matches debugbundle,
    # which already wraps hg.openpath() in a 'with' block.
    with hg.openpath(ui, fname) as f:
        gen = exchange.readbundle(ui, f, fname)
        gen.apply(repo)
122 122
@command('debugbuilddag',
    [('m', 'mergeable-file', None, _('add single file mergeable changes')),
    ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
    ('n', 'new-file', None, _('add new file at each rev'))],
    _('[OPTION]... [TEXT]'))
def debugbuilddag(ui, repo, text=None,
                  mergeable_file=False,
                  overwritten_file=False,
                  new_file=False):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_("reading DAG from stdin\n"))
        text = ui.fin.read()

    # the DAG must be built from scratch; refuse non-empty repositories
    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_('repository is not empty'))

    # determine number of revs in DAG (first parse pass, for the progress
    # bar and the mergeable-file line count)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == 'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = ['%d' % i
                              for i in pycompat.xrange(0, total * linesperrev)]
        initialmergedlines.append("")

    tags = []
    progress = ui.makeprogress(_('building'), unit=_('revisions'),
                               total=total)
    with progress, repo.wlock(), repo.lock(), repo.transaction("builddag"):
        # at: id of the most recently created node (-1 before the first);
        # nodeids: node hash per id, for resolving backrefs
        at = -1
        atbranch = 'default'
        nodeids = []
        id = 0
        progress.update(id)
        # second parse pass: actually create the commits/tags/branches
        for type, data in dagparser.parsedag(text):
            if type == 'n':
                ui.note(('node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = "mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # merge node: three-way merge the shared file
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [x[fn].data() for x in (pa, p1,
                                                                     p2)]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append("")
                    elif at > 0:
                        ml = p1[fn].data().split("\n")
                    else:
                        ml = initialmergedlines
                    # touch this rev's dedicated line so every rev changes mf
                    ml[id * linesperrev] += " r%i" % id
                    mergedtext = "\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = "of"
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id

                if new_file:
                    fn = "nf%i" % id
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        # carry over the second parent's nf* files so the
                        # merge does not delete them
                        for fn in p2:
                            if fn.startswith("nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(repo, cx, path,
                                                  filecontent[path])
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
                                    date=(id, 0),
                                    user="debugbuilddag",
                                    extra={'branch': atbranch})
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == 'l':
                # local tag for the preceding node
                id, name = data
                ui.note(('tag %s\n' % name))
                tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == 'a':
                # switch named branch for subsequent nodes
                ui.note(('branch %s\n' % data))
                atbranch = data
            progress.update(id)

        if tags:
            repo.vfs.write("localtags", "".join(tags))
270 270
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """dump the contents of changegroup unbundler 'gen'

    With 'all', every delta of every chunk group is printed; otherwise only
    the changelog node hashes are shown.  'indent' prefixes each output
    line (used when nested inside bundle2 output).
    """
    indent_string = ' ' * indent
    if all:
        ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
                 % indent_string)

        def showchunks(named):
            ui.write("\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags = deltadata
                ui.write("%s%s %s %s %s %s %d\n" %
                         (indent_string, hex(node), hex(p1), hex(p2),
                          hex(cs), hex(deltabase), len(delta)))

        # the header calls advance the unbundler's stream position and must
        # precede each deltaiter() pass, even though their results are unused
        chunkdata = gen.changelogheader()
        showchunks("changelog")
        chunkdata = gen.manifestheader()
        showchunks("manifest")
        # iterate filelog sections until the empty sentinel header
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata['filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_('use debugbundle2 for this file'))
        # consume the changelog header, then print only the node hashes
        chunkdata = gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = deltadata
            ui.write("%s%s\n" % (indent_string, hex(node)))
299 299
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    pad = ' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # unreadable marker format: report and bail out early
        ui.write("%sunsupported version: %s (%d bytes)\n"
                 % (pad, exc.version, len(data)))
        return
    ui.write("%sversion: %d (%d bytes)\n" % (pad, version, len(data)))
    fm = ui.formatter('debugobsolete', opts)
    for rawmarker in sorted(markers):
        mark = obsutil.marker(None, rawmarker)
        fm.startitem()
        fm.plain(pad)
        cmdutil.showmarker(fm, mark)
    fm.end()
322 322
def _debugphaseheads(ui, data, indent=0):
    """display the phase heads encoded in 'data', one '<hex> <phase>' line
    per head"""
    pad = ' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        phasename = phases.phasenames[phase]
        for head in headsbyphase[phase]:
            ui.write(pad)
            ui.write('%s %s\n' % (hex(head), phasename))
331 331
def _quasirepr(thing):
    """Return a repr-like bytes rendering of 'thing' with stable
    (sorted) ordering for mapping types."""
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        pairs = (b'%s: %s' % (key, thing[key]) for key in sorted(thing))
        return '{%s}' % b', '.join(pairs)
    return pycompat.bytestr(repr(thing))
337 337
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_('not a bundle2 file'))
    ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
    wanted = opts.get(r'part_type', [])
    for part in gen.iterparts():
        # honor --part-type filtering when given
        if wanted and part.type not in wanted:
            continue
        ui.write(('%s -- %s (mandatory: %r)\n'
                  % (part.type, _quasirepr(part.params), part.mandatory)))
        # part types are mutually exclusive, hence the elif chain; the
        # changegroup unbundler is constructed even in quiet mode to keep
        # the stream handling identical
        if part.type == 'changegroup':
            version = part.params.get('version', '01')
            cg = changegroup.getunbundler(version, part, 'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        elif part.type == 'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        elif part.type == 'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
360 360
@command('debugbundle',
         [('a', 'all', None, _('show all details')),
          ('', 'part-type', [], _('show only the named part type')),
          ('', 'spec', None, _('print the bundlespec of the bundle'))],
         _('FILE'),
         norepo=True)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # --spec: only report the bundlespec, not the contents
            ui.write('%s\n' % exchange.getbundlespec(ui, f))
            return
        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
379 379
@command('debugcapabilities',
         [], _('PATH'),
         norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    ui.write(('Main capabilities:\n'))
    for cap in sorted(peer.capabilities()):
        ui.write((' %s\n') % cap)
    b2caps = bundle2.bundle2caps(peer)
    if b2caps:
        ui.write(('Bundle2 capabilities:\n'))
        for key, values in sorted(b2caps.iteritems()):
            ui.write((' %s\n') % key)
            for value in values:
                ui.write((' %s\n') % value)
398 398
@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate

    Cross-checks every dirstate entry against the manifests of both
    dirstate parents, warning about each inconsistency, and aborts if
    any were found.
    """
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    for f in repo.dirstate:
        state = repo.dirstate[f]
        # 'n' (normal) and 'r' (removed) entries must exist in manifest1
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        # 'a' (added) entries must NOT already be in manifest1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        # 'm' (merged) entries must come from at least one parent
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        # Bug fix: the local variable used to be named 'error', shadowing
        # the 'error' module and making 'error.Abort' below blow up with
        # an AttributeError instead of raising the intended Abort.
        errstr = _(".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
426 426
@command('debugcolor',
        [('', 'style', None, _('show all configured styles'))],
        'hg debugcolor')
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.write(('color mode: %s\n') % stringutil.pprint(ui._colormode))
    # With --style, dump the configured styles; otherwise list raw colors.
    if not opts.get(r'style'):
        return _debugdisplaycolor(ui)
    return _debugdisplaystyle(ui)
437 437
def _debugdisplaycolor(ui):
    """List every color/effect name usable with the current color mode."""
    # Work on a copy so the caller's ui keeps its configured styles.
    ui = ui.copy()
    ui._styles.clear()
    ui._styles.update((effect, effect)
                      for effect in color._activeeffects(ui).keys())
    if ui._terminfoparams:
        # terminfo mode also exposes user-defined color.*/terminfo.* entries
        for name, dummy in ui.configitems('color'):
            for prefix in ('color.', 'terminfo.'):
                if name.startswith(prefix):
                    ui._styles[name] = name[len(prefix):]
                    break
    ui.write(_('available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    for colorname, label in sorted(ui._styles.items(),
                                   key=lambda item: ('_' in item[0],
                                                     item[0], item[1])):
        ui.write(('%s\n') % colorname, label=label)
455 455
def _debugdisplaystyle(ui):
    """List the configured styles and the effects each one expands to."""
    ui.write(_('available style:\n'))
    if not ui._styles:
        return
    # Pad every label to the longest name so the effect lists line up.
    width = max(len(name) for name in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write('%s' % label, label=label)
        if effects:
            padding = ' ' * (max(0, width - len(label)))
            ui.write(': ')
            ui.write(padding)
            rendered = ', '.join(ui.label(e, e) for e in effects.split())
            ui.write(rendered)
        ui.write('\n')
469 469
@command('debugcreatestreamclonebundle', [], 'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        # Stream bundles copy revlogs wholesale, so secret changesets
        # cannot be filtered out; warn rather than abort (see TODO above).
        ui.warn(_('(warning: stream clone bundle will contain secret '
                  'revisions)\n'))

    requirements, chunks = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, chunks, fname)

    ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
487 487
@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
    ('b', 'branches', None, _('annotate with branch names')),
    ('', 'dots', None, _('use dots for runs')),
    ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'),
    optionalrepo=True)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get(r'spaces')
    dots = opts.get(r'dots')
    if file_:
        # Explicit index file: read it as a standalone revlog.
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False),
                             file_)
        revs = set((int(r) for r in revs))
        # Event stream consumed by dagparser below: 'n' = node (rev plus
        # its non-null parents), 'l' = label attached to a rev.
        def events():
            for r in rlog:
                yield 'n', (r, list(p for p in rlog.parentrevs(r)
                                    if p != -1))
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        cl = repo.changelog
        tags = opts.get(r'tags')
        branches = opts.get(r'branches')
        if tags:
            # rev -> list of tag names pointing at it
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)
        def events():
            # 'a' events annotate branch changes; 'n'/'l' as above.
            b = "default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        yield 'a', newb
                        b = newb
                yield 'n', (r, list(p for p in cl.parentrevs(r)
                                    if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise error.Abort(_('need repo for changelog dag'))

    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
    ui.write("\n")
550 550
@command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the positional FILE argument is really the revision.
    usingstoreopt = any(opts.get(o) for o in ('changelog', 'manifest', 'dir'))
    if usingstoreopt:
        if rev is not None:
            raise error.CommandError('debugdata', _('invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    store = cmdutil.openstorage(repo, 'debugdata', file_, opts)
    try:
        ui.write(store.revision(store.lookup(rev), raw=True))
    except KeyError:
        raise error.Abort(_('invalid revision identifier %s') % rev)
566 566
@command('debugdate',
        [('e', 'extended', None, _('try extended date formats'))],
        _('[-e] DATE [RANGE]'),
        norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # --extended enables the laxer set of accepted date formats.
    formats = util.extendeddateformats if opts[r"extended"] else None
    if formats is None:
        parsed = dateutil.parsedate(date)
    else:
        parsed = dateutil.parsedate(date, formats)
    ui.write(("internal: %d %d\n") % parsed)
    ui.write(("standard: %s\n") % dateutil.datestr(parsed))
    if range:
        matcher = dateutil.matchdate(range)
        ui.write(("match: %s\n") % matcher(parsed[0]))
582 582
@command('debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``:       revision number
    :``chainid``:   delta chain identifier (numbered by unique base)
    :``chainlen``:  delta chain length to this revision
    :``prevrev``:   previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``:  compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                    (new delta chains typically start at ratio 2.00)
    :``lindist``:   linear distance from base revision in delta chain to end
                    of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                    how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``:     total size of data read from the disk for a revision
                       (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``:  density of useful bytes in the data read from the disk
    :``srchunks``:  in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # Size and classify the delta of one revision from its raw index
        # entry: e[1] is the compressed size, e[2] the uncompressed size,
        # e[3] the delta base; e[5]/e[6] are compared against as p1/p2.
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            if e[3] == e[5]:
                deltatype = 'p1'
            elif e[3] == e[6]:
                deltatype = 'p2'
            elif e[3] == rev - 1:
                deltatype = 'prev'
            elif e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'other'
        else:
            # Without generaldelta the base is either the revision itself
            # (a full snapshot) or the previous revision.
            if e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'prev'

        # Sum the compressed sizes of every link in the delta chain.
        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter('debugdeltachain', opts)

    fm.plain('    rev  chain# chainlen     prev   delta       '
             'size    rawsize  chainsize     ratio   lindist extradist '
             'extraratio')
    if withsparseread:
        fm.plain('   readsize largestblk rddensity srchunks')
    fm.plain('\n')

    # chain base rev -> small chain id, numbered in order of discovery
    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        # distance in bytes from the chain base to the end of this rev
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # chain of length 1: no previous revision
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write('rev chainid chainlen prevrev deltatype compsize '
                 'uncompsize chainsize chainratio lindist extradist '
                 'extraratio',
                 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
                 rev, chainid, len(chain), prevrev, deltatype, comp,
                 uncomp, chainsize, chainratio, lineardist, extradist,
                 extraratio,
                 rev=rev, chainid=chainid, chainlen=len(chain),
                 prevrev=prevrev, deltatype=deltatype, compsize=comp,
                 uncompsize=uncomp, chainsize=chainsize,
                 chainratio=chainratio, lindist=lineardist,
                 extradist=extradist, extraratio=extraratio)
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            # Simulate the sparse read: count the hunks the chain would be
            # read in and how many bytes each contiguous block covers.
            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write('readsize largestblock readdensity srchunks',
                     ' %10d %10d %9.5f %8d',
                     readsize, largestblock, readdensity, srchunks,
                     readsize=readsize, largestblock=largestblock,
                     readdensity=readdensity, srchunks=srchunks)

        fm.plain('\n')

    fm.end()
734 734
@command('debugdirstate|debugstate',
    [('', 'nodates', None, _('do not display the saved mtime (DEPRECATED)')),
    ('', 'dates', True, _('display the saved mtime')),
    ('', 'datesort', None, _('sort by saved mtime'))],
    _('[OPTION]...'))
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    # --nodates is the deprecated spelling of --dates=no.
    nodates = not opts[r'dates']
    if opts.get(r'nodates') is not None:
        nodates = True

    if opts.get(r'datesort'):
        keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
    else:
        keyfunc = None # sort by filename
    entries = sorted(repo.dirstate._map.iteritems(), key=keyfunc)
    for file_, ent in entries:
        mtime = ent[3]
        if mtime == -1:
            timestr = 'unset               '
        elif nodates:
            timestr = 'set                 '
        else:
            timestr = encoding.strtolocal(
                time.strftime(r"%Y-%m-%d %H:%M:%S ", time.localtime(mtime)))
        if ent[1] & 0o20000:
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
769 769
@command('debugdiscovery',
    [('', 'old', None, _('use old-style discovery')),
    ('', 'nonheads', None,
     _('use old-style discovery with non-heads included')),
    ('', 'rev', [], 'restrict discovery to this set of revs'),
    ] + cmdutil.remoteopts,
    _('[--rev REV] [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    opts = pycompat.byteskwargs(opts)
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(12323)

    def doit(pushedrevs, remoteheads, remote=remote):
        # Run one discovery round and report which heads turned out common.
        if opts.get('old'):
            # legacy tree-walking discovery protocol
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                ui.write(("unpruned common: %s\n") %
                         " ".join(sorted(short(n) for n in common)))

            clnode = repo.changelog.node
            # reduce the common set to its heads only
            common = repo.revs('heads(::%ln)', common)
            common = {clnode(r) for r in common}
        else:
            # modern set-based discovery; --rev limits the local head set
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            # NOTE(review): 'any' shadows the builtin in this scope.
            common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
                                                            ancestorsof=nodes)
        common = set(common)
        rheads = set(hds)
        lheads = set(repo.heads())
        ui.write(("common heads: %s\n") %
                 " ".join(sorted(short(n) for n in common)))
        if lheads <= common:
            ui.write(("local is subset\n"))
        elif rheads <= common:
            ui.write(("remote is subset\n"))

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts['rev']
    doit(localrevs, remoterevs)
822 822
# 4 KiB read/write granularity used by debugdownload's streaming copy.
_chunksize = 4 << 10
824 824
@command('debugdownload',
    [
        ('o', 'output', '', _('path')),
    ],
    optionalrepo=True)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config
    """
    # Open the resource through Mercurial's url layer so proxy/auth
    # configuration applies.
    fh = urlmod.open(ui, url, output)

    dest = ui
    try:
        if output:
            dest = open(output, "wb", _chunksize)
        # Stream in fixed-size chunks to bound memory usage.
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        # Bug fix: the url handle was previously never closed (and leaked
        # when opening the output file failed); close both ends even when
        # an error interrupts the copy. 'ui' itself is never closed.
        fh.close()
        if dest is not ui:
            dest.close()
846 846
@command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter('debugextensions', opts)
    # Emit one formatter item per loaded extension, sorted by name.
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = pycompat.fsencode(extmod.__file__)
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', '').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write('name', '%s\n', extname)
        else:
            # default verbosity: append a compatibility note to the name
            fm.write('name', '%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain('\n')
            elif not exttestedwith:
                fm.plain(_(' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(' (%s!)\n' % lasttestedversion)

        fm.condwrite(ui.verbose and extsource, 'source',
                 _('  location: %s\n'), extsource or "")

        if ui.verbose:
            fm.plain(_('  bundled: %s\n') % ['no', 'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
                     _('  tested with: %s\n'),
                     fm.formatlist(exttestedwith, name='ver'))

        fm.condwrite(ui.verbose and extbuglink, 'buglink',
                     _('  bug reporting: %s\n'), extbuglink or "")

    fm.end()
892 892
@command('debugfileset',
    [('r', 'rev', '', _('apply the filespec on this revision'), _('REV')),
     ('', 'all-files', False,
      _('test files from all revisions and working directory')),
     ('s', 'show-matcher', None,
      _('print internal representation of matcher')),
     ('p', 'show-stage', [],
      _('print parsed tree at the given stage'), _('NAME'))],
    _('[-r REV] [--all-files] [OPTION]... FILESPEC'))
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset
    fileset.symbols # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'), None)

    # Compilation pipeline; each stage name can be requested via -p.
    stages = [
        ('parsed', pycompat.identity),
        ('analyzed', filesetlang.analyze),
        ('optimized', filesetlang.optimize),
    ]
    stagenames = set(n for n, f in stages)

    showalways = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add('parsed')
    if opts['show_stage'] == ['all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_('invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    # Run the expression through every stage, dumping requested trees.
    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts['show_stage'] or n != 'parsed':
                ui.write(("* %s:\n") % n)
            ui.write(filesetlang.prettyformat(tree), "\n")

    # Collect the candidate file names to run the matcher over.
    files = set()
    if opts['all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts['all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(repo.dirstate.walk(scmutil.matchall(repo),
                                        subrepos=list(wctx.substate),
                                        unknown=True, ignored=True))
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(expr)
    if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
        ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write("%s\n" % f)
959 959
@command('debugformat',
         [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # Column width: longest variant name, at least as wide as the header.
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len('format-variant'), maxvariantlength)

    def makeformatname(name):
        # "%s:" padded so the value columns line up
        return '%s:' + (' ' * (maxvariantlength - len(name)))

    fm = ui.formatter('debugformat', opts)
    if fm.isplain():
        def formatvalue(value):
            # plain output renders booleans as yes/no, passes strings through
            if util.safehasattr(value, 'startswith'):
                return value
            if value:
                return 'yes'
            else:
                return 'no'
    else:
        formatvalue = pycompat.identity

    fm.plain('format-variant')
    fm.plain(' ' * (maxvariantlength - len('format-variant')))
    fm.plain(' repo')
    if ui.verbose:
        fm.plain(' config default')
    fm.plain('\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # Pick labels so mismatches between repo, config and default can
        # be highlighted by the color extension.
        if repovalue != configvalue:
            namelabel = 'formatvariant.name.mismatchconfig'
            repolabel = 'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = 'formatvariant.name.mismatchdefault'
            repolabel = 'formatvariant.repo.mismatchdefault'
        else:
            namelabel = 'formatvariant.name.uptodate'
            repolabel = 'formatvariant.repo.uptodate'

        fm.write('name', makeformatname(fv.name), fv.name,
                 label=namelabel)
        fm.write('repo', ' %3s', formatvalue(repovalue),
                 label=repolabel)
        if fv.default != configvalue:
            configlabel = 'formatvariant.config.special'
        else:
            configlabel = 'formatvariant.config.default'
        fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
                     label=configlabel)
        fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
                     label='formatvariant.default')
        fm.plain('\n')
    fm.end()
1021 1021
@command('debugfsinfo', [], _('[PATH]'), norepo=True)
def debugfsinfo(ui, path="."):
    """show information detected about current filesystem"""
    def yesno(flag):
        # same rendering as the original 'flag and "yes" or "no"' idiom
        return 'yes' if flag else 'no'

    ui.write(('path: %s\n') % path)
    ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
    ui.write(('exec: %s\n') % yesno(util.checkexec(path)))
    ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
    ui.write(('symlink: %s\n') % yesno(util.checklink(path)))
    ui.write(('hardlink: %s\n') % yesno(util.checknlink(path)))
    casesensitive = '(unknown)'
    try:
        # Case sensitivity is probed with a real temporary file in the
        # target directory; failures to create one are reported as unknown.
        with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.write(('case-sensitive: %s\n') % casesensitive)
1038 1038
@command('debuggetbundle',
    [('H', 'head', [], _('id of head node'), _('ID')),
    ('C', 'common', [], _('id of common node'), _('ID')),
    ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
    _('REPO FILE [-H|-C ID]...'),
    norepo=True)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable('getbundle'):
        raise error.Abort("getbundle() not supported by target repository")
    # TODO: get desired bundlecaps from command line.
    kwargs = {r'bundlecaps': None}
    if common:
        kwargs[r'common'] = [bin(s) for s in common]
    if head:
        kwargs[r'heads'] = [bin(s) for s in head]
    bundle = peer.getbundle('debug', **kwargs)

    # Map the user-facing compression name onto an internal bundle type.
    btypes = {'none': 'HG10UN',
              'bzip2': 'HG10BZ',
              'gzip': 'HG10GZ',
              'bundle2': 'HG20'}
    bundletype = btypes.get(opts.get('type', 'bzip2').lower())
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_('unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1073 1073
@command('debugignore', [], '[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write("%s\n" % pycompat.byterepr(ignore))
        return
    m = scmutil.match(repo[None], pats=files)
    for f in m.files():
        nf = util.normpath(f)
        ignored = None
        ignoredata = None
        if nf != '.':
            # Check the file itself first, then each containing directory.
            if ignore(nf):
                ignored = nf
                ignoredata = repo.dirstate._ignorefileandline(nf)
            else:
                for p in util.finddirs(nf):
                    if ignore(p):
                        ignored = p
                        ignoredata = repo.dirstate._ignorefileandline(p)
                        break
        if not ignored:
            ui.write(_("%s is not ignored\n") % m.uipath(f))
            continue
        if ignored == nf:
            ui.write(_("%s is ignored\n") % m.uipath(f))
        else:
            ui.write(_("%s is ignored because of "
                       "containing folder %s\n")
                     % (m.uipath(f), ignored))
        ignorefile, lineno, line = ignoredata
        ui.write(_("(ignore rule in %s, line %d: '%s')\n")
                 % (ignorefile, lineno, line))
1115 1115
@command('debugindex', cmdutil.debugrevlogopts + cmdutil.formatteropts,
         _('-c|-m|FILE'))
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a storage primitive"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, 'debugindex', file_, opts)

    # --debug shows full hashes, otherwise the short form.
    shortfn = hex if ui.debugflag else short

    # Node column width, taken from the first revision (if any exists).
    idlen = 12
    for i in store:
        idlen = len(shortfn(store.node(i)))
        break

    fm = ui.formatter('debugindex', opts)
    fm.plain(b'   rev linkrev %s %s p2\n' % (
        b'nodeid'.ljust(idlen),
        b'p1'.ljust(idlen)))

    for rev in store:
        node = store.node(rev)
        p1, p2 = store.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', '%7d ', store.linkrev(rev))
        fm.write(b'node', '%s ', shortfn(node))
        fm.write(b'p1', '%s ', shortfn(p1))
        fm.write(b'p2', '%s', shortfn(p2))
        fm.plain(b'\n')

    fm.end()
1151 1151
@command('debugindexdot', cmdutil.debugrevlogopts,
         _('-c|-m|FILE'), optionalrepo=True)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, 'debugindexdot', file_, opts)
    ui.write(("digraph G {\n"))
    for rev in store:
        p1, p2 = store.parents(store.node(rev))
        # Always draw the first-parent edge; the second parent is only
        # drawn when it is not the null node.
        ui.write("\t%d -> %d\n" % (store.rev(p1), rev))
        if p2 != nullid:
            ui.write("\t%d -> %d\n" % (store.rev(p2), rev))
    ui.write("}\n")
1166 1166
@command('debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # Exercise the index once so its stats are populated before reading.
    repo.changelog.shortest(nullid, 1)
    stats = repo.changelog.index.stats()
    for key, value in sorted(stats.items()):
        ui.write('%s: %s\n' % (key, value))
1173 1173
1174 1174 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1175 1175 def debuginstall(ui, **opts):
1176 1176 '''test Mercurial installation
1177 1177
1178 1178 Returns 0 on success.
1179 1179 '''
1180 1180 opts = pycompat.byteskwargs(opts)
1181 1181
1182 1182 def writetemp(contents):
1183 1183 (fd, name) = pycompat.mkstemp(prefix="hg-debuginstall-")
1184 1184 f = os.fdopen(fd, r"wb")
1185 1185 f.write(contents)
1186 1186 f.close()
1187 1187 return name
1188 1188
1189 1189 problems = 0
1190 1190
1191 1191 fm = ui.formatter('debuginstall', opts)
1192 1192 fm.startitem()
1193 1193
1194 1194 # encoding
1195 1195 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1196 1196 err = None
1197 1197 try:
1198 1198 codecs.lookup(pycompat.sysstr(encoding.encoding))
1199 1199 except LookupError as inst:
1200 1200 err = stringutil.forcebytestr(inst)
1201 1201 problems += 1
1202 1202 fm.condwrite(err, 'encodingerror', _(" %s\n"
1203 1203 " (check that your locale is properly set)\n"), err)
1204 1204
1205 1205 # Python
1206 1206 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1207 1207 pycompat.sysexecutable)
1208 1208 fm.write('pythonver', _("checking Python version (%s)\n"),
1209 1209 ("%d.%d.%d" % sys.version_info[:3]))
1210 1210 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1211 1211 os.path.dirname(pycompat.fsencode(os.__file__)))
1212 1212
1213 1213 security = set(sslutil.supportedprotocols)
1214 1214 if sslutil.hassni:
1215 1215 security.add('sni')
1216 1216
1217 1217 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1218 1218 fm.formatlist(sorted(security), name='protocol',
1219 1219 fmt='%s', sep=','))
1220 1220
1221 1221 # These are warnings, not errors. So don't increment problem count. This
1222 1222 # may change in the future.
1223 1223 if 'tls1.2' not in security:
1224 1224 fm.plain(_(' TLS 1.2 not supported by Python install; '
1225 1225 'network connections lack modern security\n'))
1226 1226 if 'sni' not in security:
1227 1227 fm.plain(_(' SNI not supported by Python install; may have '
1228 1228 'connectivity issues with some servers\n'))
1229 1229
1230 1230 # TODO print CA cert info
1231 1231
1232 1232 # hg version
1233 1233 hgver = util.version()
1234 1234 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1235 1235 hgver.split('+')[0])
1236 1236 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1237 1237 '+'.join(hgver.split('+')[1:]))
1238 1238
1239 1239 # compiled modules
1240 1240 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1241 1241 policy.policy)
1242 1242 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1243 1243 os.path.dirname(pycompat.fsencode(__file__)))
1244 1244
1245 1245 if policy.policy in ('c', 'allow'):
1246 1246 err = None
1247 1247 try:
1248 1248 from .cext import (
1249 1249 base85,
1250 1250 bdiff,
1251 1251 mpatch,
1252 1252 osutil,
1253 1253 )
1254 1254 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1255 1255 except Exception as inst:
1256 1256 err = stringutil.forcebytestr(inst)
1257 1257 problems += 1
1258 1258 fm.condwrite(err, 'extensionserror', " %s\n", err)
1259 1259
1260 1260 compengines = util.compengines._engines.values()
1261 1261 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1262 1262 fm.formatlist(sorted(e.name() for e in compengines),
1263 1263 name='compengine', fmt='%s', sep=', '))
1264 1264 fm.write('compenginesavail', _('checking available compression engines '
1265 1265 '(%s)\n'),
1266 1266 fm.formatlist(sorted(e.name() for e in compengines
1267 1267 if e.available()),
1268 1268 name='compengine', fmt='%s', sep=', '))
1269 1269 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1270 1270 fm.write('compenginesserver', _('checking available compression engines '
1271 1271 'for wire protocol (%s)\n'),
1272 1272 fm.formatlist([e.name() for e in wirecompengines
1273 1273 if e.wireprotosupport()],
1274 1274 name='compengine', fmt='%s', sep=', '))
1275 1275 re2 = 'missing'
1276 1276 if util._re2:
1277 1277 re2 = 'available'
1278 1278 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
1279 1279 fm.data(re2=bool(util._re2))
1280 1280
1281 1281 # templates
1282 1282 p = templater.templatepaths()
1283 1283 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1284 1284 fm.condwrite(not p, '', _(" no template directories found\n"))
1285 1285 if p:
1286 1286 m = templater.templatepath("map-cmdline.default")
1287 1287 if m:
1288 1288 # template found, check if it is working
1289 1289 err = None
1290 1290 try:
1291 1291 templater.templater.frommapfile(m)
1292 1292 except Exception as inst:
1293 1293 err = stringutil.forcebytestr(inst)
1294 1294 p = None
1295 1295 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1296 1296 else:
1297 1297 p = None
1298 1298 fm.condwrite(p, 'defaulttemplate',
1299 1299 _("checking default template (%s)\n"), m)
1300 1300 fm.condwrite(not m, 'defaulttemplatenotfound',
1301 1301 _(" template '%s' not found\n"), "default")
1302 1302 if not p:
1303 1303 problems += 1
1304 1304 fm.condwrite(not p, '',
1305 1305 _(" (templates seem to have been installed incorrectly)\n"))
1306 1306
1307 1307 # editor
1308 1308 editor = ui.geteditor()
1309 1309 editor = util.expandpath(editor)
1310 1310 editorbin = procutil.shellsplit(editor)[0]
1311 1311 fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
1312 1312 cmdpath = procutil.findexe(editorbin)
1313 1313 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1314 1314 _(" No commit editor set and can't find %s in PATH\n"
1315 1315 " (specify a commit editor in your configuration"
1316 1316 " file)\n"), not cmdpath and editor == 'vi' and editorbin)
1317 1317 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1318 1318 _(" Can't find editor '%s' in PATH\n"
1319 1319 " (specify a commit editor in your configuration"
1320 1320 " file)\n"), not cmdpath and editorbin)
1321 1321 if not cmdpath and editor != 'vi':
1322 1322 problems += 1
1323 1323
1324 1324 # check username
1325 1325 username = None
1326 1326 err = None
1327 1327 try:
1328 1328 username = ui.username()
1329 1329 except error.Abort as e:
1330 1330 err = stringutil.forcebytestr(e)
1331 1331 problems += 1
1332 1332
1333 1333 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1334 1334 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1335 1335 " (specify a username in your configuration file)\n"), err)
1336 1336
1337 1337 fm.condwrite(not problems, '',
1338 1338 _("no problems detected\n"))
1339 1339 if not problems:
1340 1340 fm.data(problems=problems)
1341 1341 fm.condwrite(problems, 'problems',
1342 1342 _("%d problems detected,"
1343 1343 " please check your install!\n"), problems)
1344 1344 fm.end()
1345 1345
1346 1346 return problems
1347 1347
@command('debugknown', [], _('REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable('known'):
        raise error.Abort("known() not supported by target repository")
    # one wire query for all ids; render one '0'/'1' flag per input id
    flags = peer.known([bin(s) for s in ids])
    ui.write("%s\n" % "".join("1" if f else "0" for f in flags))
1361 1361
@command('debuglabelcomplete', [], _('LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # Thin alias kept only so that old completion scripts keep working; the
    # real implementation lives in debugnamecomplete.
    debugnamecomplete(ui, repo, *args)
1366 1366
@command('debuglocks',
         [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
          ('W', 'force-wlock', None,
           _('free the working state lock (DANGEROUS)')),
          ('s', 'set-lock', None, _('set the store lock until stopped')),
          ('S', 'set-wlock', None,
           _('set the working state lock until stopped'))],
         _('[OPTION]...'))
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # --force-lock/--force-wlock: blindly delete the lock file(s) on disk
    # and stop; no check is made that the lock is actually stale.
    if opts.get(r'force_lock'):
        repo.svfs.unlink('lock')
    if opts.get(r'force_wlock'):
        repo.vfs.unlink('wlock')
    if opts.get(r'force_lock') or opts.get(r'force_wlock'):
        return 0

    # --set-lock/--set-wlock: acquire the requested lock(s) without
    # blocking, hold them until the user answers the prompt, and always
    # release them on the way out (the finally clause below).
    locks = []
    try:
        if opts.get(r'set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_('wlock is already held'))
        if opts.get(r'set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_('lock is already held'))
        if len(locks):
            ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*locks)

    # No modifying option was given: report the state of both locks.
    now = time.time()
    held = 0

    def report(vfs, name, method):
        # Print the state of lock `name`; returns 1 if held, 0 if free.
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # we managed to take the lock ourselves, so it was free
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if ":" in locker:
                    host, pid = locker.split(':')
                    # only mention the host when the holder is remote
                    if host == socket.gethostname():
                        locker = 'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = 'user %s, process %s, host %s' \
                                 % (user or b'None', pid, host)
                ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
                return 1
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise

        ui.write(("%-6s free\n") % (name + ":"))
        return 0

    held += report(repo.svfs, "lock", repo.lock)
    held += report(repo.vfs, "wlock", repo.wlock)

    return held
1463 1463
@command('debugmanifestfulltextcache', [
        ('', 'clear', False, _('clear the cache')),
        ('a', 'add', '', _('add the given manifest node to the cache'),
         _('NODE'))
    ], '')
def debugmanifestfulltextcache(ui, repo, add=None, **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""
    with repo.lock():
        r = repo.manifestlog.getstorage(b'')
        try:
            cache = r._fulltextcache
        except AttributeError:
            # not all revlog implementations carry a fulltext cache
            ui.warn(_(
                "Current revlog implementation doesn't appear to have a "
                'manifest fulltext cache\n'))
            return

        if opts.get(r'clear'):
            cache.clear()

        if add:
            # --add: load the given manifest, which populates the cache as
            # a side effect of read()
            try:
                manifest = repo.manifestlog[r.lookup(add)]
            except error.LookupError as e:
                raise error.Abort(e, hint="Check your manifest node id")
            manifest.read() # stores revision in cache too

        if not len(cache):
            ui.write(_('Cache empty'))
        else:
            ui.write(
                _('Cache contains %d manifest entries, in order of most to '
                  'least recent:\n') % (len(cache),))
            totalsize = 0
            for nodeid in cache:
                # Use cache.get to not update the LRU order
                data = cache.get(nodeid)
                size = len(data)
                totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
                ui.write(_('id: %s, size %s\n') % (
                    hex(nodeid), util.bytecount(size)))
            ondisk = cache._opener.stat('manifestfulltextcache').st_size
            ui.write(
                _('Total cache data size %s, on-disk %s\n') % (
                    util.bytecount(totalsize), util.bytecount(ondisk))
            )
1510 1510
@command('debugmergestate', [], '')
def debugmergestate(ui, repo, *args):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""
    def _hashornull(h):
        # render the null hash as the literal string 'null' for readability
        if h == nullhex:
            return 'null'
        else:
            return h

    def printrecords(version):
        # dump the raw merge-state records in the given format version
        ui.write(('* version %d records\n') % version)
        if version == 1:
            records = v1records
        else:
            records = v2records

        for rtype, record in records:
            # pretty print some record types
            if rtype == 'L':
                ui.write(('local: %s\n') % record)
            elif rtype == 'O':
                ui.write(('other: %s\n') % record)
            elif rtype == 'm':
                # merge driver record: driver name + its state character
                driver, mdstate = record.split('\0', 1)
                ui.write(('merge driver: %s (state "%s")\n')
                         % (driver, mdstate))
            elif rtype in 'FDC':
                # per-file records ('F', 'D' and 'C' share the same layout)
                r = record.split('\0')
                f, state, hash, lfile, afile, anode, ofile = r[0:7]
                # the "other" node is only stored by the v2 format
                if version == 1:
                    onode = 'not stored in v1 format'
                    flags = r[7]
                else:
                    onode, flags = r[7:9]
                ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
                         % (f, rtype, state, _hashornull(hash)))
                ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
                ui.write((' ancestor path: %s (node %s)\n')
                         % (afile, _hashornull(anode)))
                ui.write((' other path: %s (node %s)\n')
                         % (ofile, _hashornull(onode)))
            elif rtype == 'f':
                # file extras: filename followed by alternating key/value
                # entries, all NUL-separated
                filename, rawextras = record.split('\0', 1)
                extras = rawextras.split('\0')
                i = 0
                extrastrings = []
                while i < len(extras):
                    extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
                    i += 2

                ui.write(('file extras: %s (%s)\n')
                         % (filename, ', '.join(extrastrings)))
            elif rtype == 'l':
                # merge labels: local, other and (optionally) base
                labels = record.split('\0', 2)
                labels = [l for l in labels if len(l) > 0]
                ui.write(('labels:\n'))
                ui.write((' local: %s\n' % labels[0]))
                ui.write((' other: %s\n' % labels[1]))
                if len(labels) > 2:
                    ui.write((' base: %s\n' % labels[2]))
            else:
                ui.write(('unrecognized entry: %s\t%s\n')
                         % (rtype, record.replace('\0', '\t')))

    # Avoid mergestate.read() since it may raise an exception for unsupported
    # merge state records. We shouldn't be doing this, but this is OK since this
    # command is pretty low-level.
    ms = mergemod.mergestate(repo)

    # sort so that reasonable information is on top
    v1records = ms._readrecordsv1()
    v2records = ms._readrecordsv2()
    order = 'LOml'
    def key(r):
        # known record types sort first, in 'order'; the rest by content
        idx = order.find(r[0])
        if idx == -1:
            return (1, r[1])
        else:
            return (0, idx)
    v1records.sort(key=key)
    v2records.sort(key=key)

    if not v1records and not v2records:
        ui.write(('no merge state found\n'))
    elif not v2records:
        ui.note(('no version 2 merge state\n'))
        printrecords(1)
    elif ms._v1v2match(v1records, v2records):
        ui.note(('v1 and v2 states match: using v2\n'))
        printrecords(2)
    else:
        ui.note(('v1 and v2 states mismatch: using v1\n'))
        printrecords(1)
        if ui.verbose:
            printrecords(2)
1609 1609
@command('debugnamecomplete', [], _('NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    # Collect candidates from every namespace except 'branches'; branch
    # names need special handling so that only open branches are offered.
    candidates = set()
    for nsname, ns in repo.names.iteritems():
        if nsname != 'branches':
            candidates.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(tag)

    # With no arguments, a single empty prefix matches every name.
    prefixes = args or ['']
    matches = set()
    for prefix in prefixes:
        matches.update(n for n in candidates if n.startswith(prefix))
    ui.write('\n'.join(sorted(matches)))
    ui.write('\n')
1629 1629
@command('debugobsolete',
        [('', 'flags', 0, _('markers flag')),
         ('', 'record-parents', False,
          _('record parent information for the precursor')),
         ('r', 'rev', [], _('display markers relevant to REV')),
         ('', 'exclusive', False, _('restrict display to markers only '
                                    'relevant to REV')),
         ('', 'index', False, _('display index of the marker')),
         ('', 'delete', [], _('delete markers specified by indices')),
        ] + cmdutil.commitopts2 + cmdutil.formatteropts,
        _('[OBSOLETED [REPLACEMENT ...]]'))
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # Parse `s` as a full binary node id, aborting on malformed input.
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.Abort('changeset references must be full hexadecimal '
                              'node identifiers')

    # deletion mode: remove the markers at the given indices, then return
    if opts.get('delete'):
        indices = []
        for v in opts.get('delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.Abort(_('invalid index value: %r') % v,
                                  hint=_('use integers for indices'))

        if repo.currenttransaction():
            raise error.Abort(_('cannot delete obsmarkers in the middle '
                                'of transaction.'))

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_('deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # creation mode: record a new marker for the given precursor
        if opts['rev']:
            raise error.Abort('cannot select revision when creating marker')
        metadata = {}
        metadata['user'] = encoding.fromlocal(opts['user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction('debugobsolete')
            try:
                date = opts.get('date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts['record_parents']:
                    # parents can only be recorded for locally known changesets
                    if prec not in repo.unfiltered():
                        raise error.Abort('cannot used --record-parents on '
                                          'unknown changesets')
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(tr, prec, succs, opts['flags'],
                                     parents=parents, date=date,
                                     metadata=metadata, ui=ui)
                tr.close()
            except ValueError as exc:
                raise error.Abort(_('bad obsmarker input: %s') %
                                  pycompat.bytestr(exc))
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # display mode: list markers, optionally restricted to --rev
        if opts['rev']:
            revs = scmutil.revrange(repo, opts['rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(obsutil.getmarkers(repo, nodes=nodes,
                                              exclusive=opts['exclusive']))
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get('rev') and opts.get('index'):
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter('debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get('index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
1746 1746
@command('debugpathcomplete',
         [('f', 'full', None, _('complete an entire path')),
          ('n', 'normal', None, _('show only normal files')),
          ('a', 'added', None, _('show only added files')),
          ('r', 'removed', None, _('show only removed files'))],
         _('FILESPEC...'))
def debugpathcomplete(ui, repo, *specs, **opts):
    '''complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used.'''

    def complete(path, acceptable):
        # Return (files, dirs) completion candidates for `path`, keeping
        # only dirstate entries whose state character is in `acceptable`.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        # nothing to complete for paths outside of the repository
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += '/'
        # make the spec relative to the repository root
        spec = spec[len(rootdir):]
        # dirstate paths always use '/'; translate on other platforms
        fixpaths = pycompat.ossep != '/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, '/')
        speclen = len(spec)
        fullpaths = opts[r'full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.iteritems():
            if f.startswith(spec) and st[0] in acceptable:
                if fixpaths:
                    f = f.replace('/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # without --full, truncate at the next path separator
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # map the -n/-a/-r options onto dirstate state characters; no option
    # means "accept all states" ('nmar', applied below)
    acceptable = ''
    if opts[r'normal']:
        acceptable += 'nm'
    if opts[r'added']:
        acceptable += 'a'
    if opts[r'removed']:
        acceptable += 'r'
    cwd = repo.getcwd()
    if not specs:
        specs = ['.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or 'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write('\n')
1811 1811
@command('debugpeer', [], _('PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging; it is only displayed when the
    # user also passes --debug.
    with ui.configoverride({('devel', 'debug.peer-request'): True}):
        peer = hg.peer(ui, {}, path)

        islocal = peer.local() is not None
        pushable = peer.canpush()

        yesno = lambda flag: _('yes') if flag else _('no')
        ui.write(_('url: %s\n') % peer.url())
        ui.write(_('local: %s\n') % yesno(islocal))
        ui.write(_('pushable: %s\n') % yesno(pushable))
1830 1830
@command('debugpickmergetool',
        [('r', 'rev', '', _('check for files in this revision'), _('REV')),
         ('', 'changedelete', None, _('emulate merging change and delete')),
        ] + cmdutil.walkopts + cmdutil.mergetoolopts,
        _('[PATTERN]...'),
        inferrepo=True)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    # --tool is applied via a config override, mirroring how real merges
    # honor it (ui.forcemerge takes precedence over other configuration)
    overrides = {}
    if opts['tool']:
        overrides[('ui', 'forcemerge')] = opts['tool']
        ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))

    with ui.configoverride(overrides, 'debugmergepatterns'):
        hgmerge = encoding.environ.get("HGMERGE")
        if hgmerge is not None:
            ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config("ui", "merge")
        if uimerge:
            ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get('rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts['changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            try:
                # buffer _picktool's warnings unless --debug was given
                if not ui.debugflag:
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(repo, ui, path,
                                                     fctx.isbinary(),
                                                     'l' in fctx.flags(),
                                                     changedelete)
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(('%s = %s\n') % (path, tool))
1909 1909
@command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    peer = hg.peer(ui, {}, repopath)
    if not keyinfo:
        # listing mode: dump every key/value pair in the namespace
        for key, value in sorted(peer.listkeys(namespace).iteritems()):
            ui.write("%s\t%s\n" % (stringutil.escapestr(key),
                                   stringutil.escapestr(value)))
        return

    # update mode: atomically move the key from `old` to `new`
    key, old, new = keyinfo
    pushargs = {
        'namespace': namespace,
        'key': key,
        'old': old,
        'new': new,
    }
    with peer.commandexecutor() as executor:
        result = executor.callcommand('pushkey', pushargs).result()

    ui.status(pycompat.bytestr(result) + '\n')
    return not result
1937 1937
@command('debugpvec', [], _('A B'))
def debugpvec(ui, repo, a, b=None):
    # Compare the parent vectors ("pvecs") of revisions A and B and print
    # their depths, hamming distance and mutual relation as defined by
    # pvec's rich comparisons: '=' equal, '>'/'<' ordered, '|' divergent.
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = "="
    elif pa > pb:
        rel = ">"
    elif pa < pb:
        rel = "<"
    elif pa | pb:
        rel = "|"
    else:
        # Defensive fallback: previously `rel` was left unbound (raising
        # UnboundLocalError) if none of the comparisons above matched.
        rel = "?"
    ui.write(_("a: %s\n") % pa)
    ui.write(_("b: %s\n") % pb)
    ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
             (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
              pa.distance(pb), rel))
1958 1958
@command('debugrebuilddirstate|debugrebuildstate',
    [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
     ('', 'minimal', None, _('only rebuild files that are inconsistent with '
                             'the working copy parent')),
    ],
    _('[-r REV]'))
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get(r'minimal'):
            inmanifest = set(ctx.manifest().keys())
            indirstate = set(dirstate)
            # files only known to the manifest, plus dirstate-only files
            # that are not marked as added
            manifestonly = inmanifest - indirstate
            dsnotadded = {f for f in indirstate - inmanifest
                          if dirstate[f] != 'a'}
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1996 1996
@command('debugrebuildfncache', [], '')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    # the heavy lifting lives in repair; this is just the user-facing entry
    # point
    repair.rebuildfncache(ui, repo)
2001 2001
@command('debugrename',
        [('r', 'rev', '', _('revision to debug'), _('REV'))],
        _('[-r REV] FILE'))
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'))
    matcher = scmutil.match(ctx, (file1,) + pats, opts)
    for abspath in ctx.walk(matcher):
        fctx = ctx[abspath]
        relpath = matcher.rel(abspath)
        # renamed() yields (source path, source node) or falsy if no copy
        copysource = fctx.filelog().renamed(fctx.filenode())
        if not copysource:
            ui.write(_("%s not renamed\n") % relpath)
        else:
            ui.write(_("%s renamed from %s:%s\n")
                     % (relpath, copysource[0], hex(copysource[1])))
2019 2019
@command('debugrevlog', cmdutil.debugrevlogopts +
    [('d', 'dump', False, _('dump index data'))],
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)

    if opts.get("dump"):
        # --dump mode: emit one raw line of index data per revision, then
        # return without computing the aggregate statistics below.
        numrevs = len(r)
        ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
                 " rawsize totalsize compression heads chainlen\n"))
        ts = 0
        heads = set()

        for rev in pycompat.xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                # No delta parent: this revision is stored in full.
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # Maintain the set of current heads: the parents of a revision
            # stop being heads once that revision is seen.
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                # Cumulative raw size over cumulative stored size so far.
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                     "%11d %5d %8d\n" %
                     (rev, p1, p2, r.start(rev), r.end(rev),
                      r.start(dbase), r.start(cbase),
                      r.start(p1), r.start(p2),
                      rs, ts, compression, len(heads), clen))
        return 0

    # Decode the revlog version word: low 16 bits hold the format number,
    # the high bits are feature flags.
    v = r.version
    format = v & 0xFFFF
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append('inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append('generaldelta')
    if not flags:
        flags = ['(none)']

    ### tracks merge vs single parent
    nummerges = 0

    ### tracks ways the "delta" are build
    # nodelta
    numempty = 0
    numemptytext = 0
    numemptydelta = 0
    # full file content
    numfull = 0
    # intermediate snapshot against a prior snapshot
    numsemi = 0
    # snapshot count per depth
    numsnapdepth = collections.defaultdict(lambda: 0)
    # delta against previous revision
    numprev = 0
    # delta against first or second parent (not prev)
    nump1 = 0
    nump2 = 0
    # delta against neither prev nor parents
    numother = 0
    # delta against prev that are also first or second parent
    # (details of `numprev`)
    nump1prev = 0
    nump2prev = 0

    # data about delta chain of each revs
    chainlengths = []
    chainbases = []
    chainspans = []

    # data about each revision; each list is [min, max, total].
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    semisize = [None, 0, 0]
    # snapshot count per depth
    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # Fold one sample into a [min, max, total] accumulator in place.
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in pycompat.xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # Not stored as a delta: either empty text or a full snapshot.
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            if size == 0:
                numempty += 1
                numemptytext += 1
            else:
                numfull += 1
                numsnapdepth[0] += 1
                addsize(size, fullsize)
                addsize(size, snapsizedepth[0])
        else:
            # Delta chain bookkeeping: length, base offset and byte span.
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            if size == 0:
                numempty += 1
                numemptydelta += 1
            elif r.issnapshot(rev):
                # Intermediate snapshot: a delta against another snapshot.
                addsize(size, semisize)
                numsemi += 1
                depth = r.snapshotdepth(rev)
                numsnapdepth[depth] += 1
                addsize(size, snapsizedepth[depth])
            else:
                # Regular delta; classify by what it is delta'd against.
                addsize(size, deltasize)
                if delta == rev - 1:
                    numprev += 1
                    if delta == p1:
                        nump1prev += 1
                    elif delta == p2:
                        nump2prev += 1
                elif delta == p1:
                    nump1 += 1
                elif delta == p2:
                    nump2 += 1
                elif delta != nullrev:
                    numother += 1

        # Obtain data on the raw chunks in the revlog.
        if util.safehasattr(r, '_getsegmentforrevs'):
            segment = r._getsegmentforrevs(rev, rev)[1]
        else:
            segment = r._revlog._getsegmentforrevs(rev, rev)[1]
        if segment:
            # The first byte of a chunk identifies its compression type.
            chunktype = bytes(segment[0:1])
        else:
            chunktype = 'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, semisize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull - numempty - numsemi
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    # From here on, slot [2] of each accumulator becomes the average.
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    fullsize[2] /= numfull
    semitotal = semisize[2]
    snaptotal = {}
    if numsemi > 0:
        semisize[2] /= numsemi
    for depth in snapsizedepth:
        snaptotal[depth] = snapsizedepth[depth][2]
        snapsizedepth[depth][2] /= numsnapdepth[depth]

    deltatotal = deltasize[2]
    if numdeltas > 0:
        deltasize[2] /= numdeltas
    totalsize = fulltotal + semitotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    basedfmtstr = '%%%dd\n'
    basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        # Build a decimal format string wide enough for the largest value.
        return basedfmtstr % len(str(max))
    def pcfmtstr(max, padding=0):
        # Same, with a trailing "(xx.xx%)" percentage column.
        return basepcfmtstr % (len(str(max)), ' ' * padding)

    def pcfmt(value, total):
        # Return (value, percentage-of-total) suitable for pcfmtstr.
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.write(('format : %d\n') % format)
    ui.write(('flags : %s\n') % ', '.join(flags))

    ui.write('\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
    ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' empty : ') + fmt % pcfmt(numempty, numrevs))
    ui.write((' text : ')
             + fmt % pcfmt(numemptytext, numemptytext + numemptydelta))
    ui.write((' delta : ')
             + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta))
    ui.write((' snapshot : ') + fmt % pcfmt(numfull + numsemi, numrevs))
    for depth in sorted(numsnapdepth):
        ui.write((' lvl-%-3d : ' % depth)
                 + fmt % pcfmt(numsnapdepth[depth], numrevs))
    ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
    ui.write(('revision size : ') + fmt2 % totalsize)
    ui.write((' snapshot : ')
             + fmt % pcfmt(fulltotal + semitotal, totalsize))
    for depth in sorted(numsnapdepth):
        ui.write((' lvl-%-3d : ' % depth)
                 + fmt % pcfmt(snaptotal[depth], totalsize))
    ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        # Render a chunk-type key; printable ASCII is shown literally too.
        if chunktype == 'empty':
            return ' %s : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return ' 0x%s : ' % hex(chunktype)

    ui.write('\n')
    ui.write(('chunks : ') + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.write(('chunks size : ') + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write('\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.write(('avg chain length : ') + fmt % avgchainlen)
    ui.write(('max chain length : ') + fmt % maxchainlen)
    ui.write(('max chain reach : ') + fmt % maxchainspan)
    ui.write(('compression ratio : ') + fmt % compratio)

    if format > 0:
        # Size statistics are only meaningful for revlogv1+ formats.
        ui.write('\n')
        ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
                 % tuple(datasize))
        ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
                 % tuple(fullsize))
        ui.write(('inter-snapshot size (min/max/avg) : %d / %d / %d\n')
                 % tuple(semisize))
        for depth in sorted(snapsizedepth):
            if depth == 0:
                # Depth 0 snapshots are full revisions, reported above.
                continue
            ui.write((' level-%-3d (min/max/avg) : %d / %d / %d\n')
                     % ((depth,) + tuple(snapsizedepth[depth])))
        ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
                 % tuple(deltasize))

    if numdeltas > 0:
        ui.write('\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
        if numprev > 0:
            ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
                                                            numprev))
            ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
                                                            numprev))
            ui.write((' other : ') + fmt2 % pcfmt(numoprev,
                                                  numprev))
        if gdelta:
            # Parent-delta statistics only make sense with generaldelta.
            ui.write(('deltas against p1 : ')
                     + fmt % pcfmt(nump1, numdeltas))
            ui.write(('deltas against p2 : ')
                     + fmt % pcfmt(nump2, numdeltas))
            ui.write(('deltas against other : ') + fmt % pcfmt(numother,
                                                               numdeltas))
2319 2319
@command('debugrevlogindex', cmdutil.debugrevlogopts +
    [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
    _('[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugrevlogindex', file_, opts)
    format = opts.get('format', 0)
    # Only the two historical index layouts are supported.
    if format not in (0, 1):
        raise error.Abort(_("unknown format %d") % format)

    # Full 40-hex node ids with --debug, short ids otherwise.
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    # Measure the first node id to size the id columns; all ids are the
    # same length, so one sample suffices.
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    if format == 0:
        if ui.verbose:
            ui.write((" rev offset length linkrev"
                     " %s %s p2\n") % ("nodeid".ljust(idlen),
                                       "p1".ljust(idlen)))
        else:
            ui.write((" rev linkrev %s %s p2\n") % (
                "nodeid".ljust(idlen), "p1".ljust(idlen)))
    elif format == 1:
        if ui.verbose:
            ui.write((" rev flag offset length size link p1"
                     " p2 %s\n") % "nodeid".rjust(idlen))
        else:
            ui.write((" rev flag size link p1 p2 %s\n") %
                     "nodeid".rjust(idlen))

    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # Still emit a row for damaged entries; fall back to null
                # parents rather than aborting the dump.
                pp = [nullid, nullid]
            if ui.verbose:
                ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
                    i, r.start(i), r.length(i), r.linkrev(i),
                    shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
            else:
                ui.write("% 6d % 7d %s %s %s\n" % (
                    i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
                    shortfn(pp[1])))
        elif format == 1:
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
                    r.linkrev(i), pr[0], pr[1], shortfn(node)))
            else:
                ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
                    shortfn(node)))
2384 2384
@command('debugrevspec',
    [('', 'optimize', None,
      _('print parsed tree after optimizing (DEPRECATED)')),
     ('', 'show-revs', True, _('print list of result revisions (default)')),
     ('s', 'show-set', None, _('print internal representation of result set')),
     ('p', 'show-stage', [],
      _('print parsed tree at the given stage'), _('NAME')),
     ('', 'no-optimized', False, _('evaluate tree without optimization')),
     ('', 'verify-optimized', False, _('verify optimized result')),
     ],
    ('REVSPEC'))
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems('revsetalias')
    # Parsing pipeline: each stage transforms the tree produced by the
    # previous stage; the name is used for --show-stage selection.
    stages = [
        ('parsed', lambda tree: tree),
        ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
                                                           ui.warn)),
        ('concatenated', revsetlang.foldconcat),
        ('analyzed', revsetlang.analyze),
        ('optimized', revsetlang.optimize),
    ]
    if opts['no_optimized']:
        stages = stages[:-1]
    if opts['verify_optimized'] and opts['no_optimized']:
        raise error.Abort(_('cannot use --verify-optimized with '
                            '--no-optimized'))
    stagenames = set(n for n, f in stages)

    # showalways: stages always printed; showchanged: printed only when the
    # stage actually changed the tree.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add('parsed')
        showchanged.update(['expanded', 'concatenated'])
    if opts['optimize']:
        showalways.add('optimized')
    if opts['show_stage'] and opts['optimize']:
        raise error.Abort(_('cannot use --optimize with --show-stage'))
    if opts['show_stage'] == ['all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_('invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        # Run every stage, remembering each intermediate tree for
        # --verify-optimized below.
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts['show_stage'] or n != 'parsed':
                ui.write(("* %s:\n") % n)
            ui.write(revsetlang.prettyformat(tree), "\n")
            printedtree = tree

    if opts['verify_optimized']:
        # Evaluate both the analyzed and the optimized tree and compare the
        # resulting revision sequences; differences are shown as a diff.
        arevs = revset.makematcher(treebystage['analyzed'])(repo)
        brevs = revset.makematcher(treebystage['optimized'])(repo)
        if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
            ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
            ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        ui.write(('--- analyzed\n'), label='diff.file_a')
        ui.write(('+++ optimized\n'), label='diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write('-%s\n' % c, label='diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write('+%s\n' % c, label='diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(' %s\n' % c)
        return 1

    # Normal path: evaluate the final tree and print the resulting revs.
    func = revset.makematcher(tree)
    revs = func(repo)
    if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
        ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
    if not opts['show_revs']:
        return
    for c in revs:
        ui.write("%d\n" % c)
2487 2487
@command('debugserve', [
    ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
    ('', 'logiofd', '', _('file descriptor to log server I/O to')),
    ('', 'logiofile', '', _('file to log server I/O to')),
], '')
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    # Validate the option combination before touching any file handles.
    if not opts['sshstdio']:
        raise error.Abort(_('only --sshstdio is currently supported'))

    if opts['logiofd'] and opts['logiofile']:
        raise error.Abort(_('cannot use both --logiofd and --logiofile'))

    logfh = None
    if opts['logiofd']:
        fd = int(opts['logiofd'])
        # Line buffered because output is line based.
        try:
            logfh = os.fdopen(fd, r'ab', 1)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(fd, r'wb', 1)
    elif opts['logiofile']:
        logfh = open(opts['logiofile'], 'ab', 1)

    server = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    server.serve_forever()
2524 2524
@command('debugsetparents', [], _('REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care. For example, neither the working directory nor the
    dirstate is updated, so file status may be incorrect after running this
    command.

    Returns 0 on success.
    """

    # A missing second revision defaults to the null revision.
    parent1 = scmutil.revsingle(repo, rev1).node()
    parent2 = scmutil.revsingle(repo, rev2, 'null').node()

    with repo.wlock():
        repo.setparents(parent1, parent2)
2542 2542
@command('debugssl', [], '[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    '''test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    '''
    if not pycompat.iswindows:
        raise error.Abort(_('certificate chain building is only possible on '
                            'Windows'))

    if not source:
        if not repo:
            raise error.Abort(_("there is no Mercurial repository here, and no "
                                "server specified"))
        source = "default"

    source, branches = hg.parseurl(ui.expandpath(source))
    url = util.url(source)
    addr = None

    # Derive (host, port), defaulting the port by scheme.
    defaultport = {'https': 443, 'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_("malformed port number in URL"))
    else:
        raise error.Abort(_("only https and ssh connections are supported"))

    from . import win32

    # Verification is deliberately disabled here: the goal is only to fetch
    # the peer certificate so Windows can complete its chain, not to
    # validate the connection.
    s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
                        cert_reqs=ssl.CERT_NONE, ca_certs=None)

    try:
        s.connect(addr)
        cert = s.getpeercert(True)

        ui.status(_('checking the certificate chain for %s\n') % url.host)

        # First probe without building; only invoke the (potentially slow)
        # chain-building path when the chain is actually incomplete.
        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_('certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_('failed.\n'))
            else:
                ui.status(_('done.\n'))
        else:
            ui.status(_('full certificate chain is available\n'))
    finally:
        s.close()
2604 2604
@command('debugsub',
    [('r', 'rev', '',
     _('revision to check'), _('REV'))],
    _('[-r REV] [REV]'))
def debugsub(ui, repo, rev=None):
    # Print the subrepo state (path, source, revision) of the given
    # changeset, sorted by subrepo path for stable output.
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.write(('path %s\n') % path)
        ui.write((' source %s\n') % state[0])
        ui.write((' revision %s\n') % state[1])
2615 2615
@command('debugsuccessorssets',
    [('', 'closest', False, _('return closest successors sets only'))],
    _('[REV]'))
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write('%s\n'% bytes(ctx))
        for succsset in obsutil.successorssets(repo, ctx.node(),
                                               closest=opts[r'closest'],
                                               cache=cache):
            # One indented line per successors set; empty sets still get a
            # newline so every set is represented.
            if succsset:
                ui.write(' ')
                ui.write(' '.join(short(node) for node in succsset))
            ui.write('\n')
2668 2668
@command('debugtemplate',
    [('r', 'rev', [], _('apply template on changesets'), _('REV')),
     ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
    _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts[r'rev']:
        # -r requires a repository to resolve revisions against.
        if repo is None:
            raise error.RepoError(_('there is no Mercurial repository here '
                                    '(.hg not found)'))
        revs = scmutil.revrange(repo, opts[r'rev'])

    # Collect -D KEY=VALUE definitions; 'ui' is reserved and empty keys
    # are rejected.
    props = {}
    for d in opts[r'define']:
        try:
            k, v = (e.strip() for e in d.split('=', 1))
            if not k or k == 'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_('malformed keyword definition: %s') % d)

    if ui.verbose:
        # Show the parsed tree, and the alias-expanded tree when expansion
        # actually changed something.
        aliases = ui.configitems('templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), '\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')

    if revs is None:
        # Generic template: render once using only the user-defined props.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
            ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested revision.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
            ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
2725 2725
@command('debuguigetpass', [
    ('p', 'prompt', '', _('prompt text'), _('TEXT')),
], _('[-p TEXT]'), norepo=True)
def debuguigetpass(ui, prompt=''):
    """show prompt to type password"""
    # Echo the entered value so the prompt machinery can be exercised in
    # tests.
    r = ui.getpass(prompt)
    # Fix: label previously read 'respose'; use 'response' to match the
    # output of debuguiprompt.
    ui.write(('response: %s\n') % r)
2733 2733
@command('debuguiprompt', [
    ('p', 'prompt', '', _('prompt text'), _('TEXT')),
], _('[-p TEXT]'), norepo=True)
def debuguiprompt(ui, prompt=''):
    """show plain prompt"""
    # Echo the entered value so the prompt machinery can be exercised in
    # tests.
    r = ui.prompt(prompt)
    ui.write(('response: %s\n') % r)
2741 2741
@command('debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Take both the working-copy lock and the store lock: cache updates can
    # touch caches guarded by either one.
    with repo.wlock(), repo.lock():
        repo.updatecaches(full=True)
2747 2747
@command('debugupgraderepo', [
    ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
    ('', 'run', False, _('performs an upgrade')),
])
def debugupgraderepo(ui, repo, run=False, optimize=None):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.
    """
    # All the logic lives in the upgrade module; this is only the CLI entry.
    return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2772 2772
@command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
         inferrepo=True)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
    items = list(repo[None].walk(m))
    if not items:
        return
    # Only rewrite path separators when the user asked for forward slashes
    # on a platform whose native separator differs.
    f = lambda fn: fn
    if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
        f = util.normpath
    # Compute each match-relative path once; it is needed both for column
    # sizing and for the per-file output lines. (Previously m.rel() was
    # called twice per file, and `abs` shadowed the builtin.)
    rels = [m.rel(fname) for fname in items]
    # Column widths fit the longest repo path and the longest relative path.
    fmt = 'f %%-%ds %%-%ds %%s' % (
        max(len(fname) for fname in items),
        max(len(rel) for rel in rels))
    for fname, rel in zip(items, rels):
        line = fmt % (fname, f(rel), 'exact' if m.exact(fname) else '')
        ui.write("%s\n" % line.rstrip())
2793 2793
@command('debugwhyunstable', [], _('REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    ctx = scmutil.revsingle(repo, rev)
    for entry in obsutil.whyunstable(repo, ctx):
        divergent = entry.get('divergentnodes')
        if divergent:
            # Render each divergent changeset as "hash (phase)".
            descs = ['%s (%s)' % (c.hex(), c.phasestr()) for c in divergent]
            dnodes = ' '.join(descs) + ' '
        else:
            dnodes = ''
        ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
                                    entry['reason'], entry['node']))
2804 2804
@command('debugwireargs',
    [('', 'three', '', 'three'),
     ('', 'four', '', 'four'),
     ('', 'five', '', 'five'),
    ] + cmdutil.remoteopts,
    _('REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True)
def debugwireargs(ui, repopath, *vals, **opts):
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    # Strip connection-related options; only command arguments remain.
    for opt in cmdutil.remoteopts:
        del opts[opt[1]]
    # Forward only the options that were actually set.
    args = pycompat.strkwargs({k: v for k, v in opts.iteritems() if v})
    # run twice to check that we don't mess up the stream for the next command
    first = peer.debugwireargs(*vals, **args)
    second = peer.debugwireargs(*vals, **args)
    ui.write("%s\n" % first)
    if first != second:
        ui.warn("%s\n" % second)
2828 2828
2829 2829 def _parsewirelangblocks(fh):
2830 2830 activeaction = None
2831 2831 blocklines = []
2832 2832
2833 2833 for line in fh:
2834 2834 line = line.rstrip()
2835 2835 if not line:
2836 2836 continue
2837 2837
2838 2838 if line.startswith(b'#'):
2839 2839 continue
2840 2840
2841 2841 if not line.startswith(b' '):
2842 2842 # New block. Flush previous one.
2843 2843 if activeaction:
2844 2844 yield activeaction, blocklines
2845 2845
2846 2846 activeaction = line
2847 2847 blocklines = []
2848 2848 continue
2849 2849
2850 2850 # Else we start with an indent.
2851 2851
2852 2852 if not activeaction:
2853 2853 raise error.Abort(_('indented line outside of block'))
2854 2854
2855 2855 blocklines.append(line)
2856 2856
2857 2857 # Flush last block.
2858 2858 if activeaction:
2859 2859 yield activeaction, blocklines
2860 2860
@command('debugwireproto',
    [
        ('', 'localssh', False, _('start an SSH server for this repo')),
        ('', 'peer', '', _('construct a specific version of the peer')),
        ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
        ('', 'nologhandshake', False,
         _('do not log I/O related to the peer handshake')),
    ] + cmdutil.remoteopts,
    _('[PATH]'),
    optionalrepo=True)
def debugwireproto(ui, repo, path=None, **opts):
    """send wire protocol commands to a server

    This command can be used to issue wire protocol commands to remote
    peers and to debug the raw data being exchanged.

    ``--localssh`` will start an SSH server against the current repository
    and connect to that. By default, the connection will perform a handshake
    and establish an appropriate peer instance.

    ``--peer`` can be used to bypass the handshake protocol and construct a
    peer instance using the specified class type. Valid values are ``raw``,
    ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
    raw data payloads and don't support higher-level command actions.

    ``--noreadstderr`` can be used to disable automatic reading from stderr
    of the peer (for SSH connections only). Disabling automatic reading of
    stderr is useful for making output more deterministic.

    Commands are issued via a mini language which is specified via stdin.
    The language consists of individual actions to perform. An action is
    defined by a block. A block is defined as a line with no leading
    space followed by 0 or more lines with leading space. Blocks are
    effectively a high-level command with additional metadata.

    Lines beginning with ``#`` are ignored.

    The following sections denote available actions.

    raw
    ---

    Send raw data to the server.

    The block payload contains the raw data to send as one atomic send
    operation. The data may not actually be delivered in a single system
    call: it depends on the abilities of the transport being used.

    Each line in the block is de-indented and concatenated. Then, that
    value is evaluated as a Python b'' literal. This allows the use of
    backslash escaping, etc.

    raw+
    ----

    Behaves like ``raw`` except flushes output afterwards.

    command <X>
    -----------

    Send a request to run a named command, whose name follows the ``command``
    string.

    Arguments to the command are defined as lines in this block. The format of
    each line is ``<key> <value>``. e.g.::

        command listkeys
        namespace bookmarks

    If the value begins with ``eval:``, it will be interpreted as a Python
    literal expression. Otherwise values are interpreted as Python b'' literals.
    This allows sending complex types and encoding special byte sequences via
    backslash escaping.

    The following arguments have special meaning:

    ``PUSHFILE``
        When defined, the *push* mechanism of the peer will be used instead
        of the static request-response mechanism and the content of the
        file specified in the value of this argument will be sent as the
        command payload.

        This can be used to submit a local bundle file to the remote.

    batchbegin
    ----------

    Instruct the peer to begin a batched send.

    All ``command`` blocks are queued for execution until the next
    ``batchsubmit`` block.

    batchsubmit
    -----------

    Submit previously queued ``command`` blocks as a batch request.

    This action MUST be paired with a ``batchbegin`` action.

    httprequest <method> <path>
    ---------------------------

    (HTTP peer only)

    Send an HTTP request to the peer.

    The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.

    Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
    headers to add to the request. e.g. ``Accept: foo``.

    The following arguments are special:

    ``BODYFILE``
        The content of the file defined as the value to this argument will be
        transferred verbatim as the HTTP request body.

    ``frame <type> <flags> <payload>``
        Send a unified protocol frame as part of the request body.

        All frames will be collected and sent as the body to the HTTP
        request.

    close
    -----

    Close the connection to the server.

    flush
    -----

    Flush data written to the server.

    readavailable
    -------------

    Close the write end of the connection and read all available data from
    the server.

    If the connection to the server encompasses multiple pipes, we poll both
    pipes and read available data.

    readline
    --------

    Read a line of output from the server. If there are multiple output
    pipes, reads only the main pipe.

    ereadline
    ---------

    Like ``readline``, but read from the stderr pipe, if available.

    read <X>
    --------

    ``read()`` N bytes from the server's main output pipe.

    eread <X>
    ---------

    ``read()`` N bytes from the server's stderr pipe, if available.

    Specifying Unified Frame-Based Protocol Frames
    ----------------------------------------------

    It is possible to emit a *Unified Frame-Based Protocol* by using special
    syntax.

    A frame is composed as a type, flags, and payload. These can be parsed
    from a string of the form:

       <request-id> <stream-id> <stream-flags> <type> <flags> <payload>

    ``request-id`` and ``stream-id`` are integers defining the request and
    stream identifiers.

    ``type`` can be an integer value for the frame type or the string name
    of the type. The strings are defined in ``wireprotoframing.py``. e.g.
    ``command-name``.

    ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
    components. Each component (and there can be just one) can be an integer
    or a flag name for stream flags or frame flags, respectively. Values are
    resolved to integers and then bitwise OR'd together.

    ``payload`` represents the raw frame payload. If it begins with
    ``cbor:``, the following string is evaluated as Python code and the
    resulting object is fed into a CBOR encoder. Otherwise it is interpreted
    as a Python byte string literal.
    """
    opts = pycompat.byteskwargs(opts)

    if opts['localssh'] and not repo:
        raise error.Abort(_('--localssh requires a repository'))

    if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
        raise error.Abort(_('invalid value for --peer'),
                          hint=_('valid values are "raw", "ssh1", and "ssh2"'))

    if path and opts['localssh']:
        raise error.Abort(_('cannot specify --localssh with an explicit '
                            'path'))

    if ui.interactive():
        ui.write(_('(waiting for commands on stdin)\n'))

    blocks = list(_parsewirelangblocks(ui.fin))

    proc = None
    stdin = None
    stdout = None
    stderr = None
    opener = None

    if opts['localssh']:
        # We start the SSH server in its own process so there is process
        # separation. This prevents a whole class of potential bugs around
        # shared state from interfering with server operation.
        args = procutil.hgcmd() + [
            '-R', repo.root,
            'debugserve', '--sshstdio',
        ]
        proc = subprocess.Popen(pycompat.rapply(procutil.tonativestr, args),
                                stdin=subprocess.PIPE,
                                stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                                bufsize=0)

        stdin = proc.stdin
        stdout = proc.stdout
        stderr = proc.stderr

        # We turn the pipes into observers so we can log I/O.
        if ui.verbose or opts['peer'] == 'raw':
            stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
                                               logdata=True)
            stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
                                                logdata=True)
            stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
                                                logdata=True)

        # --localssh also implies the peer connection settings.

        url = 'ssh://localserver'
        autoreadstderr = not opts['noreadstderr']

        if opts['peer'] == 'ssh1':
            ui.write(_('creating ssh peer for wire protocol version 1\n'))
            peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
                                     None, autoreadstderr=autoreadstderr)
        elif opts['peer'] == 'ssh2':
            ui.write(_('creating ssh peer for wire protocol version 2\n'))
            peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
                                     None, autoreadstderr=autoreadstderr)
        elif opts['peer'] == 'raw':
            ui.write(_('using raw connection to peer\n'))
            peer = None
        else:
            ui.write(_('creating ssh peer from handshake results\n'))
            peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
                                    autoreadstderr=autoreadstderr)

    elif path:
        # We bypass hg.peer() so we can proxy the sockets.
        # TODO consider not doing this because we skip
        # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
        u = util.url(path)
        if u.scheme != 'http':
            raise error.Abort(_('only http:// paths are currently supported'))

        url, authinfo = u.authinfo()
        openerargs = {
            r'useragent': b'Mercurial debugwireproto',
        }

        # Turn pipes/sockets into observers so we can log I/O.
        if ui.verbose:
            openerargs.update({
                r'loggingfh': ui,
                r'loggingname': b's',
                r'loggingopts': {
                    r'logdata': True,
                    r'logdataapis': False,
                },
            })

        if ui.debugflag:
            openerargs[r'loggingopts'][r'logdataapis'] = True

        # Don't send default headers when in raw mode. This allows us to
        # bypass most of the behavior of our URL handling code so we can
        # have near complete control over what's sent on the wire.
        if opts['peer'] == 'raw':
            openerargs[r'sendaccept'] = False

        opener = urlmod.opener(ui, authinfo, **openerargs)

        if opts['peer'] == 'http2':
            ui.write(_('creating http peer for wire protocol version 2\n'))
            # We go through makepeer() because we need an API descriptor for
            # the peer instance to be useful.
            with ui.configoverride({
                ('experimental', 'httppeer.advertise-v2'): True}):
                if opts['nologhandshake']:
                    ui.pushbuffer()

                peer = httppeer.makepeer(ui, path, opener=opener)

                if opts['nologhandshake']:
                    ui.popbuffer()

            if not isinstance(peer, httppeer.httpv2peer):
                raise error.Abort(_('could not instantiate HTTP peer for '
                                    'wire protocol version 2'),
                                  hint=_('the server may not have the feature '
                                         'enabled or is not allowing this '
                                         'client version'))

        elif opts['peer'] == 'raw':
            ui.write(_('using raw connection to peer\n'))
            peer = None
        elif opts['peer']:
            raise error.Abort(_('--peer %s not supported with HTTP peers') %
                              opts['peer'])
        else:
            peer = httppeer.makepeer(ui, path, opener=opener)

        # We /could/ populate stdin/stdout with sock.makefile()...
    else:
        raise error.Abort(_('unsupported connection configuration'))

    batchedcommands = None

    # Now perform actions based on the parsed wire language instructions.
    for action, lines in blocks:
        if action in ('raw', 'raw+'):
            if not stdin:
                raise error.Abort(_('cannot call raw/raw+ on this peer'))

            # Concatenate the data together.
            data = ''.join(l.lstrip() for l in lines)
            data = stringutil.unescapestr(data)
            stdin.write(data)

            if action == 'raw+':
                stdin.flush()
        elif action == 'flush':
            if not stdin:
                raise error.Abort(_('cannot call flush on this peer'))
            stdin.flush()
        elif action.startswith('command'):
            if not peer:
                raise error.Abort(_('cannot send commands unless peer instance '
                                    'is available'))

            command = action.split(' ', 1)[1]

            args = {}
            for line in lines:
                # We need to allow empty values.
                fields = line.lstrip().split(' ', 1)
                if len(fields) == 1:
                    key = fields[0]
                    value = ''
                else:
                    key, value = fields

                if value.startswith('eval:'):
                    value = stringutil.evalpythonliteral(value[5:])
                else:
                    value = stringutil.unescapestr(value)

                args[key] = value

            if batchedcommands is not None:
                batchedcommands.append((command, args))
                continue

            ui.status(_('sending %s command\n') % command)

            if 'PUSHFILE' in args:
                with open(args['PUSHFILE'], r'rb') as fh:
                    del args['PUSHFILE']
                    res, output = peer._callpush(command, fh,
                                                 **pycompat.strkwargs(args))
                    ui.status(_('result: %s\n') % stringutil.escapestr(res))
                    ui.status(_('remote output: %s\n') %
                              stringutil.escapestr(output))
            else:
                with peer.commandexecutor() as e:
                    res = e.callcommand(command, args).result()

                if isinstance(res, wireprotov2peer.commandresponse):
                    val = res.objects()
                    ui.status(_('response: %s\n') %
                              stringutil.pprint(val, bprefix=True, indent=2))
                else:
                    ui.status(_('response: %s\n') %
                              stringutil.pprint(res, bprefix=True, indent=2))

        elif action == 'batchbegin':
            if batchedcommands is not None:
                raise error.Abort(_('nested batchbegin not allowed'))

            batchedcommands = []
        elif action == 'batchsubmit':
            # There is a batching API we could go through. But it would be
            # difficult to normalize requests into function calls. It is easier
            # to bypass this layer and normalize to commands + args.
            ui.status(_('sending batch with %d sub-commands\n') %
                      len(batchedcommands))
            for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
                ui.status(_('response #%d: %s\n') %
                          (i, stringutil.escapestr(chunk)))

            batchedcommands = None

        elif action.startswith('httprequest '):
            if not opener:
                raise error.Abort(_('cannot use httprequest without an HTTP '
                                    'peer'))

            request = action.split(' ', 2)
            if len(request) != 3:
                raise error.Abort(_('invalid httprequest: expected format is '
                                    '"httprequest <method> <path>'))

            method, httppath = request[1:]
            headers = {}
            body = None
            frames = []
            for line in lines:
                line = line.lstrip()
                m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
                if m:
                    # Headers need to use native strings.
                    key = pycompat.strurl(m.group(1))
                    value = pycompat.strurl(m.group(2))
                    headers[key] = value
                    continue

                if line.startswith(b'BODYFILE '):
                    # The line is ``BODYFILE <path>``; open() the path
                    # component, not the [keyword, path] list split() returns.
                    with open(line.split(b' ', 1)[1], 'rb') as fh:
                        body = fh.read()
                elif line.startswith(b'frame '):
                    frame = wireprotoframing.makeframefromhumanstring(
                        line[len(b'frame '):])

                    frames.append(frame)
                else:
                    raise error.Abort(_('unknown argument to httprequest: %s') %
                                      line)

            url = path + httppath

            if frames:
                body = b''.join(bytes(f) for f in frames)

            req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)

            # urllib.Request insists on using has_data() as a proxy for
            # determining the request method. Override that to use our
            # explicitly requested method.
            req.get_method = lambda: pycompat.sysstr(method)

            try:
                res = opener.open(req)
                body = res.read()
            except util.urlerr.urlerror as e:
                # read() method must be called, but only exists in Python 2
                getattr(e, 'read', lambda: None)()
                continue

            ct = res.headers.get(r'Content-Type')
            if ct == r'application/mercurial-cbor':
                ui.write(_('cbor> %s\n') %
                         stringutil.pprint(cborutil.decodeall(body),
                                           bprefix=True,
                                           indent=2))

        elif action == 'close':
            peer.close()
        elif action == 'readavailable':
            if not stdout or not stderr:
                raise error.Abort(_('readavailable not available on this peer'))

            stdin.close()
            stdout.read()
            stderr.read()

        elif action == 'readline':
            if not stdout:
                raise error.Abort(_('readline not available on this peer'))
            stdout.readline()
        elif action == 'ereadline':
            if not stderr:
                raise error.Abort(_('ereadline not available on this peer'))
            stderr.readline()
        elif action.startswith('read '):
            count = int(action.split(' ', 1)[1])
            if not stdout:
                raise error.Abort(_('read not available on this peer'))
            stdout.read(count)
        elif action.startswith('eread '):
            count = int(action.split(' ', 1)[1])
            if not stderr:
                raise error.Abort(_('eread not available on this peer'))
            stderr.read(count)
        else:
            raise error.Abort(_('unknown action: %s') % action)

    if batchedcommands is not None:
        raise error.Abort(_('unclosed "batchbegin" request'))

    if peer:
        peer.close()

    if proc:
        proc.kill()
@@ -1,1040 +1,1040
1 1 # obsolete.py - obsolete markers handling
2 2 #
3 3 # Copyright 2012 Pierre-Yves David <pierre-yves.david@ens-lyon.org>
4 4 # Logilab SA <contact@logilab.fr>
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 """Obsolete marker handling
10 10
11 11 An obsolete marker maps an old changeset to a list of new
12 12 changesets. If the list of new changesets is empty, the old changeset
13 13 is said to be "killed". Otherwise, the old changeset is being
14 14 "replaced" by the new changesets.
15 15
16 16 Obsolete markers can be used to record and distribute changeset graph
17 17 transformations performed by history rewrite operations, and help
18 18 building new tools to reconcile conflicting rewrite actions. To
19 19 facilitate conflict resolution, markers include various annotations
besides old and new changeset identifiers, such as creation date or
21 21 author name.
22 22
23 23 The old obsoleted changeset is called a "predecessor" and possible
24 24 replacements are called "successors". Markers that used changeset X as
25 25 a predecessor are called "successor markers of X" because they hold
information about the successors of X. Markers that use changeset Y as
a successor are called "predecessor markers of Y" because they hold
information about the predecessors of Y.
29 29
30 30 Examples:
31 31
32 32 - When changeset A is replaced by changeset A', one marker is stored:
33 33
34 34 (A, (A',))
35 35
36 36 - When changesets A and B are folded into a new changeset C, two markers are
37 37 stored:
38 38
39 39 (A, (C,)) and (B, (C,))
40 40
41 41 - When changeset A is simply "pruned" from the graph, a marker is created:
42 42
43 43 (A, ())
44 44
45 45 - When changeset A is split into B and C, a single marker is used:
46 46
47 47 (A, (B, C))
48 48
49 49 We use a single marker to distinguish the "split" case from the "divergence"
50 50 case. If two independent operations rewrite the same changeset A in to A' and
51 51 A'', we have an error case: divergent rewriting. We can detect it because
52 52 two markers will be created independently:
53 53
54 54 (A, (B,)) and (A, (C,))
55 55
56 56 Format
57 57 ------
58 58
59 59 Markers are stored in an append-only file stored in
60 60 '.hg/store/obsstore'.
61 61
62 62 The file starts with a version header:
63 63
64 64 - 1 unsigned byte: version number, starting at zero.
65 65
The header is followed by the markers. The marker format depends on the version. See
67 67 comment associated with each format for details.
68 68
69 69 """
70 70 from __future__ import absolute_import
71 71
72 72 import errno
73 73 import struct
74 74
75 75 from .i18n import _
76 76 from . import (
77 77 encoding,
78 78 error,
79 79 node,
80 80 obsutil,
81 81 phases,
82 82 policy,
83 83 pycompat,
84 84 util,
85 85 )
86 86 from .utils import dateutil
87 87
88 88 parsers = policy.importmod(r'parsers')
89 89
90 90 _pack = struct.pack
91 91 _unpack = struct.unpack
92 92 _calcsize = struct.calcsize
93 93 propertycache = util.propertycache
94 94
# the obsolete feature is not mature enough to be enabled by default.
# you have to rely on a third party extension to enable this.
97 97 _enabled = False
98 98
99 99 # Options for obsolescence
100 100 createmarkersopt = 'createmarkers'
101 101 allowunstableopt = 'allowunstable'
102 102 exchangeopt = 'exchange'
103 103
104 104 def _getoptionvalue(repo, option):
105 105 """Returns True if the given repository has the given obsolete option
106 106 enabled.
107 107 """
108 108 configkey = 'evolution.%s' % option
109 109 newconfig = repo.ui.configbool('experimental', configkey)
110 110
111 111 # Return the value only if defined
112 112 if newconfig is not None:
113 113 return newconfig
114 114
115 115 # Fallback on generic option
116 116 try:
117 117 return repo.ui.configbool('experimental', 'evolution')
118 118 except (error.ConfigError, AttributeError):
119 119 # Fallback on old-fashion config
120 120 # inconsistent config: experimental.evolution
121 121 result = set(repo.ui.configlist('experimental', 'evolution'))
122 122
123 123 if 'all' in result:
124 124 return True
125 125
126 126 # For migration purposes, temporarily return true if the config hasn't
127 127 # been set but _enabled is true.
128 128 if len(result) == 0 and _enabled:
129 129 return True
130 130
131 131 # Temporary hack for next check
132 132 newconfig = repo.ui.config('experimental', 'evolution.createmarkers')
133 133 if newconfig:
134 134 result.add('createmarkers')
135 135
136 136 return option in result
137 137
def getoptions(repo):
    """Returns dicts showing state of obsolescence features."""
    values = {opt: _getoptionvalue(repo, opt)
              for opt in (createmarkersopt, allowunstableopt, exchangeopt)}

    # createmarkers must be enabled if other options are enabled
    if (not values[createmarkersopt]
        and (values[allowunstableopt] or values[exchangeopt])):
        raise error.Abort(_("'createmarkers' obsolete option must be enabled "
                            "if other obsolete options are enabled"))

    return values
155 155
def isenabled(repo, option):
    """Returns True if the given repository has the given obsolete option
    enabled.
    """
    options = getoptions(repo)
    return options[option]
161 161
162 162 # Creating aliases for marker flags because evolve extension looks for
163 163 # bumpedfix in obsolete.py
164 164 bumpedfix = obsutil.bumpedfix
165 165 usingsha256 = obsutil.usingsha256
166 166
167 167 ## Parsing and writing of version "0"
168 168 #
169 169 # The header is followed by the markers. Each marker is made of:
170 170 #
171 171 # - 1 uint8 : number of new changesets "N", can be zero.
172 172 #
173 173 # - 1 uint32: metadata size "M" in bytes.
174 174 #
175 175 # - 1 byte: a bit field. It is reserved for flags used in common
176 176 # obsolete marker operations, to avoid repeated decoding of metadata
177 177 # entries.
178 178 #
179 179 # - 20 bytes: obsoleted changeset identifier.
180 180 #
181 181 # - N*20 bytes: new changesets identifiers.
182 182 #
183 183 # - M bytes: metadata as a sequence of nul-terminated strings. Each
184 184 # string contains a key and a value, separated by a colon ':', without
185 185 # additional encoding. Keys cannot contain '\0' or ':' and values
186 186 # cannot contain '\0'.
187 187 _fm0version = 0
188 188 _fm0fixed = '>BIB20s'
189 189 _fm0node = '20s'
190 190 _fm0fsize = _calcsize(_fm0fixed)
191 191 _fm0fnodesize = _calcsize(_fm0node)
192 192
def _fm0readmarkers(data, off, stop):
    """Decode version-0 obsolete markers from ``data[off:stop]``.

    Yields (pre, sucs, flags, metadata, date, parents) tuples, one per
    marker. ``parents`` is None when no parent information was recorded
    or when the recorded values could not be parsed as node ids.
    """
    # Loop on markers
    while off < stop:
        # read fixed part
        cur = data[off:off + _fm0fsize]
        off += _fm0fsize
        numsuc, mdsize, flags, pre = _unpack(_fm0fixed, cur)
        # read replacement
        sucs = ()
        if numsuc:
            s = (_fm0fnodesize * numsuc)
            cur = data[off:off + s]
            sucs = _unpack(_fm0node * numsuc, cur)
            off += s
        # read metadata
        # (metadata will be decoded on demand)
        metadata = data[off:off + mdsize]
        if len(metadata) != mdsize:
            raise error.Abort(_('parsing obsolete marker: metadata is too '
                                'short, %d bytes expected, got %d')
                              % (mdsize, len(metadata)))
        off += mdsize
        metadata = _fm0decodemeta(metadata)
        # In this format version, the date is stored as a 'date' metadata
        # entry of the form "<seconds> <tzoffset>"; fall back to epoch UTC.
        try:
            when, offset = metadata.pop('date', '0 0').split(' ')
            date = float(when), int(offset)
        except ValueError:
            date = (0., 0)
        # Parents are likewise smuggled through metadata: 'p0' marks an
        # explicitly-empty parent list, 'p1'/'p2' carry hex node ids.
        parents = None
        if 'p2' in metadata:
            parents = (metadata.pop('p1', None), metadata.pop('p2', None))
        elif 'p1' in metadata:
            parents = (metadata.pop('p1', None),)
        elif 'p0' in metadata:
            parents = ()
        if parents is not None:
            try:
                parents = tuple(node.bin(p) for p in parents)
                # if parent content is not a nodeid, drop the data
                for p in parents:
                    if len(p) != 20:
                        parents = None
                        break
            except TypeError:
                # if content cannot be translated to nodeid drop the data.
                parents = None

        metadata = tuple(sorted(metadata.iteritems()))

        yield (pre, sucs, flags, metadata, date, parents)
243 243
def _fm0encodeonemarker(marker):
    """Serialize one marker tuple into the version-0 binary format."""
    pre, sucs, flags, metadata, date, parents = marker
    # Version 0 only knows 20-byte (SHA-1) nodes.
    if flags & usingsha256:
        raise error.Abort(_('cannot handle sha256 with old obsstore format'))
    meta = dict(metadata)
    # Date and parents travel inside the metadata in this format.
    when, offset = date
    meta['date'] = '%r %i' % (when, offset)
    if parents is not None:
        if not parents:
            # mark that we explicitly recorded no parents
            meta['p0'] = ''
        for idx, parent in enumerate(parents, 1):
            meta['p%i' % idx] = node.hex(parent)
    encodedmeta = _fm0encodemeta(meta)
    numsuc = len(sucs)
    fmt = _fm0fixed + _fm0node * numsuc
    fields = [numsuc, len(encodedmeta), flags, pre] + list(sucs)
    return _pack(fmt, *fields) + encodedmeta
263 263
264 264 def _fm0encodemeta(meta):
265 265 """Return encoded metadata string to string mapping.
266 266
267 267 Assume no ':' in key and no '\0' in both key and value."""
268 268 for key, value in meta.iteritems():
269 269 if ':' in key or '\0' in key:
270 270 raise ValueError("':' and '\0' are forbidden in metadata key'")
271 271 if '\0' in value:
272 272 raise ValueError("':' is forbidden in metadata value'")
273 273 return '\0'.join(['%s:%s' % (k, meta[k]) for k in sorted(meta)])
274 274
275 275 def _fm0decodemeta(data):
276 276 """Return string to string dictionary from encoded version."""
277 277 d = {}
278 278 for l in data.split('\0'):
279 279 if l:
280 280 key, value = l.split(':')
281 281 d[key] = value
282 282 return d
283 283
284 284 ## Parsing and writing of version "1"
285 285 #
286 286 # The header is followed by the markers. Each marker is made of:
287 287 #
288 288 # - uint32: total size of the marker (including this field)
289 289 #
290 290 # - float64: date in seconds since epoch
291 291 #
292 292 # - int16: timezone offset in minutes
293 293 #
294 294 # - uint16: a bit field. It is reserved for flags used in common
295 295 # obsolete marker operations, to avoid repeated decoding of metadata
296 296 # entries.
297 297 #
298 298 # - uint8: number of successors "N", can be zero.
299 299 #
300 300 # - uint8: number of parents "P", can be zero.
301 301 #
302 302 # 0: parents data stored but no parent,
303 303 # 1: one parent stored,
304 304 # 2: two parents stored,
305 305 # 3: no parent data stored
306 306 #
307 307 # - uint8: number of metadata entries M
308 308 #
309 309 # - 20 or 32 bytes: predecessor changeset identifier.
310 310 #
311 311 # - N*(20 or 32) bytes: successors changesets identifiers.
312 312 #
313 313 # - P*(20 or 32) bytes: parents of the predecessors changesets.
314 314 #
315 315 # - M*(uint8, uint8): size of all metadata entries (key and value)
316 316 #
317 317 # - remaining bytes: the metadata, each (key, value) pair after the other.
318 318 _fm1version = 1
319 319 _fm1fixed = '>IdhHBBB20s'
320 320 _fm1nodesha1 = '20s'
321 321 _fm1nodesha256 = '32s'
322 322 _fm1nodesha1size = _calcsize(_fm1nodesha1)
323 323 _fm1nodesha256size = _calcsize(_fm1nodesha256)
324 324 _fm1fsize = _calcsize(_fm1fixed)
325 325 _fm1parentnone = 3
326 326 _fm1parentshift = 14
327 327 _fm1parentmask = (_fm1parentnone << _fm1parentshift)
328 328 _fm1metapair = 'BB'
329 329 _fm1metapairsize = _calcsize(_fm1metapair)
330 330
def _fm1purereadmarkers(data, off, stop):
    """Pure-Python decoder for version-1 obsolete markers.

    Yields (prec, sucs, flags, metadata, date, parents) tuples for every
    marker encoded in ``data`` between offsets ``off`` and ``stop``.
    Node width (SHA-1 vs SHA-256) is selected per marker from its flags.
    """
    # make some global constants local for performance
    noneflag = _fm1parentnone
    sha2flag = usingsha256
    sha1size = _fm1nodesha1size
    sha2size = _fm1nodesha256size
    sha1fmt = _fm1nodesha1
    sha2fmt = _fm1nodesha256
    metasize = _fm1metapairsize
    metafmt = _fm1metapair
    fsize = _fm1fsize
    unpack = _unpack

    # Loop on markers
    ufixed = struct.Struct(_fm1fixed).unpack

    while off < stop:
        # read fixed part
        # o1/o2/o3 track the running offsets past the fixed part,
        # successors, and parents respectively.
        o1 = off + fsize
        t, secs, tz, flags, numsuc, numpar, nummeta, prec = ufixed(data[off:o1])

        if flags & sha2flag:
            # FIXME: prec was read as a SHA1, needs to be amended

            # read 0 or more successors
            if numsuc == 1:
                o2 = o1 + sha2size
                sucs = (data[o1:o2],)
            else:
                o2 = o1 + sha2size * numsuc
                sucs = unpack(sha2fmt * numsuc, data[o1:o2])

            # read parents
            if numpar == noneflag:
                o3 = o2
                parents = None
            elif numpar == 1:
                o3 = o2 + sha2size
                parents = (data[o2:o3],)
            else:
                o3 = o2 + sha2size * numpar
                parents = unpack(sha2fmt * numpar, data[o2:o3])
        else:
            # read 0 or more successors
            if numsuc == 1:
                o2 = o1 + sha1size
                sucs = (data[o1:o2],)
            else:
                o2 = o1 + sha1size * numsuc
                sucs = unpack(sha1fmt * numsuc, data[o1:o2])

            # read parents
            if numpar == noneflag:
                o3 = o2
                parents = None
            elif numpar == 1:
                o3 = o2 + sha1size
                parents = (data[o2:o3],)
            else:
                o3 = o2 + sha1size * numpar
                parents = unpack(sha1fmt * numpar, data[o2:o3])

        # read metadata
        # A size table of (keysize, valuesize) byte pairs precedes the
        # concatenated key/value payload.
        off = o3 + metasize * nummeta
        metapairsize = unpack('>' + (metafmt * nummeta), data[o3:off])
        metadata = []
        for idx in pycompat.xrange(0, len(metapairsize), 2):
            o1 = off + metapairsize[idx]
            o2 = o1 + metapairsize[idx + 1]
            metadata.append((data[off:o1], data[o1:o2]))
            off = o2

        # tz is stored in minutes; callers expect seconds.
        yield (prec, sucs, flags, tuple(metadata), (secs, tz * 60), parents)
404 404
def _fm1encodeonemarker(marker):
    """Encode a single marker tuple into its version-1 binary representation.

    ``marker`` is a ``(prec, sucs, flags, metadata, date, parents)`` tuple as
    produced by the decoders above.  Returns the encoded marker as a string
    of bytes.  Raises ProgrammingError when a metadata key or value exceeds
    255 bytes (sizes are stored as uint8 on disk).
    """
    pre, sucs, flags, metadata, date, parents = marker
    # determine node size
    _fm1node = _fm1nodesha1
    if flags & usingsha256:
        _fm1node = _fm1nodesha256
    numsuc = len(sucs)
    numextranodes = numsuc
    if parents is None:
        numpar = _fm1parentnone
    else:
        numpar = len(parents)
        numextranodes += numpar
    formatnodes = _fm1node * numextranodes
    formatmeta = _fm1metapair * len(metadata)
    format = _fm1fixed + formatnodes + formatmeta
    # tz is stored in minutes so we divide by 60
    tz = date[1]//60
    # data[0] is a placeholder for the total size, filled in once all
    # metadata lengths are known
    data = [None, date[0], tz, flags, numsuc, numpar, len(metadata), pre]
    data.extend(sucs)
    if parents is not None:
        data.extend(parents)
    totalsize = _calcsize(format)
    for key, value in metadata:
        lk = len(key)
        lv = len(value)
        # key and value sizes are encoded as uint8 pairs, hence the 255 limit
        if lk > 255:
            msg = ('obsstore metadata key cannot be longer than 255 bytes'
                   ' (key "%s" is %u bytes)') % (key, lk)
            raise error.ProgrammingError(msg)
        if lv > 255:
            msg = ('obsstore metadata value cannot be longer than 255 bytes'
                   ' (value "%s" for key "%s" is %u bytes)') % (value, key, lv)
            raise error.ProgrammingError(msg)
        data.append(lk)
        data.append(lv)
        totalsize += lk + lv
    data[0] = totalsize
    data = [_pack(format, *data)]
    # the raw metadata bytes trail the fixed-size part
    for key, value in metadata:
        data.append(key)
        data.append(value)
    return ''.join(data)
448 448
def _fm1readmarkers(data, off, stop):
    """Decode version-1 markers from ``data[off:stop]``.

    Uses the native (C) parser when available and falls back to the pure
    Python implementation otherwise.
    """
    reader = getattr(parsers, 'fm1readmarkers', None)
    if reader:
        return reader(data, off, stop)
    # no native parser compiled in: use the (slower) pure Python decoder
    return _fm1purereadmarkers(data, off, stop)
454 454
# mapping to read/write various marker formats
# <version> -> (decoder, encoder)
# decoder signature: (data, off, stop) -> iterable of marker tuples
# encoder signature: (marker) -> bytes
formats = {_fm0version: (_fm0readmarkers, _fm0encodeonemarker),
           _fm1version: (_fm1readmarkers, _fm1encodeonemarker)}
459 459
def _readmarkerversion(data):
    """Return the on-disk format version stored in the first byte of data."""
    # the version is a single big-endian unsigned byte at offset 0
    (version,) = _unpack('>B', data[0:1])
    return version
462 462
@util.nogc
def _readmarkers(data, off=None, stop=None):
    """Read and enumerate markers from raw data

    Returns a ``(version, markers)`` pair where ``markers`` is the decoder's
    iterator.  Raises UnknownVersion for unsupported on-disk formats.
    """
    diskversion = _readmarkerversion(data)
    if not off:
        # default: skip the 1 byte version number at the start
        off = 1
    if stop is None:
        stop = len(data)
    if diskversion not in formats:
        msg = _('parsing obsolete marker: unknown version %r') % diskversion
        raise error.UnknownVersion(msg, version=diskversion)
    decoder = formats[diskversion][0]
    return diskversion, decoder(data, off, stop)
475 475
def encodeheader(version=_fm0version):
    """Return the one-byte header identifying the marker format version."""
    header = _pack('>B', version)
    return header
478 478
def encodemarkers(markers, addheader=False, version=_fm0version):
    """Generator yielding the binary encoding of each marker.

    When ``addheader`` is true, the version header byte is yielded first.
    """
    # Kept separate from flushmarkers(), it will be reused for
    # markers exchange.
    encoder = formats[version][1]
    if addheader:
        yield encodeheader(version)
    for entry in markers:
        yield encoder(entry)
487 487
@util.nogc
def _addsuccessors(successors, markers):
    """Index each marker under its predecessor node in ``successors``."""
    for entry in markers:
        # entry[0] is the predecessor node of the marker
        successors.setdefault(entry[0], set()).add(entry)
492 492
@util.nogc
def _addpredecessors(predecessors, markers):
    """Index each marker under every one of its successor nodes."""
    for entry in markers:
        # entry[1] is the tuple of successor nodes
        for succnode in entry[1]:
            predecessors.setdefault(succnode, set()).add(entry)
498 498
@util.nogc
def _addchildren(children, markers):
    """Index each marker under every recorded parent of its predecessor."""
    for entry in markers:
        # entry[5] is the parents tuple, or None when unrecorded
        parentnodes = entry[5]
        if parentnodes is None:
            continue
        for parentnode in parentnodes:
            children.setdefault(parentnode, set()).add(entry)
506 506
def _checkinvalidmarkers(markers):
    """search for marker with invalid data and raise error if needed

    Exist as a separated function to allow the evolve extension for a more
    subtle handling.
    """
    for entry in markers:
        # a marker must never list the null node among its successors
        if node.nullid in entry[1]:
            raise error.Abort(_('bad obsolescence marker detected: '
                                'invalid successors nullid'))
517 517
class obsstore(object):
    """Store obsolete markers

    Markers can be accessed with two mappings:
    - predecessors[x] -> set(markers on predecessors edges of x)
    - successors[x] -> set(markers on successors edges of x)
    - children[x] -> set(markers on predecessors edges of children(x))
    """

    fields = ('prec', 'succs', 'flag', 'meta', 'date', 'parents')
    # prec: nodeid, predecessors changesets
    # succs: tuple of nodeid, successor changesets (0-N length)
    # flag: integer, flag field carrying modifier for the markers (see doc)
    # meta: binary blob in UTF-8, encoded metadata dictionary
    # date: (float, int) tuple, date of marker creation
    # parents: (tuple of nodeid) or None, parents of predecessors
    # None is used when no data has been recorded

    def __init__(self, svfs, defaultformat=_fm1version, readonly=False):
        # caches for various obsolescence related cache
        self.caches = {}
        self.svfs = svfs
        self._defaultformat = defaultformat
        self._readonly = readonly

    def __iter__(self):
        # iterate over every raw marker tuple in the store
        return iter(self._all)

    def __len__(self):
        return len(self._all)

    def __nonzero__(self):
        # When markers are not loaded yet, a cheap stat() of the file tells
        # whether any marker exists: the file is larger than the 1 byte
        # version header iff it holds at least one marker.
        if not self._cached(r'_all'):
            try:
                return self.svfs.stat('obsstore').st_size > 1
            except OSError as inst:
                if inst.errno != errno.ENOENT:
                    raise
                # just build an empty _all list if no obsstore exists, which
                # avoids further stat() syscalls
        return bool(self._all)

    __bool__ = __nonzero__

    @property
    def readonly(self):
        """True if marker creation is disabled

        Remove me in the future when obsolete marker is always on."""
        return self._readonly

    def create(self, transaction, prec, succs=(), flag=0, parents=None,
               date=None, metadata=None, ui=None):
        """obsolete: add a new obsolete marker

        * ensuring it is hashable
        * check mandatory metadata
        * encode metadata

        If you are a human writing code creating marker you want to use the
        `createmarkers` function in this module instead.

        return True if a new marker have been added, False if the markers
        already existed (no op).
        """
        if metadata is None:
            metadata = {}
        if date is None:
            if 'date' in metadata:
                # as a courtesy for out-of-tree extensions
                date = dateutil.parsedate(metadata.pop('date'))
            elif ui is not None:
                date = ui.configdate('devel', 'default-date')
                if date is None:
                    date = dateutil.makedate()
            else:
                date = dateutil.makedate()
        # nodes are expected as 20-byte binary identifiers
        if len(prec) != 20:
            raise ValueError(prec)
        for succ in succs:
            if len(succ) != 20:
                raise ValueError(succ)
        if prec in succs:
            raise ValueError(_('in-marker cycle with %s') % node.hex(prec))

        # sorted tuple makes the marker hashable and its encoding stable
        metadata = tuple(sorted(metadata.iteritems()))
        for k, v in metadata:
            try:
                # might be better to reject non-ASCII keys
                k.decode('utf-8')
                v.decode('utf-8')
            except UnicodeDecodeError:
                raise error.ProgrammingError(
                    'obsstore metadata must be valid UTF-8 sequence '
                    '(key = %r, value = %r)'
                    % (pycompat.bytestr(k), pycompat.bytestr(v)))

        marker = (bytes(prec), tuple(succs), int(flag), metadata, date, parents)
        return bool(self.add(transaction, [marker]))

    def add(self, transaction, markers):
        """Add new markers to the store

        Take care of filtering duplicate.
        Return the number of new marker."""
        if self._readonly:
            raise error.Abort(_('creating obsolete markers is not enabled on '
                                'this repo'))
        # filter out markers already stored (via the successors index) or
        # duplicated within this very batch
        known = set()
        getsuccessors = self.successors.get
        new = []
        for m in markers:
            if m not in getsuccessors(m[0], ()) and m not in known:
                known.add(m)
                new.append(m)
        if new:
            f = self.svfs('obsstore', 'ab')
            try:
                offset = f.tell()
                transaction.add('obsstore', offset)
                # offset == 0: new file - add the version header
                data = b''.join(encodemarkers(new, offset == 0, self._version))
                f.write(data)
            finally:
                # XXX: f.close() == filecache invalidation == obsstore rebuilt.
                # call 'filecacheentry.refresh()'  here
                f.close()
            addedmarkers = transaction.changes.get('obsmarkers')
            if addedmarkers is not None:
                addedmarkers.update(new)
            self._addmarkers(new, data)
            # new marker *may* have changed several set. invalidate the cache.
            self.caches.clear()
        # records the number of new markers for the transaction hooks
        previous = int(transaction.hookargs.get('new_obsmarkers', '0'))
        transaction.hookargs['new_obsmarkers'] = '%d' % (previous + len(new))
        return len(new)

    def mergemarkers(self, transaction, data):
        """merge a binary stream of markers inside the obsstore

        Returns the number of new markers added."""
        version, markers = _readmarkers(data)
        return self.add(transaction, markers)

    @propertycache
    def _data(self):
        # raw bytes of the on-disk obsstore file (empty string when missing)
        return self.svfs.tryread('obsstore')

    @propertycache
    def _version(self):
        if len(self._data) >= 1:
            return _readmarkerversion(self._data)
        else:
            # no on-disk data yet: use the configured default format
            return self._defaultformat

    @propertycache
    def _all(self):
        # list of every marker tuple, decoded from disk on first access
        data = self._data
        if not data:
            return []
        self._version, markers = _readmarkers(data)
        markers = list(markers)
        _checkinvalidmarkers(markers)
        return markers

    @propertycache
    def successors(self):
        # predecessor-node -> set(marker) index, built lazily
        successors = {}
        _addsuccessors(successors, self._all)
        return successors

    @propertycache
    def predecessors(self):
        # successor-node -> set(marker) index, built lazily
        predecessors = {}
        _addpredecessors(predecessors, self._all)
        return predecessors

    @propertycache
    def children(self):
        # parent-node -> set(marker) index, built lazily
        children = {}
        _addchildren(children, self._all)
        return children

    def _cached(self, attr):
        # True when the named @propertycache attribute was already computed
        return attr in self.__dict__

    def _addmarkers(self, markers, rawdata):
        # incrementally update the cached data and the indexes that have
        # already been computed, instead of invalidating them wholesale
        markers = list(markers) # to allow repeated iteration
        self._data = self._data + rawdata
        self._all.extend(markers)
        if self._cached(r'successors'):
            _addsuccessors(self.successors, markers)
        if self._cached(r'predecessors'):
            _addpredecessors(self.predecessors, markers)
        if self._cached(r'children'):
            _addchildren(self.children, markers)
        _checkinvalidmarkers(markers)

    def relevantmarkers(self, nodes):
        """return a set of all obsolescence markers relevant to a set of nodes.

        "relevant" to a set of nodes mean:

        - marker that use this changeset as successor
        - prune marker of direct children on this changeset
        - recursive application of the two rules on predecessors of these
          markers

        It is a set so you cannot rely on order."""

        pendingnodes = set(nodes)
        seenmarkers = set()
        seennodes = set(pendingnodes)
        precursorsmarkers = self.predecessors
        succsmarkers = self.successors
        children = self.children
        while pendingnodes:
            direct = set()
            for current in pendingnodes:
                direct.update(precursorsmarkers.get(current, ()))
                # a "prune" marker has no successors (empty m[1])
                pruned = [m for m in children.get(current, ()) if not m[1]]
                direct.update(pruned)
                pruned = [m for m in succsmarkers.get(current, ()) if not m[1]]
                direct.update(pruned)
            direct -= seenmarkers
            pendingnodes = set([m[0] for m in direct])
            seenmarkers |= direct
            pendingnodes -= seennodes
            seennodes |= pendingnodes
        return seenmarkers
749 749
def makestore(ui, repo):
    """Create an obsstore instance from a repo."""
    # read default format for new obsstore.
    # developer config: format.obsstore-version
    version = ui.configint('format', 'obsstore-version')
    # only forward the format when explicitly configured so that the
    # obsstore class default applies otherwise
    extra = {}
    if version is not None:
        extra[r'defaultformat'] = version
    readonly = not isenabled(repo, createmarkersopt)
    store = obsstore(repo.svfs, readonly=readonly, **extra)
    if store and readonly:
        # markers exist on disk although the feature is disabled: warn the
        # user instead of silently ignoring them
        ui.warn(_('obsolete feature not enabled but %i markers found!\n')
                % len(list(store)))
    return store
765 765
def commonversion(versions):
    """Return the newest version listed in both versions and our local formats.

    Returns None if no common version exists.

    The input list is left untouched (the previous implementation sorted it
    in place, mutating the caller's list as a hidden side effect).
    """
    # search for highest version known on both side, newest first
    for v in sorted(versions, reverse=True):
        if v in formats:
            return v
    return None
777 777
# Maximum pushkey payload chunk size, arbitrarily picked to fit into the 8K
# limit from the HTTP server. You have to take into account:
# - the version header
# - the base85 encoding overhead
_maxpayload = 5300
783 783
def _pushkeyescape(markers):
    """encode markers into a dict suitable for pushkey exchange

    - binary data is base85 encoded
    - split in chunks smaller than 5300 bytes"""
    keys = {}
    chunks = []
    chunk = None
    size = _maxpayload * 2  # oversized so the first marker opens a new chunk
    for marker in markers:
        encoded = _fm0encodeonemarker(marker)
        if size + len(encoded) > _maxpayload:
            # current chunk is full: start a fresh one
            chunk = []
            size = 0
            chunks.append(chunk)
        chunk.append(encoded)
        size += len(encoded)
    # number the chunks from the last one to the first one
    for idx, chunk in enumerate(reversed(chunks)):
        data = ''.join([_pack('>B', _fm0version)] + chunk)
        keys['dump%i' % idx] = util.b85encode(data)
    return keys
804 804
def listmarkers(repo):
    """List markers over pushkey"""
    store = repo.obsstore
    if not store:
        # no markers: nothing to advertise
        return {}
    return _pushkeyescape(sorted(store))
810 810
def pushmarker(repo, key, old, new):
    """Push markers over pushkey"""
    # only the 'dumpN' keys produced by _pushkeyescape are understood
    if not key.startswith('dump'):
        repo.ui.warn(_('unknown key: %r') % key)
        return False
    # markers are append-only: a non-empty 'old' value never makes sense
    if old:
        repo.ui.warn(_('unexpected old value for %r') % key)
        return False
    data = util.b85decode(new)
    with repo.lock(), repo.transaction('pushkey: obsolete markers') as txn:
        repo.obsstore.mergemarkers(txn, data)
        repo.invalidatevolatilesets()
        return True
824 824
# mapping of 'set-name' -> <function to compute this set>
cachefuncs = {}
def cachefor(name):
    """Decorator to register a function as computing the cache for a set"""
    def register(func):
        # refuse to silently overwrite an existing computer for this set
        if name in cachefuncs:
            msg = "duplicated registration for volatileset '%s' (existing: %r)"
            raise error.ProgrammingError(msg % (name, cachefuncs[name]))
        cachefuncs[name] = func
        return func
    return register
836 836
def getrevs(repo, name):
    """Return the set of revision that belong to the <name> set

    Such access may compute the set and cache it for future use"""
    repo = repo.unfiltered()
    store = repo.obsstore
    if not store:
        # no markers at all: every volatile set is trivially empty
        return frozenset()
    caches = store.caches
    if name not in caches:
        caches[name] = cachefuncs[name](repo)
    return caches[name]
847 847
# To be simple we need to invalidate obsolescence caches when:
#
# - a new changeset is added
# - a public phase is changed
# - obsolescence markers are added
# - strip is used on a repo
def clearobscaches(repo):
    """Drop every obsolescence-related cache from a repo.

    The caches are only cleared when an obsstore already exists on the
    repo; accessing ``repo.obsstore`` otherwise would create it needlessly.

    (We could be smarter here given the exact event that triggered the
    cache clearing)"""
    # only clear caches when there already is obsstore data in this repo
    if 'obsstore' in repo._filecache:
        repo.obsstore.caches.clear()
865 865
def _mutablerevs(repo):
    """the set of mutable revision in the repository"""
    # delegate to the phase cache; 'mutablephases' presumably covers all
    # non-public phases -- confirm against the phases module
    return repo._phasecache.getrevset(repo, phases.mutablephases)
869 869
@cachefor('obsolete')
def _computeobsoleteset(repo):
    """the set of obsolete revisions"""
    tonode = repo.changelog.node
    successors = repo.obsstore.successors
    # a mutable revision is obsolete when some marker uses it as predecessor
    return {r for r in _mutablerevs(repo) if tonode(r) in successors}
878 878
@cachefor('orphan')
def _computeorphanset(repo):
    """the set of non obsolete revisions with obsolete parents"""
    parentrevs = repo.changelog.parentrevs
    obsolete = getrevs(repo, 'obsolete')
    candidates = _mutablerevs(repo) - obsolete
    orphans = set()
    # ascending order guarantees a parent is classified before its children,
    # so orphan-ness propagates down the graph in a single pass
    for rev in sorted(candidates):
        for parent in parentrevs(rev):
            if parent in obsolete or parent in orphans:
                orphans.add(rev)
                break
    return orphans
895 895
@cachefor('suspended')
def _computesuspendedset(repo):
    """the set of obsolete parents with non obsolete descendants"""
    # obsolete revisions that are ancestors of an orphan are "suspended"
    ancestors = repo.changelog.ancestors(getrevs(repo, 'orphan'))
    return {r for r in getrevs(repo, 'obsolete') if r in ancestors}
901 901
@cachefor('extinct')
def _computeextinctset(repo):
    """the set of obsolete parents without non obsolete descendants"""
    obsolete = getrevs(repo, 'obsolete')
    suspended = getrevs(repo, 'suspended')
    # extinct = obsolete and not keeping any live descendant around
    return obsolete - suspended
906 906
@cachefor('phasedivergent')
def _computephasedivergentset(repo):
    """the set of revs trying to obsolete public revisions"""
    bumped = set()
    # localize the attribute lookups used inside the loops
    getphase = repo._phasecache.phase # would be faster to grab the full list
    public = phases.public
    cl = repo.changelog
    torev = cl.nodemap.get
    tonode = cl.node
    for rev in repo.revs('(not public()) and (not obsolete())'):
        # We only evaluate mutable, non-obsolete revision
        node = tonode(rev)
        # (future) A cache of predecessors may worth if split is very common
        preds = obsutil.allpredecessors(repo.obsstore, [node],
                                        ignoreflags=bumpedfix)
        for pnode in preds:
            prev = torev(pnode) # unfiltered! but so is phasecache
            if prev is not None and getphase(repo, prev) <= public:
                # we have a public predecessor
                bumped.add(rev)
                break # Next draft!
    return bumped
929 929
@cachefor('contentdivergent')
def _computecontentdivergentset(repo):
    """the set of rev that compete to be the final successors of some revision.
    """
    divergent = set()
    obsstore = repo.obsstore
    # cache shared across calls to successorssets to avoid recomputation
    newermap = {}
    tonode = repo.changelog.node
    for rev in repo.revs('(not public()) - obsolete()'):
        node = tonode(rev)
        mark = obsstore.predecessors.get(node, ())
        # walk the predecessor markers transitively from this revision
        toprocess = set(mark)
        seen = set()
        while toprocess:
            prec = toprocess.pop()[0]
            if prec in seen:
                continue # emergency cycle hanging prevention
            seen.add(prec)
            if prec not in newermap:
                obsutil.successorssets(repo, prec, cache=newermap)
            newer = [n for n in newermap[prec] if n]
            # more than one non-empty successors set means the predecessor
            # was rewritten in competing ways: this revision is divergent
            if len(newer) > 1:
                divergent.add(rev)
                break
            toprocess.update(obsstore.predecessors.get(prec, ()))
    return divergent
956 956
957 957
def createmarkers(repo, relations, flag=0, date=None, metadata=None,
                  operation=None):
    """Add obsolete markers between changesets in a repo

    <relations> must be an iterable of ((<old>,...), (<new>, ...)[,{metadata}])
    tuple. `old` and `news` are changectx. metadata is an optional dictionary
    containing metadata for this marker only. It is merged with the global
    metadata specified through the `metadata` argument of this function.
    Any string values in metadata must be UTF-8 bytes.

    Trying to obsolete a public changeset will raise an exception.

    Current user and date are used except if specified otherwise in the
    metadata attribute.

    This function operates within a transaction of its own, but does
    not take any lock on the repo.
    """
    # prepare metadata
    if metadata is None:
        metadata = {}
    if 'user' not in metadata:
        luser = repo.ui.config('devel', 'user.obsmarker') or repo.ui.username()
        metadata['user'] = encoding.fromlocal(luser)

    # Operation metadata handling
    useoperation = repo.ui.configbool('experimental',
                                      'evolution.track-operation')
    if useoperation and operation:
        metadata['operation'] = operation

    # Effect flag metadata handling
    saveeffectflag = repo.ui.configbool('experimental',
                                        'evolution.effect-flags')

    with repo.transaction('add-obsolescence-marker') as tr:
        markerargs = []
        for rel in relations:
            predecessors = rel[0]
            if not isinstance(predecessors, tuple):
                # preserve compat with old API until all caller are migrated
                predecessors = (predecessors,)
            # a "fold" (several predecessors) must target exactly one
            # successor
            if len(predecessors) > 1 and len(rel[1]) != 1:
                msg = 'Fold markers can only have 1 successors, not %d'
                raise error.ProgrammingError(msg % len(rel[1]))
            for prec in predecessors:
                sucs = rel[1]
                # per-relation metadata overrides the global metadata
                localmetadata = metadata.copy()
                if len(rel) > 2:
                    localmetadata.update(rel[2])

                if not prec.mutable():
                    raise error.Abort(_("cannot obsolete public changeset: %s")
                                      % prec,
                                      hint="see 'hg help phases' for details")
                nprec = prec.node()
                nsucs = tuple(s.node() for s in sucs)
                npare = None
                if not nsucs:
                    # a prune marker records the parents of the pruned node
                    npare = tuple(p.node() for p in prec.parents())
                if nprec in nsucs:
                    raise error.Abort(_("changeset %s cannot obsolete itself")
                                      % prec)

                # Effect flag can be different by relation
                if saveeffectflag:
                    # The effect flag is saved in a versioned field name for
                    # future evolution
                    effectflag = obsutil.geteffectflag(prec, sucs)
                    localmetadata[obsutil.EFFECTFLAGFIELD] = "%d" % effectflag

                # Creating the marker causes the hidden cache to become
                # invalid, which causes recomputation when we ask for
                # prec.parents() above. Resulting in n^2 behavior. So let's
                # prepare all of the args first, then create the markers.
                markerargs.append((nprec, nsucs, npare, localmetadata))

        for args in markerargs:
            nprec, nsucs, npare, localmetadata = args
            repo.obsstore.create(tr, nprec, nsucs, flag, parents=npare,
                                 date=date, metadata=localmetadata,
                                 ui=repo.ui)
        repo.filteredrevcache.clear()
@@ -1,475 +1,475
1 1 # templatefilters.py - common template expansion filters
2 2 #
3 3 # Copyright 2005-2008 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import os
11 11 import re
12 12 import time
13 13
14 14 from .i18n import _
15 15 from . import (
16 16 encoding,
17 17 error,
18 18 node,
19 19 pycompat,
20 20 registrar,
21 21 templateutil,
22 22 url,
23 23 util,
24 24 )
25 25 from .utils import (
26 26 dateutil,
27 27 stringutil,
28 28 )
29 29
30 30 urlerr = util.urlerr
31 31 urlreq = util.urlreq
32 32
if pycompat.ispy3:
    # Python 3 has no 'long' type; alias it to 'int' so shared code compiles
    long = int
35 35
# filters are callables like:
# fn(obj)
# with:
# obj - object to be filtered (text, date, list and so on)
filters = {}

# decorator used below to register each filter function into 'filters'
templatefilter = registrar.templatefilter(filters)
43 43
@templatefilter('addbreaks', intype=bytes)
def addbreaks(text):
    """Any text. Add an XHTML "<br />" tag before the end of
    every line except the last.
    """
    # splitting on newlines and rejoining with the tag prepended is
    # equivalent to replacing every newline occurrence
    return '<br/>\n'.join(text.split('\n'))
50 50
# (unit name, unit length in seconds, abbreviation) from the largest unit
# down to the smallest; the 'age' filter below relies on this ordering
agescales = [("year", 3600 * 24 * 365, 'Y'),
             ("month", 3600 * 24 * 30, 'M'),
             ("week", 3600 * 24 * 7, 'W'),
             ("day", 3600 * 24, 'd'),
             ("hour", 3600, 'h'),
             ("minute", 60, 'm'),
             ("second", 1, 's')]
58 58
@templatefilter('age', intype=templateutil.date)
def age(date, abbrev=False):
    """Date. Returns a human-readable date/time difference between the
    given date/time and the current date/time.
    """

    def describe(unit, count, shortunit):
        # "1 minute" / "5 minutes", or abbreviated "5m"
        if abbrev:
            return "%d%s" % (count, shortunit)
        suffix = "" if count == 1 else "s"
        return "%d %s%s" % (count, unit, suffix)

    now = time.time()
    then = date[0]
    future = then > now
    if future:
        delta = max(1, int(then - now))
        # more than ~30 years ahead: give up on a precise wording
        if delta > agescales[0][1] * 30:
            return 'in the distant future'
    else:
        delta = max(1, int(now - then))
        # more than ~2 years ago: a plain date is more useful
        if delta > agescales[0][1] * 2:
            return dateutil.shortdate(date)

    # pick the largest unit of which at least two fit (seconds always match)
    for unit, seconds, shortunit in agescales:
        count = delta // seconds
        if count >= 2 or seconds == 1:
            if future:
                return '%s from now' % describe(unit, count, shortunit)
            return '%s ago' % describe(unit, count, shortunit)
93 93
@templatefilter('basename', intype=bytes)
def basename(path):
    """Any text. Treats the text as a path, and returns the last
    component of the path after splitting by the path separator.
    For example, "foo/bar/baz" becomes "baz" and "foo/bar//" becomes "".
    """
    # os.path.basename(p) is defined as os.path.split(p)[1]
    head, tail = os.path.split(path)
    return tail
101 101
@templatefilter('commondir')
def commondir(filelist):
    """List of text. Treats each list item as file name with /
    as path separator and returns the longest common directory
    prefix shared by all list items.
    Returns the empty string if no common prefix exists.

    The list items are not normalized, i.e. "foo/../bar" is handled as
    file "bar" in the directory "foo/..". Leading slashes are ignored.

    For example, ["foo/bar/baz", "foo/baz/bar"] becomes "foo" and
    ["foo/bar", "baz"] becomes "".
    """
    def common(a, b):
        # Truncate both component lists to the length of the shorter one
        # before comparing. The previous code truncated the wrong list
        # (a = b[:len(a)]), which made a == b whenever a was longer, wrongly
        # reporting a "common" prefix for unrelated paths such as
        # ["foo/bar/baz", "fox/file"].
        if len(a) > len(b):
            a = a[:len(b)]
        elif len(b) > len(a):
            b = b[:len(a)]
        if a == b:
            return a
        for i in pycompat.xrange(len(a)):
            if a[i] != b[i]:
                return a[:i]
        return a
    try:
        if not filelist:
            return ""
        dirlist = [f.lstrip('/').split('/')[:-1] for f in filelist]
        if len(dirlist) == 1:
            return '/'.join(dirlist[0])
        a = min(dirlist)
        b = max(dirlist)
        # The common prefix of a and b is shared with all
        # elements of the list since Python sorts lexicographical
        # and [1, x] after [1].
        return '/'.join(common(a, b))
    except TypeError:
        raise error.ParseError(_('argument is not a list of text'))
140 140
@templatefilter('count')
def count(i):
    """List or text. Returns the length as an integer."""
    try:
        return len(i)
    except TypeError:
        # objects without a length (e.g. integers) are not countable
        raise error.ParseError(_('not countable'))
148 148
@templatefilter('dirname', intype=bytes)
def dirname(path):
    """Any text. Treats the text as a path, and strips the last
    component of the path after splitting by the path separator.
    """
    # os.path.dirname(p) is defined as os.path.split(p)[0]
    head, tail = os.path.split(path)
    return head
155 155
@templatefilter('domain', intype=bytes)
def domain(author):
    """Any text. Finds the first string that looks like an email
    address, and extracts just the domain component. Example: ``User
    <user@example.com>`` becomes ``example.com``.
    """
    localpart, at, rest = author.partition('@')
    if not at:
        # nothing email-shaped in the input
        return ''
    # keep everything up to a closing '>', or the whole remainder when absent
    tail, gt, ignored = rest.partition('>')
    return tail
170 170
@templatefilter('email', intype=bytes)
def email(text):
    """Any text. Extracts the first string that looks like an email
    address. Example: ``User <user@example.com>`` becomes
    ``user@example.com``.
    """
    # the actual extraction lives in stringutil so other callers can share it
    return stringutil.email(text)
178 178
@templatefilter('escape', intype=bytes)
def escape(text):
    """Any text. Replaces the special XML/XHTML characters "&", "<"
    and ">" with XML entities, and filters out NUL characters.
    """
    # NUL bytes are illegal in XML; drop them before entity-escaping
    stripped = text.replace('\0', '')
    return url.escape(stripped, True)
185 185
# lazily-compiled patterns shared by all fill() calls (see below)
para_re = None
space_re = None

def fill(text, width, initindent='', hangindent=''):
    '''fill many paragraphs with optional indentation.'''
    global para_re, space_re
    if para_re is None:
        # a paragraph break is a blank line or a bullet ('-' or '*') line
        para_re = re.compile('(\n\n|\n\\s*[-*]\\s*)', re.M)
        space_re = re.compile(br' +')

    def findparas():
        # yield (paragraph, separator) pairs covering all of 'text'
        start = 0
        while True:
            m = para_re.search(text, start)
            if not m:
                uctext = encoding.unifromlocal(text[start:])
                w = len(uctext)
                # trim trailing whitespace on a unicode boundary so that
                # multi-byte characters are never cut in half
                while w > 0 and uctext[w - 1].isspace():
                    w -= 1
                yield (encoding.unitolocal(uctext[:w]),
                       encoding.unitolocal(uctext[w:]))
                break
            yield text[start:m.start(0)], m.group(1)
            start = m.end(1)

    # wrap each paragraph (twice: once to normalize, once after collapsing
    # runs of spaces) and re-attach its original separator
    return "".join([stringutil.wrap(space_re.sub(' ',
                                                 stringutil.wrap(para, width)),
                                    width, initindent, hangindent) + rest
                    for para, rest in findparas()])
215 215
@templatefilter('fill68', intype=bytes)
def fill68(text):
    """Any text. Wraps the text to fit in 68 columns."""
    # fixed-width convenience wrapper around fill()
    width = 68
    return fill(text, width)
220 220
@templatefilter('fill76', intype=bytes)
def fill76(text):
    """Any text. Wraps the text to fit in 76 columns."""
    # fixed-width convenience wrapper around fill()
    width = 76
    return fill(text, width)
225 225
@templatefilter('firstline', intype=bytes)
def firstline(text):
    """Any text. Returns the first line of text."""
    # keepends=True plus an explicit rstrip preserves the exact handling
    # of unusual line boundaries (e.g. \v) that splitlines() recognizes
    lines = text.splitlines(True)
    if not lines:
        return ''
    return lines[0].rstrip('\r\n')
233 233
@templatefilter('hex', intype=bytes)
def hexfilter(text):
    """Any text. Convert a binary Mercurial node identifier into
    its long hexadecimal representation.
    """
    # registered under the filter name 'hex'; delegates to node.hex()
    return node.hex(text)
240 240
@templatefilter('hgdate', intype=bytes)
def hgdate(text):
    """Date. Returns the date as a pair of numbers: "1157407993
    25200" (Unix timestamp, timezone offset).
    """
    # the date intype guarantees a (timestamp, tzoffset) pair
    timestamp, tzoffset = text
    return "%d %d" % (timestamp, tzoffset)
247 247
@templatefilter('isodate', intype=bytes)
def isodate(text):
    """Date. Returns the date in ISO 8601 format: "2009-08-18 13:00
    +0200".
    """
    # %1%2 are Mercurial's timezone placeholders in datestr formats
    return dateutil.datestr(text, '%Y-%m-%d %H:%M %1%2')
254 254
@templatefilter('isodatesec', intype=bytes)
def isodatesec(text):
    """Date. Returns the date in ISO 8601 format, including
    seconds: "2009-08-18 13:00:13 +0200". See also the rfc3339date
    filter.
    """
    # same as isodate but with a seconds field
    return dateutil.datestr(text, '%Y-%m-%d %H:%M:%S %1%2')
262 262
def indent(text, prefix):
    '''indent each non-empty line of text after first with prefix.'''
    lines = text.splitlines()
    last = len(lines) - 1
    # remember whether the input ended with a newline so the output
    # reproduces it exactly
    trailingnl = text[-1:] == '\n'

    def pieces():
        for i, line in enumerate(lines):
            # the first line and blank lines are never prefixed
            if i and line.strip():
                yield prefix
            yield line
            if i < last or trailingnl:
                yield '\n'

    return "".join(pieces())
277 277
@templatefilter('json')
def json(obj, paranoid=True):
    """Any object. Serializes the object to a JSON formatted text."""
    # Recursive serializer. 'paranoid' is forwarded to jsonescape, which
    # controls how aggressively byte strings are escaped.
    if obj is None:
        return 'null'
    elif obj is False:
        return 'false'
    elif obj is True:
        return 'true'
    elif isinstance(obj, (int, long, float)):
        # NOTE: 'long' exists on Python 2 only; this code targets py2
        return pycompat.bytestr(obj)
    elif isinstance(obj, bytes):
        return '"%s"' % encoding.jsonescape(obj, paranoid=paranoid)
    elif isinstance(obj, type(u'')):
        # unicode output is a programming error: Mercurial emits bytes
        raise error.ProgrammingError(
            'Mercurial only does output with bytes: %r' % obj)
    elif util.safehasattr(obj, 'keys'):
        # mapping: keys are sorted for deterministic output
        out = ['"%s": %s' % (encoding.jsonescape(k, paranoid=paranoid),
                             json(v, paranoid))
               for k, v in sorted(obj.iteritems())]
        return '{' + ', '.join(out) + '}'
    elif util.safehasattr(obj, '__iter__'):
        # any other iterable becomes a JSON array
        out = [json(i, paranoid) for i in obj]
        return '[' + ', '.join(out) + ']'
    raise error.ProgrammingError('cannot encode %r' % obj)
303 303
@templatefilter('lower', intype=bytes)
def lower(text):
    """Any text. Converts the text to lowercase."""
    # encoding.lower() handles the local character encoding
    return encoding.lower(text)
308 308
@templatefilter('nonempty', intype=bytes)
def nonempty(text):
    """Any text. Returns '(none)' if the string is empty."""
    # explicit branch instead of the 'or' short-circuit idiom
    if text:
        return text
    return "(none)"
313 313
@templatefilter('obfuscate', intype=bytes)
def obfuscate(text):
    """Any text. Returns the input text rendered as a sequence of
    XML entities.
    """
    # decode to unicode so each character obfuscates as one entity
    # (NOTE: 'unicode' is the Python 2 builtin)
    decoded = unicode(text, pycompat.sysstr(encoding.encoding), r'replace')
    return ''.join('&#%d;' % ord(c) for c in decoded)
321 321
@templatefilter('permissions', intype=bytes)
def permissions(flags):
    # Map Mercurial's manifest flag string to a Unix-style permission
    # display: 'l' -> symlink, 'x' -> executable, otherwise a plain file.
    if "l" in flags:
        return "lrwxrwxrwx"
    elif "x" in flags:
        return "-rwxr-xr-x"
    else:
        return "-rw-r--r--"
329 329
@templatefilter('person', intype=bytes)
def person(author):
    """Any text. Returns the name before an email address,
    interpreting it as per RFC 5322.
    """
    # thin delegate; the RFC 5322 parsing lives in stringutil.person()
    return stringutil.person(author)
336 336
@templatefilter('revescape', intype=bytes)
def revescape(text):
    """Any text. Escapes all "special" characters, except @.
    Forward slashes are escaped twice to prevent web servers from prematurely
    unescaping them. For example, "@foo bar/baz" becomes "@foo%20bar%252Fbaz".
    """
    # quote once keeping '/' and '@' literal, then double-escape slashes
    quoted = urlreq.quote(text, safe='/@')
    return quoted.replace('/', '%252F')
344 344
@templatefilter('rfc3339date', intype=bytes)
def rfc3339date(text):
    """Date. Returns a date using the Internet date format
    specified in RFC 3339: "2009-08-18T13:00:13+02:00".
    """
    # %1:%2 renders the timezone with a colon, as RFC 3339 requires
    return dateutil.datestr(text, "%Y-%m-%dT%H:%M:%S%1:%2")
351 351
@templatefilter('rfc822date', intype=bytes)
def rfc822date(text):
    """Date. Returns a date using the same format used in email
    headers: "Tue, 18 Aug 2009 13:00:13 +0200".
    """
    # thin delegate around dateutil.datestr with the RFC 822 format
    return dateutil.datestr(text, "%a, %d %b %Y %H:%M:%S %1%2")
358 358
@templatefilter('short', intype=bytes)
def short(text):
    """Changeset hash. Returns the short form of a changeset hash,
    i.e. a 12 hexadecimal digit string.
    """
    # the short form is simply the first 12 characters of the hex hash
    return text[:12]
365 365
@templatefilter('shortbisect', intype=bytes)
def shortbisect(label):
    """Any text. Treats `label` as a bisection status, and
    returns a single-character representing the status (G: good, B: bad,
    S: skipped, U: untested, I: ignored). Returns single space if `label`
    is not a valid bisection status.
    """
    # Fix: the help text previously said "if `text`" although the
    # parameter is named `label`.
    # Empty label -> single space; otherwise the uppercased first character.
    if label:
        return label[0:1].upper()
    return ' '
376 376
@templatefilter('shortdate', intype=bytes)
def shortdate(text):
    """Date. Returns a date like "2006-09-18"."""
    # thin delegate; formatting lives in dateutil.shortdate()
    return dateutil.shortdate(text)
381 381
@templatefilter('slashpath', intype=bytes)
def slashpath(path):
    """Any text. Replaces the native path separator with slash."""
    # util.pconvert() normalizes to forward slashes
    return util.pconvert(path)
386 386
@templatefilter('splitlines', intype=bytes)
def splitlines(text):
    """Any text. Split text into a list of lines."""
    # wrapped in a hybridlist so templates can iterate it as {line}s
    return templateutil.hybridlist(text.splitlines(), name='line')
391 391
@templatefilter('stringescape', intype=bytes)
def stringescape(text):
    # escape text as a Python/C-style string literal via stringutil;
    # no docstring here, so it contributes no user-visible filter help
    return stringutil.escapestr(text)
395 395
@templatefilter('stringify', intype=bytes)
def stringify(thing):
    """Any type. Turns the value into text by converting values into
    text and concatenating them.
    """
    # intype=bytes makes the template engine coerce 'thing' before the
    # call, so the body itself has nothing left to do
    return thing # coerced by the intype
402 402
@templatefilter('stripdir', intype=bytes)
def stripdir(text):
    """Treat the text as path and strip a directory level, if
    possible. For example, "foo" and "foo/bar" becomes "foo".
    """
    # renamed local so it no longer shadows the builtin 'dir'
    parent = os.path.dirname(text)
    if not parent:
        # no directory component: fall back to the basename itself
        return os.path.basename(text)
    return parent
413 413
@templatefilter('tabindent', intype=bytes)
def tabindent(text):
    """Any text. Returns the text, with every non-empty line
    except the first starting with a tab character.
    """
    # specialization of indent() with a literal tab prefix
    return indent(text, '\t')
420 420
@templatefilter('upper', intype=bytes)
def upper(text):
    """Any text. Converts the text to uppercase."""
    # encoding.upper() handles the local character encoding
    return encoding.upper(text)
425 425
@templatefilter('urlescape', intype=bytes)
def urlescape(text):
    """Any text. Escapes all "special" characters. For example,
    "foo bar" becomes "foo%20bar".
    """
    # default quote(): only alphanumerics and '_.-~/' stay literal
    return urlreq.quote(text)
432 432
@templatefilter('user', intype=bytes)
def userfilter(text):
    """Any text. Returns a short representation of a user name or email
    address."""
    # registered as 'user'; delegates to stringutil.shortuser()
    return stringutil.shortuser(text)
438 438
@templatefilter('emailuser', intype=bytes)
def emailuser(text):
    """Any text. Returns the user portion of an email address."""
    # thin delegate around stringutil.emailuser()
    return stringutil.emailuser(text)
443 443
@templatefilter('utf8', intype=bytes)
def utf8(text):
    """Any text. Converts from the local character encoding to UTF-8."""
    # encoding.fromlocal() converts local-encoding bytes to UTF-8 bytes
    return encoding.fromlocal(text)
448 448
@templatefilter('xmlescape', intype=bytes)
def xmlescape(text):
    # Table-driven escaping; order matters: '&' must be replaced first
    # so entity ampersands are not double-escaped.
    for raw, entity in (('&', '&amp;'),
                        ('<', '&lt;'),
                        ('>', '&gt;'),
                        ('"', '&quot;'),
                        ("'", '&#39;')):  # &apos; invalid in HTML
        text = text.replace(raw, entity)
    # finally blank out control characters that XML forbids
    return re.sub('[\x00-\x08\x0B\x0C\x0E-\x1F]', ' ', text)
458 458
def websub(text, websubtable):
    """:websub: Any text. Only applies to hgweb. Applies the regular
    expression replacements defined in the websub section.
    """
    # no table (or an empty/None one): text passes through untouched
    if not websubtable:
        return text
    # apply each substitution in table order; later rules see the
    # output of earlier ones
    for pattern, replacement in websubtable:
        text = pattern.sub(replacement, text)
    return text
467 467
def loadfilter(ui, extname, registrarobj):
    """Load template filter from specified registrarobj
    """
    # copy every filter registered on the registrar into the
    # module-level filters table
    table = registrarobj._table
    for name in table:
        filters[name] = table[name]
473 473
# tell hggettext to extract docstrings from these functions:
# (filter docstrings double as user-visible, translatable help text)
i18nfunctions = filters.values()
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
General Comments 0
You need to be logged in to leave comments. Login now